List of usage examples for java.lang Integer equals
public boolean equals(Object obj)
From source file:com.abiquo.api.services.cloud.VirtualMachineService.java
/**
 * Validates the given object with links to NICs and returns the referenced list of
 * {@link IpPoolManagement}.
 *
 * @param vdc The expected virtual datacenter to be found in each link.
 * @param dto The transport object whose {@code PRIVATE_IP} links reference the NICs.
 * @return The list of {@link IpPoolManagement} referenced by the links.
 * @throws BadRequestException If a link is malformed or points to another virtual datacenter.
 */
public List<IpPoolManagement> getNICsFromDto(final VirtualDatacenter vdc,
    final SingleResourceTransportDto dto)
{
    List<IpPoolManagement> ips = new LinkedList<IpPoolManagement>();

    // Validate and load each IP from the link list.
    for (RESTLink link : dto.searchLinks(PrivateNetworkResource.PRIVATE_IP))
    {
        // Parse the URI with the expected parameters and extract the identifier values.
        String buildPath = buildPath(VirtualDatacentersResource.VIRTUAL_DATACENTERS_PATH,
            VirtualDatacenterResource.VIRTUAL_DATACENTER_PARAM,
            PrivateNetworksResource.PRIVATE_NETWORKS_PATH,
            PrivateNetworkResource.PRIVATE_NETWORK_PARAM, IpAddressesResource.IP_ADDRESSES,
            IpAddressesResource.IP_ADDRESS_PARAM);
        MultivaluedMap<String, String> ipsValues =
            URIResolver.resolveFromURI(buildPath, link.getHref());

        // URI needs to have an identifier to a VDC, another one to a private network
        // and another one to a private IP.
        if (ipsValues == null
            || !ipsValues.containsKey(VirtualDatacenterResource.VIRTUAL_DATACENTER)
            || !ipsValues.containsKey(PrivateNetworkResource.PRIVATE_NETWORK)
            || !ipsValues.containsKey(IpAddressesResource.IP_ADDRESS))
        {
            throw new BadRequestException(APIError.VLANS_PRIVATE_IP_INVALID_LINK);
        }

        // Private IP must belong to the same virtual datacenter the virtual machine belongs to.
        // A non-numeric identifier is as invalid as a missing one, so surface it as a
        // bad request instead of letting the raw NumberFormatException escape.
        Integer idVdc;
        try
        {
            idVdc =
                Integer.parseInt(ipsValues.getFirst(VirtualDatacenterResource.VIRTUAL_DATACENTER));
        }
        catch (NumberFormatException e)
        {
            throw new BadRequestException(APIError.VLANS_PRIVATE_IP_INVALID_LINK);
        }
        if (!idVdc.equals(vdc.getId()))
        {
            throw new BadRequestException(APIError.VLANS_IP_LINK_INVALID_VDC);
        }

        // Extract the vlanId and ipId values to execute the association.
        Integer vlanId;
        Integer ipId;
        try
        {
            vlanId = Integer.parseInt(ipsValues.getFirst(PrivateNetworkResource.PRIVATE_NETWORK));
            ipId = Integer.parseInt(ipsValues.getFirst(IpAddressesResource.IP_ADDRESS));
        }
        catch (NumberFormatException e)
        {
            throw new BadRequestException(APIError.VLANS_PRIVATE_IP_INVALID_LINK);
        }

        VLANNetwork vlan = vdcRep.findVlanByVirtualDatacenterId(vdc, vlanId);
        if (vlan == null)
        {
            // Collect the error and keep validating the remaining links.
            String errorCode = APIError.VLANS_NON_EXISTENT_VIRTUAL_NETWORK.getCode();
            String message =
                APIError.VLANS_NON_EXISTENT_VIRTUAL_NETWORK.getMessage() + ": Vlan id " + vlanId;
            CommonError error = new CommonError(errorCode, message);
            addNotFoundErrors(error);
            continue;
        }

        IpPoolManagement ip = vdcRep.findIp(vlan, ipId);
        if (ip == null)
        {
            String errorCode = APIError.NON_EXISTENT_IP.getCode();
            String message = APIError.NON_EXISTENT_IP.getMessage() + ": Vlan id " + vlan.getId();
            CommonError error = new CommonError(errorCode, message);
            addNotFoundErrors(error);
            continue;
        }

        ips.add(ip);
    }

    // Throw the exception with all the IPs we have not found.
    flushErrors();

    return ips;
}
From source file:com.abiquo.api.services.cloud.VirtualMachineService.java
/** * Provides a standard method to allocate a resource and check if its already allocated. * /*from w w w. ja va 2 s .c o m*/ * @param vm {@link VirtualMachine} virtual machine where the resource will be allocated. * @param vapp {@link VirtualAppliance} virtual appiance where the resource will be allocated. * @param resource resource to allocate * @param attachOrder the number of allocation order for this resource. * @return true if the resource has been allocated, false if it was previously allocated. */ protected boolean allocateResource(final VirtualMachine vm, final VirtualAppliance vapp, final RasdManagement resource, final Integer attachOrder) { if (resource.isAttached()) { // FIXME BE AWARE OF IT: // the provided vm sometimes have ID (came form DDBB) and sometimes havent ID // (createBackup) but have the TemporalID. So it is not always called with the same type // of parameter. final Integer currentId = resource.getVirtualMachine().getId() != null ? resource.getVirtualMachine().getId() : resource.getVirtualMachine().getTemporal(); if (!currentId.equals(vm.getId())) { addConflictErrors(APIError.RESOURCE_ALREADY_ASSIGNED_TO_A_VIRTUAL_MACHINE); flushErrors(); } return false; } if (resource.getVirtualMachine() != null && resource.getVirtualMachine().getTemporal() != null) { if (!resource.getVirtualMachine().getTemporal().equals(vm.getId())) { addConflictErrors(APIError.RESOURCE_ALREADY_ASSIGNED_TO_A_VIRTUAL_MACHINE); flushErrors(); } // else do nothing, the resource is already asigned to this virtual machine. return false; } else { resource.attach(attachOrder, vm, vapp); return true; } }
From source file:com.aurel.track.item.history.HistorySaverBL.java
/** * Builds the trail text for history//from w w w .j a va2 s . co m * @param fieldChanges Map with FieldChange values * @param longFields the fields with longer texts (description, comment). This will be added at the end of the trail text * @param locale * @param isNew whether creating a new issue (isCreate || isCopy) or editing an existing one * @param newLineString * * @return */ private static boolean persistHistory(SortedMap<Integer, FieldChange> fieldChanges, AfterItemSaveEventParam afterItemSaveEventParam, Integer personID, List<Integer> longFields, Locale locale, boolean isCreate, boolean isCopy, Integer fieldChangeID) { SortedMap<Integer, FieldChange> historyLongTextMap = new TreeMap<Integer, FieldChange>(); //maintain order TWorkItemBean workItemBeanNew = afterItemSaveEventParam.getWorkItemNew(); TWorkItemBean workItemBeanOld = afterItemSaveEventParam.getWorkItemOld(); boolean needHistoryTransaction = false; if (isCreate || isCopy) { //need first status in history needHistoryTransaction = true; } Map<Integer, TFieldChangeBean> lastHistoryFieldChangesMap = null; if (!needHistoryTransaction && fieldChanges != null) { //gather the fields with explicit history List<Integer> explicitHistoryFields = new LinkedList<Integer>(); int minutes = GeneralSettings.getHistoryAndEmailDelay(); for (FieldChange fieldChange : fieldChanges.values()) { if (fieldChange.isChanged()) { needHistoryTransaction = true; if (minutes == 0 || minutes < 0) { //no need to handle recent history changes break; } Integer fieldID = fieldChange.getFieldID(); if (fieldChange.isExplicitHistory() && !SystemFields.INTEGER_STATE.equals(fieldID) && !SystemFields.INTEGER_COMMENT.equals(fieldID)) { //the status field although is hardcoded to have explicit history but me make it exception from rule. 
//(the status change will be added to the history even the last status change happened within x minutes) //the comment should be added in the history anyway explicitHistoryFields.add(fieldChange.getFieldID()); } } } if (!explicitHistoryFields.isEmpty()) { Date targetTime = new Date(); //now targetTime = DateUtils.addMinutes(targetTime, -minutes); Map<Integer, THistoryTransactionBean> lastHistoryTransactionsMap = GeneralUtils.createMapFromList( HistoryTransactionBL.loadByItemAndFieldsSince(workItemBeanNew.getObjectID(), explicitHistoryFields, targetTime)); List<TFieldChangeBean> lastFieldChanges = FieldChangeBL .loadByItemAndFieldsSince(workItemBeanNew.getObjectID(), explicitHistoryFields, targetTime); lastHistoryFieldChangesMap = new HashMap<Integer, TFieldChangeBean>(); for (TFieldChangeBean fieldChangeBean : lastFieldChanges) { Integer transactionID = fieldChangeBean.getHistoryTransaction(); Integer fieldID = fieldChangeBean.getFieldKey(); THistoryTransactionBean historyTransactionBean = lastHistoryTransactionsMap.get(transactionID); if (historyTransactionBean != null) { //only the first found Integer changedByPersonID = historyTransactionBean.getChangedByID(); if (personID.equals(changedByPersonID) && lastHistoryFieldChangesMap.get(fieldID) == null) { lastHistoryFieldChangesMap.put(fieldID, fieldChangeBean); } explicitHistoryFields.remove(fieldID); if (explicitHistoryFields.isEmpty()) { break; } } } } } boolean mightTriggerEmail = false; if (!needHistoryTransaction) { return false; } //Integer historyTransactionID = HistoryTransactionBL.saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null); if (isCreate || isCopy) { //add the first status change history entry if not deep copy if (!workItemBeanNew.isDeepCopy()) { //with deep copy the status changes will be copied also no need for first status change in history //set null for workItemBeanOld parameter (by create is null anyway) because otherwise the //values are the same and will not be 
saved Integer statusTransactionID = HistoryTransactionBL .saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null); saveExplicitField(SystemFields.INTEGER_STATE, statusTransactionID, workItemBeanNew, null, null); } mightTriggerEmail = true; } StringBuilder compoundTextNewBuffer = new StringBuilder(); StringBuilder compoundTextOldBuffer = new StringBuilder(); if (isCopy) { Object[] msgArguments = null; String messageKey = null; if (ApplicationBean.getInstance().getSiteBean().getProjectSpecificIDsOn()) { String projectSpecificID = SystemProjectSpecificIssueNoRT .getShowValue(workItemBeanOld.getIDNumber(), workItemBeanOld); msgArguments = new Object[] { projectSpecificID }; messageKey = "item.history.copyMessageProjectSpecificID"; } else { msgArguments = new Object[] { workItemBeanOld.getObjectID() }; messageKey = "item.history.copyMessage"; } compoundTextNewBuffer.append(LocalizeUtil.getParametrizedString(messageKey, msgArguments, locale)); } Set<Integer> attachmentHistoryFields = HistoryLoaderBL.getAttachmentHistoryFields(); Integer historyTransactionID = null; for (Map.Entry<Integer, FieldChange> entry : fieldChanges.entrySet()) { FieldChange fieldChange = (FieldChange) entry.getValue(); Integer fieldID = fieldChange.getFieldID(); String fieldLabel = fieldChange.getLocalizedFieldLabel(); String newValue = fieldChange.getNewShowValue(); String oldValue = fieldChange.getOldShowValue(); //For history text we are interested in: //1. all field changes for existing issues //2. 
"Comment" for new issues //For that the fieldChange.isChanged() should be set accordingly already if (!fieldChange.isChanged()) { continue; } if (attachmentHistoryFields.contains(fieldID)) { Integer attachmentChangeTransactionID = HistoryTransactionBL .saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null); insertFieldChange(attachmentChangeTransactionID, fieldID, newValue, oldValue); mightTriggerEmail = true; continue; } if (fieldChange.isExplicitHistory() || SystemFields.INTEGER_COMMENT.equals(fieldID)) { TFieldChangeBean fieldChangeBean = null; boolean isCommentChange = false; if (fieldChangeID == null) { if (lastHistoryFieldChangesMap != null) { fieldChangeBean = lastHistoryFieldChangesMap.get(fieldID); } if (fieldChangeBean == null) { //no previous entry within x minutes mightTriggerEmail = true; } } else { isCommentChange = true; fieldChangeBean = FieldChangeBL.loadByPrimaryKey(fieldChangeID); mightTriggerEmail = true; } if (historyTransactionID == null && !isCommentChange) { historyTransactionID = HistoryTransactionBL .saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null); } saveExplicitField(fieldID, historyTransactionID, workItemBeanNew, workItemBeanOld, fieldChangeBean); //the comment is saved anyway explicitly in the history as Comment field //even if explicit history is not configured. //Explicit history for comment means whether to historize the comment changes (edit and delete). 
//The field set into the workitemContext is COMMENT also for edit and delete comment //(instead of COMMENT_DELETE_HISTORY_FIELD or COMMENT_MODIFY_HISTORY_FIELD) comment because //we need the explicit history flag which is set only for COMMENT field (the other two are only pseudo fields) if (fieldChange.isExplicitHistory() && SystemFields.INTEGER_COMMENT.equals(fieldID)) { if (oldValue != null && !"".equals(oldValue)) { //history only if the comment is edited or deleted Integer commentChangeTransactionID = HistoryTransactionBL .saveHistoryTransaction(workItemBeanNew.getObjectID(), personID, new Date(), null); if (newValue == null || "".equals(newValue)) { insertFieldChange(commentChangeTransactionID, SystemFields.COMMENT_DELETE_HISTORY_FIELD, newValue, oldValue); } else { insertFieldChange(commentChangeTransactionID, SystemFields.COMMENT_MODIFY_HISTORY_FIELD, newValue, oldValue); } } } } else { //fields without explicit history if (longFields.contains(fieldID)) { //gather the changed long fields to add them at the end historyLongTextMap.put(fieldID, fieldChange); mightTriggerEmail = true; } else { if (newValue != null && !"".equals(newValue)) { if (compoundTextNewBuffer.length() > 0) { //some content already present compoundTextNewBuffer.append(commonFieldsSeparator + lineBreak); } compoundTextNewBuffer.append(fieldLabel + fieldLabelSeparator + newValue); mightTriggerEmail = true; } if (oldValue != null && !"".equals(oldValue)) { if (compoundTextOldBuffer.length() > 0) { //some content already present compoundTextOldBuffer.append(commonFieldsSeparator + lineBreak); } compoundTextOldBuffer.append(fieldLabel + fieldLabelSeparator + oldValue); mightTriggerEmail = true; } } } } //add the longText changes at the end //add the commonFieldsSeparator only after the last short field //after long fields (HTML text) it does not make sense (for ex. 
after a <p>) boolean firstLongField = true; for (Map.Entry<Integer, FieldChange> entry : historyLongTextMap.entrySet()) { FieldChange fieldChange = entry.getValue(); if (fieldChange != null) { if (compoundTextNewBuffer.length() > 0) { //some content already present if (firstLongField) { compoundTextNewBuffer.append(commonFieldsSeparator + lineBreak); } else { compoundTextNewBuffer.append(lineBreak); } } if (compoundTextOldBuffer.length() > 0) { //some content already present if (firstLongField) { compoundTextOldBuffer.append(commonFieldsSeparator + lineBreak); } else { compoundTextOldBuffer.append(lineBreak); } } firstLongField = false; String fieldLabel = fieldChange.getLocalizedFieldLabel(); String newShowValue = fieldChange.getNewShowValue(); if (newShowValue != null && !"".equals(newShowValue)) { compoundTextNewBuffer.append(fieldLabel + fieldLabelSeparator + newShowValue); } String oldShowValue = fieldChange.getOldShowValue(); if (oldShowValue != null && !"".equals(oldShowValue)) { compoundTextOldBuffer.append(fieldLabel + fieldLabelSeparator + oldShowValue); } } } saveCompoundField(historyTransactionID, workItemBeanNew.getObjectID(), personID, compoundTextNewBuffer.toString(), compoundTextOldBuffer.toString()); return mightTriggerEmail; }
From source file:com.seajas.search.attender.service.attender.dao.ProfileDAO.java
/** * Find all notifiable subscribers from the given profile. * /* w w w .java2 s.c om*/ * @param calendar * @param profile * @return List<ProfileSubscriber> */ public List<ProfileSubscriber> findNotifiableSubscribers(final Calendar calendar, final List<ProfileSubscriber> subscribers) { if (!calendar.getTimeZone().getID().equals("UTC")) throw new IllegalArgumentException( "The given calendar must have a UTC timezone (has " + calendar.getTimeZone().getID() + ")"); // Calculate the current time in UTC Integer currentHour = calendar.get(Calendar.HOUR_OF_DAY), currentMinute = calendar.get(Calendar.MINUTE), currentDay = calendar.get(Calendar.DAY_OF_WEEK); Date currentTime = new Date(calendar.getTimeInMillis()); // Return a list of all applicable subscribers for this profile List<ProfileSubscriber> result = new ArrayList<ProfileSubscriber>(); // Find all active profiles' confirmed subscribers whose notification preference fall within the current day and whose time // values are greater than or equal to the current hour / minute; then take 7 days for weekly updates, 1 for daily updates, // and 1 for weekdays where Saturday / Sunday are excluded for (ProfileSubscriber subscriber : subscribers) { Integer subscriberDay = subscriber.getNotificationDay(), subscriberHour = subscriber.getNotificationHour(), subscriberMinute = subscriber.getNotificationMinute(); Date lastNotification = subscriber.getLastNotification(); NotificationType subscriberType = subscriber.getNotificationType(); // Correct for the user-specific timezone TimeZone timeZone = TimeZone.getTimeZone(subscriber.getTimeZone()); // Take leap seconds into account, although we're not so precise to really care if (subscriberMinute != null) { subscriberMinute -= (int) ((timeZone.getOffset(calendar.getTimeInMillis()) % MILLISECONDS_PER_HOUR) / MILLISECONDS_PER_MINUTE); if (subscriberMinute < 0) { subscriberMinute += 60; subscriberHour--; } else if (subscriberMinute > 59) { subscriberMinute -= 60; subscriberHour++; } } 
if (subscriberHour != null) { subscriberHour -= (int) (timeZone.getOffset(calendar.getTimeInMillis()) / MILLISECONDS_PER_HOUR); if (subscriberHour < 0) { subscriberHour += 24; if (subscriberDay != null) subscriberDay--; } else if (subscriberHour > 23) { subscriberHour -= 24; if (subscriberDay != null) subscriberDay++; } } if (subscriberDay == -1) subscriberDay = 6; else if (subscriberDay == 7) subscriberDay = 0; // Now determine the actual applicability (last_notification is stored as a UTC date, so does not need to be converted) if (subscriber.getIsConfirmed() && ((subscriberType.equals(NotificationType.Weekly) && subscriberDay.equals(currentDay) && subscriberHour <= currentHour && subscriberMinute <= currentMinute && dateDifference(lastNotification, currentTime) >= 6) || (subscriberType.equals(NotificationType.Weekdays) && currentDay != Calendar.SATURDAY && currentDay != Calendar.SUNDAY && subscriberHour <= currentHour && subscriberMinute <= currentMinute && !DateUtils.isSameDay(lastNotification, currentTime)) || (subscriberType.equals(NotificationType.Daily) && subscriberHour <= currentHour && subscriberMinute <= currentMinute && !DateUtils.isSameDay(lastNotification, currentTime)))) { if (logger.isInfoEnabled()) logger.info("Adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' to be notified"); result.add(subscriber); } else if (logger.isDebugEnabled()) { if (!subscriber.getIsConfirmed()) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - not confirmed"); else if (subscriberType.equals(NotificationType.Direct)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - direct notifications are handled by search-enricher instances"); else if (subscriberType.equals(NotificationType.Weekly)) { if (!subscriberDay.equals(currentDay)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + 
"' with e-mail '" + subscriber.getEmail() + "' - weekly notification does not fall on current day"); if (!(subscriberHour <= currentHour && subscriberMinute <= currentMinute)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - weekly notification falls outside of current time"); if (!(dateDifference(lastNotification, currentTime) >= 6)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - weekly notification difference (in days) smaller than 7"); } else if (subscriberType.equals(NotificationType.Weekdays)) { if (!(currentDay != Calendar.SATURDAY && currentDay != Calendar.SUNDAY)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - weekday notification does not fall on weekday"); if (!(subscriberHour <= currentHour && subscriberMinute <= currentMinute)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - weekday notification falls outside of current time"); if (DateUtils.isSameDay(lastNotification, currentTime)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - weekday notification difference indicates a notification has already gone out today"); } else if (subscriberType.equals(NotificationType.Daily)) { if (!(subscriberHour <= currentHour && subscriberMinute <= currentMinute)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - daily notification falls outside of current time"); if (DateUtils.isSameDay(lastNotification, currentTime)) logger.debug("Not adding subscriber '" + subscriber.getUniqueId() + "' with e-mail '" + subscriber.getEmail() + "' - daily notification difference indicates a notification has already gone out today"); } } } return result; }
From source file:com.aurel.track.persist.TWorkItemPeer.java
/** * Sets the wbs on the affected workItems after a drag and drop operation * @param draggedWorkItemID//from w ww .j av a 2 s . co m * @param droppedToWorkItemID * @param before */ @Override public synchronized void dropNearWorkItem(Integer draggedWorkItemID, Integer droppedToWorkItemID, boolean before) { TWorkItemBean workItemBeanDragged = null; try { workItemBeanDragged = loadByPrimaryKey(draggedWorkItemID); } catch (ItemLoaderException e1) { LOGGER.warn("The fromWorkItemID " + draggedWorkItemID + " does not exist"); return; } TWorkItemBean workItemBeanDroppedTo = null; try { workItemBeanDroppedTo = loadByPrimaryKey(droppedToWorkItemID); } catch (ItemLoaderException e1) { LOGGER.warn("The toWorkItemID " + droppedToWorkItemID + " does not exist"); return; } Integer projectID = workItemBeanDragged.getProjectID(); if (projectID == null) { LOGGER.warn("No project found for " + draggedWorkItemID + " does not exist"); return; } if (workItemBeanDragged.getWBSOnLevel() == null || workItemBeanDroppedTo.getWBSOnLevel() == null) { setWbs(projectID); try { //load them again this time with wbs numbers set workItemBeanDragged = loadByPrimaryKey(draggedWorkItemID); workItemBeanDroppedTo = loadByPrimaryKey(droppedToWorkItemID); } catch (ItemLoaderException e) { } } if (!projectID.equals(workItemBeanDroppedTo.getProjectID())) { LOGGER.debug("The drop target is not from the same project: abort drop"); return; } Integer parentOfDraggedWorkItem = workItemBeanDragged.getSuperiorworkitem(); Integer parentOfDroppedToWorkItem = workItemBeanDroppedTo.getSuperiorworkitem(); Integer draggedSortOrder = workItemBeanDragged.getWBSOnLevel(); Integer droppedToSortOrder = workItemBeanDroppedTo.getWBSOnLevel(); if (draggedSortOrder == null || droppedToSortOrder == null) { LOGGER.warn("The draggedSortOrder " + draggedSortOrder + " droppedToSortOrder " + droppedToSortOrder); return; } String sqlStmt = null; String droppedCriteria = ""; String draggedCriteria = ""; Integer workItemSortOrder; 
boolean parentsNull = parentOfDraggedWorkItem == null && parentOfDroppedToWorkItem == null; boolean parentsNotNull = parentOfDraggedWorkItem != null && parentOfDroppedToWorkItem != null; if (parentsNull || (parentsNotNull && parentOfDraggedWorkItem != null && parentOfDraggedWorkItem.equals(parentOfDroppedToWorkItem))) { if (draggedSortOrder.equals(droppedToSortOrder)) { //on the same level the same sortorder, not a real move, do nothing LOGGER.debug("The draggedSortOrder " + draggedSortOrder + " equals droppedToSortOrder " + droppedToSortOrder); return; } String parentCriteria = ""; //same level: both parent null or same parent if (parentsNotNull) { parentCriteria = " AND SUPERIORWORKITEM = " + parentOfDraggedWorkItem; } else { parentCriteria = " AND SUPERIORWORKITEM IS NULL "; } int inc = 0; if (draggedSortOrder > droppedToSortOrder) { inc = 1; if (before) { droppedCriteria = " AND WBSONLEVEL >= " + droppedToSortOrder; draggedCriteria = " AND WBSONLEVEL < " + draggedSortOrder; workItemSortOrder = droppedToSortOrder; } else { droppedCriteria = " AND WBSONLEVEL > " + droppedToSortOrder; draggedCriteria = " AND WBSONLEVEL < " + draggedSortOrder; workItemSortOrder = droppedToSortOrder + 1; } } else { inc = -1; if (before) { droppedCriteria = " AND WBSONLEVEL < " + droppedToSortOrder; draggedCriteria = " AND WBSONLEVEL > " + draggedSortOrder; workItemSortOrder = droppedToSortOrder - 1; } else { droppedCriteria = " AND WBSONLEVEL <= " + droppedToSortOrder; draggedCriteria = " AND WBSONLEVEL > " + draggedSortOrder; workItemSortOrder = droppedToSortOrder; } } sqlStmt = "UPDATE TWORKITEM SET WBSONLEVEL = WBSONLEVEL " + " + " + inc + " WHERE " + " PROJECTKEY = " + projectID + parentCriteria + draggedCriteria + droppedCriteria; executeStatemant(sqlStmt); } else { if (EqualUtils.equal(draggedWorkItemID, parentOfDroppedToWorkItem)) { LOGGER.warn("The WBS change would cause the issue " + draggedWorkItemID + " to became parent of itself"); //avoid same parentID as issueID 
return; } if (ItemBL.isAscendant(draggedWorkItemID, parentOfDroppedToWorkItem)) { LOGGER.warn("The chosen parent " + parentOfDroppedToWorkItem + " is already a descendant of the " + draggedWorkItemID); return; } //different levels //1. remove the dragged item from the original position and shift the following items up shiftBranch(draggedSortOrder, parentOfDraggedWorkItem, projectID, false); //2. shift down the actual items in the new place to make space for the dragged item to the new position String parentCriteriaOfDroppedItem = ""; if (parentOfDroppedToWorkItem != null) { parentCriteriaOfDroppedItem = " AND SUPERIORWORKITEM = " + parentOfDroppedToWorkItem; } else { parentCriteriaOfDroppedItem = " AND SUPERIORWORKITEM IS NULL "; } if (before) { droppedCriteria = " AND WBSONLEVEL >= " + droppedToSortOrder; workItemSortOrder = droppedToSortOrder; } else { droppedCriteria = " AND WBSONLEVEL > " + droppedToSortOrder; workItemSortOrder = droppedToSortOrder + 1; } sqlStmt = "UPDATE TWORKITEM SET WBSONLEVEL = WBSONLEVEL" + "+1 " + " WHERE " + " PROJECTKEY = " + projectID + parentCriteriaOfDroppedItem + droppedCriteria; executeStatemant(sqlStmt); workItemBeanDragged.setSuperiorworkitem(parentOfDroppedToWorkItem); } workItemBeanDragged.setWBSOnLevel(workItemSortOrder); try { saveSimple(workItemBeanDragged); } catch (ItemPersisterException e) { LOGGER.error("Saving the new droppedToSortOrder " + droppedToSortOrder + " for workItemID " + draggedWorkItemID + failedWith + e.getMessage(), e); } }
From source file:io.pyd.synchro.SyncJob.java
/**
 * Applies a batch of computed synchronization changes, in three passes: first all
 * content/mkdir tasks, then moves, then deletes (moves and deletes are queued during
 * the first pass and applied at the end).
 *
 * Each entry of {@code changes} maps a relative path to an {@code Object[]} of the form
 * {@code [taskType (Integer), node (Node), status, (optional) destination Node]} —
 * assumed layout based on the indices read here; confirm against the producer of this map.
 *
 * @param changes  map of path -&gt; change descriptor to apply
 * @param monitor  optional progress monitor (may be null)
 * @param taskType used to build the monitor task name
 * @return the map of changes that could NOT be applied (errors, interruptions,
 *         unresolved conflicts), keyed by path
 */
protected Map<String, Object[]> applyChanges(Map<String, Object[]> changes, IProgressMonitor monitor,
        MonitorTaskType taskType) {
    Set<Entry<String, Object[]>> changesEntrySet = changes.entrySet();
    Iterator<Map.Entry<String, Object[]>> it = changesEntrySet.iterator();
    // Changes that fail or are skipped are collected here and returned to the caller.
    Map<String, Object[]> notApplied = createMapDBFile("notApplied");
    // Make sure to apply those one at the end
    Map<String, Object[]> moves = createMapDBFile("moves");
    Map<String, Object[]> deletes = createMapDBFile("deletes");
    RestRequest rest = this.getRequest();
    int total = changes.size();
    int work = 0;
    if (monitor != null) {
        monitor.begin(currentJobNodeID, getMonitorTaskName(taskType));
    }
    // PASS 1: apply content and mkdir tasks; queue moves and deletes for later passes.
    while (it.hasNext()) {
        notifyProgressMonitor(monitor, total, work++);
        Map.Entry<String, Object[]> entry = it.next();
        String k = entry.getKey();
        // Clone so status mutations below do not alter the stored descriptor.
        Object[] value = entry.getValue().clone();
        Integer v = (Integer) value[0];
        Node n = (Node) value[1];
        if (n == null)
            continue;
        if (this.interruptRequired) {
            // Mark remaining work as interrupted so it can be retried next run.
            value[2] = STATUS_INTERRUPTED;
            notApplied.put(k, value);
            continue;
        }
        try {
            Map<String, Node> tmpNodes = findNodesInTmpSnapshot(k);
            // A solved conflict on a leaf is rewritten into the concrete task that
            // realizes the chosen resolution.
            if (n.isLeaf() && value[2].equals(STATUS_CONFLICT_SOLVED)) {
                if (v.equals(TASK_SOLVE_KEEP_MINE)) {
                    v = TASK_REMOTE_PUT_CONTENT;
                } else if (v.equals(TASK_SOLVE_KEEP_THEIR)) {
                    v = TASK_LOCAL_GET_CONTENT;
                } else if (v.equals(TASK_SOLVE_KEEP_BOTH)) {
                    // COPY LOCAL FILE AND GET REMOTE COPY
                    // Local version is preserved under "<name>.mine", then remote is fetched.
                    File origFile = new File(currentLocalFolder, k);
                    File targetFile = new File(currentLocalFolder, k + ".mine");
                    InputStream in = new FileInputStream(origFile);
                    OutputStream out = new FileOutputStream(targetFile);
                    byte[] buf = new byte[1024];
                    int len;
                    while ((len = in.read(buf)) > 0) {
                        out.write(buf, 0, len);
                    }
                    in.close();
                    out.close();
                    v = TASK_LOCAL_GET_CONTENT;
                }
            }
            if (v == TASK_LOCAL_GET_CONTENT) {
                // Download remote content to the local folder (skipped in upload-only mode).
                if (direction.equals("up"))
                    continue;
                if (tmpNodes.get("local") != null && tmpNodes.get("remote") != null) {
                    if (tmpNodes.get("local").getPropertyValue("md5") == null) {
                        updateLocalMD5(tmpNodes.get("local"));
                    }
                    // Identical md5 on both sides: nothing to transfer.
                    if (tmpNodes.get("local").getPropertyValue("md5") != null && tmpNodes.get("local")
                            .getPropertyValue("md5").equals(tmpNodes.get("remote").getPropertyValue("md5"))) {
                        continue;
                    }
                }
                Node node = new Node(Node.NODE_TYPE_ENTRY, "", null);
                node.setPath(k);
                File targetFile = new File(currentLocalFolder, k);
                this.logChange(getMessage("job_log_downloading"), k);
                try {
                    this.updateNode(node, targetFile, n);
                } catch (IllegalStateException e) {
                    // If the file no longer exists remotely, silently skip; otherwise rethrow.
                    if (this.statRemoteFile(node, "file", rest) == null)
                        continue;
                    else
                        throw e;
                }
                // Verify the downloaded size against the advertised remote byte size.
                if (!targetFile.exists()
                        || targetFile.length() != Integer.parseInt(n.getPropertyValue("bytesize"))) {
                    JSONObject obj = this.statRemoteFile(node, "file", rest);
                    if (obj == null || obj.get("size").equals(0))
                        continue;
                    else
                        throw new Exception("Error while downloading file from server");
                }
                if (n != null) {
                    targetFile.setLastModified(n.getLastModified().getTime());
                }
                countFilesDownloaded++;
            } else if (v == TASK_LOCAL_MKDIR) {
                if (direction.equals("up"))
                    continue;
                File f = new File(currentLocalFolder, k);
                if (!f.exists()) {
                    this.logChange(getMessage("job_log_mkdir"), k);
                    boolean res = f.mkdirs();
                    if (!res) {
                        throw new Exception("Error while creating local folder");
                    }
                    countResourcesSynchronized++;
                }
            } else if (v == TASK_LOCAL_REMOVE) {
                if (direction.equals("up"))
                    continue;
                // Deletions are deferred to the last pass.
                deletes.put(k, value);
            } else if (v == TASK_REMOTE_REMOVE) {
                if (direction.equals("down"))
                    continue;
                deletes.put(k, value);
            } else if (v == TASK_REMOTE_MKDIR) {
                if (direction.equals("down"))
                    continue;
                this.logChange(getMessage("job_log_mkdir_remote"), k);
                Node currentDirectory = new Node(Node.NODE_TYPE_ENTRY, "", null);
                int lastSlash = k.lastIndexOf("/");
                currentDirectory.setPath(k.substring(0, lastSlash));
                RestStateHolder.getInstance().setDirectory(currentDirectory);
                rest.getStatusCodeForRequest(AjxpAPI.getInstance().getMkdirUri(k.substring(lastSlash + 1)));
                // Stat the path afterwards; a missing "mtime" means creation failed.
                JSONObject object = rest.getJSonContent(AjxpAPI.getInstance().getStatUri(k));
                if (!object.has("mtime")) {
                    throw new Exception("Could not create remote folder");
                }
                countResourcesSynchronized++;
            } else if (v == TASK_REMOTE_PUT_CONTENT) {
                // Upload local content to the server (skipped in download-only mode).
                if (direction.equals("down"))
                    continue;
                if (tmpNodes.get("local") != null && tmpNodes.get("remote") != null) {
                    if (tmpNodes.get("local").getPropertyValue("md5") == null) {
                        updateLocalMD5(tmpNodes.get("local"));
                    }
                    if (tmpNodes.get("local").getPropertyValue("md5") != null && tmpNodes.get("local")
                            .getPropertyValue("md5").equals(tmpNodes.get("remote").getPropertyValue("md5"))) {
                        continue;
                    }
                }
                this.logChange(getMessage("job_log_uploading"), k);
                Node currentDirectory = new Node(Node.NODE_TYPE_ENTRY, "", null);
                int lastSlash = k.lastIndexOf("/");
                currentDirectory.setPath(k.substring(0, lastSlash));
                RestStateHolder.getInstance().setDirectory(currentDirectory);
                File sourceFile = new File(currentLocalFolder, k);
                if (!sourceFile.exists()) {
                    // Silently ignore, or it will continously try to
                    // reupload it.
                    continue;
                }
                boolean checked = false;
                if (sourceFile.length() == 0) {
                    // Empty files are created remotely via mkfile instead of an upload.
                    rest.getStringContent(AjxpAPI.getInstance().getMkfileUri(sourceFile.getName()));
                } else {
                    checked = this.synchronousUP(currentDirectory, sourceFile, n);
                }
                if (!checked) {
                    // Upload not verified inline: stat the remote file and compare sizes.
                    JSONObject object = null;
                    String path = n.getPath(true);
                    try {
                        object = rest.getJSonContent(AjxpAPI.getInstance().getStatUri(path));
                    } catch (Exception e) {
                        Logger.getRootLogger().error("Error during uploading file: " + path, e);
                        continue;
                    }
                    if (object != null
                            && (!object.has("size") || object.getInt("size") != (int) sourceFile.length())) {
                        throw new Exception("Could not upload file to the server");
                    }
                }
                countFilesUploaded++;
            } else if (v == TASK_DO_NOTHING && value[2] == STATUS_CONFLICT) {
                // Recheck that it's a real conflict?
                // Unresolved conflict: report it back through notApplied.
                this.logChange(getMessage("job_log_conflict"), k);
                notApplied.put(k, value);
                countConflictsDetected++;
            } else if (v == TASK_LOCAL_MOVE_FILE || v == TASK_REMOTE_MOVE_FILE) {
                if (v == TASK_LOCAL_MOVE_FILE && direction.equals("up"))
                    continue;
                if (v == TASK_REMOTE_MOVE_FILE && direction.equals("down"))
                    continue;
                // Moves are deferred to the second pass.
                moves.put(k, value);
            }
        } catch (FileNotFoundException ex) {
            addSyncDetailMessage(k, ex);
            ex.printStackTrace();
            countResourcesErrors++;
            // Do not put in the notApplied again, otherwise it will
            // indefinitely happen.
        } catch (Exception e) {
            addSyncDetailMessage(k, e);
            Logger.getRootLogger().error("Synchro", e);
            countResourcesErrors++;
            value[2] = STATUS_ERROR;
            notApplied.put(k, value);
        }
    }
    if (monitor != null) {
        monitor.end(currentJobNodeID);
        monitor.begin(currentJobNodeID, getMonitorTaskName(taskType) + " - "
                + getMonitorTaskName(MonitorTaskType.APPLY_CHANGES_MOVES));
    }
    // APPLY MOVES
    Set<Entry<String, Object[]>> movesEntrySet = moves.entrySet();
    Iterator<Map.Entry<String, Object[]>> mIt = movesEntrySet.iterator();
    total = moves.size();
    work = 0;
    while (mIt.hasNext()) {
        notifyProgressMonitor(monitor, total, work++);
        Map.Entry<String, Object[]> entry = mIt.next();
        String k = entry.getKey();
        Object[] value = entry.getValue().clone();
        Integer v = (Integer) value[0];
        Node n = (Node) value[1];
        if (this.interruptRequired) {
            value[2] = STATUS_INTERRUPTED;
            notApplied.put(k, value);
            continue;
        }
        try {
            // Move descriptors are expected to carry the destination Node at index 3.
            if (v == TASK_LOCAL_MOVE_FILE && value.length == 4) {
                this.logChange("Moving resource locally", k);
                Node dest = (Node) value[3];
                File origFile = new File(currentLocalFolder, n.getPath());
                if (!origFile.exists()) {
                    // Cannot move a non-existing file! Download instead!
                    value[0] = TASK_LOCAL_GET_CONTENT;
                    value[1] = dest;
                    value[2] = STATUS_TODO;
                    notApplied.put(dest.getPath(true), value);
                    continue;
                }
                File destFile = new File(currentLocalFolder, dest.getPath());
                // NOTE(review): renameTo's return value is ignored; failure is only caught
                // by the exists() check below.
                origFile.renameTo(destFile);
                if (!destFile.exists()) {
                    throw new Exception("Error while creating " + dest.getPath());
                }
                countResourcesSynchronized++;
            } else if (v == TASK_REMOTE_MOVE_FILE && value.length == 4) {
                this.logChange("Moving resource remotely", k);
                Node dest = (Node) value[3];
                JSONObject object = rest.getJSonContent(AjxpAPI.getInstance().getStatUri(n.getPath()));
                if (!object.has("size")) {
                    // Source vanished remotely: fall back to re-uploading the destination.
                    value[0] = TASK_REMOTE_PUT_CONTENT;
                    value[1] = dest;
                    value[2] = STATUS_TODO;
                    notApplied.put(dest.getPath(true), value);
                    continue;
                }
                rest.getStatusCodeForRequest(AjxpAPI.getInstance().getRenameUri(n, dest));
                object = rest.getJSonContent(AjxpAPI.getInstance().getStatUri(dest.getPath()));
                if (!object.has("size")) {
                    throw new Exception("Could not move remote file to " + dest.getPath());
                }
                countResourcesSynchronized++;
            }
        } catch (FileNotFoundException ex) {
            addSyncDetailMessage(k, ex);
            ex.printStackTrace();
            countResourcesErrors++;
            // Do not put in the notApplied again, otherwise it will
            // indefinitely happen.
        } catch (Exception e) {
            addSyncDetailMessage(k, e);
            Logger.getRootLogger().error("Synchro", e);
            countResourcesErrors++;
            value[2] = STATUS_ERROR;
            notApplied.put(k, value);
        }
    }
    // APPLY DELETES
    if (monitor != null) {
        monitor.end(currentJobNodeID);
        monitor.begin(currentJobNodeID, getMonitorTaskName(taskType) + " - "
                + getMonitorTaskName(MonitorTaskType.APPLY_CHANGES_DELETES));
    }
    Set<Entry<String, Object[]>> deletesEntrySet = deletes.entrySet();
    Iterator<Map.Entry<String, Object[]>> dIt = deletesEntrySet.iterator();
    total = deletes.size();
    work = 0;
    while (dIt.hasNext()) {
        notifyProgressMonitor(monitor, total, work++);
        Map.Entry<String, Object[]> entry = dIt.next();
        String k = entry.getKey();
        Object[] value = entry.getValue().clone();
        Integer v = (Integer) value[0];
        if (this.interruptRequired) {
            value[2] = STATUS_INTERRUPTED;
            notApplied.put(k, value);
            continue;
        }
        try {
            if (v == TASK_LOCAL_REMOVE) {
                this.logChange(getMessage("job_log_rmlocal"), k);
                File f = new File(currentLocalFolder, k);
                if (f.exists()) {
                    boolean res = f.delete();
                    if (!res) {
                        throw new Exception("Error while removing local resource: " + f.getPath());
                    }
                    countResourcesSynchronized++;
                }
            } else if (v == TASK_REMOTE_REMOVE) {
                this.logChange(getMessage("job_log_rmremote"), k);
                Node currentDirectory = new Node(Node.NODE_TYPE_ENTRY, "", null);
                int lastSlash = k.lastIndexOf("/");
                currentDirectory.setPath(k.substring(0, lastSlash));
                RestStateHolder.getInstance().setDirectory(currentDirectory);
                rest.getStatusCodeForRequest(AjxpAPI.getInstance().getDeleteUri(k));
                // Verify deletion: stat should no longer report an mtime.
                JSONObject object = rest.getJSonContent(AjxpAPI.getInstance().getStatUri(k));
                if (object.has("mtime")) {
                    // Still exists, should be empty!
                    throw new Exception("Could not remove the resource from the server");
                }
                countResourcesSynchronized++;
            }
        } catch (FileNotFoundException ex) {
            addSyncDetailMessage(k, ex);
            ex.printStackTrace();
            countResourcesErrors++;
            // Do not put in the notApplied again, otherwise it will
            // indefinitely happen.
        } catch (Exception e) {
            addSyncDetailMessage(k, e);
            Logger.getRootLogger().error("Synchro", e);
            countResourcesErrors++;
            value[2] = STATUS_ERROR;
            notApplied.put(k, value);
        }
    }
    if (monitor != null) {
        monitor.end(currentJobNodeID);
    }
    rest.release();
    return notApplied;
}
From source file:com.cloud.resource.ResourceManagerImpl.java
/**
 * Dispatches a resource lifecycle event to every listener registered for that event type.
 *
 * The event code selects which listener callback is invoked and how the variadic
 * {@code params} array is interpreted (the expected element types are fixed per event,
 * as cast below). Unknown event codes abort the dispatch with a CloudRuntimeException.
 *
 * @param event  one of the ResourceListener.EVENT_* constants
 * @param params event-specific arguments, cast per the event type
 */
protected void processResourceEvent(final Integer event, final Object... params) {
    final List<ResourceListener> listeners = _lifeCycleListeners.get(event);
    if (listeners == null || listeners.size() == 0) {
        // Nobody is registered for this event type.
        return;
    }
    String eventName;
    for (final ResourceListener listener : listeners) {
        if (event.equals(ResourceListener.EVENT_DISCOVER_BEFORE)) {
            eventName = "EVENT_DISCOVER_BEFORE";
            listener.processDiscoverEventBefore((Long) params[0], (Long) params[1], (Long) params[2],
                    (URI) params[3], (String) params[4], (String) params[5], (List<String>) params[6]);
        } else if (event.equals(ResourceListener.EVENT_DISCOVER_AFTER)) {
            eventName = "EVENT_DISCOVER_AFTER";
            listener.processDiscoverEventAfter((Map<? extends ServerResource, Map<String, String>>) params[0]);
        } else if (event.equals(ResourceListener.EVENT_DELETE_HOST_BEFORE)) {
            eventName = "EVENT_DELETE_HOST_BEFORE";
            listener.processDeleteHostEventBefore((HostVO) params[0]);
        } else if (event.equals(ResourceListener.EVENT_DELETE_HOST_AFTER)) {
            eventName = "EVENT_DELETE_HOST_AFTER";
            listener.processDeletHostEventAfter((HostVO) params[0]);
        } else if (event.equals(ResourceListener.EVENT_CANCEL_MAINTENANCE_BEFORE)) {
            eventName = "EVENT_CANCEL_MAINTENANCE_BEFORE";
            listener.processCancelMaintenaceEventBefore((Long) params[0]);
        } else if (event.equals(ResourceListener.EVENT_CANCEL_MAINTENANCE_AFTER)) {
            eventName = "EVENT_CANCEL_MAINTENANCE_AFTER";
            listener.processCancelMaintenaceEventAfter((Long) params[0]);
        } else if (event.equals(ResourceListener.EVENT_PREPARE_MAINTENANCE_BEFORE)) {
            eventName = "EVENT_PREPARE_MAINTENANCE_BEFORE";
            listener.processPrepareMaintenaceEventBefore((Long) params[0]);
        } else if (event.equals(ResourceListener.EVENT_PREPARE_MAINTENANCE_AFTER)) {
            eventName = "EVENT_PREPARE_MAINTENANCE_AFTER";
            listener.processPrepareMaintenaceEventAfter((Long) params[0]);
        } else {
            throw new CloudRuntimeException("Unknown resource event:" + event);
        }
        s_logger.debug(
                "Sent resource event " + eventName + " to listener " + listener.getClass().getSimpleName());
    }
}
From source file:com.google.cloud.dns.testing.LocalDnsHelper.java
/**
 * Lists changes of a managed zone, paginated by change sequence ID.
 *
 * The next page token is the ID of the last change listed; a page continues strictly
 * after the token's ID (exclusive). An unparsable page token is silently ignored and
 * listing starts from the beginning.
 *
 * @param projectId project owning the zone
 * @param zoneName  name of the managed zone whose changes are listed
 * @param query     raw query string carrying fields/sortOrder/pageToken/maxResults options
 * @return a list response, or an error response (bad options, zone not found,
 *         serialization failure)
 */
@VisibleForTesting
Response listChanges(String projectId, String zoneName, String query) {
    Map<String, Object> options = OptionParsers.parseListChangesOptions(query);
    Response response = checkListOptions(options);
    if (response != null) {
        // Invalid list options: propagate the error response unchanged.
        return response;
    }
    ZoneContainer zoneContainer = findZone(projectId, zoneName);
    if (zoneContainer == null) {
        return Error.NOT_FOUND.response(
                String.format("The 'parameters.managedZone' resource named '%s' does not exist", zoneName));
    }
    // take a sorted snapshot of the current change list
    // Changes without an ID are excluded from the snapshot.
    NavigableMap<Integer, Change> changes = new TreeMap<>();
    for (Change c : zoneContainer.changes()) {
        if (c.getId() != null) {
            changes.put(Integer.valueOf(c.getId()), c);
        }
    }
    String[] fields = (String[]) options.get("fields");
    String sortOrder = (String) options.get("sortOrder");
    String pageToken = (String) options.get("pageToken");
    Integer maxResults = options.get("maxResults") == null ? null
            : Integer.valueOf((String) options.get("maxResults"));
    // as the only supported field is change sequence, we are not reading sortBy
    NavigableSet<Integer> keys;
    if ("descending".equals(sortOrder)) {
        keys = changes.descendingKeySet();
    } else {
        keys = changes.navigableKeySet();
    }
    Integer from = null;
    try {
        from = Integer.valueOf(pageToken);
    } catch (NumberFormatException ex) {
        // ignore page token
    }
    // Resume strictly after the token (exclusive tailSet/tailMap).
    keys = from != null ? keys.tailSet(from, false) : keys;
    NavigableMap<Integer, Change> fragment = from != null && changes.containsKey(from)
            ? changes.tailMap(from, false)
            : changes;
    boolean sizeReached = false;
    boolean hasMorePages = false;
    LinkedList<String> serializedResults = new LinkedList<>();
    String lastChangeId = null;
    for (Integer key : keys) {
        Change change = fragment.get(key);
        if (sizeReached) {
            // we do not add this, just note that there would be more and there should be a token
            hasMorePages = true;
            break;
        } else {
            lastChangeId = change.getId();
            try {
                serializedResults.addLast(jsonFactory.toString(OptionParsers.extractFields(change, fields)));
            } catch (IOException e) {
                return Error.INTERNAL_ERROR.response(
                        String.format("Error when serializing change %s in managed zone %s in project %s",
                                lastChangeId, zoneName, projectId));
            }
        }
        // Page is full once maxResults entries have been serialized.
        sizeReached = maxResults != null && maxResults.equals(serializedResults.size());
    }
    // Emit the page token only when a fields filter did not exclude it.
    boolean includePageToken = hasMorePages
            && (fields == null || Arrays.asList(fields).contains("nextPageToken"));
    return toListResponse(serializedResults, "changes", lastChangeId, includePageToken);
}