List of usage examples for java.util.HashMap.clear()
public void clear()
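Before the project examples below, a minimal self-contained demonstration of the method: clear() removes every mapping in place, and the emptied map instance remains usable afterwards.

import java.util.HashMap;

public class HashMapClearDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        counts.put("a", 1);
        counts.put("b", 2);
        System.out.println(counts.size());    // 2
        counts.clear();                       // removes all mappings in place
        System.out.println(counts.isEmpty()); // true
        counts.put("c", 3);                   // the same instance remains usable
        System.out.println(counts);           // {c=3}
    }
}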
From source file: com.krawler.crm.contact.bizservice.ContactManagementServiceImpl.java

/**
 * @param ll
 * @param totalSize
 * @param isexport
 * @param dateFormat
 * @param userid
 * @param companyid
 * @param currencyid
 * @param selectExportJson
 * @return
 * @throws com.krawler.common.session.SessionExpiredException
 */
public JSONObject getContactJsonExport(List<CrmContact> ll, int totalSize, boolean isexport,
        DateFormat dateFormat, String userid, String companyid, String currencyid,
        String selectExportJson, StringBuffer usersList) throws SessionExpiredException {
    JSONArray jarr = new JSONArray();
    JSONObject jobj = new JSONObject();
    try {
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        requestParams.put("isexport", isexport);
        requestParams.put("filter_names", Arrays.asList("companyid", "moduleid"));
        requestParams.put("filter_values", Arrays.asList(companyid, Constants.Crm_contact_moduleid));
        HashMap<String, String> replaceFieldMap = new HashMap<String, String>();
        HashMap<String, Integer> FieldMap = fieldDataManagercntrl.getFieldParamsMap1(requestParams, replaceFieldMap);
        HashMap<String, DefaultMasterItem> defaultMasterMap = crmContactHandler
                .getContactDefaultMasterItemsMap(companyid, usersList, crmCommonDAO, kwlCommonTablesDAOObj);
        int fromIndex = 0;
        int maxNumbers = Constants.maxrecordsMapCount;
        int totalCount = ll.size();
        while (fromIndex < totalCount) {
            List<CrmContact> subList;
            if (totalCount <= maxNumbers) {
                subList = ll;
            } else {
                int toIndex = (((fromIndex + maxNumbers) < totalCount) ? (fromIndex + maxNumbers) + 1
                        : totalCount); // Add +1 because subList excludes the toIndex item
                subList = ll.subList(fromIndex, toIndex);
            }
            List<String> idsList = new ArrayList<String>();
            for (CrmContact obj : subList) {
                idsList.add(obj.getContactid());
            }
            HashMap<String, CrmContactCustomData> contactCustomDataMap = crmContactDAOObj
                    .getContactCustomDataMap(idsList, companyid);
            Map<String, ExpressionVariables> exprVarMap = expressionVariableMap(subList, contactCustomDataMap,
                    companyid, FieldMap, replaceFieldMap, isexport, dateFormat);
            // get owners
            Map<String, List<ContactOwnerInfo>> owners = crmContactDAOObj.getContactOwners(idsList);
            for (CrmContact obj : subList) {
                requestParams.clear();
                requestParams.put("companyid", companyid);
                requestParams.put("recid", obj.getContactid());
                JSONObject tmpObj = new JSONObject();
                String aid = crmManagerCommon.moduleObjNull(obj.getCrmAccount(), "Accountid");
                if (StringUtil.isNullOrEmpty(aid)) {
                    JSONObject jobjNone = new JSONObject();
                    jobjNone = crmManagerCommon.insertNone();
                    aid = jobjNone.getString("id");
                }
                tmpObj.put("relatedname", aid);
                tmpObj.put("oldrelatedname", aid);
                tmpObj = getContactJsonObject(exprVarMap, obj, tmpObj, companyid, currencyid, FieldMap,
                        isexport, dateFormat, defaultMasterMap, contactCustomDataMap, owners);
                jarr.put(tmpObj);
            }
            fromIndex += maxNumbers + 1;
        }
        jobj.put("success", true);
        jobj.put("data", jarr);
        jobj.put("totalCount", totalSize);
    } catch (Exception e) {
        LOGGER.warn("Exception in crmContactController.getContactJson:", e);
    }
    return jobj;
}
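In the export loop above, a single requestParams map is reused across contacts: clear() empties it at the top of each pass before the per-record parameters are put back in, avoiding a fresh allocation per iteration. A minimal stand-alone sketch of that pattern (the ids and keys here are invented for illustration):

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class ParamReuse {
    public static void main(String[] args) {
        List<String> recordIds = Arrays.asList("rec-1", "rec-2", "rec-3"); // hypothetical ids
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        for (String recid : recordIds) {
            requestParams.clear();                  // drop the previous iteration's entries
            requestParams.put("companyid", "c-42"); // hypothetical constant parameter
            requestParams.put("recid", recid);      // per-iteration parameter
            System.out.println(requestParams);      // stand-in for the real per-record call
        }
    }
}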
From source file: org.freebxml.omar.server.cache.ClassificationSchemeCache.java

/**
 * Clear all affectedObjects in AuditableEvent from cache. When
 * called, internalCache may be out of date with respect to dB (where
 * transaction has been committed) and objectCache (where affected
 * classification nodes have already been processed).<br>
 * This code keeps the cache primed if it was primed earlier. The side
 * effect of this choice is every other context (separate transaction)
 * immediately knows about the just-committed changes. That is, this
 * cache imitates TRANSACTION_READ_COMMITTED transaction isolation
 * unless the caching event setting is "never".
 */
public void onEvent(ServerRequestContext context, AuditableEventType ae) {
    final String eventType = ae.getEventType();
    final boolean justRemove = primeCacheEvent.equalsIgnoreCase("never");
    final boolean wasChanged = eventType.equalsIgnoreCase(CanonicalConstants.CANONICAL_EVENT_TYPE_ID_Created)
            || eventType.equalsIgnoreCase(CanonicalConstants.CANONICAL_EVENT_TYPE_ID_Updated)
            || eventType.equalsIgnoreCase(CanonicalConstants.CANONICAL_EVENT_TYPE_ID_Versioned);
    final boolean wasRemoved = eventType.equalsIgnoreCase(CanonicalConstants.CANONICAL_EVENT_TYPE_ID_Deleted);
    Set schemesToRemove = new HashSet();
    HashMap schemesToUpdate = new HashMap();
    primeCacheOnFirstUse(context);
    if (wasChanged || wasRemoved) {
        try {
            List affectedObjects = ae.getAffectedObjects().getObjectRef();
            Iterator iter = affectedObjects.iterator();
            while (iter.hasNext()) {
                ObjectRefType ref = (ObjectRefType) iter.next();
                String objectId = ref.getId();
                RegistryObjectType ro = (RegistryObjectType) context.getAffectedObjectsMap().get(objectId);
                if (null == ro) {
                    // In case missing (removed?) object was a scheme
                    schemesToRemove.add(objectId);
                } else {
                    if (ro instanceof ClassificationSchemeType) {
                        if (wasRemoved || justRemove) {
                            schemesToRemove.add(objectId);
                        } else {
                            schemesToUpdate.put(objectId, ro);
                        }
                    } else if (ro instanceof ClassificationNodeType) {
                        String schemeId = bu.getSchemeIdForRegistryObject(ro);
                        // Handle case where a node in a scheme has been
                        // added, deleted or updated.
                        if (justRemove) {
                            schemesToRemove.add(schemeId);
                        } else if (!(schemesToRemove.contains(schemeId)
                                || schemesToUpdate.containsKey(schemeId))) {
                            ClassificationSchemeType scheme = (ClassificationSchemeType) getRegistryObjectInternal(
                                    context, schemeId, "ClassScheme");
                            if (null != scheme) {
                                schemesToUpdate.put(schemeId, scheme);
                                // ??? Why is this necessary for all
                                // ??? schemes loaded?
                                loadChildren(context, scheme, getDepthForScheme(schemeId));
                            }
                        }
                    }
                }
            }
        } catch (JAXRException e) {
            log.error(e);
            // Just update all schemes to be safe in case of any errors
            reset(context);
            // Make following block a no-op.
            schemesToRemove.clear();
            schemesToUpdate.clear();
        }
    }
    synchronized (internalCache) {
        final int oldSize = internalCache.getSize();
        Iterator iter = schemesToRemove.iterator();
        while (iter.hasNext()) {
            String objectId = (String) iter.next();
            internalCache.remove(objectId);
        }
        if (justRemove) {
            // Cache may become primed regardless of primeCacheEvent
            // setting, pay attention if we have undone that.
            if (oldSize != internalCache.getSize()) {
                cacheIsPrimed = false;
            }
        } else if (schemesToUpdate.size() > 0) {
            addClassificationSchemesToCache(schemesToUpdate.values());
        }
    }
}
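Note the catch block: after falling back to reset(context), both pending-change collections are cleared so that the synchronized block which follows becomes a no-op. A distilled sketch of that clear-to-disarm idiom (the names and the simulated failure are illustrative, not from the original):

import java.util.HashMap;

public class ClearAsNoOp {
    public static void main(String[] args) {
        HashMap<String, String> pendingUpdates = new HashMap<String, String>();
        pendingUpdates.put("scheme-1", "v2"); // hypothetical pending change
        try {
            throw new IllegalStateException("simulated failure while collecting changes");
        } catch (IllegalStateException e) {
            // Fall back to a full rebuild, then empty the map so the
            // incremental-apply step below becomes a no-op.
            System.out.println("full reset instead of incremental update");
            pendingUpdates.clear();
        }
        if (!pendingUpdates.isEmpty()) {
            System.out.println("applying incremental updates"); // skipped after clear()
        }
    }
}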
From source file: org.openbravo.erpCommon.ad_forms.Fact.java

/**
 * Balance Source Segment
 *
 * @param segmentType
 *            segment type
 */
private void balanceSegment(String segmentType, ConnectionProvider conn) {
    // no lines -> balanced
    if (m_lines.size() == 0)
        return;
    log4jFact.debug("balanceSegment (" + segmentType + ") - ");
    // Org
    if (segmentType.equals(AcctSchemaElement.SEGMENT_Org)) {
        HashMap<String, BigDecimal> map = new HashMap<String, BigDecimal>();
        // Add up values by key
        for (int i = 0; i < m_lines.size(); i++) {
            FactLine line = (FactLine) m_lines.get(i);
            String key = line.getAD_Org_ID(conn);
            BigDecimal bal = line.getSourceBalance();
            BigDecimal oldBal = map.get(key);
            if (oldBal != null)
                bal = bal.add(oldBal);
            map.put(key, bal);
        }
        // Create entry for non-zero element
        Iterator<String> keys = map.keySet().iterator();
        while (keys.hasNext()) {
            String key = keys.next();
            BigDecimal diff = map.get(key);
            //
            if (diff.compareTo(ZERO) != 0) {
                // Create Balancing Entry
                if (m_lines.size() == 0) {
                    log4jFact.error("balanceSegment failed.");
                    return;
                }
                FactLine fl = (FactLine) m_lines.get(0);
                FactLine line = new FactLine(m_doc.AD_Table_ID, m_doc.Record_ID, "", fl.m_Fact_Acct_Group_ID,
                        fl.m_SeqNo, fl.m_DocBaseType);
                line.setDocumentInfo(m_doc, null);
                line.setJournalInfo(m_doc.GL_Category_ID);
                line.setPostingType(m_postingType);
                // Amount & Account
                if (diff.compareTo(ZERO) < 0) {
                    line.setAmtSource(m_doc.C_Currency_ID, diff.abs().toString(), ZERO.toString());
                    line.setAccount(m_acctSchema, m_acctSchema.m_DueFrom_Acct);
                } else {
                    line.setAmtSource(m_doc.C_Currency_ID, ZERO.toString(), diff.abs().toString());
                    line.setAccount(m_acctSchema, m_acctSchema.m_DueTo_Acct);
                }
                line.convert(m_acctSchema.getC_Currency_ID(), m_doc.DateAcct,
                        m_acctSchema.getCurrencyRateType(), conn);
                line.setAD_Org_ID(key);
                log4jFact.debug("balanceSegment (" + segmentType + ") - ");
                log4jFact.debug("************* fact - balanceSegment - m_lines.size() - " + m_lines.size()
                        + " - line.ad_org_id - " + line.getAD_Org_ID(conn));
                m_lines.add(line);
            }
        }
        map.clear();
    }
}
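The method accumulates per-organization balances into a HashMap, walks the totals, and finally calls map.clear() to drop the working entries. A compact sketch of that accumulate-then-clear flow (the keys and amounts are made up):

import java.math.BigDecimal;
import java.util.HashMap;
import java.util.Map;

public class BalanceByKey {
    public static void main(String[] args) {
        HashMap<String, BigDecimal> map = new HashMap<String, BigDecimal>();
        String[] orgs = { "org-1", "org-2", "org-1" };               // hypothetical org ids
        BigDecimal[] amounts = { new BigDecimal("10.00"),
                new BigDecimal("-4.00"), new BigDecimal("-6.00") };  // hypothetical balances
        // Add up values by key.
        for (int i = 0; i < orgs.length; i++) {
            BigDecimal oldBal = map.get(orgs[i]);
            map.put(orgs[i], oldBal == null ? amounts[i] : amounts[i].add(oldBal));
        }
        // Act on the non-zero totals.
        for (Map.Entry<String, BigDecimal> e : map.entrySet()) {
            if (e.getValue().compareTo(BigDecimal.ZERO) != 0) {
                System.out.println(e.getKey() + " needs balancing entry of " + e.getValue().negate());
            }
        }
        map.clear(); // done with the working map
    }
}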
From source file: net.tourbook.tour.photo.TourPhotoManager.java

/**
 * Create pseudo tours for photos which are not contained in a tour and remove all tours which
 * do not contain any photos.
 *
 * @param allPhotos
 * @param visibleTourPhotoLinks
 * @param isShowToursOnlyWithPhotos
 * @param isShowToursWithoutSavedPhotos
 * @param allTourCameras
 */
void createTourPhotoLinks(final ArrayList<Photo> allPhotos,
        final ArrayList<TourPhotoLink> visibleTourPhotoLinks,
        final HashMap<String, Camera> allTourCameras,
        final boolean isShowToursOnlyWithPhotos,
        final boolean isShowToursWithoutSavedPhotos) {
    loadToursFromDb(allPhotos, true);
    TourPhotoLink currentTourPhotoLink = createTourPhotoLinks_10_GetFirstTour(allPhotos);
    final HashMap<String, String> tourCameras = new HashMap<String, String>();
    final int numberOfRealTours = _dbTourPhotoLinks.size();
    long nextDbTourStartTime = numberOfRealTours > 0 ? _dbTourPhotoLinks.get(0).tourStartTime : Long.MIN_VALUE;
    int tourIndex = 0;
    long photoTime = 0;
    // loop: all photos
    for (final Photo photo : allPhotos) {
        photoTime = photo.adjustedTimeLink;
        // check if current photo can be put into current tour photo link
        if (currentTourPhotoLink.isHistoryTour == false && photoTime <= currentTourPhotoLink.tourEndTime) {
            // current photo can be put into current real tour
        } else if (currentTourPhotoLink.isHistoryTour && photoTime < nextDbTourStartTime) {
            // current photo can be put into current history tour
        } else {
            // current photo does not fit into the current photo link
            // finalize current tour photo link
            createTourPhotoLinks_30_FinalizeCurrentTourPhotoLink(currentTourPhotoLink, tourCameras,
                    visibleTourPhotoLinks, isShowToursOnlyWithPhotos, isShowToursWithoutSavedPhotos);
            currentTourPhotoLink = null;
            tourCameras.clear();
            /*
             * create/get new merge tour
             */
            if (tourIndex >= numberOfRealTours) {
                /*
                 * there are no further tours which can contain photos, put remaining photos
                 * into a history tour
                 */
                nextDbTourStartTime = Long.MAX_VALUE;
            } else {
                for (; tourIndex < numberOfRealTours; tourIndex++) {
                    final TourPhotoLink dbTourPhotoLink = _dbTourPhotoLinks.get(tourIndex);
                    final long dbTourStart = dbTourPhotoLink.tourStartTime;
                    final long dbTourEnd = dbTourPhotoLink.tourEndTime;
                    if (photoTime < dbTourStart) {
                        // image time is before the next tour start, create history tour
                        nextDbTourStartTime = dbTourStart;
                        break;
                    }
                    if (photoTime >= dbTourStart && photoTime <= dbTourEnd) {
                        // current photo can be put into current tour
                        currentTourPhotoLink = dbTourPhotoLink;
                        break;
                    }
                    // current tour does not contain any images
                    if (isShowToursOnlyWithPhotos == false && isShowToursWithoutSavedPhotos == false) {
                        // tours without photos are displayed
                        createTourPhotoLinks_40_AddTour(dbTourPhotoLink, visibleTourPhotoLinks);
                    }
                    // get start time for the next tour
                    if (tourIndex + 1 < numberOfRealTours) {
                        nextDbTourStartTime = _dbTourPhotoLinks.get(tourIndex + 1).tourStartTime;
                    } else {
                        nextDbTourStartTime = Long.MAX_VALUE;
                    }
                }
            }
            if (currentTourPhotoLink == null) {
                // create history tour
                currentTourPhotoLink = new TourPhotoLink(photoTime);
            }
        }
        currentTourPhotoLink.linkPhotos.add(photo);
        // set camera into the photo
        final Camera camera = setCamera(photo, allTourCameras);
        tourCameras.put(camera.cameraName, camera.cameraName);
        // set number of GPS/No GPS photos
        final double latitude = photo.getLinkLatitude();
        if (latitude == 0) {
            currentTourPhotoLink.numberOfNoGPSPhotos++;
        } else {
            currentTourPhotoLink.numberOfGPSPhotos++;
        }
    }
    createTourPhotoLinks_30_FinalizeCurrentTourPhotoLink(currentTourPhotoLink, tourCameras,
            visibleTourPhotoLinks, isShowToursOnlyWithPhotos, isShowToursWithoutSavedPhotos);
    createTourPhotoLinks_60_MergeHistoryTours(visibleTourPhotoLinks);
    /*
     * set tour GPS into photo
     */
    final List<TourPhotoLink> tourPhotoLinksWithGps = new ArrayList<TourPhotoLink>();
    for (final TourPhotoLink tourPhotoLink : visibleTourPhotoLinks) {
        if (tourPhotoLink.tourId != Long.MIN_VALUE) {
            tourPhotoLinksWithGps.add(tourPhotoLink);
        }
    }
    if (tourPhotoLinksWithGps.size() > 0) {
        setTourGpsIntoPhotos(tourPhotoLinksWithGps);
    }
}
From source file: it.cnr.icar.eric.server.cache.ClassificationSchemeCache.java

/**
 * Clear all affectedObjects in AuditableEvent from cache. When called,
 * internalCache may be out of date with respect to dB (where transaction
 * has been committed) and objectCache (where affected classification nodes
 * have already been processed).<br>
 * This code keeps the cache primed if it was primed earlier. The side
 * effect of this choice is every other context (separate transaction)
 * immediately knows about the just-committed changes. That is, this cache
 * imitates TRANSACTION_READ_COMMITTED transaction isolation unless the
 * caching event setting is "never".
 */
public void onEvent(ServerRequestContext context, AuditableEventType ae) {
    final String eventType = ae.getEventType();
    final boolean justRemove = primeCacheEvent.equalsIgnoreCase("never");
    final boolean wasChanged = eventType.equalsIgnoreCase(CanonicalConstants.CANONICAL_EVENT_TYPE_ID_Created)
            || eventType.equalsIgnoreCase(CanonicalConstants.CANONICAL_EVENT_TYPE_ID_Updated)
            || eventType.equalsIgnoreCase(CanonicalConstants.CANONICAL_EVENT_TYPE_ID_Versioned);
    final boolean wasRemoved = eventType.equalsIgnoreCase(CanonicalConstants.CANONICAL_EVENT_TYPE_ID_Deleted);
    Set<String> schemesToRemove = new HashSet<String>();
    HashMap<String, RegistryObjectType> schemesToUpdate = new HashMap<String, RegistryObjectType>();
    primeCacheOnFirstUse(context);
    if (wasChanged || wasRemoved) {
        try {
            List<ObjectRefType> affectedObjects = ae.getAffectedObjects().getObjectRef();
            Iterator<ObjectRefType> iter = affectedObjects.iterator();
            while (iter.hasNext()) {
                ObjectRefType ref = iter.next();
                String objectId = ref.getId();
                RegistryObjectType ro = context.getAffectedObjectsMap().get(objectId);
                if (null == ro) {
                    // In case missing (removed?) object was a scheme
                    schemesToRemove.add(objectId);
                } else {
                    if (ro instanceof ClassificationSchemeType) {
                        if (wasRemoved || justRemove) {
                            schemesToRemove.add(objectId);
                        } else {
                            schemesToUpdate.put(objectId, ro);
                        }
                    } else if (ro instanceof ClassificationNodeType) {
                        String schemeId = bu.getSchemeIdForRegistryObject(ro);
                        // Handle case where a node in a scheme has been
                        // added, deleted or updated.
                        if (justRemove) {
                            schemesToRemove.add(schemeId);
                        } else if (!(schemesToRemove.contains(schemeId)
                                || schemesToUpdate.containsKey(schemeId))) {
                            ClassificationSchemeType scheme = (ClassificationSchemeType) getRegistryObjectInternal(
                                    context, schemeId, "ClassScheme");
                            if (null != scheme) {
                                schemesToUpdate.put(schemeId, scheme);
                                // ??? Why is this necessary for all
                                // ??? schemes loaded?
                                loadChildren(context, scheme, getDepthForScheme(schemeId));
                            }
                        }
                    }
                }
            }
        } catch (JAXRException e) {
            log.error(e);
            // Just update all schemes to be safe in case of any errors
            reset(context);
            // Make following block a no-op.
            schemesToRemove.clear();
            schemesToUpdate.clear();
        }
    }
    synchronized (internalCache) {
        int oldSize;
        // xxx pa 110816 added try / catch for CacheException (ehcache 1.0 effect?)
        try {
            oldSize = internalCache.getSize();
            Iterator<String> iter = schemesToRemove.iterator();
            while (iter.hasNext()) {
                String objectId = iter.next();
                internalCache.remove(objectId);
            }
            if (justRemove) {
                // Cache may become primed regardless of primeCacheEvent
                // setting, pay attention if we have undone that.
                if (oldSize != internalCache.getSize()) {
                    cacheIsPrimed = false;
                }
            } else if (schemesToUpdate.size() > 0) {
                addClassificationSchemesToCache(schemesToUpdate.values());
            }
        } catch (CacheException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
From source file: gov.anl.cue.arcane.engine.matrix.MatrixModel.java

/**
 * Knit the model equations across nodes.
 *
 * @param regularSuffix the regular suffix
 * @param combinedSuffix the combined suffix
 * @return the set of knitted equations
 */
public Set<String> knit(String regularSuffix, String combinedSuffix) {
    // Declare the storage.
    Set<String> knit = new TreeSet<String>();
    HashMap<String, String> substitutions = new HashMap<String, String>();
    String equation;
    // Scan the source nodes (i.e., rows).
    for (int source = 0; source < this.nodeCount(); source++) {
        // Prepare to make the next substitutions list.
        substitutions.clear();
        // Scan the variables.
        for (MatrixVariable matrixVariable : this) {
            // Extract the next variable.
            substitutions.put(matrixVariable.name,
                    matrixVariable.name + MatrixModelConstants.getString("MatrixModel.20")
                            + this.nodeName(source) + regularSuffix);
        }
        // Make substitutions, as required.
        for (MatrixVariable matrixVariable : this) {
            // Make the next substitution.
            equation = Util.simultaneousStringReplaceAndEscape(substitutions,
                    matrixVariable.equations.get(source));
            // Check to see if the new equation is meaningful.
            if (equation.trim().length() > 0) {
                knit.add(matrixVariable.name + MatrixModelConstants.getString("MatrixModel.20")
                        + this.nodeName(source) + combinedSuffix + " = " + equation);
            }
        }
    }
    // Return the results.
    return knit;
}
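Here substitutions.clear() runs at the top of each source row so mappings built for one node never leak into the next. A small stand-alone sketch of that per-iteration reset (node and variable names are invented):

import java.util.HashMap;

public class PerRowSubstitutions {
    public static void main(String[] args) {
        String[] nodes = { "nodeA", "nodeB" }; // hypothetical node names
        String[] variables = { "x", "y" };     // hypothetical variable names
        HashMap<String, String> substitutions = new HashMap<String, String>();
        for (String node : nodes) {
            substitutions.clear(); // reset before building this row's mappings
            for (String var : variables) {
                substitutions.put(var, var + "_" + node);
            }
            System.out.println(node + " -> " + substitutions);
        }
    }
}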
From source file: com.tulskiy.musique.library.Library.java

public void rescan(Map<String, Object> progress) {
    List<String> folders = LibraryConfiguration.getFolders();
    if (CollectionUtils.isEmpty(folders)) {
        return;
    }
    progress.put("processing.file", "");
    data.removeDeadItems();
    HashMap<TrackData, Track> trackDatas = new HashMap<TrackData, Track>();
    for (Track track : data) {
        trackDatas.put(track.getTrackData(), track);
    }
    LinkedList<File> queue = new LinkedList<File>();
    for (String path : folders) {
        File f = new File(path);
        if (f.exists())
            queue.add(f);
    }
    HashSet<Track> processed = new HashSet<Track>();
    final Set<String> formats = Codecs.getFormats();
    ArrayList<Track> temp = new ArrayList<Track>();
    while (!queue.isEmpty()) {
        try {
            File file = queue.pop();
            if (progress != null) {
                if (progress.get("processing.stop") != null) {
                    break;
                }
                progress.put("processing.file", file.getAbsolutePath());
            }
            if (file.isDirectory()) {
                queue.addAll(0, Arrays.asList(file.listFiles(new FileFilter() {
                    @Override
                    public boolean accept(File file) {
                        if (file.isHidden() || !file.canRead()) {
                            return false;
                        }
                        if (file.isDirectory())
                            return true;
                        String ext = Util.getFileExt(file).toLowerCase();
                        if (formats.contains(ext)) {
                            String name = Util.removeExt(file.getAbsolutePath()) + ".cue";
                            return !new File(name).exists();
                        }
                        return ext.equals("cue");
                    }
                })));
            } else {
                TrackData trackData = new TrackData(file.toURI(), 0);
                Track track = trackDatas.get(trackData);
                if (track != null) {
                    if (track.getTrackData().getLastModified() != file.lastModified()) {
                        track.getTrackData().clearTags();
                        TrackIO.getAudioFileReader(file.getName()).reload(track);
                    }
                    processed.add(track);
                } else {
                    temp.clear();
                    TrackIO.getAudioFileReader(file.getName()).read(file, temp);
                    for (Track newTrack : temp) {
                        trackData = newTrack.getTrackData();
                        if (trackDatas.containsKey(trackData)) {
                            // it must be the cue file, so merge the track data
                            trackData.merge(newTrack.getTrackData());
                        }
                        processed.add(newTrack);
                    }
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    data.clear();
    data.addAll(processed);
    processed.clear();
    trackDatas.clear();
    rebuildTree();
}
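At the end of rescan(), the working collections are emptied with clear() once their contents have been transferred into the long-lived data collection, letting the temporary entries become garbage-collectable while the scan results survive. A minimal sketch of that transfer-then-clear step (paths and titles are placeholders):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;

public class TransferThenClear {
    public static void main(String[] args) {
        HashMap<String, String> index = new HashMap<String, String>();
        index.put("/music/a.mp3", "Track A"); // hypothetical scan results
        index.put("/music/b.mp3", "Track B");
        HashSet<String> processed = new HashSet<String>(index.values());
        ArrayList<String> library = new ArrayList<String>();
        library.addAll(processed);   // transfer results to the long-lived collection
        processed.clear();           // then empty the working collections so their
        index.clear();               // entries can be garbage collected
        System.out.println(library);
    }
}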
From source file: com.krawler.spring.hrms.common.hrmsCommonController.java

public ModelAndView getAvailableManagers(HttpServletRequest request, HttpServletResponse response) {
    JSONObject jobj = new JSONObject();
    JSONObject jobj1 = new JSONObject();
    KwlReturnObject result = null;
    try {
        String userid = request.getParameter("userid");
        String companyid = sessionHandlerImplObj.getCompanyid(request);
        HashMap<String, Object> requestParams = new HashMap<String, Object>();
        String mid[] = request.getParameterValues("managerids");
        String ids = "";
        for (int i = 0; i < mid.length; i++) {
            if (i == 0) {
                ids = ids + " ( ";
            }
            ids = ids + "'" + mid[i] + "',";
        }
        if (mid.length != 0) {
            ids = ids.substring(0, ids.length() - 1);
            ids = ids + " )";
        }
        requestParams.clear();
        requestParams.put("filter_names", Arrays.asList("deleteflag", "company.companyID", "!userID"));
        requestParams.put("filter_values", Arrays.asList(0, companyid, userid));
        requestParams.put("ss", StringUtil.checkForNull(request.getParameter("ss")));
        requestParams.put("searchcol", new String[] { "firstName", "lastName" });
        requestParams.put("append", " and userID not in " + ids + " ");
        requestParams.put("allflag", false);
        StringUtil.checkpaging(requestParams, request);
        result = hrmsCommonDAOObj.getUsers(requestParams);
        Iterator ite = result.getEntityList().iterator();
        JSONArray jarr = new JSONArray();
        while (ite.hasNext()) {
            User u = (User) ite.next();
            Useraccount ua = (Useraccount) kwlCommonTablesDAOObj
                    .getObject("com.krawler.common.admin.Useraccount", u.getUserID());
            JSONObject tmpObj = new JSONObject();
            tmpObj.put("userid", u.getUserID());
            tmpObj.put("username", u.getFirstName() + " " + (u.getLastName() == null ? "" : u.getLastName()));
            tmpObj.put("departmentname", ua.getDepartment() == null ? "" : ua.getDepartment().getValue());
            tmpObj.put("designation", ua.getDesignationid() == null ? "" : ua.getDesignationid().getValue());
            jarr.put(tmpObj);
        }
        jobj.put("data", jarr);
        jobj.put("count", result.getRecordTotalCount());
        jobj1.put("data", jobj.toString());
        jobj1.put("valid", true);
    } catch (Exception e) {
        e.printStackTrace();
    }
    return new ModelAndView("jsonView", "model", jobj1.toString());
}
From source file: org.dasein.cloud.rackspace.network.CloudLoadBalancers.java

@Override
public void addServers(String toLoadBalancerId, String... serverIdsToAdd)
        throws CloudException, InternalException {
    Logger logger = RackspaceCloud.getLogger(CloudLoadBalancers.class, "std");
    if (logger.isTraceEnabled()) {
        logger.trace("enter - " + CloudLoadBalancers.class.getName() + ".addServers(" + toLoadBalancerId + ","
                + serverIdsToAdd + ")");
    }
    try {
        ArrayList<HashMap<String, Object>> nodes = new ArrayList<HashMap<String, Object>>();
        LoadBalancer lb = getLoadBalancer(toLoadBalancerId);
        int port = -1;
        if (lb == null) {
            logger.error("addServers(): No such load balancer: " + toLoadBalancerId);
            throw new CloudException("No such load balancer: " + toLoadBalancerId);
        }
        LbListener[] listeners = lb.getListeners();
        if (listeners != null) {
            for (LbListener listener : listeners) {
                port = listener.getPrivatePort();
                break;
            }
            if (port == -1) {
                for (LbListener listener : listeners) {
                    port = listener.getPublicPort();
                    break;
                }
            }
        }
        if (port == -1) {
            if (lb.getPublicPorts() != null && lb.getPublicPorts().length > 0) {
                port = lb.getPublicPorts()[0];
            }
            if (port == -1) {
                logger.error("addServers(): Could not determine a proper private port for mapping");
                throw new CloudException("No port understanding exists for this load balancer");
            }
        }
        for (String id : serverIdsToAdd) {
            if (logger.isTraceEnabled()) {
                logger.trace("addServers(): Adding " + id + "...");
            }
            VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport().getVirtualMachine(id);
            if (vm == null) {
                logger.error("addServers(): Failed to add " + id + " because it does not exist");
                throw new CloudException("No such server: " + id);
            }
            String address = null;
            if (vm.getProviderRegionId().equals(provider.getContext().getRegionId())) {
                for (String addr : vm.getPrivateIpAddresses()) {
                    address = addr;
                    break;
                }
            }
            if (address == null) {
                for (String addr : vm.getPublicIpAddresses()) {
                    address = addr;
                    break;
                }
            }
            if (address == null) {
                logger.error("addServers(): No address exists for mapping the load balancer to this server");
                throw new CloudException("The virtual machine " + id + " has no mappable addresses");
            }
            if (logger.isDebugEnabled()) {
                logger.debug("addServers(): Mapping IP is: " + address);
            }
            HashMap<String, Object> node = new HashMap<String, Object>();
            node.put("address", address);
            node.put("condition", "ENABLED");
            node.put("port", port);
            nodes.add(node);
        }
        if (!nodes.isEmpty()) {
            HashMap<String, Object> json = new HashMap<String, Object>();
            json.put("nodes", nodes);
            RackspaceMethod method = new RackspaceMethod(provider);
            if (logger.isTraceEnabled()) {
                logger.debug("addServers(): Calling cloud...");
            }
            try {
                method.postLoadBalancers("/loadbalancers", toLoadBalancerId + "/nodes", new JSONObject(json));
            } catch (RackspaceException e) {
                if (e.getHttpCode() == 422 && nodes.size() == 1) {
                    nodes.clear();
                    for (String id : serverIdsToAdd) {
                        if (logger.isTraceEnabled()) {
                            logger.trace("addServers(): Adding " + id + "...");
                        }
                        VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport()
                                .getVirtualMachine(id);
                        if (vm == null) {
                            logger.error("addServers(): Failed to add " + id + " because it does not exist");
                            throw new CloudException("No such server: " + id);
                        }
                        String address = null;
                        for (String addr : vm.getPublicIpAddresses()) {
                            address = addr;
                            break;
                        }
                        if (address == null) {
                            logger.error("addServers(): No public address exists for mapping the load balancer to this server");
                            throw new CloudException("The virtual machine " + id + " has no publicly mappable addresses");
                        }
                        if (logger.isDebugEnabled()) {
                            logger.debug("addServers(): Mapping IP is: " + address);
                        }
                        HashMap<String, Object> node = new HashMap<String, Object>();
                        node.put("address", address);
                        node.put("condition", "ENABLED");
                        node.put("port", port);
                        nodes.add(node);
                    }
                    json.clear();
                    json.put("nodes", nodes);
                    if (logger.isTraceEnabled()) {
                        logger.debug("addServers(): Attempting with public IP...");
                    }
                    method.postLoadBalancers("/loadbalancers", toLoadBalancerId + "/nodes", new JSONObject(json));
                }
            }
            if (logger.isTraceEnabled()) {
                logger.debug("addServers(): Done.");
            }
        }
    } finally {
        if (logger.isTraceEnabled()) {
            logger.trace("exit - " + CloudLoadBalancers.class.getName() + ".addServers()");
        }
    }
}
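On a 422 response the code clears both the nodes list and the json payload map, rebuilds them with public addresses, and retries the POST; clear() lets the same containers be reused for the second attempt. A self-contained sketch of that clear-and-retry shape (the addresses and the simulated error are illustrative):

import java.util.ArrayList;
import java.util.HashMap;

public class ClearAndRetry {
    public static void main(String[] args) {
        ArrayList<HashMap<String, Object>> nodes = new ArrayList<HashMap<String, Object>>();
        HashMap<String, Object> node = new HashMap<String, Object>();
        node.put("address", "10.0.0.5"); // hypothetical private address
        node.put("port", 80);
        nodes.add(node);
        HashMap<String, Object> payload = new HashMap<String, Object>();
        payload.put("nodes", nodes);
        try {
            throw new IllegalArgumentException("simulated 422: address rejected");
        } catch (IllegalArgumentException e) {
            nodes.clear();                      // drop the rejected node list
            node = new HashMap<String, Object>();
            node.put("address", "203.0.113.7"); // hypothetical public address
            node.put("port", 80);
            nodes.add(node);
            payload.clear();                    // rebuild the request payload from scratch
            payload.put("nodes", nodes);
        }
        System.out.println("retrying with " + payload);
    }
}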
From source file: gov.anl.cue.arcane.engine.matrix.MatrixModel.java

/**
 * Fitness.
 *
 * @param regularSuffix the regular suffix
 * @return the string
 */
public String fitness(String regularSuffix) {
    // Declare the storage.
    HashMap<String, String> substitutions = new HashMap<String, String>();
    String equation;
    // Start formulating the fitness equation.
    String fitness = "";
    // Scan the source nodes (i.e., rows).
    for (int source = 0; source < this.nodeCount(); source++) {
        // Prepare to make the next substitutions list.
        substitutions.clear();
        // Scan the variables.
        for (MatrixVariable matrixVariable : this) {
            // Extract the next variable.
            substitutions.put(matrixVariable.name,
                    matrixVariable.name + MatrixModelConstants.getString("MatrixModel.20")
                            + this.nodeName(source) + regularSuffix);
        }
        // Make the next substitution.
        equation = Util.simultaneousStringReplaceAndEscape(substitutions, this.fitnessEquations.get(source));
        // Check to see if the new equation is meaningful.
        if (equation.trim().length() > 0) {
            if (fitness.length() > 0)
                fitness += " + ";
            fitness = fitness + "(" + equation + ")";
        }
    }
    // Return the result.
    return fitness;
}