Usage examples for the java.util.SortedMap#isEmpty() method, collected from several open-source projects.
boolean isEmpty();
From source file:org.n52.ifgicopter.spf.input.InputPluginCollector.java
/**
 * Processes a newly arrived data tuple from the input plugin: notifies position
 * listeners (for mobile plugins), normalizes the timestamp, optionally performs
 * plug'n'play recognition of unknown properties, stores the item values, tracks
 * whether every mandatory property has data, and creates or feeds output threads.
 *
 * @param newData
 *            new data tuple (property name -> value); expected to contain the
 *            plugin's time property
 * @throws Exception
 *             if processing failed
 */
public void addNewData(final Map<String, Object> newData) {
    /*
     * If there are IPositionListeners and the plugin is mobile, notify them of the
     * new position. The notification runs in a pooled task to avoid blocking.
     */
    if (this.engine.getPositionListeners().size() > 0) {
        if (InputPluginCollector.this.plugin.isMobile()) {
            final Object first = newData
                    .get(InputPluginCollector.this.plugin.getLocation().getFirstCoordinateName());
            final Object second = newData
                    .get(InputPluginCollector.this.plugin.getLocation().getSecondCoordinateName());
            // start a new thread to avoid blocking
            SPFRegistry.getInstance().getThreadPool().submitTask(new Runnable() {
                @Override
                public void run() {
                    // only notify if both coordinates were present in the tuple
                    if (first != null && second != null) {
                        for (IPositionListener ipl : InputPluginCollector.this.engine.getPositionListeners()) {
                            ipl.positionUpdate(InputPluginCollector.this.plugin, newData);
                        }
                    }
                }
            });
        }
    }
    /*
     * Normalize the time parameter: it may arrive as Long millis, a String (millis
     * or ISO date), a java.util.Date, or a Joda DateTime.
     */
    Object tmp = newData.get(InputPluginCollector.this.plugin.getTime().getProperty());
    Long time = null;
    if (tmp instanceof Long) {
        time = (Long) tmp;
    } else if (tmp instanceof String) {
        try {
            // try millis as string
            time = Long.valueOf((String) tmp);
        } catch (NumberFormatException e) {
            // try iso-date
            time = Long.valueOf(new DateTime(tmp).getMillis());
        }
    } else if (tmp instanceof Date) {
        time = Long.valueOf(new DateTime(tmp).getMillis());
    } else if (tmp instanceof DateTime) {
        time = Long.valueOf(((DateTime) tmp).getMillis());
    }
    if (time == null) {
        // fall back to the local clock when the timestamp is missing or unparsable
        log.warn("Could not process the timestamp '" + tmp + "'. Using current system time.");
        time = Long.valueOf(System.currentTimeMillis());
    }
    synchronized (InputPluginCollector.this.itemCollection) {
        /*
         * Plug'n'play: for properties the plugin does not yet know, ask the user
         * (via a dialog) how to classify them and register them on the fly.
         */
        if (this.plugAndPlayBehaviour) {
            for (String key : newData.keySet()) {
                if (!InputPluginCollector.this.plugin.getInputProperties().contains(key)) {
                    // try to recognize unknown property
                    PNPDialog pnp = this.engine.doPNP(key);
                    if (pnp.isCanceled()) {
                        // the dialog was cancelled -> skip this property
                        continue;
                    }
                    Item item = new Item(key);
                    item.setDataType(pnp.getDatatype());
                    item.setDefinition(pnp.getDefintion());
                    item.setUom(pnp.getUom());
                    // add to the plugin description
                    if (pnp.isOutput()) {
                        InputPluginCollector.this.plugin.addOutputProperty(item);
                    } else if (pnp.isMandatory()) {
                        InputPluginCollector.this.plugin.addMandatoryProperty(item);
                    } else {
                        InputPluginCollector.this.plugin.addInputProperty(item);
                    }
                    InputPluginCollector.this.itemCollection.put(key, new TreeMap<Long, Object>());
                }
            }
        }
        // store every known (non-time) property value under the normalized timestamp
        for (Entry<String, Object> entry : newData.entrySet()) {
            if (!entry.getKey().equals(InputPluginCollector.this.plugin.getTime().getProperty())) {
                if (!InputPluginCollector.this.plugin.getInputProperties().contains(entry.getKey())) {
                    continue;
                }
                Map<Long, Object> collection = InputPluginCollector.this.itemCollection.get(entry.getKey());
                // put the data of this item to this itemlist
                collection.put(time, entry.getValue());
            }
        }
        /*
         * First check if there is at least one item per mandatory property in the
         * collector. If not checked, we would be stuck in a deadlock in the
         * OnAvailableOutputThread.
         */
        if (!InputPluginCollector.this.hasMandatories) {
            if (InputPluginCollector.this.plugin.getMandatoryProperties().size() == 0) {
                // no mandatory properties at all -> trivially satisfied
                InputPluginCollector.this.hasMandatories = true;
            }
            for (String prop : InputPluginCollector.this.plugin.getMandatoryProperties()) {
                SortedMap<Long, Object> data = InputPluginCollector.this.itemCollection.get(prop);
                if (!data.isEmpty() && data.firstKey().longValue() <= time.longValue()) {
                    // this mandatory property has data at or before the current time
                    InputPluginCollector.this.hasMandatories = true;
                } else {
                    // a mandatory property is still missing -> abort processing here
                    InputPluginCollector.this.hasMandatories = false;
                    return;
                }
            }
        }
    }
    if (InputPluginCollector.this.availabilityBehaviour) {
        // collect the output-creating properties contained in this tuple
        Set<String> containsOutputs = new HashSet<String>();
        for (String prop : InputPluginCollector.this.plugin.getOutputProperties()) {
            if (newData.keySet().contains(prop)) {
                containsOutputs.add(prop);
            }
        }
        if (containsOutputs.size() > 0) {
            /*
             * This tuple contains an output-creating property: start a new thread
             * and register all relevant items with it.
             */
            if (InputPluginCollector.this.activeTimestamps.contains(time)) {
                /*
                 * do not create a thread if we already have one for this timestamp
                 * -> data would be duplicated
                 */
                return;
            }
            OnAvailableOutputThread ot = new OnAvailableOutputThread(time);
            for (String item : newData.keySet()) {
                /*
                 * register items at the new thread; only add outputProperties or
                 * mandatoryProperties items (others should not be included in this
                 * data tuple)
                 */
                if (containsOutputs.contains(item)
                        || InputPluginCollector.this.plugin.getMandatoryProperties().contains(item)) {
                    ot.registerItem(item);
                }
            }
            synchronized (InputPluginCollector.this.outputThreads) {
                InputPluginCollector.this.outputThreads.add(ot);
            }
            SPFRegistry.getInstance().getThreadPool().submitTask(ot);
        } else {
            // register at the existing threads - perhaps one or more are waiting
            synchronized (InputPluginCollector.this.outputThreads) {
                for (OnAvailableOutputThread ot : InputPluginCollector.this.outputThreads) {
                    for (String item : newData.keySet()) {
                        if (newData.get(item) == null)
                            continue;
                        /*
                         * only add mandatoryProperties; the others will not be
                         * added to this data tuple
                         */
                        if (InputPluginCollector.this.plugin.getMandatoryProperties().contains(item)) {
                            ot.registerItem(item);
                        }
                    }
                }
            }
        }
    }
}
From source file:okuyama.imdst.util.DataDispatcher.java
/**
 * Adds a data node to the consistent-hash circle.
 *
 * Refuses to run (returns null) while a previous circle snapshot ("oldCircle") is
 * still pending; otherwise the current circle is copied into oldCircle before the
 * new node's virtual nodes are inserted.
 *
 * The returned HashMap contains:
 *   "tomain"/"tosub"/"tothird" - the full names of the newly added main/sub/third nodes
 *   "main"/"sub"/"third"       - maps of existing node full name -> hash-range string(s)
 *                                ("start-end", multiple ranges joined with '_') that
 *                                describe the data ranges moving to the new node.
 *
 * @param keyNodeFullName e.g. "192.168.1.3:5555" (main node)
 * @param subKeyNodeFullName e.g. "192.168.2.3:5555" (sub node; may be null or "")
 * @param thirdKeyNodeFullName e.g. "192.168.2.3:5555" (third node; may be null or "")
 * @return range-transfer description map, or null if a rebalance is already in progress
 */
public static HashMap addNode4ConsistentHash(String keyNodeFullName, String subKeyNodeFullName,
        String thirdKeyNodeFullName) {
    // a pending oldCircle means a node change is already being processed
    if (oldCircle != null)
        return null;
    HashMap retMap = new HashMap(2);
    HashMap convertMap = new HashMap();
    HashMap subConvertMap = new HashMap();
    HashMap thirdConvertMap = new HashMap();
    ArrayList keyNodeList = new ArrayList();
    ArrayList subKeyNodeList = new ArrayList();
    ArrayList thirdKeyNodeList = new ArrayList();
    oldCircle = new TreeMap();
    // snapshot the current circle into oldCircle before modifying it
    Set set = nodeCircle.keySet();
    Iterator iterator = set.iterator();
    while (iterator.hasNext()) {
        Integer key = (Integer) iterator.next();
        String nodeFullName = (String) nodeCircle.get(key);
        oldCircle.put(key, nodeFullName);
    }
    convertMap = new HashMap();
    // for each virtual node of the new node, determine which existing node currently
    // owns the hash range that the new virtual node will take over
    for (int i = 0; i < virtualNodeSize; i++) {
        int targetHash = sha1Hash4Int(keyNodeFullName + "_" + i);
        int targetHashStart = 0;
        int targetHashEnd = targetHash;
        String nodeName = null;
        SortedMap headMap = nodeCircle.headMap(targetHash);
        SortedMap tailMap = nodeCircle.tailMap(targetHash);
        /*
         * The range starts just after the predecessor on the circle; the current
         * owner is the successor (wrapping around to the first entry when the
         * target hash lies before the first / after the last circle entry).
         */
        if (headMap.isEmpty()) {
            int hash = ((Integer) nodeCircle.lastKey()).intValue();
            targetHashStart = hash + 1;
            nodeName = (String) nodeCircle.get(nodeCircle.firstKey());
        } else {
            int hash = ((Integer) headMap.lastKey()).intValue();
            targetHashStart = hash + 1;
            if (tailMap.isEmpty()) {
                nodeName = (String) nodeCircle.get(nodeCircle.firstKey());
            } else {
                nodeName = (String) nodeCircle.get(tailMap.firstKey());
            }
        }
        /*
         * Accumulate the ranges per owning node; multiple ranges are joined with
         * '_', e.g. Node01:5553,"6756-9876_12345-987654".
         */
        if (convertMap.containsKey(nodeName)) {
            String work = (String) convertMap.get(nodeName);
            convertMap.put(nodeName, work + "_" + targetHashStart + "-" + targetHashEnd);
            // mirror the range for the owner's sub node, if one is registered
            String[] subDataNodeInfo = (String[]) keyNodeMap.get(nodeName + "_sub");
            if (subDataNodeInfo != null) {
                subConvertMap.put(subDataNodeInfo[2], work + "_" + targetHashStart + "-" + targetHashEnd);
            }
            // mirror the range for the owner's third node, if one is registered
            String[] thirdDataNodeInfo = (String[]) keyNodeMap.get(nodeName + "_third");
            if (thirdDataNodeInfo != null) {
                thirdConvertMap.put(thirdDataNodeInfo[2], work + "_" + targetHashStart + "-" + targetHashEnd);
            }
        } else {
            convertMap.put(nodeName, targetHashStart + "-" + targetHashEnd);
            String[] subDataNodeInfo = (String[]) keyNodeMap.get(nodeName + "_sub");
            if (subDataNodeInfo != null) {
                subConvertMap.put(subDataNodeInfo[2], targetHashStart + "-" + targetHashEnd);
            }
            String[] thirdDataNodeInfo = (String[]) keyNodeMap.get(nodeName + "_third");
            if (thirdDataNodeInfo != null) {
                thirdConvertMap.put(thirdDataNodeInfo[2], targetHashStart + "-" + targetHashEnd);
            }
        }
    }
    // assemble the result map
    retMap.put("tomain", keyNodeFullName);
    retMap.put("tosub", subKeyNodeFullName);
    retMap.put("tothird", thirdKeyNodeFullName);
    retMap.put("main", convertMap);
    retMap.put("sub", subConvertMap);
    retMap.put("third", thirdConvertMap);
    /*
     * Rebuild the node detail table. Row layout:
     *   [0][*]=main Name  [1][*]=main Port  [2][*]=main Full
     *   [3][*]=sub  Name  [4][*]=sub  Port  [5][*]=sub  Full
     *   [6][*]=third Name [7][*]=third Port [8][*]=third Full
     */
    String[][] allNodeDetailList = (String[][]) keyNodeMap.get("list");
    // NOTE(review): the new table is sized with allNodeDetailList.length (the row
    // count, 9) rather than allNodeDetailList[0].length (the node count), and the
    // new node is written at column index allNodeDetailList.length below. This only
    // works while both happen to match - confirm against the rest of the class.
    String[][] newAllNodeDetailList = new String[9][allNodeDetailList.length + 1];
    keyNodeList = (ArrayList) allNodeMap.get("main");
    // copy the existing node columns into the enlarged table
    for (int allNodeDetailListIdx = 0; allNodeDetailListIdx < allNodeDetailList[0].length; allNodeDetailListIdx++) {
        newAllNodeDetailList[0][allNodeDetailListIdx] = allNodeDetailList[0][allNodeDetailListIdx];
        newAllNodeDetailList[1][allNodeDetailListIdx] = allNodeDetailList[1][allNodeDetailListIdx];
        newAllNodeDetailList[2][allNodeDetailListIdx] = allNodeDetailList[2][allNodeDetailListIdx];
        newAllNodeDetailList[3][allNodeDetailListIdx] = allNodeDetailList[3][allNodeDetailListIdx];
        newAllNodeDetailList[4][allNodeDetailListIdx] = allNodeDetailList[4][allNodeDetailListIdx];
        newAllNodeDetailList[5][allNodeDetailListIdx] = allNodeDetailList[5][allNodeDetailListIdx];
        newAllNodeDetailList[6][allNodeDetailListIdx] = allNodeDetailList[6][allNodeDetailListIdx];
        newAllNodeDetailList[7][allNodeDetailListIdx] = allNodeDetailList[7][allNodeDetailListIdx];
        newAllNodeDetailList[8][allNodeDetailListIdx] = allNodeDetailList[8][allNodeDetailListIdx];
    }
    String keyNode = keyNodeFullName;
    String[] keyNodeDt = keyNode.split(":");
    keyNodeList.add(keyNode);
    // append the new main node to the detail table
    newAllNodeDetailList[2][allNodeDetailList.length] = keyNode;
    newAllNodeDetailList[0][allNodeDetailList.length] = keyNodeDt[0];
    newAllNodeDetailList[1][allNodeDetailList.length] = keyNodeDt[1];
    String[] mainNodeDt = { keyNodeDt[0], keyNodeDt[1], keyNode };
    // register the new node in keyNodeMap
    keyNodeMap.put(keyNode, mainNodeDt);
    // insert the virtual nodes of the new node into the consistent-hash circle
    for (int i = 0; i < virtualNodeSize; i++) {
        nodeCircle.put(new Integer(sha1Hash4Int(keyNode + "_" + i)), keyNode);
    }
    synchronized (syncObj) {
        allNodeMap.put("main", keyNodeList);
    }
    // register the sub node, if one was given
    if (subKeyNodeFullName != null && !subKeyNodeFullName.equals("")) {
        String subKeyNode = subKeyNodeFullName;
        String[] subKeyNodeDt = subKeyNode.split(":");
        /*
         * put() returns the previous list: the fresh empty list is stored
         * temporarily while the previous one is extended and then written back
         * under syncObj below.
         * NOTE(review): if allNodeMap contained no "sub" entry this returns null
         * and the following add() would NPE - confirm the invariant elsewhere.
         */
        subKeyNodeList = (ArrayList) allNodeMap.put("sub", subKeyNodeList);
        subKeyNodeList.add(subKeyNode);
        newAllNodeDetailList[5][allNodeDetailList.length] = subKeyNode;
        newAllNodeDetailList[3][allNodeDetailList.length] = subKeyNodeDt[0];
        newAllNodeDetailList[4][allNodeDetailList.length] = subKeyNodeDt[1];
        String[] subNodeDt = { subKeyNodeDt[0], subKeyNodeDt[1], subKeyNode };
        keyNodeMap.put(newAllNodeDetailList[2][allNodeDetailList.length] + "_sub", subNodeDt);
        synchronized (syncObj) {
            allNodeMap.put("sub", subKeyNodeList);
        }
    }
    // register the third node, if one was given (same pattern as the sub node)
    if (thirdKeyNodeFullName != null && !thirdKeyNodeFullName.equals("")) {
        String thirdKeyNode = thirdKeyNodeFullName;
        String[] thirdKeyNodeDt = thirdKeyNode.split(":");
        thirdKeyNodeList = (ArrayList) allNodeMap.put("third", thirdKeyNodeList);
        thirdKeyNodeList.add(thirdKeyNode);
        newAllNodeDetailList[8][allNodeDetailList.length] = thirdKeyNode;
        newAllNodeDetailList[6][allNodeDetailList.length] = thirdKeyNodeDt[0];
        newAllNodeDetailList[7][allNodeDetailList.length] = thirdKeyNodeDt[1];
        String[] thirdNodeDt = { thirdKeyNodeDt[0], thirdKeyNodeDt[1], thirdKeyNode };
        keyNodeMap.put(newAllNodeDetailList[2][allNodeDetailList.length] + "_third", thirdNodeDt);
        synchronized (syncObj) {
            allNodeMap.put("third", thirdKeyNodeList);
        }
    }
    // publish the rebuilt detail table
    keyNodeMap.put("list", newAllNodeDetailList);
    return retMap;
}
From source file:com.aurel.track.exchange.excel.ExcelImportAction.java
/** * Execute the import from excel//from w w w . j a va 2 s . com * @return */ public String excelImport() { String excelMappingsDirectory = AttachBL.getExcelImportDirBase() + personID; Workbook workbook = ExcelFieldMatchBL.loadWorkbook(excelMappingsDirectory, fileName); Set<Integer> lastSavedIdentifierFieldIDIsSet = null; Map<String, Integer> columNameToFieldIDMap = null; try { File file = new File(excelMappingsDirectory, mappingFileName); FileInputStream fileInputStream = new FileInputStream(file); ObjectInputStream objectInputStream = new ObjectInputStream(fileInputStream); columNameToFieldIDMap = (Map<String, Integer>) objectInputStream.readObject(); lastSavedIdentifierFieldIDIsSet = (Set<Integer>) objectInputStream.readObject(); objectInputStream.close(); } catch (FileNotFoundException e) { LOGGER.warn("Creating the input stream for mapping failed with " + e.getMessage()); LOGGER.debug(ExceptionUtils.getStackTrace(e)); } catch (IOException e) { LOGGER.warn("Saving the mapping failed with " + e.getMessage()); LOGGER.debug(ExceptionUtils.getStackTrace(e)); } catch (ClassNotFoundException e) { LOGGER.warn("Class not found for the mapping " + e.getMessage()); LOGGER.debug(ExceptionUtils.getStackTrace(e)); } if (workbook == null) { JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageJSON( ImportJSON.ERROR_CODES.ERROR_MESSAGE, getText("admin.actions.importTp.err.uploadAgain"), true)); return null; } if (columNameToFieldIDMap == null) { //for example the sheet contains no columns at all columNameToFieldIDMap = new HashMap<String, Integer>(); } try { Map<Integer, String> columnIndexToColumNameMap = ExcelFieldMatchBL.getFirstRowHeaders(workbook, selectedSheet); Map<Integer, Integer> columnIndexToFieldIDMap = ExcelImportBL .getColumnIndexToFieldID(columNameToFieldIDMap, columnIndexToColumNameMap); Map<Integer, Integer> fieldIDToColumnIndexMap = ExcelImportBL.reverseMap(columnIndexToFieldIDMap); List<ErrorData> errorDataList = 
ExcelImportBL.validateRequiredColumns(workbook, selectedSheet, fieldIDToColumnIndexMap, lastSavedIdentifierFieldIDIsSet, invalidValueHandlingMap, defaultValuesMap, locale); if (!errorDataList.isEmpty()) { //required columns are missing: do not disable the Finish button and do not delete //the file because it may be solved by stepping back and forth in the wizard JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageListJSON( ErrorHandlerJSONAdapter.handleErrorList(errorDataList, locale), ImportJSON.ERROR_CODES.ERROR_MESSAGES, false)); return null; } else { //delete the file for this case because it //either results in a error which should be resolved in the excel file //consequently a new upload cannot be avoided before re-import //or everything is fine and in this case no new import is needed //with any other return a //grid errors Map<Integer, SortedMap<Integer, SortedMap<String, ErrorData>>> gridErrorsMap = new HashMap<Integer, SortedMap<Integer, SortedMap<String, ErrorData>>>(); //row errors Map<Integer, SortedSet<Integer>> rowErrorsMap = new HashMap<Integer, SortedSet<Integer>>(); Map<Integer, SortedSet<Integer>> requiredFieldErrorsMap = new HashMap<Integer, SortedSet<Integer>>(); Map<Integer, Map<Integer, List<Integer>>> rowNoToPseudoFieldsOriginal = new HashMap<Integer, Map<Integer, List<Integer>>>(); Map<Integer, Map<Integer, List<Integer>>> rowNoToPseudoFieldsExcel = new HashMap<Integer, Map<Integer, List<Integer>>>(); Map<Integer, Integer> rowToParentRow = new HashMap<Integer, Integer>(); SortedMap<Integer, TWorkItemBean> workItemBeansMap = ExcelImportBL.getAndValidateGridData(workbook, selectedSheet, personID, locale, columnIndexToFieldIDMap, fieldIDToColumnIndexMap, lastSavedIdentifierFieldIDIsSet, defaultValuesMap, invalidValueHandlingMap, rowNoToPseudoFieldsOriginal, rowNoToPseudoFieldsExcel, gridErrorsMap, rowErrorsMap, requiredFieldErrorsMap, rowToParentRow); Collection<TWorkItemBean> workItemBeans = workItemBeansMap.values(); if 
(gridErrorsMap.isEmpty() && rowErrorsMap.isEmpty() && requiredFieldErrorsMap.isEmpty()) { List<Integer> alreadyExistingRows = ExcelImportBL.getExistingWorkItemRows(workItemBeans); //already existing rows with the same synopsis, project, issueType and release scheduled //(independently of the identifierFieldIDs) to avoid importing the same new issues more //(only the not found i.e. new issues are tested) if (!alreadyExistingRows.isEmpty()) { JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageJSON( ImportJSON.ERROR_CODES.ERROR_MESSAGE, LocalizeUtil.getParametrizedString("admin.actions.importExcel.err.existingRows", new String[] { MergeUtil.getMergedString(alreadyExistingRows, ", ") }, locale), true)); return null; } else { Set<Integer> presentFieldIDs = ExcelImportBL.getPresentFields(columNameToFieldIDMap); presentFieldIDs.addAll(FieldsManagerRT.getRequiredSystemFieldsList()); //the explicit change of this field is not allowed presentFieldIDs.remove(SystemFields.LASTMODIFIEDDATE); presentFieldIDs.remove(SystemFields.CREATEDATE); Map<Integer, Map<Integer, Map<Integer, TFieldConfigBean>>> projectsIssueTypesFieldConfigsMap = FieldRuntimeBL .getFieldConfigsForWorkItemBeans(workItemBeans, presentFieldIDs, locale); Map<Integer, Map<Integer, Map<String, Object>>> projectsIssueTypesFieldSettingsMap = FieldRuntimeBL .getFieldSettingsForFieldConfigs(projectsIssueTypesFieldConfigsMap); Map<Integer, WorkItemContext> existingIssueContextsMap = FieldsManagerRT .createImportContext(workItemBeans, presentFieldIDs, projectsIssueTypesFieldConfigsMap, projectsIssueTypesFieldSettingsMap, null, null, personID, locale); SortedMap<Integer, List<ErrorData>> validationErrorsMap = FieldsManagerRT.validateWorkItems( workItemBeans, presentFieldIDs, existingIssueContextsMap, projectsIssueTypesFieldConfigsMap, projectsIssueTypesFieldSettingsMap, personID, locale); //validation errors: either grid (workItem and field) or row (workItem) level errors. 
There is a chance to resolve the problems //without modifying the excel file: for ex. by setting further/other default values if (!validationErrorsMap.isEmpty()) { List<String> rowErrors = ExcelImportBL.renderRowErrors(validationErrorsMap, fieldIDToColumnIndexMap, locale); JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageListJSON(rowErrors, ImportJSON.ERROR_CODES.ERROR_MESSAGES, false)); return null; } else { if (overwriteMap == null) { overwriteMap = new HashMap<String, Boolean>(); } SortedMap<Integer, SortedMap<Integer, Map<Integer, Object>>> confictsMap = ExcelImportBL .conflictResolutionWorkItems(workItemBeans, presentFieldIDs, existingIssueContextsMap, projectsIssueTypesFieldConfigsMap, columnIndexToColumNameMap, fieldIDToColumnIndexMap, personID, locale, overwriteMap); if (confictsMap != null && !confictsMap.isEmpty()) { //render conflicts //do not disable Finish and do not delete the file instead resolve the conflicts and import again JSONUtility.encodeJSON(servletResponse, ExcelImportJSON.getExcelConflictsJSON(confictsMap, locale, false)); return null; } else { //no conflicts or conflict handling is set (overwriteMap was submitted) List<ErrorData> errorsList = new ArrayList<ErrorData>(); ImportCounts importCounts = FieldsManagerRT.saveWorkItems(workItemBeansMap, presentFieldIDs, existingIssueContextsMap, projectsIssueTypesFieldConfigsMap, projectsIssueTypesFieldSettingsMap, rowNoToPseudoFieldsOriginal, rowNoToPseudoFieldsExcel, rowToParentRow, personID, locale, errorsList); if (!errorsList.isEmpty()) { JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageListJSON( ErrorHandlerJSONAdapter.handleErrorList(errorDataList, locale), ImportJSON.ERROR_CODES.ERROR_MESSAGES, true)); return null; } JSONUtility.encodeJSON(servletResponse, ImportJSON.importMessageJSON(true, LocalizeUtil.getParametrizedString( "admin.actions.importExcel.message.importResult", new String[] { Integer.valueOf(importCounts.getNoOfCreatedIssues()) 
.toString(), Integer.valueOf(importCounts.getNoOfUpdatedIssues()) .toString() }, locale), true, locale)); //successful import, delete the file File file = new File(excelMappingsDirectory, fileName); file.delete(); return null; } } } } else { //grid or row errors Map<Integer, List<String>> gridErrorsForJsonMap = null; if (!gridErrorsMap.isEmpty()) { gridErrorsForJsonMap = ExcelImportBL.getGridErrorsForJsonMap(gridErrorsMap, locale); } Map<String, String> rowErrorsForJsonMap = null; if (!rowErrorsMap.isEmpty()) { rowErrorsForJsonMap = ExcelImportBL.getRowErrorsForJsonMap(rowErrorsMap); } List<String> requiredFieldErrorsList = null; if (!requiredFieldErrorsMap.isEmpty()) { requiredFieldErrorsList = ExcelImportBL .getMissingRequiredFieldErrorsForJsonMap(requiredFieldErrorsMap, locale); } JSONUtility.encodeJSON(servletResponse, ExcelImportJSON.getExcelWrongGridValuesJSON( gridErrorsForJsonMap, rowErrorsForJsonMap, requiredFieldErrorsList, locale, true)); } } } catch (Exception e) { addActionError(getText("admin.actions.importTp.err.failed")); LOGGER.error(ExceptionUtils.getStackTrace(e)); JSONUtility.encodeJSON(servletResponse, ImportJSON.importErrorMessageJSON(ImportJSON.ERROR_CODES.ERROR_MESSAGE, LocalizeUtil .getLocalizedTextFromApplicationResources("admin.actions.importTp.err.failed", locale), true)); } //delete the uploaded excel file return null; }
From source file:org.jahia.modules.modulemanager.forge.ForgeService.java
/**
 * Loads the list of available modules from all configured forges, refreshing the
 * cached list when a flush was requested or the configured reload delay has
 * elapsed. For each module only the highest version whose required Jahia version
 * is compatible with the running instance is kept.
 *
 * @return the (possibly cached) sorted list of modules
 */
public List<Module> loadModules() {
    if (flushModules || (lastModulesLoad + loadModulesDelay) < new Date().getTime()) {
        modules.clear();
        for (Forge forge : forges) {
            String url = forge.getUrl() + "/contents/modules-repository.moduleList.json";
            Map<String, String> headers = new HashMap<String, String>();
            if (!StringUtils.isEmpty(forge.getUser())) {
                headers.put("Authorization", "Basic "
                        + Base64.encode((forge.getUser() + ":" + forge.getPassword()).getBytes()));
            }
            headers.put("accept", "application/json");
            String jsonModuleList = httpClientService.executeGet(url, headers);
            try {
                JSONArray modulesRoot = new JSONArray(jsonModuleList);
                JSONArray moduleList = modulesRoot.getJSONObject(0).getJSONArray("modules");
                for (int i = 0; i < moduleList.length(); i++) {
                    boolean add = true;
                    final JSONObject moduleObject = moduleList.getJSONObject(i);
                    // skip modules already provided by an earlier (higher-priority) forge
                    for (Module m : modules) {
                        if (StringUtils.equals(m.getId(), moduleObject.getString("name"))
                                && StringUtils.equals(m.getGroupId(), moduleObject.getString("groupId"))) {
                            add = false;
                            break;
                        }
                    }
                    if (add) {
                        // keep only versions whose required Jahia version is satisfied,
                        // sorted so the last entry is the highest usable version
                        final JSONArray moduleVersions = moduleObject.getJSONArray("versions");
                        SortedMap<Version, JSONObject> sortedVersions = new TreeMap<Version, JSONObject>();
                        final Version jahiaVersion = new Version(Jahia.VERSION);
                        for (int j = 0; j < moduleVersions.length(); j++) {
                            JSONObject object = moduleVersions.getJSONObject(j);
                            Version version = new Version(object.getString("version"));
                            Version requiredVersion = new Version(
                                    StringUtils.substringAfter(object.getString("requiredVersion"), "version-"));
                            if (requiredVersion.compareTo(jahiaVersion) <= 0) {
                                sortedVersions.put(version, object);
                            }
                        }
                        if (!sortedVersions.isEmpty()) {
                            Module module = new Module();
                            JSONObject versionObject = sortedVersions.get(sortedVersions.lastKey());
                            module.setRemoteUrl(moduleObject.getString("remoteUrl"));
                            module.setRemotePath(moduleObject.getString("path"));
                            if (moduleObject.has("icon")) {
                                module.setIcon(moduleObject.getString("icon"));
                            }
                            module.setVersion(versionObject.getString("version"));
                            module.setName(moduleObject.getString("title"));
                            module.setId(moduleObject.getString("name"));
                            module.setGroupId(moduleObject.getString("groupId"));
                            module.setDownloadUrl(versionObject.getString("downloadUrl"));
                            module.setForgeId(forge.getId());
                            modules.add(module);
                        }
                    }
                }
            } catch (JSONException e) {
                // BUG FIX: pass the exception so the stack trace is not lost
                logger.error("unable to parse JSON return string for " + url, e);
            } catch (Exception e) {
                // BUG FIX: pass the exception (stack trace was lost) and separate the message
                logger.error("unable to get store information: " + e.getMessage(), e);
            }
        }
        Collections.sort(modules);
        lastModulesLoad = new Date().getTime();
        flushModules = false;
    }
    return modules;
}
From source file:org.texai.torrent.PeerCoordinator.java
/** Reports the upload statistics. */ public void reportUploadStatistics() { final SortedMap<Long, TrackedPeerInfo> sortedUploadStatisticsDictionary = new TreeMap<>(); synchronized (uploadStatisticsDictionary) { for (final Entry<TrackedPeerInfo, Long> entry : uploadStatisticsDictionary.entrySet()) { sortedUploadStatisticsDictionary.put(entry.getValue(), entry.getKey()); }// w ww . ja v a 2 s . c om } LOGGER.info("number bytes uploaded to peers ..."); if (sortedUploadStatisticsDictionary.isEmpty()) { LOGGER.info(" none"); } else { for (final Entry<Long, TrackedPeerInfo> entry : sortedUploadStatisticsDictionary.entrySet()) { LOGGER.info(" " + entry.getValue() + " " + entry.getKey()); } } }
From source file:org.texai.torrent.PeerCoordinator.java
/** Logs the number of bytes downloaded from each peer, ordered by byte count. */
public void reportDownloadStatistics() {
    // Build a bytes -> peer view of the statistics so the log output is sorted by volume.
    // NOTE(review): peers sharing the same downloaded-byte count overwrite each other
    // in this map, so only one of them appears in the report - confirm this is intended.
    final SortedMap<Long, TrackedPeerInfo> byByteCount = new TreeMap<>();
    synchronized (downloadStatisticsDictionary) {
        for (final Entry<TrackedPeerInfo, Long> statistic : downloadStatisticsDictionary.entrySet()) {
            byByteCount.put(statistic.getValue(), statistic.getKey());
        }
    }
    LOGGER.info("number bytes downloaded from peers ...");
    if (byByteCount.isEmpty()) {
        LOGGER.info("  none");
        return;
    }
    for (final Entry<Long, TrackedPeerInfo> statistic : byByteCount.entrySet()) {
        LOGGER.info("  " + statistic.getValue() + " " + statistic.getKey());
    }
}
From source file:de.interactive_instruments.ShapeChange.Transformation.Profiling.Profiler.java
/** * Looks up the IDs of all classes used by the given class, either directly * (through attributes, associations, supertypes, and subtypes [possibly * restricted via a configuration parameter]) or indirectly (e.g. through * association classes and other complex types used by the class - to the * deepest sublevel).//from ww w.j a v a 2s . c om * * Type ids are only added to the set if they are not already contained in * it. * * This method is called recursively for each type that is added to the set * (so if that type is already contained in the set, no recursive call is * performed). * * @param ci * @param usedCisById * @param residualTypeRemoval_includeSubtypesFor */ private void deepSearchForTypesUsedByClass(ClassInfo ci, Set<String> usedCisById, GenericModel genModel, Pattern residualTypeRemoval_includeSubtypesFor) { if (usedCisById.contains(ci.id())) { /* * apparently we already searched and found the given class, through * a previous search */ return; } // add the id of the given class to the set usedCisById.add(ci.id()); /* look up all properties of the class */ SortedMap<StructuredNumber, PropertyInfo> ciPis = ci.properties(); if (ciPis != null && !ciPis.isEmpty()) { // look up types used by properties for (PropertyInfo pi : ciPis.values()) { ClassInfo typeCi = genModel.classById(pi.typeInfo().id); // if the type is not contained in the set, perform a deep // search on it (which automatically adds it to the set) if (typeCi != null && !usedCisById.contains(pi.typeInfo().id)) { deepSearchForTypesUsedByClass(typeCi, usedCisById, genModel, residualTypeRemoval_includeSubtypesFor); // if the property has an association with an association // class and that class is not already in the set, perform a // deep search on it (which automatically adds it to the set if (pi.association() != null) { ClassInfo assoClass = pi.association().assocClass(); if (assoClass != null && !usedCisById.contains(assoClass.id())) { deepSearchForTypesUsedByClass(assoClass, usedCisById, 
genModel, residualTypeRemoval_includeSubtypesFor); } } } } } // look up and search through any supertype(s) Set<String> supertypeIds = ci.supertypes(); /* * if the class has one or more supertypes and the supertypes are not * already in the set, perform a deep search on each of them (which * automatically adds them to the set) */ if (supertypeIds != null && !supertypeIds.isEmpty()) { for (String supertypeId : supertypeIds) { ClassInfo supertype = genModel.classById(supertypeId); if (supertype != null && !usedCisById.contains(supertype.id())) { deepSearchForTypesUsedByClass(supertype, usedCisById, genModel, residualTypeRemoval_includeSubtypesFor); } } } if (residualTypeRemoval_includeSubtypesFor != null && residualTypeRemoval_includeSubtypesFor.matcher(ci.name()).matches()) { // look up and search through any subtype(s) Set<String> subtypeIds = ci.subtypes(); /* * if the class has one or more subtypes and the subtypes are not * already in the set, perform a deep search on each of them (which * automatically adds them to the set) * * If the configuration contains a parameter that provides a regular * expression to identify relevant class names, take that into * account. */ if (subtypeIds != null && !subtypeIds.isEmpty()) { for (String subtypeId : subtypeIds) { ClassInfo subtype = genModel.classById(subtypeId); if (subtype != null && !usedCisById.contains(subtype.id())) { deepSearchForTypesUsedByClass(subtype, usedCisById, genModel, residualTypeRemoval_includeSubtypesFor); } } } } }
From source file:org.jahia.services.templates.JahiaTemplateManagerService.java
/**
 * Checks whether a module with the given symbolic name is already registered under
 * a different group id (i.e. a distinct module that would clash on the id).
 *
 * @param symbolicName the module's symbolic name
 * @param groupId the group id of the module being considered
 * @return true if versions of this symbolic name exist and their group id differs
 */
public boolean differentModuleWithSameIdExists(String symbolicName, String groupId) {
    SortedMap<ModuleVersion, JahiaTemplatesPackage> versions = templatePackageRegistry.getAllModuleVersions()
            .get(symbolicName);
    if (versions == null || versions.isEmpty()) {
        return false;
    }
    // compare against the group id of the first registered version
    JahiaTemplatesPackage firstVersion = versions.get(versions.firstKey());
    return !firstVersion.getGroupId().equals(groupId);
}
From source file:org.jahia.services.render.scripting.bundle.BundleScriptResolver.java
/** * Callback for unregistering resource views for a bundle. * * @param bundle the bundle to unregister views for * @param scripts the URLs of the views to unregister *///from ww w . j a v a2 s . c om public void removeBundleScripts(Bundle bundle, List<URL> scripts) { final String bundleName = bundle.getSymbolicName(); final SortedMap<String, ViewResourceInfo> existingBundleScripts = availableScripts.get(bundleName); if (existingBundleScripts == null) { return; } if (!scripts.isEmpty()) { boolean didRemove = false; for (URL script : scripts) { didRemove = existingBundleScripts.remove(script.getPath()) != null; } if (didRemove) { // remove entry if we don't have any scripts anymore for this bundle if (existingBundleScripts.isEmpty()) { availableScripts.remove(bundleName); } logger.info("Bundle {} unregistered {} views", bundle, scripts); clearCaches(); } } }
From source file:io.fabric8.maven.plugin.mojo.internal.ImportMojo.java
/**
 * Scans ~/.ssh for public/private key pairs and imports one into the given secret
 * data: if exactly one pair exists it is chosen automatically, otherwise the user
 * is prompted to pick one. Logs a warning when no pairs are found.
 *
 * @param secretData secret entries to add the chosen key pair to
 * @param host the git host the keys will authenticate against (used in the prompt)
 * @throws MojoExecutionException if importing the chosen key pair fails
 */
private void chooseSshKeyPairs(Map<String, String> secretData, String host) throws MojoExecutionException {
    String homeDir = System.getProperty("user.home", ".");
    File sshDir = new File(homeDir, ".ssh");
    SortedMap<String, String> keyPairs = new TreeMap<>();
    // isDirectory() already implies exists(); the previous exists() check was redundant
    if (sshDir.isDirectory()) {
        File[] files = sshDir.listFiles();
        if (files != null) {
            for (File file : files) {
                String publicName = file.getName();
                if (file.isFile() && publicName.endsWith(".pub")) {
                    String privateName = Strings.stripSuffix(publicName, ".pub");
                    // only keep .pub files that have a matching private key file
                    if (new File(sshDir, privateName).isFile()) {
                        keyPairs.put(privateName, publicName);
                    }
                }
            }
        }
    }
    if (keyPairs.isEmpty()) {
        log.warn("No SSH key pairs could be found in %s to choose from!", sshDir);
        log.warn("You may want to clone the git repository over https:// instead to avoid ssh key pairs?");
    } else {
        // BUG FIX: was 'keyPairs.size() == 0', which is unreachable inside this
        // branch - a single key pair was never auto-selected and always prompted for
        if (keyPairs.size() == 1) {
            String privateName = keyPairs.firstKey();
            importSshKeys(secretData, sshDir, privateName, keyPairs.get(privateName));
        } else {
            List<String> privateKeys = new ArrayList<>(keyPairs.keySet());
            String privateKey = null;
            try {
                privateKey = prompter.prompt(
                        "Which public / private key pair do you wish to use for SSH authentication with host: "
                                + host,
                        privateKeys);
            } catch (PrompterException e) {
                log.warn("Failed to get user input: %s", e);
            }
            if (Strings.isNotBlank(privateKey)) {
                String publicKey = keyPairs.get(privateKey);
                if (Strings.isNullOrBlank(publicKey)) {
                    log.warn("Invalid answer: %s when available values are: %s", privateKey, privateKeys);
                } else {
                    importSshKeys(secretData, sshDir, privateKey, publicKey);
                }
            }
        }
    }
}