List of usage examples for java.util TreeMap entrySet
EntrySet entrySet
To view the source code for java.util TreeMap entrySet, click the Source Link.
From source file:de.tudarmstadt.ukp.experiments.dip.wp1.documents.Step9AgreementCollector.java
@SuppressWarnings("unchecked") public static void computeObservedAgreement(File goldDataFolder, File outputDir) throws Exception { // iterate over query containers for (File f : FileUtils.listFiles(goldDataFolder, new String[] { "xml" }, false)) { QueryResultContainer queryResultContainer = QueryResultContainer .fromXML(FileUtils.readFileToString(f, "utf-8")); for (QueryResultContainer.SingleRankedResult rankedResult : queryResultContainer.rankedResults) { // only non-empty and annotated results // No annotations found for document: clueWebID: clueweb12-1407wb-22-10643, queryID: 1006 // <clueWebID>clueweb12-1407wb-22-10643</clueWebID> // <score>5.93809186</score> // <additionalInfo>indri</additionalInfo> // <plainText></plainText> if (rankedResult.plainText != null && !rankedResult.plainText.isEmpty()) { if (rankedResult.mTurkRelevanceVotes.isEmpty()) { // throw new IllegalStateException("No annotations found for document: " System.err.println("No annotations found for document: " + "clueWebID: " + rankedResult.clueWebID + ", queryID: " + queryResultContainer.qID); } else { // first, get all the sentence IDs byte[] bytes = new BASE64Decoder() .decodeBuffer(new ByteArrayInputStream(rankedResult.originalXmi.getBytes())); JCas jCas = JCasFactory.createJCas(); XmiCasDeserializer.deserialize(new ByteArrayInputStream(bytes), jCas.getCas()); // for each sentence, we'll collect all its annotations TreeMap<Integer, SortedMap<String, String>> sentencesAndRelevanceAnnotations = collectSentenceIDs( jCas);/*from ww w .j ava2s .com*/ // now we will the map with mturk annotations // the list of true/false for each sentence will be consistent (the annotator ordering remains) for (QueryResultContainer.MTurkRelevanceVote mTurkRelevanceVote : rankedResult.mTurkRelevanceVotes) { for (QueryResultContainer.SingleSentenceRelevanceVote sentenceRelevanceVote : mTurkRelevanceVote.singleSentenceRelevanceVotes) { String sentenceIDString = sentenceRelevanceVote.sentenceID; if (sentenceIDString 
== null || sentenceIDString.isEmpty()) { throw new IllegalStateException("Empty sentence ID for turker " + mTurkRelevanceVote.turkID + ", HIT: " + mTurkRelevanceVote.hitID + ", clueWebID: " + rankedResult.clueWebID + ", queryID: " + queryResultContainer.qID); } else { Integer sentenceIDInt = Integer.valueOf(sentenceIDString); String value = sentenceRelevanceVote.relevant; // add to the list // sanity check first if (sentencesAndRelevanceAnnotations.get(sentenceIDInt) .containsKey(mTurkRelevanceVote.turkID)) { System.err.println("Annotations for sentence " + sentenceIDInt + " for turker " + mTurkRelevanceVote.turkID + " are duplicate"); } sentencesAndRelevanceAnnotations.get(sentenceIDInt) .put(mTurkRelevanceVote.turkID, value); } } } // for (Map.Entry<Integer, SortedMap<String, String>> entry : sentencesAndRelevanceAnnotations // .entrySet()) { // System.out.println(entry.getKey() + ": " + entry.getValue()); // } // we collect only the "clean" ones Map<Integer, SortedMap<String, String>> cleanSentencesAndRelevanceAnnotations = new HashMap<>(); // sanity check -- all sentences are covered with the same number of annotations for (Map.Entry<Integer, SortedMap<String, String>> entry : sentencesAndRelevanceAnnotations .entrySet()) { SortedMap<String, String> singleSentenceAnnotations = entry.getValue(); // remove empty sentences if (singleSentenceAnnotations.values().isEmpty()) { // throw new IllegalStateException( System.err.println("Empty annotations for sentence, " + "sentenceID: " + entry.getKey() + ", " + "clueWebID: " + rankedResult.clueWebID + ", queryID: " + queryResultContainer.qID + "; number of assignments: " + singleSentenceAnnotations.values().size() + ", expected: " + NUMBER_OF_TURKERS_PER_HIT + ". 
Sentence will be skipped in evaluation"); } else if (singleSentenceAnnotations.values().size() != NUMBER_OF_TURKERS_PER_HIT) { System.err.println("Inconsistent annotations for sentences, " + "sentenceID: " + entry.getKey() + ", " + "clueWebID: " + rankedResult.clueWebID + ", queryID: " + queryResultContainer.qID + "; number of assignments: " + singleSentenceAnnotations.values().size() + ", expected: " + NUMBER_OF_TURKERS_PER_HIT + ". Sentence will be skipped in evaluation"); } else { cleanSentencesAndRelevanceAnnotations.put(entry.getKey(), entry.getValue()); } } // fill the annotation study CodingAnnotationStudy study = new CodingAnnotationStudy(NUMBER_OF_TURKERS_PER_HIT); study.addCategory("true"); study.addCategory("false"); for (SortedMap<String, String> singleSentenceAnnotations : cleanSentencesAndRelevanceAnnotations .values()) { // only non-empty sentences Collection<String> values = singleSentenceAnnotations.values(); if (!values.isEmpty() && values.size() == NUMBER_OF_TURKERS_PER_HIT) { study.addItemAsArray(values.toArray()); } } // System.out.println(study.getCategories()); // Fleiss' multi-pi. FleissKappaAgreement fleissKappaAgreement = new FleissKappaAgreement(study); double percentage; try { percentage = fleissKappaAgreement.calculateObservedAgreement(); } catch (InsufficientDataException ex) { // dkpro-statistics feature, see https://github.com/dkpro/dkpro-statistics/issues/24 percentage = 1.0; } if (!Double.isNaN(percentage)) { rankedResult.observedAgreement = percentage; // System.out.println(sentencesAndRelevanceAnnotations.values()); } else { System.err.println("Observed agreement is NaN."); } } } } // and save the query to output dir File outputFile = new File(outputDir, queryResultContainer.qID + ".xml"); FileUtils.writeStringToFile(outputFile, queryResultContainer.toXML(), "utf-8"); System.out.println("Finished " + outputFile); } }
From source file:it.imtech.metadata.MetaUtility.java
/** * Metodo che costruisce l'albero delle classificazione e setta il nodo * selezionato se esistente/*from ww w . j a v a2 s . c om*/ * * @param nodes Il nodo nel quale aggiungere i nuovi nodi (Nodo Padre) * @param taxons La lista di nodi per livello */ private void recursiveOefosTreeviewBuild(DefaultMutableTreeNode nodes, TreeMap<Object, Taxon> taxons, String sequence, String panelname) throws Exception { try { for (Map.Entry<Object, Taxon> kv : taxons.entrySet()) { ClassNode iNode = new ClassNode("" + kv.getKey(), kv.getValue().upstream_identifier + ": " + kv.getValue().description); ClassMutableNode inner = new ClassMutableNode(iNode); nodes.add(inner); String oefosname = panelname + "----" + sequence; if (this.oefos_path.get(oefosname) != null) { if (this.oefos_path.get(oefosname).containsValue(kv.getValue().TID)) { selected = inner; } } recursiveOefosTreeviewBuild(inner, kv.getValue().subtaxons, sequence, panelname); } // Utility.sortTreeChildren(nodes); } catch (Exception ex) { throw new Exception("Exception in recursiveOefosTreeviewBuild: " + ex.getStackTrace() + "\n"); } }
From source file:org.jclouds.kinetic.strategy.internal.KineticStorageStrategyImpl.java
@Override public Blob getBlob(final String container, final String key) { BlobBuilder builder = blobBuilders.get(); builder.name(key);/* w ww . ja va 2s . co m*/ File file = getFileForBlobKey(container, key); TreeMap<Long, Blob> blobs = new TreeMap<Long, Blob>(); long fileLength = file.length(); long currentByte = 0; while (currentByte < fileLength) { byte[] chunkContents = new byte[0]; try { chunkContents = Files.asByteSource(file) .slice(currentByte, KineticConstants.PROPERTY_CHUNK_SIZE_BYTES - KineticConstants.PROPERTY_CHUNK_FULL_HEADER_SIZE_BYTES) .read(); } catch (IOException e) { e.printStackTrace(); } Chunk chunk = new Chunk(this, 0, currentByte); chunk.setData(chunkContents); chunk.processChunk(); System.out.printf("Chunk Encoded: %s\n", Arrays.toString(chunk.getData(false))); System.out.printf("Chunk Decoded: %s\n", Arrays.toString(chunk.getData(true))); Blob chunkBlob = this.getChunkedBlob(container, key, currentByte); blobs.put(currentByte, chunkBlob); currentByte += KineticConstants.PROPERTY_CHUNK_SIZE_BYTES - KineticConstants.PROPERTY_CHUNK_FULL_HEADER_SIZE_BYTES; } List<ByteSource> byteSources = new ArrayList<ByteSource>(); for (Map.Entry<Long, Blob> entry : blobs.entrySet()) { byteSources.add((ByteSource) (entry.getValue().getPayload().getRawContent())); } ByteSource finalByteSource = ByteSource.concat(byteSources); return createBlobFromByteSource(container, key, finalByteSource); }
From source file:org.alfresco.bm.report.XLSXReporter.java
/**
 * Adds a "Properties" sheet to the report workbook listing every property of the
 * current test run — one row per property with its name, value and origin —
 * sorted by property name. Values are masked first so passwords do not leak into
 * the report. If the test run or its property list cannot be loaded, the error is
 * logged and no sheet is created.
 *
 * @param workbook the report workbook the sheet is added to
 * @throws IOException       declared for workbook operations
 * @throws NotFoundException declared by the DAO lookup path
 */
private void createPropertiesSheet(XSSFWorkbook workbook) throws IOException, NotFoundException {
    DBObject testRunObj;
    try {
        testRunObj = services.getTestDAO().getTestRun(test, run, true);
    } catch (ObjectNotFoundException e) {
        logger.error("Test run not found!", e);
        return;
    }
    // Ensure we don't leak passwords
    testRunObj = AbstractRestResource.maskValues(testRunObj);
    BasicDBList propertiesList = (BasicDBList) testRunObj.get(FIELD_PROPERTIES);
    if (propertiesList == null) {
        logger.error("Properties not found!");
        return;
    }
    // Order the properties, nicely (TreeMap keeps them sorted by name)
    TreeMap<String, DBObject> properties = new TreeMap<String, DBObject>();
    for (Object propertyObj : propertiesList) {
        DBObject property = (DBObject) propertyObj;
        String key = (String) property.get(FIELD_NAME);
        properties.put(key, property);
    }
    XSSFSheet sheet = workbook.createSheet("Properties");
    // Create the fonts we need
    Font fontBold = workbook.createFont();
    fontBold.setBoldweight(Font.BOLDWEIGHT_BOLD);
    // Create the styles we need: right-aligned cells; headers bold, values wrapped
    XSSFCellStyle propertyStyle = sheet.getWorkbook().createCellStyle();
    propertyStyle.setAlignment(HorizontalAlignment.RIGHT);
    propertyStyle.setWrapText(true);
    XSSFCellStyle headerStyle = sheet.getWorkbook().createCellStyle();
    headerStyle.setAlignment(HorizontalAlignment.RIGHT);
    headerStyle.setFont(fontBold);
    XSSFRow row = null;
    int rowCount = 0;
    XSSFCell cell = null;
    int cellCount = 0;
    // Header row: Property | Value | Origin
    row = sheet.createRow(rowCount++);
    cell = row.createCell(cellCount++);
    {
        cell.setCellValue("Property");
        cell.setCellStyle(headerStyle);
    }
    cell = row.createCell(cellCount++);
    {
        cell.setCellValue("Value");
        cell.setCellStyle(headerStyle);
    }
    cell = row.createCell(cellCount++);
    {
        cell.setCellValue("Origin");
        cell.setCellStyle(headerStyle);
    }
    cellCount = 0;
    // Iterate all the properties for the test run
    for (Map.Entry<String, DBObject> entry : properties.entrySet()) {
        DBObject property = entry.getValue();
        String key = (String) property.get(FIELD_NAME);
        String value = (String) property.get(FIELD_VALUE);
        String origin = (String) property.get(FIELD_ORIGIN);
        row = sheet.createRow(rowCount++);
        cell = row.createCell(cellCount++);
        {
            cell.setCellValue(key);
            cell.setCellStyle(propertyStyle);
        }
        cell = row.createCell(cellCount++);
        {
            cell.setCellValue(value);
            cell.setCellStyle(propertyStyle);
        }
        cell = row.createCell(cellCount++);
        {
            cell.setCellValue(origin);
            cell.setCellStyle(propertyStyle);
        }
        // Back to first column
        cellCount = 0;
    }
    // Size the columns; the value column gets a fixed wide width (15360 units)
    sheet.autoSizeColumn(0);
    sheet.setColumnWidth(1, 15360);
    sheet.autoSizeColumn(2);
    // Printing
    PrintSetup ps = sheet.getPrintSetup();
    sheet.setAutobreaks(true);
    ps.setFitWidth((short) 1);
    ps.setLandscape(true);
    // Header and footer
    sheet.getHeader().setCenter(title);
}
From source file:com.sec.ose.osi.thread.ui_related.UserCommandExecutionThread.java
@Override public void run() { long startTime = System.currentTimeMillis(); log.info(UserRequestHandler.getCommandName(mRequestCode) + " - start"); isDone = false;//from w w w . ja v a 2s.com this.mObserver.pushMessage("Preparing to request to server"); Property prop = Property.getInstance(); switch (mRequestCode) { case UserRequestHandler.DELETE_IDENTIFICATION_TABLE: if (IdentifyQueue.getInstance().size() > 0) { return; } Collection<OSIProjectInfo> infoList = OSIProjectInfoMgr.getInstance().getAllProjects(); for (OSIProjectInfo info : infoList) { if (info.getProjectName().equals(IdentifyMediator.getInstance().getSelectedProjectName())) { continue; } if (info.isManaged() == true) { continue; } mObserver.setMessageHeader("Deleting identification table...\n"); mObserver.pushMessageWithHeader(" > target project : " + info.getProjectName() + "\n"); // Drop identification tables IdentificationDBManager.dropTable(info.getProjectName()); // Remove memory ProjectDiscoveryControllerMap.removeProjectDiscoveryController(info.getProjectName()); } break; case UserRequestHandler.GET_PROTEX_PROJECT_INFO: UEProtexProjectInfo uap = (UEProtexProjectInfo) mEntity; OSIProjectInfo protexProjectInfo = UISDKInterfaceManager.getSDKInterface() .getProjectInfoByName(uap.getProjectName()); this.mObserver.setReturnValue(protexProjectInfo); this.mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); break; case UserRequestHandler.LOAD_IDENTIFICATION_DATA: { UEComboProjectName uei = (UEComboProjectName) mEntity; String projectName = uei.getProjectName(); log.info("loading project [" + projectName + "]"); mObserver.setMessageHeader("Loading Identification Data ...\n"); // Local Component Loading mObserver.pushMessageWithHeader(" > Loading local component info ...\n"); ComponentAPIWrapper.loadLocalComponent(ProjectAPIWrapper.getProjectID(projectName), false); ProjectDiscoveryControllerMap.loadProjectDiscoveryController(projectName, this.mObserver); CodeTreeAPIWrapper.setCodeTree(projectName, 
this.mObserver); log.debug("Selected Project : " + projectName); mObserver.pushMessageWithHeader(" > Loading sql dat to memory ...\n"); AbstractDiscoveryController stringMatchDiscovery = ProjectDiscoveryControllerMap .getDiscoveryController(projectName, IdentificationConstantValue.STRING_MATCH_TYPE); AbstractDiscoveryController codeMatchDiscovery = ProjectDiscoveryControllerMap .getDiscoveryController(projectName, IdentificationConstantValue.CODE_MATCH_TYPE); AbstractDiscoveryController patternMatchDiscovery = ProjectDiscoveryControllerMap .getDiscoveryController(projectName, IdentificationConstantValue.PATTERN_MATCH_TYPE); int displayedMatchedType = IdentificationConstantValue.STRING_MATCH_TYPE; if (stringMatchDiscovery.getNumOfPendingFiles() > 0) { displayedMatchedType = IdentificationConstantValue.STRING_MATCH_TYPE; } else if (codeMatchDiscovery.getNumOfPendingFiles() > 0) { displayedMatchedType = IdentificationConstantValue.CODE_MATCH_TYPE; } else if (patternMatchDiscovery.getNumOfPendingFiles() > 0) { displayedMatchedType = IdentificationConstantValue.PATTERN_MATCH_TYPE; } else if (stringMatchDiscovery.getNumOfDiscoveryFiles() > 0) { displayedMatchedType = IdentificationConstantValue.STRING_MATCH_TYPE; } else if (codeMatchDiscovery.getNumOfDiscoveryFiles() > 0) { displayedMatchedType = IdentificationConstantValue.CODE_MATCH_TYPE; } else if (patternMatchDiscovery.getNumOfDiscoveryFiles() > 0) { displayedMatchedType = IdentificationConstantValue.PATTERN_MATCH_TYPE; } mObserver.pushMessageWithHeader(" > Updating Panel ...\n"); IdentifyMediator.getInstance().changeSelectedIdentificationPanel(displayedMatchedType); // Tree & List Identification mObserver.pushMessageWithHeader(" > Updating Tree and List ...\n"); IdentifyMediator.getInstance().refreshIdentificationInfoForTreeListChildFrames(projectName, null, displayedMatchedType); this.mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); } break; case UserRequestHandler.SYNC_TO_SERVER: { syncToServer(); } break; 
case UserRequestHandler.SYNC_FROM_SERVER: { syncFromServer(); } break; case UserRequestHandler.GET_ALL_PROJECT_NAMES: { ArrayList<String> projectNames = UISDKInterfaceManager.getSDKInterface() .getProjectNames(this.mObserver); this.mObserver.setReturnValue(projectNames); } break; case UserRequestHandler.LOGIN: UELogin ueLogin = (UELogin) mEntity; this.mObserver.pushMessage("Sending login request to server"); mSDKInterface.userLogin(ueLogin.getUserID(), ueLogin.getPassword(), ueLogin.getProtexServerIP(), this.mObserver); if (this.mObserver.getResult() == UIResponseObserver.RESULT_SUCCESS) { IdentifyQueue.getInstance().makeBackup(); loadMainFrame(ueLogin, this.mObserver); } break; case UserRequestHandler.GENERATE_BOTH_REPORT: generateBothReport(); break; case UserRequestHandler.GENERATE_IDENTIFY_REPORT: generateIdentifyReport(); break; case UserRequestHandler.GENERATE_SPDX_REPORT: generateSPDXReport(); break; case UserRequestHandler.GET_BOM_LIST_FROM_SERVER: { UEProjectName ueProjetName = (UEProjectName) mEntity; String projectName = ueProjetName.getProjectName(); UISDKInterfaceManager.getSDKInterface().getBOMListFromProjectName(projectName, this.mObserver); } break; case UserRequestHandler.GET_BOM_LIST_MAP_FROM_SERVER: { UEProjectName ueProjetName = (UEProjectName) mEntity; ArrayList<String> projectNames = ueProjetName.getProjectNames(); UISDKInterfaceManager.getSDKInterface().getBOMListMapFromProjectNames(projectNames, this.mObserver); } break; case UserRequestHandler.PROJECT_CLONE: { UEProjectClone ueProjectClone = (UEProjectClone) mEntity; if (ueProjectClone.getOriginalProjectName() == null) { this.mObserver.setResult(UIResponseObserver.RESULT_FAIL); return; } String projectID = null; String newProjectName = ueProjectClone.getNewProjectName(); String originalProjectName = ueProjectClone.getOriginalProjectName(); projectID = SDKInterfaceImpl.getInstance().cloneProject(newProjectName, originalProjectName, mObserver); ArrayList<OSIProjectInfo> osiProjectInfoList = 
new ArrayList<OSIProjectInfo>(); if (projectID != null) { String sourceLocation = ueProjectClone.getSourceLocation(); boolean isAnalyzed = false; if (OSIProjectInfoMgr.getInstance().getProjectInfo(originalProjectName) != null) { isAnalyzed = OSIProjectInfoMgr.getInstance().getProjectInfo(originalProjectName).isAnalyzed(); } OSIProjectInfo curCreateProjectInfo = createOSIProjectInfo(projectID, newProjectName, sourceLocation, isAnalyzed); osiProjectInfoList.add(curCreateProjectInfo); this.mObserver.setReturnValue(osiProjectInfoList); this.mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); } else { this.mObserver.setResult(UIResponseObserver.RESULT_FAIL); } } break; case UserRequestHandler.PROJECT_CREATE: { UEProjectCreate ueProjectCreate = (UEProjectCreate) mEntity; String projectID = null; TreeMap<String, ProjectSplitInfo> mapOfAnalyzeTarget = ueProjectCreate.getMapOfAnalyzeTarget(); String newProjectName = ueProjectCreate.getProjectName(); ArrayList<OSIProjectInfo> osiProjectInfoList = new ArrayList<OSIProjectInfo>(); if (mapOfAnalyzeTarget == null) { // one project, no source path this.mObserver.pushMessage("Creating project [ " + newProjectName + " ] ..."); projectID = SDKInterfaceImpl.getInstance().createProject(newProjectName, null, mObserver); if (projectID != null) { String sourceLocation = ""; boolean isAnalyzed = false; OSIProjectInfo curCreateProjectInfo = createOSIProjectInfo(projectID, newProjectName, sourceLocation, isAnalyzed); osiProjectInfoList.add(curCreateProjectInfo); this.mObserver.setReturnValue(osiProjectInfoList); this.mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); } else { this.mObserver.setResult(UIResponseObserver.RESULT_FAIL); } } else { // split or no split int curProjectNum = 0; int totalProjectNum = mapOfAnalyzeTarget.size(); Iterator<Map.Entry<String, ProjectSplitInfo>> iter = mapOfAnalyzeTarget.entrySet().iterator(); if (iter == null) { return; } while (iter.hasNext()) { String strProjectName = iter.next().getKey(); 
++curProjectNum; if (mapOfAnalyzeTarget.size() <= 1) { this.mObserver.pushMessage("Creating project [ " + newProjectName + " ] ..."); } else { this.mObserver.pushMessage("Creating project [ (" + curProjectNum + "/" + totalProjectNum + ") " + newProjectName + " ] ..."); } String sourceLocation = mapOfAnalyzeTarget.get(strProjectName).getAnalyzeTargetPath(); projectID = SDKInterfaceImpl.getInstance().createProject(strProjectName, sourceLocation, mObserver); if (projectID != null) { boolean isAnalyzed = false; OSIProjectInfo curCreateProjectInfo = createOSIProjectInfo(projectID, strProjectName, sourceLocation, isAnalyzed); osiProjectInfoList.add(curCreateProjectInfo); this.mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); } else { this.mObserver.setResult(UIResponseObserver.RESULT_FAIL); } } this.mObserver.setReturnValue(osiProjectInfoList); } } break; case UserRequestHandler.PROCESS_IDENTIFY: { IdentifyThread thread = BackgroundJobManager.getInstance().getIdentifyThread(); if (thread != null) thread.setIsStopByUser(true); mObserver.setMessageHeader("Identify processing... \n"); mObserver.pushMessageWithHeader(" > Update local database.\n"); boolean result = ActionIdentifyOrReset.requestIdentify(mObserver, (UEIdentifyResetComment) mEntity); if (result) { mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); } else { mObserver.setResult(UIResponseObserver.RESULT_FAIL); } thread.setIsStopByUser(false); } break; case UserRequestHandler.PROCESS_RESET: { IdentifyThread thread = BackgroundJobManager.getInstance().getIdentifyThread(); if (thread != null) thread.setIsStopByUser(true); mObserver.setMessageHeader("Reset processing... 
\n"); mObserver.pushMessageWithHeader(" > Update local database.\n"); boolean result = ActionIdentifyOrReset.requestReset(mObserver, (UEIdentifyResetComment) mEntity); if (result) { mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); } else { mObserver.setResult(UIResponseObserver.RESULT_FAIL); } thread.setIsStopByUser(false); } break; case UserRequestHandler.PROJECT_SPLIT: { mObserver.setMessageHeader("Assessing Project Size... \n"); boolean result = splitProject(mObserver); if (result) { mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); mObserver.setSuccessMessage("Complete splitting!!"); } else { mObserver.setResult(UIResponseObserver.RESULT_FAIL); mObserver.setFailMessage("Fail splitting!!"); } } break; case UserRequestHandler.PROJECT_INFO: { UEProjectInfo ue = (UEProjectInfo) mEntity; String projectName = ue.getProjectName(); mObserver.setMessageHeader("Getting Project information... \n"); boolean result = getProjectAnalysisInfo(projectName, mObserver); if (result) { mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); mObserver.setSuccessMessage("Complete!!"); } else { mObserver.setResult(UIResponseObserver.RESULT_FAIL); mObserver.setFailMessage("Fail!!"); } } break; case UserRequestHandler.SPDX_AUTO_IDENTIFY: { mObserver.setMessageHeader("Ready to start SPDX Auto Identify ... 
\n"); UESPDXAutoIdentify ue = (UESPDXAutoIdentify) mEntity; SPDXAutoIdentifyController controller = new SPDXAutoIdentifyController(); AutoIdentifyResult autoIdentifyResultReport = controller.startAutoIdentifyFromSPDX(ue, mObserver); if (mObserver.getResult() != UIResponseObserver.RESULT_FAIL) { mObserver.setSuccessMessage(autoIdentifyResultReport.toString()); mObserver.setResult(UIResponseObserver.RESULT_SUCCESS); } else { mObserver.setFailMessage(autoIdentifyResultReport.toString()); } mObserver.setReturnValue(autoIdentifyResultReport); } break; } mObserver.pushMessage("Execution Thread - execution is completed"); isDone = true; long finishTime = System.currentTimeMillis(); log.info(UserRequestHandler.getCommandName(mRequestCode) + " - finish: (" + (finishTime - startTime) + " ms.)"); closeAction(); }
From source file:org.apache.druid.indexing.kafka.supervisor.KafkaSupervisor.java
/**
 * This method does two things -
 * 1. Makes sure the checkpoints information in the taskGroup is consistent with that of the tasks, if not kill
 * inconsistent tasks.
 * 2. truncates the checkpoints in the taskGroup corresponding to which segments have been published, so that any
 * newly created tasks for the taskGroup start indexing from after the latest published offsets.
 */
private void verifyAndMergeCheckpoints(final TaskGroup taskGroup) {
    final int groupId = taskGroup.groupId;
    // pairs of (taskId, that task's sequenceId -> partition -> offset checkpoints)
    final List<Pair<String, TreeMap<Integer, Map<Integer, Long>>>> taskSequences = new ArrayList<>();
    final List<ListenableFuture<TreeMap<Integer, Map<Integer, Long>>>> futures = new ArrayList<>();
    final List<String> taskIds = new ArrayList<>();

    // ask every task in the group for its checkpoints, in parallel
    for (String taskId : taskGroup.taskIds()) {
        final ListenableFuture<TreeMap<Integer, Map<Integer, Long>>> checkpointsFuture = taskClient
                .getCheckpointsAsync(taskId, true);
        taskIds.add(taskId);
        futures.add(checkpointsFuture);
    }

    try {
        // successfulAsList() maps failed futures to null so one bad task does not
        // fail the whole batch; nulls are handled individually below
        List<TreeMap<Integer, Map<Integer, Long>>> futuresResult = Futures.successfulAsList(futures)
                .get(futureTimeoutInSeconds, TimeUnit.SECONDS);
        for (int i = 0; i < futuresResult.size(); i++) {
            final TreeMap<Integer, Map<Integer, Long>> checkpoints = futuresResult.get(i);
            final String taskId = taskIds.get(i);
            if (checkpoints == null) {
                try {
                    // catch the exception in failed futures
                    futures.get(i).get();
                } catch (Exception e) {
                    log.error(e, "Problem while getting checkpoints for task [%s], killing the task", taskId);
                    killTask(taskId);
                    taskGroup.tasks.remove(taskId);
                }
            } else if (checkpoints.isEmpty()) {
                log.warn("Ignoring task [%s], as probably it is not started running yet", taskId);
            } else {
                taskSequences.add(new Pair<>(taskId, checkpoints));
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    // latest published offsets per partition from the metadata store, valid only
    // when the stored metadata refers to the same topic
    final KafkaDataSourceMetadata latestDataSourceMetadata = (KafkaDataSourceMetadata) indexerMetadataStorageCoordinator
            .getDataSourceMetadata(dataSource);
    final boolean hasValidOffsetsFromDb = latestDataSourceMetadata != null
            && latestDataSourceMetadata.getKafkaPartitions() != null
            && ioConfig.getTopic().equals(latestDataSourceMetadata.getKafkaPartitions().getTopic());
    final Map<Integer, Long> latestOffsetsFromDb;
    if (hasValidOffsetsFromDb) {
        latestOffsetsFromDb = latestDataSourceMetadata.getKafkaPartitions().getPartitionOffsetMap();
    } else {
        latestOffsetsFromDb = null;
    }

    // order tasks of this taskGroup by the latest sequenceId (descending firstKey)
    taskSequences.sort((o1, o2) -> o2.rhs.firstKey().compareTo(o1.rhs.firstKey()));

    final Set<String> tasksToKill = new HashSet<>();
    final AtomicInteger earliestConsistentSequenceId = new AtomicInteger(-1);
    int taskIndex = 0;
    while (taskIndex < taskSequences.size()) {
        TreeMap<Integer, Map<Integer, Long>> taskCheckpoints = taskSequences.get(taskIndex).rhs;
        String taskId = taskSequences.get(taskIndex).lhs;
        if (earliestConsistentSequenceId.get() == -1) {
            // find the first replica task with earliest sequenceId consistent with datasource metadata in the
            // metadata store
            // NOTE(review): the compareAndSet calls inside these stream predicates are
            // load-bearing — they record the matching sequenceId as a side effect of
            // the short-circuiting boolean evaluation
            if (taskCheckpoints.entrySet().stream()
                    .anyMatch(sequenceCheckpoint -> sequenceCheckpoint.getValue().entrySet().stream()
                            .allMatch(partitionOffset -> Longs.compare(partitionOffset.getValue(),
                                    latestOffsetsFromDb == null ? partitionOffset.getValue()
                                            : latestOffsetsFromDb.getOrDefault(partitionOffset.getKey(),
                                                    partitionOffset.getValue())) == 0)
                            && earliestConsistentSequenceId.compareAndSet(-1, sequenceCheckpoint.getKey()))
                    || (pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() > 0
                            && earliestConsistentSequenceId.compareAndSet(-1, taskCheckpoints.firstKey()))) {
                // truncate the group's checkpoints to everything at or after the
                // earliest consistent sequenceId
                final SortedMap<Integer, Map<Integer, Long>> latestCheckpoints = new TreeMap<>(
                        taskCheckpoints.tailMap(earliestConsistentSequenceId.get()));
                log.info("Setting taskGroup sequences to [%s] for group [%d]", latestCheckpoints, groupId);
                taskGroup.sequenceOffsets.clear();
                taskGroup.sequenceOffsets.putAll(latestCheckpoints);
            } else {
                log.debug("Adding task [%s] to kill list, checkpoints[%s], latestoffsets from DB [%s]", taskId,
                        taskCheckpoints, latestOffsetsFromDb);
                tasksToKill.add(taskId);
            }
        } else {
            // check consistency with taskGroup sequences: the task must agree with the
            // group's first checkpoint and have the same number of checkpoints from
            // that sequenceId onward
            if (taskCheckpoints.get(taskGroup.sequenceOffsets.firstKey()) == null
                    || !(taskCheckpoints.get(taskGroup.sequenceOffsets.firstKey())
                            .equals(taskGroup.sequenceOffsets.firstEntry().getValue()))
                    || taskCheckpoints.tailMap(taskGroup.sequenceOffsets.firstKey())
                            .size() != taskGroup.sequenceOffsets.size()) {
                log.debug("Adding task [%s] to kill list, checkpoints[%s], taskgroup checkpoints [%s]", taskId,
                        taskCheckpoints, taskGroup.sequenceOffsets);
                tasksToKill.add(taskId);
            }
        }
        taskIndex++;
    }

    if ((tasksToKill.size() > 0 && tasksToKill.size() == taskGroup.tasks.size())
            || (taskGroup.tasks.size() == 0
                    && pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() == 0)) {
        // killing all tasks or no task left in the group ?
        // clear state about the taskgroup so that get latest offset information is fetched from metadata store
        log.warn("Clearing task group [%d] information as no valid tasks left the group", groupId);
        taskGroups.remove(groupId);
        partitionGroups.get(groupId).replaceAll((partition, offset) -> NOT_SET);
    }

    // finally kill every task that was flagged as inconsistent above
    taskSequences.stream().filter(taskIdSequences -> tasksToKill.contains(taskIdSequences.lhs))
            .forEach(sequenceCheckpoint -> {
                log.warn(
                        "Killing task [%s], as its checkpoints [%s] are not consistent with group checkpoints[%s] or latest "
                                + "persisted offsets in metadata store [%s]",
                        sequenceCheckpoint.lhs, sequenceCheckpoint.rhs, taskGroup.sequenceOffsets,
                        latestOffsetsFromDb);
                killTask(sequenceCheckpoint.lhs);
                taskGroup.tasks.remove(sequenceCheckpoint.lhs);
            });
}
From source file:it.imtech.metadata.MetaUtility.java
/**
 * Recursively and dynamically builds the metadata editing user interface.
 *
 * <p>For each metadata entry a titled inner panel with the appropriate input
 * widget (combo box, text area, date picker, ...) is created, based on the
 * entry's datatype, and appended to {@code parent}. The method recurses into
 * each entry's sub-metadata, incrementing {@code level}.
 *
 * @param submetadatas map containing the metadata entries and their sub-levels
 * @param vocabularies map with the controlled-vocabulary data loaded from vocabulary.xml,
 *                     keyed by metadata MID (as a string)
 * @param parent the JPanel the metadata widgets are added to
 * @param level current nesting level (also scales the label font for "Node" entries)
 * @param panelname name of the enclosing tab/panel; used to scope classification and
 *                  contributor add/remove operations
 * @throws Exception propagated from widget construction / classification handling
 */
public void create_metadata_view(Map<Object, Metadata> submetadatas, JPanel parent, int level,
        final String panelname) throws Exception {
    ResourceBundle bundle = ResourceBundle.getBundle(Globals.RESOURCES, Globals.CURRENT_LOCALE, Globals.loader);
    int lenght = submetadatas.size();
    int labelwidth = 220;
    int i = 0;
    JButton addcontribute = null;
    for (Map.Entry<Object, Metadata> kv : submetadatas.entrySet()) {
        ArrayList<Component> tabobjects = new ArrayList<Component>();
        // Skip MIDs that are never rendered in this view.
        if (kv.getValue().MID == 17 || kv.getValue().MID == 23 || kv.getValue().MID == 18
                || kv.getValue().MID == 137) {
            continue;
        }
        // Create a new panel for this entry and (later) append it to parent.
        JPanel innerPanel = new JPanel(new MigLayout("fillx, insets 1 1 1 1"));
        innerPanel.setName("pannello" + level + i);
        i++;
        String datatype = kv.getValue().datatype.toString();
        // MID 45: classification entry — gets its own choice row with +/- buttons
        // that add/remove classifications for this panel.
        if (kv.getValue().MID == 45) {
            JPanel choice = new JPanel(new MigLayout());
            JComboBox combo = addClassificationChoice(choice, kv.getValue().sequence, panelname);
            JLabel labelc = new JLabel();
            labelc.setText(Utility.getBundleString("selectclassif", bundle));
            labelc.setPreferredSize(new Dimension(100, 20));
            choice.add(labelc);
            findLastClassification(panelname);
            // The "-" button is created once and attached only to the last classification row.
            if (last_classification != 0 && classificationRemoveButton == null) {
                logger.info("Removing last clasification");
                classificationRemoveButton = new JButton("-");
                classificationRemoveButton.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent event) {
                        BookImporter.getInstance().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
                        removeClassificationToMetadata(panelname);
                        BookImporter.getInstance().refreshMetadataTab(false, panelname);
                        findLastClassification(panelname); // update last_classification
                        BookImporter.getInstance().setCursor(null);
                    }
                });
                if (Integer.parseInt(kv.getValue().sequence) == last_classification) {
                    choice.add(classificationRemoveButton, "wrap, width :50:");
                }
            }
            // The "+" button is also created only once (field is reset elsewhere — not visible here).
            if (classificationAddButton == null) {
                logger.info("Adding a new classification");
                choice.add(combo, "width 100:600:600");
                classificationAddButton = new JButton("+");
                classificationAddButton.addActionListener(new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent event) {
                        BookImporter.getInstance().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
                        addClassificationToMetadata(panelname);
                        BookImporter.getInstance().refreshMetadataTab(false, panelname);
                        BookImporter.getInstance().setCursor(null);
                    }
                });
                choice.add(classificationAddButton, "width :50:");
            } else {
                //choice.add(combo, "wrap,width 100:700:700");
                choice.add(combo, "width 100:700:700");
                if (Integer.parseInt(kv.getValue().sequence) == last_classification) {
                    choice.add(classificationRemoveButton, "wrap, width :50");
                }
            }
            parent.add(choice, "wrap,width 100:700:700");
            classificationMID = kv.getValue().MID;
            innerPanel.setName(panelname + "---ImPannelloClassif---" + kv.getValue().sequence);
            try {
                addClassification(innerPanel, classificationMID, kv.getValue().sequence, panelname);
            } catch (Exception ex) {
                logger.error("Errore nell'aggiunta delle classificazioni");
            }
            parent.add(innerPanel, "wrap, growx");
            BookImporter.policy.addIndexedComponent(combo);
            continue;
        }
        if (datatype.equals("Node")) {
            // Grouping node: render a heading label whose size shrinks with nesting depth.
            JLabel label = new JLabel();
            label.setText(kv.getValue().description);
            label.setPreferredSize(new Dimension(100, 20));
            int size = 16 - (level * 2);
            Font myFont = new Font("MS Sans Serif", Font.PLAIN, size);
            label.setFont(myFont);
            // MID 11: contributor node — gets +/- buttons like classifications above.
            if (Integer.toString(kv.getValue().MID).equals("11")) {
                JPanel temppanel = new JPanel(new MigLayout());
                // update last_contribute
                findLastContribute(panelname);
                if (last_contribute != 0 && removeContribute == null) {
                    logger.info("Removing last contribute");
                    removeContribute = new JButton("-");
                    removeContribute.addActionListener(new ActionListener() {
                        @Override
                        public void actionPerformed(ActionEvent event) {
                            BookImporter.getInstance()
                                    .setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
                            removeContributorToMetadata(panelname);
                            BookImporter.getInstance().refreshMetadataTab(false, panelname);
                            BookImporter.getInstance().setCursor(null);
                        }
                    });
                    if (!kv.getValue().sequence.equals("")) {
                        if (Integer.parseInt(kv.getValue().sequence) == last_contribute) {
                            innerPanel.add(removeContribute, "width :50:");
                        }
                    }
                }
                if (addcontribute == null) {
                    logger.info("Adding a new contribute");
                    addcontribute = new JButton("+");
                    addcontribute.addActionListener(new ActionListener() {
                        @Override
                        public void actionPerformed(ActionEvent event) {
                            BookImporter.getInstance()
                                    .setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
                            addContributorToMetadata(panelname);
                            BookImporter.getInstance().refreshMetadataTab(false, panelname);
                            BookImporter.getInstance().setCursor(null);
                        }
                    });
                    temppanel.add(label, " width :200:");
                    temppanel.add(addcontribute, "width :50:");
                    innerPanel.add(temppanel, "wrap, growx");
                } else {
                    temppanel.add(label, " width :200:");
                    findLastContribute(panelname);
                    if (!kv.getValue().sequence.equals("")) {
                        if (Integer.parseInt(kv.getValue().sequence) == last_contribute) {
                            temppanel.add(removeContribute, "width :50:");
                        }
                    }
                    innerPanel.add(temppanel, "wrap, growx");
                }
            } else if (Integer.toString(kv.getValue().MID).equals("115")) {
                // MID 115 (provenance) is recognized but not yet handled — log only.
                logger.info("Devo gestire una provenience!");
            }
        } else {
            // Leaf entry: titled border, "*" marks mandatory fields (plus MIDs 14/15).
            String title = "";
            if (kv.getValue().mandatory.equals("Y") || kv.getValue().MID == 14 || kv.getValue().MID == 15) {
                title = kv.getValue().description + " *";
            } else {
                title = kv.getValue().description;
            }
            innerPanel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), title,
                    TitledBorder.LEFT, TitledBorder.TOP));
            if (datatype.equals("Vocabulary")) {
                // Controlled vocabulary: fill a combo from the vocabulary matching this MID.
                TreeMap<String, String> entryCombo = new TreeMap<String, String>();
                int index = 0;
                String selected = null;
                // All but MID 8 get a leading "select..." placeholder entry.
                if (!Integer.toString(kv.getValue().MID).equals("8"))
                    entryCombo.put(Utility.getBundleString("comboselect", bundle),
                            Utility.getBundleString("comboselect", bundle));
                for (Map.Entry<String, TreeMap<String, VocEntry>> vc : vocabularies.entrySet()) {
                    String tempmid = Integer.toString(kv.getValue().MID);
                    // Children of MID 11/13 carry a "---sequence" suffix; strip it to match the vocabulary key.
                    if (Integer.toString(kv.getValue().MID_parent).equals("11")
                            || Integer.toString(kv.getValue().MID_parent).equals("13")) {
                        String[] testmid = tempmid.split("---");
                        tempmid = testmid[0];
                    }
                    if (vc.getKey().equals(tempmid)) {
                        TreeMap<String, VocEntry> iEntry = vc.getValue();
                        for (Map.Entry<String, VocEntry> ivc : iEntry.entrySet()) {
                            entryCombo.put(ivc.getValue().description, ivc.getValue().ID);
                            if (kv.getValue().value != null) {
                                if (kv.getValue().value.equals(ivc.getValue().ID)) {
                                    selected = ivc.getValue().ID;
                                }
                            }
                            index++;
                        }
                    }
                }
                final ComboMapImpl model = new ComboMapImpl();
                model.setVocabularyCombo(true);
                model.putAll(entryCombo);
                final JComboBox voc = new javax.swing.JComboBox(model);
                model.specialRenderCombo(voc);
                if (Integer.toString(kv.getValue().MID_parent).equals("11")
                        || Integer.toString(kv.getValue().MID_parent).equals("13")) {
                    voc.setName("MID_" + Integer.toString(kv.getValue().MID) + "---" + kv.getValue().sequence);
                } else {
                    voc.setName("MID_" + Integer.toString(kv.getValue().MID));
                }
                // MID 8 defaults to vocabulary entry "44" when no value is set.
                if (Integer.toString(kv.getValue().MID).equals("8") && selected == null)
                    selected = "44";
                selected = (selected == null) ? Utility.getBundleString("comboselect", bundle) : selected;
                for (int k = 0; k < voc.getItemCount(); k++) {
                    Map.Entry<String, String> el = (Map.Entry<String, String>) voc.getItemAt(k);
                    if (el.getValue().equals(selected))
                        voc.setSelectedIndex(k);
                }
                voc.setPreferredSize(new Dimension(150, 30));
                innerPanel.add(voc, "wrap, width :400:");
                tabobjects.add(voc);
            } else if (datatype.equals("CharacterString") || datatype.equals("GPS")) {
                // Free-text entry; TAB/Shift-TAB move focus instead of inserting a tab character.
                final JTextArea textField = new javax.swing.JTextArea();
                if (Integer.toString(kv.getValue().MID_parent).equals("11")
                        || Integer.toString(kv.getValue().MID_parent).equals("13")) {
                    textField.setName(
                            "MID_" + Integer.toString(kv.getValue().MID) + "---" + kv.getValue().sequence);
                } else {
                    textField.setName("MID_" + Integer.toString(kv.getValue().MID));
                }
                textField.setPreferredSize(new Dimension(230, 0));
                textField.setText(kv.getValue().value);
                textField.setLineWrap(true);
                textField.setWrapStyleWord(true);
                innerPanel.add(textField, "wrap, width :300:");
                textField.addKeyListener(new KeyAdapter() {
                    @Override
                    public void keyPressed(KeyEvent e) {
                        if (e.getKeyCode() == KeyEvent.VK_TAB) {
                            if (e.getModifiers() > 0) {
                                textField.transferFocusBackward();
                            } else {
                                textField.transferFocus();
                            }
                            e.consume();
                        }
                    }
                });
                tabobjects.add(textField);
            } else if (datatype.equals("LangString")) {
                // Localized text: scrollable text area plus a language selection combo.
                JScrollPane inner_scroll = new javax.swing.JScrollPane();
                inner_scroll.setHorizontalScrollBarPolicy(
                        javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
                inner_scroll.setVerticalScrollBarPolicy(
                        javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED);
                inner_scroll.setPreferredSize(new Dimension(240, 80));
                inner_scroll.setName("langStringScroll");
                final JTextArea jTextArea1 = new javax.swing.JTextArea();
                jTextArea1.setName("MID_" + Integer.toString(kv.getValue().MID));
                jTextArea1.setText(kv.getValue().value);
                jTextArea1.setSize(new Dimension(350, 70));
                jTextArea1.setLineWrap(true);
                jTextArea1.setWrapStyleWord(true);
                inner_scroll.setViewportView(jTextArea1);
                innerPanel.add(inner_scroll, "width :300:");
                // Add combo language box
                JComboBox voc = getComboLangBox(kv.getValue().language);
                voc.setName("MID_" + Integer.toString(kv.getValue().MID) + "_lang");
                voc.setPreferredSize(new Dimension(200, 20));
                innerPanel.add(voc, "wrap, width :300:");
                jTextArea1.addKeyListener(new KeyAdapter() {
                    @Override
                    public void keyPressed(KeyEvent e) {
                        if (e.getKeyCode() == KeyEvent.VK_TAB) {
                            if (e.getModifiers() > 0) {
                                jTextArea1.transferFocusBackward();
                            } else {
                                jTextArea1.transferFocus();
                            }
                            e.consume();
                        }
                    }
                });
                tabobjects.add(jTextArea1);
                tabobjects.add(voc);
            } else if (datatype.equals("Language")) {
                final JComboBox voc = getComboLangBox(kv.getValue().value);
                voc.setName("MID_" + Integer.toString(kv.getValue().MID));
                voc.setPreferredSize(new Dimension(150, 20));
                voc.setBounds(5, 5, 150, 20);
                innerPanel.add(voc, "wrap, width :500:");
                //BookImporter.policy.addIndexedComponent(voc);
                tabobjects.add(voc);
            } else if (datatype.equals("Boolean")) {
                // Yes/No combo. TreeMap orders keys alphabetically, so index 0 = "no", 1 = "yes".
                int selected = 0;
                TreeMap bin = new TreeMap<String, String>();
                bin.put("yes", Utility.getBundleString("voc1", bundle));
                bin.put("no", Utility.getBundleString("voc2", bundle));
                if (kv.getValue().value == null) {
                    // No stored value: MID-specific defaults.
                    switch (kv.getValue().MID) {
                    case 35:
                        selected = 0;
                        break;
                    case 36:
                        selected = 1;
                        break;
                    }
                } else if (kv.getValue().value.equals("yes")) {
                    selected = 1;
                } else {
                    selected = 0;
                }
                final ComboMapImpl model = new ComboMapImpl();
                model.putAll(bin);
                final JComboBox voc = new javax.swing.JComboBox(model);
                model.specialRenderCombo(voc);
                voc.setName("MID_" + Integer.toString(kv.getValue().MID));
                voc.setSelectedIndex(selected);
                voc.setPreferredSize(new Dimension(150, 20));
                voc.setBounds(5, 5, 150, 20);
                innerPanel.add(voc, "wrap, width :300:");
                //BookImporter.policy.addIndexedComponent(voc);
                tabobjects.add(voc);
            } else if (datatype.equals("License")) {
                // License vocabulary combo; entry with ID "1" is the fallback default.
                String selectedIndex = null;
                int vindex = 0;
                int defaultIndex = 0;
                TreeMap<String, String> entryCombo = new TreeMap<String, String>();
                for (Map.Entry<String, TreeMap<String, VocEntry>> vc : vocabularies.entrySet()) {
                    if (vc.getKey().equals(Integer.toString(kv.getValue().MID))) {
                        TreeMap<String, VocEntry> iEntry = vc.getValue();
                        for (Map.Entry<String, VocEntry> ivc : iEntry.entrySet()) {
                            entryCombo.put(ivc.getValue().description, ivc.getValue().ID);
                            if (ivc.getValue().ID.equals("1"))
                                defaultIndex = vindex;
                            if (kv.getValue().value != null) {
                                if (ivc.getValue().ID.equals(kv.getValue().value)) {
                                    selectedIndex = Integer.toString(vindex);
                                }
                            }
                            vindex++;
                        }
                    }
                }
                if (selectedIndex == null)
                    selectedIndex = Integer.toString(defaultIndex);
                ComboMapImpl model = new ComboMapImpl();
                model.putAll(entryCombo);
                model.setVocabularyCombo(true);
                JComboBox voc = new javax.swing.JComboBox(model);
                model.specialRenderCombo(voc);
                voc.setName("MID_" + Integer.toString(kv.getValue().MID));
                voc.setSelectedIndex(Integer.parseInt(selectedIndex));
                voc.setPreferredSize(new Dimension(150, 20));
                voc.setBounds(5, 5, 150, 20);
                innerPanel.add(voc, "wrap, width :500:");
                //BookImporter.policy.addIndexedComponent(voc);
                tabobjects.add(voc);
            } else if (datatype.equals("DateTime")) {
                // Date picker with a "before Christ" checkbox; a leading '-' in the stored
                // value marks BC dates (adjustDate normalizes them for parsing).
                //final JXDatePicker datePicker = new JXDatePicker();
                JDateChooser datePicker = new JDateChooser();
                datePicker.setName("MID_" + Integer.toString(kv.getValue().MID));
                JPanel test = new JPanel(new MigLayout());
                JLabel lbefore = new JLabel(Utility.getBundleString("beforechristlabel", bundle));
                JCheckBox beforechrist = new JCheckBox();
                beforechrist.setName("MID_" + Integer.toString(kv.getValue().MID) + "_check");
                if (kv.getValue().value != null) {
                    try {
                        if (kv.getValue().value.charAt(0) == '-') {
                            beforechrist.setSelected(true);
                        }
                        Date date1 = new Date();
                        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
                        if (kv.getValue().value.charAt(0) == '-') {
                            date1 = sdf.parse(adjustDate(kv.getValue().value));
                        } else {
                            date1 = sdf.parse(kv.getValue().value);
                        }
                        datePicker.setDate(date1);
                    } catch (Exception e) {
                        // Best effort: an unparseable stored date leaves the picker empty.
                        //Console.WriteLine("ERROR import date:" + ex.Message);
                    }
                }
                test.add(datePicker, "width :200:");
                test.add(lbefore, "gapleft 30");
                test.add(beforechrist, "wrap");
                innerPanel.add(test, "wrap");
            }
        }
        // Recursive call: build the UI for this entry's sub-metadata inside innerPanel.
        create_metadata_view(kv.getValue().submetadatas, innerPanel, level + 1, panelname);
        // Only attach panels for editable entries (or visible Node groups) and register
        // their widgets with the focus-traversal policy.
        if (kv.getValue().editable.equals("Y")
                || (datatype.equals("Node") && kv.getValue().hidden.equals("0"))) {
            parent.add(innerPanel, "wrap, growx");
            for (Component tabobject : tabobjects) {
                BookImporter.policy.addIndexedComponent(tabobject);
            }
        }
    }
}
From source file:org.apache.hadoop.mapred.HFSPScheduler.java
/**
 * Re-sorts the size-based job queue for the given task type so that its
 * iteration order reflects the current job duration values.
 *
 * <p>Since the queue's comparator reads mutable duration state, the map must be
 * rebuilt after durations change: entries are copied into a fresh TreeMap (which
 * re-evaluates the comparator on insert), the shared queue is cleared, and the
 * entries are re-inserted one by one while holding the queue's monitor.
 *
 * @param type the task type (map/reduce) selecting which size-based queue to sort
 */
private void sortSizeBasedQueue(TaskType type) {
    TreeMap<JobDurationInfo, JobInProgress> newQueue = new TreeMap<JobDurationInfo, JobInProgress>(
            HFSPScheduler.JOB_DURATION_COMPARATOR);
    Map<JobDurationInfo, JobInProgress> oldQueue = this.getSizeBasedJobQueue(type);
    if (LOG.isDebugEnabled()) {
        // TODO: deleteme
        // Debug-only sanity check: no two queue entries may share a JobID.
        HashMap<JobID, JobDurationInfo> jdis = new HashMap<JobID, JobDurationInfo>();
        for (Entry<JobDurationInfo, JobInProgress> entry : oldQueue.entrySet()) {
            JobDurationInfo jdi = entry.getKey();
            assert !jdis.containsKey(jdi.getJobID()) : String.format("%s %s %s", jdi.getJobID(),
                    jdis.get(jdi.getJobID()), jdi);
            jdis.put(jdi.getJobID(), jdi);
        }
    }
    // NOTE(review): oldSize is read before the synchronized block; if another thread
    // mutates the queue in between, the assert below can fire spuriously — confirm
    // all mutations happen under the queue's monitor.
    int oldSize = oldQueue.size();
    synchronized (oldQueue) {
        newQueue.putAll(oldQueue);
        oldQueue.clear();
        // FIXME: putAll not working with comparator, don't know why
        for (Entry<JobDurationInfo, JobInProgress> entry : newQueue.entrySet()) {
            oldQueue.put(entry.getKey(), entry.getValue());
        }
    }
    // No entries may be lost or merged by the rebuild.
    assert oldSize == oldQueue.size() : String.format("oldSize: %s newSize: %s", oldSize, oldQueue.size());
    // if (LOG.isDebugEnabled()) {
    // StringBuilder builder = new StringBuilder("time update on " +
    // "SizeBasedQueue(").append(type).append( "): [");
    // boolean first = true;
    // for (Entry<JobDurationInfo, JobInProgress> entry : oldQueue.entrySet()) {
    // if (first)
    // first = false;
    // else
    // builder.append(", ");
    // builder.append(entry.getKey().getPhaseDuration())
    // .append(" -> ").append(entry.getValue().getJobID());
    // }
    // builder.append("]");
    // LOG.debug(builder.toString());
    // }
}
From source file:org.finra.herd.dao.helper.EmrPricingHelper.java
/** * Finds all the clusters that are within the range of lowest core instance price. * <p>//w ww. ja v a 2s . c o m * For example, if the core prices are 0.30, 0.32, 0.34, 0.36, and the threshold value is 0.1(10%), then the lowest core price range should be [0.30, 0.33]. * The upper bound is derived by calculating 0.30*(1 + 0.1) = 0.33 * * @param emrClusterPrices the list of clusters to select from * @param lowestCoreInstancePriceThresholdPercentage the threshold value that defines the range of lowest core instance price * * @return the list of clusters that fall in lowest core instance price range */ List<EmrClusterPriceDto> getEmrClusterPricesWithinLowestCoreInstancePriceThreshold( final List<EmrClusterPriceDto> emrClusterPrices, final BigDecimal lowestCoreInstancePriceThresholdPercentage) { // Builds a tree map that has the core instance price as the key, and the list of pricing with the same core instance price as the value. The tree map // is automatically sorted, so it is easy to find the lowest core instance price range. 
TreeMap<BigDecimal, List<EmrClusterPriceDto>> emrClusterPriceMapKeyedByCoreInstancePrice = new TreeMap<>(); for (final EmrClusterPriceDto emrClusterPriceDto : emrClusterPrices) { final BigDecimal coreInstancePrice = getEmrClusterCoreInstancePrice(emrClusterPriceDto); if (emrClusterPriceMapKeyedByCoreInstancePrice.containsKey(coreInstancePrice)) { emrClusterPriceMapKeyedByCoreInstancePrice.get(coreInstancePrice).add(emrClusterPriceDto); } else { List<EmrClusterPriceDto> emrClusterPriceList = new ArrayList<>(); emrClusterPriceList.add(emrClusterPriceDto); emrClusterPriceMapKeyedByCoreInstancePrice.put(coreInstancePrice, emrClusterPriceList); } } // Log all the information in the tree map LOGGER.info("All available EMR clusters keyed by core instance price: availableEmrClusters={}", jsonHelper.objectToJson(emrClusterPriceMapKeyedByCoreInstancePrice)); // Finds the list of pricing in the range of the lowest core instance price List<EmrClusterPriceDto> lowestCoreInstancePriceEmrClusters = new ArrayList<>(); if (!emrClusterPriceMapKeyedByCoreInstancePrice.isEmpty()) { // calculate the lowest core instance price range final BigDecimal lowestCoreInstancePriceLowerBound = emrClusterPriceMapKeyedByCoreInstancePrice .firstEntry().getKey(); final BigDecimal lowestCoreInstancePriceUpperBound = lowestCoreInstancePriceLowerBound .multiply(BigDecimal.ONE.add(lowestCoreInstancePriceThresholdPercentage)); LOGGER.info("emrClusterLowestCoreInstancePriceRange={}", jsonHelper.objectToJson( Arrays.asList(lowestCoreInstancePriceLowerBound, lowestCoreInstancePriceUpperBound))); for (final Map.Entry<BigDecimal, List<EmrClusterPriceDto>> entry : emrClusterPriceMapKeyedByCoreInstancePrice .entrySet()) { final BigDecimal coreInstancePrice = entry.getKey(); // Fall into the lowest price range? add it to the list. // There is no need to check the lower bound here, since the tree map is sorted, and lower bound is the lowest core price in the tree map. 
if (coreInstancePrice.compareTo(lowestCoreInstancePriceUpperBound) <= 0) { lowestCoreInstancePriceEmrClusters.addAll(entry.getValue()); } else { // since the tree map is sorted in ascending order, we do not need to check the rest of entries in the map break; } } } return lowestCoreInstancePriceEmrClusters; }
From source file:org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler.java
/** * Reads the global counters produced by a job on the group labeled with PIG_MAP_RANK_NAME. * Then, it is calculated the cumulative sum, which consists on the sum of previous cumulative * sum plus the previous global counter value. * @param job with the global counters collected. * @param operationID After being collected on global counters (POCounter), * these values are passed via configuration file to PORank, by using the unique * operation identifier/* w w w . j a v a2 s . c o m*/ */ private void saveCounters(Job job, String operationID, boolean isRowNumber) { Counters counters; Group groupCounters; int counterSize = -1; Long previousValue = 0L; Long previousSum = 0L; ArrayList<Pair<String, Long>> counterPairs; try { counters = HadoopShims.getCounters(job); String groupName = getGroupName(counters.getGroupNames()); // In case that the counter group was not find, we need to find // out why. Only acceptable state is that the relation has been // empty. if (groupName == null) { Counter outputRecords = counters.getGroup(MRPigStatsUtil.TASK_COUNTER_GROUP) .getCounterForName(MRPigStatsUtil.MAP_OUTPUT_RECORDS); if (outputRecords.getCounter() == 0) { globalCounters.put(operationID, new ArrayList<Pair<String, Long>>()); return; } else { throw new RuntimeException("Did not found RANK counter group for operationId: " + operationID); } } groupCounters = counters.getGroup(groupName); TreeMap<Integer, Long> counterList = new TreeMap<Integer, Long>(); Iterator<Counter> it = groupCounters.iterator(); while (it.hasNext()) { try { Counter c = it.next(); counterList.put(Integer.valueOf(c.getDisplayName()), c.getValue()); } catch (Exception ex) { ex.printStackTrace(); } } counterSize = counterList.size(); counterPairs = new ArrayList<Pair<String, Long>>(); // There could be empty tasks with no counters. That is not an issue // and we only need to calculate offsets for non-empty task ids // which will be accessed in PORank. 
for (Entry<Integer, Long> entry : counterList.entrySet()) { previousSum += previousValue; previousValue = entry.getValue(); counterPairs.add(new Pair<String, Long>(JobControlCompiler.PIG_MAP_COUNTER + operationID + JobControlCompiler.PIG_MAP_SEPARATOR + entry.getKey(), previousSum)); } globalCounters.put(operationID, counterPairs); } catch (Exception e) { String msg = "Error to read counters into Rank operation counterSize " + counterSize; throw new RuntimeException(msg, e); } }