List of usage examples for java.util.Collections.reverseOrder()
@SuppressWarnings("unchecked") public static <T> Comparator<T> reverseOrder()
From source file:org.kuali.coeus.common.committee.impl.lookup.keyvalue.CommitteeIdValuesFinderBase.java
/**
 * This method will return the list of all highest-sequence number committee instances.
 * Will always return a non-null (but possibly empty) collection.
 */
public List<CommitteeBase> getActiveCommittees() {
    Map<String, String> criteria = new HashMap<String, String>();
    criteria.put(COMMITTEE_TYPE_CODE, getCommitteeTypeCodeHook());
    ArrayList<CommitteeBase> returnCommitteeList = new ArrayList<CommitteeBase>();
    Collection<? extends CommitteeBase> committees = this.getBusinessObjectService()
            .findMatching(getCommitteeBOClassHook(), criteria);
    // sort and iterate through to get only the latest instances
    if (CollectionUtils.isNotEmpty(committees)) {
        List<String> committeeIds = new ArrayList<String>();
        // only the active ones
        Collections.sort((List<CommitteeBase>) committees, Collections.reverseOrder());
        for (CommitteeBase committee : committees) {
            if (!committeeIds.contains(committee.getCommitteeId())) {
                returnCommitteeList.add(committee);
                committeeIds.add(committee.getCommitteeId());
            }
        }
    }
    return returnCommitteeList;
}
From source file:Utils.CVEUtils.java
/**
 * Look up a load of CVEs at once.
 *
 * @param cves
 * @return a vector of String[] with format { cveid, cvss_risk, summary }
 */
public Vector getCVEs(HashSet cves) {
    Vector answer = new Vector();
    ArrayList al = new ArrayList();
    Iterator it = cves.iterator();
    while (it.hasNext()) {
        String cve = (String) it.next();
        String[] cve_details = getCVE(cve);
        // If it is null then that vuln didn't exist.
        if (cve_details != null) {
            answer.add(cve_details);
            CVE c = new CVE();
            c.setCveId(cve_details[0]);
            c.setRiskScore(cve_details[1]);
            c.setSummary(cve_details[2]);
            al.add(c);
        } else {
            System.out.println("==CVEUtils=getCVEs: No local vuln for " + cve + ", consider updating");
        }
    }
    Collections.sort(al, Collections.reverseOrder());
    Vector actual_answer = new Vector();
    actual_answer.addAll(al);
    return actual_answer;
}
From source file:de.tudarmstadt.ukp.teaching.uima.nounDecompounding.ranking.ProbabilityBased.java
@Override
public List<Split> rank(List<Split> splits) {
    for (Split split : splits) {
        split.setWeight(this.calcRank(split));
    }
    Collections.sort(splits, Collections.reverseOrder());
    return splits;
}
From source file:org.apache.streams.messaging.service.impl.CassandraActivityService.java
@Override
public List<String> getActivitiesForFilters(List<String> filters, Date lastUpdated) {
    List<CassandraActivityStreamsEntry> activityObjects = cassandraActivityStreamsRepository
            .getActivitiesForFilters(filters, lastUpdated);
    Collections.sort(activityObjects, Collections.reverseOrder());
    // TODO: make the number of streams returned configurable
    return getJsonList(activityObjects.subList(0, Math.min(activityObjects.size(), 10)));
}
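The no-argument reverseOrder() works in the example above only because CassandraActivityStreamsEntry implements Comparable (ordering by publish date is an assumption about that repository class, not something shown here). A minimal sketch of the same sort-descending-then-truncate pattern, using a hypothetical Comparable entry type:

import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;

// Hypothetical stand-in for a Comparable stream entry; not the real CassandraActivityStreamsEntry.
class StreamEntry implements Comparable<StreamEntry> {
    final Date published;

    StreamEntry(Date published) {
        this.published = published;
    }

    @Override
    public int compareTo(StreamEntry other) {
        return published.compareTo(other.published); // natural order: oldest first
    }
}

class NewestFirst {
    // Sort descending via reverseOrder(), then keep at most the first ten entries.
    static List<StreamEntry> newestTen(List<StreamEntry> entries) {
        List<StreamEntry> copy = new ArrayList<StreamEntry>(entries);
        Collections.sort(copy, Collections.reverseOrder());
        return copy.subList(0, Math.min(copy.size(), 10));
    }
}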
From source file:dpfmanager.shell.interfaces.gui.component.report.ReportsModel.java
public void readReports() {
    int start = getData().size() - 1;
    if (start < 0) {
        start = 0;
    }
    int count = reports_loaded;
    ObservableList<ReportRow> rows = FXCollections.observableArrayList(new ArrayList<>());
    String baseDir = ReportGenerator.getReportsFolder();
    File reportsDir = new File(baseDir);
    if (reportsDir.exists()) {
        String[] directories = reportsDir.list((current, name) -> new File(current, name).isDirectory());
        Arrays.sort(directories, Collections.reverseOrder());
        int index = 0;
        for (int i = 0; i < directories.length; i++) {
            String reportDay = directories[i];
            File reportsDay = new File(baseDir + "/" + reportDay);
            String[] directories2 = reportsDay.list((current, name) -> new File(current, name).isDirectory());
            // Convert to ints for ordering
            Integer[] int_directories = new Integer[directories2.length];
            for (int j = 0; j < directories2.length; j++) {
                try {
                    int_directories[j] = Integer.parseInt(directories2[j]);
                } catch (Exception ex) {
                    context.send(BasicConfig.MODULE_MESSAGE, new LogMessage(getClass(), Level.DEBUG,
                            bundle.getString("incorrectReport") + ": " + directories2[j]));
                    int_directories[j] = -1;
                }
            }
            Arrays.sort(int_directories, Collections.reverseOrder());
            if (index + directories2.length >= start) {
                String[] available_formats = { "html", "xml", "json", "pdf" };
                for (int j = 0; j < int_directories.length; j++) {
                    if (int_directories[j] < 0)
                        continue;
                    String reportDir = String.valueOf(int_directories[j]);
                    if (index >= start && index < start + count) {
                        ReportRow rr = null;
                        File reportXml = new File(baseDir + "/" + reportDay + "/" + reportDir + "/summary.xml");
                        File reportJson = new File(baseDir + "/" + reportDay + "/" + reportDir + "/summary.json");
                        File reportHtml = new File(baseDir + "/" + reportDay + "/" + reportDir + "/report.html");
                        File reportPdf = new File(baseDir + "/" + reportDay + "/" + reportDir + "/report.pdf");
                        if (reportXml.exists() && reportXml.length() > 0) {
                            rr = ReportRow.createRowFromXml(reportDay, reportXml, getBundle());
                        }
                        if (rr == null && reportJson.exists() && reportJson.length() > 0) {
                            rr = ReportRow.createRowFromJson(reportDay, reportJson, getBundle());
                        }
                        if (rr == null && reportHtml.exists() && reportHtml.length() > 0) {
                            rr = ReportRow.createRowFromHtml(reportDay, reportHtml, getBundle());
                        }
                        if (rr == null && reportPdf.exists() && reportPdf.length() > 0) {
                            rr = ReportRow.createRowFromPdf(reportDay, reportPdf, getBundle());
                        }
                        if (rr != null) {
                            // Add formats
                            for (String format : available_formats) {
                                File report;
                                // Compare strings with equals() rather than ==
                                if (format.equals("json") || format.equals("xml")) {
                                    report = new File(baseDir + "/" + reportDay + "/" + reportDir + "/summary." + format);
                                } else {
                                    report = new File(baseDir + "/" + reportDay + "/" + reportDir + "/report." + format);
                                }
                                if (report.exists() && report.length() > 0) {
                                    rr.addFormat(format, report.getPath());
                                }
                            }
                            // Add mets
                            File folder = new File(baseDir + "/" + reportDay + "/" + reportDir + "/");
                            if (folder.exists() && folder.isDirectory()) {
                                String[] filter = { "mets.xml" };
                                Collection<File> childs = FileUtils.listFiles(folder, filter, false);
                                if (childs.size() > 0) {
                                    rr.addFormat("mets", folder.getPath());
                                }
                            }
                            rows.add(rr);
                            index++;
                        }
                        // Check if all done
                        if (i == directories.length - 1 && j == directories2.length - 1) {
                            all_reports_loaded = true;
                        }
                    } else {
                        index++;
                    }
                }
            } else {
                index += directories2.length;
            }
        }
    }
    data.addAll(rows);
}
From source file:io.seldon.recommendation.combiner.RankSumCombiner.java
@Override
public RecommendationPeer.RecResultContext combine(int numRecsRequired,
        List<RecommendationPeer.RecResultContext> resultsSets) {
    Map<Long, String> item_recommender_lookup = new HashMap<>();
    Map<ItemRecommendationResultSet.ItemRecommendationResult, Integer> rankSumMap = new HashMap<>();
    List<RecommendationPeer.RecResultContext> validResultSets = new ArrayList<>();
    List<String> validResultsAlgKeys = new ArrayList<>();
    for (RecommendationPeer.RecResultContext set : resultsSets) {
        if ((strict && set.resultSet.getResults().size() >= numRecsRequired)
                || (!strict && set.resultSet.getResults().size() > 0)) {
            validResultSets.add(set);
            validResultsAlgKeys.add(set.algKey);
        }
    }
    for (int i = 0; i < numRecsRequired; i++) {
        for (RecommendationPeer.RecResultContext validResultSet : validResultSets) {
            List<ItemRecommendationResultSet.ItemRecommendationResult> ordered = validResultSet.resultSet
                    .getResults();
            Collections.sort(ordered, Collections.reverseOrder());
            if (i < ordered.size()) {
                final ItemRecommendationResultSet.ItemRecommendationResult itemRecommendationResult = ordered
                        .get(i);
                Integer rankSum = rankSumMap.get(itemRecommendationResult);
                if (rankSum == null)
                    rankSum = 0;
                rankSum += (numRecsRequired - i);
                rankSumMap.put(itemRecommendationResult, rankSum);
                {
                    // capture the recommender used for item
                    String original_value = item_recommender_lookup.put(itemRecommendationResult.item,
                            validResultSet.resultSet.getRecommenderName());
                    if (original_value != null) {
                        item_recommender_lookup.put(itemRecommendationResult.item, original_value);
                    }
                }
            }
        }
    }
    List<ItemRecommendationResultSet.ItemRecommendationResult> orderedResults = new ArrayList<>();
    for (Map.Entry<ItemRecommendationResultSet.ItemRecommendationResult, Integer> entry : rankSumMap.entrySet()) {
        Float newScore = entry.getValue().floatValue();
        Long item = entry.getKey().item;
        ItemRecommendationResultSet.ItemRecommendationResult result =
                new ItemRecommendationResultSet.ItemRecommendationResult(item, newScore);
        orderedResults.add(result);
    }
    Collections.sort(orderedResults, Collections.reverseOrder());
    RecommendationPeer.RecResultContext recResultContext = new RecommendationPeer.RecResultContext(
            new ItemRecommendationResultSet(orderedResults, StringUtils.join(validResultsAlgKeys, ':')),
            StringUtils.join(validResultsAlgKeys, ':'));
    recResultContext.item_recommender_lookup = item_recommender_lookup;
    return recResultContext;
}
From source file:org.dawnsci.commandserver.ui.view.ConsumerView.java
@Override
public void createPartControl(Composite content) {
    content.setLayout(new GridLayout(1, false));
    GridUtils.removeMargins(content);

    this.viewer = new TableViewer(content, SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL);
    viewer.setUseHashlookup(true);
    viewer.getTable().setHeaderVisible(true);
    viewer.getControl().setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));

    createColumns();
    viewer.setContentProvider(createContentProvider());
    consumers = new TreeMap<String, ConsumerBean>(Collections.reverseOrder());
    viewer.setInput(consumers);

    createActions();

    try {
        createTopicListener(getUri());
    } catch (Exception e) {
        logger.error("Cannot listen to topic of command server!", e);
    }

    final Thread job = new Thread(new Runnable() {
        @Override
        public void run() {
            while (!viewer.getTable().isDisposed()) {
                try {
                    Thread.sleep(Constants.NOTIFICATION_FREQUENCY);
                    if (viewer.getControl().isDisposed())
                        return;
                    viewer.getControl().getDisplay().syncExec(new Runnable() {
                        public void run() {
                            viewer.refresh();
                        }
                    });
                } catch (InterruptedException e) {
                    return;
                }
            }
        }
    });
    job.setPriority(Thread.MIN_PRIORITY);
    job.setDaemon(true);
    job.setName("Refresh consumer table");
    job.start();
}
From source file:org.apache.tajo.util.history.HistoryReader.java
private synchronized List<QueryInfo> findQueryInfoInStorage(int page, int size, @Nullable QueryId queryId)
        throws IOException {
    List<QueryInfo> result = Lists.newLinkedList();
    FileSystem fs = HistoryWriter.getNonCrcFileSystem(historyParentPath, tajoConf);

    try {
        if (!fs.exists(historyParentPath)) {
            return result;
        }
    } catch (Throwable e) {
        return result;
    }

    FileStatus[] files = fs.listStatus(historyParentPath);
    if (files == null || files.length == 0) {
        return result;
    }

    Set<QueryInfo> queryInfos = Sets.newTreeSet(Collections.reverseOrder());
    int startIndex = page < 1 ? page : ((page - 1) * size) + 1;
    int currentIndex = 0;
    int skipSize = 0;

    ArrayUtils.reverse(files);
    for (FileStatus eachDateFile : files) {
        Path queryListPath = new Path(eachDateFile.getPath(), HistoryWriter.QUERY_LIST);
        if (eachDateFile.isFile() || !fs.exists(queryListPath)) {
            continue;
        }

        FileStatus[] dateFiles = fs.listStatus(queryListPath);
        if (dateFiles == null || dateFiles.length == 0) {
            continue;
        }

        ArrayUtils.reverse(dateFiles);
        for (FileStatus eachFile : dateFiles) {
            Path path = eachFile.getPath();
            if (eachFile.isDirectory() || !path.getName().endsWith(HistoryWriter.HISTORY_FILE_POSTFIX)) {
                continue;
            }

            FSDataInputStream in = null;
            List<String> jsonList = Lists.newArrayList();
            try {
                in = fs.open(path);
                // If the history file was not closed, FileStatus.getLen() is not updated,
                // so this block reads until EOFException instead of relying on the file length.
                while (true) {
                    int length = in.readInt();
                    byte[] buf = new byte[length];
                    in.readFully(buf, 0, length);
                    jsonList.add(new String(buf, 0, length, Bytes.UTF8_CHARSET));
                    currentIndex++;
                }
            } catch (EOFException eof) {
                // expected: end of history file reached
            } catch (Throwable e) {
                LOG.warn("Reading error:" + path + ", " + e.getMessage());
            } finally {
                IOUtils.cleanup(LOG, in);
            }

            // skip previous pages
            if (startIndex > currentIndex) {
                skipSize += jsonList.size();
            } else {
                for (String json : jsonList) {
                    QueryInfo queryInfo = QueryInfo.fromJson(json);
                    if (queryId != null) {
                        if (queryInfo.getQueryId().equals(queryId)) {
                            result.add(queryInfo);
                            return result;
                        }
                    } else {
                        queryInfos.add(queryInfo);
                    }
                }
            }

            if (currentIndex - (startIndex - 1) >= size) {
                result.addAll(queryInfos);
                int fromIndex = (startIndex - 1) - skipSize;
                return result.subList(fromIndex, fromIndex + size);
            }
        }
    }

    result.addAll(queryInfos);
    return result;
}
From source file:juicebox.tools.utils.juicer.arrowhead.BlockBuster.java
/**
 * Actual Arrowhead algorithm - should be called separately for each chromosome.
 *
 * @return contact domain list and scores for given list/control
 */
public static void run(int chrIndex, String chrName, int chrLength, int resolution, int matrixWidth,
        MatrixZoomData zd, NormalizationType norm, ArrowheadScoreList list, ArrowheadScoreList control,
        Feature2DList contactDomainsGenomeWide, Feature2DList contactDomainListScoresGenomeWide,
        Feature2DList contactDomainControlScoresGenomeWide) {

    // used for sliding window across diagonal
    int increment = matrixWidth / 2;
    int maxDataLengthAtResolution = (int) Math.ceil(((double) chrLength) / resolution);

    try {
        // get large number of blocks (lower confidence)
        CumulativeBlockResults results = null;
        for (double signThreshold = 0.4; signThreshold >= 0; signThreshold -= 0.1) {
            results = callSubBlockbuster(zd, maxDataLengthAtResolution, Double.NaN, signThreshold, matrixWidth,
                    increment, list, control, norm, resolution);
            if (results.getCumulativeResults().size() > 0) {
                break;
            }
        }

        // high variance threshold, fewer blocks, high confidence
        CumulativeBlockResults highConfidenceResults = callSubBlockbuster(zd, maxDataLengthAtResolution, 0.2f,
                0.5f, matrixWidth, increment, new ArrowheadScoreList(resolution),
                new ArrowheadScoreList(resolution), norm, resolution);

        List<HighScore> uniqueBlocks = orderedSetDifference(results.getCumulativeResults(),
                highConfidenceResults.getCumulativeResults());

        // remove the blocks that are small
        List<HighScore> filteredUniqueBlocks = filterBlocksBySize(uniqueBlocks, 60);
        appendNonConflictingBlocks(highConfidenceResults.getCumulativeResults(), filteredUniqueBlocks);

        // merge the high/low confidence results
        results.setCumulativeResults(highConfidenceResults.getCumulativeResults());
        results.mergeScores();

        // prior to this point, everything should be in terms of i,j indices in a binned matrix
        results.scaleIndicesByResolution(resolution);

        // if any contact domains are found
        if (results.getCumulativeResults().size() > 0) {
            if (HiCGlobals.printVerboseComments) {
                System.out.println("Initial # of contact domains: " + results.getCumulativeResults().size());
            }

            // merge/bin domains in very close proximity
            List<HighScore> binnedScores = binScoresByDistance(results.getCumulativeResults(), 5 * resolution);
            binnedScores = binScoresByDistance(binnedScores, 10 * resolution);
            Collections.sort(binnedScores, Collections.reverseOrder());

            // convert to Feature2DList format
            Feature2DList blockResults = Feature2DParser.parseHighScoreList(chrIndex, chrName, resolution,
                    binnedScores);
            Feature2DList blockResultListScores = Feature2DParser.parseArrowheadScoreList(chrIndex, chrName,
                    results.getCumulativeInternalList());
            Feature2DList blockResultControlScores = Feature2DParser.parseArrowheadScoreList(chrIndex, chrName,
                    results.getCumulativeInternalControl());

            // add results to genome-wide accumulator
            contactDomainsGenomeWide.add(blockResults);
            contactDomainListScoresGenomeWide.add(blockResultListScores);
            contactDomainControlScoresGenomeWide.add(blockResultControlScores);
        } else {
            if (HiCGlobals.printVerboseComments) {
                System.out.println("No contact domains found for chromosome " + chrName);
            }
        }
    } catch (IOException e) {
        System.err.println("Data not available for this chromosome.");
    }
}
From source file:net.myrrix.online.candidate.LocationSensitiveHashTest.java
private static List<Long> findTopRecommendations(FastByIDMap<float[]> Y, float[] userVec) {
    SortedMap<Double, Long> allScores = Maps.newTreeMap(Collections.reverseOrder());
    for (FastByIDMap.MapEntry<float[]> entry : Y.entrySet()) {
        double dot = SimpleVectorMath.dot(entry.getValue(), userVec);
        allScores.put(dot, entry.getKey());
    }
    List<Long> topRecommendations = Lists.newArrayList();
    for (Map.Entry<Double, Long> entry : allScores.entrySet()) {
        topRecommendations.add(entry.getValue());
        if (topRecommendations.size() == NUM_RECS) {
            return topRecommendations;
        }
    }
    return topRecommendations;
}
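One caveat with the TreeMap-keyed-by-score pattern in this last helper (a property of SortedMap, not of reverseOrder itself): two items with exactly equal dot products share the same key, so the later put overwrites the earlier one. A hedged alternative that keeps ties by sorting map entries with the one-argument reverseOrder(Comparator); the class and method names here are illustrative, not from the Myrrix code.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;

class TopNByScore {
    // Returns the ids of the numRecs highest-scoring items; tied scores are all kept.
    static List<Long> topIds(Map<Long, Double> scores, int numRecs) {
        List<Map.Entry<Long, Double>> entries = new ArrayList<Map.Entry<Long, Double>>(scores.entrySet());
        Comparator<Map.Entry<Long, Double>> byScore = Comparator.comparingDouble(Map.Entry::getValue);
        Collections.sort(entries, Collections.reverseOrder(byScore));
        List<Long> top = new ArrayList<Long>();
        for (Map.Entry<Long, Double> entry : entries) {
            top.add(entry.getKey());
            if (top.size() == numRecs) {
                break;
            }
        }
        return top;
    }
}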