List of usage examples for java.util.Collections.sort(List, Comparator)
@SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> void sort(List<T> list, Comparator<? super T> c)
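Before the per-project examples, a minimal self-contained sketch of this two-argument overload: sorting a list of strings by length with a Comparator. The class name SortExample and the sample data are illustrative only, not taken from any source file below.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class SortExample {
    public static void main(String[] args) {
        List<String> words = new ArrayList<>(Arrays.asList("pear", "fig", "banana"));
        // Sort by string length using the Comparator overload of Collections.sort
        Collections.sort(words, Comparator.comparingInt(String::length));
        System.out.println(words); // prints [fig, pear, banana]
    }
}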
From source file:Main.java
public void sortByValue() {
    List<Map.Entry<K, V>> list = new LinkedList<Map.Entry<K, V>>(entrySet());
    Collections.sort(list, new Comparator<Map.Entry<K, V>>() {
        public int compare(Map.Entry<K, V> entry1, Map.Entry<K, V> entry2) {
            return entry1.getValue().compareTo(entry2.getValue());
        }
    });
    clear();
    for (Map.Entry<K, V> entry : list) {
        put(entry.getKey(), entry.getValue());
    }
}
From source file:Main.java
public static <K, V extends Comparable<? super V>> Map<K, V> sortByValue(Map<K, V> map, boolean descending) {
    final int sign = descending ? -1 : 1;
    List<Map.Entry<K, V>> list = new LinkedList<>(map.entrySet());
    Collections.sort(list, new Comparator<Map.Entry<K, V>>() {
        @Override
        public int compare(Entry<K, V> o1, Entry<K, V> o2) {
            return sign * o1.getValue().compareTo(o2.getValue());
        }
    });
    Map<K, V> sorted = new LinkedHashMap<>();
    for (Map.Entry<K, V> entry : list) {
        sorted.put(entry.getKey(), entry.getValue());
    }
    return sorted;
}
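A hedged usage sketch for the helper above, assuming the sortByValue method is copied into the same class; the class name and map contents are illustrative only.

import java.util.HashMap;
import java.util.Map;

public class SortByValueDemo {
    public static void main(String[] args) {
        Map<String, Integer> scores = new HashMap<>();
        scores.put("alice", 3);
        scores.put("bob", 1);
        scores.put("carol", 2);
        // descending = true puts the highest value first: {alice=3, carol=2, bob=1}
        System.out.println(sortByValue(scores, true));
    }
}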
From source file:dk.hippogrif.prettyxml.app.Main.java
private static List sortOptions(Options options) {
    ArrayList list = new ArrayList(options.getOptions());
    Collections.sort(list, new Comparator() {
        public int compare(Object o1, Object o2) {
            return ((Option) o1).getOpt().compareTo(((Option) o2).getOpt());
        }
    });
    return list;
}
From source file:info.mikaelsvensson.devtools.analysis.db2eventlog.Db2EventLogReportGenerator.java
private static void sortQueryDataByTime(List<QueryStatistics> data) {
    Collections.sort(data, QueryStatistics.TOTAL_TIME_COMPARATOR);
}
From source file:net.sourceforge.fenixedu.presentationTier.renderers.providers.teacher.TutorshipMonitoringExecutionYearProvider.java
public static List<ExecutionYear> getExecutionYears(StudentsPerformanceInfoBean bean) {
    List<ExecutionYear> executionYears = new ArrayList<ExecutionYear>();
    for (ExecutionYear year : Bennu.getInstance().getExecutionYearsSet()) {
        if (year.isAfterOrEquals(bean.getStudentsEntryYear())) {
            executionYears.add(year);
        }
    }
    Collections.sort(executionYears, new ReverseComparator());
    return executionYears;
}
From source file:com.agiletec.apsadmin.user.role.RoleFinderAction.java
public List<Role> getRoles() {
    List<Role> roles = this.getRoleManager().getRoles();
    BeanComparator comparator = new BeanComparator("description");
    Collections.sort(roles, comparator);
    return roles;
}
From source file:com.agiletec.apsadmin.user.group.GroupFinderAction.java
public List<Group> getGroups() {
    List<Group> groups = this.getGroupManager().getGroups();
    BeanComparator comparator = new BeanComparator("descr");
    Collections.sort(groups, comparator);
    return groups;
}
From source file:edu.upenn.egricelab.AlignerBoost.FilterSAMAlignPE.java
public static void main(String[] args) {
    if (args.length == 0) {
        printUsage();
        return;
    }
    try {
        parseOptions(args);
    } catch (IllegalArgumentException e) {
        System.err.println("Error: " + e.getMessage());
        printUsage();
        return;
    }

    // Read in chrList, if specified
    if (chrFile != null) {
        chrFilter = new HashSet<String>();
        try {
            BufferedReader chrFilterIn = new BufferedReader(new FileReader(chrFile));
            String chr = null;
            while ((chr = chrFilterIn.readLine()) != null)
                chrFilter.add(chr);
            chrFilterIn.close();
            if (verbose > 0)
                System.err.println("Only looking at alignments on " + chrFilter.size() + " specified chromosomes");
        } catch (IOException e) {
            System.err.println("Error: " + e.getMessage());
            return;
        }
    }

    if (verbose > 0) {
        // Start the processMonitor
        processMonitor = new Timer();
        // Start the ProcessStatusTask
        statusTask = new ProcessStatusTask();
        // Schedule to show the status every 1 second
        processMonitor.scheduleAtFixedRate(statusTask, 0, statusFreq);
    }

    // Read in known SNP file, if specified
    if (knownSnpFile != null) {
        if (verbose > 0)
            System.err.println("Checking known SNPs from user specified VCF file");
        knownVCF = new VCFFileReader(new File(knownSnpFile));
    }

    SamReaderFactory readerFac = SamReaderFactory.makeDefault();
    SAMFileWriterFactory writerFac = new SAMFileWriterFactory();
    if (!isSilent)
        readerFac.validationStringency(ValidationStringency.LENIENT); // use LENIENT stringency
    else
        readerFac.validationStringency(ValidationStringency.SILENT); // use SILENT stringency

    SamReader in = readerFac.open(new File(inFile));
    SAMFileHeader inHeader = in.getFileHeader();
    if (inHeader.getGroupOrder() == GroupOrder.reference && inHeader.getSortOrder() == SortOrder.coordinate)
        System.err.println("Warning: Input file '" + inFile
                + "' might be sorted by coordinate and cannot be correctly processed!");

    SAMFileHeader header = inHeader.clone(); // copy the inFile header as outFile header
    // Add new programHeader
    SAMProgramRecord progRec = new SAMProgramRecord(progName);
    progRec.setProgramName(progName);
    progRec.setProgramVersion(progVer);
    progRec.setCommandLine(StringUtils.join(" ", args));
    header.addProgramRecord(progRec);
    //System.err.println(inFile + " groupOrder: " + in.getFileHeader().getGroupOrder() + " sortOrder: " + in.getFileHeader().getSortOrder());
    // reset the orders
    header.setGroupOrder(groupOrder);
    header.setSortOrder(sortOrder);

    // write SAMHeader
    String prevID = null;
    SAMRecord prevRecord = null;
    List<SAMRecord> alnList = new ArrayList<SAMRecord>();
    List<SAMRecordPair> alnPEList = null;

    // Estimate fragment length distribution by scan one-pass through the alignments
    SAMRecordIterator results = in.iterator();
    if (!NO_ESTIMATE) {
        if (verbose > 0) {
            System.err.println("Estimating insert fragment size distribution ...");
            statusTask.reset();
            statusTask.setInfo("alignments scanned");
        }
        long N = 0;
        double fragL_S = 0; // fragLen sum
        double fragL_SS = 0; // fragLen^2 sum
        while (results.hasNext()) {
            SAMRecord record = results.next();
            if (verbose > 0)
                statusTask.updateStatus();
            if (record.getFirstOfPairFlag() && !record.isSecondaryOrSupplementary()) {
                double fragLen = Math.abs(record.getInferredInsertSize());
                if (fragLen != 0 && fragLen >= MIN_FRAG_LEN && fragLen <= MAX_FRAG_LEN) { // only consider certain alignments
                    N++;
                    fragL_S += fragLen;
                    fragL_SS += fragLen * fragLen;
                }
                // stop estimate if already enough
                if (MAX_ESTIMATE_SCAN > 0 && N >= MAX_ESTIMATE_SCAN)
                    break;
            }
        }
        if (verbose > 0)
            statusTask.finish();
        // estimate fragment size
        if (N >= MIN_ESTIMATE_BASE) { // override command line values
            MEAN_FRAG_LEN = fragL_S / N;
            SD_FRAG_LEN = Math.sqrt((N * fragL_SS - fragL_S * fragL_S) / (N * (N - 1)));
            String estStr = String.format("Estimated fragment size distribution: N(%.1f, %.1f)", MEAN_FRAG_LEN, SD_FRAG_LEN);
            if (verbose > 0)
                System.err.println(estStr);
            // also add the estimation to comment
            header.addComment(estStr);
        } else {
            System.err.println("Unable to estimate the fragment size distribution due to too few observed alignments");
            System.err.println("You have to specify the '--mean-frag-len' and '--sd-frag-len' on the command line and re-run this step");
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }
        // Initiate the normal model
        normModel = new NormalDistribution(MEAN_FRAG_LEN, SD_FRAG_LEN);
        // reset the iterator, if necessary
        if (in.type() == SamReader.Type.SAM_TYPE) {
            try {
                in.close();
            } catch (IOException e) {
                System.err.println(e.getMessage());
            }
            in = readerFac.open(new File(inFile));
        }
        results.close();
        results = in.iterator();
    } // end of NO_ESTIMATE

    SAMFileWriter out = OUT_IS_SAM ? writerFac.makeSAMWriter(header, false, new File(outFile))
            : writerFac.makeBAMWriter(header, false, new File(outFile));

    // check each alignment again
    if (verbose > 0) {
        System.err.println("Filtering alignments ...");
        statusTask.reset();
        statusTask.setInfo("alignments processed");
    }
    while (results.hasNext()) {
        SAMRecord record = results.next();
        if (verbose > 0)
            statusTask.updateStatus();
        String ID = record.getReadName();
        // fix read and quality string for this read, if is a secondary hit from multiple hits, used for BWA alignment
        if (ID.equals(prevID) && record.getReadLength() == 0)
            SAMAlignFixer.fixSAMRecordRead(record, prevRecord);
        if (chrFilter != null && !chrFilter.contains(record.getReferenceName())) {
            prevID = ID;
            prevRecord = record;
            continue;
        }
        // fix MD:Z string for certain aligners with invalid format (i.e. seqAlto)
        if (fixMD)
            SAMAlignFixer.fixMisStr(record);
        // fix alignment, ignore if failed (unmapped or empty)
        if (!SAMAlignFixer.fixSAMRecord(record, knownVCF, DO_1DP)) {
            prevID = ID;
            prevRecord = record;
            continue;
        }
        if (!record.getReadPairedFlag()) {
            System.err.println("Error: alignment is not from a paired-end read at\n" + record.getSAMString());
            out.close();
            statusTask.cancel();
            processMonitor.cancel();
            return;
        }

        if (!ID.equals(prevID) && prevID != null || !results.hasNext()) { // a non-first new ID meet, or end of alignments
            // create alnPEList from filtered alnList
            alnPEList = createAlnPEListFromAlnList(alnList);
            //System.err.printf("%d alignments for %s transformed to %d alnPairs%n", alnList.size(), prevID, alnPEList.size());
            int totalPair = alnPEList.size();
            // filter highly unlikely PEhits
            filterPEHits(alnPEList, MIN_ALIGN_RATE, MIN_IDENTITY);
            // calculate posterior mapQ for each pair
            calcPEHitPostP(alnPEList, totalPair, MAX_HIT);
            // filter hits by mapQ
            if (MIN_MAPQ > 0)
                filterPEHits(alnPEList, MIN_MAPQ);
            // sort the list first with an anonymous class of comparator, in DECREASING order
            Collections.sort(alnPEList, Collections.reverseOrder());
            // control max-best
            if (MAX_BEST != 0 && alnPEList.size() > MAX_BEST) { // potentially too many best hits
                int nBestStratum = 0;
                int bestMapQ = alnPEList.get(0).getPEMapQ(); // best mapQ from first PE
                for (SAMRecordPair pr : alnPEList)
                    if (pr.getPEMapQ() == bestMapQ)
                        nBestStratum++;
                    else
                        break; // stop searching for sorted list
                if (nBestStratum > MAX_BEST)
                    alnPEList.clear();
            }
            // filter alignments with auxiliary filters
            if (!MAX_SENSITIVITY)
                filterPEHits(alnPEList, MAX_SEED_MIS, MAX_SEED_INDEL, MAX_ALL_MIS, MAX_ALL_INDEL);
            // report remaining secondary alignments, up-to MAX_REPORT
            for (int i = 0; i < alnPEList.size() && (MAX_REPORT == 0 || i < MAX_REPORT); i++) {
                SAMRecordPair repPair = alnPEList.get(i);
                if (doUpdateBit)
                    repPair.setNotPrimaryAlignmentFlags(i != 0);
                int nReport = MAX_REPORT == 0 ? Math.min(alnPEList.size(), MAX_REPORT) : alnPEList.size();
                int nFiltered = alnPEList.size();
                if (repPair.fwdRecord != null) {
                    repPair.fwdRecord.setAttribute("NH", nReport);
                    repPair.fwdRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.fwdRecord);
                }
                if (repPair.revRecord != null) {
                    repPair.revRecord.setAttribute("NH", nReport);
                    repPair.revRecord.setAttribute("XN", nFiltered);
                    out.addAlignment(repPair.revRecord);
                }
            }
            // reset list
            alnList.clear();
            alnPEList.clear();
        }
        // update
        if (!ID.equals(prevID)) {
            prevID = ID;
            prevRecord = record;
        }
        alnList.add(record);
    } // end while

    try {
        in.close();
        out.close();
    } catch (IOException e) {
        System.err.println(e.getMessage());
    }
    // Terminate the monitor task and monitor
    if (verbose > 0) {
        statusTask.cancel();
        statusTask.finish();
        processMonitor.cancel();
    }
}
From source file:viewmodel.TeamResult.java
public void setTeamResults(List<Result> teamResults, int teamPlaces) {
    Collections.sort(teamResults, (r1, r2) -> r1.getRt().compareTo(r2.getRt()));
    for (int i = 0; i < teamPlaces; ++i) {
        Duration d = Utils.dateToDuration(teamResults.get(i).getRt());
        teamDuration = teamDuration.plus(d);
    }
    this.teamResults = teamResults;
}
From source file:com.u2apple.tool.util.StaticMapFileUtils.java
private static void sortModels(VID vid) {
    List<Modal> models = vid.getModals();
    // Sort model values.
    models.stream().forEach((model) -> {
        if (model.getValues() == null || model.getValues().isEmpty()) {
            System.out.println(model);
        }
        sortValue(model.getValues());
    });
    // Sort models.
    Collections.sort(models,
            (o1, o2) -> o1.getValues().get(0).getValue().compareToIgnoreCase(o2.getValues().get(0).getValue()));
    // Merge models.
    fastMergeModels(models);
    // Reverse contained model.
    for (int i = 0; i < models.size(); i++) {
        for (int j = i + 1; j < models.size(); j++) {
            if (valueContains(models.get(j).getValues(), models.get(i).getValues())) {
                models.add(i, models.remove(j));
            }
        }
    }
}