List of usage examples for java.util TreeMap firstEntry
public Map.Entry<K, V> firstEntry()
From source file:com.git.ifly6.components.Census.java
/**
 * Entry point: runs an endorsement census over the WA members of a region.
 * The region name comes from args[0], or is prompted for on stdin when absent.
 * Prints a table of nations, endorsement counts, and differences, followed by
 * the delegate's endorsement proportion.
 */
public static void main(String[] args) {
    Scanner scan = new Scanner(System.in);
    try {
        region = new NSRegion(args[0]);
    } catch (ArrayIndexOutOfBoundsException e) {
        // No region supplied on the command line; ask interactively.
        System.out.print("Please input the name of your region: \t");
        region = new NSRegion(scan.nextLine());
    }
    try {
        HashMap<String, Integer> endoMap = new HashMap<String, Integer>();
        String[] waMembers = region.getWAMembers();
        int[] valueCount = new int[waMembers.length];
        System.out.println(
                "[INFO] This census will take: " + time((int) Math.round(waitTime * waMembers.length)));
        for (int i = 0; i < waMembers.length; i++) {
            NSNation nation = new NSNation(waMembers[i]);
            valueCount[i] = nation.getEndoCount();
            // Integer.valueOf instead of the deprecated new Integer(...) constructor.
            endoMap.put(waMembers[i], Integer.valueOf(valueCount[i]));
            System.out.println("[LOG] Fetched information for: " + waMembers[i] + ", " + (i + 1) + " of "
                    + waMembers.length);
        }
        TreeMap<String, Integer> sortedMap = sortByValue(endoMap);
        int current = 0;
        int previous = sortedMap.firstEntry().getValue();
        System.out.printf("%-35s %12s %12s%n", "Nations", "Endorsements", "Difference");
        System.out.println("-------------------------------------------------------------");
        for (Map.Entry<String, Integer> entry : sortedMap.entrySet()) {
            String nationName = StringUtils.capitalize(entry.getKey().replace('_', ' '));
            current = entry.getValue();
            if ((previous - current) != 0) {
                System.out.printf("%-35s %12s %12s%n", nationName, entry.getValue(), (previous - current));
            } else {
                System.out.printf("%-35s %12s %12s%n", nationName, entry.getValue(), "-");
            }
            previous = entry.getValue();
        }
        System.out.println("-------------------------------------------------------------");
        System.out.printf("%-35s %12s %12s%n", "Delegate", "Endorsements", "Proportion");
        // FIX: cast before dividing. The original computed (double)(intA / intB),
        // which performs integer division first, so the proportion was always a
        // whole number (usually 0.0).
        System.out.printf("%-35s %12s %12s%n",
                StringUtils.capitalize(sortedMap.firstEntry().getKey().replace('_', ' ')),
                sortedMap.firstEntry().getValue(),
                (double) sortedMap.firstEntry().getValue() / waMembers.length);
    } catch (IOException e) {
        printError("Failed to fetch WA members or get endorsements in this region. "
                + "Check your internet connection or the state of the API.");
    }
    scan.close();
}
From source file:Main.java
public static void main(String[] args) { TreeMap<Integer, String> treemap = new TreeMap<Integer, String>(); // populating tree map treemap.put(2, "two"); treemap.put(1, "one"); treemap.put(3, "three"); treemap.put(6, "six"); treemap.put(5, "from java2s.com"); System.out.println("Checking first entry"); System.out.println("First entry is: " + treemap.firstEntry()); }
From source file:org.apache.zeppelin.interpreter.InterpreterResult.java
/** * Magic is like %html %text./*from ww w .ja v a2 s . c om*/ * * @param msg * @return */ private String getData(String msg) { if (msg == null) { return null; } Type[] types = type.values(); TreeMap<Integer, Type> typesLastIndexInMsg = buildIndexMap(msg); if (typesLastIndexInMsg.size() == 0) { return msg; } else { Map.Entry<Integer, Type> lastType = typesLastIndexInMsg.firstEntry(); //add 1 for the % char int magicLength = lastType.getValue().name().length() + 1; // 1 for the last \n or space after magic int subStringPos = magicLength + lastType.getKey() + 1; return msg.substring(subStringPos); } }
From source file:org.apache.zeppelin.interpreter.InterpreterResult.java
private Type getType(String msg) { if (msg == null) { return Type.TEXT; }/* w w w . j a va2 s .c o m*/ Type[] types = type.values(); TreeMap<Integer, Type> typesLastIndexInMsg = buildIndexMap(msg); if (typesLastIndexInMsg.size() == 0) { return Type.TEXT; } else { Map.Entry<Integer, Type> lastType = typesLastIndexInMsg.firstEntry(); return lastType.getValue(); } }
From source file:com.act.lcms.db.analysis.WaveformAnalysis.java
/** * This function picks the best retention time among the best peaks from the standard wells. The algorithm is * looking for the following heuristics for standard well peak detection: a) a great peak profile * b) magnitude of peak is high c) the well is not from MeOH media. It implements this by picking the global * 3 best peaks from ALL the standard wells which are not in MeOH media using a peak feature detector. It then * compares overlaps between these peaks against the local 3 best peaks of the negative controls and positive samples. * If there is an overlap, we have detected a positive signal. * @param standardWells The list of standard wells to benchmark from * @param representativeMetlinIon This is the metlin ion that is used for the analysis, usually it is the best * metlin ion picked up an algorithm among the standard well scans. * @param positiveAndNegativeWells These are positive and negative wells against which the retention times are * compared to see for overlaps. * @return A map of Scandata to XZ values for those signals where peaks match between the standard and pos/neg runs. *///from w w w. j a v a 2 s .c om public static Map<ScanData<LCMSWell>, XZ> pickBestRepresentativeRetentionTimeFromStandardWells( List<ScanData<StandardWell>> standardWells, String representativeMetlinIon, List<ScanData<LCMSWell>> positiveAndNegativeWells) { List<XZ> bestStandardPeaks = new ArrayList<>(); for (ScanData<StandardWell> well : standardWells) { if (well.getWell() != null) { // For retention times, select standard runs where the media is not MeOH since // MeOH has a lot more skew in retention time than other media. Moreover, none // of the feeding runs have their media as MeOH. 
if (well.getWell().getMedia() == null || !well.getWell().getMedia().equals("MeOH")) { bestStandardPeaks.addAll(detectPeaksInIntensityTimeWaveform( well.getMs1ScanResults().getIonsToSpectra().get(representativeMetlinIon), PEAK_DETECTION_THRESHOLD)); } } } // Sort in descending order of intensity Collections.sort(bestStandardPeaks, new Comparator<XZ>() { @Override public int compare(XZ o1, XZ o2) { return o2.getIntensity().compareTo(o1.getIntensity()); } }); Map<ScanData<LCMSWell>, XZ> result = new HashMap<>(); // Select from the top peaks in the standards run for (ScanData<LCMSWell> well : positiveAndNegativeWells) { List<XZ> topPeaksOfSample = detectPeaksInIntensityTimeWaveform( well.getMs1ScanResults().getIonsToSpectra().get(representativeMetlinIon), PEAK_DETECTION_THRESHOLD); for (XZ topPeak : bestStandardPeaks.subList(0, NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM - 1)) { int count = topPeaksOfSample.size() >= NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM ? NUMBER_OF_BEST_PEAKS_TO_SELECTED_FROM - 1 : topPeaksOfSample.size(); // Collisions do not matter here since we are just going to pick the highest intensity peak match, so ties // are arbitarily broker based on the order for access in the for loop below. TreeMap<Double, XZ> intensityToIntensityTimeValue = new TreeMap<>(Collections.reverseOrder()); for (int i = 0; i < count; i++) { if (topPeaksOfSample.get(i).getTime() > topPeak.getTime() - TIME_SKEW_CORRECTION && topPeaksOfSample.get(i).getTime() < topPeak.getTime() + TIME_SKEW_CORRECTION) { // There has been significant overlap in peaks between standard and sample. intensityToIntensityTimeValue.put(topPeaksOfSample.get(i).getIntensity(), topPeaksOfSample.get(i)); } } if (intensityToIntensityTimeValue.keySet().size() > 0) { // Get the best peak overlap based on the largest magnitude intensity result.put(well, intensityToIntensityTimeValue.firstEntry().getValue()); } } } return result; }
From source file:com.smartitengineering.cms.api.impl.type.FieldDefImpl.java
@Override public VariationDef getVariationDefForMimeType(String mimeType) { TreeMap<String, VariationDef> map = new TreeMap<String, VariationDef>(); for (VariationDef def : variationDefs) { if (def.getMIMEType().equals(mimeType)) { map.put(def.getName(), def); }/*from w w w . ja v a 2 s.c om*/ } if (map.isEmpty()) { return null; } else { return map.firstEntry().getValue(); } }
From source file:io.openmessaging.rocketmq.consumer.LocalMessageCache.java
private void cleanExpireMsg() { for (final Map.Entry<MessageQueue, ProcessQueue> next : rocketmqPullConsumer.getDefaultMQPullConsumerImpl() .getRebalanceImpl().getProcessQueueTable().entrySet()) { ProcessQueue pq = next.getValue(); MessageQueue mq = next.getKey(); ReadWriteLock lockTreeMap = getLockInProcessQueue(pq); if (lockTreeMap == null) { log.error("Gets tree map lock in process queue error, may be has compatibility issue"); return; }//from ww w . j ava2s. c o m TreeMap<Long, MessageExt> msgTreeMap = pq.getMsgTreeMap(); int loop = msgTreeMap.size(); for (int i = 0; i < loop; i++) { MessageExt msg = null; try { lockTreeMap.readLock().lockInterruptibly(); try { if (!msgTreeMap.isEmpty()) { msg = msgTreeMap.firstEntry().getValue(); if (System.currentTimeMillis() - Long.parseLong(MessageAccessor.getConsumeStartTimeStamp(msg)) > clientConfig .getRmqMessageConsumeTimeout() * 60 * 1000) { //Expired, ack and remove it. } else { break; } } else { break; } } finally { lockTreeMap.readLock().unlock(); } } catch (InterruptedException e) { log.error("Gets expired message exception", e); } try { rocketmqPullConsumer.sendMessageBack(msg, 3); log.info("Send expired msg back. topic={}, msgId={}, storeHost={}, queueId={}, queueOffset={}", msg.getTopic(), msg.getMsgId(), msg.getStoreHost(), msg.getQueueId(), msg.getQueueOffset()); ack(mq, pq, msg); } catch (Exception e) { log.error("Send back expired msg exception", e); } } } }
From source file:org.cloudata.core.client.Row.java
/** * Return first Cell object in specified column<BR> * @param columnName/*from w w w .jav a2 s . com*/ * @return */ public Cell getFirst(String columnName) { TreeMap<Cell.Key, Cell> cellMap = cells.get(columnName); if (cellMap == null || cellMap.isEmpty()) { return null; } return cellMap.firstEntry().getValue(); }
From source file:com.smartitengineering.cms.api.impl.type.ContentTypeImpl.java
@Override public RepresentationDef getRepresentationDefForMimeType(String mimeType) { TreeMap<String, RepresentationDef> map = new TreeMap<String, RepresentationDef>(); for (RepresentationDef def : representationDefs) { if (def.getMIMEType().equals(mimeType)) { map.put(def.getName(), def); }//from w w w .j ava 2 s.c o m } if (map.isEmpty()) { return null; } else { return map.firstEntry().getValue(); } }
From source file:ubic.gemma.core.datastructure.matrix.ExpressionDataMatrixColumnSort.java
/** * Identify the FactorValue that should be treated as 'Baseline' for each of the given factors. This is done * heuristically, and if all else fails we choose arbitrarily. For continuous factors, the minimum value is treated * as baseline./*from www. j a v a 2 s.c o m*/ * * @param samplesUsed These are used to make sure we don't bother using factor values as baselines if they are not * used by any of the samples. This is important for subsets. If null, this is ignored. * @param factors factors * @return map of factors to the baseline factorvalue for that factor. */ public static Map<ExperimentalFactor, FactorValue> getBaselineLevels(List<BioMaterial> samplesUsed, Collection<ExperimentalFactor> factors) { Map<ExperimentalFactor, FactorValue> result = new HashMap<>(); for (ExperimentalFactor factor : factors) { if (factor.getFactorValues().isEmpty()) { throw new IllegalStateException("Factor has no factor values: " + factor); } if (ExperimentalDesignUtils.isContinuous(factor)) { // then there is no baseline, but we'll take the minimum value. TreeMap<Double, FactorValue> sortedVals = new TreeMap<>(); for (FactorValue fv : factor.getFactorValues()) { /* * Check that this factor value is used by at least one of the given samples. Only matters if this * is a subset of the full data set. */ if (samplesUsed != null && !ExpressionDataMatrixColumnSort.used(fv, samplesUsed)) { // this factorValue cannot be a candidate baseline for this subset. continue; } if (fv.getMeasurement() == null) { throw new IllegalStateException("Continuous factors should have Measurements as values"); } Double v = Double.parseDouble(fv.getMeasurement().getValue()); sortedVals.put(v, fv); } result.put(factor, sortedVals.firstEntry().getValue()); } else { for (FactorValue fv : factor.getFactorValues()) { /* * Check that this factor value is used by at least one of the given samples. Only matters if this * is a subset of the full data set. 
*/ if (samplesUsed != null && !ExpressionDataMatrixColumnSort.used(fv, samplesUsed)) { // this factorValue cannot be a candidate baseline for this subset. continue; } if (BaselineSelection.isForcedBaseline(fv)) { ExpressionDataMatrixColumnSort.log.info("Baseline chosen: " + fv); result.put(factor, fv); break; } if (BaselineSelection.isBaselineCondition(fv)) { if (result.containsKey(factor)) { ExpressionDataMatrixColumnSort.log .warn("A second potential baseline was found for " + factor + ": " + fv); continue; } ExpressionDataMatrixColumnSort.log.info("Baseline chosen: " + fv); result.put(factor, fv); } } if (!result.containsKey(factor)) { // fallback FactorValue arbitraryBaselineFV = null; if (samplesUsed != null) { // make sure we choose a fv that is actually used (see above for non-arbitrary case) for (FactorValue fv : factor.getFactorValues()) { for (BioMaterial bm : samplesUsed) { for (FactorValue bfv : bm.getFactorValues()) { if (fv.equals(bfv)) { arbitraryBaselineFV = fv; break; } } if (arbitraryBaselineFV != null) break; } if (arbitraryBaselineFV != null) break; } } else { arbitraryBaselineFV = factor.getFactorValues().iterator().next(); } if (arbitraryBaselineFV == null) { throw new IllegalStateException("No baseline could be identified for factor: " + factor + " has " + factor.getFactorValues().size() + " factor values"); } ExpressionDataMatrixColumnSort.log .info("Falling back on choosing baseline arbitrarily: " + arbitraryBaselineFV); result.put(factor, arbitraryBaselineFV); } } } return result; }