Usage examples for java.util.PriorityQueue#peek()
public E peek()
From source file:Main.java
public static void main(String args[]) { PriorityQueue<Integer> prq = new PriorityQueue<Integer>(); for (int i = 3; i < 10; i++) { prq.add(i);//from w ww . j a va 2s .c om } System.out.println(prq); // get the head from the queue Integer head = prq.peek(); System.out.println("Head of the queue is: " + head); }
From source file:org.sample.whiteboardapp.MyWhiteboard.java
/**
 * Recursive k-nearest-neighbour search over a 2-D KD-tree. Maintains up to k
 * candidate distances in {@code pq} and a distance-to-node lookup in {@code hm}.
 *
 * NOTE(review): the replacement test {@code dist < pq.peek()} compares against the
 * queue head; as a k-NN bound this is only correct if the queue's comparator makes
 * the head the LARGEST kept distance (a max-heap). A default PriorityQueue&lt;Double&gt;
 * is a min-heap — confirm the caller supplies a reversed comparator.
 *
 * NOTE(review): {@code child} (the far subtree) is assigned only inside the
 * null-check of the near subtree, so when the near child is null the far side is
 * never explored — verify this is intentional.
 *
 * @param pq     queue of the best (up to k) distances found so far
 * @param hm     maps each kept distance to its node (entries collide if distances repeat)
 * @param root   subtree root to search
 * @param Qpoint the query point
 * @param k      number of neighbours requested
 * @param depth  current splitting dimension (callers alternate it via (depth + 1) % 2)
 */
static void searchKDSubtree(PriorityQueue<Double> pq, HashMap<Double, Node> hm, Node root, double[] Qpoint,
        int k, int depth) {
    Node child = null;
    int dim = depth;
    double dist = Distance(Qpoint, root.point);
    // Keep this node if fewer than k candidates exist, or if it beats the head.
    if (pq.size() < k) {
        pq.add(dist);
        hm.put(dist, root);
    } else if (dist < pq.peek()) {
        pq.poll();
        pq.add(dist);
        hm.put(dist, root);
    }
    // Descend into the subtree on the query point's side of the split;
    // remember the opposite subtree for a possible second visit.
    if (Qpoint[dim] < root.point[dim]) {
        if (root.left != null) {
            searchKDSubtree(pq, hm, root.left, Qpoint, k, (depth + 1) % 2);
            child = root.right;
        }
    } else {
        if (root.right != null) {
            searchKDSubtree(pq, hm, root.right, Qpoint, k, (depth + 1) % 2);
            child = root.left;
        }
    }
    // Visit the far subtree only when the distance to the splitting plane could
    // still yield a closer neighbour (or the candidate set is not yet full).
    if ((pq.size() < k || (Qpoint[dim] - root.point[dim]) < pq.peek()) && child != null) {
        searchKDSubtree(pq, hm, child, Qpoint, k, (depth + 1) % 2);
    }
}
From source file:main.java.RMDupper.java
public static void queueOrOutput(DupStats dupStats, OccurenceCounterMerged occurenceCounterMerged, SAMFileWriter outputSam, Boolean allReadsAsMerged, PriorityQueue<ImmutableTriple<Integer, Integer, SAMRecord>> recordBuffer, PriorityQueue<ImmutableTriple<Integer, Integer, SAMRecord>> duplicateBuffer, Set<String> discardSet, SAMRecord curr) {//w ww .j av a2s.co m //Don't do anything with unmapped reads, just write them into the output! if (curr.getReadUnmappedFlag() || curr.getMappingQuality() == 0) { outputSam.addAlignment(curr); } else { if (recordBuffer.size() > 0 && recordBuffer.peek().middle < curr.getAlignmentStart()) { checkForDuplication(dupStats, occurenceCounterMerged, outputSam, allReadsAsMerged, recordBuffer, duplicateBuffer, discardSet); } recordBuffer.add(new ImmutableTriple<Integer, Integer, SAMRecord>(curr.getAlignmentStart(), curr.getAlignmentEnd(), curr)); } }
From source file:main.java.RMDupper.java
/**
 * De-duplicates the alignments that overlap the first entry of {@code recordBuffer}.
 * Candidate duplicates are collected into {@code duplicateBuffer}; the head of
 * that queue (its comparator decides which duplicate is kept) is written to
 * {@code outputSam}, all candidates are recorded in {@code discardSet}, and
 * already-resolved entries are drained from the front of {@code recordBuffer}.
 *
 * NOTE(review): an unlabelled buffered read throws, but an unlabelled candidate
 * read only logs to stderr and continues with no maybed_* flag set — confirm this
 * asymmetry is intentional.
 */
public static void checkForDuplication(DupStats dupStats, OccurenceCounterMerged occurenceCounterMerged,
        SAMFileWriter outputSam, Boolean allReadsAsMerged,
        PriorityQueue<ImmutableTriple<Integer, Integer, SAMRecord>> recordBuffer,
        PriorityQueue<ImmutableTriple<Integer, Integer, SAMRecord>> duplicateBuffer, Set<String> discardSet) {
    // At this point recordBuffer contains all alignments that overlap with its first entry.
    // Therefore the task here is to de-duplicate for the first entry in recordBuffer.
    duplicateBuffer.clear();
    Iterator<ImmutableTriple<Integer, Integer, SAMRecord>> it = recordBuffer.iterator();
    while (it.hasNext()) {
        ImmutableTriple<Integer, Integer, SAMRecord> maybeDuplicate = it.next();
        if (allReadsAsMerged) {
            // Merged-only mode: duplicates are entries with identical start and end.
            if (recordBuffer.peek().left.equals(maybeDuplicate.left)
                    && recordBuffer.peek().middle.equals(maybeDuplicate.middle)) {
                duplicateBuffer.add(maybeDuplicate);
            }
        } else {
            // We build a logic table of observed conditions, then match it
            // against the configured duplicate condition sets.
            EnumSet<DL> testConditon = EnumSet.noneOf(DL.class);
            // Classify the buffered (head) read by name prefix:
            // M_ = merged, F_ = read one, R_ = read two (per the DL enum names).
            if (recordBuffer.peek().right.getReadName().startsWith("M_")) {
                testConditon.add(DL.buffer_read_merged);
            } else if (recordBuffer.peek().right.getReadName().startsWith("F_")) {
                testConditon.add(DL.buffer_read_one);
            } else if (recordBuffer.peek().right.getReadName().startsWith("R_")) {
                testConditon.add(DL.buffer_read_two);
            } else {
                throw new RuntimeException("Unlabelled read '" + recordBuffer.peek().right.getReadName()
                        + "' read name must start with one of M_,F_,R when not treating all reads as merged");
            }
            // Classify the candidate read the same way.
            if (maybeDuplicate.right.getReadName().startsWith("M_")) {
                testConditon.add(DL.maybed_read_merged);
            } else if (maybeDuplicate.right.getReadName().startsWith("F_")) {
                testConditon.add(DL.maybed_read_one);
            } else if (maybeDuplicate.right.getReadName().startsWith("R_")) {
                testConditon.add(DL.maybed_read_two);
            } else {
                System.err.println("Unlabelled read '" + maybeDuplicate.right.getReadName()
                        + "' read name must start with one of M_,F_,R when not treating all reads as merged");
            }
            // Positional relations between head and candidate.
            if (recordBuffer.peek().left.equals(maybeDuplicate.left)) {
                testConditon.add(DL.equal_alignment_start);
            }
            if (recordBuffer.peek().middle.equals(maybeDuplicate.middle)) {
                testConditon.add(DL.equal_alignment_end);
            }
            // Length comparison (both flags set when lengths are equal).
            boolean duplicateIsShorterOrEqual = maybeDuplicate.middle - maybeDuplicate.left <= recordBuffer
                    .peek().middle - recordBuffer.peek().left;
            boolean duplicateIsLongerOrEqual = recordBuffer.peek().middle
                    - recordBuffer.peek().left <= maybeDuplicate.middle - maybeDuplicate.left;
            if (duplicateIsShorterOrEqual) {
                testConditon.add(DL.maybed_shorter_or_equal);
            }
            if (duplicateIsLongerOrEqual) {
                testConditon.add(DL.maybed_longer_or_equal);
            }
            // Strand orientation of both reads.
            if (recordBuffer.peek().right.getReadNegativeStrandFlag()) {
                testConditon.add(DL.buffer_reverse_strand);
            } else {
                testConditon.add(DL.buffer_forward_strand);
            }
            if (maybeDuplicate.right.getReadNegativeStrandFlag()) {
                testConditon.add(DL.maybed_reverse_strand);
            } else {
                testConditon.add(DL.maybed_forward_strand);
            }
            // Test for duplication: the candidate is a duplicate if any configured
            // condition set is fully contained in the observed condition set.
            if (duplicateConditionSet.stream().anyMatch(dc -> testConditon.containsAll(dc))) {
                duplicateBuffer.add(maybeDuplicate);
            }
        }
    }
    // (Removed: large commented-out debug dump of recordBuffer/duplicateBuffer contents.)
    // Write the head of duplicateBuffer unless it was already discarded earlier.
    if (!duplicateBuffer.isEmpty() && !discardSet.contains(duplicateBuffer.peek().right.getReadName())) {
        decrementDuplicateStats(dupStats, allReadsAsMerged, duplicateBuffer.peek().right.getReadName());
        // Count of merged reads (or all reads, in merged-only mode) minus the kept one.
        occurenceCounterMerged.putValue(Long
                .valueOf(duplicateBuffer.stream()
                        .filter(d -> allReadsAsMerged || d.right.getReadName().startsWith("M_")).count())
                .intValue() - 1);
        outputSam.addAlignment(duplicateBuffer.peek().right);
    }
    // Every candidate (including the written one) is now resolved.
    while (!duplicateBuffer.isEmpty()) {
        discardSet.add(duplicateBuffer.poll().right.getReadName());
    }
    // Maintain the invariant that the first item in recordBuffer may have duplicates.
    while (!recordBuffer.isEmpty() && discardSet.contains(recordBuffer.peek().right.getReadName())) {
        String duplicateReadName = recordBuffer.poll().right.getReadName();
        incrementDuplicateStats(dupStats, allReadsAsMerged, duplicateReadName);
        discardSet.remove(duplicateReadName);
    }
}
From source file:org.eclipse.recommenders.jayes.transformation.LatentDeterministicDecomposition.java
private List<double[]> getBest(final Map<double[], Integer> counts, int basisSize, int minTotalCounts) { PriorityQueue<double[]> q = new PriorityQueue<double[]>(basisSize, Ordering.natural().onResultOf(Functions.forMap(counts))); for (Entry<double[], Integer> e : counts.entrySet()) { if (q.isEmpty() || q.size() < basisSize) { q.add(e.getKey());//from www. j a va2 s .co m } else { double[] head = q.peek(); if (counts.get(head) < counts.get(e.getKey())) { q.remove(); q.add(e.getKey()); } } } int totalcounts = 0; for (double[] v : q) { totalcounts += counts.get(v); } if (totalcounts < minTotalCounts) return null; return new ArrayList<double[]>(q); }
From source file:net.sourceforge.jasa.market.FourHeapOrderBook.java
/** * Unify the shout at the top of the heap with the supplied shout, so that * quantity(shout) = quantity(top(heap)). This is achieved by splitting the * supplied shout or the shout at the top of the heap. * /*from w w w . j a v a 2 s . co m*/ * @param shout * The shout. * @param to * The heap being moved to. * * @return A reference to the, possibly modified, shout. * */ protected Order unifyShout(Order shout, PriorityQueue<Order> from, PriorityQueue<Order> to) { Order top = (Order) from.peek(); if (shout.getQuantity() > top.getQuantity()) { shout = shout.splat(shout.getQuantity() - top.getQuantity()); } else { if (top.getQuantity() > shout.getQuantity()) { Order remainder = top.split(top.getQuantity() - shout.getQuantity()); from.add(remainder); assert ((remainder.isBid && from == bOut) || ((!remainder.isBid) && from == sOut)); to.add(from.remove()); // to.add(remainder); return shout; } } to.add(from.remove()); return shout; }
From source file:com.android.switchaccess.HuffmanTreeBuilder.java
/** * Builds a Huffman tree with all the clickable nodes in the tree anchored at * {@code windowRoot}. The context provides information about actions the user has taken so far * and allows the probabilities for the views in a window to be adjusted based on that. * * @param windowRoot The root of the tree of SwitchAccessNodeCompat * @param treeToBuildOn A tree of OptionScanNodes that should be included as part of the * Huffman tree./*from w ww . j a v a2 s . c o m*/ * @param context The actions the user has taken so far. In case of an IME, this would be what * the user has typed so far. * @return A Huffman tree of OptionScanNodes including the tree {@code treeToBuildOn} and all * clickable nodes from the {@code windowRoot} tree. If there are no clickable nodes in * {@code windowRoot and the treeToBuildOn is {@code null}, a {@code ClearFocusNode} is * returned. */ /* TODO(rmorina) It will probably not be possible to capture context using a string only. * Once we understand how to capture context better we need to change this. */ public OptionScanNode buildTreeFromNodeTree(SwitchAccessNodeCompat windowRoot, OptionScanNode treeToBuildOn, String context) { PriorityQueue<HuffmanNode> optionScanNodeProbabilities = getOptionScanNodeProbabilities(context, windowRoot); ClearFocusNode clearFocusNode = new ClearFocusNode(); if (treeToBuildOn != null) { optionScanNodeProbabilities.add(new HuffmanNode(treeToBuildOn, DEFAULT_PROBABILITY)); } else if (optionScanNodeProbabilities.isEmpty()) { return clearFocusNode; } optionScanNodeProbabilities.add(createParentNode(optionScanNodeProbabilities, getNodesPerParent(optionScanNodeProbabilities.size()), clearFocusNode)); while (optionScanNodeProbabilities.size() > 1) { optionScanNodeProbabilities.add(createParentNode(optionScanNodeProbabilities, mDegree, clearFocusNode)); } return optionScanNodeProbabilities.peek().getOptionScanNode(); }
From source file:edu.stanford.cfuller.colocalization3d.correction.PositionCorrector.java
/**
 * Creates a correction from a set of objects whose positions should be the same in each channel.
 *
 * For each object, its numberOfPointsToFit nearest neighbours (in the reference
 * channel) are selected and a quadratic polynomial in the centered x and y
 * coordinates is least-squares fitted to the inter-channel position differences,
 * giving one row of correction coefficients per object and coordinate.
 *
 * @param imageObjects A Vector containing all the ImageObjects to be used for the correction
 * or in the order it appears in a multiwavelength image file.
 * @return A Correction object that can be used to correct the positions of other objects based upon the standards provided.
 */
public Correction getCorrection(java.util.List<ImageObject> imageObjects) {
    int referenceChannel = this.parameters.getIntValueForKey(REF_CH_PARAM);
    int channelToCorrect = this.parameters.getIntValueForKey(CORR_CH_PARAM);
    // Not asked to determine a new correction: load the stored one from disk.
    if (!this.parameters.hasKeyAndTrue(DET_CORR_PARAM)) {
        try {
            return Correction.readFromDisk(FileUtils.getCorrectionFilename(this.parameters));
        } catch (java.io.IOException e) {
            java.util.logging.Logger
                    .getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME)
                    .severe("Exception encountered while reading correction from disk: ");
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            java.util.logging.Logger
                    .getLogger(edu.stanford.cfuller.colocalization3d.Colocalization3DMain.LOGGER_NAME)
                    .severe("Exception encountered while reading correction from disk: ");
            e.printStackTrace();
        }
        return null;
    }
    int numberOfPointsToFit = this.parameters.getIntValueForKey(NUM_POINT_PARAM);
    // One row of fitted coefficients per object, for each coordinate.
    RealMatrix correctionX = new Array2DRowRealMatrix(imageObjects.size(), numberOfCorrectionParameters);
    RealMatrix correctionY = new Array2DRowRealMatrix(imageObjects.size(), numberOfCorrectionParameters);
    RealMatrix correctionZ = new Array2DRowRealMatrix(imageObjects.size(), numberOfCorrectionParameters);
    RealVector distanceCutoffs = new ArrayRealVector(imageObjects.size(), 0.0);
    RealVector ones = new ArrayRealVector(numberOfPointsToFit, 1.0);
    RealVector distancesToObjects = new ArrayRealVector(imageObjects.size(), 0.0);
    RealMatrix allCorrectionParametersMatrix = new Array2DRowRealMatrix(numberOfPointsToFit,
            numberOfCorrectionParameters);
    for (int i = 0; i < imageObjects.size(); i++) {
        RealVector ithPos = imageObjects.get(i).getPositionForChannel(referenceChannel);
        // Distance from object i to every object (including itself, at distance 0)
        // in the reference channel.
        for (int j = 0; j < imageObjects.size(); j++) {
            double d = imageObjects.get(j).getPositionForChannel(referenceChannel).subtract(ithPos).getNorm();
            distancesToObjects.setEntry(j, d);
        }
        //the sorting becomes a bottleneck once the number of points gets large
        //reverse comparator so we can use the priority queue and get the max element at the head
        Comparator<Double> cdReverse = new Comparator<Double>() {
            public int compare(Double o1, Double o2) {
                if (o1.equals(o2))
                    return 0;
                if (o1 > o2)
                    return -1;
                return 1;
            }
        };
        // Max-heap holding the (numberOfPointsToFit + 1) smallest distances seen so far.
        PriorityQueue<Double> pq = new PriorityQueue<Double>(numberOfPointsToFit + 2, cdReverse);
        double maxElement = Double.MAX_VALUE;
        for (int p = 0; p < numberOfPointsToFit + 1; p++) {
            pq.add(distancesToObjects.getEntry(p));
        }
        maxElement = pq.peek();
        for (int p = numberOfPointsToFit + 1; p < distancesToObjects.getDimension(); p++) {
            double value = distancesToObjects.getEntry(p);
            if (value < maxElement) {
                // Smaller than the current maximum of the kept set: replace the head.
                pq.poll();
                pq.add(value);
                maxElement = pq.peek();
            }
        }
        // Head is the (n+1)-th smallest distance (first excluded); the next poll is
        // the n-th smallest (last included). The cutoff sits halfway between them.
        double firstExclude = pq.poll();
        double lastDist = pq.poll();
        double distanceCutoff = (lastDist + firstExclude) / 2.0;
        distanceCutoffs.setEntry(i, distanceCutoff);
        RealVector xPositionsToFit = new ArrayRealVector(numberOfPointsToFit, 0.0);
        RealVector yPositionsToFit = new ArrayRealVector(numberOfPointsToFit, 0.0);
        // NOTE(review): zPositionsToFit is populated below but never used in the
        // fit (only x and y enter the design matrix) — possibly vestigial; confirm.
        RealVector zPositionsToFit = new ArrayRealVector(numberOfPointsToFit, 0.0);
        RealMatrix differencesToFit = new Array2DRowRealMatrix(numberOfPointsToFit,
                imageObjects.get(0).getPositionForChannel(referenceChannel).getDimension());
        // Gather the neighbours (objects strictly closer than the cutoff) and their
        // inter-channel position differences.
        int toFitCounter = 0;
        for (int j = 0; j < imageObjects.size(); j++) {
            if (distancesToObjects.getEntry(j) < distanceCutoff) {
                xPositionsToFit.setEntry(toFitCounter,
                        imageObjects.get(j).getPositionForChannel(referenceChannel).getEntry(0));
                yPositionsToFit.setEntry(toFitCounter,
                        imageObjects.get(j).getPositionForChannel(referenceChannel).getEntry(1));
                zPositionsToFit.setEntry(toFitCounter,
                        imageObjects.get(j).getPositionForChannel(referenceChannel).getEntry(2));
                differencesToFit.setRowVector(toFitCounter, imageObjects.get(j)
                        .getVectorDifferenceBetweenChannels(referenceChannel, channelToCorrect));
                toFitCounter++;
            }
        }
        // Center neighbour coordinates on object i, then build the design matrix
        // columns [1, x, y, x^2, y^2, xy] for a quadratic fit in x and y.
        RealVector x = xPositionsToFit.mapSubtractToSelf(ithPos.getEntry(0));
        RealVector y = yPositionsToFit.mapSubtractToSelf(ithPos.getEntry(1));
        allCorrectionParametersMatrix.setColumnVector(0, ones);
        allCorrectionParametersMatrix.setColumnVector(1, x);
        allCorrectionParametersMatrix.setColumnVector(2, y);
        allCorrectionParametersMatrix.setColumnVector(3, x.map(new Power(2)));
        allCorrectionParametersMatrix.setColumnVector(4, y.map(new Power(2)));
        allCorrectionParametersMatrix.setColumnVector(5, x.ebeMultiply(y));
        // Least-squares solve (via QR) for each coordinate's correction coefficients.
        DecompositionSolver solver = (new QRDecomposition(allCorrectionParametersMatrix)).getSolver();
        RealVector cX = solver.solve(differencesToFit.getColumnVector(0));
        RealVector cY = solver.solve(differencesToFit.getColumnVector(1));
        RealVector cZ = solver.solve(differencesToFit.getColumnVector(2));
        correctionX.setRowVector(i, cX);
        correctionY.setRowVector(i, cY);
        correctionZ.setRowVector(i, cZ);
    }
    Correction c = new Correction(correctionX, correctionY, correctionZ, distanceCutoffs, imageObjects,
            referenceChannel, channelToCorrect);
    return c;
}
From source file:edu.utsa.sifter.som.MainSOM.java
void initTerms() throws IOException { final Terms terms = MultiFields.getTerms(Reader, "body"); System.out.println("number of terms in index: " + terms.size()); final PriorityQueue<TermPair> topTerms = new PriorityQueue<TermPair>(Conf.MAX_VECTOR_FEATURES, new TermPair.TermPairComparator()); int num = 0;/*from w ww. j a v a 2 s.c o m*/ TermsEnum term = terms.iterator(null); while (term.next() != null) { final int count = term.docFreq(); final double r = ((double) count) / Reader.numDocs(); if (Conf.DOC_FREQ_THRESHOLD_LOW <= r && r <= Conf.DOC_FREQ_THRESHOLD_HIGH) { final String s = term.term().utf8ToString(); if (s.length() >= Conf.MIN_SOM_TERM_LENGTH) { if (topTerms.size() < Conf.MAX_VECTOR_FEATURES) { topTerms.add(new TermPair(s, count)); } else if (topTerms.peek().DocCount < count) { topTerms.remove(); topTerms.add(new TermPair(s, count)); } ++num; } } } System.out.println(num + " terms with in doc frequency range"); final int numFeatures = Math.min(topTerms.size(), Conf.MAX_VECTOR_FEATURES); TermIndices = new HashMap<String, Integer>((numFeatures * 4 + 1) / 3); // respect load factor Terms = new java.util.Vector<String>(numFeatures); Terms.setSize(numFeatures); System.out.println("the top " + numFeatures + " features will be used"); for (int i = numFeatures - 1; i > -1; --i) { // reverse order, to put top terms first TermPair t = topTerms.poll(); // least remaining TermIndices.put(t.Term, i); Terms.set(i, t.Term); // System.out.println("Including term " + t.Term + " (" + t.DocCount + ")"); } }
From source file:com.jxt.web.service.AgentEventServiceImpl.java
private List<AgentEvent> createAgentEvents(List<AgentEventBo> agentEventBos, boolean includeEventMessage) { if (CollectionUtils.isEmpty(agentEventBos)) { return Collections.emptyList(); }// ww w .j a v a2 s . c om List<AgentEvent> agentEvents = new ArrayList<>(agentEventBos.size()); PriorityQueue<DurationalAgentEvent> durationalAgentEvents = new PriorityQueue<>(agentEventBos.size(), AgentEvent.EVENT_TIMESTAMP_ASC_COMPARATOR); for (AgentEventBo agentEventBo : agentEventBos) { if (agentEventBo.getEventType().isCategorizedAs(AgentEventTypeCategory.DURATIONAL)) { durationalAgentEvents.add(createDurationalAgentEvent(agentEventBo, includeEventMessage)); } else { agentEvents.add(createAgentEvent(agentEventBo, includeEventMessage)); } } long durationStartTimestamp = DurationalAgentEvent.UNKNOWN_TIMESTAMP; while (!durationalAgentEvents.isEmpty()) { DurationalAgentEvent currentEvent = durationalAgentEvents.remove(); if (durationStartTimestamp == DurationalAgentEvent.UNKNOWN_TIMESTAMP) { durationStartTimestamp = currentEvent.getEventTimestamp(); } currentEvent.setDurationStartTimestamp(durationStartTimestamp); DurationalAgentEvent nextEvent = durationalAgentEvents.peek(); if (nextEvent != null) { long nextEventTimestamp = nextEvent.getEventTimestamp(); currentEvent.setDurationEndTimestamp(nextEventTimestamp); durationStartTimestamp = nextEventTimestamp; } agentEvents.add(currentEvent); } return agentEvents; }