Example usage for java.util PriorityQueue add

Introduction

On this page you can find usage examples for java.util.PriorityQueue.add, collected from open source projects.

Prototype

public boolean add(E e) 

Document

Inserts the specified element into this priority queue.
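
Before the project examples, here is a minimal, self-contained sketch of the method's behavior. For an unbounded queue like PriorityQueue, add(e) is equivalent to offer(e) and always returns true; elements come back out in priority order via poll(), not in insertion order.

import java.util.PriorityQueue;

public class PriorityQueueAddDemo {
    public static void main(String[] args) {
        PriorityQueue<Integer> pq = new PriorityQueue<>();
        pq.add(5);
        pq.add(1);
        pq.add(3);
        // poll() drains the queue smallest-first, regardless of insertion order
        while (!pq.isEmpty()) {
            System.out.print(pq.poll() + " "); // prints: 1 3 5
        }
    }
}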

Usage

From source file:MSUmpire.PSMDataStructure.ProtID.java

public float GetAbundanceByMS1_TopN(int topN, float pepweight) {
    if (PeptideID.isEmpty()) {
        return 0;
    }
    PriorityQueue<Float> TopQueue = new PriorityQueue<>(PeptideID.size(), Collections.reverseOrder());
    for (PepIonID peptide : PeptideID.values()) {
        if (peptide.PeakHeight != null && peptide.FilteringWeight > pepweight) {
            TopQueue.add(peptide.PeakHeight[0]);
        }
    }

    float totalabundance = 0f;
    int num = Math.min(topN, TopQueue.size());
    if (num == 0) {
        return 0; // nothing passed the weight filter; avoid 0/0 = NaN
    }

    for (int i = 0; i < num; i++) {
        totalabundance += TopQueue.poll();
    }
    return totalabundance / num;
}
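
The heap above is turned into a max-heap by passing Collections.reverseOrder(), so the first topN polls return the largest peak heights. The same pattern in a minimal, stand-alone form (method name and types here are illustrative, not from the project; assumes Java 8+ for the comparator-only constructor):

import java.util.Collections;
import java.util.List;
import java.util.PriorityQueue;

static float averageOfTopN(List<Float> values, int topN) {
    PriorityQueue<Float> maxHeap = new PriorityQueue<>(Collections.reverseOrder());
    maxHeap.addAll(values); // addAll delegates to add(e) for each element
    int num = Math.min(topN, maxHeap.size());
    if (num == 0) {
        return 0f;
    }
    float total = 0f;
    for (int i = 0; i < num; i++) {
        total += maxHeap.poll(); // largest remaining value
    }
    return total / num;
}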

From source file:exploration.rendezvous.MultiPointRendezvousStrategy.java

/**
 * This method finds a point among connectionsToBase (that is in comm range of Base Station)
 * that is closest to origPoint. That is, it's an estimate of the shortest distance we need to
 * travel from origPoint to get into comm range of the Base station
 *
 * @param origPoint the point we are starting from
 * @param connectionsToBase comm links whose remote points are in comm range of the Base Station
 * @param ag the agent used to calculate paths
 * @return the number of paths calculated
 */
public static int findNearestPointInBaseCommRange(NearRVPoint origPoint, List<CommLink> connectionsToBase,
        RealAgent ag) {
    int pathsCalculated = 0;
    // only calculate nearest base point for connectedPoint if we haven't already.
    if (origPoint.distanceToParent == Double.MAX_VALUE) {
        PriorityQueue<NearRVPoint> lineOfSightBasePoints = new PriorityQueue<NearRVPoint>();
        PriorityQueue<NearRVPoint> nonLOSBasePoints = new PriorityQueue<NearRVPoint>();
        for (CommLink baseLink : connectionsToBase) {
            NearRVPoint basePoint = new NearRVPoint(baseLink.getRemotePoint().x, baseLink.getRemotePoint().y);
            double approxPathLen = basePoint.distance(origPoint);
            basePoint.setDistanceToFrontier(approxPathLen);
            if (baseLink.numObstacles == 0) {
                lineOfSightBasePoints.add(basePoint);
            } else {
                nonLOSBasePoints.add(basePoint);
            }
        }

        LinkedList<NearRVPoint> pointsConnectedToBase = new LinkedList<NearRVPoint>();

        for (int j = 0; (j < 5) && !lineOfSightBasePoints.isEmpty(); j++) {
            pointsConnectedToBase.add(lineOfSightBasePoints.poll());
        }

        for (int j = 0; (j < 20) && !nonLOSBasePoints.isEmpty(); j++) {
            pointsConnectedToBase.add(nonLOSBasePoints.poll());
        }

        for (NearRVPoint basePoint : pointsConnectedToBase) {
            pathsCalculated++;
            Path pathToBase = ag.calculatePath(origPoint, basePoint, false, false);
            double pathLen = Double.MAX_VALUE;
            if (pathToBase.found) {
                pathLen = pathToBase.getLength();
            }
            if (pathLen < origPoint.distanceToParent) {
                origPoint.distanceToParent = pathLen;
                origPoint.parentPoint = basePoint;
            }
        }
    }
    return pathsCalculated;
}
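
The two bounded draws above (at most 5 line-of-sight points, at most 20 others) follow the same "poll up to k" idiom. A generic helper, offered as an illustrative sketch (pollUpTo is a hypothetical name, not part of the project); note that it relies on the element type being Comparable, as NearRVPoint evidently is, since both queues are built without a Comparator:

import java.util.ArrayList;
import java.util.List;
import java.util.PriorityQueue;

static <T> List<T> pollUpTo(PriorityQueue<T> queue, int k) {
    List<T> best = new ArrayList<>();
    for (int i = 0; i < k && !queue.isEmpty(); i++) {
        best.add(queue.poll()); // best remaining element first
    }
    return best;
}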

From source file:org.broad.igv.track.PackedFeatures.java

/**
 * Allocates each feature to the rows such that there is no overlap.
 *
 * @param iter TabixLineReader wrapping the collection of alignments. Note that this should
 *             really be an Iterator<T>, but it can't be subclassed if that's the case.
 */
List<FeatureRow> packFeatures(Iterator iter) {

    List<FeatureRow> rows = new ArrayList(10);
    if (iter == null || !iter.hasNext()) {
        return rows;
    }

    maxFeatureLength = 0;
    int totalCount = 0;

    LinkedHashMap<Integer, PriorityQueue<T>> bucketArray = new LinkedHashMap();
    Comparator pqComparator = new Comparator<T>() {
        public int compare(Feature row1, Feature row2) {
            // Longest feature first
            return (row2.getEnd() - row2.getStart()) - (row1.getEnd() - row1.getStart());
        }
    };

    // Allocate features to buckets,  1 bucket per base position
    while (iter.hasNext()) {
        T feature = (T) iter.next();
        maxFeatureLength = Math.max(maxFeatureLength,
                getFeatureEndForPacking(feature) - getFeatureStartForPacking(feature));
        features.add(feature);

        int bucketNumber = getFeatureStartForPacking(feature);

        PriorityQueue<T> bucket = bucketArray.get(bucketNumber);
        if (bucket == null) {
            bucket = new PriorityQueue<T>(5, pqComparator);
            bucketArray.put(bucketNumber, bucket);
        }
        bucket.add(feature);
        totalCount++;

    }

    // Allocate features to rows, pulling at most 1 per bucket for each row
    FeatureRow currentRow = new FeatureRow();
    int allocatedCount = 0;
    int nextStart = Integer.MIN_VALUE;

    int lastKey = 0;
    int lastAllocatedCount = -1;
    while (allocatedCount < totalCount && rows.size() < maxLevels) {

        // Check to prevent infinite loops
        if (lastAllocatedCount == allocatedCount) {

            if (IGV.hasInstance()) {
                String msg = "Infinite loop detected while packing features for track: " + getTrackName()
                        + ".<br>Not all features will be shown."
                        + "<br>Please contact igv-team@broadinstitute.org";

                log.error(msg);
                MessageUtils.showMessage(msg);
            }
            break;
        }
        lastAllocatedCount = allocatedCount;

        // Next row Loop through alignments until we reach the end of the interval

        PriorityQueue<T> bucket = null;
        // Advance to nextLine occupied bucket

        ArrayList<Integer> emptyBucketKeys = new ArrayList();
        for (Integer key : bucketArray.keySet()) {
            //if (key < lastKey) {
            //    String msg = "Features from track: " + trackName + " are not sorted.  Some features might not be shown.<br>" +
            //            "Please notify igv-help@broadinstitute.org";
            //    MessageUtils.showMessage(msg);
            //}
            lastKey = key;
            if (key >= nextStart) {
                bucket = bucketArray.get(key);

                T feature = bucket.poll();

                if (bucket.isEmpty()) {
                    emptyBucketKeys.add(key);
                }
                currentRow.addFeature(feature);
                nextStart = currentRow.end + FeatureTrack.MINIMUM_FEATURE_SPACING;
                allocatedCount++;
            }
        }
        for (Integer key : emptyBucketKeys) {
            bucketArray.remove(key);
        }

        // We've reached the end of the interval,  start a new row
        if (currentRow.features.size() > 0) {
            rows.add(currentRow);
            lastAllocatedCount = 0;
        }
        currentRow = new FeatureRow();
        nextStart = 0;
        lastKey = 0;

    }
    // Add the last row
    if (currentRow.features.size() > 0) {
        rows.add(currentRow);
    }

    return rows;
}
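
The anonymous comparator above orders each bucket's features by length, longest first. With the Java 8 Comparator factory methods the same ordering can be written more directly; this sketch uses a minimal stand-in interface, since the track's actual Feature type lives elsewhere in IGV:

import java.util.Comparator;
import java.util.PriorityQueue;

interface Interval { // minimal stand-in for the track's Feature type
    int getStart();
    int getEnd();
}

class LongestFirstDemo {
    // Longest interval first: descending (end - start)
    static final Comparator<Interval> BY_LENGTH_DESC =
            Comparator.comparingInt((Interval f) -> f.getEnd() - f.getStart()).reversed();

    static PriorityQueue<Interval> newBucket() {
        return new PriorityQueue<>(5, BY_LENGTH_DESC);
    }
}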

From source file:com.joliciel.csvLearner.maxent.MaxentBestFeatureObserver.java

@Override
public void onTerminate() {
    bestFeaturesPerOutcome = new TreeMap<String, List<NameValuePair>>();
    totalPerOutcome = new TreeMap<String, Double>();
    bestFeatureTotalPerOutcome = new TreeMap<String, Double>();
    filePercentagePerOutcome = new TreeMap<String, Map<String, Double>>();
    fileNames = new TreeSet<String>();
    for (Entry<String, Map<String, Double>> entry : featureMap.entrySet()) {
        String outcome = entry.getKey();
        LOG.debug("outcome: " + outcome);
        Map<String, Double> featureTotals = entry.getValue();
        Map<String, Double> fileTotals = new TreeMap<String, Double>();
        PriorityQueue<NameValuePair> heap = new PriorityQueue<NameValuePair>(featureTotals.size(),
                new NameValueDescendingComparator());
        double grandTotal = 0.0;
        for (Entry<String, Double> featureTotal : featureTotals.entrySet()) {
            NameValuePair pair = new NameValuePair(featureTotal.getKey(), featureTotal.getValue());
            heap.add(pair);
            grandTotal += featureTotal.getValue();
            String featureKey = featureTotal.getKey();
            if (featureKey.contains(CSVLearner.NOMINAL_MARKER))
                featureKey = featureKey.substring(0, featureKey.indexOf(CSVLearner.NOMINAL_MARKER));
            String fileName = this.featureToFileMap.get(featureKey);
            Double fileTotalObj = fileTotals.get(fileName);
            double fileTotal = fileTotalObj == null ? 0 : fileTotalObj.doubleValue();
            fileTotals.put(fileName, fileTotal + featureTotal.getValue());
        }
        List<NameValuePair> bestFeatures = new ArrayList<NameValuePair>();
        double bestFeatureTotal = 0.0;
        for (int i = 0; i < n; i++) {
            NameValuePair pair = heap.poll();
            if (pair == null)
                break;

            LOG.debug("Feature: " + pair.getName() + ", Total: " + pair.getValue());
            bestFeatures.add(pair);
            bestFeatureTotal += pair.getValue();
        }
        bestFeaturesPerOutcome.put(outcome, bestFeatures);
        totalPerOutcome.put(outcome, grandTotal);
        bestFeatureTotalPerOutcome.put(outcome, bestFeatureTotal);

        // convert the file totals to percentages
        for (Entry<String, Double> fileTotal : fileTotals.entrySet()) {
            double filePercentage = fileTotal.getValue() / grandTotal;
            fileTotal.setValue(filePercentage);
            fileNames.add(fileTotal.getKey());
        }
        filePercentagePerOutcome.put(outcome, fileTotals);

        featureTotals.clear();
    }
    featureMap.clear();
    featureMap = null;
}

From source file:org.onebusaway.uk.network_rail.gtfs_realtime.graph.PositionBerthToStanoxGraphMain.java

private Map<Location, Integer> getNearbyNodesWithLocation(Map<RawBerthNode, Location> nodesToLocations,
        RawBerthNode source, int minCount) {

    Map<Location, Integer> locationsAndTime = new HashMap<Location, Integer>();

    PriorityQueue<OrderedNode> queue = new PriorityQueue<OrderedNode>();
    queue.add(new OrderedNode(source, 0));

    Set<RawBerthNode> visited = new HashSet<RawBerthNode>();
    visited.add(source);

    Map<RawBerthNode, Integer> minTimeToSource = new HashMap<RawBerthNode, Integer>();

    while (!queue.isEmpty()) {
        OrderedNode orderedNode = queue.poll();
        RawBerthNode node = orderedNode.node;
        if (minTimeToSource.containsKey(node)) {
            continue;
        }
        int time = orderedNode.value;
        minTimeToSource.put(node, time);
        if (nodesToLocations.containsKey(node)) {
            locationsAndTime.put(nodesToLocations.get(node), time);
            if (locationsAndTime.size() >= minCount) {
                return locationsAndTime;
            }
        }

        for (Edge edge : node.getEdges()) {
            RawBerthNode to = edge.getTo();
            int proposedTime = edge.getAverageDuration() + time;
            if (!minTimeToSource.containsKey(to)) {
                queue.add(new OrderedNode(to, proposedTime));
            }
        }
    }

    return locationsAndTime;
}
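
This is Dijkstra's algorithm with "lazy deletion": PriorityQueue has no efficient decrease-key, so each improved distance is simply added as a new entry, and entries for already-settled nodes are skipped when polled. A compact stand-alone sketch of the pattern (the graph is an adjacency map of node -> (neighbor -> edge weight); names are illustrative; assumes Java 9+ for Map.of):

import java.util.HashMap;
import java.util.Map;
import java.util.PriorityQueue;

static Map<Integer, Integer> settleAll(Map<Integer, Map<Integer, Integer>> graph, int source) {
    // Each entry is {node, tentativeDistance}, ordered by distance
    PriorityQueue<int[]> queue = new PriorityQueue<>((a, b) -> Integer.compare(a[1], b[1]));
    queue.add(new int[] { source, 0 });
    Map<Integer, Integer> settled = new HashMap<>();
    while (!queue.isEmpty()) {
        int[] entry = queue.poll();
        if (settled.containsKey(entry[0])) {
            continue; // stale entry: this node was settled with a smaller distance
        }
        settled.put(entry[0], entry[1]);
        for (Map.Entry<Integer, Integer> edge : graph.getOrDefault(entry[0], Map.of()).entrySet()) {
            if (!settled.containsKey(edge.getKey())) {
                queue.add(new int[] { edge.getKey(), entry[1] + edge.getValue() });
            }
        }
    }
    return settled;
}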

From source file:net.sourceforge.jasa.market.FourHeapOrderBook.java

/**
 * Remove, possibly several, shouts from heap such that quantity(heap) is
 * reduced by the supplied quantity and reinsert the shouts using the
 * standard insertion logic. quantity(heap) is defined as the total quantity
 * of every shout in the heap.
 * 
 * @param heap
 *            The heap to remove shouts from.
 * @param quantity
 *            The total quantity to remove.
 */
protected void reinsert(PriorityQueue<Order> heap, int quantity) {

    while (quantity > 0) {

        Order top = (Order) heap.remove();

        if (top.getQuantity() > quantity) {
            heap.add(top.split(top.getQuantity() - quantity));
        }

        quantity -= top.getQuantity();

        try {
            if (top.isBid()) {
                addBid(top);
            } else {
                addAsk(top);
            }
        } catch (DuplicateShoutException e) {
            throw new AuctionRuntimeException("Invalid market state");
        }
    }

}
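
Note the no-argument remove() used here: it retrieves the head like poll(), but throws NoSuchElementException if the queue is empty, which matches this method's implicit precondition that the heap holds at least quantity units. A minimal sketch of the difference:

import java.util.NoSuchElementException;
import java.util.PriorityQueue;

public class RemoveVsPollDemo {
    public static void main(String[] args) {
        PriorityQueue<Integer> pq = new PriorityQueue<>();
        pq.add(42);
        System.out.println(pq.remove()); // 42: same as poll() when non-empty
        System.out.println(pq.poll());   // null: poll() is the non-throwing variant
        try {
            pq.remove();                 // remove() throws on an empty queue
        } catch (NoSuchElementException expected) {
            System.out.println("empty");
        }
    }
}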

From source file:classif.ahc.AHCSymbolicSequence.java

public void cluster() {

    // cache all distances
    distances = new double[data.size()][data.size()];
    for (int i = 0; i < data.size(); i++) {
        for (int j = i + 1; j < data.size(); j++) {
            distances[i][j] = data.get(i).distance(data.get(j));
            distances[j][i] = distances[i][j];
        }
    }
    System.out.println("distances cached");

    ArrayList<Integer>[] nClusterID = new ArrayList[data.size()];
    for (int i = 0; i < data.size(); i++) {
        nClusterID[i] = new ArrayList<Integer>();
        nClusterID[i].add(i);
    }
    int nClusters = data.size();

    int nInstances = data.size();
    Node[] clusterNodes = new Node[data.size()];

    PriorityQueue<Tuple> queue = new PriorityQueue<Tuple>(nClusters, new TupleComparator());
    double[][] fDistance0 = new double[nClusters][nClusters];
    for (int i = 0; i < nClusters; i++) {
        fDistance0[i][i] = 0;
        for (int j = i + 1; j < nClusters; j++) {
            fDistance0[i][j] = getDistanceClusters(nClusterID[i], nClusterID[j]);
            fDistance0[j][i] = fDistance0[i][j];
            queue.add(new Tuple(fDistance0[i][j], i, j, 1, 1));
        }
    }

    centroidsForNumberOfClusters = new ArrayList[data.size() + 1];
    centroidsForNumberOfClusters[data.size()] = new ArrayList<Sequence>();
    for (int i = 0; i < data.size(); i++) {
        centroidsForNumberOfClusters[data.size()].add(data.get(i));
    }

    while (nClusters > 1) {
        System.out.println("nClusters left = " + nClusters);
        int iMin1 = -1;
        int iMin2 = -1;
        Tuple t;
        do {
            t = queue.poll();
        } while (t != null && (nClusterID[t.m_iCluster1].size() != t.m_nClusterSize1
                || nClusterID[t.m_iCluster2].size() != t.m_nClusterSize2));
        if (t == null) {
            break; // queue exhausted without a valid (non-stale) pair
        }
        iMin1 = t.m_iCluster1;
        iMin2 = t.m_iCluster2;

        centroidsForNumberOfClusters[nClusters
                - 1] = (ArrayList<Sequence>) centroidsForNumberOfClusters[nClusters].clone();

        merge(iMin1, iMin2, t.m_fDist, t.m_fDist, nClusterID, centroidsForNumberOfClusters[nClusters - 1],
                clusterNodes, distances);
        for (int i = 0; i < nInstances; i++) {
            if (i != iMin1 && nClusterID[i].size() != 0) {
                int i1 = Math.min(iMin1, i);
                int i2 = Math.max(iMin1, i);
                double fDistance = getDistanceClusters(nClusterID[i1], nClusterID[i2]);
                queue.add(new Tuple(fDistance, i1, i2, nClusterID[i1].size(), nClusterID[i2].size()));
            }
        }

        nClusters--;

    }
    System.out.println("Clustering done for all possible cuts");

}
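
Worth noting: rather than removing outdated pairs when clusters merge (PriorityQueue.remove(Object) is linear time), each Tuple records the cluster sizes at insertion, and the poll loop discards any tuple whose recorded sizes no longer match the current clusters. This is the same lazy-invalidation idea as the stale-entry check in the graph example above: cheap adds, with validity checked on the way out.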

From source file:edu.snu.leader.hidden.MetricSpatialIndividual.java

/**
 * Finds this individual's nearest neighbors within the metric nearest-neighbor distance.
 *
 * @param simState
 * @see edu.snu.leader.hidden.SpatialIndividual#findNearestNeighbors(edu.snu.leader.hidden.SimulationState)
 */
@Override
public void findNearestNeighbors(SimulationState simState) {
    _LOG.trace("Entering findNearestNeighbors( simState )");

    // Get the metric distance to calculate the nearest neighbors
    _nearestNeighborDistance = simState.getNearestNeighborDistance();

    // Build a priority queue to sort things for us
    PriorityQueue<Neighbor> sortedNeighbors = new PriorityQueue<Neighbor>();

    // Iterate through all the individuals
    Iterator<SpatialIndividual> indIter = simState.getAllIndividuals().iterator();
    while (indIter.hasNext()) {
        // Get the individual
        SpatialIndividual ind = indIter.next();

        // If it is us, continue on
        if (_id.equals(ind._id)) {
            continue;
        }

        // Build a neighbor out of it and put it in the queue
        Neighbor neighbor = new Neighbor((float) _location.distance(ind._location), ind);
        sortedNeighbors.add(neighbor);
    }

    // Get all the neighbors within the specified distance
    Iterator<Neighbor> neighborIter = sortedNeighbors.iterator();
    while (neighborIter.hasNext()) {
        Neighbor neighbor = neighborIter.next();

        // Is it within the distance?
        if (neighbor.getDistance() <= _nearestNeighborDistance) {
            // Yup
            _nearestNeighbors.add(neighbor);
            neighbor.getIndividual().signalNearestNeighborStatus(this);
        }
        //            else
        //            {
        //                // We can bail because the neighbors are sorted by distance
        //                // from closest to farthest
        //                break;
        //            }
    }

    _LOG.trace("Leaving findNearestNeighbors( simState )");

}
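
One caveat in the loop above: PriorityQueue's iterator() makes no ordering guarantee; only poll() and peek() follow priority order. The code is still correct because it examines every neighbor, but the commented-out early break would be unsafe with an iterator. To walk neighbors closest-first (and break early safely), drain the queue instead; a sketch reusing the snippet's own identifiers:

// Drain in priority order: closest neighbor first
while (!sortedNeighbors.isEmpty()) {
    Neighbor neighbor = sortedNeighbors.poll();
    if (neighbor.getDistance() > _nearestNeighborDistance) {
        break; // safe: all remaining neighbors are at least this far away
    }
    _nearestNeighbors.add(neighbor);
    neighbor.getIndividual().signalNearestNeighborStatus(this);
}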

From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java

/**
 * Performs a spatial join between records in two R-trees
 * @param R the first R-tree
 * @param S the second R-tree
 * @param output collects each pair of overlapping records
 * @param reporter used to report progress; may be null
 * @return the number of overlapping pairs found
 * @throws IOException
 * SuppressWarnings("resource") is used because we create LineReaders on the
 * internal data streams of both R and S. We do not want to close the
 * LineReaders because that would subsequently close the internal data streams
 * of R and S, which we want to avoid because R and S are not created by this
 * function and should not have their resources freed here.
 */
protected static <S1 extends Shape, S2 extends Shape> int spatialJoinDisk(final RTree<S1> R, final RTree<S2> S,
        final ResultCollector2<S1, S2> output, final Reporter reporter) throws IOException {
    PriorityQueue<Long> nodesToJoin = new PriorityQueue<Long>(R.nodeCount + S.nodeCount);

    // Start with the two roots
    nodesToJoin.add(0L);

    // Caches to keep the retrieved data records. Helpful when it reaches the
    // leaves and starts to read objects from the two trees
    LruCache<Integer, Shape[]> r_records_cache = new LruCache<Integer, Shape[]>(R.degree * 2);
    LruCache<Integer, Shape[]> s_records_cache = new LruCache<Integer, Shape[]>(S.degree * R.degree * 4);

    Text line = new Text2();

    int result_count = 0;

    LineReader r_lr = null, s_lr = null;
    // Last offset read from r and s
    int r_last_offset = 0;
    int s_last_offset = 0;

    while (!nodesToJoin.isEmpty()) {
        long nodes_to_join = nodesToJoin.remove();
        int r_node = (int) (nodes_to_join >>> 32); // high 32 bits
        int s_node = (int) (nodes_to_join & 0xFFFFFFFFL); // low 32 bits; long mask so it actually selects them

        // Compute the overlap between the children of the two nodes
        // If a node is non-leaf, its children are other nodes
        // If a node is leaf, its children are data records
        boolean r_leaf = r_node >= R.nonLeafNodeCount;
        boolean s_leaf = s_node >= S.nonLeafNodeCount;

        if (!r_leaf && !s_leaf) {
            // Both are internal nodes, read child nodes under them
            // Find overlaps using a simple cross join (TODO: Use plane-sweep)
            for (int i = 0; i < R.degree; i++) {
                int new_r_node = r_node * R.degree + i + 1;
                for (int j = 0; j < S.degree; j++) {
                    int new_s_node = s_node * S.degree + j + 1;
                    if (R.nodes[new_r_node].isIntersected(S.nodes[new_s_node])) {
                        long new_pair = (((long) new_r_node) << 32) | new_s_node;
                        nodesToJoin.add(new_pair);
                    }
                }
            }
        } else if (r_leaf && !s_leaf) {
            // R is a leaf node while S is an internal node
            // Compare the leaf node in R against all child nodes of S
            for (int j = 0; j < S.degree; j++) {
                int new_s_node = s_node * S.degree + j + 1;
                if (R.nodes[r_node].isIntersected(S.nodes[new_s_node])) {
                    long new_pair = (((long) r_node) << 32) | new_s_node;
                    nodesToJoin.add(new_pair);
                }
            }
        } else if (!r_leaf && s_leaf) {
            // R is an internal node while S is a leaf node
            // Compare child nodes of R against the leaf node in S
            for (int i = 0; i < R.degree; i++) {
                int new_r_node = r_node * R.degree + i + 1;
                if (R.nodes[new_r_node].isIntersected(S.nodes[s_node])) {
                    long new_pair = (((long) new_r_node) << 32) | s_node;
                    nodesToJoin.add(new_pair);
                }
            }
        } else if (r_leaf && s_leaf) {
            // Both are leaf nodes, join objects under them
            int r_start_offset = R.dataOffset[r_node];
            int r_end_offset = R.dataOffset[r_node + 1];
            int s_start_offset = S.dataOffset[s_node];
            int s_end_offset = S.dataOffset[s_node + 1];

            // Read or retrieve r_records
            Shape[] r_records = r_records_cache.get(r_start_offset);
            if (r_records == null) {
                int cache_key = r_start_offset;
                r_records = r_records_cache.popUnusedEntry();
                if (r_records == null) {
                    r_records = new Shape[R.degree * 2];
                }

                // Need to read it from stream
                if (r_last_offset != r_start_offset) {
                    long seekTo = r_start_offset + R.treeStartOffset;
                    R.data.seek(seekTo);
                    r_lr = new LineReader(R.data);
                }
                int record_i = 0;
                while (r_start_offset < r_end_offset) {
                    r_start_offset += r_lr.readLine(line);
                    if (r_records[record_i] == null)
                        r_records[record_i] = R.stockObject.clone();
                    r_records[record_i].fromText(line);
                    record_i++;
                }
                r_last_offset = r_start_offset;
                // Nullify other records
                while (record_i < r_records.length)
                    r_records[record_i++] = null;
                r_records_cache.put(cache_key, r_records);
            }

            // Read or retrieve s_records
            Shape[] s_records = s_records_cache.get(s_start_offset);
            if (s_records == null) {
                int cache_key = s_start_offset;

                // Need to read it from stream
                if (s_lr == null || s_last_offset != s_start_offset) {
                    // Need to reposition s_lr (LineReader of S)
                    long seekTo = s_start_offset + S.treeStartOffset;
                    S.data.seek(seekTo);
                    s_lr = new LineReader(S.data);
                }
                s_records = s_records_cache.popUnusedEntry();
                if (s_records == null) {
                    s_records = new Shape[S.degree * 2];
                }
                int record_i = 0;
                while (s_start_offset < s_end_offset) {
                    s_start_offset += s_lr.readLine(line);
                    if (s_records[record_i] == null)
                        s_records[record_i] = S.stockObject.clone();
                    s_records[record_i].fromText(line);
                    record_i++;
                }
                // Nullify other records
                while (record_i < s_records.length)
                    s_records[record_i++] = null;
                // Put in cache
                s_records_cache.put(cache_key, s_records);
                s_last_offset = s_start_offset;
            }

            // Do Cartesian product between records to find overlapping pairs
            for (int i_r = 0; i_r < r_records.length && r_records[i_r] != null; i_r++) {
                for (int i_s = 0; i_s < s_records.length && s_records[i_s] != null; i_s++) {
                    if (r_records[i_r].isIntersected(s_records[i_s])
                            && !r_records[i_r].equals(s_records[i_s])) {
                        result_count++;
                        if (output != null) {
                            output.collect((S1) r_records[i_r], (S2) s_records[i_s]);
                        }
                    }
                }
            }
        }
        if (reporter != null)
            reporter.progress();
    }
    return result_count;
}
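
Each pending pair of nodes is packed into one long (the R node id in the high 32 bits, the S node id in the low 32 bits) so a single PriorityQueue<Long> can order them. A self-contained sketch of the pack/unpack arithmetic, with the low mask written as a long literal to make the truncation explicit:

public class PackPairDemo {
    static long pack(int rNode, int sNode) {
        return (((long) rNode) << 32) | (sNode & 0xFFFFFFFFL);
    }

    static int rNode(long pair) {
        return (int) (pair >>> 32); // high 32 bits
    }

    static int sNode(long pair) {
        return (int) (pair & 0xFFFFFFFFL); // low 32 bits
    }

    public static void main(String[] args) {
        long pair = pack(7, 42);
        System.out.println(rNode(pair) + ", " + sNode(pair)); // 7, 42
    }
}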

From source file:com.facebook.FileLruCache.java

private void trim() {
    Logger.log(LoggingBehaviors.CACHE, TAG, "trim started");
    PriorityQueue<ModifiedFile> heap = new PriorityQueue<ModifiedFile>();
    long size = 0;
    long count = 0;
    for (File file : this.directory.listFiles(BufferFile.excludeBufferFiles())) {
        ModifiedFile modified = new ModifiedFile(file);
        heap.add(modified);
        Logger.log(LoggingBehaviors.CACHE, TAG, "  trim considering time="
                + Long.valueOf(modified.getModified()) + " name=" + modified.getFile().getName());

        size += file.length();
        count++;
    }

    while ((size > limits.getByteCount()) || (count > limits.getFileCount())) {
        File file = heap.remove().getFile();
        Logger.log(LoggingBehaviors.CACHE, TAG, "  trim removing " + file.getName());
        size -= file.length();
        count--;
        file.delete();
    }
}
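
The trim logic generalizes cleanly: a min-heap ordered by last-modified time always surfaces the oldest file, so eviction is a matter of polling until the limits are met. A stand-alone sketch under that assumption (names and limits here are illustrative, not from the Facebook SDK; assumes Java 8+ for Comparator.comparingLong):

import java.io.File;
import java.util.Comparator;
import java.util.PriorityQueue;

static void trimToLimits(File directory, long maxBytes, long maxFiles) {
    // Oldest modification time at the head of the heap
    PriorityQueue<File> heap =
            new PriorityQueue<>(Comparator.comparingLong(File::lastModified));
    long size = 0;
    long count = 0;
    File[] files = directory.listFiles();
    if (files == null) {
        return; // not a directory, or an I/O error occurred
    }
    for (File file : files) {
        heap.add(file);
        size += file.length();
        count++;
    }
    while ((size > maxBytes || count > maxFiles) && !heap.isEmpty()) {
        File oldest = heap.remove();
        size -= oldest.length();
        count--;
        oldest.delete();
    }
}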