Example usage for java.util PriorityQueue poll

List of usage examples for java.util PriorityQueue poll

Introduction

On this page you can find usage examples for java.util.PriorityQueue.poll().

Prototype

public E poll() 

Source Link

Usage

From source file:org.apache.hadoop.hdfs.server.namenode.FSEditLogTestUtil.java

/**
 * Returns the edit log input stream from {@code jm} that contains transaction
 * {@code txId}, positioned at that transaction, or {@code null} if no selected
 * stream covers it.  All streams that are not returned are closed.
 *
 * @param jm           the journal manager to select streams from
 * @param txId         the transaction id to locate
 * @param inProgressOk whether non-finalized (in-progress) logs may be used
 * @throws IOException if selecting, skipping, or closing a stream fails
 */
public static EditLogInputStream getJournalInputStream(JournalManager jm, long txId, boolean inProgressOk)
        throws IOException {
    // Streams ordered by EDIT_LOG_INPUT_STREAM_COMPARATOR, so poll() yields
    // them in ascending order of starting transaction id.
    final PriorityQueue<EditLogInputStream> allStreams = new PriorityQueue<EditLogInputStream>(64,
            JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR);
    jm.selectInputStreams(allStreams, txId, inProgressOk, true);
    EditLogInputStream elis = null, ret;
    try {
        while ((elis = allStreams.poll()) != null) {
            if (elis.getFirstTxId() > txId) {
                // Streams arrive in ascending first-txid order; once we pass
                // txId no later stream can contain it.
                break;
            }
            if (elis.getLastTxId() < txId) {
                // Stream ends before txId: close it and keep scanning.
                elis.close();
                continue;
            }
            elis.skipUntil(txId);
            ret = elis;
            // Detach the stream we are returning so the finally block below
            // does not close it.
            elis = null;
            return ret;
        }
    } finally {
        // Close every remaining stream; the returned one (if any) was
        // detached from elis above and therefore survives this cleanup.
        IOUtils.cleanup(FSEditLogTestUtil.LOG, allStreams.toArray(new EditLogInputStream[0]));
        IOUtils.cleanup(FSEditLogTestUtil.LOG, elis);
    }
    return null;
}

From source file:org.apache.hadoop.hdfs.server.namenode.FSEditLogTestUtil.java

/**
 * Find out how many transactions we can read from a
 * FileJournalManager, starting at a given transaction ID.
 *
 * @param jm              The journal manager
 * @param fromTxId        Transaction ID to start at
 * @param inProgressOk    Should we consider edit logs that are not finalized?
 * @param abortOnGap      If true, stop counting and return as soon as a gap
 *                        in the transaction ID sequence is detected
 * @return                The number of transactions
 * @throws IOException    If reading from a stream fails
 */
public static long getNumberOfTransactions(JournalManager jm, long fromTxId, boolean inProgressOk,
        boolean abortOnGap) throws IOException {
    long numTransactions = 0, txId = fromTxId;
    final PriorityQueue<EditLogInputStream> allStreams = new PriorityQueue<EditLogInputStream>(64,
            JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR);
    jm.selectInputStreams(allStreams, fromTxId, inProgressOk, true);
    EditLogInputStream elis = null;
    try {
        while ((elis = allStreams.poll()) != null) {
            elis.skipUntil(txId);
            // Drain this stream, counting ops until it is exhausted.
            while (true) {
                FSEditLogOp op = elis.readOp();
                if (op == null) {
                    break;
                }
                if (abortOnGap && (op.getTransactionId() != txId)) {
                    // Fix: report the txId where the gap was actually
                    // detected; previously this logged the starting fromTxId,
                    // which is misleading when diagnosing gaps.
                    TestFileJournalManager.LOG
                            .info("getNumberOfTransactions: detected gap at txId " + txId);
                    return numTransactions;
                }
                txId = op.getTransactionId() + 1;
                numTransactions++;
            }
        }
    } finally {
        // Close all remaining streams, plus the one being read when an
        // exception or early return occurred.
        IOUtils.cleanup(FSEditLogTestUtil.LOG, allStreams.toArray(new EditLogInputStream[0]));
        IOUtils.cleanup(FSEditLogTestUtil.LOG, elis);
    }
    return numTransactions;
}

From source file:com.uber.stream.kafka.mirrormaker.controller.core.IdealStateBuilder.java

/**
 * Builds a CUSTOM-mode Helix ideal state for {@code topicName}, assigning each
 * partition to the head of {@code instanceToNumServingTopicPartitionMap} (the
 * currently least-loaded instance, per the queue's ordering).
 */
public static IdealState buildCustomIdealStateFor(String topicName, int numTopicPartitions,
        PriorityQueue<InstanceTopicPartitionHolder> instanceToNumServingTopicPartitionMap) {

    final CustomModeISBuilder builder = new CustomModeISBuilder(topicName);
    builder.setStateModel(OnlineOfflineStateModel.name)
            .setNumPartitions(numTopicPartitions)
            .setNumReplica(1)
            .setMaxPartitionsPerNode(numTopicPartitions);

    for (int partition = 0; partition < numTopicPartitions; ++partition) {
        InstanceTopicPartitionHolder instance = instanceToNumServingTopicPartitionMap.poll();
        if (instance != null) {
            builder.assignInstanceAndState(Integer.toString(partition), instance.getInstanceName(), "ONLINE");
            instance.addTopicPartition(new TopicPartition(topicName, partition));
            // Re-insert so the instance is re-ranked under its new load.
            instanceToNumServingTopicPartitionMap.add(instance);
        }
    }
    return builder.build();
}

From source file:org.sample.whiteboardapp.MyWhiteboard.java

/**
 * Finds the k nearest neighbours of {@code Qpoint} in the k-d tree rooted at
 * {@code root} and returns their coordinates as a JSON object of the form
 * {"latitude": [...], "longitude": [...]}.
 *
 * Neighbours are emitted in order of decreasing distance (the max-heap is
 * drained worst-first).
 */
static JSONObject findKNN(double[] Qpoint, Node root, int k) {
    JSONObject coordinates = new JSONObject();
    JSONArray lat_json = new JSONArray();
    JSONArray long_json = new JSONArray();
    // Max-heap of the best k distances found so far; hm maps each distance
    // back to its node.
    // NOTE(review): two nodes at exactly the same distance share a map key,
    // so one of them is silently lost — confirm callers can tolerate this.
    PriorityQueue<Double> pq = new PriorityQueue<Double>(10, Collections.reverseOrder());
    // Fix: was a raw `new HashMap()` (unchecked assignment).
    HashMap<Double, Node> hm = new HashMap<Double, Node>();
    searchKDSubtree(pq, hm, root, Qpoint, k, 0);
    while (!pq.isEmpty()) {
        Node ans = hm.get(pq.poll());
        lat_json.add(ans.point[0]);
        long_json.add(ans.point[1]);
    }
    coordinates.put("latitude", lat_json);
    coordinates.put("longitude", long_json);
    return coordinates;
}

From source file:Main.java

/**
 * Returns the given strings sorted by comparing only their alphanumeric
 * characters, in descending lexicographic order (non-alphanumerics are
 * stripped before comparison; {@code rhs} is compared to {@code lhs}).
 *
 * @param toSort the strings to sort; not modified
 * @return a new mutable list containing the sorted strings
 */
public static List<String> getSortedStringList(Collection<String> toSort) {
    // Fix: the original accumulated into a LinkedList after manually draining
    // a PriorityQueue — a hand-rolled heap sort.  Copying into a presized
    // ArrayList and using the stdlib sort is simpler and equivalent.
    List<String> sorted = new ArrayList<String>(toSort);
    Collections.sort(sorted, new Comparator<String>() {
        @Override
        public int compare(String lhs, String rhs) {
            String l = lhs.replaceAll("[^a-zA-Z0-9]", "");
            String r = rhs.replaceAll("[^a-zA-Z0-9]", "");
            // Reversed operands => descending order.
            return r.compareTo(l);
        }
    });
    return sorted;
}

From source file:org.apache.pig.builtin.TOP.java

/**
 * Folds every tuple of {@code inputBag} into the bounded heap {@code store}:
 * each tuple is added, and whenever the heap grows past {@code limit} the
 * head (the least element under the heap's ordering) is evicted, leaving the
 * top {@code limit} tuples in {@code store}.
 */
protected static void updateTop(PriorityQueue<Tuple> store, int limit, DataBag inputBag) {
    for (Iterator<Tuple> it = inputBag.iterator(); it.hasNext();) {
        store.add(it.next());
        if (store.size() > limit) {
            store.poll();
        }
    }
}

From source file:org.sample.whiteboardapp.MyWhiteboard.java

/**
 * Recursive k-nearest-neighbour search over a 2-D k-d tree.
 *
 * @param pq     max-heap (reverse-ordered) of the best k distances found so far
 * @param hm     maps each accepted distance back to its node.
 *               NOTE(review): equal distances collide on the same key and one
 *               node is lost — confirm callers tolerate this.
 * @param root   subtree to search (must be non-null)
 * @param Qpoint query point (2 coordinates)
 * @param k      number of neighbours requested
 * @param depth  current tree level; the splitting dimension is depth % 2
 */
static void searchKDSubtree(PriorityQueue<Double> pq, HashMap<Double, Node> hm, Node root, double[] Qpoint,
        int k, int depth) {
    int dim = depth; // splitting dimension at this level (alternates 0 / 1)
    double dist = Distance(Qpoint, root.point);

    if (pq.size() < k) {
        // Fewer than k candidates so far: accept unconditionally.
        pq.add(dist);
        hm.put(dist, root);
    } else if (dist < pq.peek()) {
        // Closer than the current worst candidate: replace it.
        pq.poll();
        pq.add(dist);
        hm.put(dist, root);
    }

    // Recurse into the near side first; remember the far side for the
    // pruned check below.
    Node child;
    if (Qpoint[dim] < root.point[dim]) {
        if (root.left != null) {
            searchKDSubtree(pq, hm, root.left, Qpoint, k, (depth + 1) % 2);
        }
        // Fix: the far child must be considered even when the near child is
        // null; previously `child` was only assigned inside the null check,
        // so the far subtree was skipped and neighbours could be missed.
        child = root.right;
    } else {
        if (root.right != null) {
            searchKDSubtree(pq, hm, root.right, Qpoint, k, (depth + 1) % 2);
        }
        child = root.left;
    }

    // Visit the far side if we still need candidates, or the splitting plane
    // is closer than the worst accepted distance.  Fix: use the absolute
    // difference — the signed form was always-true on one side of the plane,
    // disabling pruning there.  Assumes Distance() returns a plain (not
    // squared) Euclidean distance — TODO confirm; a squared distance would
    // make this plane-distance comparison invalid.
    if (child != null && (pq.size() < k || Math.abs(Qpoint[dim] - root.point[dim]) < pq.peek())) {
        searchKDSubtree(pq, hm, child, Qpoint, k, (depth + 1) % 2);
    }
}

From source file:net.spfbl.core.Huffman.java

/**
 * Standard Huffman tree construction: seed a min-heap with one leaf per
 * symbol that occurs in {@code frequency}, then repeatedly merge the two
 * least-frequent subtrees until a single root remains.
 */
private static Huffman buildTree(int[] frequency) {
    PriorityQueue<Huffman> heap = new PriorityQueue<Huffman>();
    for (char symbol = 0; symbol < 256; symbol++) {
        if (frequency[symbol] > 0) {
            heap.add(new Huffman(symbol, frequency[symbol], null, null));
        }
    }
    // A single-symbol alphabet needs a zero-frequency filler leaf so the
    // resulting tree has at least one branch (and thus 1-bit codes).  Pick a
    // filler symbol distinct from the one already present.
    if (heap.size() == 1) {
        char filler = (frequency['\0'] == 0) ? '\0' : '\1';
        heap.add(new Huffman(filler, 0, null, null));
    }
    while (heap.size() > 1) {
        Huffman lo = heap.poll();
        Huffman hi = heap.poll();
        // Internal nodes carry the combined frequency; their symbol is unused.
        heap.add(new Huffman('\0', lo.frequency + hi.frequency, lo, hi));
    }
    return heap.poll();
}

From source file:com.uber.stream.kafka.mirrormaker.common.utils.HelixUtils.java

/**
 * Builds a CUSTOM-mode Helix ideal state for {@code topicName}, assigning each
 * partition to the head of {@code instanceToNumServingTopicPartitionMap} (the
 * currently least-loaded instance).  Poll/re-add of the shared queue is done
 * under its monitor so concurrent rebalances see a consistent ordering.
 */
public static IdealState buildCustomIdealStateFor(String topicName, int numTopicPartitions,
        PriorityQueue<InstanceTopicPartitionHolder> instanceToNumServingTopicPartitionMap) {

    final CustomModeISBuilder customModeIdealStateBuilder = new CustomModeISBuilder(topicName);

    customModeIdealStateBuilder.setStateModel(OnlineOfflineStateModel.name).setNumPartitions(numTopicPartitions)
            .setNumReplica(1).setMaxPartitionsPerNode(numTopicPartitions);

    for (int i = 0; i < numTopicPartitions; ++i) {
        synchronized (instanceToNumServingTopicPartitionMap) {
            InstanceTopicPartitionHolder liveInstance = instanceToNumServingTopicPartitionMap.poll();
            // Fix: poll() returns null on an empty queue, which previously
            // caused a NullPointerException at getInstanceName().  Skip the
            // assignment when no live instance is available, matching the
            // IdealStateBuilder variant of this method.
            if (liveInstance != null) {
                customModeIdealStateBuilder.assignInstanceAndState(Integer.toString(i),
                        liveInstance.getInstanceName(), "ONLINE");
                liveInstance.addTopicPartition(new TopicPartition(topicName, i));
                // Re-insert so the instance is re-ranked under its new load.
                instanceToNumServingTopicPartitionMap.add(liveInstance);
            }
        }
    }
    return customModeIdealStateBuilder.build();
}

From source file:org.apache.hadoop.hdfs.server.namenode.TestFileJournalManager.java

/**
 * Locates the edit log stream from {@code jm} that contains transaction
 * {@code txId}, positioned at that transaction, or returns {@code null} when
 * no selected stream covers it.  Streams that are not returned are closed.
 */
private static EditLogInputStream getJournalInputStream(JournalManager jm, long txId, boolean inProgressOk)
        throws IOException {
    // Ordered by EDIT_LOG_INPUT_STREAM_COMPARATOR: poll() yields streams in
    // ascending order of starting transaction id.
    final PriorityQueue<EditLogInputStream> allStreams = new PriorityQueue<EditLogInputStream>(64,
            JournalSet.EDIT_LOG_INPUT_STREAM_COMPARATOR);
    jm.selectInputStreams(allStreams, txId, inProgressOk);
    EditLogInputStream candidate = null;
    try {
        for (candidate = allStreams.poll(); candidate != null; candidate = allStreams.poll()) {
            if (candidate.getFirstTxId() > txId) {
                break; // remaining streams all start after txId
            }
            if (candidate.getLastTxId() < txId) {
                candidate.close(); // ends before txId; discard and keep scanning
                continue;
            }
            candidate.skipUntil(txId);
            EditLogInputStream found = candidate;
            candidate = null; // detach so the finally block does not close it
            return found;
        }
    } finally {
        // Close every stream we are not returning; the returned one was
        // detached from `candidate` above.
        IOUtils.cleanup(LOG, allStreams.toArray(new EditLogInputStream[0]));
        IOUtils.cleanup(LOG, candidate);
    }
    return null;
}