Example usage for java.util.PriorityQueue PriorityQueue(int, Comparator)

Introduction

On this page you can find example usage for the java.util.PriorityQueue constructor PriorityQueue(int initialCapacity, Comparator<? super E> comparator).

Prototype

public PriorityQueue(int initialCapacity, Comparator<? super E> comparator) 

Document

Creates a PriorityQueue with the specified initial capacity that orders its elements according to the specified comparator.
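
For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below; the LongestFirstDemo class is purely illustrative) that uses this constructor to build a max-heap of strings ordered by length:

import java.util.Comparator;
import java.util.PriorityQueue;

public class LongestFirstDemo {
    public static void main(String[] args) {
        // Reversed length comparator: the head of the queue is the longest string.
        PriorityQueue<String> byLength = new PriorityQueue<>(16,
                Comparator.comparingInt(String::length).reversed());
        byLength.add("pear");
        byLength.add("fig");
        byLength.add("banana");
        System.out.println(byLength.poll()); // prints "banana"
    }
}

The initial capacity (16 here) is only a sizing hint; the queue grows automatically as elements are added.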

Usage

From source file:edu.utsa.sifter.som.MainSOM.java

void initTerms() throws IOException {
    final Terms terms = MultiFields.getTerms(Reader, "body");

    System.out.println("number of terms in index: " + terms.size());
    final PriorityQueue<TermPair> topTerms = new PriorityQueue<TermPair>(Conf.MAX_VECTOR_FEATURES,
            new TermPair.TermPairComparator());

    int num = 0;
    TermsEnum term = terms.iterator(null);
    while (term.next() != null) {
        final int count = term.docFreq();
        final double r = ((double) count) / Reader.numDocs();

        if (Conf.DOC_FREQ_THRESHOLD_LOW <= r && r <= Conf.DOC_FREQ_THRESHOLD_HIGH) {
            final String s = term.term().utf8ToString();
            if (s.length() >= Conf.MIN_SOM_TERM_LENGTH) {
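                // topTerms is a min-heap on DocCount: once it is full, evict the
                // head (the lowest count) whenever a more frequent term arrives.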
                if (topTerms.size() < Conf.MAX_VECTOR_FEATURES) {
                    topTerms.add(new TermPair(s, count));
                } else if (topTerms.peek().DocCount < count) {
                    topTerms.remove();
                    topTerms.add(new TermPair(s, count));
                }
                ++num;
            }
        }
    }
    System.out.println(num + " terms within doc frequency range");

    final int numFeatures = Math.min(topTerms.size(), Conf.MAX_VECTOR_FEATURES);
    TermIndices = new HashMap<String, Integer>((numFeatures * 4 + 1) / 3); // respect load factor
    Terms = new java.util.Vector<String>(numFeatures);
    Terms.setSize(numFeatures);
    System.out.println("the top " + numFeatures + " features will be used");
    for (int i = numFeatures - 1; i > -1; --i) { // reverse order, to put top terms first
        TermPair t = topTerms.poll(); // least remaining
        TermIndices.put(t.Term, i);
        Terms.set(i, t.Term);
        // System.out.println("Including term " + t.Term + " (" + t.DocCount + ")");
    }
}

From source file:com.gs.obevo.impl.graph.GraphSorter.java

private <T> TopologicalOrderIterator<T, DefaultEdge> getTopologicalOrderIterator(
        DirectedGraph<T, DefaultEdge> graph, Comparator<? super T> comparator) {
    if (comparator != null) {
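        // Backing the iterator with a priority queue fixes the visit order among
        // ready vertices, giving a deterministic topological sort.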
        Queue<T> queue = new PriorityQueue<T>(10, comparator);
        return new TopologicalOrderIterator<T, DefaultEdge>(graph, queue);
    } else if (graph.vertexSet().iterator().next() instanceof Comparable) {
        Queue<T> queue = new PriorityQueue<T>();
        return new TopologicalOrderIterator<T, DefaultEdge>(graph, queue);
    } else {
        throw new IllegalArgumentException(
                "Unsortable graph elements - either need to provide a Comparator or have Comparable vertices to guarantee a consistent topological order");
    }
}

From source file:org.apache.hadoop.corona.PoolGroupManager.java

/**
 * Put all the pool groups into the priority queue sorted by a comparator
 * @param comparator the comparator to sort all the pool groups in the queue
 * @return the queue of the pool groups sorted by a comparator
 */
private Queue<PoolGroupSchedulable> createPoolGroupQueue(ScheduleComparator comparator) {
    int initCapacity = snapshotPoolGroups.size() == 0 ? 1 : snapshotPoolGroups.size();
    Queue<PoolGroupSchedulable> poolGroupQueue = new PriorityQueue<PoolGroupSchedulable>(initCapacity,
            comparator);
    poolGroupQueue.addAll(snapshotPoolGroups);
    return poolGroupQueue;
}

From source file:com.uber.stream.kafka.mirrormaker.controller.core.HelixMirrorMakerManager.java

public HelixMirrorMakerManager(ControllerConf controllerConf) {
    _controllerConf = controllerConf;
    _helixZkURL = HelixUtils.getAbsoluteZkPathForHelix(_controllerConf.getZkStr());
    _helixClusterName = _controllerConf.getHelixClusterName();
    _instanceId = controllerConf.getInstanceId();
    _workloadInfoRetriever = new WorkloadInfoRetriever(this, true);
    _currentServingInstance = new PriorityQueue<>(1,
            InstanceTopicPartitionHolder.perPartitionWorkloadComparator(_workloadInfoRetriever, null));
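    // The initial capacity of 1 is only a sizing hint; the queue grows as
    // instances are offered.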
    _offsetMonitor = new OffsetMonitor(this, controllerConf);

    _minLagTimeSec = controllerConf.getAutoRebalanceMinLagTimeInSeconds();
    _minLagOffset = controllerConf.getAutoRebalanceMinLagOffset();
    _offsetMaxValidTimeMillis = TimeUnit.SECONDS
            .toMillis(controllerConf.getAutoRebalanceMaxOffsetInfoValidInSeconds());
    _maxDedicatedInstancesRatio = controllerConf.getMaxDedicatedLaggingInstancesRatio();
}

From source file:org.mule.util.store.MonitoredObjectStoreWrapper.java

public void expire() {
    try {
        final long now = System.nanoTime();
        List<Serializable> keys = allKeys();
        int excess = (keys.size() - maxEntries);
        if (maxEntries > 0 && excess > 0) {
            PriorityQueue<StoredObject<T>> q = new PriorityQueue<StoredObject<T>>(excess,
                    new Comparator<StoredObject<T>>() {

                        @Override
                        public int compare(StoredObject<T> paramT1, StoredObject<T> paramT2) {
                            return paramT2.timestamp.compareTo(paramT1.timestamp);
                        }
                    });
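            // The comparator is reversed (newest timestamp at the head), so the
            // queue retains the 'excess' oldest entries; whatever is still queued
            // after the scan is removed below.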
            long youngest = Long.MAX_VALUE;
            for (Serializable key : keys) {
                StoredObject<T> obj = getStore().retrieve(key);
                //TODO extract the entryTTL>0 outside of loop
                if (entryTTL > 0 && TimeUnit.NANOSECONDS.toMillis(now - obj.getTimestamp()) >= entryTTL) {
                    remove(key);
                    excess--;
                    if (excess > 0 && q.size() > excess) {
                        q.poll();
                        youngest = q.peek().timestamp;
                    }
                } else {
                    if (excess > 0 && (q.size() < excess || obj.timestamp < youngest)) {
                        q.offer(obj);
                        youngest = q.peek().timestamp;
                    }
                    if (excess > 0 && q.size() > excess) {
                        q.poll();
                        youngest = q.peek().timestamp;
                    }

                }
            }
            for (int i = 0; i < excess; i++) {
                Serializable key = q.poll().key;
                remove(key);
            }
        } else {
            if (entryTTL > 0) {
                for (Serializable key : keys) {
                    StoredObject<T> obj = getStore().retrieve(key);
                    if (TimeUnit.NANOSECONDS.toMillis(now - obj.getTimestamp()) >= entryTTL) {
                        remove(key);
                    }
                }
            }
        }
    } catch (Exception e) {
        logger.warn("Running expirty on " + baseStore + " threw " + e + ":" + e.getMessage());
    }
}

From source file:MSUmpire.PSMDataStructure.ProtID.java

public float GetAbundanceByMS1_TopN(int topN, float pepweight) {
    if (PeptideID.isEmpty()) {
        return 0;
    }
    PriorityQueue<Float> TopQueue = new PriorityQueue<>(PeptideID.size(), Collections.reverseOrder());
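    // Collections.reverseOrder() turns the min-heap into a max-heap, so poll()
    // returns the largest peak heights first.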
    for (PepIonID peptide : PeptideID.values()) {
        if (peptide.PeakHeight != null && peptide.FilteringWeight > pepweight) {
            TopQueue.add(peptide.PeakHeight[0]);
        }
    }

    float totalabundance = 0f;
    int num = Math.min(topN, TopQueue.size());

    for (int i = 0; i < num; i++) {
        totalabundance += TopQueue.poll();
    }
    return totalabundance / num;
}

From source file:android.support.v7.graphics.ColorCutQuantizer.java

private List<Swatch> quantizePixels(int maxColors) {
    // Create the priority queue which is sorted by volume descending. This means we always
    // split the largest box in the queue
    final PriorityQueue<Vbox> pq = new PriorityQueue<>(maxColors, VBOX_COMPARATOR_VOLUME);

    // To start, offer a box which contains all of the colors
    pq.offer(new Vbox(0, mColors.length - 1));

    // Now go through the boxes, splitting them until we have reached maxColors or there are no
    // more boxes to split
    splitBoxes(pq, maxColors);

    // Finally, return the average colors of the color boxes
    return generateAverageColors(pq);
}

From source file:io.seldon.spark.actions.GroupActionsJob.java

public static void run(CmdLineArgs cmdLineArgs) {
    long unixDays = 0;
    try {
        unixDays = JobUtils.dateToUnixDays(cmdLineArgs.input_date_string);
    } catch (ParseException e) {
        unixDays = 0;
    }
    System.out.println(String.format("--- started GroupActionsJob date[%s] unixDays[%s] ---",
            cmdLineArgs.input_date_string, unixDays));

    System.out.println("Env: " + System.getenv());
    System.out.println("Properties: " + System.getProperties());

    SparkConf sparkConf = new SparkConf().setAppName("GroupActionsJob");

    if (cmdLineArgs.debug_use_local_master) {
        System.out.println("Using 'local' master");
        sparkConf.setMaster("local");
    }

    Tuple2<String, String>[] sparkConfPairs = sparkConf.getAll();
    System.out.println("--- sparkConf ---");
    for (int i = 0; i < sparkConfPairs.length; i++) {
        Tuple2<String, String> kvPair = sparkConfPairs[i];
        System.out.println(String.format("%s:%s", kvPair._1, kvPair._2));
    }
    System.out.println("-----------------");

    JavaSparkContext jsc = new JavaSparkContext(sparkConf);
    { // setup aws access
        Configuration hadoopConf = jsc.hadoopConfiguration();
        hadoopConf.set("fs.s3.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem");
        if (cmdLineArgs.aws_access_key_id != null && !"".equals(cmdLineArgs.aws_access_key_id)) {
            hadoopConf.set("fs.s3n.awsAccessKeyId", cmdLineArgs.aws_access_key_id);
            hadoopConf.set("fs.s3n.awsSecretAccessKey", cmdLineArgs.aws_secret_access_key);
        }
    }

    // String output_path_dir = "./out/" + input_date_string + "-" + UUID.randomUUID();

    JavaRDD<String> dataSet = jsc.textFile(
            JobUtils.getSourceDirFromDate(cmdLineArgs.input_path_pattern, cmdLineArgs.input_date_string))
            .repartition(4);

    final ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    final String single_client = cmdLineArgs.single_client;
    if (single_client != null) {
        Function<String, Boolean> clientFilter = new Function<String, Boolean>() {

            @Override
            public Boolean call(String t) throws Exception {
                ActionData actionData = JobUtils.getActionDataFromActionLogLine(objectMapper, t);
                return ((actionData.client != null) && (actionData.client.equals(single_client)));
            }
        };
        dataSet = dataSet.filter(clientFilter);
    }

    JavaPairRDD<String, ActionData> pairs = dataSet.mapToPair(new PairFunction<String, String, ActionData>() {

        @Override
        public Tuple2<String, ActionData> call(String t) throws Exception {
            ActionData actionData = JobUtils.getActionDataFromActionLogLine(objectMapper, t);
            // String key = (actionData.userid == 0) ? "__no_userid__" : actionData.client;
            String key = actionData.client;
            return new Tuple2<String, ActionData>(key, actionData);
        }

    }).persist(StorageLevel.MEMORY_AND_DISK());

    List<String> clientList = pairs.keys().distinct().collect();
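    // Both queues below order clients by descending itemCount, so the largest
    // action counts are printed first.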
    Queue<ClientDetail> clientDetailQueue = new PriorityQueue<ClientDetail>(30, new Comparator<ClientDetail>() {

        @Override
        public int compare(ClientDetail o1, ClientDetail o2) {
            if (o1.itemCount > o2.itemCount) {
                return -1;
            } else if (o1.itemCount < o2.itemCount) {
                return 1;
            }
            return 0;
        }
    });
    Queue<ClientDetail> clientDetailZeroQueue = new PriorityQueue<ClientDetail>(30,
            new Comparator<ClientDetail>() {

                @Override
                public int compare(ClientDetail o1, ClientDetail o2) {
                    if (o1.itemCount > o2.itemCount) {
                        return -1;
                    } else if (o1.itemCount < o2.itemCount) {
                        return 1;
                    }
                    return 0;
                }
            });
    System.out.println("Client list " + clientList.toString());
    for (String client : clientList) {
        if (client != null) {
            System.out.println("looking at client " + client);
            final String currentClient = client;

            JavaPairRDD<String, ActionData> filtered_by_client = pairs
                    .filter(new Function<Tuple2<String, ActionData>, Boolean>() {

                        @Override
                        public Boolean call(Tuple2<String, ActionData> v1) throws Exception {
                            if (currentClient.equalsIgnoreCase(v1._1)) {
                                return Boolean.TRUE;
                            } else {
                                return Boolean.FALSE;
                            }
                        }
                    });

            JavaPairRDD<String, ActionData> nonZeroUserIds = filtered_by_client
                    .filter(new Function<Tuple2<String, ActionData>, Boolean>() {

                        @Override
                        public Boolean call(Tuple2<String, ActionData> v1) throws Exception {
                            if (v1._2.userid == 0) {
                                return Boolean.FALSE;
                            } else {
                                return Boolean.TRUE;
                            }
                        }
                    });

            JavaPairRDD<String, Integer> userIdLookupRDD = nonZeroUserIds
                    .mapToPair(new PairFunction<Tuple2<String, ActionData>, String, Integer>() {

                        @Override
                        public Tuple2<String, Integer> call(Tuple2<String, ActionData> t) throws Exception {
                            String key = currentClient + "_" + t._2.client_userid;
                            return new Tuple2<String, Integer>(key, t._2.userid);
                        }
                    });

            Map<String, Integer> userIdLookupMap = userIdLookupRDD.collectAsMap();
            Map<String, Integer> userIdLookupMap_wrapped = new HashMap<String, Integer>(userIdLookupMap);
            final Broadcast<Map<String, Integer>> broadcastVar = jsc.broadcast(userIdLookupMap_wrapped);
            JavaRDD<String> json_only_with_zeros = filtered_by_client
                    .map(new Function<Tuple2<String, ActionData>, String>() {

                        @Override
                        public String call(Tuple2<String, ActionData> v1) throws Exception {
                            Map<String, Integer> m = broadcastVar.getValue();
                            ActionData actionData = v1._2;
                            if (actionData.userid == 0) {
                                String key = currentClient + "_" + actionData.client_userid;
                                if (m.containsKey(key)) {
                                    actionData.userid = m.get(key);
                                } else {
                                    return "";
                                }
                            }
                            String json = JobUtils.getJsonFromActionData(actionData);
                            return json;
                        }
                    });

            JavaRDD<String> json_only = json_only_with_zeros.filter(new Function<String, Boolean>() {

                @Override
                public Boolean call(String v1) throws Exception {
                    return (v1.length() == 0) ? Boolean.FALSE : Boolean.TRUE;
                }
            });

            String outputPath = getOutputPath(cmdLineArgs.output_path_dir, unixDays, client);
            if (cmdLineArgs.gzip_output) {
                json_only.saveAsTextFile(outputPath, org.apache.hadoop.io.compress.GzipCodec.class);
            } else {
                json_only.saveAsTextFile(outputPath);
            }
            long json_only_count = json_only.count();
            clientDetailZeroQueue
                    .add(new ClientDetail(currentClient, json_only_with_zeros.count() - json_only_count));
            clientDetailQueue.add(new ClientDetail(currentClient, json_only_count));
        } else
            System.out.println("Found null client!");
    }

    System.out.println("- Client Action (Zero Userid) Count -");
    while (clientDetailZeroQueue.size() != 0) {
        GroupActionsJob.ClientDetail clientDetail = clientDetailZeroQueue.remove();
        System.out.println(String.format("%s: %d", clientDetail.client, clientDetail.itemCount));
    }

    System.out.println("- Client Action Count -");
    while (clientDetailQueue.size() != 0) {
        GroupActionsJob.ClientDetail clientDetail = clientDetailQueue.remove();
        System.out.println(String.format("%s: %d", clientDetail.client, clientDetail.itemCount));
    }

    jsc.stop();
    System.out.println(String.format("--- finished GroupActionsJob date[%s] unixDays[%s] ---",
            cmdLineArgs.input_date_string, unixDays));

}

From source file:org.apache.pig.builtin.TOP.java

@Override
public void accumulate(Tuple tuple) throws IOException {
    if (tuple == null || tuple.size() < 3) {
        return;
    }
    try {
        int n = (Integer) tuple.get(0);
        int fieldNum = (Integer) tuple.get(1);
        DataBag inputBag = (DataBag) tuple.get(2);
        if (inputBag == null) {
            return;
        }

        if (store == null) {
            store = new PriorityQueue<Tuple>(n + 1, new TupleComparator(fieldNum, sortDesc));
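            // Capacity n + 1 leaves room for updateTop() to offer a candidate
            // tuple before trimming the queue back down to the top n.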
        }

        updateTop(store, n, inputBag);
    } catch (ExecException e) {
        throw new RuntimeException("ExecException executing function: ", e);
    } catch (Exception e) {
        throw new RuntimeException("General Exception executing function: ", e);
    }
}

From source file:com.joliciel.csvLearner.maxent.MaxentBestFeatureObserver.java

@Override
public void onTerminate() {
    bestFeaturesPerOutcome = new TreeMap<String, List<NameValuePair>>();
    totalPerOutcome = new TreeMap<String, Double>();
    bestFeatureTotalPerOutcome = new TreeMap<String, Double>();
    filePercentagePerOutcome = new TreeMap<String, Map<String, Double>>();
    fileNames = new TreeSet<String>();
    for (Entry<String, Map<String, Double>> entry : featureMap.entrySet()) {
        String outcome = entry.getKey();
        LOG.debug("outcome: " + outcome);
        Map<String, Double> featureTotals = entry.getValue();
        Map<String, Double> fileTotals = new TreeMap<String, Double>();
        PriorityQueue<NameValuePair> heap = new PriorityQueue<NameValuePair>(featureTotals.size(),
                new NameValueDescendingComparator());
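        // With the descending comparator, poll() yields the highest-valued pairs
        // first, so the loop further down extracts the top n features.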
        double grandTotal = 0.0;
        for (Entry<String, Double> featureTotal : featureTotals.entrySet()) {
            NameValuePair pair = new NameValuePair(featureTotal.getKey(), featureTotal.getValue());
            heap.add(pair);
            grandTotal += featureTotal.getValue();
            String featureKey = featureTotal.getKey();
            if (featureKey.contains(CSVLearner.NOMINAL_MARKER))
                featureKey = featureKey.substring(0, featureKey.indexOf(CSVLearner.NOMINAL_MARKER));
            String fileName = this.featureToFileMap.get(featureKey);
            Double fileTotalObj = fileTotals.get(fileName);
            double fileTotal = fileTotalObj == null ? 0 : fileTotalObj.doubleValue();
            fileTotals.put(fileName, fileTotal + featureTotal.getValue());
        }
        List<NameValuePair> bestFeatures = new ArrayList<NameValuePair>();
        double bestFeatureTotal = 0.0;
        for (int i = 0; i < n; i++) {
            NameValuePair pair = heap.poll();
            if (pair == null)
                break;

            LOG.debug("Feature: " + pair.getName() + ", Total: " + pair.getValue());
            bestFeatures.add(pair);
            bestFeatureTotal += pair.getValue();
        }
        bestFeaturesPerOutcome.put(outcome, bestFeatures);
        totalPerOutcome.put(outcome, grandTotal);
        bestFeatureTotalPerOutcome.put(outcome, bestFeatureTotal);

        // convert the file totals to percentages
        for (Entry<String, Double> fileTotal : fileTotals.entrySet()) {
            double filePercentage = fileTotal.getValue() / grandTotal;
            fileTotal.setValue(filePercentage);
            fileNames.add(fileTotal.getKey());
        }
        filePercentagePerOutcome.put(outcome, fileTotals);

        featureTotals.clear();
    }
    featureMap.clear();
    featureMap = null;
}