List of usage examples for the java.util.PriorityQueue constructor
public PriorityQueue(SortedSet<? extends E> c)
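Before the collected examples, a minimal, self-contained sketch of this constructor (the class and variable names here are illustrative, not taken from the sources below): a PriorityQueue built from a SortedSet adopts the set's ordering, that is, the set's comparator, or natural ordering if the set has none.

import java.util.PriorityQueue;
import java.util.SortedSet;
import java.util.TreeSet;

public class PriorityQueueFromSortedSetExample {
    public static void main(String[] args) {
        // A SortedSet ordered by natural (alphabetical) ordering.
        SortedSet<String> words = new TreeSet<>();
        words.add("pear");
        words.add("apple");
        words.add("orange");

        // PriorityQueue(SortedSet) adopts the set's ordering:
        // its comparator, or natural ordering if the comparator is null.
        PriorityQueue<String> queue = new PriorityQueue<>(words);

        // Elements are polled in ascending order: apple, orange, pear.
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }
    }
}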
From source file:org.jiemamy.utils.collection.CollectionsUtil.java
/**
 * Creates a new instance of {@link PriorityQueue}.
 *
 * @param <E> the element type of the {@link PriorityQueue}
 * @param c the queue whose elements are to be placed into the new queue
 * @return the created {@link PriorityQueue}
 * @throws IllegalArgumentException if {@code null} is passed as an argument
 * @see PriorityQueue#PriorityQueue(PriorityQueue)
 */
public static <E> PriorityQueue<E> newPriorityQueue(PriorityQueue<? extends E> c) {
    Validate.notNull(c);
    return new PriorityQueue<E>(c);
}
From source file:org.jiemamy.utils.collection.CollectionsUtil.java
/**
 * Creates a new instance of {@link PriorityQueue}.
 *
 * @param <E> the element type of the {@link PriorityQueue}
 * @param c the sorted set whose elements are to be placed into the new queue
 * @return the created {@link PriorityQueue}
 * @throws IllegalArgumentException if {@code null} is passed as an argument
 * @see PriorityQueue#PriorityQueue(SortedSet)
 */
public static <E> PriorityQueue<E> newPriorityQueue(SortedSet<? extends E> c) {
    Validate.notNull(c);
    return new PriorityQueue<E>(c);
}
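A brief usage sketch of these two helpers, assuming the CollectionsUtil class above is on the classpath (the variable names are illustrative): the SortedSet overload copies the set's ordering into the new queue, and the null check fails fast with IllegalArgumentException rather than a later NullPointerException.

import java.util.PriorityQueue;
import java.util.SortedSet;
import java.util.TreeSet;

import org.jiemamy.utils.collection.CollectionsUtil;

public class CollectionsUtilDemo {
    public static void main(String[] args) {
        SortedSet<Integer> sorted = new TreeSet<>();
        sorted.add(3);
        sorted.add(1);
        sorted.add(2);

        // Build a queue from the SortedSet; elements come out in the set's order.
        PriorityQueue<Integer> fromSet = CollectionsUtil.newPriorityQueue(sorted);

        // Build another queue from the first one via the PriorityQueue overload.
        PriorityQueue<Integer> copy = CollectionsUtil.newPriorityQueue(fromSet);

        System.out.println(copy.poll()); // prints 1
    }
}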
From source file:org.apache.flink.streaming.runtime.operators.windowing.WindowOperator.java
private void restoreFromLegacyWindowOperator(DataInputViewStreamWrapper in) throws IOException {
    Preconditions.checkArgument(legacyWindowOperatorType == LegacyWindowOperatorType.NONE);

    int numWatermarkTimers = in.readInt();
    this.restoredFromLegacyEventTimeTimers = new PriorityQueue<>(Math.max(numWatermarkTimers, 1));

    for (int i = 0; i < numWatermarkTimers; i++) {
        K key = keySerializer.deserialize(in);
        W window = windowSerializer.deserialize(in);
        long timestamp = in.readLong();

        Timer<K, W> timer = new Timer<>(timestamp, key, window);
        restoredFromLegacyEventTimeTimers.add(timer);
    }

    int numProcessingTimeTimers = in.readInt();
    this.restoredFromLegacyProcessingTimeTimers = new PriorityQueue<>(Math.max(numProcessingTimeTimers, 1));

    for (int i = 0; i < numProcessingTimeTimers; i++) {
        K key = keySerializer.deserialize(in);
        W window = windowSerializer.deserialize(in);
        long timestamp = in.readLong();

        Timer<K, W> timer = new Timer<>(timestamp, key, window);
        restoredFromLegacyProcessingTimeTimers.add(timer);
    }

    // just to read all the rest, although we do not really use this information.
    int numProcessingTimeTimerTimestamp = in.readInt();
    for (int i = 0; i < numProcessingTimeTimerTimestamp; i++) {
        in.readLong();
        in.readInt();
    }

    if (LOG.isDebugEnabled()) {
        int subtaskIdx = getRuntimeContext().getIndexOfThisSubtask();

        if (restoredFromLegacyEventTimeTimers != null && !restoredFromLegacyEventTimeTimers.isEmpty()) {
            LOG.debug("{} (taskIdx={}) restored {} event time timers from an older Flink version: {}",
                    getClass().getSimpleName(), subtaskIdx,
                    restoredFromLegacyEventTimeTimers.size(),
                    restoredFromLegacyEventTimeTimers);
        }

        if (restoredFromLegacyProcessingTimeTimers != null && !restoredFromLegacyProcessingTimeTimers.isEmpty()) {
            LOG.debug("{} (taskIdx={}) restored {} processing time timers from an older Flink version: {}",
                    getClass().getSimpleName(), subtaskIdx,
                    restoredFromLegacyProcessingTimeTimers.size(),
                    restoredFromLegacyProcessingTimeTimers);
        }
    }
}
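The snippet above relies on the restored Timer objects ordering themselves by timestamp once they are inside the PriorityQueue, and on the Math.max(n, 1) capacity hint, since PriorityQueue rejects an initial capacity of 0. A minimal, standalone sketch of that idea (SimpleTimer below is an illustrative stand-in, not Flink's Timer class):

import java.util.PriorityQueue;

public class TimerQueueSketch {

    // Illustrative stand-in for a timer: ordered by its firing timestamp.
    static final class SimpleTimer implements Comparable<SimpleTimer> {
        final long timestamp;
        final String key;

        SimpleTimer(long timestamp, String key) {
            this.timestamp = timestamp;
            this.key = key;
        }

        @Override
        public int compareTo(SimpleTimer other) {
            return Long.compare(this.timestamp, other.timestamp);
        }

        @Override
        public String toString() {
            return key + "@" + timestamp;
        }
    }

    public static void main(String[] args) {
        // Mirrors the Math.max(n, 1) pattern above: an empty restore still
        // needs a capacity of at least 1.
        int restoredCount = 0;
        PriorityQueue<SimpleTimer> timers = new PriorityQueue<>(Math.max(restoredCount, 1));

        timers.add(new SimpleTimer(300L, "c"));
        timers.add(new SimpleTimer(100L, "a"));
        timers.add(new SimpleTimer(200L, "b"));

        // Timers come out earliest-first: a@100, b@200, c@300.
        while (!timers.isEmpty()) {
            System.out.println(timers.poll());
        }
    }
}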
From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java
/**
 * Performs a spatial join between records in two R-trees.
 * @param R the first R-tree
 * @param S the second R-tree
 * @param output collector that receives each pair of overlapping records
 * @return the number of overlapping pairs found
 * @throws IOException
 * SuppressWarnings("resource") is used because we create LineReaders on the
 * internal data stream of both R and S. We do not want to close the
 * LineReader because it will subsequently close the internal data stream
 * of R and S which is something we want to avoid because both R and S are
 * not created by this function and it should not free these resources.
 */
protected static <S1 extends Shape, S2 extends Shape> int spatialJoinDisk(final RTree<S1> R, final RTree<S2> S,
        final ResultCollector2<S1, S2> output, final Reporter reporter) throws IOException {
    PriorityQueue<Long> nodesToJoin = new PriorityQueue<Long>(R.nodeCount + S.nodeCount);

    // Start with the two roots
    nodesToJoin.add(0L);

    // Caches to keep the retrieved data records. Helpful when it reaches the
    // leaves and starts to read objects from the two trees
    LruCache<Integer, Shape[]> r_records_cache = new LruCache<Integer, Shape[]>(R.degree * 2);
    LruCache<Integer, Shape[]> s_records_cache = new LruCache<Integer, Shape[]>(S.degree * R.degree * 4);

    Text line = new Text2();

    int result_count = 0;

    LineReader r_lr = null, s_lr = null;
    // Last offset read from r and s
    int r_last_offset = 0;
    int s_last_offset = 0;

    while (!nodesToJoin.isEmpty()) {
        long nodes_to_join = nodesToJoin.remove();
        int r_node = (int) (nodes_to_join >>> 32);
        int s_node = (int) (nodes_to_join & 0xFFFFFFFF);

        // Compute the overlap between the children of the two nodes
        // If a node is non-leaf, its children are other nodes
        // If a node is leaf, its children are data records
        boolean r_leaf = r_node >= R.nonLeafNodeCount;
        boolean s_leaf = s_node >= S.nonLeafNodeCount;

        if (!r_leaf && !s_leaf) {
            // Both are internal nodes, read child nodes under them
            // Find overlaps using a simple cross join (TODO: Use plane-sweep)
            for (int i = 0; i < R.degree; i++) {
                int new_r_node = r_node * R.degree + i + 1;
                for (int j = 0; j < S.degree; j++) {
                    int new_s_node = s_node * S.degree + j + 1;
                    if (R.nodes[new_r_node].isIntersected(S.nodes[new_s_node])) {
                        long new_pair = (((long) new_r_node) << 32) | new_s_node;
                        nodesToJoin.add(new_pair);
                    }
                }
            }
        } else if (r_leaf && !s_leaf) {
            // R is a leaf node while S is an internal node
            // Compare the leaf node in R against all child nodes of S
            for (int j = 0; j < S.degree; j++) {
                int new_s_node = s_node * S.degree + j + 1;
                if (R.nodes[r_node].isIntersected(S.nodes[new_s_node])) {
                    long new_pair = (((long) r_node) << 32) | new_s_node;
                    nodesToJoin.add(new_pair);
                }
            }
        } else if (!r_leaf && s_leaf) {
            // R is an internal node while S is a leaf node
            // Compare child nodes of R against the leaf node in S
            for (int i = 0; i < R.degree; i++) {
                int new_r_node = r_node * R.degree + i + 1;
                if (R.nodes[new_r_node].isIntersected(S.nodes[s_node])) {
                    long new_pair = (((long) new_r_node) << 32) | s_node;
                    nodesToJoin.add(new_pair);
                }
            }
        } else if (r_leaf && s_leaf) {
            // Both are leaf nodes, join objects under them
            int r_start_offset = R.dataOffset[r_node];
            int r_end_offset = R.dataOffset[r_node + 1];

            int s_start_offset = S.dataOffset[s_node];
            int s_end_offset = S.dataOffset[s_node + 1];

            // Read or retrieve r_records
            Shape[] r_records = r_records_cache.get(r_start_offset);
            if (r_records == null) {
                int cache_key = r_start_offset;
                r_records = r_records_cache.popUnusedEntry();
                if (r_records == null) {
                    r_records = new Shape[R.degree * 2];
                }

                // Need to read it from stream
                if (r_last_offset != r_start_offset) {
                    long seekTo = r_start_offset + R.treeStartOffset;
                    R.data.seek(seekTo);
                    r_lr = new LineReader(R.data);
                }
                int record_i = 0;
                while (r_start_offset < r_end_offset) {
                    r_start_offset += r_lr.readLine(line);
                    if (r_records[record_i] == null)
                        r_records[record_i] = R.stockObject.clone();
                    r_records[record_i].fromText(line);
                    record_i++;
                }
                r_last_offset = r_start_offset;
                // Nullify other records
                while (record_i < r_records.length)
                    r_records[record_i++] = null;
                r_records_cache.put(cache_key, r_records);
            }

            // Read or retrieve s_records
            Shape[] s_records = s_records_cache.get(s_start_offset);
            if (s_records == null) {
                int cache_key = s_start_offset;
                // Need to read it from stream
                if (s_lr == null || s_last_offset != s_start_offset) {
                    // Need to reposition s_lr (LineReader of S)
                    long seekTo = s_start_offset + S.treeStartOffset;
                    S.data.seek(seekTo);
                    s_lr = new LineReader(S.data);
                }
                s_records = s_records_cache.popUnusedEntry();
                if (s_records == null) {
                    s_records = new Shape[S.degree * 2];
                }
                int record_i = 0;
                while (s_start_offset < s_end_offset) {
                    s_start_offset += s_lr.readLine(line);
                    if (s_records[record_i] == null)
                        s_records[record_i] = S.stockObject.clone();
                    s_records[record_i].fromText(line);
                    record_i++;
                }
                // Nullify other records
                while (record_i < s_records.length)
                    s_records[record_i++] = null;
                // Put in cache
                s_records_cache.put(cache_key, s_records);
                s_last_offset = s_start_offset;
            }

            // Do Cartesian product between records to find overlapping pairs
            for (int i_r = 0; i_r < r_records.length && r_records[i_r] != null; i_r++) {
                for (int i_s = 0; i_s < s_records.length && s_records[i_s] != null; i_s++) {
                    if (r_records[i_r].isIntersected(s_records[i_s])
                            && !r_records[i_r].equals(s_records[i_s])) {
                        result_count++;
                        if (output != null) {
                            output.collect((S1) r_records[i_r], (S2) s_records[i_s]);
                        }
                    }
                }
            }
        }
        if (reporter != null)
            reporter.progress();
    }
    return result_count;
}
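One detail worth calling out in the join above is how the PriorityQueue<Long> is used: each pending (R-node, S-node) pair is packed into a single long, with the R node index in the high 32 bits and the S node index in the low 32 bits, so the queue hands pairs back ordered by R node first, then S node. A small, self-contained sketch of that packing scheme (the class and method names are illustrative only):

import java.util.PriorityQueue;

public class NodePairQueueSketch {

    // Pack two node ids into one long: r in the high 32 bits, s in the low
    // 32 bits, as in the spatial join above. Node ids are non-negative,
    // so the plain OR below cannot sign-extend.
    static long pack(int rNode, int sNode) {
        return (((long) rNode) << 32) | sNode;
    }

    static int unpackR(long pair) {
        return (int) (pair >>> 32);
    }

    static int unpackS(long pair) {
        return (int) (pair & 0xFFFFFFFF);
    }

    public static void main(String[] args) {
        PriorityQueue<Long> nodesToJoin = new PriorityQueue<Long>();

        // Start with the pair of roots (0, 0), which packs to 0L.
        nodesToJoin.add(pack(0, 0));
        nodesToJoin.add(pack(2, 1));
        nodesToJoin.add(pack(1, 3));

        // Pairs come out ordered by the packed long, i.e. by R node index
        // first, then S node index: (0, 0), (1, 3), (2, 1).
        while (!nodesToJoin.isEmpty()) {
            long pair = nodesToJoin.remove();
            System.out.println("(" + unpackR(pair) + ", " + unpackS(pair) + ")");
        }
    }
}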