List of usage examples for java.util.PriorityQueue: the PriorityQueue() constructor
public PriorityQueue()
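Creates a PriorityQueue with the default initial capacity (11) that orders its elements according to their natural ordering. As a minimal standalone sketch (not taken from the examples below), the head of the queue is always the smallest element:

import java.util.PriorityQueue;

public class PriorityQueueDemo {
    public static void main(String[] args) {
        // No-argument constructor: natural ordering, default initial capacity of 11
        PriorityQueue<Integer> queue = new PriorityQueue<>();
        queue.add(5);
        queue.add(1);
        queue.add(3);
        // poll() always removes the smallest remaining element: prints 1, 3, 5
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }
    }
}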
From source file:persistence.JSONReader.java
/**
 * @param c the class of the Models expected
 * @return a queue of Models read from a JSON-formatted file
 * @throws java.io.IOException
 * @throws org.json.simple.parser.ParseException
 */
public static AbstractQueue<JSONModel> loadModels(Class c)
        throws IOException, ParseException, InstantiationException, IllegalAccessException {
    AbstractQueue<JSONModel> modelQueue = new PriorityQueue();
    JSONModel model;
    JSONParser parser = new JSONParser();
    String json = file.readLine();
    while (json != null) {
        model = (JSONModel) c.newInstance();
        JSONObject jsonObject = (JSONObject) parser.parse(json);
        model.fromJSON(jsonObject);
        modelQueue.add(model);
        json = file.readLine();
    }
    return modelQueue;
}
From source file:Main.java
/**
 * Given any of the known collection types, this method will return an instance of the collection.
 * @param collectionType the type of the collection
 * @return the collection instance
 */
public static Collection<?> getCollection(Class<?> collectionType) {
    if (HashSet.class.equals(collectionType)) {
        return new HashSet<Object>();
    } else if (TreeSet.class.equals(collectionType)) {
        return new TreeSet<Object>();
    } else if (CopyOnWriteArraySet.class.equals(collectionType)) {
        return new CopyOnWriteArraySet<Object>();
    } else if (LinkedHashSet.class.equals(collectionType)) {
        return new LinkedHashSet<Object>();
    } else if (ArrayList.class.equals(collectionType)) {
        return new ArrayList<Object>();
    } else if (LinkedList.class.equals(collectionType)) {
        return new LinkedList<Object>();
    } else if (Vector.class.equals(collectionType)) {
        return new Vector<Object>();
    } else if (Stack.class.equals(collectionType)) {
        return new Stack<Object>();
    } else if (PriorityQueue.class.equals(collectionType)) {
        return new PriorityQueue<Object>();
    } else if (PriorityBlockingQueue.class.equals(collectionType)) {
        return new PriorityBlockingQueue<Object>();
    } else if (ArrayDeque.class.equals(collectionType)) {
        return new ArrayDeque<Object>();
    } else if (ConcurrentLinkedQueue.class.equals(collectionType)) {
        return new ConcurrentLinkedQueue<Object>();
    } else if (LinkedBlockingQueue.class.equals(collectionType)) {
        return new LinkedBlockingQueue<Object>();
    } else if (LinkedBlockingDeque.class.equals(collectionType)) {
        return new LinkedBlockingDeque<Object>();
    } else if (List.class.equals(collectionType)) {
        return new LinkedList<Object>();
    } else if (Set.class.equals(collectionType)) {
        return new HashSet<Object>();
    } else if (Queue.class.equals(collectionType)) {
        return new PriorityQueue<Object>();
    } else if (Deque.class.equals(collectionType)) {
        return new ArrayDeque<Object>();
    } else if (Collection.class.equals(collectionType)) {
        return new LinkedList<Object>();
    }
    throw new IllegalArgumentException("Unsupported collection type: " + collectionType);
}
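As a usage sketch for the helper above (the element values and the cast are illustrative assumptions, not part of the original example), asking for the Queue interface yields a queue built with new PriorityQueue<Object>(), so the head is the smallest element by natural ordering:

// assumes the getCollection helper above is in scope; the cast is unchecked
Queue<Object> queue = (Queue<Object>) getCollection(Queue.class);
queue.add("banana");
queue.add("apple");
System.out.println(queue.peek()); // "apple"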
From source file:org.shaman.terrain.voronoi.Voronoi.java
public Voronoi() {
    edges = new ArrayList<>();
    points = new ArrayList<>();
    deleted = new HashSet<>();
    queue = new PriorityQueue<>();
}
From source file:Navigation.Vertex.java
public static void computePaths(Vertex source) {
    source.minDistance = 0.;
    PriorityQueue<Vertex> vertexQueue = new PriorityQueue<Vertex>();
    vertexQueue.add(source);

    while (!vertexQueue.isEmpty()) {
        Vertex u = vertexQueue.poll();

        // Visit each edge exiting u
        for (Edge e : u.adjacencies) {
            Vertex v = e.target;
            double weight = e.weight;
            double distanceThroughU = u.minDistance + weight;
            if (distanceThroughU < v.minDistance) {
                vertexQueue.remove(v);
                v.minDistance = distanceThroughU;
                v.previous = u;
                vertexQueue.add(v);
            }
        }
    }
}
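Because the no-argument constructor orders elements by natural ordering, this Dijkstra example assumes that Vertex implements Comparable<Vertex> by comparing minDistance. A minimal sketch of supporting classes consistent with the fields used above (the name field and the constructors are illustrative assumptions, not taken from the original source):

import java.util.ArrayList;
import java.util.List;

class Vertex implements Comparable<Vertex> {
    final String name;                                // illustrative label, not in the original snippet
    final List<Edge> adjacencies = new ArrayList<>(); // outgoing edges
    double minDistance = Double.POSITIVE_INFINITY;    // tentative distance from the source
    Vertex previous;                                  // predecessor on the current shortest path

    Vertex(String name) {
        this.name = name;
    }

    // PriorityQueue() polls the vertex with the smallest tentative distance first
    @Override
    public int compareTo(Vertex other) {
        return Double.compare(minDistance, other.minDistance);
    }
}

class Edge {
    final Vertex target;
    final double weight;

    Edge(Vertex target, double weight) {
        this.target = target;
        this.weight = weight;
    }
}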
From source file:org.apache.mahout.clustering.lda.LDAPrintTopics.java
private static void ensureQueueSize(Collection<Queue<Pair<String, Double>>> queues, int k) {
    for (int i = queues.size(); i <= k; ++i) {
        queues.add(new PriorityQueue<Pair<String, Double>>());
    }
}
From source file:org.apache.hama.util.Files.java
/**
 * Merges k sorted sequence files, each of size n, using a k·n·log(k) merge algorithm.
 *
 * @param fs the filesystem
 * @param inputPath the input directory containing the sorted sequence files to be merged
 * @param outputPath the path to the merged, sorted sequence file
 */
public static <KEY extends WritableComparable<? super KEY>, VALUE extends Writable> void merge(FileSystem fs,
        Path inputPath, Path outputPath, Class<KEY> keyClazz, Class<VALUE> valClazz) {
    Configuration conf = fs.getConf();
    PriorityQueue<KVPair<KEY, VALUE>> pq = new PriorityQueue<KVPair<KEY, VALUE>>();
    // Map from KeyValuePair to the split number to which it belongs.
    HashMap<KVPair<KEY, VALUE>, Integer> keySplitMap = new HashMap<KVPair<KEY, VALUE>, Integer>();

    FileStatus[] files;
    SequenceFile.Writer writer = null;
    SequenceFile.Reader reader[] = null;
    try {
        files = fs.listStatus(inputPath);
        reader = new SequenceFile.Reader[files.length];
        for (int i = 0; i < files.length; i++) {
            if (files[i].getLen() > 0) {
                reader[i] = new SequenceFile.Reader(fs, files[i].getPath(), conf);
                KEY key = ReflectionUtils.newInstance(keyClazz, new Object[0]);
                VALUE val = ReflectionUtils.newInstance(valClazz, new Object[0]);

                reader[i].next(key, val);
                KVPair<KEY, VALUE> kv = new KVPair<KEY, VALUE>(key, val);
                pq.add(kv);
                keySplitMap.put(kv, i);
            }
        }

        writer = SequenceFile.createWriter(fs, conf, outputPath, keyClazz, valClazz);
        while (!pq.isEmpty()) {
            KVPair<KEY, VALUE> smallestKey = pq.poll();
            writer.append(smallestKey.getKey(), smallestKey.getValue());

            Integer index = keySplitMap.get(smallestKey);
            keySplitMap.remove(smallestKey);

            KEY key = ReflectionUtils.newInstance(keyClazz, new Object[0]);
            VALUE val = ReflectionUtils.newInstance(valClazz, new Object[0]);

            if (reader[index].next(key, val)) {
                KVPair<KEY, VALUE> kv = new KVPair<KEY, VALUE>(key, val);
                pq.add(kv);
                keySplitMap.put(kv, index);
            }
        }
    } catch (IOException e) {
        LOG.error("Couldn't get status, exiting ...", e);
        System.exit(-1);
    } finally {
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                LOG.error("Cannot close writer to sorted seq. file. Exiting ...", e);
                System.exit(-1);
            }
        }
    }
}
From source file:com.joliciel.jochre.search.highlight.FixedSizeSnippetFinder.java
@Override
public List<Snippet> findSnippets(int docId, Set<String> fields, Set<HighlightTerm> highlightTerms,
        int maxSnippets, int snippetSize) {
    try {
        Document doc = indexSearcher.doc(docId);
        JochreIndexDocument jochreDoc = searchService.getJochreIndexDocument(indexSearcher, docId);

        // find best snippet for each term
        PriorityQueue<Snippet> heap = new PriorityQueue<Snippet>();

        int i = -1;
        for (HighlightTerm term : highlightTerms) {
            i++;
            String content = jochreDoc.getContents();
            CoordinateStorage coordinateStorage = jochreDoc.getCoordinateStorage();
            if (term.getStartOffset() >= content.length()) {
                String title = doc.get("title");
                String startPage = doc.get("startPage");
                String endPage = doc.get("endPage");
                LOG.debug("Content: " + content);
                throw new RuntimeException(term.toString() + " cannot fit into contents for doc " + title
                        + ", pages " + startPage + " to " + endPage + ", length: " + content.length());
            }
            List<HighlightTerm> snippetTerms = new ArrayList<HighlightTerm>();
            snippetTerms.add(term);
            int j = -1;
            boolean foundImage = false;
            for (HighlightTerm otherTerm : highlightTerms) {
                j++;
                if (j <= i)
                    continue;
                if (otherTerm.getImageIndex() != term.getImageIndex()) {
                    if (foundImage)
                        break;
                    else
                        continue;
                }
                foundImage = true;

                if (otherTerm.getStartOffset() < term.getStartOffset() + snippetSize) {
                    snippetTerms.add(otherTerm);
                } else {
                    break;
                }
            }
            HighlightTerm lastTerm = snippetTerms.get(snippetTerms.size() - 1);

            int middle = (term.getStartOffset() + lastTerm.getEndOffset()) / 2;
            int start = middle - (snippetSize / 2);
            int end = middle + (snippetSize / 2);
            if (start > term.getStartOffset())
                start = term.getStartOffset();
            if (end < lastTerm.getEndOffset())
                end = lastTerm.getEndOffset();

            if (start < 0)
                start = 0;
            if (end > content.length())
                end = content.length();

            for (int k = start; k >= 0; k--) {
                if (Character.isWhitespace(content.charAt(k))) {
                    start = k + 1;
                    break;
                }
            }
            for (int k = end; k < content.length(); k++) {
                if (Character.isWhitespace(content.charAt(k))) {
                    end = k;
                    break;
                }
            }

            int imageStartOffset = coordinateStorage.getImageStartOffset(term.getImageIndex());
            int imageEndOffset = Integer.MAX_VALUE;
            if (term.getImageIndex() + 1 < coordinateStorage.getImageCount()) {
                imageEndOffset = coordinateStorage.getImageStartOffset(term.getImageIndex() + 1);
            }

            if (start < imageStartOffset)
                start = imageStartOffset;
            if (end > imageEndOffset)
                end = imageEndOffset;

            Snippet snippet = new Snippet(docId, term.getField(), start, end);
            snippet.setHighlightTerms(snippetTerms);
            heap.add(snippet);
        }

        // if we have no snippets, add one per field type
        if (heap.isEmpty()) {
            String content = jochreDoc.getContents();
            int end = snippetSize * maxSnippets;
            if (end > content.length())
                end = content.length();
            for (int k = end; k < content.length(); k++) {
                if (Character.isWhitespace(content.charAt(k))) {
                    end = k;
                    break;
                }
            }
            Snippet snippet = new Snippet(docId, fields.iterator().next(), 0, end);
            heap.add(snippet);
        }

        List<Snippet> snippets = new ArrayList<Snippet>(maxSnippets);
        while (snippets.size() < maxSnippets && !heap.isEmpty()) {
            Snippet snippet = heap.poll();
            boolean hasOverlap = false;
            for (Snippet otherSnippet : snippets) {
                if (otherSnippet.hasOverlap(snippet))
                    hasOverlap = true;
            }
            if (!hasOverlap)
                snippets.add(snippet);
        }

        for (Snippet snippet : snippets) {
            LOG.debug("Added snippet: " + snippet.toJson());
        }
        return snippets;
    } catch (IOException e) {
        LogUtils.logError(LOG, e);
        throw new RuntimeException(e);
    }
}
From source file:com.hortonworks.registries.schemaregistry.HAServerNotificationManager.java
private void notify(String urlPath, Object postBody) {
    // If Schema Registry was not started in HA mode then serverUrl would be null,
    // in which case don't bother making POST calls
    if (serverUrl != null) {
        PriorityQueue<Pair<Integer, String>> queue = new PriorityQueue<>();
        synchronized (UPDATE_ITERATE_LOCK) {
            hostIps.stream().forEach(hostIp -> {
                queue.add(Pair.of(1, hostIp));
            });
        }

        while (!queue.isEmpty()) {
            Pair<Integer, String> priorityWithHostIp = queue.remove();

            WebTarget target = ClientBuilder.newClient()
                    .target(String.format("%s%s", priorityWithHostIp.getRight(), urlPath));
            Response response = null;

            try {
                response = target.request().post(Entity.json(postBody));
            } catch (Exception e) {
                LOG.warn("Failed to notify the peer server '{}' about the current host debut.",
                        priorityWithHostIp.getRight());
            }

            if ((response == null || response.getStatus() != Response.Status.OK.getStatusCode())
                    && priorityWithHostIp.getLeft() < MAX_RETRY) {
                queue.add(Pair.of(priorityWithHostIp.getLeft() + 1, priorityWithHostIp.getRight()));
            } else if (priorityWithHostIp.getLeft() < MAX_RETRY) {
                LOG.info("Notified the peer server '{}' about the current host debut.",
                        priorityWithHostIp.getRight());
            } else if (priorityWithHostIp.getLeft() >= MAX_RETRY) {
                LOG.warn("Failed to notify the peer server '{}' about the current host debut, giving up after {} attempts.",
                        priorityWithHostIp.getRight(), MAX_RETRY);
            }

            try {
                Thread.sleep(priorityWithHostIp.getLeft() * 100);
            } catch (InterruptedException e) {
                LOG.warn("Failed to notify the peer server '{}'", priorityWithHostIp.getRight(), e);
            }
        }
    }
}
From source file:edu.snu.leader.hidden.MetricSpatialIndividual.java
/**
 * TODO Method description
 *
 * @param simState
 * @see edu.snu.leader.hidden.SpatialIndividual#findNearestNeighbors(edu.snu.leader.hidden.SimulationState)
 */
@Override
public void findNearestNeighbors(SimulationState simState) {
    _LOG.trace("Entering findNearestNeighbors( simState )");

    // Get the metric distance to calculate the nearest neighbors
    _nearestNeighborDistance = simState.getNearestNeighborDistance();

    // Build a priority queue to sort things for us
    PriorityQueue<Neighbor> sortedNeighbors = new PriorityQueue<Neighbor>();

    // Iterate through all the individuals
    Iterator<SpatialIndividual> indIter = simState.getAllIndividuals().iterator();
    while (indIter.hasNext()) {
        // Get the individual
        SpatialIndividual ind = indIter.next();

        // If it is us, continue on
        if (_id.equals(ind._id)) {
            continue;
        }

        // Build a neighbor out of it and put it in the queue
        Neighbor neighbor = new Neighbor((float) _location.distance(ind._location), ind);
        sortedNeighbors.add(neighbor);
    }

    // Get all the neighbors within the specified distance
    Iterator<Neighbor> neighborIter = sortedNeighbors.iterator();
    while (neighborIter.hasNext()) {
        Neighbor neighbor = neighborIter.next();

        // Is it within the distance?
        if (neighbor.getDistance() <= _nearestNeighborDistance) {
            // Yup
            _nearestNeighbors.add(neighbor);
            neighbor.getIndividual().signalNearestNeighborStatus(this);
        }
        // else
        // {
        //     // We can bail because the neighbors are sorted by distance
        //     // from closest to farthest
        //     break;
        // }
    }

    _LOG.trace("Leaving findNearestNeighbors( simState )");
}
From source file:com.joliciel.jochre.lexicon.MostLikelyWordChooserImpl.java
public LetterSequence chooseMostLikelyWord(List<LetterSequence> heap, List<LetterSequence> holdoverHeap, int n) {
    LetterSequence bestSequence = null;

    List<LetterSequence> holdoverWithDash = new ArrayList<LetterSequence>(n);
    List<LetterSequence> holdoverWithoutDash = new ArrayList<LetterSequence>(n);

    int i = 0;
    for (LetterSequence holdoverSequence : holdoverHeap) {
        if (i >= n)
            break;
        if (holdoverSequence.toString().endsWith("-"))
            holdoverWithDash.add(holdoverSequence);
        else
            holdoverWithoutDash.add(holdoverSequence);
        i++;
    }

    PriorityQueue<LetterSequence> combinedHeap = new PriorityQueue<LetterSequence>();
    for (LetterSequence sequenceWithDash : holdoverWithDash) {
        // find the dash that needs to be skipped at the end of sequence 1
        for (int j = sequenceWithDash.size() - 1; j >= 0; j--) {
            Letter outcome = sequenceWithDash.get(j);
            if (outcome.getString().equals("-")) {
                sequenceWithDash.setDashToSkip(j);
                break;
            }
        }
        for (LetterSequence letterSequence : heap) {
            LetterSequence combinedSequence = this.getLetterGuesserService().getLetterSequence(sequenceWithDash,
                    letterSequence);
            combinedHeap.add(combinedSequence);
        }
    }

    List<LetterSequence> combinedSequences = new ArrayList<LetterSequence>();
    for (i = 0; i < n; i++) {
        if (combinedHeap.isEmpty())
            break;
        combinedSequences.add(combinedHeap.poll());
    }

    if (holdoverWithoutDash.size() == 0) {
        // all holdovers end with a dash
        // therefore we must combine the two sequences
        bestSequence = this.chooseMostLikelyWord(combinedSequences, n);
    } else {
        // some holdovers end with a dash, others don't
        // need to compare combined sequences with individual sequences
        LetterSequence bestCombinedSequence = this.chooseMostLikelyWord(combinedSequences, n);

        // Originally we only included sequences without dashes here
        // However, this falsifies the results towards those without a dash
        // especially in the case where sequence 1 or sequence 2 is also a common word (e.g. der in Yiddish)
        // PriorityQueue<LetterSequence> holdoverHeapWithoutDash = new PriorityQueue<LetterSequence>(holdoverWithoutDash);
        // LetterSequence bestHoldoverSequenceWithoutDash = this.chooseMostLikelyWord(holdoverHeapWithoutDash, n);
        // Changed it to the following:
        LetterSequence bestHoldoverSequence = this.chooseMostLikelyWord(holdoverHeap, n);
        LetterSequence bestNextRowSequence = this.chooseMostLikelyWord(heap, n);

        if (LOG.isDebugEnabled()) {
            LOG.debug("Best combined: " + bestCombinedSequence.toString() + ". Adjusted score: "
                    + bestCombinedSequence.getAdjustedScore());
            LOG.debug("Best seq1 separate: " + bestHoldoverSequence.toString() + ". Adjusted score: "
                    + bestHoldoverSequence.getAdjustedScore());
            LOG.debug("Best seq2 separate: " + bestNextRowSequence.toString() + ". Adjusted score: "
                    + bestNextRowSequence.getAdjustedScore());
        }

        // Now, to compare the best combined with the best separate scores, we need to get a geometric mean
        // of the shapes in the best separate ones, and adjust for the lowest frequency word
        LetterSequence separateSequence = this.letterGuesserService.getLetterSequence(bestHoldoverSequence,
                bestNextRowSequence);
        int minFrequency = bestHoldoverSequence.getFrequency() < bestNextRowSequence.getFrequency()
                ? bestHoldoverSequence.getFrequency()
                : bestNextRowSequence.getFrequency();
        double freqLog = this.getFrequencyAdjustment(minFrequency);
        double separateAdjustedScore = separateSequence.getScore() * freqLog + additiveSmoothing;
        separateSequence.setAdjustedScore(separateAdjustedScore);
        if (LOG.isDebugEnabled())
            LOG.debug("Best separate: " + separateSequence.toString() + ". Score: " + separateSequence.getScore()
                    + ". Freq: " + minFrequency + ". Adjusted: " + freqLog + ". Adjusted score: "
                    + separateSequence.getAdjustedScore());

        if (bestCombinedSequence.getAdjustedScore() > separateAdjustedScore) {
            if (LOG.isDebugEnabled())
                LOG.debug("Using combined sequence");
            bestSequence = bestCombinedSequence;
        } else {
            if (LOG.isDebugEnabled())
                LOG.debug("Using separate sequences");
            bestSequence = this.getLetterGuesserService().getLetterSequence(bestHoldoverSequence,
                    bestNextRowSequence);
        }
        if (LOG.isDebugEnabled())
            LOG.debug("Best with holdover: " + bestSequence.toString());
    }

    return bestSequence;
}