List of usage examples for java.util PriorityQueue add
public boolean add(E e)
From source file:de.tu_berlin.dima.aim3.querysuggestion.QuerySuggTCase.java
/** * Print results from the hdfs//from w w w .jav a 2 s . c o m * * @param resultPath */ protected void printResults(String resultPath) throws Exception { ArrayList<String> resultFiles = new ArrayList<String>(); // Determine all result files if (getFilesystemProvider().isDir(resultPath)) { for (String file : getFilesystemProvider().listFiles(resultPath)) { if (!getFilesystemProvider().isDir(file)) { resultFiles.add(resultPath + "/" + file); } } } else { resultFiles.add(resultPath); } // collect lines of all result files PriorityQueue<String> computedResult = new PriorityQueue<String>(); for (String resultFile : resultFiles) { // read each result file InputStream is = getFilesystemProvider().getInputStream(resultFile); BufferedReader reader = new BufferedReader(new InputStreamReader(is)); String line = reader.readLine(); // collect lines while (line != null) { computedResult.add(line); line = reader.readLine(); } reader.close(); } // Assert.assertEquals("Computed and expected results have different size", // expectedResult.size(), computedResult.size()); System.out.println("RESULTS:"); while (!computedResult.isEmpty()) { String computedLine = computedResult.poll(); System.out.println(computedLine); // if (LOG.isDebugEnabled()) // LOG.debug("compLine: <" + computedLine + ">"); // System.out.println("compLine: <" + computedLine + ">"); // Assert.assertEquals("Computed and expected lines differ", // expectedLine, computedLine); } }
From source file:com.android.switchaccess.HuffmanTreeBuilder.java
/**
 * Builds a HuffmanNode for every actionable node under {@code windowRoot} and
 * collects them in a priority queue ordered ascending by probability. The
 * HuffmanNode internally keeps track of the probability of its node.
 *
 * @param userContext the actions the user has taken so far; for an IME this is
 *        what the user has typed so far
 * @param windowRoot the root of the tree of SwitchAccessNodeCompats
 * @return a PriorityQueue holding a HuffmanNode per single-action node, sorted
 *         ascending by probability; empty if {@code windowRoot} contains no
 *         clickable nodes
 */
private PriorityQueue<HuffmanNode> getOptionScanNodeProbabilities(String userContext,
        SwitchAccessNodeCompat windowRoot) {
    LinkedList<SwitchAccessNodeCompat> orderedNodes =
            TreeBuilderUtils.getNodesInTalkBackOrder(windowRoot);
    Set<SwitchAccessNodeCompat> candidateNodes = new HashSet<>(orderedNodes);
    Map<SwitchAccessNodeCompat, Double> probabilities =
            mProbabilityModelReader.getProbabilityDistribution(userContext, candidateNodes);

    PriorityQueue<HuffmanNode> nodeProbabilities = new PriorityQueue<>();
    for (SwitchAccessNodeCompat candidate : candidateNodes) {
        Double probability = probabilities.get(candidate);
        List<AccessibilityNodeActionNode> actions =
                TreeBuilderUtils.getCompatActionNodes(candidate);
        /* TODO(rmorina): need to think about the correct behaviour when there are more
         * than one actions associated with a node */
        if (actions.size() == 1) {
            nodeProbabilities.add(new HuffmanNode(actions.get(0), probability));
        }
        candidate.recycle();
    }
    return nodeProbabilities;
}
From source file:eu.stratosphere.pact.test.util.TestBase.java
/** * Compares the expectedResultString and the file(s) in the HDFS linewise. * Both results (expected and computed) are held in memory. Hence, this * method should not be used to compare large results. * /* ww w . j a v a 2 s . c o m*/ * The line comparator is used to compare lines from the expected and result set. * * @param expectedResult * @param hdfsPath * @param comp Line comparator */ protected void compareResultsByLinesInMemory(String expectedResultStr, String resultPath, Comparator<String> comp) throws Exception { ArrayList<String> resultFiles = new ArrayList<String>(); // Determine all result files if (getFilesystemProvider().isDir(resultPath)) { for (String file : getFilesystemProvider().listFiles(resultPath)) { if (!getFilesystemProvider().isDir(file)) { resultFiles.add(resultPath + "/" + file); } } } else { resultFiles.add(resultPath); } // collect lines of all result files PriorityQueue<String> computedResult = new PriorityQueue<String>(); for (String resultFile : resultFiles) { // read each result file InputStream is = getFilesystemProvider().getInputStream(resultFile); BufferedReader reader = new BufferedReader(new InputStreamReader(is)); String line = reader.readLine(); // collect lines while (line != null) { computedResult.add(line); line = reader.readLine(); } reader.close(); } PriorityQueue<String> expectedResult = new PriorityQueue<String>(); StringTokenizer st = new StringTokenizer(expectedResultStr, "\n"); while (st.hasMoreElements()) { expectedResult.add(st.nextToken()); } // log expected and computed results if (LOG.isDebugEnabled()) { LOG.debug("Expected: " + expectedResult); LOG.debug("Computed: " + computedResult); } Assert.assertEquals("Computed and expected results have different size", expectedResult.size(), computedResult.size()); while (!expectedResult.isEmpty()) { String expectedLine = expectedResult.poll(); String computedLine = computedResult.poll(); if (LOG.isDebugEnabled()) LOG.debug("expLine: <" + expectedLine + ">\t\t: 
compLine: <" + computedLine + ">"); Assert.assertEquals("Computed and expected lines differ", expectedLine, computedLine); } }
From source file:org.apache.accumulo.core.file.blockfile.cache.LruBlockCache.java
/** * Eviction method./*from ww w. ja v a 2 s .c o m*/ */ void evict() { // Ensure only one eviction at a time if (!evictionLock.tryLock()) return; try { evictionInProgress = true; long bytesToFree = size.get() - minSize(); LOG.debug("Block cache LRU eviction started. Attempting to free " + bytesToFree + " bytes"); if (bytesToFree <= 0) return; // Instantiate priority buckets BlockBucket bucketSingle = new BlockBucket(bytesToFree, blockSize, singleSize()); BlockBucket bucketMulti = new BlockBucket(bytesToFree, blockSize, multiSize()); BlockBucket bucketMemory = new BlockBucket(bytesToFree, blockSize, memorySize()); // Scan entire map putting into appropriate buckets for (CachedBlock cachedBlock : map.values()) { switch (cachedBlock.getPriority()) { case SINGLE: { bucketSingle.add(cachedBlock); break; } case MULTI: { bucketMulti.add(cachedBlock); break; } case MEMORY: { bucketMemory.add(cachedBlock); break; } } } PriorityQueue<BlockBucket> bucketQueue = new PriorityQueue<BlockBucket>(3); bucketQueue.add(bucketSingle); bucketQueue.add(bucketMulti); bucketQueue.add(bucketMemory); int remainingBuckets = 3; long bytesFreed = 0; BlockBucket bucket; while ((bucket = bucketQueue.poll()) != null) { long overflow = bucket.overflow(); if (overflow > 0) { long bucketBytesToFree = Math.min(overflow, (long) Math.ceil((bytesToFree - bytesFreed) / (double) remainingBuckets)); bytesFreed += bucket.free(bucketBytesToFree); } remainingBuckets--; } float singleMB = ((float) bucketSingle.totalSize()) / ((float) (1024 * 1024)); float multiMB = ((float) bucketMulti.totalSize()) / ((float) (1024 * 1024)); float memoryMB = ((float) bucketMemory.totalSize()) / ((float) (1024 * 1024)); LOG.debug("Block cache LRU eviction completed. " + "Freed " + bytesFreed + " bytes. 
" + "Priority Sizes: " + "Single=" + singleMB + "MB (" + bucketSingle.totalSize() + "), " + "Multi=" + multiMB + "MB (" + bucketMulti.totalSize() + ")," + "Memory=" + memoryMB + "MB (" + bucketMemory.totalSize() + ")"); } finally { stats.evict(); evictionInProgress = false; evictionLock.unlock(); } }
From source file:at.illecker.hama.hybrid.examples.onlinecf.OnlineCF.java
public List<KeyValuePair<Long, Double>> getMostSimilarItems(long item, int count) { Comparator<KeyValuePair<Long, Double>> similarityComparator = new Comparator<KeyValuePair<Long, Double>>() { @Override// www. j av a 2s. c om public int compare(KeyValuePair<Long, Double> arg0, KeyValuePair<Long, Double> arg1) { double difference = arg0.getValue().doubleValue() - arg1.getValue().doubleValue(); return (int) (100000 * difference); } }; PriorityQueue<KeyValuePair<Long, Double>> queue = new PriorityQueue<KeyValuePair<Long, Double>>(count, similarityComparator); LinkedList<KeyValuePair<Long, Double>> results = new LinkedList<KeyValuePair<Long, Double>>(); for (Long candidateItem : m_modelItemFactorizedValues.keySet()) { double similarity = calculateItemSimilarity(item, candidateItem); KeyValuePair<Long, Double> targetItem = new KeyValuePair<Long, Double>(candidateItem, similarity); queue.add(targetItem); } results.addAll(queue); return results; }
From source file:at.illecker.hama.hybrid.examples.onlinecf.OnlineCF.java
public List<KeyValuePair<Long, Double>> getMostSimilarUsers(long user, int count) { Comparator<KeyValuePair<Long, Double>> similarityComparator = new Comparator<KeyValuePair<Long, Double>>() { @Override//from w w w. j a v a2 s .c om public int compare(KeyValuePair<Long, Double> arg0, KeyValuePair<Long, Double> arg1) { double difference = arg0.getValue().doubleValue() - arg1.getValue().doubleValue(); return (int) (100000 * difference); } }; PriorityQueue<KeyValuePair<Long, Double>> queue = new PriorityQueue<KeyValuePair<Long, Double>>(count, similarityComparator); LinkedList<KeyValuePair<Long, Double>> results = new LinkedList<KeyValuePair<Long, Double>>(); for (Long candidateUser : m_modelUserFactorizedValues.keySet()) { double similarity = calculateUserSimilarity(user, candidateUser); KeyValuePair<Long, Double> targetUser = new KeyValuePair<Long, Double>(candidateUser, similarity); queue.add(targetUser); } results.addAll(queue); return results; }
From source file:com.github.wolfdogs.kemono.util.event.RootEventManager.java
@Override public <T extends Event> void dispatchEvent(ThrowableHandler throwableHandler, T event, Object... objects) { if (throwableHandler == null) throwableHandler = DEFAULT_THROWABLE_HANDLER; if (objects.length == 0) objects = new Object[] { new Object() }; Class<? extends Event> type = event.getClass(); PriorityQueue<HandlerEntry> handlerEntryQueue = new PriorityQueue<HandlerEntry>(16, HANDLER_ENTRY_PRIORITY_COMPARATOR); Map<Object, Queue<HandlerEntry>> objectEntriesMap = handlerEntryContainersMap.get(type); if (objectEntriesMap == null) return;//from w w w . ja va2s. c om for (Object object : objects) { Class<?> cls = object.getClass(); Queue<HandlerEntry> entries = objectEntriesMap.get(object); if (entries != null) { for (HandlerEntry entry : entries) handlerEntryQueue.add(entry); } Class<?>[] interfaces = cls.getInterfaces(); for (Class<?> clz : interfaces) { Queue<HandlerEntry> classEntries = objectEntriesMap.get(clz); if (classEntries != null) { for (HandlerEntry entry : classEntries) handlerEntryQueue.add(entry); } } for (Class<?> clz = cls; clz != null; clz = clz.getSuperclass()) { Queue<HandlerEntry> classEntries = objectEntriesMap.get(clz); if (classEntries != null) { for (HandlerEntry entry : classEntries) handlerEntryQueue.add(entry); } } } while (handlerEntryQueue.isEmpty() == false && event.isInterrupted() == false) { HandlerEntry entry = handlerEntryQueue.poll(); EventHandler handler = entry.getHandler(); if (handler == null) continue; try { handler.handleEvent(event); } catch (Throwable e) { throwableHandler.handleThrowable(e); } } }
From source file:com.joliciel.csvLearner.features.BestFeatureFinder.java
public List<NameValuePair> getBestFeatures(GenericEvents events, String testOutcome, int featureCount) { LOG.debug("testOutcome: " + testOutcome); List<NameValuePair> bestFeatures = new ArrayList<NameValuePair>(); RealValueFeatureEvaluator evaluator = new RealValueFeatureEvaluator(); evaluator.setFeatureSplitter(featureSplitter); try {//from w w w . j a va 2s . c om Set<String> features = events.getFeatures(); PriorityQueue<NameValuePair> heap = new PriorityQueue<NameValuePair>(features.size(), new NameValueDescendingComparator()); double eventSpaceEntropy = -1; for (String feature : features) { List<Double> featureEntropies = evaluator.evaluateFeature(events, feature, testOutcome); double informationGain = featureEntropies.get(0) - featureEntropies.get(featureEntropies.size() - 1); if (eventSpaceEntropy < 0) eventSpaceEntropy = featureEntropies.get(0); NameValuePair pair = new NameValuePair(feature, informationGain); heap.add(pair); } bestFeatures.add(new NameValuePair(TOTAL_ENTROPY, eventSpaceEntropy)); for (int i = 0; i < featureCount; i++) { NameValuePair pair = heap.poll(); if (pair == null) break; LOG.debug("feature: " + pair.getName() + ", " + pair.getValue()); bestFeatures.add(pair); } heap = null; } finally { evaluator.logPerformance(); } return bestFeatures; }
From source file:com.mentor.questa.vrm.jenkins.QuestaVrmHostAction.java
/**
 * Builds a per-host concurrency chart dataset for the regression results:
 * for each host, the number of simultaneously running actions/tests over time
 * (seconds since regression begin). Uses a sweep over start/done events held
 * in a priority queue; each start contributes +1 and each completion -1.
 */
private CategoryDataset buildDataSet(StaplerRequest req) {
    // Non-test actions are included only when requested via parameter or cookie.
    boolean showAction = Boolean.valueOf(req.getParameter("showActions")) || getActionCookie(req);
    DataSetBuilder<String, Long> dsb = new DataSetBuilder<String, Long>();

    PriorityQueue<Pair> pq = new PriorityQueue<Pair>();
    HashMap<String, Integer> hostCount = new HashMap<String, Integer>();
    for (TestResult temp : getRegressionResult().getActions()) {
        QuestaVrmAbstractResult action = (QuestaVrmAbstractResult) temp;
        if (showAction || action instanceof QuestaVrmTestResult) {
            // Skip entries with no recorded start or completion time.
            if (action.getStartTime() == -1 || action.getDoneTime() == -1) {
                continue;
            }
            // Two sweep events per action: +1 at start, -1 at completion.
            pq.add(new Pair(action.getStartTimeDate(), action.getHost(), 1));
            pq.add(new Pair(action.getDoneTimeDate(), action.getHost(), -1));
            hostCount.put(action.getHost(), 0);
        }
    }
    if (pq.isEmpty()) {
        return dsb.build();
    }
    long offset = getRegressionResult().getRegressionBegin().getTime();
    int noOfTests;
    HashSet<String> visited = new HashSet<String>();
    // Sweep over the events in timestamp order.
    while (!pq.isEmpty()) {
        long currentKey = pq.peek().date.getTime();
        // Process every event that shares this timestamp.
        while (!pq.isEmpty() && pq.peek().date.getTime() == currentKey) {
            Pair current = pq.peek();
            noOfTests = hostCount.get(current.host);
            // Fold together all events equal to `current` (presumably the same
            // timestamp and host -- depends on Pair.compareTo; confirm).
            while (!pq.isEmpty() && pq.peek().compareTo(current) == 0) {
                noOfTests += pq.poll().diff;
            }
            dsb.add(noOfTests, current.host, (current.date.getTime() - offset) / 1000);
            hostCount.put(current.host, noOfTests);
            visited.add(current.host);
        }
        // Hosts with no event at this timestamp keep their previous count;
        // emit it so every series has a point at every timestamp.
        for (String host : hostCount.keySet()) {
            if (!visited.contains(host)) {
                dsb.add(hostCount.get(host), host, (currentKey - offset) / 1000);
            }
        }
        visited.clear();
    }
    return dsb.build();
}
From source file:edu.snu.leader.hierarchy.simple.test.TestIndividual.java
/** * Initialize the individual//from w ww . java2 s. c om * * @param allIndividuals */ public void initialize(List<TestIndividual> allIndividuals) { // Basically, we just need to find our neighbors // Build a priority queue to sort things for us PriorityQueue<TestNeighbor> sortedNeighbors = new PriorityQueue<TestNeighbor>(); // Iterate through all the individuals Iterator<TestIndividual> indIter = allIndividuals.iterator(); while (indIter.hasNext()) { // Get the individual TestIndividual ind = indIter.next(); // If it is us, continue on if (_id.equals(ind._id)) { continue; } // Build a neighbor out of it and put it in the queue TestNeighbor neighbor = new TestNeighbor((float) _location.distance(ind._location), ind); sortedNeighbors.add(neighbor); } // Get the "nearest" neighbors int count = Math.min(sortedNeighbors.size(), _nearestNeighborCount); for (int i = 0; i < count; i++) { _nearestNeighbors.add(sortedNeighbors.poll()); } }