List of usage examples for java.util.PriorityQueue
public PriorityQueue(SortedSet<? extends E> c)
(The examples below exercise several PriorityQueue constructors, including the capacity-only, comparator, and collection forms, not only the SortedSet form shown above.)
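Since none of the examples below happens to call the SortedSet constructor itself, here is a minimal sketch of that form; the class name and values are illustrative only. The queue takes its initial elements from the set, and with no comparator involved both collections use natural ordering:

import java.util.PriorityQueue;
import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetConstructorDemo {
    public static void main(String[] args) {
        // The PriorityQueue copies its initial elements from the SortedSet;
        // TreeSet here uses natural ordering, and so will the queue.
        SortedSet<Integer> sorted = new TreeSet<>();
        sorted.add(3);
        sorted.add(1);
        sorted.add(2);
        PriorityQueue<Integer> pq = new PriorityQueue<>(sorted);
        while (!pq.isEmpty()) {
            System.out.print(pq.poll() + " "); // 1 2 3
        }
    }
}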
From source file:io.warp10.script.functions.OPTDTW.java
@Override
public Object apply(WarpScriptStack stack) throws WarpScriptException {
    Object o = stack.pop();

    if (!(o instanceof Number)) {
        throw new WarpScriptException(getName() + " expects a count of best results to return on top of the stack.");
    }

    int count = ((Number) o).intValue();

    o = stack.pop();

    if (!(o instanceof List)) {
        throw new WarpScriptException(getName() + " expects a numeric list to use as query below the count.");
    }

    double[] query = new double[((List) o).size()];
    int i = 0;
    for (Object oo : (List) o) {
        query[i++] = ((Number) oo).doubleValue();
    }

    // Z-Normalize query
    double[] musigma = DoubleUtils.musigma(query, true);
    for (i = 0; i < query.length; i++) {
        query[i] = (query[i] - musigma[0]) / musigma[1];
    }

    o = stack.pop();

    if (!(o instanceof List)) {
        throw new WarpScriptException(getName()
                + " expects a numeric list as the sequence in which to find best matches below the 'query' list.");
    }

    double[] sequence = new double[((List) o).size()];
    i = 0;
    for (Object oo : (List) o) {
        sequence[i++] = ((Number) oo).doubleValue();
    }

    if (sequence.length <= query.length) {
        throw new WarpScriptException(getName() + " expects the query list to be shorter than the sequence list.");
    }

    double mindist = 0.0;

    // Min-heap of (offset, distance) pairs, ordered by ascending distance
    PriorityQueue<Pair<Integer, Double>> distances = new PriorityQueue<Pair<Integer, Double>>(
            new Comparator<Pair<Integer, Double>>() {
                @Override
                public int compare(Pair<Integer, Double> o1, Pair<Integer, Double> o2) {
                    return o1.getValue().compareTo(o2.getValue());
                }
            });

    double[] subsequence = new double[query.length];

    for (i = 0; i <= sequence.length - query.length; i++) {
        System.arraycopy(sequence, i, subsequence, 0, query.length);

        // Z-Normalize the subsequence
        musigma = DoubleUtils.musigma(subsequence, true);
        for (int j = 0; j < subsequence.length; j++) {
            subsequence[j] = (subsequence[j] - musigma[0]) / musigma[1];
        }

        double dist = dtw.compute(query, 0, query.length, subsequence, 0, query.length, mindist);

        if (dist < 0) {
            continue;
        }

        distances.add(new Pair<Integer, Double>(i, dist));

        //
        // If the priority queue is of 'count' size, retrieve the largest distance and
        // use it as the threshold for the DTW computation
        //
        if (count > 0 && distances.size() >= count) {
            Object[] adist = distances.toArray();
            mindist = ((Pair<Integer, Double>) adist[count - 1]).getValue();
        }
    }

    List<List<Object>> results = new ArrayList<List<Object>>();

    while (!distances.isEmpty()) {
        Pair<Integer, Double> entry = distances.poll();
        List<Object> result = new ArrayList<Object>();
        result.add(entry.getKey());
        result.add(entry.getValue());
        results.add(result);
        if (count > 0 && count == results.size()) {
            break;
        }
    }

    stack.push(results);

    return stack;
}
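The OPTDTW function above uses a comparator-based PriorityQueue as a min-heap of (offset, distance) pairs. One detail worth isolating: PriorityQueue.toArray() returns elements in internal heap layout, not sorted order, so indexing that array (as the threshold update above does) does not reliably yield the count-th smallest distance. A minimal sketch of a bounded top-k selection that stays within the PriorityQueue contract; the names are hypothetical:

import java.util.Comparator;
import java.util.PriorityQueue;

public class TopKDemo {
    // Keep the k smallest values seen so far in a bounded max-heap: the head
    // is the largest of the k kept, so it is both the eviction candidate and
    // the current "threshold" value, available in O(1).
    public static PriorityQueue<Double> topKSmallest(double[] values, int k) {
        PriorityQueue<Double> heap = new PriorityQueue<>(k, Comparator.reverseOrder());
        for (double v : values) {
            if (heap.size() < k) {
                heap.add(v);
            } else if (v < heap.peek()) {
                heap.poll();
                heap.add(v);
            }
        }
        return heap;
    }

    public static void main(String[] args) {
        System.out.println(topKSmallest(new double[] { 5, 1, 4, 2, 3 }, 3)); // contains 1.0, 2.0, 3.0
    }
}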
From source file:com.datatorrent.lib.util.AbstractBaseSortOperator.java
public void initializeQueue() {
    pqueue = new PriorityQueue<K>(getSize());
}
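The getSize() argument above is only an initial capacity. PriorityQueue grows its backing array on demand, so the constructor argument never caps the number of elements; a short sketch:

import java.util.PriorityQueue;

public class CapacityDemo {
    public static void main(String[] args) {
        // Initial capacity is a sizing hint, not a bound: the queue grows as needed.
        PriorityQueue<Integer> pq = new PriorityQueue<>(2);
        for (int i = 10; i >= 1; i--) {
            pq.add(i);
        }
        System.out.println(pq.size()); // 10
        System.out.println(pq.peek()); // 1, natural ordering puts the smallest on top
    }
}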
From source file:com.joliciel.csvLearner.features.RealValueFeatureEvaluator.java
/**
 * For a given feature, calculate the entropy after each level of splitting.
 * Level 0: the entropy taking into account only those events which have a value, as opposed to those which don't.
 * Level 1: entropy for events without a value (where value=0) + entropy of other events after the first split.
 * Level 2: entropy for events without a value (where value=0) + entropy of other events after the second split.
 * etc.
 * @param events the list of events
 * @param feature the feature to consider for splitting
 * @return the entropy at each level of splitting
 */
public List<Double> evaluateFeature(GenericEvents events, String feature, String testOutcome) {
    long startTime = (new Date()).getTime();
    if (LOG.isTraceEnabled()) {
        LOG.trace("Evaluating feature: " + feature);
        LOG.trace("Test outcome: " + testOutcome);
    }
    long startTimeInitialise = (new Date()).getTime();

    PriorityQueue<NameValuePair> heap = new PriorityQueue<NameValuePair>(events.size());

    Collection<NameValuePair> featureValues = new ArrayList<NameValuePair>();
    Map<String, Integer> eventOutcomeMap = new TreeMap<String, Integer>();
    Map<String, Integer> featureOutcomeMap = new TreeMap<String, Integer>();
    Map<String, Integer> nonFeatureOutcomeMap = new TreeMap<String, Integer>();

    List<String> outcomes = null;
    if (testOutcome == null) {
        Set<String> outcomeSet = events.getOutcomes();
        outcomes = new ArrayList<String>(outcomeSet);
    } else {
        outcomes = new ArrayList<String>();
        outcomes.add(testOutcome);
        outcomes.add("");
    }
    int[] eventOutcomeCounts = new int[outcomes.size()];
    int[] featureOutcomeCounts = new int[outcomes.size()];
    int[] nonFeatureOutcomeCounts = new int[outcomes.size()];

    int eventCount = events.size();
    int featureCount = 0;

    for (GenericEvent event : events) {
        if (!event.isTest()) {
            String outcome = event.getOutcome();
            int outcomeIndex = 0;
            if (testOutcome == null) {
                outcomeIndex = outcomes.indexOf(outcome);
            } else {
                if (!outcome.equals(testOutcome)) {
                    outcome = "";
                    outcomeIndex = 1;
                } else {
                    outcomeIndex = 0;
                }
            }

            long startTimeFindFeature = (new Date()).getTime();
            int featureIndex = event.getFeatureIndex(feature);
            long endTimeFindFeature = (new Date()).getTime();
            totalTimeFindFeature += (endTimeFindFeature - startTimeFindFeature);

            if (featureIndex >= 0) {
                long startTimeOrdering = (new Date()).getTime();
                heap.add(new NameValuePair(outcome, event.getWeights().get(featureIndex)));
                long endTimeOrdering = (new Date()).getTime();
                totalTimeOrdering += (endTimeOrdering - startTimeOrdering);
                featureOutcomeCounts[outcomeIndex]++;
                featureCount++;
            } else {
                nonFeatureOutcomeCounts[outcomeIndex]++;
            }
            eventOutcomeCounts[outcomeIndex]++;
        }
    }
    int nonFeatureCount = eventCount - featureCount;

    // Drain the heap so the feature values come out in ascending order
    long startTimeOrdering = (new Date()).getTime();
    while (!heap.isEmpty())
        featureValues.add(heap.poll());
    long endTimeOrdering = (new Date()).getTime();
    totalTimeOrdering += (endTimeOrdering - startTimeOrdering);

    int i = 0;
    for (String outcome : outcomes) {
        eventOutcomeMap.put(outcome, eventOutcomeCounts[i]);
        featureOutcomeMap.put(outcome, featureOutcomeCounts[i]);
        nonFeatureOutcomeMap.put(outcome, nonFeatureOutcomeCounts[i]);
        i++;
    }

    long endTimeInitialise = (new Date()).getTime();
    totalTimeInitialise += (endTimeInitialise - startTimeInitialise);

    long startTimeInitialEntropy = (new Date()).getTime();
    double eventSpaceEntropy = EntropyCalculator.getEntropy(eventOutcomeMap.values(), eventCount);
    double featureEntropy = EntropyCalculator.getEntropy(featureOutcomeMap.values(), featureCount);
    double nonFeatureEntropy = EntropyCalculator.getEntropy(nonFeatureOutcomeMap.values(), nonFeatureCount);
    long endTimeInitialEntropy = (new Date()).getTime();
    totalTimeInitialEntropy += (endTimeInitialEntropy - startTimeInitialEntropy);

    List<Double> entropyByLevel = new ArrayList<Double>();
    entropyByLevel.add(eventSpaceEntropy);

    double proportionalFeatureEntropy = ((double) featureCount / (double) eventCount) * featureEntropy;
    double proportionalNonFeatureEntropy = ((double) nonFeatureCount / (double) eventCount) * nonFeatureEntropy;
    double level0Entropy = proportionalFeatureEntropy + proportionalNonFeatureEntropy;
    entropyByLevel.add(level0Entropy);

    if (LOG.isTraceEnabled()) {
        LOG.trace("eventSpaceEntropy: " + eventSpaceEntropy);
        LOG.trace("proportionalFeatureEntropy: " + proportionalFeatureEntropy);
        LOG.trace("proportionalNonFeatureEntropy: " + proportionalNonFeatureEntropy);
        LOG.trace("level 0 Entropy: " + level0Entropy);
    }

    List<NameValuePair> featureValueList = new ArrayList<NameValuePair>(featureValues);
    long startTimeSplit = (new Date()).getTime();
    featureSplitter.split(featureValueList);
    long endTimeSplit = (new Date()).getTime();
    totalTimeSplit += (endTimeSplit - startTimeSplit);

    Map<Integer, Set<Split>> splitsByDepth = featureSplitter.getSplitsByDepth();

    for (int level : splitsByDepth.keySet()) {
        double levelEntropy = proportionalNonFeatureEntropy;
        if (splitsByDepth.get(level).size() == 0)
            levelEntropy += proportionalFeatureEntropy;
        else {
            for (Split split : splitsByDepth.get(level)) {
                long startTimeSplitEntropy = (new Date()).getTime();
                double proportionalEntropy = ((double) split.getSize() / (double) eventCount)
                        * split.getEntropy();
                long endTimeSplitEntropy = (new Date()).getTime();
                totalTimeSplitEntropy += (endTimeSplitEntropy - startTimeSplitEntropy);
                levelEntropy += proportionalEntropy;
            }
        }
        entropyByLevel.add(levelEntropy);
        if (LOG.isTraceEnabled())
            LOG.trace("level " + level + " Entropy: " + levelEntropy);
    }
    long endTime = (new Date()).getTime();
    totalTime += (endTime - startTime);
    return entropyByLevel;
}
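The evaluator above relies on a point that is easy to miss: a PriorityQueue only exposes sorted order when drained with poll(); its iterator walks the internal heap layout. A minimal sketch of the same drain-to-sort pattern:

import java.util.ArrayList;
import java.util.List;
import java.util.PriorityQueue;

public class DrainDemo {
    public static void main(String[] args) {
        PriorityQueue<String> heap = new PriorityQueue<>(List.of("pear", "apple", "fig"));
        // Iteration exposes the internal heap layout, which is generally
        // not sorted order...
        System.out.println(new ArrayList<>(heap)); // e.g. [apple, pear, fig]
        // ...so draining with poll() is the way to read elements in priority
        // order, exactly as the evaluator above does before splitting.
        List<String> sorted = new ArrayList<>();
        while (!heap.isEmpty()) {
            sorted.add(heap.poll());
        }
        System.out.println(sorted); // [apple, fig, pear]
    }
}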
From source file:org.apache.hama.computemodel.mapreduce.ShuffleAndDistribute.java
@Override
protected void compute(
        BSPPeer<NullWritable, NullWritable, K2, V2, WritableKeyValues<? extends WritableComparable<?>, ? extends Writable>> peer)
        throws IOException {

    int peerId = peer.getPeerId();
    Configuration conf = peer.getConfiguration();

    this.memoryQueue = (PriorityQueue<WritableKeyValues<K2, V2>>) peer.getSavedObject(Mapper.MESSAGE_QUEUE);
    this.globalKeyDistribution = (long[][]) peer.getSavedObject(Mapper.KEY_DIST);

    WritableKeyValues<WritableKeyValues<IntWritable, IntWritable>, LongWritable> message;
    while ((message = (WritableKeyValues<WritableKeyValues<IntWritable, IntWritable>, LongWritable>) peer
            .getCurrentMessage()) != null) {
        int peerNo = message.getKey().getKey().get();
        int partition = message.getKey().getValue().get();
        globalKeyDistribution[peerNo][partition] += message.getValue().get();
    }

    int[] keyDistribution = new int[globalKeyDistribution[0].length];
    designateKeysToReducers(keyDistribution, globalKeyDistribution, conf);

    int myKeyCount = 0;
    for (int i = 0; i < globalKeyDistribution[0].length; ++i) {
        myKeyCount += globalKeyDistribution[peerId][i];
    }

    PriorityQueue<WritableKeyValues<K2, V2>> mergeQueue = new PriorityQueue<WritableKeyValues<K2, V2>>(
            myKeyCount);

    Partitioner<K2, V2> partitioner = (Partitioner<K2, V2>) ReflectionUtils
            .newInstance(conf.getClass(Mapper.PARTITIONER_CLASS, HashPartitioner.class), conf);

    Iterator<WritableKeyValues<K2, V2>> keyValIter = this.memoryQueue.iterator();
    String[] peerNames = peer.getAllPeerNames();
    while (keyValIter.hasNext()) {
        WritableKeyValues<K2, V2> record = keyValIter.next();
        int partition = partitioner.getPartition(record.getKey(), record.getValue(),
                peer.getNumPeers()); // should eventually be the number of reducers
        int destPeerId = keyDistribution[partition];
        if (peerId != destPeerId) {
            peer.send(peerNames[destPeerId], record);
            keyValIter.remove();
        }
    }
}
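The shuffle step above walks memoryQueue with an explicit Iterator so it can remove records destined for other peers in place. PriorityQueue supports Iterator.remove(); the traversal order is unspecified, but the heap invariant is preserved after each removal. A small sketch:

import java.util.Iterator;
import java.util.PriorityQueue;

public class IteratorRemoveDemo {
    public static void main(String[] args) {
        PriorityQueue<Integer> pq = new PriorityQueue<>();
        for (int i = 1; i <= 6; i++) {
            pq.add(i);
        }
        // Visit every element (in no particular order) and drop the even ones,
        // as the peer above drops records bound for other peers.
        Iterator<Integer> it = pq.iterator();
        while (it.hasNext()) {
            if (it.next() % 2 == 0) {
                it.remove();
            }
        }
        System.out.println(pq.poll()); // 1
    }
}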
From source file:com.example.android.wearable.quiz.MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    mGoogleApiClient = new GoogleApiClient.Builder(this).addApi(Wearable.API).addConnectionCallbacks(this)
            .addOnConnectionFailedListener(this).build();

    mFutureQuestions = new PriorityQueue<Question>(10);

    // Find UI components to be used later.
    questionEditText = (EditText) findViewById(R.id.question_text);
    choiceAEditText = (EditText) findViewById(R.id.choice_a_text);
    choiceBEditText = (EditText) findViewById(R.id.choice_b_text);
    choiceCEditText = (EditText) findViewById(R.id.choice_c_text);
    choiceDEditText = (EditText) findViewById(R.id.choice_d_text);
    choicesRadioGroup = (RadioGroup) findViewById(R.id.choices_radio_group);
    quizStatus = (TextView) findViewById(R.id.quiz_status);
    quizButtons = (LinearLayout) findViewById(R.id.quiz_buttons);
    questionsContainer = (LinearLayout) findViewById(R.id.questions_container);
    readQuizFromFileButton = (Button) findViewById(R.id.read_quiz_from_file_button);
    resetQuizButton = (Button) findViewById(R.id.reset_quiz_button);
}
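mFutureQuestions is built with the capacity-only constructor, so ordering falls back to natural ordering: Question must implement Comparable, or add() throws ClassCastException at runtime. A minimal sketch with a hypothetical stand-in for the sample's Question class:

import java.util.PriorityQueue;

public class ComparableDemo {
    // Hypothetical stand-in: without a Comparator argument, PriorityQueue
    // requires natural ordering like this.
    static class Question implements Comparable<Question> {
        final int index;
        final String text;
        Question(int index, String text) { this.index = index; this.text = text; }
        @Override public int compareTo(Question other) {
            return Integer.compare(this.index, other.index);
        }
    }

    public static void main(String[] args) {
        PriorityQueue<Question> futureQuestions = new PriorityQueue<>(10);
        futureQuestions.add(new Question(2, "second"));
        futureQuestions.add(new Question(1, "first"));
        System.out.println(futureQuestions.poll().text); // first
    }
}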
From source file:com.joliciel.jochre.analyser.BeamSearchImageAnalyser.java
public void analyseInternal(JochreImage image) {
    LOG.debug("Analysing image " + image.getId());
    if (currentMonitor != null) {
        currentMonitor.setCurrentAction("imageMonitor.analysingImage",
                new Object[] { image.getPage().getIndex() });
    }
    for (LetterGuessObserver observer : observers) {
        observer.onImageStart(image);
    }
    if (totalShapeCount < 0)
        totalShapeCount = image.getShapeCount();

    for (Paragraph paragraph : image.getParagraphs()) {
        LOG.debug("Analysing paragraph " + paragraph.getIndex() + " (id=" + paragraph.getId() + ")");
        List<LetterSequence> holdoverSequences = null;
        for (RowOfShapes row : paragraph.getRows()) {
            LOG.debug("Analysing row " + row.getIndex() + " (id=" + row.getId() + ")");
            for (GroupOfShapes group : row.getGroups()) {
                if (group.isSkip()) {
                    LOG.debug("Skipping group " + group.getIndex() + " (id=" + group.getId() + ")");
                    continue;
                }
                LOG.debug("Analysing group " + group.getIndex() + " (id=" + group.getId() + ")");

                int width = group.getRight() - group.getLeft() + 1;

                List<ShapeSequence> shapeSequences = null;
                if (boundaryDetector != null) {
                    shapeSequences = boundaryDetector.findBoundaries(group);
                } else {
                    // simply add this group's shapes
                    shapeSequences = new ArrayList<ShapeSequence>();
                    ShapeSequence shapeSequence = boundaryService.getEmptyShapeSequence();
                    for (Shape shape : group.getShapes())
                        shapeSequence.addShape(shape);
                    shapeSequences.add(shapeSequence);
                }

                // Perform a beam search to guess the most likely sequence for this word
                TreeMap<Integer, PriorityQueue<LetterSequence>> heaps = new TreeMap<Integer, PriorityQueue<LetterSequence>>();

                // prime a starter heap with the n best shape boundary analyses for this group
                PriorityQueue<LetterSequence> starterHeap = new PriorityQueue<LetterSequence>(1);
                for (ShapeSequence shapeSequence : shapeSequences) {
                    LetterSequence emptySequence = this.getLetterGuesserService()
                            .getEmptyLetterSequence(shapeSequence);
                    starterHeap.add(emptySequence);
                }
                heaps.put(0, starterHeap);

                PriorityQueue<LetterSequence> finalHeap = null;
                while (heaps.size() > 0) {
                    Entry<Integer, PriorityQueue<LetterSequence>> heapEntry = heaps.pollFirstEntry();
                    if (LOG.isTraceEnabled())
                        LOG.trace("heap for index: " + heapEntry.getKey().intValue() + ", width: " + width);
                    if (heapEntry.getKey().intValue() == width) {
                        finalHeap = heapEntry.getValue();
                        break;
                    }
                    PriorityQueue<LetterSequence> previousHeap = heapEntry.getValue();

                    // limit the breadth to K
                    int maxSequences = previousHeap.size() > this.beamWidth ? this.beamWidth
                            : previousHeap.size();

                    for (int j = 0; j < maxSequences; j++) {
                        LetterSequence history = previousHeap.poll();
                        ShapeInSequence shapeInSequence = history.getNextShape();
                        Shape shape = shapeInSequence.getShape();
                        if (LOG.isTraceEnabled()) {
                            LOG.trace("Sequence " + history + ", shape: " + shape);
                        }
                        LogUtils.logMemory(LOG);
                        int position = 0;
                        if (Linguistics.getInstance(image.getPage().getDocument().getLocale())
                                .isLeftToRight()) {
                            position = shape.getRight() - group.getLeft() + 1;
                        } else {
                            position = group.getRight() - shape.getLeft() + 1;
                        }
                        PriorityQueue<LetterSequence> heap = heaps.get(position);
                        if (heap == null) {
                            heap = new PriorityQueue<LetterSequence>();
                            heaps.put(position, heap);
                        }

                        MONITOR.startTask("guess letter");
                        try {
                            letterGuesser.guessLetter(shapeInSequence, history);
                        } finally {
                            MONITOR.endTask();
                        }

                        MONITOR.startTask("heap sort");
                        try {
                            for (Decision<Letter> letterGuess : shape.getLetterGuesses()) {
                                // leave out very low probability outcomes
                                if (letterGuess.getProbability() > this.minOutcomeWeight) {
                                    LetterSequence sequence = this.getLetterGuesserService()
                                            .getLetterSequencePlusOne(history);
                                    sequence.add(letterGuess.getOutcome());
                                    sequence.addDecision(letterGuess);
                                    heap.add(sequence);
                                } // weight big enough to include
                            } // next letter guess for this shape
                        } finally {
                            MONITOR.endTask();
                        }
                    } // next history in heap
                } // any more heaps?

                LetterSequence bestSequence = null;
                boolean shouldCombineWithHoldover = false;
                boolean isHoldover = false;
                MONITOR.startTask("best sequence");
                try {
                    List<LetterSequence> finalSequences = new ArrayList<LetterSequence>();
                    for (int i = 0; i < this.beamWidth; i++) {
                        if (finalHeap.isEmpty())
                            break;
                        finalSequences.add(finalHeap.poll());
                    }

                    if (this.getMostLikelyWordChooser() == null) {
                        // most likely sequence is on top of the last heap
                        bestSequence = finalSequences.get(0);
                    } else {
                        // get most likely sequence using lexicon
                        if (holdoverSequences != null) {
                            // we have a holdover from the previous row ending with a dash
                            bestSequence = this.getMostLikelyWordChooser().chooseMostLikelyWord(finalSequences,
                                    holdoverSequences, this.beamWidth);
                            shouldCombineWithHoldover = true;
                        } else {
                            // check if this is the last group on the row and could end with a dash
                            boolean shouldBeHeldOver = false;
                            if (group.getIndex() == row.getGroups().size() - 1
                                    && row.getIndex() < paragraph.getRows().size() - 1) {
                                for (LetterSequence letterSequence : finalSequences) {
                                    if (letterSequence.toString().endsWith("-")) {
                                        shouldBeHeldOver = true;
                                        break;
                                    }
                                }
                            }
                            if (shouldBeHeldOver) {
                                holdoverSequences = finalSequences;
                                isHoldover = true;
                            } else {
                                // simplest case: no holdover
                                bestSequence = this.getMostLikelyWordChooser()
                                        .chooseMostLikelyWord(finalSequences, this.beamWidth);
                            }
                        } // have we holdover sequences?
                    } // have we a most likely word chooser?
                    if (!isHoldover) {
                        for (LetterGuessObserver observer : observers) {
                            observer.onBeamSearchEnd(bestSequence, finalSequences, holdoverSequences);
                        }
                    }
                } finally {
                    MONITOR.endTask();
                }

                MONITOR.startTask("assign letter");
                try {
                    if (shouldCombineWithHoldover) {
                        holdoverSequences = null;
                    }
                    if (!isHoldover) {
                        for (LetterGuessObserver observer : observers) {
                            observer.onStartSequence(bestSequence);
                        }
                        group.setBestLetterSequence(bestSequence);
                        int i = 0;
                        for (ShapeInSequence shapeInSequence : bestSequence.getUnderlyingShapeSequence()) {
                            String bestOutcome = bestSequence.get(i).getString();
                            this.assignLetter(shapeInSequence, bestOutcome);
                            i++;
                        } // next shape
                        for (LetterGuessObserver observer : observers) {
                            observer.onGuessSequence(bestSequence);
                        }
                    }
                    this.shapeCount += group.getShapes().size();
                    if (this.currentMonitor != null) {
                        double progress = (double) shapeCount / (double) totalShapeCount;
                        LOG.debug("progress: " + progress);
                        currentMonitor.setPercentComplete(progress);
                    }
                } finally {
                    MONITOR.endTask();
                }
            } // next group
        } // next row
    } // next paragraph
    for (LetterGuessObserver observer : observers) {
        observer.onImageEnd();
    }
}
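The beam search above keeps one PriorityQueue of partial hypotheses per pixel position, stored in a TreeMap so that pollFirstEntry() always expands the leftmost unfinished position next. A stripped-down sketch of that TreeMap-of-heaps pattern, with strings standing in for LetterSequence and every hypothesis expanded (the real code caps each expansion at the beam width):

import java.util.Map.Entry;
import java.util.PriorityQueue;
import java.util.TreeMap;

public class BeamHeapsDemo {
    public static void main(String[] args) {
        // One heap of partial hypotheses per position, processed in
        // position order via pollFirstEntry().
        TreeMap<Integer, PriorityQueue<String>> heaps = new TreeMap<>();
        heaps.computeIfAbsent(0, k -> new PriorityQueue<>()).add("");
        int target = 2;
        while (!heaps.isEmpty()) {
            Entry<Integer, PriorityQueue<String>> entry = heaps.pollFirstEntry();
            if (entry.getKey() == target) {
                // Drain the final heap: completed hypotheses in priority order.
                PriorityQueue<String> finalHeap = entry.getValue();
                while (!finalHeap.isEmpty()) {
                    System.out.println(finalHeap.poll()); // aa, ab, ba, bb
                }
                break;
            }
            for (String hypothesis : entry.getValue()) {
                for (char c : new char[] { 'a', 'b' }) {
                    heaps.computeIfAbsent(entry.getKey() + 1, k -> new PriorityQueue<>())
                            .add(hypothesis + c);
                }
            }
        }
    }
}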
From source file:org.apache.mahout.clustering.lda.LDAPrintTopics.java
private static List<Queue<Pair<String, Double>>> topWordsForTopics(String dir, Configuration job,
        List<String> wordList, int numWordsToPrint) {
    List<Queue<Pair<String, Double>>> queues = Lists.newArrayList();
    Map<Integer, Double> expSums = Maps.newHashMap();
    for (Pair<IntPairWritable, DoubleWritable> record : new SequenceFileDirIterable<IntPairWritable, DoubleWritable>(
            new Path(dir, "part-*"), PathType.GLOB, null, null, true, job)) {
        IntPairWritable key = record.getFirst();
        int topic = key.getFirst();
        int word = key.getSecond();
        ensureQueueSize(queues, topic);
        if (word >= 0 && topic >= 0) {
            double score = record.getSecond().get();
            if (expSums.get(topic) == null) {
                expSums.put(topic, 0.0);
            }
            expSums.put(topic, expSums.get(topic) + Math.exp(score));
            String realWord = wordList.get(word);
            maybeEnqueue(queues.get(topic), realWord, score, numWordsToPrint);
        }
    }
    for (int i = 0; i < queues.size(); i++) {
        Queue<Pair<String, Double>> queue = queues.get(i);
        Queue<Pair<String, Double>> newQueue = new PriorityQueue<Pair<String, Double>>(queue.size());
        double norm = expSums.get(i);
        for (Pair<String, Double> pair : queue) {
            newQueue.add(new Pair<String, Double>(pair.getFirst(), Math.exp(pair.getSecond()) / norm));
        }
        queues.set(i, newQueue);
    }
    return queues;
}
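Note the rebuild in the second loop: PriorityQueue offers no way to reprioritize elements in place, so transforming the scores (here, exponentiating and normalizing) means constructing a fresh queue. A minimal sketch of the same rebuild pattern; the values are illustrative, and the Math.max guard covers the fact that the capacity constructor rejects 0, which an empty source queue would otherwise trigger:

import java.util.PriorityQueue;

public class RebuildDemo {
    public static void main(String[] args) {
        PriorityQueue<Double> scores = new PriorityQueue<>();
        scores.add(2.0);
        scores.add(0.5);
        // No decrease-key on PriorityQueue: changing priorities means
        // re-adding elements, or rebuilding the queue wholesale.
        PriorityQueue<Double> normalized = new PriorityQueue<>(Math.max(1, scores.size()));
        double norm = scores.stream().mapToDouble(Double::doubleValue).sum();
        for (double s : scores) {
            normalized.add(s / norm);
        }
        System.out.println(normalized.peek()); // 0.2
    }
}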
From source file:org.eclipse.tracecompass.statesystem.core.tests.perf.historytree.HistoryTreeBackendBenchmark.java
/**
 * Benchmarks creating, single querying and full querying the state system
 */
@Test
public void testBenchmark() {
    /* Check arguments */
    long totalTime = this.fNbAvgIntervals * INTERVAL_AVG_TIME;

    Performance perf = Performance.getDefault();
    PerformanceMeter pmBuild = perf.createPerformanceMeter(TEST_PREFIX + TEST_BUILDING_ID + fName);
    perf.tagAsSummary(pmBuild, TEST_BUILDING_ID + fShortName, Dimension.CPU_TIME);

    PerformanceMeter pmSingleQuery = perf.createPerformanceMeter(TEST_PREFIX + TEST_SINGLE_QUERY_ID + fName);
    perf.tagAsSummary(pmSingleQuery, TEST_SINGLE_QUERY_ID + fShortName, Dimension.CPU_TIME);

    PerformanceMeter pmFullQuery = perf.createPerformanceMeter(TEST_PREFIX + TEST_FULL_QUERY_ID + fName);
    perf.tagAsSummary(pmFullQuery, TEST_FULL_QUERY_ID + fShortName, Dimension.CPU_TIME);

    PerformanceMeter pmRangeQuery = perf.createPerformanceMeter(TEST_PREFIX + TEST_QUERY_RANGE_ID + fName);
    perf.tagAsSummary(pmRangeQuery, TEST_QUERY_RANGE_ID + fShortName, Dimension.CPU_TIME);

    for (int i = 0; i < fNbLoops; i++) {
        try {
            /* Create the state system */
            createFile();
            IStateHistoryBackend backend = StateHistoryBackendFactory.createHistoryTreeBackendNewFile(
                    TEST_BUILDING_ID, NonNullUtils.checkNotNull(fTempFile), 1, 1, QUEUE_SIZE);
            ITmfStateSystemBuilder ss = StateSystemFactory.newStateSystem(backend);

            /* Initialize the attributes */
            Queue<QuarkEvent> quarkEvents = new PriorityQueue<>(fNbAttrib);
            Random randomGenerator = new Random(SEED);
            int rootQuark = ss.getQuarkAbsoluteAndAdd(ROOT_NODE);

            /* Create all attributes before testing */
            for (int j = 0; j < fNbAttrib; j++) {
                int quark = ss.getQuarkRelativeAndAdd(rootQuark, String.valueOf(j));
                quarkEvents.add(new QuarkEvent(quark,
                        (Math.abs(randomGenerator.nextLong()) % INTERVAL_AVG_TIME) + 1,
                        fValues.getValues()));
            }

            /* Adds random intervals to the state system */
            pmBuild.start();
            while (true) {
                QuarkEvent quarkEvent = quarkEvents.poll();
                if (quarkEvent == null) {
                    break;
                }
                long eventTime = quarkEvent.getNextEventTime();
                ss.modifyAttribute(eventTime, quarkEvent.getNextValue(), quarkEvent.getQuark());
                long nextDelta = fDistributionMethod.getNextEndTime(randomGenerator, INTERVAL_AVG_TIME);
                long nextEndTime = eventTime + nextDelta;
                if (nextEndTime <= totalTime) {
                    quarkEvent.setNextEventTime(nextEndTime);
                    quarkEvents.add(quarkEvent);
                }
            }
            ss.closeHistory(totalTime);
            pmBuild.stop();

            /*
             * Benchmark the single queries: for each random timestamp,
             * query a random attribute
             */
            List<Integer> subAttributes = ss.getSubAttributes(rootQuark, false);
            pmSingleQuery.start();
            for (int j = 0; j < QUERY_COUNT; j++) {
                long ts = getNextRandomValue(randomGenerator, totalTime);
                int attrib = (int) getNextRandomValue(randomGenerator, subAttributes.size());
                ss.querySingleState(ts, attrib);
            }
            pmSingleQuery.stop();

            /* Benchmark the history range query of 10 attributes */
            pmRangeQuery.start();
            for (int j = 0; j < 10; j++) {
                int attrib = (int) getNextRandomValue(randomGenerator, subAttributes.size());
                StateSystemUtils.queryHistoryRange(ss, attrib, ss.getStartTime(), ss.getCurrentEndTime());
            }
            pmRangeQuery.stop();

            /* Benchmark the full queries */
            pmFullQuery.start();
            for (int j = 0; j < QUERY_COUNT; j++) {
                long ts = getNextRandomValue(randomGenerator, totalTime);
                ss.queryFullState(ts);
            }
            pmFullQuery.stop();

            /* Output some data on the file */
            if (i == 0) {
                if (backend instanceof HistoryTreeBackend) {
                    HistoryTreeBackend htBackend = (HistoryTreeBackend) backend;
                    System.out.println("History tree file size: "
                            + FileUtils.byteCountToDisplaySize(htBackend.getFileSize()));
                    System.out.println("Average node usage: " + htBackend.getAverageNodeUsage());
                }
            }
            deleteFile();
        } catch (IOException | StateValueTypeException | AttributeNotFoundException
                | StateSystemDisposedException e) {
            fail(e.getMessage());
        } finally {
            deleteFile();
        }
    }
    pmBuild.commit();
    pmSingleQuery.commit();
    pmFullQuery.commit();
    pmRangeQuery.commit();
}
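The benchmark drives its interval generation as a classic discrete-event loop: poll the QuarkEvent with the earliest timestamp, process it, and re-add it with a later timestamp until the horizon is passed. This works because the queue's head is always the earliest pending event, which requires QuarkEvent to be Comparable by time. A minimal sketch of the loop with a hypothetical Event class:

import java.util.PriorityQueue;

public class EventLoopDemo {
    // Hypothetical event type: Comparable by timestamp, so the earliest
    // pending event is always at the head of the queue.
    static class Event implements Comparable<Event> {
        final long time;
        final String name;
        Event(long time, String name) { this.time = time; this.name = name; }
        @Override public int compareTo(Event other) {
            return Long.compare(this.time, other.time);
        }
    }

    public static void main(String[] args) {
        long horizon = 10;
        PriorityQueue<Event> events = new PriorityQueue<>();
        events.add(new Event(1, "a"));
        events.add(new Event(5, "b"));
        // Poll the earliest event, process it, then re-schedule a follow-up,
        // as the benchmark does with its QuarkEvents.
        Event e;
        while ((e = events.poll()) != null) {
            System.out.println("t=" + e.time + " " + e.name);
            long next = e.time + 4;
            if (next <= horizon) {
                events.add(new Event(next, e.name));
            }
        }
    }
}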
From source file:org.apache.accumulo.core.file.blockfile.cache.LruBlockCache.java
/**
 * Eviction method.
 */
void evict() {

    // Ensure only one eviction at a time
    if (!evictionLock.tryLock())
        return;

    try {
        evictionInProgress = true;

        long bytesToFree = size.get() - minSize();

        LOG.debug("Block cache LRU eviction started. Attempting to free " + bytesToFree + " bytes");

        if (bytesToFree <= 0)
            return;

        // Instantiate priority buckets
        BlockBucket bucketSingle = new BlockBucket(bytesToFree, blockSize, singleSize());
        BlockBucket bucketMulti = new BlockBucket(bytesToFree, blockSize, multiSize());
        BlockBucket bucketMemory = new BlockBucket(bytesToFree, blockSize, memorySize());

        // Scan entire map putting into appropriate buckets
        for (CachedBlock cachedBlock : map.values()) {
            switch (cachedBlock.getPriority()) {
                case SINGLE: {
                    bucketSingle.add(cachedBlock);
                    break;
                }
                case MULTI: {
                    bucketMulti.add(cachedBlock);
                    break;
                }
                case MEMORY: {
                    bucketMemory.add(cachedBlock);
                    break;
                }
            }
        }

        PriorityQueue<BlockBucket> bucketQueue = new PriorityQueue<BlockBucket>(3);
        bucketQueue.add(bucketSingle);
        bucketQueue.add(bucketMulti);
        bucketQueue.add(bucketMemory);

        int remainingBuckets = 3;
        long bytesFreed = 0;

        BlockBucket bucket;
        while ((bucket = bucketQueue.poll()) != null) {
            long overflow = bucket.overflow();
            if (overflow > 0) {
                long bucketBytesToFree = Math.min(overflow,
                        (long) Math.ceil((bytesToFree - bytesFreed) / (double) remainingBuckets));
                bytesFreed += bucket.free(bucketBytesToFree);
            }
            remainingBuckets--;
        }

        float singleMB = ((float) bucketSingle.totalSize()) / ((float) (1024 * 1024));
        float multiMB = ((float) bucketMulti.totalSize()) / ((float) (1024 * 1024));
        float memoryMB = ((float) bucketMemory.totalSize()) / ((float) (1024 * 1024));

        LOG.debug("Block cache LRU eviction completed. " + "Freed " + bytesFreed + " bytes. "
                + "Priority Sizes: " + "Single=" + singleMB + "MB (" + bucketSingle.totalSize() + "), "
                + "Multi=" + multiMB + "MB (" + bucketMulti.totalSize() + "), " + "Memory=" + memoryMB
                + "MB (" + bucketMemory.totalSize() + ")");
    } finally {
        stats.evict();
        evictionInProgress = false;
        evictionLock.unlock();
    }
}
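The three-bucket queue at the end works because BlockBucket is Comparable; given how the loop recomputes the per-bucket quota as it goes, the intent is to visit buckets from least to most overflowing, so each later bucket absorbs whatever the earlier ones could not free. A stripped-down sketch of that ordering with a hypothetical Bucket class:

import java.util.PriorityQueue;

public class BucketEvictionDemo {
    // Hypothetical bucket ordered by overflow, so poll() visits the bucket
    // that is least over its share first.
    static class Bucket implements Comparable<Bucket> {
        final String name;
        final long overflow;
        Bucket(String name, long overflow) { this.name = name; this.overflow = overflow; }
        @Override public int compareTo(Bucket other) {
            return Long.compare(this.overflow, other.overflow);
        }
    }

    public static void main(String[] args) {
        PriorityQueue<Bucket> bucketQueue = new PriorityQueue<>(3);
        bucketQueue.add(new Bucket("single", 30));
        bucketQueue.add(new Bucket("multi", 10));
        bucketQueue.add(new Bucket("memory", 20));
        Bucket bucket;
        while ((bucket = bucketQueue.poll()) != null) {
            System.out.println(bucket.name); // multi, memory, single
        }
    }
}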