Usage examples for java.util.TreeMap.firstEntry()
public Map.Entry<K, V> firstEntry()
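firstEntry() returns the entry mapped to the least key in the map's sort order, or null if the map is empty; unlike pollFirstEntry(), it does not remove the entry. A minimal standalone sketch before the real-world examples (the map contents are made up for illustration):

import java.util.Map;
import java.util.TreeMap;

public class FirstEntryDemo {
    public static void main(String[] args) {
        TreeMap<Double, String> byDistance = new TreeMap<>();
        byDistance.put(3.2, "far");
        byDistance.put(0.4, "near");
        byDistance.put(1.7, "middle");

        // Entries are ordered by key, so firstEntry() holds the smallest key.
        Map.Entry<Double, String> first = byDistance.firstEntry();
        System.out.println(first.getKey() + " -> " + first.getValue()); // 0.4 -> near
    }
}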
From source file:net.sf.maltcms.chromaui.charts.overlay.Peak1DOverlay.java
/**
 * @param ce the selection change event
 */
@Override
public void selectionStateChanged(SelectionChangeEvent ce) {
    if (isVisible() && ce.getSource() != this && ce.getSelection() != null) {
        if (ce.getSelection().getType().equals(ISelection.Type.CLEAR)) {
            Logger.getLogger(getClass().getName()).fine("Received clear selection type");
            clear();
            return;
        }
        if (dataset != null) {
            IScan target = dataset.getTarget(ce.getSelection().getSeriesIndex(),
                    ce.getSelection().getItemIndex());
            // Annotations keyed by their absolute time distance to the target scan;
            // the TreeMap keeps them sorted, so firstEntry() is the closest match.
            TreeMap<Double, IPeakAnnotationDescriptor> distanceMap = new TreeMap<>();
            for (IPeakAnnotationDescriptor ipad : peakAnnotations.getMembers()) {
                double absDiff = Math.abs(ipad.getApexTime() - target.getScanAcquisitionTime());
                if (absDiff < 10.0d) {
                    distanceMap.put(absDiff, ipad);
                }
            }
            if (!distanceMap.isEmpty()) {
                IPeakAnnotationDescriptor ipad = distanceMap.firstEntry().getValue();
                if (!activeSelection.contains(ipad)) {
                    switch (ce.getSelection().getType()) {
                    case CLICK:
                        Logger.getLogger(getClass().getName()).fine("Click selection received");
                        renderer.generatePeakShape(peakAnnotations.getChromatogram(), ipad, dataset,
                                renderer.getSeriesIndex(dataset, peakAnnotations.getChromatogram()),
                                selectedPeaks);
                        activeSelection.add(ipad);
                        break;
                    case HOVER:
                        // System.out.println("Hover selection received");
                        //
                        // content.add(ipad);
                        // activeSelection.add(ipad);
                    default:
                        break;
                    }
                    fireOverlayChanged();
                }
            }
        }
    }
}
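The example above shows an idiom that recurs throughout this list (see also the RoutingActivity example below): key a TreeMap by a computed distance, then let firstEntry() select the closest candidate within a tolerance. A stripped-down sketch of the idiom; the names findNearest and tolerance are illustrative, not from the source:

import java.util.Map;
import java.util.TreeMap;

final class NearestMatch {
    /** Closest candidate to target within tolerance, or null if none qualifies. */
    static String findNearest(double target, Map<Double, String> candidatesByPosition,
            double tolerance) {
        TreeMap<Double, String> byDistance = new TreeMap<>();
        for (Map.Entry<Double, String> e : candidatesByPosition.entrySet()) {
            double absDiff = Math.abs(e.getKey() - target);
            if (absDiff < tolerance) {
                // As in the example above, candidates at exactly the same
                // distance overwrite one another.
                byDistance.put(absDiff, e.getValue());
            }
        }
        return byDistance.isEmpty() ? null : byDistance.firstEntry().getValue();
    }
}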
From source file:org.corpus_tools.peppermodules.annis.SRelation2ANNISMapper.java
protected SLayer getFirstComponentLayer(SNode node) {
    SLayer componentLayer = null;
    Set<SLayer> nodeLayer = node.getLayers();
    if (nodeLayer != null) {
        // get the layer whose name comes lexically first
        TreeMap<String, SLayer> layers = new TreeMap<>();
        for (SLayer l : nodeLayer) {
            layers.put(l.getName(), l);
        }
        if (!layers.isEmpty()) {
            componentLayer = layers.firstEntry().getValue();
        }
    }
    return componentLayer;
}
From source file:GitBackend.GitAPI.java
public RevCommit getFirstCommit(String path) {
    // this.showLogForFile(this.repository.getDirectory() + path);
    // this.showLog();
    // RevWalk walk = new RevWalk(git.getRepository());

    // Commits keyed by commit time; firstEntry() yields the oldest one.
    TreeMap<DateTime, RevCommit> allCommits = new TreeMap<DateTime, RevCommit>();
    try (Git git = new Git(repository)) {
        Iterable<RevCommit> commits = git.log().all().call();
        int count = 0;
        for (RevCommit commit : commits) {
            count++;
            DateTime commitTime = new DateTime(commit.getCommitterIdent().getWhen());
            allCommits.put(commitTime, commit);
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (NoHeadException e) {
        e.printStackTrace();
    } catch (GitAPIException e) {
        e.printStackTrace();
    }
    return allCommits.firstEntry().getValue();
}
From source file:GitBackend.GitAPI.java
public Date getFirstCommitDate(String path) {
    // this.showLogForFile(this.repository.getDirectory() + path);
    // this.showLog();
    // RevWalk walk = new RevWalk(git.getRepository());

    // Commits keyed by commit time; firstEntry() yields the oldest one.
    TreeMap<DateTime, RevCommit> allCommits = new TreeMap<DateTime, RevCommit>();
    try (Git git = new Git(repository)) {
        Iterable<RevCommit> commits = git.log().all().call();
        int count = 0;
        for (RevCommit commit : commits) {
            count++;
            DateTime commitTime = new DateTime(commit.getCommitterIdent().getWhen());
            allCommits.put(commitTime, commit);
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (NoHeadException e) {
        e.printStackTrace();
    } catch (GitAPIException e) {
        e.printStackTrace();
    }
    return allCommits.firstEntry().getKey().toDate();
}
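Both GitAPI methods above build the same TreeMap and differ only in the last line, returning the oldest commit itself versus its date. Two caveats visible in the source: the path parameter is never used, and commits with identical committer timestamps overwrite one another, since a TreeMap keeps one value per key.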
From source file:com.nextgis.rehacompdemo.RoutingActivity.java
@Override
public void onLocationChanged(Location currentLocation) {
    GeoEnvelope area = getArea(currentLocation);
    Location nextLocation = new Location(LocationManager.GPS_PROVIDER);
    Location previousLocation = new Location(LocationManager.GPS_PROVIDER);
    TreeMap<Float, Location> perpendiculars = new TreeMap<>();
    HashMap<Location, GeoLineString> snapsToSegments = new HashMap<>();

    for (GeoLineString segment : mRouteSegments) {
        if (area.intersects(segment.getEnvelope()) || area.contains(segment.getEnvelope())) {
            previousLocation.setLongitude(Geo.mercatorToWgs84SphereX(segment.getPoint(0).getX()));
            previousLocation.setLatitude(Geo.mercatorToWgs84SphereY(segment.getPoint(0).getY()));
            nextLocation.setLongitude(Geo.mercatorToWgs84SphereX(segment.getPoint(1).getX()));
            nextLocation.setLatitude(Geo.mercatorToWgs84SphereY(segment.getPoint(1).getY()));
            Location snap = snapToLine(previousLocation, nextLocation, currentLocation, false);
            Float perpendicular = currentLocation.distanceTo(snap);
            perpendiculars.put(perpendicular, snap);
            snapsToSegments.put(snap, segment);
        }
    }

    if (perpendiculars.size() > 0 && snapsToSegments.size() > 0) {
        Location snappedLocation = perpendiculars.firstEntry().getValue();
        GeoLineString segment = snapsToSegments.get(snappedLocation);
        previousLocation.setLongitude(Geo.mercatorToWgs84SphereX(segment.getPoint(0).getX()));
        previousLocation.setLatitude(Geo.mercatorToWgs84SphereY(segment.getPoint(0).getY()));
        nextLocation.setLongitude(Geo.mercatorToWgs84SphereX(segment.getPoint(1).getX()));
        nextLocation.setLatitude(Geo.mercatorToWgs84SphereY(segment.getPoint(1).getY()));
        GeoPoint point = segment.getPoint(1);

        if (snappedLocation.distanceTo(previousLocation) < snappedLocation.distanceTo(nextLocation)) {
            point = segment.getPoint(0);
            nextLocation.setLongitude(previousLocation.getLongitude());
            nextLocation.setLatitude(previousLocation.getLatitude());
        }

        if (snappedLocation.distanceTo(nextLocation) <= mActivationDistance) {
            long id = -1;
            for (IGeometryCacheItem cachePoint : mAllPoints) {
                if (point.equals(cachePoint.getGeometry()))
                    id = cachePoint.getFeatureId();
            }

            int position = mAdapter.getItemPosition(id);
            if (position != -1) {
                mSteps.requestFocusFromTouch();
                mSteps.setSelection(position);
            }
        }
    }
}
From source file:com.datatorrent.contrib.hdht.HDHTWriter.java
/**
 * Flush changes from write cache to disk. New data files will be written and meta data
 * replaced atomically. The flush frequency determines availability of changes to external
 * readers.
 *
 * @throws IOException
 */
private void writeDataFiles(Bucket bucket) throws IOException {
    BucketIOStats ioStats = getOrCretaStats(bucket.bucketKey);
    LOG.debug("Writing data files in bucket {}", bucket.bucketKey);
    // copy meta data on write
    BucketMeta bucketMetaCopy = kryo.copy(getMeta(bucket.bucketKey));

    /* Process purge requests before flushing data from cache to maintain
     * the order of purge and put operations. This makes sure that purged data
     * is removed from files before new data is added to the files. */
    HashSet<String> filesToDelete = Sets.newHashSet();
    bucketMetaCopy = processPurge(bucket, bucketMetaCopy, filesToDelete);

    // bucket keys by file
    TreeMap<Slice, BucketFileMeta> bucketSeqStarts = bucketMetaCopy.files;
    Map<BucketFileMeta, Map<Slice, Slice>> modifiedFiles = Maps.newHashMap();

    for (Map.Entry<Slice, byte[]> entry : bucket.frozenWriteCache.entrySet()) {
        // find file for key
        Map.Entry<Slice, BucketFileMeta> floorEntry = bucketSeqStarts.floorEntry(entry.getKey());
        BucketFileMeta floorFile;
        if (floorEntry != null) {
            floorFile = floorEntry.getValue();
        } else {
            floorEntry = bucketSeqStarts.firstEntry();
            if (floorEntry == null || floorEntry.getValue().name != null) {
                // no existing file or file with higher key
                floorFile = new BucketFileMeta();
            } else {
                // placeholder for new keys, move start key
                floorFile = floorEntry.getValue();
                bucketSeqStarts.remove(floorEntry.getKey());
            }
            floorFile.startKey = entry.getKey();
            if (floorFile.startKey.length != floorFile.startKey.buffer.length) {
                // normalize key for serialization
                floorFile.startKey = new Slice(floorFile.startKey.toByteArray());
            }
            bucketSeqStarts.put(floorFile.startKey, floorFile);
        }

        Map<Slice, Slice> fileUpdates = modifiedFiles.get(floorFile);
        if (fileUpdates == null) {
            modifiedFiles.put(floorFile, fileUpdates = Maps.newHashMap());
        }
        fileUpdates.put(entry.getKey(), new Slice(entry.getValue()));
    }

    // write modified files
    for (Map.Entry<BucketFileMeta, Map<Slice, Slice>> fileEntry : modifiedFiles.entrySet()) {
        BucketFileMeta fileMeta = fileEntry.getKey();
        TreeMap<Slice, Slice> fileData = new TreeMap<Slice, Slice>(getKeyComparator());

        if (fileMeta.name != null) {
            // load existing file
            long start = System.currentTimeMillis();
            FileReader reader = store.getReader(bucket.bucketKey, fileMeta.name);
            reader.readFully(fileData);
            ioStats.dataBytesRead += store.getFileSize(bucket.bucketKey, fileMeta.name);
            ioStats.dataReadTime += System.currentTimeMillis() - start;
            /* these keys are re-written */
            ioStats.dataKeysRewritten += fileData.size();
            ioStats.filesReadInCurrentWriteCycle++;
            ioStats.dataFilesRead++;
            reader.close();
            filesToDelete.add(fileMeta.name);
        }

        // apply updates
        fileData.putAll(fileEntry.getValue());
        // new file
        writeFile(bucket, bucketMetaCopy, fileData);
    }

    LOG.debug("Files written {} files read {}", ioStats.filesWroteInCurrentWriteCycle,
            ioStats.filesReadInCurrentWriteCycle);

    // flush meta data for new files
    try {
        LOG.debug("Writing {} with {} file entries", FNAME_META, bucketMetaCopy.files.size());
        OutputStream os = store.getOutputStream(bucket.bucketKey, FNAME_META + ".new");
        Output output = new Output(os);
        bucketMetaCopy.committedWid = bucket.committedLSN;
        bucketMetaCopy.recoveryStartWalPosition = bucket.recoveryStartWalPosition;
        kryo.writeClassAndObject(output, bucketMetaCopy);
        output.close();
        os.close();
        store.rename(bucket.bucketKey, FNAME_META + ".new", FNAME_META);
    } catch (IOException e) {
        throw new RuntimeException("Failed to write bucket meta data " + bucket.bucketKey, e);
    }

    // clear pending changes
    ioStats.dataKeysWritten += bucket.frozenWriteCache.size();
    // switch to new version
    this.metaCache.put(bucket.bucketKey, bucketMetaCopy);

    // delete old files
    for (String fileName : filesToDelete) {
        store.delete(bucket.bucketKey, fileName);
    }
    invalidateReader(bucket.bucketKey, filesToDelete);

    // clearing cache after invalidating readers
    bucket.frozenWriteCache.clear();

    // cleanup WAL files which are not needed anymore.
    minimumRecoveryWalPosition = bucketMetaCopy.recoveryStartWalPosition;
    for (Long bucketId : this.bucketKeys) {
        BucketMeta meta = getMeta(bucketId);
        if (meta.recoveryStartWalPosition.fileId < minimumRecoveryWalPosition.fileId
                || (meta.recoveryStartWalPosition.fileId == minimumRecoveryWalPosition.fileId
                        && meta.recoveryStartWalPosition.offset < minimumRecoveryWalPosition.offset)) {
            minimumRecoveryWalPosition = meta.recoveryStartWalPosition;
        }
    }
    this.wal.cleanup(minimumRecoveryWalPosition.fileId);
    ioStats.filesReadInCurrentWriteCycle = 0;
    ioStats.filesWroteInCurrentWriteCycle = 0;
}
From source file:com.example.camera2raw.Camera2RawFragment.java
/**
 * Retrieve the next {@link Image} from a reference counted {@link ImageReader}, retaining
 * that {@link ImageReader} until that {@link Image} is no longer in use, and set this
 * {@link Image} as the result for the next request in the queue of pending requests. If
 * all necessary information is available, begin saving the image to a file in a background
 * thread.
 *
 * @param pendingQueue the currently active requests.
 * @param reader       a reference counted wrapper containing an {@link ImageReader} from which
 *                     to acquire an image.
 */
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaverBuilder> pendingQueue,
        RefCountedAutoCloseable<ImageReader> reader) {
    synchronized (mCameraStateLock) {
        // firstEntry() gives the pending request with the lowest request id.
        Map.Entry<Integer, ImageSaverBuilder> entry = pendingQueue.firstEntry();
        ImageSaverBuilder builder = entry.getValue();

        // Increment reference count to prevent ImageReader from being closed while we
        // are saving its Images in a background thread (otherwise their resources may
        // be freed while we are writing to a file).
        if (reader == null || reader.getAndRetain() == null) {
            Log.e(TAG, "Paused the activity before we could save the image,"
                    + " ImageReader already closed.");
            pendingQueue.remove(entry.getKey());
            return;
        }

        Image image;
        try {
            image = reader.get().acquireNextImage();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Too many images queued for saving, dropping image for request: "
                    + entry.getKey());
            pendingQueue.remove(entry.getKey());
            return;
        }

        builder.setRefCountedReader(reader).setImage(image);
        handleCompletionLocked(entry.getKey(), builder, pendingQueue);
        // back Activity
        getActivity().finish();
    }
}
From source file:us.levk.math.linear.EucledianDistanceClusterer.java
public Cluster eucledian(final RealMatrix original) throws IOException {
    try (HugeRealMatrix distances = new HugeRealMatrix(original.getRowDimension(),
            original.getRowDimension())) {
        final Map<Integer, Cluster> genehash = new HashMap<Integer, Cluster>() {
            private static final long serialVersionUID = 1L;
            {
                for (int index = original.getRowDimension(); --index >= 0; put(index, new Cluster(index)))
                    ;
            }
        };
        // Distances sorted ascending, so firstEntry() is always the closest remaining
        // pair. (Note: pairs with identical distances overwrite each other in this map.)
        TreeMap<Double, int[]> sorted = new TreeMap<>();

        log.debug("Populating distance matrix");
        for (int i = 0; i < original.getRowDimension(); i++) {
            for (int j = i + 1; j < original.getRowDimension(); j++) {
                // Euclidean distance calculation.
                double total = 0;
                for (int k = 0; k < original.getColumnDimension(); k++) {
                    double left = original.getEntry(i, k);
                    double right = original.getEntry(j, k);
                    if (!isNaN(left) && !isNaN(right) && !isInfinite(left) && !isInfinite(right))
                        total += Math.pow(left - right, 2);
                }
                double distance = Math.pow(total, 0.5);

                distances.setEntry(i, j, distance);
                distances.setEntry(j, i, distance);

                int[] genePair = { i, j };
                // Enter the distance calculated and the genes measured into a
                // treemap. Will be automatically sorted.
                sorted.put(distance, genePair);
            }
        }
        log.debug("Initialized distances matrix " + distances);

        while (true) {
            // Get the first key of the TreeMap. Will be the shortest distance de facto.
            final double minkey = (Double) sorted.firstKey();
            int[] minValues = (int[]) sorted.firstEntry().getValue();

            final int value1 = minValues[0], value2 = minValues[1];
            // find
            Cluster cluster = new Cluster(genehash.get(value1), genehash.get(value2)) {
                {
                    log.debug("Generating cluster from " + value1 + " and " + value2 + " in " + genehash);
                    contains().addAll(genehash.get(value1).contains());
                    contains().addAll(genehash.get(value2).contains());
                    d(minkey);
                    log.debug("Generated cluster " + this);
                }
            };

            genehash.put(cluster.id(), cluster);
            genehash.remove(value1);
            genehash.remove(value2);

            if (genehash.size() <= 1)
                break;

            // Iterate over all the current clusters to remeasure distance with the
            // previously clustered group.
            for (Cluster c : genehash.values()) {
                // Skip measuring the new cluster with itself.
                if (c == cluster)
                    continue;

                double distance = 0;
                int n = 0;
                // Get genes from each cluster. Distance is measured from each element
                // to every element.
                for (int current : c.contains())
                    for (int created : cluster.contains()) {
                        distance += distances.getEntry(current, created);
                        n++;
                    }
                distance = distance / n;

                int[] valuePair = { c.id(), cluster.id() };
                sorted.put(distance, valuePair);
            }

            // Get the shortest distance.
            // Check to make sure shortest distance does not include a gene pair that
            // has already had its elements clustered.
            boolean minimized = false;
            while (!minimized) {
                double mk = sorted.firstKey();
                minValues = sorted.firstEntry().getValue();
                // If the gene pair is not present in the current gene set, remove this
                // distance.
                if (!genehash.containsKey(minValues[0]) || !genehash.containsKey(minValues[1]))
                    sorted.remove(mk);
                else
                    minimized = true;
            }
        }

        return genehash.entrySet().iterator().next().getValue();
    }
}
From source file:com.joliciel.talismane.parser.TransitionBasedGlobalLearningParser.java
public List<ParseConfiguration> parseSentence(List<PosTagSequence> posTagSequences,
        FeatureWeightVector weightVector, RankingSolution correctSolution) {
    MONITOR.startTask("parseSentence");
    try {
        long startTime = (new Date()).getTime();
        int maxAnalysisTimeMilliseconds = maxAnalysisTimePerSentence * 1000;
        int minFreeMemoryBytes = minFreeMemory * KILOBYTE;

        TokenSequence tokenSequence = posTagSequences.get(0).getTokenSequence();

        TreeMap<Integer, TreeSet<ParseConfiguration>> heaps = new TreeMap<Integer, TreeSet<ParseConfiguration>>();

        TreeSet<ParseConfiguration> heap0 = new TreeSet<ParseConfiguration>();
        for (PosTagSequence posTagSequence : posTagSequences) {
            // add an initial ParseConfiguration for each postag sequence
            ParseConfiguration initialConfiguration = this.getParserServiceInternal()
                    .getInitialConfiguration(posTagSequence);
            initialConfiguration.setScoringStrategy(new SimpleRankingScoringStrategy());
            initialConfiguration.setRankingScore(0.0);
            heap0.add(initialConfiguration);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Adding initial posTagSequence: " + posTagSequence);
            }
        }
        heaps.put(0, heap0);

        TreeSet<ParseConfiguration> backupHeap = null;
        TreeSet<ParseConfiguration> finalHeap = null;

        while (heaps.size() > 0) {
            // firstEntry() gives the heap with the lowest comparison index.
            Entry<Integer, TreeSet<ParseConfiguration>> heapEntry = heaps.firstEntry();
            TreeSet<ParseConfiguration> currentHeap = heapEntry.getValue();
            int currentHeapIndex = heapEntry.getKey();
            if (LOG.isTraceEnabled()) {
                LOG.trace("##### Polling next heap: " + heapEntry.getKey() + ", size: "
                        + heapEntry.getValue().size());
            }

            boolean finished = false;
            // systematically set the final heap here, just in case we exit "naturally" with no more heaps
            finalHeap = heapEntry.getValue();
            backupHeap = new TreeSet<ParseConfiguration>();

            // we jump out when either (a) all tokens have been attached or (b) we go over the max allotted time
            ParseConfiguration topConf = currentHeap.first();
            if (topConf.isTerminal()) {
                LOG.trace("Exiting with terminal heap: " + heapEntry.getKey() + ", size: "
                        + heapEntry.getValue().size());
                finished = true;
            }

            // check if we've gone over allotted time for this sentence
            long analysisTime = (new Date()).getTime() - startTime;
            if (maxAnalysisTimePerSentence > 0 && analysisTime > maxAnalysisTimeMilliseconds) {
                LOG.info("Parse tree analysis took too long for sentence: " + tokenSequence.getText());
                LOG.info("Breaking out after " + maxAnalysisTimePerSentence + " seconds.");
                finished = true;
            }

            // check if we've enough memory to process this sentence
            if (minFreeMemory > 0) {
                long freeMemory = Runtime.getRuntime().freeMemory();
                if (freeMemory < minFreeMemoryBytes) {
                    LOG.info("Not enough memory left to parse sentence: " + tokenSequence.getText());
                    LOG.info("Min free memory (bytes):" + minFreeMemoryBytes);
                    LOG.info("Current free memory (bytes): " + freeMemory);
                    finished = true;
                }
            }

            // check if any of the remaining top-N solutions on any heap can lead to the correct solution
            if (correctSolution != null) {
                boolean canReachCorrectSolution = false;
                for (TreeSet<ParseConfiguration> heap : heaps.values()) {
                    int j = 1;
                    for (ParseConfiguration solution : heap) {
                        if (j > beamWidth)
                            break;
                        if (solution.canReach(correctSolution)) {
                            canReachCorrectSolution = true;
                            break;
                        }
                        j++;
                    }
                    if (canReachCorrectSolution)
                        break;
                }
                if (!canReachCorrectSolution) {
                    LOG.debug("None of the solutions on the heap can reach the gold solution. Exiting.");
                    finished = true;
                }
            }

            if (finished) {
                // combine any remaining heaps
                for (TreeSet<ParseConfiguration> heap : heaps.values()) {
                    if (finalHeap != heap) {
                        finalHeap.addAll(heap);
                    }
                }
                break;
            }

            // remove heap from set of heaps
            heapEntry = heaps.pollFirstEntry();

            // limit the breadth to K
            int maxSolutions = currentHeap.size() > this.beamWidth ? this.beamWidth : currentHeap.size();

            int j = 0;
            while (currentHeap.size() > 0) {
                ParseConfiguration history = currentHeap.pollFirst();
                backupHeap.add(history);
                if (LOG.isTraceEnabled()) {
                    LOG.trace("### Next configuration on heap " + heapEntry.getKey() + ":");
                    LOG.trace(history.toString());
                    LOG.trace("Score: " + df.format(history.getScore()));
                    LOG.trace(history.getPosTagSequence());
                }

                Set<Transition> transitions = new HashSet<Transition>();

                // test the positive rules on the current configuration
                boolean ruleApplied = false;
                if (parserPositiveRules != null) {
                    MONITOR.startTask("check rules");
                    try {
                        for (ParserRule rule : parserPositiveRules) {
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Checking rule: " + rule.getCondition().getName());
                            }
                            RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                            FeatureResult<Boolean> ruleResult = rule.getCondition().check(history, env);
                            if (ruleResult != null && ruleResult.getOutcome()) {
                                transitions.add(rule.getTransition());
                                ruleApplied = true;
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Rule applies. Setting transition to: "
                                            + rule.getTransition().getCode());
                                }
                                if (!rule.getTransition().checkPreconditions(history)) {
                                    LOG.error("Cannot apply rule, preconditions not met.");
                                    ruleApplied = false;
                                }
                                break;
                            }
                        }
                    } finally {
                        MONITOR.endTask("check rules");
                    }
                }

                if (!ruleApplied) {
                    transitions = parsingConstrainer.getPossibleTransitions(history);

                    Set<Transition> eliminatedTransitions = new HashSet<Transition>();
                    for (Transition transition : transitions) {
                        if (!transition.checkPreconditions(history)) {
                            eliminatedTransitions.add(transition);
                        }
                    }
                    transitions.removeAll(eliminatedTransitions);

                    // apply the negative rules
                    eliminatedTransitions = new HashSet<Transition>();
                    if (parserNegativeRules != null) {
                        MONITOR.startTask("check negative rules");
                        try {
                            for (ParserRule rule : parserNegativeRules) {
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Checking negative rule: " + rule.getCondition().getName());
                                }
                                RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                                FeatureResult<Boolean> ruleResult = rule.getCondition().check(history, env);
                                if (ruleResult != null && ruleResult.getOutcome()) {
                                    eliminatedTransitions.add(rule.getTransition());
                                    if (LOG.isTraceEnabled()) {
                                        LOG.debug("Rule applies. Eliminating transition: "
                                                + rule.getTransition().getCode());
                                    }
                                }
                            }
                            if (eliminatedTransitions.size() == transitions.size()) {
                                LOG.debug("All transitions eliminated! Restoring original transitions.");
                            } else {
                                transitions.removeAll(eliminatedTransitions);
                            }
                        } finally {
                            MONITOR.endTask("check negative rules");
                        }
                    }
                } // has a positive rule been applied?

                if (transitions.size() == 0) {
                    // just in case we run out of both heaps and analyses, we build this backup heap
                    backupHeap.add(history);
                    if (LOG.isTraceEnabled())
                        LOG.trace("No transitions could be applied: not counting this solution as part of the beam");
                } else {
                    // up the counter, since we will count this solution towards the heap
                    j++;
                    // add solutions to the heap, one per valid transition
                    MONITOR.startTask("heap sort");
                    try {
                        Map<Transition, Double> deltaScorePerTransition = new HashMap<Transition, Double>();
                        double absoluteMax = 1;

                        for (Transition transition : transitions) {
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Applying transition: " + transition.getCode());
                            }
                            ParseConfiguration configuration = this.parserServiceInternal
                                    .getConfiguration(history);
                            transition.apply(configuration);
                            configuration.setRankingScore(history.getRankingScore());
                            configuration.getIncrementalFeatureResults()
                                    .addAll(history.getIncrementalFeatureResults());

                            // test the features on the new configuration
                            double scoreDelta = 0.0;
                            MONITOR.startTask("feature analyse");
                            List<FeatureResult<?>> featureResults = new ArrayList<FeatureResult<?>>();
                            try {
                                for (ParseConfigurationFeature<?> feature : this.parseFeatures) {
                                    MONITOR.startTask(feature.getName());
                                    try {
                                        RuntimeEnvironment env = this.featureService.getRuntimeEnvironment();
                                        FeatureResult<?> featureResult = feature.check(configuration, env);
                                        if (featureResult != null) {
                                            featureResults.add(featureResult);
                                            double weight = weightVector.getWeight(featureResult);
                                            scoreDelta += weight;
                                            if (LOG.isTraceEnabled()) {
                                                LOG.trace(featureResult.toString() + " = " + weight);
                                            }
                                        }
                                    } finally {
                                        MONITOR.endTask(feature.getName());
                                    }
                                }
                                configuration.getIncrementalFeatureResults().add(featureResults);
                                if (LOG.isTraceEnabled()) {
                                    LOG.trace("Score = " + configuration.getRankingScore() + " + " + scoreDelta
                                            + " = " + (configuration.getRankingScore() + scoreDelta));
                                }
                                configuration.setRankingScore(configuration.getRankingScore() + scoreDelta);
                                deltaScorePerTransition.put(transition, scoreDelta);
                                if (Math.abs(scoreDelta) > absoluteMax)
                                    absoluteMax = Math.abs(scoreDelta);
                            } finally {
                                MONITOR.endTask("feature analyse");
                            }

                            int nextHeapIndex = parseComparisonStrategy.getComparisonIndex(configuration) * 1000;
                            while (nextHeapIndex <= currentHeapIndex)
                                nextHeapIndex++;

                            TreeSet<ParseConfiguration> nextHeap = heaps.get(nextHeapIndex);
                            if (nextHeap == null) {
                                nextHeap = new TreeSet<ParseConfiguration>();
                                heaps.put(nextHeapIndex, nextHeap);
                                if (LOG.isTraceEnabled())
                                    LOG.trace("Created heap with index: " + nextHeapIndex);
                            }
                            nextHeap.add(configuration);
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Added configuration with score " + configuration.getScore()
                                        + " to heap: " + nextHeapIndex + ", total size: " + nextHeap.size());
                            }

                            configuration.clearMemory();
                        } // next transition

                        // Create a probability distribution of transitions
                        // normalise probabilities for each transition via normalised exponential
                        // e^(x/absmax)/sum(e^(x/absmax))
                        // where x/absmax is in [-1,1]
                        // e^(x/absmax) is in [1/e,e]
                        double total = 0.0;
                        for (Transition transition : deltaScorePerTransition.keySet()) {
                            double deltaScore = deltaScorePerTransition.get(transition);
                            deltaScore = Math.exp(deltaScore / absoluteMax);
                            deltaScorePerTransition.put(transition, deltaScore);
                            total += deltaScore;
                        }

                        for (Transition transition : deltaScorePerTransition.keySet()) {
                            double probability = deltaScorePerTransition.get(transition);
                            probability /= total;
                            Decision<Transition> decision = machineLearningService.createDecision(transition,
                                    probability);
                            transition.setDecision(decision);
                            if (LOG.isTraceEnabled()) {
                                LOG.trace("Transition: " + transition.getCode() + ", Prob: " + probability);
                            }
                        }
                    } finally {
                        MONITOR.endTask("heap sort");
                    }
                } // have we any transitions?

                // beam width test
                if (j == maxSolutions)
                    break;
            } // next history
        } // next atomic index

        // return the best sequences on the heap
        List<ParseConfiguration> bestConfigurations = new ArrayList<ParseConfiguration>();
        int i = 0;
        if (finalHeap.isEmpty())
            finalHeap = backupHeap;
        while (!finalHeap.isEmpty()) {
            bestConfigurations.add(finalHeap.pollFirst());
            i++;
            if (i >= this.getBeamWidth())
                break;
        }
        if (LOG.isDebugEnabled()) {
            if (correctSolution != null) {
                LOG.debug("Gold transitions: " + correctSolution.getIncrementalOutcomes());
            }
            for (ParseConfiguration finalConfiguration : bestConfigurations) {
                LOG.debug(df.format(finalConfiguration.getScore()) + ": " + finalConfiguration.toString());
                LOG.debug("Pos tag sequence: " + finalConfiguration.getPosTagSequence());
                LOG.debug("Transitions: " + finalConfiguration.getTransitions());
                if (LOG.isTraceEnabled()) {
                    StringBuilder sb = new StringBuilder();
                    sb.append(" * PosTag sequence score ");
                    sb.append(df.format(finalConfiguration.getPosTagSequence().getScore()));
                    sb.append(" = ");
                    for (PosTaggedToken posTaggedToken : finalConfiguration.getPosTagSequence()) {
                        sb.append(" * ");
                        sb.append(df.format(posTaggedToken.getDecision().getProbability()));
                    }
                    sb.append(" root ");
                    sb.append(finalConfiguration.getPosTagSequence().size());
                    LOG.trace(sb.toString());

                    sb = new StringBuilder();
                    sb.append(" * Token sequence score = ");
                    sb.append(df.format(finalConfiguration.getPosTagSequence().getTokenSequence().getScore()));
                    LOG.trace(sb.toString());
                }
            }
        }
        return bestConfigurations;
    } finally {
        MONITOR.endTask("parseSentence");
    }
}
From source file:tw.com.geminihsu.app01.camera2raw.Camera2RawFragment.java
/**
 * Retrieve the next {@link Image} from a reference counted {@link ImageReader}, retaining
 * that {@link ImageReader} until that {@link Image} is no longer in use, and set this
 * {@link Image} as the result for the next request in the queue of pending requests. If
 * all necessary information is available, begin saving the image to a file in a background
 * thread.
 *
 * @param pendingQueue the currently active requests.
 * @param reader       a reference counted wrapper containing an {@link ImageReader} from which
 *                     to acquire an image.
 */
private void dequeueAndSaveImage(TreeMap<Integer, ImageSaver.ImageSaverBuilder> pendingQueue,
        RefCountedAutoCloseable<ImageReader> reader) {
    synchronized (mCameraStateLock) {
        // firstEntry() gives the pending request with the lowest request id.
        Map.Entry<Integer, ImageSaver.ImageSaverBuilder> entry = pendingQueue.firstEntry();
        ImageSaver.ImageSaverBuilder builder = entry.getValue();

        // Increment reference count to prevent ImageReader from being closed while we
        // are saving its Images in a background thread (otherwise their resources may
        // be freed while we are writing to a file).
        if (reader == null || reader.getAndRetain() == null) {
            Log.e(TAG, "Paused the activity before we could save the image,"
                    + " ImageReader already closed.");
            pendingQueue.remove(entry.getKey());
            return;
        }

        Image image;
        try {
            image = reader.get().acquireNextImage();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Too many images queued for saving, dropping image for request: "
                    + entry.getKey());
            pendingQueue.remove(entry.getKey());
            return;
        }

        builder.setRefCountedReader(reader).setImage(image);
        handleCompletionLocked(entry.getKey(), builder, pendingQueue);
        // back Activity
        getActivity().finish();
    }
}
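A pattern common to all of the examples above: a TreeMap is used as a lightweight priority structure, and firstEntry() retrieves the minimum without removing it (the parser example uses pollFirstEntry() when it also wants removal). Since firstEntry() returns null on an empty map, callers either guard with isEmpty(), as the overlay and layer examples do, or risk a NullPointerException, as the Git examples do. A small defensive helper along those lines (the helper name is illustrative, not from any of the sources):

import java.util.Map;
import java.util.TreeMap;

final class FirstEntryUtil {
    /** First value in the map's key order, or a fallback when the map is empty. */
    static <K, V> V firstValueOr(TreeMap<K, V> map, V fallback) {
        Map.Entry<K, V> first = map.firstEntry(); // null, not an exception, when empty
        return first == null ? fallback : first.getValue();
    }
}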