List of usage examples for com.google.common.collect.Maps.newTreeMap
public static <K extends Comparable, V> TreeMap<K, V> newTreeMap()
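Maps.newTreeMap() returns an empty java.util.TreeMap ordered by the keys' natural ordering, so the key type must implement Comparable. Below is a minimal, self-contained sketch of the typical call pattern; the class name and values are illustrative only and do not come from the examples that follow.

import com.google.common.collect.Maps;

import java.util.TreeMap;

public class NewTreeMapExample {
  public static void main(String[] args) {
    // Empty TreeMap sorted by the keys' natural ordering (String here).
    TreeMap<String, Integer> counts = Maps.newTreeMap();
    counts.put("banana", 2);
    counts.put("apple", 5);
    counts.put("cherry", 1);
    // Iteration follows sorted key order: apple, banana, cherry.
    counts.forEach((key, value) -> System.out.println(key + "=" + value));
  }
}

On Java 7 and later this factory is equivalent to writing new TreeMap<>() directly; it mainly survives in code that predates the diamond operator.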
From source file:org.activityinfo.core.shared.importing.match.ColumnMappingGuesser.java
public TreeMap<Integer, ImportTarget> getDistanceMap(String sourceLabel) {
  final TreeMap<Integer, ImportTarget> distanceMap = Maps.newTreeMap();
  for (ImportTarget target : importTargets) {
    final String targetLabel = target.getLabel();
    final int distance = StringUtil.getLevenshteinDistance(sourceLabel, targetLabel);
    distanceMap.put(distance, target);
  }
  return distanceMap;
}
From source file:eu.interedition.text.Text.java
public SortedMap<TextRange, String> read(final SortedSet<TextRange> ranges) throws IOException {
  try {
    final SortedMap<TextRange, String> results = Maps.newTreeMap();
    for (TextRange range : ranges) {
      results.put(range, content.getSubString(range.getStart() + 1, (int) range.length()));
    }
    return results;
  } catch (SQLException e) {
    throw Throwables.propagate(e);
  }
}
From source file:net.myrrix.web.servlets.AbstractMyrrixServlet.java
@Override
public void init(ServletConfig config) throws ServletException {
  super.init(config);
  ServletContext context = config.getServletContext();
  recommender = (MyrrixRecommender) context.getAttribute(RECOMMENDER_KEY);
  rescorerProvider = (RescorerProvider) context.getAttribute(RESCORER_PROVIDER_KEY);
  @SuppressWarnings("unchecked")
  ReloadingReference<List<List<HostAndPort>>> theAllPartitions =
      (ReloadingReference<List<List<HostAndPort>>>) context.getAttribute(ALL_PARTITIONS_REF_KEY);
  allPartitions = theAllPartitions;
  thisPartition = (Integer) context.getAttribute(PARTITION_KEY);
  responseTypeCache = Maps.newConcurrentMap();
  Map<String, ServletStats> timings;
  synchronized (context) {
    @SuppressWarnings("unchecked")
    Map<String, ServletStats> temp = (Map<String, ServletStats>) context.getAttribute(TIMINGS_KEY);
    timings = temp;
    if (timings == null) {
      timings = Maps.newTreeMap();
      context.setAttribute(TIMINGS_KEY, timings);
    }
  }
  String key = getClass().getSimpleName();
  ServletStats theTiming = timings.get(key);
  if (theTiming == null) {
    theTiming = new ServletStats();
    timings.put(key, theTiming);
  }
  timing = theTiming;
}
From source file:org.apache.druid.query.BaseQuery.java
protected static Map<String, Object> computeOverriddenContext(final Map<String, Object> context,
    final Map<String, Object> overrides) {
  Map<String, Object> overridden = Maps.newTreeMap();
  if (context != null) {
    overridden.putAll(context);
  }
  overridden.putAll(overrides);
  return overridden;
}
From source file:com.facebook.buck.features.project.intellij.IjProjectWriter.java
private void writeTargetInfoMap(IjProjectTemplateDataPreparer projectDataPreparer, boolean update)
    throws IOException {
  Map<String, Map<String, String>> targetInfoMap = update ? readTargetInfoMap() : Maps.newTreeMap();
  projectDataPreparer.getModulesToBeWritten().forEach(module -> {
    module.getTargets().forEach(target -> {
      Map<String, String> targetInfo = Maps.newTreeMap();
      targetInfo.put(INTELLIJ_TYPE, MODULE_TYPE);
      targetInfo.put(INTELLIJ_NAME, module.getName());
      targetInfo.put(INTELLIJ_FILE_PATH, projectPaths.getModuleImlFilePath(module).toString());
      targetInfo.put("buck.type", getRuleNameForBuildTarget(target));
      targetInfoMap.put(target.getFullyQualifiedName(), targetInfo);
    });
  });
  projectDataPreparer.getLibrariesToBeWritten().forEach(library -> {
    library.getTargets().forEach(target -> {
      Map<String, String> targetInfo = Maps.newTreeMap();
      targetInfo.put(INTELLIJ_TYPE, LIBRARY_TYPE);
      targetInfo.put(INTELLIJ_NAME, library.getName());
      targetInfo.put(INTELLIJ_FILE_PATH, projectPaths.getLibraryXmlFilePath(library).toString());
      targetInfo.put("buck.type", getRuleNameForBuildTarget(target));
      targetInfoMap.put(target.getFullyQualifiedName(), targetInfo);
    });
  });
  Path targetInfoMapPath = getTargetInfoMapPath();
  try (JsonGenerator generator = ObjectMappers
      .createGenerator(outFilesystem.newFileOutputStream(targetInfoMapPath))
      .useDefaultPrettyPrinter()) {
    generator.writeObject(targetInfoMap);
  }
}
From source file:co.cask.common.lang.InstantiatorFactory.java
private <T> Instantiator<T> getByKnownType(TypeToken<T> type) {
  Class<? super T> rawType = type.getRawType();
  if (rawType.isArray()) {
    return new Instantiator<T>() {
      @Override
      public T create() {
        return (T) Lists.newLinkedList();
      }
    };
  }
  if (Collection.class.isAssignableFrom(rawType)) {
    if (SortedSet.class.isAssignableFrom(rawType)) {
      return new Instantiator<T>() {
        @Override
        public T create() {
          return (T) Sets.newTreeSet();
        }
      };
    }
    if (Set.class.isAssignableFrom(rawType)) {
      return new Instantiator<T>() {
        @Override
        public T create() {
          return (T) Sets.newHashSet();
        }
      };
    }
    if (Queue.class.isAssignableFrom(rawType)) {
      return new Instantiator<T>() {
        @Override
        public T create() {
          return (T) Lists.newLinkedList();
        }
      };
    }
    return new Instantiator<T>() {
      @Override
      public T create() {
        return (T) Lists.newArrayList();
      }
    };
  }
  if (Map.class.isAssignableFrom(rawType)) {
    if (SortedMap.class.isAssignableFrom(rawType)) {
      return new Instantiator<T>() {
        @Override
        public T create() {
          return (T) Maps.newTreeMap();
        }
      };
    }
    return new Instantiator<T>() {
      @Override
      public T create() {
        return (T) Maps.newHashMap();
      }
    };
  }
  return null;
}
From source file:com.cinchapi.concourse.server.storage.temp.Limbo.java
@Override
public Map<Long, String> audit(long record) {
  Map<Long, String> audit = Maps.newTreeMap();
  for (Iterator<Write> it = iterator(); it.hasNext();) {
    Write write = it.next();
    if (write.getRecord().longValue() == record) {
      audit.put(write.getVersion(), write.toString());
    }
  }
  return audit;
}
From source file:org.eclipse.wb.internal.core.model.property.editor.DatePropertyEditor.java
@Override
protected boolean setEditorText(Property property, String text) throws Exception {
  String valueText = text.trim();
  Object value;
  // check for delete
  if (valueText.length() == 0) {
    value = Property.UNKNOWN_VALUE;
  } else {
    // prepare value
    try {
      Map<String, Object> variables = Maps.newTreeMap();
      variables.put("value", valueText);
      if (property instanceof GenericProperty) {
        GenericProperty genericProperty = (GenericProperty) property;
        variables.put("control", genericProperty.getJavaInfo().getObject());
      }
      value = evaluate(m_toDateScript, variables);
    } catch (Throwable e) {
      UiUtils.openWarning(DesignerPlugin.getShell(), property.getTitle(),
          MessageFormat.format(ModelMessages.DatePropertyEditor_notValidDate, valueText));
      return false;
    }
  }
  // modify property
  property.setValue(value);
  return true;
}
From source file:eu.trentorise.smartcampus.mobility.service.SmartPlannerService.java
private Map<String, PlanningPolicy> getStoredPolicies(Boolean draft) {
  Map<String, PlanningPolicy> result = Maps.newTreeMap();
  Criteria criteria = new Criteria();
  if (draft != null) {
    criteria.and("draft").is(draft);
  }
  List<CompilablePolicyData> compilable = storage.searchDomainObjects(criteria, CompilablePolicyData.class);
  for (CompilablePolicyData policy : compilable) {
    result.put(policy.getPolicyId(), new CompilablePolicy(policy));
  }
  return result;
}
From source file:io.druid.client.CachingClusteredClient.java
@Override
public Sequence<T> run(final Query<T> query, final Map<String, Object> responseContext) {
  final QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
  final CacheStrategy<T, Object, Query<T>> strategy = toolChest.getCacheStrategy(query);

  final Map<DruidServer, List<SegmentDescriptor>> serverSegments = Maps.newTreeMap();
  final List<Pair<Interval, byte[]>> cachedResults = Lists.newArrayList();
  final Map<String, CachePopulator> cachePopulatorMap = Maps.newHashMap();

  final boolean useCache = query.getContextUseCache(true) && strategy != null
      && cacheConfig.isUseCache() && cacheConfig.isQueryCacheable(query);
  final boolean populateCache = query.getContextPopulateCache(true) && strategy != null
      && cacheConfig.isPopulateCache() && cacheConfig.isQueryCacheable(query);
  final boolean isBySegment = query.getContextBySegment(false);

  final ImmutableMap.Builder<String, Object> contextBuilder = new ImmutableMap.Builder<>();

  final int priority = query.getContextPriority(0);
  contextBuilder.put("priority", priority);

  if (populateCache) {
    // prevent down-stream nodes from caching results as well if we are populating the cache
    contextBuilder.put(CacheConfig.POPULATE_CACHE, false);
    contextBuilder.put("bySegment", true);
  }
  contextBuilder.put("intermediate", true);

  TimelineLookup<String, ServerSelector> timeline = serverView.getTimeline(query.getDataSource());
  if (timeline == null) {
    return Sequences.empty();
  }

  // build set of segments to query
  Set<Pair<ServerSelector, SegmentDescriptor>> segments = Sets.newLinkedHashSet();

  List<TimelineObjectHolder<String, ServerSelector>> serversLookup = Lists.newLinkedList();
  for (Interval interval : query.getIntervals()) {
    Iterables.addAll(serversLookup, timeline.lookup(interval));
  }

  // Let tool chest filter out unneeded segments
  final List<TimelineObjectHolder<String, ServerSelector>> filteredServersLookup =
      toolChest.filterSegments(query, serversLookup);

  for (TimelineObjectHolder<String, ServerSelector> holder : filteredServersLookup) {
    for (PartitionChunk<ServerSelector> chunk : holder.getObject()) {
      ServerSelector selector = chunk.getObject();
      final SegmentDescriptor descriptor = new SegmentDescriptor(holder.getInterval(),
          holder.getVersion(), chunk.getChunkNumber());
      segments.add(Pair.of(selector, descriptor));
    }
  }

  final byte[] queryCacheKey;
  if ((populateCache || useCache) // implies strategy != null
      && !isBySegment) // explicit bySegment queries are never cached
  {
    queryCacheKey = strategy.computeCacheKey(query);
  } else {
    queryCacheKey = null;
  }

  if (queryCacheKey != null) {
    // cacheKeys map must preserve segment ordering, in order for shards to always be combined in the same order
    Map<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> cacheKeys = Maps.newLinkedHashMap();
    for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
      final Cache.NamedKey segmentCacheKey = CacheUtil.computeSegmentCacheKey(
          segment.lhs.getSegment().getIdentifier(), segment.rhs, queryCacheKey);
      cacheKeys.put(segment, segmentCacheKey);
    }

    // Pull cached segments from cache and remove from set of segments to query
    final Map<Cache.NamedKey, byte[]> cachedValues;
    if (useCache) {
      cachedValues = cache.getBulk(cacheKeys.values());
    } else {
      cachedValues = ImmutableMap.of();
    }

    for (Map.Entry<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> entry : cacheKeys.entrySet()) {
      Pair<ServerSelector, SegmentDescriptor> segment = entry.getKey();
      Cache.NamedKey segmentCacheKey = entry.getValue();
      final Interval segmentQueryInterval = segment.rhs.getInterval();

      final byte[] cachedValue = cachedValues.get(segmentCacheKey);
      if (cachedValue != null) {
        // remove cached segment from set of segments to query
        segments.remove(segment);
        cachedResults.add(Pair.of(segmentQueryInterval, cachedValue));
      } else if (populateCache) {
        // otherwise, if populating cache, add segment to list of segments to cache
        final String segmentIdentifier = segment.lhs.getSegment().getIdentifier();
        cachePopulatorMap.put(String.format("%s_%s", segmentIdentifier, segmentQueryInterval),
            new CachePopulator(cache, objectMapper, segmentCacheKey));
      }
    }
  }

  // Compile list of all segments not pulled from cache
  for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
    final QueryableDruidServer queryableDruidServer = segment.lhs.pick();

    if (queryableDruidServer == null) {
      log.makeAlert("No servers found for %s?! How can this be?!", segment.rhs).emit();
    } else {
      final DruidServer server = queryableDruidServer.getServer();
      List<SegmentDescriptor> descriptors = serverSegments.get(server);

      if (descriptors == null) {
        descriptors = Lists.newArrayList();
        serverSegments.put(server, descriptors);
      }

      descriptors.add(segment.rhs);
    }
  }

  return new LazySequence<>(new Supplier<Sequence<T>>() {
    @Override
    public Sequence<T> get() {
      ArrayList<Sequence<T>> sequencesByInterval = Lists.newArrayList();
      addSequencesFromCache(sequencesByInterval);
      addSequencesFromServer(sequencesByInterval);
      return mergeCachedAndUncachedSequences(sequencesByInterval, toolChest);
    }

    private void addSequencesFromCache(ArrayList<Sequence<T>> listOfSequences) {
      if (strategy == null) {
        return;
      }

      final Function<Object, T> pullFromCacheFunction = strategy.pullFromCache();
      final TypeReference<Object> cacheObjectClazz = strategy.getCacheObjectClazz();
      for (Pair<Interval, byte[]> cachedResultPair : cachedResults) {
        final byte[] cachedResult = cachedResultPair.rhs;
        Sequence<Object> cachedSequence = new BaseSequence<>(
            new BaseSequence.IteratorMaker<Object, Iterator<Object>>() {
              @Override
              public Iterator<Object> make() {
                try {
                  if (cachedResult.length == 0) {
                    return Iterators.emptyIterator();
                  }
                  return objectMapper.readValues(
                      objectMapper.getFactory().createParser(cachedResult), cacheObjectClazz);
                } catch (IOException e) {
                  throw Throwables.propagate(e);
                }
              }

              @Override
              public void cleanup(Iterator<Object> iterFromMake) {
              }
            });
        listOfSequences.add(Sequences.map(cachedSequence, pullFromCacheFunction));
      }
    }

    private void addSequencesFromServer(ArrayList<Sequence<T>> listOfSequences) {
      listOfSequences.ensureCapacity(listOfSequences.size() + serverSegments.size());

      final Query<Result<BySegmentResultValueClass<T>>> rewrittenQuery =
          (Query<Result<BySegmentResultValueClass<T>>>) query.withOverriddenContext(contextBuilder.build());

      // Loop through each server, setting up the query and initiating it.
      // The data gets handled as a Future and parsed in the long Sequence chain in the resultSeqToAdd setter.
      for (Map.Entry<DruidServer, List<SegmentDescriptor>> entry : serverSegments.entrySet()) {
        final DruidServer server = entry.getKey();
        final List<SegmentDescriptor> descriptors = entry.getValue();

        final QueryRunner clientQueryable = serverView.getQueryRunner(server);
        if (clientQueryable == null) {
          log.error("WTF!? server[%s] doesn't have a client Queryable?", server);
          continue;
        }

        final MultipleSpecificSegmentSpec segmentSpec = new MultipleSpecificSegmentSpec(descriptors);

        final Sequence<T> resultSeqToAdd;
        if (!server.isAssignable() || !populateCache || isBySegment) { // Direct server queryable
          if (!isBySegment) {
            resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec), responseContext);
          } else {
            // bySegment queries need to be de-serialized, see DirectDruidClient.run()
            @SuppressWarnings("unchecked")
            final Query<Result<BySegmentResultValueClass<T>>> bySegmentQuery =
                (Query<Result<BySegmentResultValueClass<T>>>) query;

            @SuppressWarnings("unchecked")
            final Sequence<Result<BySegmentResultValueClass<T>>> resultSequence =
                clientQueryable.run(bySegmentQuery.withQuerySegmentSpec(segmentSpec), responseContext);

            resultSeqToAdd = (Sequence) Sequences.map(resultSequence,
                new Function<Result<BySegmentResultValueClass<T>>, Result<BySegmentResultValueClass<T>>>() {
                  @Override
                  public Result<BySegmentResultValueClass<T>> apply(Result<BySegmentResultValueClass<T>> input) {
                    final BySegmentResultValueClass<T> bySegmentValue = input.getValue();
                    return new Result<>(input.getTimestamp(), new BySegmentResultValueClass<T>(
                        Lists.transform(bySegmentValue.getResults(),
                            toolChest.makePreComputeManipulatorFn(query, MetricManipulatorFns.deserializing())),
                        bySegmentValue.getSegmentId(), bySegmentValue.getInterval()));
                  }
                });
          }
        } else { // Requires some manipulation on broker side
          @SuppressWarnings("unchecked")
          final Sequence<Result<BySegmentResultValueClass<T>>> runningSequence =
              clientQueryable.run(rewrittenQuery.withQuerySegmentSpec(segmentSpec), responseContext);
          resultSeqToAdd = toolChest.mergeSequencesUnordered(
              Sequences.<Result<BySegmentResultValueClass<T>>, Sequence<T>>map(runningSequence,
                  new Function<Result<BySegmentResultValueClass<T>>, Sequence<T>>() {
                    private final Function<T, Object> cacheFn = strategy.prepareForCache();

                    // Actually do something with the results
                    @Override
                    public Sequence<T> apply(Result<BySegmentResultValueClass<T>> input) {
                      final BySegmentResultValueClass<T> value = input.getValue();
                      final CachePopulator cachePopulator = cachePopulatorMap
                          .get(String.format("%s_%s", value.getSegmentId(), value.getInterval()));

                      final Queue<ListenableFuture<Object>> cacheFutures = new ConcurrentLinkedQueue<>();

                      return Sequences.<T>withEffect(
                          Sequences.<T, T>map(
                              Sequences.<T, T>map(Sequences.<T>simple(value.getResults()),
                                  new Function<T, T>() {
                                    @Override
                                    public T apply(final T input) {
                                      if (cachePopulator != null) {
                                        // only compute cache data if populating cache
                                        cacheFutures.add(backgroundExecutorService.submit(new Callable<Object>() {
                                          @Override
                                          public Object call() {
                                            return cacheFn.apply(input);
                                          }
                                        }));
                                      }
                                      return input;
                                    }
                                  }),
                              toolChest.makePreComputeManipulatorFn(
                                  // Ick... most makePreComputeManipulatorFn directly cast to their ToolChest query type of choice
                                  // This casting is sub-optimal, but hasn't caused any major problems yet...
                                  (Query) rewrittenQuery, MetricManipulatorFns.deserializing())),
                          new Runnable() {
                            @Override
                            public void run() {
                              if (cachePopulator != null) {
                                Futures.addCallback(
                                    Futures.allAsList(cacheFutures),
                                    new FutureCallback<List<Object>>() {
                                      @Override
                                      public void onSuccess(List<Object> cacheData) {
                                        cachePopulator.populate(cacheData);
                                        // Help out GC by making sure all references are gone
                                        cacheFutures.clear();
                                      }

                                      @Override
                                      public void onFailure(Throwable throwable) {
                                        log.error(throwable, "Background caching failed");
                                      }
                                    },
                                    backgroundExecutorService);
                              }
                            }
                          },
                          MoreExecutors.sameThreadExecutor()); // End withEffect
                    }
                  }));
        }

        listOfSequences.add(resultSeqToAdd);
      }
    }
  } // End of Supplier
  );
}