List of usage examples for com.google.common.collect.Maps.asMap
@GwtIncompatible("NavigableMap")
public static <K, V> NavigableMap<K, V> asMap(NavigableSet<K> set, Function<? super K, V> function)

The signature above is the NavigableSet overload. Maps.asMap also has overloads taking a plain Set<K> (returning Map<K, V>) and a SortedSet<K> (returning SortedMap<K, V>). All three return a live view whose values are computed on demand by applying the function to each key of the backing set.
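Before the project examples below, here is a minimal, self-contained sketch of the plain-Set overload. The class name AsMapSketch and the sample data are illustrative only, not taken from any of the source files that follow.

import com.google.common.base.Function;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import java.util.Map;
import java.util.Set;

public class AsMapSketch {
    public static void main(String[] args) {
        Set<String> words = ImmutableSet.of("a", "bb", "ccc");
        // Live view: each get() applies the function to the key;
        // keys absent from the backing set map to null.
        Map<String, Integer> lengths = Maps.asMap(words, new Function<String, Integer>() {
            @Override
            public Integer apply(String word) {
                return word.length();
            }
        });
        System.out.println(lengths.get("bb"));  // 2
        System.out.println(lengths.get("zz"));  // null: "zz" is not in the set
        System.out.println(lengths);            // {a=1, bb=2, ccc=3}
    }
}

Because the result is a view, changes to the backing set are reflected in the map, and the function is re-applied on every lookup; copy the view (for example with ImmutableMap.copyOf) if the values are expensive to compute.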
From source file:com.davidbracewell.math.similarity.SimilarityMeasure.java
/**
 * Computes the similarity of two sets by viewing each one as a map,
 * using {@code SET_NUMBER.INSTANCE} as the value function.
 *
 * @param s1 the first set
 * @param s2 the second set
 * @return the similarity score
 */
public double calculate(Set<?> s1, Set<?> s2) {
    return calculate(
            Maps.asMap(Preconditions.checkNotNull(s1, "Vectors cannot be null"), SET_NUMBER.INSTANCE),
            Maps.asMap(Preconditions.checkNotNull(s2, "Vectors cannot be null"), SET_NUMBER.INSTANCE));
}
From source file:io.druid.server.lookup.cache.polling.OffHeapPollingCache.java
public OffHeapPollingCache(final Iterable<Map.Entry<K, V>> entries) {
    synchronized (started) {
        this.cacheName = String.format("cache-%s", UUID.randomUUID());
        this.reverseCacheName = String.format("reverseCache-%s", UUID.randomUUID());
        mapCache = DB.createHashMap(cacheName).make();
        reverseCache = DB.createHashMap(reverseCacheName).make();
        ImmutableSet.Builder<V> setOfValuesBuilder = ImmutableSet.builder();
        for (Map.Entry<K, V> entry : entries) {
            mapCache.put(entry.getKey(), entry.getValue());
            setOfValuesBuilder.add(entry.getValue());
        }
        final Set<V> setOfValues = setOfValuesBuilder.build();
        // Maps.asMap builds a lazy value-to-keys view over the set of values;
        // putAll materializes that view into the off-heap reverse cache.
        reverseCache.putAll(Maps.asMap(setOfValues, new Function<V, List<K>>() {
            @Override
            public List<K> apply(final V input) {
                return Lists.newArrayList(Maps.filterKeys(mapCache, new Predicate<K>() {
                    @Override
                    public boolean apply(K key) {
                        V retVal = mapCache.get(key);
                        if (retVal == null) {
                            return false;
                        }
                        return retVal.equals(input);
                    }
                }).keySet());
            }
        }));
        started.getAndSet(true);
    }
}
From source file:org.apache.druid.server.lookup.cache.polling.OffHeapPollingCache.java
public OffHeapPollingCache(final Iterable<Map.Entry<K, V>> entries) {
    synchronized (started) {
        this.cacheName = StringUtils.format("cache-%s", UUID.randomUUID());
        this.reverseCacheName = StringUtils.format("reverseCache-%s", UUID.randomUUID());
        mapCache = DB.createHashMap(cacheName).make();
        reverseCache = DB.createHashMap(reverseCacheName).make();
        ImmutableSet.Builder<V> setOfValuesBuilder = ImmutableSet.builder();
        for (Map.Entry<K, V> entry : entries) {
            mapCache.put(entry.getKey(), entry.getValue());
            setOfValuesBuilder.add(entry.getValue());
        }
        final Set<V> setOfValues = setOfValuesBuilder.build();
        reverseCache.putAll(Maps.asMap(setOfValues, new Function<V, List<K>>() {
            @Override
            public List<K> apply(final V input) {
                return Lists.newArrayList(Maps.filterKeys(mapCache, new Predicate<K>() {
                    @Override
                    public boolean apply(K key) {
                        V retVal = mapCache.get(key);
                        if (retVal == null) {
                            return false;
                        }
                        return retVal.equals(input);
                    }
                }).keySet());
            }
        }));
        started.getAndSet(true);
    }
}
From source file:org.grouplens.lenskit.util.table.RowImpl.java
@Override
public Map<String, Object> asMap() {
    // FIXME Don't create a new set every time this is done.
    return Maps.asMap(Sets.newHashSet(layout.getColumns()), VALUE_FUNCTION);
}
From source file:edu.mit.streamjit.impl.compiler2.DoubleArrayConcreteStorage.java
public DoubleArrayConcreteStorage(Arrayish.Factory arrayFactory, Storage s) {
    this.capacity = s.steadyStateCapacity();
    assert capacity > 0 : s + " has capacity " + capacity;
    this.throughput = s.throughput();
    assert capacity == 2 * throughput : "can't double buffer " + s;
    this.readArray = arrayFactory.make(s.type(), throughput);
    this.writeArray = arrayFactory.make(s.type(), throughput);
    ImmutableSet<ActorGroup> relevantGroups = ImmutableSet.<ActorGroup>builder()
            .addAll(s.upstreamGroups())
            .addAll(s.downstreamGroups())
            .build();
    // Constant-function view: maps every relevant group to 1.
    Map<ActorGroup, Integer> oneMap = Maps.asMap(relevantGroups, x -> 1);
    this.readOffset = s.readIndices(oneMap).first();
    int writeOffset = s.writeIndices(oneMap).first();
    MethodHandle stateGetter = STATE_GETTER.bindTo(this);
    this.readHandle = MethodHandles.filterArguments(
            MethodHandles.guardWithTest(stateGetter, readArray.get(), writeArray.get()),
            0, Combinators.adder(-readOffset));
    this.writeHandle = MethodHandles.filterArguments(
            MethodHandles.guardWithTest(stateGetter, writeArray.set(), readArray.set()),
            0, Combinators.adder(-writeOffset));
    this.adjustHandle = ADJUST.bindTo(this);
}
From source file:ninja.leaping.permissionsex.extrabackends.groupmanager.GroupManagerSubjectData.java
@Override
public Map<Set<Map.Entry<String, String>>, Map<String, String>> getAllOptions() {
    return Maps.filterValues(Maps.asMap(getActiveContexts(), this::getOptions),
            input -> input != null && !input.isEmpty());
}
From source file:com.stackframe.sarariman.tickets.TicketsImpl.java
public Map<? extends Number, Ticket> getMap() {
    Function<Number, Ticket> f = new Function<Number, Ticket>() {
        public Ticket apply(Number n) {
            return get(n.intValue());
        }
    };
    return Maps.asMap(Numbers.positiveIntegers, f);
}
From source file:org.apache.hadoop.hive.ql.parse.DruidSqlOperatorConverter.java
public static final Map<SqlOperator, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter> getDefaultMap() {
    if (druidOperatorMap == null) {
        druidOperatorMap = new HashMap<SqlOperator, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter>();
        DruidQuery.DEFAULT_OPERATORS_LIST.stream().forEach(op -> druidOperatorMap.put(op.calciteOperator(), op));
        // Override Hive specific operators
        druidOperatorMap.putAll(Maps.asMap(HiveFloorDate.ALL_FUNCTIONS,
                (Function<SqlFunction, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter>)
                        input -> new FloorOperatorConversion()));
        druidOperatorMap.putAll(Maps.asMap(HiveExtractDate.ALL_FUNCTIONS,
                (Function<SqlFunction, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter>)
                        input -> new ExtractOperatorConversion()));
        druidOperatorMap.put(HiveConcat.INSTANCE, new DirectOperatorConversion(HiveConcat.INSTANCE, "concat"));
        druidOperatorMap.put(SqlStdOperatorTable.SUBSTRING,
                new DruidSqlOperatorConverter.DruidSubstringOperatorConversion());
        druidOperatorMap.put(SqlStdOperatorTable.IS_NULL,
                new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NULL, "isnull"));
        druidOperatorMap.put(SqlStdOperatorTable.IS_NOT_NULL,
                new UnaryFunctionOperatorConversion(SqlStdOperatorTable.IS_NOT_NULL, "notnull"));
        druidOperatorMap.put(HiveTruncSqlOperator.INSTANCE, new DruidDateTruncOperatorConversion());
        druidOperatorMap.put(HiveToDateSqlOperator.INSTANCE, new DruidToDateOperatorConversion());
        druidOperatorMap.put(HiveFromUnixTimeSqlOperator.INSTANCE, new DruidFormUnixTimeOperatorConversion());
        druidOperatorMap.put(HiveUnixTimestampSqlOperator.INSTANCE, new DruidUnixTimestampOperatorConversion());
        druidOperatorMap.put(HiveDateAddSqlOperator.INSTANCE,
                new DruidDateArithmeticOperatorConversion(1, HiveDateAddSqlOperator.INSTANCE));
        druidOperatorMap.put(HiveDateSubSqlOperator.INSTANCE,
                new DruidDateArithmeticOperatorConversion(-1, HiveDateSubSqlOperator.INSTANCE));
    }
    return druidOperatorMap;
}
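A detail this example relies on: Maps.asMap(HiveFloorDate.ALL_FUNCTIONS, input -> new FloorOperatorConversion()) is only a lazy view, so no converter object is constructed until putAll iterates the view's entries. At that point the function runs once per key, so each Hive floor/extract function ends up with its own conversion instance in druidOperatorMap.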
From source file:com.google.devtools.build.lib.skyframe.TraversalInfoRootPackageExtractor.java
private void collectPackagesUnder(WalkableGraph graph, ExtendedEventHandler eventHandler,
        final RepositoryName repository, Set<TraversalInfo> traversals,
        ImmutableList.Builder<PathFragment> builder) throws InterruptedException {
    Map<TraversalInfo, SkyKey> traversalToKeyMap = Maps.asMap(traversals,
            new Function<TraversalInfo, SkyKey>() {
                @Override
                public SkyKey apply(TraversalInfo traversalInfo) {
                    return CollectPackagesUnderDirectoryValue.key(repository, traversalInfo.rootedDir,
                            traversalInfo.blacklistedSubdirectories);
                }
            });
    Map<SkyKey, SkyValue> values = graph.getSuccessfulValues(traversalToKeyMap.values());
    ImmutableSet.Builder<TraversalInfo> subdirTraversalBuilder = ImmutableSet.builder();
    for (Map.Entry<TraversalInfo, SkyKey> entry : traversalToKeyMap.entrySet()) {
        TraversalInfo info = entry.getKey();
        SkyKey key = entry.getValue();
        SkyValue val = values.get(key);
        CollectPackagesUnderDirectoryValue collectPackagesValue = (CollectPackagesUnderDirectoryValue) val;
        if (collectPackagesValue != null) {
            if (collectPackagesValue.isDirectoryPackage()) {
                builder.add(info.rootedDir.getRootRelativePath());
            }
            if (collectPackagesValue.getErrorMessage() != null) {
                eventHandler.handle(Event.error(collectPackagesValue.getErrorMessage()));
            }
            ImmutableMap<RootedPath, Boolean> subdirectoryTransitivelyContainsPackages =
                    collectPackagesValue.getSubdirectoryTransitivelyContainsPackagesOrErrors();
            for (RootedPath subdirectory : subdirectoryTransitivelyContainsPackages.keySet()) {
                if (subdirectoryTransitivelyContainsPackages.get(subdirectory)) {
                    PathFragment subdirectoryRelativePath = subdirectory.getRootRelativePath();
                    ImmutableSet<PathFragment> blacklistedSubdirectoriesBeneathThisSubdirectory =
                            info.blacklistedSubdirectories.stream()
                                    .filter(pathFragment -> pathFragment.startsWith(subdirectoryRelativePath))
                                    .collect(toImmutableSet());
                    ImmutableSet<PathFragment> excludedSubdirectoriesBeneathThisSubdirectory =
                            info.excludedSubdirectories.stream()
                                    .filter(pathFragment -> pathFragment.startsWith(subdirectoryRelativePath))
                                    .collect(toImmutableSet());
                    if (!excludedSubdirectoriesBeneathThisSubdirectory.contains(subdirectoryRelativePath)) {
                        subdirTraversalBuilder.add(new TraversalInfo(subdirectory,
                                blacklistedSubdirectoriesBeneathThisSubdirectory,
                                excludedSubdirectoriesBeneathThisSubdirectory));
                    }
                }
            }
        }
    }
    ImmutableSet<TraversalInfo> subdirTraversals = subdirTraversalBuilder.build();
    if (!subdirTraversals.isEmpty()) {
        collectPackagesUnder(graph, eventHandler, repository, subdirTraversals, builder);
    }
}
From source file:edu.mit.streamjit.impl.compiler2.PeekableBufferConcreteStorage.java
public static StorageFactory factory(final Map<Token, PeekableBuffer> buffers) {
    return (Storage storage) -> {
        assert buffers.containsKey(storage.id()) : storage.id() + " not in " + buffers;
        // Hack: we don't have the throughput when making init storage,
        // but we don't need it either.
        int throughput1;
        try {
            throughput1 = storage.throughput();
        } catch (IllegalStateException ignored) {
            throughput1 = Integer.MIN_VALUE;
        }
        ImmutableSet<ActorGroup> relevantGroups = ImmutableSet.<ActorGroup>builder()
                .addAll(storage.upstreamGroups())
                .addAll(storage.downstreamGroups())
                .build();
        ImmutableSortedSet<Integer> readIndices = storage.readIndices(Maps.asMap(relevantGroups, i -> 1));
        int minReadIndex1 = readIndices.isEmpty() ? Integer.MIN_VALUE : readIndices.first();
        return new PeekableBufferConcreteStorage(storage.type(), throughput1, minReadIndex1,
                buffers.get(storage.id()));
    };
}