Usage examples for com.google.common.collect.Maps#newIdentityHashMap()

Method signature:

public static <K, V> IdentityHashMap<K, V> newIdentityHashMap()
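The factory simply returns a new java.util.IdentityHashMap, that is, a map whose keys are compared with == rather than equals()/hashCode(). Before the project examples below, here is a minimal self-contained sketch (the IdentityMapDemo class and its variable names are illustrative, not taken from any of the projects listed): two strings that are equal by equals() but are distinct objects end up as two separate entries.

import com.google.common.collect.Maps;
import java.util.IdentityHashMap;

public class IdentityMapDemo {
    public static void main(String[] args) {
        // Keys are compared by reference, not by equals().
        IdentityHashMap<String, Integer> counts = Maps.newIdentityHashMap();
        String a = new String("key");
        String b = new String("key"); // equal to a by equals(), but a different object
        counts.put(a, 1);
        counts.put(b, 2);
        System.out.println(counts.size()); // 2 -- both keys are kept
        System.out.println(counts.get(a)); // 1
        System.out.println(counts.get(b)); // 2
    }
}

The project examples below use the same identity semantics to key caches and lookup tables by object reference, typically where the key objects should be distinguished (or cached) by reference rather than by equals().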
From source file:com.cinchapi.concourse.plugin.data.TrackingMultimap.java
/**
 * Construct a new instance.
 *
 * @param delegate an {@link Map#isEmpty() empty} map
 */
protected TrackingMultimap(Map<K, Set<V>> delegate) {
    Preconditions.checkState(delegate.isEmpty());
    this.data = delegate;
    this.keyTypes = Maps.newIdentityHashMap();
    for (DataType type : DataType.values()) {
        this.keyTypes.put(type, new AtomicInteger(0));
    }
    this.totalValueCount = new AtomicLong(0);
    this.uniqueValueCount = new AtomicLong(0);
    this.valueCache = new SparseBitSet();
}
From source file:org.onosproject.provider.of.group.impl.OpenFlowGroupProvider.java
@Override
public void performGroupOperation(DeviceId deviceId, GroupOperations groupOps) {
    Map<OFGroupMod, OpenFlowSwitch> mods = Maps.newIdentityHashMap();
    final Dpid dpid = Dpid.dpid(deviceId.uri());
    OpenFlowSwitch sw = controller.getSwitch(dpid);
    for (GroupOperation groupOperation : groupOps.operations()) {
        if (sw == null) {
            log.error("SW {} is not found", dpid);
            return;
        }
        final Long groupModXid = XID_COUNTER.getAndIncrement();
        GroupModBuilder builder = GroupModBuilder.builder(groupOperation.buckets(),
                groupOperation.groupId(), groupOperation.groupType(),
                sw.factory(), Optional.of(groupModXid));
        OFGroupMod groupMod = null;
        switch (groupOperation.opType()) {
        case ADD:
            groupMod = builder.buildGroupAdd();
            break;
        case MODIFY:
            groupMod = builder.buildGroupMod();
            break;
        case DELETE:
            groupMod = builder.buildGroupDel();
            break;
        default:
            log.error("Unsupported Group operation");
        }
        sw.sendMsg(groupMod);
        GroupId groudId = new DefaultGroupId(groupMod.getGroup().getGroupNumber());
        pendingGroupOperations.put(groudId, groupOperation);
        pendingXidMaps.put(groudId, groupModXid);
    }
}
From source file:org.fao.geonet.kernel.AllThesaurus.java
@Override
public synchronized QueryResultsTable performRequest(final String query)
        throws IOException, MalformedQueryException, QueryEvaluationException, AccessDeniedException {
    final Map<Thesaurus, QueryResultsTable> allResults = Maps.newIdentityHashMap();
    onThesauri(null, new Function<Thesaurus, Void>() {
        @Nullable
        @Override
        public Void apply(@Nonnull Thesaurus input) {
            final QueryResultsTable queryResultsTable;
            try {
                queryResultsTable = input.performRequest(query);
                if (queryResultsTable.getRowCount() > 0) {
                    allResults.put(input, queryResultsTable);
                }
            } catch (IOException | AccessDeniedException | QueryEvaluationException
                    | MalformedQueryException e) {
                throw new RuntimeException(e);
            }
            return null;
        }
    });
    return new AllQueryResultsTable(allResults);
}
From source file:org.eclipse.xtext.resource.impl.DefaultResourceDescription.java
protected Map<EObject, IEObjectDescription> createEObject2ExportedEObjectsMap(
        Iterable<IEObjectDescription> exportedObjects) {
    Map<EObject, IEObjectDescription> uri2exportedEObjects = Maps.newIdentityHashMap();
    for (IEObjectDescription eObjectDescription : exportedObjects) {
        uri2exportedEObjects.put(eObjectDescription.getEObjectOrProxy(), eObjectDescription);
    }
    return uri2exportedEObjects;
}
From source file:org.apache.drill.exec.planner.physical.explain.PrelSequencer.java
public Map<Prel, OpId> go(Prel root) {
    // get fragments.
    Frag rootFrag = new Frag(root);
    frags.add(rootFrag);
    root.accept(this, rootFrag);

    // do depth first traversal of fragments to assign major fragment ids.
    Queue<Frag> q = Lists.newLinkedList();
    q.add(rootFrag);
    int majorFragmentId = 0;
    while (!q.isEmpty()) {
        Frag frag = q.remove();
        frag.majorFragmentId = majorFragmentId++;
        for (Frag child : frag) {
            q.add(child);
        }
    }

    // for each fragment, do a dfs of operators to assign operator ids.
    Map<Prel, OpId> ids = Maps.newIdentityHashMap();
    ids.put(rootFrag.root, new OpId(0, 0));
    for (Frag f : frags) {
        int id = 1;
        Queue<Prel> ops = Lists.newLinkedList();
        ops.add(f.root);
        while (!ops.isEmpty()) {
            Prel p = ops.remove();
            boolean isExchange = p instanceof ExchangePrel;
            if (p != f.root) {
                // we account for exchanges as receivers to guarantee unique identifiers.
                ids.put(p, new OpId(f.majorFragmentId, id++));
            }
            if (!isExchange || p == f.root) {
                List<Prel> children = Lists.reverse(Lists.newArrayList(p.iterator()));
                for (Prel child : children) {
                    ops.add(child);
                }
            }
        }
    }
    return ids;
}
From source file:org.onosproject.store.primitives.impl.PartitionedAsyncConsistentMap.java
@Override
public CompletableFuture<Boolean> prepare(MapTransaction<K, V> transaction) {
    Map<AsyncConsistentMap<K, V>, List<MapUpdate<K, V>>> updatesGroupedByMap = Maps.newIdentityHashMap();
    transaction.updates().forEach(update -> {
        AsyncConsistentMap<K, V> map = getMap(update.key());
        updatesGroupedByMap.computeIfAbsent(map, k -> Lists.newLinkedList()).add(update);
    });
    Map<AsyncConsistentMap<K, V>, MapTransaction<K, V>> transactionsByMap = Maps.transformValues(
            updatesGroupedByMap, list -> new MapTransaction<>(transaction.transactionId(), list));
    return Tools
            .allOf(transactionsByMap.entrySet().stream()
                    .map(e -> e.getKey().prepare(e.getValue()))
                    .collect(Collectors.toList()))
            .thenApply(list -> list.stream().reduce(Boolean::logicalAnd).orElse(true));
}
From source file:no.ssb.vtl.model.DataStructure.java
private static IdentityHashMap<Component, String> computeInverseCache(
        ImmutableMap<String, Component> delegate) {
    IdentityHashMap<Component, String> map = Maps.newIdentityHashMap();
    for (Entry<String, Component> entry : delegate.entrySet()) {
        map.put(entry.getValue(), entry.getKey());
    }
    return map;
}
From source file:org.apache.beam.runners.dataflow.worker.graph.Networks.java
public static <N, E> String toDot(Network<N, E> network) {
    StringBuilder builder = new StringBuilder();
    builder.append("digraph network {\n");
    Map<N, String> nodeName = Maps.newIdentityHashMap();
    network.nodes().forEach(node -> nodeName.put(node, "n" + nodeName.size()));
    for (Entry<N, String> nodeEntry : nodeName.entrySet()) {
        builder.append(String.format("  %s [fontname=\"Courier New\" label=\"%s\"];\n",
                nodeEntry.getValue(), escapeDot(nodeEntry.getKey().toString())));
    }
    for (E edge : network.edges()) {
        EndpointPair<N> endpoints = network.incidentNodes(edge);
        builder.append(String.format("  %s -> %s [fontname=\"Courier New\" label=\"%s\"];\n",
                nodeName.get(endpoints.source()), nodeName.get(endpoints.target()),
                escapeDot(edge.toString())));
    }
    builder.append("}");
    return builder.toString();
}
From source file:org.onosproject.store.primitives.impl.PartitionedAsyncConsistentMap.java
@Override
public CompletableFuture<Boolean> prepareAndCommit(MapTransaction<K, V> transaction) {
    Map<AsyncConsistentMap<K, V>, List<MapUpdate<K, V>>> updatesGroupedByMap = Maps.newIdentityHashMap();
    transaction.updates().forEach(update -> {
        AsyncConsistentMap<K, V> map = getMap(update.key());
        updatesGroupedByMap.computeIfAbsent(map, k -> Lists.newLinkedList()).add(update);
    });
    Map<AsyncConsistentMap<K, V>, MapTransaction<K, V>> transactionsByMap = Maps.transformValues(
            updatesGroupedByMap, list -> new MapTransaction<>(transaction.transactionId(), list));
    return Tools
            .allOf(transactionsByMap.entrySet().stream()
                    .map(e -> e.getKey().prepareAndCommit(e.getValue()))
                    .collect(Collectors.toList()))
            .thenApply(list -> list.stream().reduce(Boolean::logicalAnd).orElse(true));
}
From source file:edu.buaa.satla.analysis.core.arg.ARGReachedSet.java
/**
 * Set a new precision for each single state in the reached set.
 * @param pNewPrecision The new precision, may be for a single CPA (c.f. {@link #adaptPrecision(ARGState, Precision)}).
 */
public void updatePrecisionGlobally(Precision pNewPrecision, Class<? extends Precision> pPrecisionType) {
    Map<Precision, Precision> precisionUpdateCache = Maps.newIdentityHashMap();
    for (AbstractState s : mReached) {
        Precision oldPrecision = mReached.getPrecision(s);
        Precision newPrecision = precisionUpdateCache.get(oldPrecision);
        if (newPrecision == null) {
            newPrecision = adaptPrecision(oldPrecision, pNewPrecision, pPrecisionType);
            precisionUpdateCache.put(oldPrecision, newPrecision);
        }
        mReached.updatePrecision(s, newPrecision);
    }
}