List of usage examples for com.google.common.collect Maps newIdentityHashMap
public static <K, V> IdentityHashMap<K, V> newIdentityHashMap()
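Returns a new, empty IdentityHashMap, i.e. a map whose keys are compared by reference (==) rather than by equals()/hashCode(). A minimal, self-contained sketch of that behavior follows; the class name NewIdentityHashMapDemo is illustrative only and does not come from any of the projects listed below.

import java.util.IdentityHashMap;

import com.google.common.collect.Maps;

public class NewIdentityHashMapDemo {
    public static void main(String[] args) {
        // Two distinct String instances with equal contents.
        String a = new String("key");
        String b = new String("key");

        // Keys are compared with ==, so a and b occupy separate entries.
        IdentityHashMap<String, Integer> map = Maps.newIdentityHashMap();
        map.put(a, 1);
        map.put(b, 2);

        System.out.println(map.size());  // 2 (a regular HashMap would report 1)
        System.out.println(map.get(a));  // 1
        System.out.println(map.get(b));  // 2
    }
}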
From source file:org.sosy_lab.cpachecker.cpa.arg.ARGReachedSet.java
/**
 * Set a new precision for each single state in the reached set.
 *
 * @param pNewPrecision The new precision, may be for a single CPA
 *     (c.f. {@link #adaptPrecision(ARGState, Precision)}).
 */
public void updatePrecisionGlobally(Precision pNewPrecision,
        Predicate<? super Precision> pPrecisionType) {
    Map<Precision, Precision> precisionUpdateCache = Maps.newIdentityHashMap();

    for (AbstractState s : mReached) {
        Precision oldPrecision = mReached.getPrecision(s);

        Precision newPrecision = precisionUpdateCache.get(oldPrecision);
        if (newPrecision == null) {
            newPrecision = adaptPrecision(oldPrecision, pNewPrecision, pPrecisionType);
            precisionUpdateCache.put(oldPrecision, newPrecision);
        }

        mReached.updatePrecision(s, newPrecision);
    }
}
From source file:org.carrot2.source.lucene.LuceneDocumentSource.java
@SuppressWarnings("unchecked")
@Override
public void init(IControllerContext context) {
    super.init(context);
    this.context = context;

    synchronized (context) {
        final String key = AttributeUtils.getKey(getClass(), "openIndexes");
        if (context.getAttribute(key) == null) {
            context.setAttribute(key, Maps.newIdentityHashMap());
            context.addListener(new IControllerContextListener() {
                public void beforeDisposal(IControllerContext context) {
                    closeAllIndexes();
                }
            });
        }

        this.openIndexes = (IdentityHashMap<Directory, IndexSearcher>) context.getAttribute(key);
    }
}
From source file:org.eclipse.xtext.util.formallang.PdaUtil.java
public <S, P, T, D extends Pda<S, P>> D expand(Pda<S, P> pda, Function<S, Pda<S, P>> expand,
        Function<S, T> tokens, PdaFactory<D, S, P, T> fact) {
    D result = fact.create(tokens.apply(pda.getStart()), tokens.apply(pda.getStop()));
    Identity<S> identity = new Identity<S>();
    Map<S, S> idstates = Maps.newIdentityHashMap();
    Multimap<S, S> followers = LinkedHashMultimap.create();
    for (S s_old : nfaUtil.collect(pda)) {
        S s_new = idstates.get(s_old);
        if (s_new == null) {
            Pda<S, P> sub = expand.apply(s_old);
            if (sub != null) {
                S s_start = identity.get(fact.createPush(result, tokens.apply(s_old)));
                S s_stop = identity.get(fact.createPop(result, tokens.apply(s_old)));
                idstates.put(s_old, s_start);
                idstates.put(sub.getStart(), s_start);
                idstates.put(sub.getStop(), s_stop);
                followers.putAll(s_start, sub.getFollowers(sub.getStart()));
                followers.putAll(s_stop, pda.getFollowers(s_old));
                for (S f_old : nfaUtil.collect(sub))
                    if (f_old != sub.getStart() && f_old != sub.getStop()) {
                        S f_new = idstates.get(f_old);
                        if (f_new == null) {
                            idstates.put(f_old, f_new = clone(f_old, sub, result, tokens, fact, identity));
                            followers.putAll(f_new, pda.getFollowers(f_old));
                        }
                    }
            } else {
                idstates.put(s_old, s_new = clone(s_old, pda, result, tokens, fact, identity));
                followers.putAll(s_new, pda.getFollowers(s_old));
            }
        }
    }
    for (Map.Entry<S, Collection<S>> entry : followers.asMap().entrySet()) {
        Set<S> f = Sets.newLinkedHashSet();
        for (S s : entry.getValue())
            f.add(idstates.get(s));
        fact.setFollowers(result, entry.getKey(), f);
    }
    return result;
}
From source file:org.carrot2.workbench.vis.FlashViewPage.java
/**
 * Create a mirror of a processing result with a smaller memory footprint
 * for visualizations.
 */
private ProcessingResultMirror smallerMemFootprintMirror(ProcessingResult pr) {
    ProcessingResultMirror prm = new ProcessingResultMirror();
    prm.query = pr.getAttribute(AttributeNames.QUERY);

    prm.documents = Lists.newArrayList();
    IdentityHashMap<Document, Document> docMapping = Maps.newIdentityHashMap();
    for (Document doc : pr.getDocuments()) {
        String title = passData.contains(DocumentData.TITLE) ? doc.getTitle() : null;
        String snippet = passData.contains(DocumentData.SNIPPET) ? doc.getSummary() : null;
        Document docMirror = new Document(title, snippet, null, null, doc.getStringId());
        prm.documents.add(docMirror);
        docMapping.put(doc, docMirror);
    }

    prm.clusters = Lists.newArrayList();
    for (Cluster c : pr.getClusters()) {
        prm.clusters.add(mirrorOf(c, docMapping));
    }

    return prm;
}
From source file:org.prebake.service.bake.Oven.java
private static void sortByGlobs(List<Path> paths, ImmutableGlobSet globs) {
    int n = paths.size();
    Map<Glob, Integer> globByIndex = Maps.newIdentityHashMap();
    int nGlobs = 0;
    for (Glob g : globs) {
        globByIndex.put(g, nGlobs++);
    }
    if (nGlobs < 2) {
        return;
    }
    TaggedPath[] taggedPaths = new TaggedPath[n];
    for (int i = 0; i < n; ++i) {
        int index = nGlobs;
        for (Glob g : globs.matching(paths.get(i))) {
            int k = globByIndex.get(g);
            if (k < index) {
                index = k;
            }
        }
        taggedPaths[i] = new TaggedPath(index, paths.get(i));
    }
    Arrays.sort(taggedPaths, new Comparator<TaggedPath>() {
        public int compare(TaggedPath p, TaggedPath q) {
            return p.index - q.index;
        }
    });
    for (int i = n; --i >= 0;) {
        paths.set(i, taggedPaths[i].p);
    }
}
From source file:org.apache.beam.runners.core.construction.graph.Networks.java
public static <NodeT, EdgeT> String toDot(Network<NodeT, EdgeT> network) {
    StringBuilder builder = new StringBuilder();
    builder.append(String.format("digraph network {%n"));
    Map<NodeT, String> nodeName = Maps.newIdentityHashMap();
    network.nodes().forEach(node -> nodeName.put(node, "n" + nodeName.size()));
    for (Entry<NodeT, String> nodeEntry : nodeName.entrySet()) {
        builder.append(String.format(" %s [fontname=\"Courier New\" label=\"%s\"];%n",
                nodeEntry.getValue(), escapeDot(nodeEntry.getKey().toString())));
    }
    for (EdgeT edge : network.edges()) {
        EndpointPair<NodeT> endpoints = network.incidentNodes(edge);
        builder.append(String.format(" %s -> %s [fontname=\"Courier New\" label=\"%s\"];%n",
                nodeName.get(endpoints.source()), nodeName.get(endpoints.target()),
                escapeDot(edge.toString())));
    }
    builder.append("}");
    return builder.toString();
}
From source file:org.gradle.api.internal.artifacts.ivyservice.resolveengine.excludes.ModuleExclusions.java
private MergeOperation mergeOperation(AbstractModuleExclusion[] one, AbstractModuleExclusion[] two) {
    synchronized (mergeOperationLock) {
        Map<AbstractModuleExclusion[], MergeOperation> oneMap = mergeOperationCache.get(one);
        if (oneMap == null) {
            oneMap = Maps.newIdentityHashMap();
            mergeOperationCache.put(one, oneMap);
        }
        MergeOperation mergeOperation = oneMap.get(two);
        if (mergeOperation != null) {
            return mergeOperation;
        }
        mergeOperation = new MergeOperation(one, two);
        oneMap.put(two, mergeOperation);
        return mergeOperation;
    }
}
From source file:io.atomix.core.map.impl.PartitionedAtomicMapProxy.java
@Override
public CompletableFuture<Boolean> prepare(TransactionLog<MapUpdate<K, byte[]>> transactionLog) {
    Map<PartitionId, List<MapUpdate<K, byte[]>>> updatesGroupedByMap = Maps.newIdentityHashMap();
    transactionLog.records().forEach(update -> {
        updatesGroupedByMap
                .computeIfAbsent(getProxyClient().getPartitionId(update.key().toString()),
                        k -> Lists.newLinkedList())
                .add(update);
    });
    Map<PartitionId, TransactionLog<MapUpdate<K, byte[]>>> transactionsByMap = Maps.transformValues(
            updatesGroupedByMap,
            list -> new TransactionLog<>(transactionLog.transactionId(), transactionLog.version(), list));

    return Futures
            .allOf(transactionsByMap.entrySet().stream()
                    .map(e -> getProxyClient().applyOn(e.getKey(), service -> service.prepare(e.getValue()))
                            .thenApply(v -> v == PrepareResult.OK || v == PrepareResult.PARTIAL_FAILURE))
                    .collect(Collectors.toList()))
            .thenApply(list -> list.stream().reduce(Boolean::logicalAnd).orElse(true));
}
From source file:org.splevo.vpm.analyzer.DefaultVPMAnalyzerService.java
/**
 * Identify the mergeable variation points of a refinement and collect them in buckets,
 * one per mergeable subgroup.
 *
 * @param variationPoints
 *            The variation points to check.
 * @return The buckets per mergeable group, identified by one of the contained VPs.
 */
private Multimap<VariationPoint, VariationPoint> identifyMergeableVPs(EList<VariationPoint> variationPoints) {

    // The indexes are used to
    // i) build groups of refinements to merge
    // ii) fast lookup of which group a VP currently belongs to
    Multimap<VariationPoint, VariationPoint> mergeVPBuckets = LinkedHashMultimap.create();
    Map<VariationPoint, VariationPoint> invertedBucketIndex = Maps.newIdentityHashMap();

    VariationPoint[] vpArray = variationPoints.toArray(new VariationPoint[] {});

    // iterate over all pairs; the inner loop ensures no pair is checked twice
    for (int i = 0; i < vpArray.length; i++) {
        VariationPoint vp1 = vpArray[i];
        for (int j = i + 1; j < vpArray.length; j++) {
            VariationPoint vp2 = vpArray[j];

            if (vp1 == vp2) {
                logger.error("Comparing a VP with itself should not happen");
                continue;
            }

            if (canBeMerged(vp1, vp2)) {
                VariationPoint bucketKey = chooseBucket(mergeVPBuckets, invertedBucketIndex, vp1, vp2);

                invertedBucketIndex.put(vp2, bucketKey);
                mergeVPBuckets.get(bucketKey).add(vp2);

                invertedBucketIndex.put(vp1, bucketKey);
                mergeVPBuckets.get(bucketKey).add(vp1);
            }
        }
    }
    return mergeVPBuckets;
}
From source file:com.google.javascript.jscomp.TypeInference.java
private Map<TemplateType, JSType> inferTemplateTypesFromParameters(FunctionType fnType, Node call) {
    if (fnType.getTemplateTypeMap().getTemplateKeys().isEmpty()) {
        return Collections.emptyMap();
    }

    Map<TemplateType, JSType> resolvedTypes = Maps.newIdentityHashMap();
    Set<JSType> seenTypes = Sets.newIdentityHashSet();

    Node callTarget = call.getFirstChild();
    if (NodeUtil.isGet(callTarget)) {
        Node obj = callTarget.getFirstChild();
        maybeResolveTemplatedType(fnType.getTypeOfThis(), getJSType(obj), resolvedTypes, seenTypes);
    }

    if (call.hasMoreThanOneChild()) {
        maybeResolveTemplateTypeFromNodes(fnType.getParameters(), call.getChildAtIndex(1).siblings(),
                resolvedTypes, seenTypes);
    }
    return resolvedTypes;
}