List of usage examples for `com.google.common.collect.Maps#newIdentityHashMap`
public static <K, V> IdentityHashMap<K, V> newIdentityHashMap()
From source file:io.atomix.core.set.impl.DistributedSetProxy.java
@Override public CompletableFuture<Boolean> prepare(TransactionLog<SetUpdate<String>> transactionLog) { Map<PartitionId, List<SetUpdate<String>>> updatesGroupedBySet = Maps.newIdentityHashMap(); transactionLog.records().forEach(update -> { updatesGroupedBySet//from www. j a v a 2 s. co m .computeIfAbsent(getProxyClient().getPartitionId(update.element()), k -> Lists.newLinkedList()) .add(update); }); Map<PartitionId, TransactionLog<SetUpdate<String>>> transactionsBySet = Maps.transformValues( updatesGroupedBySet, list -> new TransactionLog<>(transactionLog.transactionId(), transactionLog.version(), list)); return Futures .allOf(transactionsBySet.entrySet().stream() .map(e -> getProxyClient().applyOn(e.getKey(), service -> service.prepare(e.getValue())) .thenApply(v -> v == PrepareResult.OK || v == PrepareResult.PARTIAL_FAILURE)) .collect(Collectors.toList())) .thenApply(list -> list.stream().reduce(Boolean::logicalAnd).orElse(true)); }
From source file:org.apache.apex.malhar.lib.counters.BasicCounters.java
/**
 * Creates an empty counters container.
 *
 * @param counterType type of counter
 */
public BasicCounters(@Nonnull Class<T> counterType) {
    this.counterType = counterType;
    cache = Maps.newIdentityHashMap();
}
From source file:co.cask.common.internal.io.ReflectionDatumReader.java
/**
 * Creates a datum reader that decodes values of the given schema into the given type
 * via reflection.
 */
@SuppressWarnings("unchecked")
public ReflectionDatumReader(Schema schema, TypeToken<T> type) {
    this.schema = schema;
    this.type = type;
    // Identity-keyed cache of instantiators, populated lazily during reads.
    this.creators = Maps.newIdentityHashMap();
    this.creatorFactory = new InstantiatorFactory(true);
    this.fieldAccessorFactory = new ReflectionFieldAccessorFactory();
}
From source file:com.google.android.testing.nativedriver.server.ActivitiesReporter.java
/** Creates a reporter whose live-activity bookkeeping is keyed by object identity. */
public ActivitiesReporter() {
    liveActivities = Maps.newIdentityHashMap();
}
From source file:org.sonatype.nexus.internal.jetty.ConnectorRegistrarImpl.java
/**
 * @param serverConfiguration the Jetty server configuration to register connectors with;
 *     must not be {@code null}
 */
@Inject
public ConnectorRegistrarImpl(final JettyServerConfiguration serverConfiguration) {
    this.managedConfigurations = Maps.newIdentityHashMap();
    this.serverConfiguration = checkNotNull(serverConfiguration);
}
From source file:nl.knaw.huygens.tei.xpath.XPathUtil.java
public static Map<String, String> getNamespaceInfo(String xml) { Map<String, String> namespaces = Maps.newIdentityHashMap(); XMLInputFactory inputFactory = XMLInputFactory.newInstance(); try {//from w w w . j a v a 2 s. c o m XMLStreamReader xreader = inputFactory.createXMLStreamReader(IOUtils.toInputStream(xml, "UTF-8")); while (xreader.hasNext()) { if (xreader.next() == XMLStreamConstants.START_ELEMENT) { QName qName = xreader.getName(); if (qName != null) { addNamespace(namespaces, qName.getPrefix(), qName.getNamespaceURI()); for (int i = 0; i < xreader.getAttributeCount(); i++) { addNamespace(namespaces, xreader.getAttributePrefix(i), xreader.getAttributeNamespace(i)); } } } } } catch (XMLStreamException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } return namespaces; }
From source file:com.kodebeagle.javaparser.SingleClassBindingResolver.java
/** * Returns the location and type of all the variables. * * @return/*from ww w . j av a 2 s .c o m*/ */ public Map<ASTNode, String> getVariableTypesAtPosition() { final Map<ASTNode, String> variableTypes = Maps.newIdentityHashMap(); for (final Entry<Integer, List<ASTNode>> variableBinding : resolver.getVariableBinding().entrySet()) { Integer bindingId = variableBinding.getKey(); final String varType = checkNotNull(resolver.getVariableTypes().get(bindingId)); for (final ASTNode node : variableBinding.getValue()) { variableTypes.put(node, varType); } } return variableTypes; }
From source file:com.yahoo.yqlplus.engine.internal.tasks.GraphPlanner.java
/**
 * Plan a graph of tasks using the terminal step as a starting point. Discover all of the used steps from those roots, and then return the starting task.
 */
public ForkTask plan(Step root) {
    // Identity-keyed: each Step instance is its own graph node.
    Map<Step, Node> nodes = Maps.newIdentityHashMap();
    discover(root, nodes);
    // Build the reverse edges: for every input edge, record this step as a dependent.
    for (Map.Entry<Step, Node> e : nodes.entrySet()) {
        for (Step dep : e.getValue().inputs) {
            nodes.get(dep).deps.add(e.getKey());
        }
    }
    // Seed availability propagation from source nodes (nodes with no inputs).
    for (Map.Entry<Step, Node> e : nodes.entrySet()) {
        if (e.getValue().inputs.isEmpty()) {
            populateAvailable(e.getValue(), Sets.<Value>newIdentityHashSet(), nodes);
        }
    }
    // Coalesce linear chains: repeatedly merge a node into its sole dependent when that
    // dependent has it as its only input, until no more merges are possible.
    boolean modified = true;
    List<Step> keys = Lists.newArrayList();
    while (modified) {
        modified = false;
        // we plan to modify the map, so copy the set of keys before iteration
        keys.clear();
        keys.addAll(nodes.keySet());
        for (Step key : keys) {
            Node node = nodes.get(key);
            if (node == null) {
                continue;
            }
            if (node.deps.size() == 1) {
                // if we are the only input to that dep...
                Step dep = Iterables.get(node.deps, 0);
                if (dep.getInputs().size() == 1) {
                    // then merge into that
                    Node target = nodes.get(dep);
                    // Prepend this node's work to the target, rewire inputs/deps, and
                    // drop this node from the graph.
                    node.todo.addAll(target.todo);
                    target.todo = node.todo;
                    target.inputs.remove(key);
                    for (Step p : node.inputs) {
                        nodes.get(p).deps.remove(key);
                        nodes.get(p).deps.add(dep);
                    }
                    target.inputs.addAll(node.inputs);
                    target.available.addAll(node.available);
                    nodes.remove(key);
                    modified = true;
                }
            }
        }
    }
    // so now all remaining nodes have 0 or > 1 deps
    // the 0 deps nodes are "ending" nodes, and the > deps nodes are fork nodes
    // set up the run & end nodes
    ForkTask start = new ForkTask();
    // Join tasks are shared by input-set, so multi-input nodes with identical input
    // sets funnel through the same JoinTask.
    Map<Set<Step>, JoinTask> joinTasks = Maps.newHashMap();
    for (Node n : nodes.values()) {
        n.run = new RunTask(n.todo);
        n.run.setAvailable(n.available);
        if (n.inputs.size() > 1) {
            Set<Step> key = ImmutableSet.copyOf(n.inputs);
            JoinTask join = joinTasks.get(key);
            if (join == null) {
                join = new JoinTask();
                joinTasks.put(key, join);
                join.setAvailable(Sets.<Value>newIdentityHashSet());
            }
            join.addNext(n.run);
            join.getAvailable().addAll(n.available);
            n.next = join;
        } else {
            n.next = n.run;
        }
    }
    // Wire the task graph: source nodes hang off the start fork; each node's run task
    // feeds its dependents' entry points (registering priors on join tasks).
    for (Node n : nodes.values()) {
        if (n.inputs.isEmpty()) {
            start.addNext(n.next);
        }
        for (Step dep : n.deps) {
            Node node = nodes.get(dep);
            if (node.next instanceof JoinTask) {
                ((JoinTask) node.next).getPriors().add(n.run);
            }
            n.run.addNext(node.next);
        }
    }
    return start;
}
From source file:co.cask.cdap.internal.app.runtime.procedure.ProcedureHandlerMethodFactory.java
ProcedureHandlerMethodFactory(Program program, DataFabricFacadeFactory dataFabricFacadeFactory,
                              BasicProcedureContextFactory contextFactory) {
    // Identity-keyed map of weak handler references (WeakReference uses reference
    // equality anyway), wrapped for thread-safe access.
    Map<WeakReference<HandlerMethod>, ProcedureEntry> entries = Maps.newIdentityHashMap();
    procedures = Collections.synchronizedMap(entries);
    refQueue = new ReferenceQueue<HandlerMethod>();
    this.program = program;
    this.dataFabricFacadeFactory = dataFabricFacadeFactory;
    this.contextFactory = contextFactory;
}
From source file:edu.byu.nlp.al.EmpiricalAnnotationLayersInstanceManager.java
/**
 * Builds the serving queue: all annotations ordered breadth-first across instances
 * (1-deep over the whole corpus before 2-deep, etc.), with measurements queued ahead of
 * annotations, each capped by the given maximums.
 */
@VisibleForTesting EmpiricalAnnotationLayersInstanceManager(Iterable<FlatInstance<D, L>> instances, EmpiricalAnnotations<D, L> annotations, AnnotationRecorder<D, L> annotationRecorder, int maxNumAnnotations, int maxNumMeasurements, boolean prioritizeLabelProportions, RandomGenerator rnd) {
    super(annotationRecorder);
    // make a mutable collection of all annotations for each instance
    List<FlatInstance<D, L>> sortedAnnotations = Lists.newArrayList();
    // NOTE(review): identity-keyed map with String keys (inst.getSource()). This only
    // dedupes if source strings are canonical instances — equal-but-distinct source
    // strings would get separate entries. Confirm sources are interned/shared; a
    // regular HashMap looks intended here.
    Map<String, Deque<FlatInstance<D, L>>> perInstanceAnnotationLists = Maps.newIdentityHashMap();
    for (FlatInstance<D, L> inst : instances) {
        // find all annotations associated with this item
        Collection<FlatInstance<D, L>> anns = annotations.getAnnotationsFor(inst.getSource(), inst.getData()).values();
        perInstanceAnnotationLists.put(inst.getSource(), Deques.randomizedDeque(anns, rnd));
    }
    // grab one annotation for each instance until they are gone
    // (annotate the whole corpus 1-deep before starting on 2-deep, and so on)
    while (perInstanceAnnotationLists.size() > 0) {
        Set<String> toRemove = Sets.newHashSet();
        // Visit instances in a fresh random order each round to avoid ordering bias.
        for (String src : Iterables2.shuffled(perInstanceAnnotationLists.keySet(), rnd)) {
            Deque<FlatInstance<D, L>> anns = perInstanceAnnotationLists.get(src);
            if (anns.size() > 0) {
                // add 1 to the queue for this instance
                sortedAnnotations.add(anns.pop());
            }
            if (anns.size() == 0) {
                toRemove.add(src);
            }
        }
        // Remove exhausted instances after iteration to avoid concurrent modification.
        for (String src : toRemove) {
            perInstanceAnnotationLists.remove(src);
        }
    }
    // interleave measurements and annotations in the final queue
    Deque<FlatInstance<D, L>> measurementDeque = Deques.randomizedDeque(annotations.getMeasurements(), rnd);
    prioritizeMeasurements(measurementDeque, prioritizeLabelProportions);
    Deque<FlatInstance<D, L>> annotationDeque = new ArrayDeque<FlatInstance<D, L>>(sortedAnnotations);
    queue = Lists.newLinkedList(); // better queueing behavior
    // add measurements
    int numMeasurements = 0;
    while (measurementDeque.size() > 0 && numMeasurements < maxNumMeasurements) {
        numMeasurements += 1;
        queue.add(measurementDeque.pop());
    }
    // add annotations
    int numAnnotations = 0;
    while (annotationDeque.size() > 0 && numAnnotations < maxNumAnnotations) {
        numAnnotations += 1;
        queue.add(annotationDeque.pop());
    }
}