List of usage examples for java.util.IdentityHashMap
public IdentityHashMap(int expectedMaxSize)
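Before the project examples below, here is a minimal standalone sketch (not drawn from any of the listed source files; class name and key values are illustrative) of what makes IdentityHashMap different from HashMap and why the examples pass a size to the constructor: keys are compared by reference identity (==) rather than equals(), and the int constructor argument is an expected maximum size, not an initial capacity.

    import java.util.HashMap;
    import java.util.IdentityHashMap;
    import java.util.Map;

    public class IdentityHashMapDemo {
        public static void main(String[] args) {
            // Two distinct String instances with equal contents.
            String a = new String("key");
            String b = new String("key");

            // HashMap compares keys with equals(), so the second put overwrites the first.
            Map<String, Integer> byEquals = new HashMap<>();
            byEquals.put(a, 1);
            byEquals.put(b, 2);
            System.out.println(byEquals.size()); // prints 1

            // IdentityHashMap compares keys with ==, so both instances are kept.
            // This is why the examples below use it to key tasks and entities by identity.
            // The constructor argument is an expected maximum size, not an initial capacity.
            Map<String, Integer> byIdentity = new IdentityHashMap<>(2);
            byIdentity.put(a, 1);
            byIdentity.put(b, 2);
            System.out.println(byIdentity.size()); // prints 2
        }
    }

In the examples that follow, IdentityHashMap is typically chosen because the keys (grid tasks, planning entities, data object descriptors) should be treated as distinct objects even when they might compare equal by equals().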
From source file:org.onosproject.store.hz.SMap.java
@Override
public void putAll(Map<? extends K, ? extends V> map) {
    Map<byte[], byte[]> sm = new IdentityHashMap<>(map.size());
    for (java.util.Map.Entry<? extends K, ? extends V> e : map.entrySet()) {
        sm.put(serializeKey(e.getKey()), serializeVal(e.getValue()));
    }
    m.putAll(sm);
}
From source file:xades4j.production.DataObjectDescsProcessor.java
/**
 * Returns the reference mappings resulting from the data object descriptions.
 * The corresponding {@code Reference}s and {@code Object}s are added to the
 * signature.
 * @throws UnsupportedAlgorithmException
 */
Map<DataObjectDesc, Reference> process(SignedDataObjects signedDataObjects, XMLSignature xmlSignature)
        throws UnsupportedAlgorithmException {
    for (ResourceResolver resolver : signedDataObjects.getResourceResolvers()) {
        xmlSignature.addResourceResolver(resolver);
    }

    Collection<DataObjectDesc> dataObjsDescs = signedDataObjects.getDataObjectsDescs();
    Map<DataObjectDesc, Reference> referenceMappings = new IdentityHashMap<DataObjectDesc, Reference>(
            dataObjsDescs.size());

    String refUri, refType;
    Transforms transforms;
    String digestMethodUri = this.algorithmsProvider.getDigestAlgorithmForDataObjsReferences();
    boolean hasNullURIReference = false;

    try {
        for (DataObjectDesc dataObjDesc : dataObjsDescs) {
            transforms = processTransforms(dataObjDesc, xmlSignature.getDocument());

            if (dataObjDesc instanceof DataObjectReference) {
                // If the data object info is a DataObjectReference, the Reference uri
                // and type are the ones specified on the object.
                DataObjectReference dataObjRef = (DataObjectReference) dataObjDesc;
                refUri = dataObjRef.getUri();
                refType = dataObjRef.getType();
            } else if (dataObjDesc instanceof EnvelopedXmlObject) {
                // If the data object info is a EnvelopedXmlObject we need to create a
                // XMLObject to embed it. The Reference uri will refer the new
                // XMLObject's id.
                EnvelopedXmlObject envXmlObj = (EnvelopedXmlObject) dataObjDesc;
                refUri = String.format("%s-object%d", xmlSignature.getId(), xmlSignature.getObjectLength());
                refType = Reference.OBJECT_URI;

                ObjectContainer xmlObj = new ObjectContainer(xmlSignature.getDocument());
                xmlObj.setId(refUri);
                xmlObj.appendChild(envXmlObj.getContent());
                xmlObj.setMimeType(envXmlObj.getMimeType());
                xmlObj.setEncoding(envXmlObj.getEncoding());
                xmlSignature.appendObject(xmlObj);

                refUri = '#' + refUri;
            } else if (dataObjDesc instanceof AnonymousDataObjectReference) {
                if (hasNullURIReference) {
                    // This shouldn't happen because SignedDataObjects does the validation.
                    throw new IllegalStateException("Multiple AnonymousDataObjectReference detected");
                }
                hasNullURIReference = true;

                refUri = refType = null;
                AnonymousDataObjectReference anonymousRef = (AnonymousDataObjectReference) dataObjDesc;
                xmlSignature.addResourceResolver(new ResolverAnonymous(anonymousRef.getDataStream()));
            } else {
                throw new ClassCastException(
                        "Unsupported SignedDataObjectDesc. Must be one of DataObjectReference, EnvelopedXmlObject and AnonymousDataObjectReference");
            }

            // Add the Reference. References need an ID because data object
            // properties may refer them.
            xmlSignature.addDocument(refUri, transforms, digestMethodUri,
                    String.format("%s-ref%d", xmlSignature.getId(), referenceMappings.size()), // id
                    refType);

            // SignedDataObjects doesn't allow repeated instances, so there's no
            // need to check for duplicate entries on the map.
            Reference newRef = null;
            Reference ref;
            for (int i = 0; i < xmlSignature.getSignedInfo().getLength() && newRef == null; i++) {
                ref = xmlSignature.getSignedInfo().item(i);
                if (StringUtils.equals(ref.getURI(), refUri)) {
                    newRef = ref;
                }
            }
            referenceMappings.put(dataObjDesc, newRef);
        }
    } catch (XMLSignatureException ex) {
        // -> xmlSignature.appendObject(xmlObj): not thrown when signing.
        // -> xmlSignature.addDocument(...): appears to be thrown when the digest
        //    algorithm is not supported.
        throw new UnsupportedAlgorithmException("Digest algorithm not supported in the XML Signature provider",
                digestMethodUri, ex);
    } catch (org.apache.xml.security.exceptions.XMLSecurityException ex) {
        // -> xmlSignature.getSignedInfo().item(...): shouldn't be thrown
        //    when signing.
        throw new IllegalStateException(ex);
    }

    return Collections.unmodifiableMap(referenceMappings);
}
From source file:org.diorite.impl.inventory.recipe.craft.SemiShapedCraftingRecipeImpl.java
@Override
public CraftingRecipeCheckResult isMatching(final GridInventory inventory) {
    final Player player = (inventory.getHolder() instanceof Player) ? (Player) inventory.getHolder() : null;

    final Short2ObjectMap<ItemStack> onCraft = new Short2ObjectOpenHashMap<>(2, .5F);

    final CraftingRecipePattern pattern = this.pattern;
    final int maxPatRow = pattern.getRows(), maxPatCol = pattern.getColumns();
    final int maxInvRow = inventory.getRows(), maxInvCol = inventory.getColumns();
    final CraftingGrid items = new CraftingGridImpl(maxInvRow, maxInvCol);
    final Collection<BiConsumer<Player, CraftingGrid>> reps = new ArrayList<>(maxInvCol * maxInvRow);
    final LinkedList<CraftingRecipeItem> ingredients = new LinkedList<>(this.getIngredients());
    final Collection<CraftingRepeatableRecipeItem> repeatableIngredients = new LinkedList<>(
            this.getRepeatableIngredients());
    final Map<CraftingRepeatableRecipeItem, List<ItemStack>> repeatableItems = new IdentityHashMap<>(
            this.repeatableIngredients.size());

    // TODO: ugh, no idea how to do this.
    return null;
}
From source file:xbird.xquery.ext.grid.MapQueryJob.java
@Override
public Map<GridTask, GridNode> map(GridTaskRouter router, MapExpr mapExpr) throws GridException {
    checkInjectedResouces();
    final String colPath = mapExpr.getCollectionPath();
    final GridNode[] liveNodes = router.getAllNodes();
    final int numLiveNodes = liveNodes.length;
    if (numLiveNodes == 0) { // no live node
        return Collections.emptyMap();
    }
    final Set<GridNode> mapableNodes = CollectionUtils.asSet(liveNodes);
    final Map<String, List<GridNode>> mapping;
    try {
        mapping = directory.prefixSearch(colPath, mapableNodes);
    } catch (DbException e) {
        throw new GridException("prefixSearch failed: " + colPath, e);
    }
    if (mapping.isEmpty()) { // no mapping found
        return Collections.emptyMap();
    }

    final BindingVariable bindingVar = mapExpr.getBindingVariable();
    final XQExpression bodyExpr = mapExpr.getBodyExpression();
    final GridNodeSelector selector = config.getNodeSelector();
    assert (selector != null);

    final Map<GridNode, List<String>> nodeKeysMap = new HashMap<GridNode, List<String>>(numLiveNodes);
    final Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(numLiveNodes);
    for (Map.Entry<String, List<GridNode>> entry : mapping.entrySet()) {
        final String key = entry.getKey();
        final List<GridNode> candidateNodes = entry.getValue();
        assert (!candidateNodes.isEmpty());
        final GridNode node = selector.selectNode(candidateNodes, null, config);
        candidateNodes.clear(); // let GC do its work
        List<String> mappedKeys = nodeKeysMap.get(node);
        if (mappedKeys == null) {
            mappedKeys = new ArrayList<String>(128);
            nodeKeysMap.put(node, mappedKeys);
            // TODO REVIEWME
            DispatchQueryExecTask task = new DispatchQueryExecTask(this, bindingVar, bodyExpr, mappedKeys, false);
            map.put(task, node);
        }
        mappedKeys.add(key);
    }

    if (LOG.isInfoEnabled()) {
        logJobInformation(map, getJobId());
    }

    this._results = new ArrayList<Sequence<? extends Item>>(map.size());
    return map;
}
From source file:org.opentripplanner.routing.algorithm.strategies.WeightTable.java
/**
 * Build the weight table, parallelized according to the number of processors
 */
public void buildTable() {
    ArrayList<TransitStop> stopVertices;

    LOG.debug("Number of vertices: " + g.getVertices().size());
    stopVertices = new ArrayList<TransitStop>();
    for (Vertex gv : g.getVertices())
        if (gv instanceof TransitStop)
            stopVertices.add((TransitStop) gv);
    int nStops = stopVertices.size();

    stopIndices = new IdentityHashMap<Vertex, Integer>(nStops);
    for (int i = 0; i < nStops; i++)
        stopIndices.put(stopVertices.get(i), i);
    LOG.debug("Number of stops: " + nStops);

    table = new float[nStops][nStops];
    for (float[] row : table)
        Arrays.fill(row, Float.POSITIVE_INFINITY);

    LOG.debug("Performing search at each transit stop.");

    int nThreads = Runtime.getRuntime().availableProcessors();
    LOG.debug("number of threads: " + nThreads);
    ArrayBlockingQueue<Runnable> taskQueue = new ArrayBlockingQueue<Runnable>(nStops);
    ThreadPoolExecutor threadPool = new ThreadPoolExecutor(nThreads, nThreads, 10, TimeUnit.SECONDS, taskQueue);

    GenericObjectPool heapPool = new GenericObjectPool(
            new PoolableBinHeapFactory<State>(g.getVertices().size()), nThreads);

    // make one heap and recycle it
    RoutingRequest options = new RoutingRequest();
    // TODO LG Check this change:
    options.setWalkSpeed(maxWalkSpeed);
    final double MAX_WEIGHT = 60 * 60 * options.walkReluctance;
    final double OPTIMISTIC_BOARD_COST = options.getBoardCostLowerBound();

    // create a task for each transit stop in the graph
    ArrayList<Callable<Void>> tasks = new ArrayList<Callable<Void>>();
    for (TransitStop origin : stopVertices) {
        SPTComputer task = new SPTComputer(heapPool, options, MAX_WEIGHT, OPTIMISTIC_BOARD_COST, origin);
        tasks.add(task);
    }
    try {
        // invoke all of tasks.
        threadPool.invokeAll(tasks);
        threadPool.shutdown();
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
    floyd();
}
From source file:gridool.db.partitioning.monetdb.ImportForeignKeysJob.java
public Map<GridTask, GridNode> map(GridTaskRouter router, Pair<String, Boolean> params) throws GridException {
    final GridNode[] nodes = router.getAllNodes();
    String templateDbName = params.getFirst();
    boolean useGzip = params.getSecond();
    final ForeignKey[] fkeys = getForeignKeys(templateDbName);
    if (fkeys.length == 0) {
        throw new GridException("No foreign key found on template DB: " + templateDbName);
    }

    // #1 scatter missing foreign keys
    ScatterMissingReferencingKeysJobConf jobConf1 = new ScatterMissingReferencingKeysJobConf(fkeys, useGzip, nodes);
    GridJobFuture<DumpFile[]> future1 = kernel.execute(ScatterMissingReferencingKeysJob.class, jobConf1);
    DumpFile[] sentDumpedFiles = GridUtils.invokeGet(future1);

    // #2 retrieve missing referenced rows in exported tables
    RetrieveMissingReferencedRowsJobConf jobConf2 = new RetrieveMissingReferencedRowsJobConf(sentDumpedFiles,
            jobConf1);
    GridJobFuture<DumpFile[]> future2 = kernel.execute(RetrieveMissingReferenedRowsJob.class, jobConf2);
    DumpFile[] receivedDumpedFiles = GridUtils.invokeGet(future2);

    // #3 import collected missing foreign keys
    final int numNodes = nodes.length;
    final Map<GridNode, List<DumpFile>> dumpFileMapping = mapDumpFiles(receivedDumpedFiles, numNodes);
    final Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(numNodes);
    for (final GridNode node : nodes) {
        List<DumpFile> dumpFileList = dumpFileMapping.get(node);
        DumpFile[] dumpFiles = ArrayUtils.toArray(dumpFileList, DumpFile[].class);
        for (DumpFile df : dumpFiles) {
            df.clearAssociatedNode();
        }
        GridTask task = new ImportCollectedExportedKeysTask(this, fkeys, dumpFiles);
        map.put(task, node);
    }
    return map;
}
From source file:org.diorite.impl.inventory.recipe.craft.ShapelessCraftingRecipeImpl.java
@Override
public CraftingRecipeCheckResult isMatching(final GridInventory inventory) {
    final Player player = (inventory.getHolder() instanceof Player) ? (Player) inventory.getHolder() : null;

    final Short2ObjectMap<ItemStack> onCraft = new Short2ObjectOpenHashMap<>(2, .5F);

    final int maxInvRow = inventory.getRows(), maxInvCol = inventory.getColumns();
    final LinkedList<CraftingRecipeItem> ingredients = new LinkedList<>(this.getIngredients());
    final Collection<CraftingRepeatableRecipeItem> repeatableIngredients = new LinkedList<>(
            this.getRepeatableIngredients());
    final Collection<BiConsumer<Player, CraftingGrid>> reps = new ArrayList<>(maxInvCol * maxInvRow);
    final CraftingGrid items = new CraftingGridImpl(maxInvRow, maxInvCol);
    int col = -1, row = 0;
    ItemStack result;
    if (this.repeatableIngredients.isEmpty()) {
        for (short i = 1, size = (short) inventory.size(); i < size; i++) {
            if (++col >= maxInvCol) {
                col = 0;
                if (++row > maxInvRow) {
                    throw new IllegalStateException("Inventory is larger than excepted.");
                }
            }
            final ItemStack item = inventory.getItem(i);
            if (item == null) {
                continue;
            }
            boolean matching = false;
            //noinspection Duplicates
            for (final Iterator<CraftingRecipeItem> iterator = ingredients.iterator(); iterator.hasNext();) {
                final CraftingRecipeItem ingredient = iterator.next();
                final ItemStack valid = ingredient.isValid(player, item);
                if (valid != null) {
                    final short icpy = i;
                    reps.add((p, c) -> {
                        final ItemStack repl = ingredient.getReplacement(p, c);
                        if (repl != null) {
                            onCraft.put(icpy, repl);
                        }
                    });
                    items.setItem(row, col, valid);
                    iterator.remove();
                    matching = true;
                    break;
                }
            }
            if (!matching) {
                return null;
            }
        }
        result = (this.resultFunc == null) ? this.result : this.resultFunc.apply(player, items.clone());
    } else {
        final Map<CraftingRepeatableRecipeItem, List<ItemStack>> repeatableItems = new IdentityHashMap<>(
                this.repeatableIngredients.size());
        for (short i = 1, size = (short) inventory.size(); i < size; i++) {
            if (++col >= maxInvCol) {
                col = 0;
                if (++row > maxInvRow) {
                    throw new IllegalStateException("Inventory is larger than excepted.");
                }
            }
            final ItemStack item = inventory.getItem(i);
            if (item == null) {
                continue;
            }
            boolean matching = false;
            if (!ingredients.isEmpty()) {
                //noinspection Duplicates
                for (final Iterator<CraftingRecipeItem> iterator = ingredients.iterator(); iterator.hasNext();) {
                    final CraftingRecipeItem ingredient = iterator.next();
                    final ItemStack valid = ingredient.isValid(player, item);
                    if (valid != null) {
                        final short icpy = i;
                        reps.add((p, c) -> {
                            final ItemStack repl = ingredient.getReplacement(p, c);
                            if (repl != null) {
                                onCraft.put(icpy, repl);
                            }
                        });
                        items.setItem(row, col, valid);
                        iterator.remove();
                        matching = true;
                        break;
                    }
                }
            }
            if (!matching) {
                for (final CraftingRepeatableRecipeItem ingredient : this.repeatableIngredients) {
                    final ItemStack valid = ingredient.isValid(player, item);
                    if (valid != null) {
                        final short icpy = i;
                        reps.add((p, c) -> {
                            final ItemStack repl = ingredient.getReplacement(p, c);
                            if (repl != null) {
                                onCraft.put(icpy, repl);
                            }
                        });
                        List<ItemStack> repItems = repeatableItems.get(ingredient);
                        if (repItems == null) {
                            repeatableIngredients.remove(ingredient);
                            repItems = new ArrayList<>(10);
                            repeatableItems.put(ingredient, repItems);
                        }
                        repItems.add(valid);
                        items.setItem(row, col, valid);
                        matching = true;
                        break;
                    }
                }
            }
            if (!matching) {
                return null;
            }
        }
        if (!repeatableIngredients.isEmpty()) {
            return null;
        }
        result = (this.resultFunc == null) ? this.result : this.resultFunc.apply(player, items.clone());
        for (final Entry<CraftingRepeatableRecipeItem, List<ItemStack>> entry : repeatableItems.entrySet()) {
            result = entry.getKey().transform(result, entry.getValue());
            result = entry.getKey().transform(result, items);
        }
    }
    reps.forEach(c -> c.accept(player, items));
    return ingredients.isEmpty() ? new CraftingRecipeCheckResultImpl(this, result, items, onCraft) : null;
}
From source file:org.apache.hadoop.hbase.client.AsyncBatchRpcRetryingCaller.java
public AsyncBatchRpcRetryingCaller(HashedWheelTimer retryTimer, AsyncConnectionImpl conn, TableName tableName,
        List<? extends Row> actions, long pauseNs, int maxAttempts, long operationTimeoutNs, long rpcTimeoutNs,
        int startLogErrorsCnt) {
    this.retryTimer = retryTimer;
    this.conn = conn;
    this.tableName = tableName;
    this.pauseNs = pauseNs;
    this.maxAttempts = maxAttempts;
    this.operationTimeoutNs = operationTimeoutNs;
    this.rpcTimeoutNs = rpcTimeoutNs;
    this.startLogErrorsCnt = startLogErrorsCnt;
    this.actions = new ArrayList<>(actions.size());
    this.futures = new ArrayList<>(actions.size());
    this.action2Future = new IdentityHashMap<>(actions.size());
    for (int i = 0, n = actions.size(); i < n; i++) {
        Row rawAction = actions.get(i);
        Action action = new Action(rawAction, i);
        if (rawAction instanceof Append || rawAction instanceof Increment) {
            action.setNonce(conn.getNonceGenerator().newNonce());
        }
        this.actions.add(action);
        CompletableFuture<T> future = new CompletableFuture<>();
        futures.add(future);
        action2Future.put(action, future);
    }
    this.action2Errors = new IdentityHashMap<>();
    this.startNs = System.nanoTime();
}
From source file:org.optaplanner.core.impl.score.director.AbstractScoreDirector.java
public Solution cloneSolution(Solution originalSolution) {
    SolutionDescriptor solutionDescriptor = getSolutionDescriptor();
    Solution cloneSolution = solutionDescriptor.getSolutionCloner().cloneSolution(originalSolution);
    if (scoreDirectorFactory.isAssertClonedSolution()) {
        if (!ObjectUtils.equals(originalSolution.getScore(), cloneSolution.getScore())) {
            throw new IllegalStateException("Cloning corruption: "
                    + "the original's score (" + originalSolution.getScore()
                    + ") is different from the clone's score (" + cloneSolution.getScore() + ").\n"
                    + "Check the " + SolutionCloner.class.getSimpleName() + ".");
        }
        List<Object> originalEntityList = solutionDescriptor.getEntityList(originalSolution);
        Map<Object, Object> originalEntityMap = new IdentityHashMap<Object, Object>(originalEntityList.size());
        for (Object originalEntity : originalEntityList) {
            originalEntityMap.put(originalEntity, null);
        }
        for (Object cloneEntity : solutionDescriptor.getEntityList(cloneSolution)) {
            if (originalEntityMap.containsKey(cloneEntity)) {
                throw new IllegalStateException("Cloning corruption: "
                        + "the same entity (" + cloneEntity + ") is present in both the original and the clone.\n"
                        + "So when a planning variable in the original solution changes, "
                        + "the cloned solution will change too.\n"
                        + "Check the " + SolutionCloner.class.getSimpleName() + ".");
            }
        }
    }
    return cloneSolution;
}
From source file:xbird.xquery.ext.grid.DispatchQueryExecTask.java
/**
 * @see QueryExecJob#map(gridool.routing.GridTaskRouter, DispatchQueryExecTask)
 */
public Map<GridTask, GridNode> mapQueryTask(QueryExecJob execJob) throws GridException {
    checkInjectedResources();
    final GridNodeInfo localNode = GridUtils.getLocalNode(config);
    if (!excludeNodeList.contains(localNode)) {
        excludeNodeList.add(localNode);
    }
    final GridNodeSelector nodeSelector = config.getNodeSelector();
    final LockManager lockManager = directory.getLockManager();

    final Map<GridNode, List<String>> assignMap = new HashMap<GridNode, List<String>>(relativePaths.size());
    final List<Pair<String, Lock>> localExecResources = new ArrayList<Pair<String, Lock>>(relativePaths.size());
    int totalLocked = 0;
    for (String path : relativePaths) {
        ReadWriteLock lock = lockManager.obtainLock(path);
        final Lock rlock = lock.readLock();
        if (rlock.tryLock()) {
            localExecResources.add(new Pair<String, Lock>(path, rlock));
        } else {
            totalLocked++;
            final List<GridNode> replicatedNodes;
            try {
                replicatedNodes = directory.exactSearch(path, excludeNodeList);
            } catch (DbException e) {
                LOG.error(e.getMessage());
                throw new GridException("Exception caused while lookup: " + path, e);
            }
            if (replicatedNodes == null || replicatedNodes.isEmpty()) {
                throw new GridException("No replicated document found for path: " + path);
            }
            // TODO Select a node that least recently used for write requests.
            GridNode node = nodeSelector.selectNode(replicatedNodes, this, config);
            assert (node != null);
            List<String> mappedPaths = assignMap.get(node);
            if (mappedPaths == null) {
                mappedPaths = new ArrayList<String>(16);
                assignMap.put(node, mappedPaths);
            }
            mappedPaths.add(path);
        }
    }

    final Map<GridTask, GridNode> map = new IdentityHashMap<GridTask, GridNode>(assignMap.size() + 1);
    for (Map.Entry<GridNode, List<String>> e : assignMap.entrySet()) {
        GridNode node = e.getKey();
        List<String> mappedPaths = e.getValue();
        DispatchQueryExecTask dispatchTask = new DispatchQueryExecTask(execJob, bindingVar, bodyExpr,
                mappedPaths, true);
        map.put(dispatchTask, node);
    }
    if (!localExecResources.isEmpty()) {
        boolean doForwarding = redirectable;
        LocalQueryExecTask localTask = new LocalQueryExecTask(execJob, bindingVar, bodyExpr, localExecResources,
                doForwarding);
        map.put(localTask, localNode);
    }

    if (LOG.isInfoEnabled()) {
        LOG.info("DispatchQueryExecTask is mapped to " + assignMap.size() + " DispatchQueryExecTask and "
                + (localExecResources.isEmpty() ? '0' : '1') + " LocalQueryExecTask ("
                + localExecResources.size() + " localExecResources), " + totalLocked
                + " documents are write-locked");
    }
    return map;
}