List of usage examples for java.util.IdentityHashMap: the IdentityHashMap() constructor
public IdentityHashMap()
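Constructs a new, empty identity hash map with a default expected maximum size (21). Unlike HashMap, an IdentityHashMap compares keys (and values) with reference equality (==) rather than equals(). Before the project examples below, a minimal sketch (class and variable names are illustrative, not taken from any of the projects) contrasting the two behaviors:

import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Map;

public class IdentityHashMapDemo {
    public static void main(String[] args) {
        // two distinct String instances that are equals() but not ==
        String a1 = new String("A");
        String a2 = new String("A");

        Map<String, String> hashMap = new HashMap<>();
        hashMap.put(a1, "first");
        hashMap.put(a2, "second"); // replaces the entry for a1, since a1.equals(a2)

        Map<String, String> identityMap = new IdentityHashMap<>();
        identityMap.put(a1, "first");
        identityMap.put(a2, "second"); // a separate entry, since a1 != a2

        System.out.println(hashMap.size());     // 1
        System.out.println(identityMap.size()); // 2
    }
}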
From source file:com.evolveum.midpoint.prism.schema.PrismSchemaImpl.java
@Override
public String debugDump(int indent) {
    // identity-based "seen" map: guards the recursive dump against revisiting
    // the same Definition instance, even when definitions compare equal()
    IdentityHashMap<Definition, Object> seen = new IdentityHashMap<>();
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < indent; i++) {
        sb.append(INDENT_STRING);
    }
    sb.append(toString()).append("\n");
    Iterator<Definition> iterator = definitions.iterator();
    while (iterator.hasNext()) {
        Definition def = iterator.next();
        sb.append(def.debugDump(indent + 1, seen));
        if (iterator.hasNext()) {
            sb.append("\n");
        }
    }
    return sb.toString();
}
From source file:org.springframework.boot.devtools.restart.Restarter.java
@SuppressWarnings("rawtypes") private void triggerShutdownHooks() throws Exception { Class<?> hooksClass = Class.forName("java.lang.ApplicationShutdownHooks"); Method runHooks = hooksClass.getDeclaredMethod("runHooks"); runHooks.setAccessible(true);//from ww w. j ava 2 s. c o m runHooks.invoke(null); Field field = hooksClass.getDeclaredField("hooks"); field.setAccessible(true); field.set(null, new IdentityHashMap()); }
From source file:org.optaplanner.benchmark.impl.result.SolverBenchmarkResult.java
protected static Map<SolverBenchmarkResult, SolverBenchmarkResult> createMergeMap(
        PlannerBenchmarkResult newPlannerBenchmarkResult,
        List<SingleBenchmarkResult> singleBenchmarkResultList) {
    // IdentityHashMap because different SolverBenchmarkResult instances are never merged
    Map<SolverBenchmarkResult, SolverBenchmarkResult> mergeMap
            = new IdentityHashMap<SolverBenchmarkResult, SolverBenchmarkResult>();
    Map<String, Integer> nameCountMap = new HashMap<String, Integer>();
    for (SingleBenchmarkResult singleBenchmarkResult : singleBenchmarkResultList) {
        SolverBenchmarkResult oldResult = singleBenchmarkResult.getSolverBenchmarkResult();
        if (!mergeMap.containsKey(oldResult)) {
            SolverBenchmarkResult newResult = new SolverBenchmarkResult(newPlannerBenchmarkResult);
            Integer nameCount = nameCountMap.get(oldResult.name);
            if (nameCount == null) {
                nameCount = 1;
            } else {
                nameCount++;
            }
            nameCountMap.put(oldResult.name, nameCount);
            newResult.solverConfig = oldResult.solverConfig;
            newResult.singleBenchmarkResultList = new ArrayList<SingleBenchmarkResult>(
                    oldResult.singleBenchmarkResultList.size());
            mergeMap.put(oldResult, newResult);
            newPlannerBenchmarkResult.getSolverBenchmarkResultList().add(newResult);
        }
    }
    // Make each name unique
    for (Map.Entry<SolverBenchmarkResult, SolverBenchmarkResult> entry : mergeMap.entrySet()) {
        SolverBenchmarkResult oldResult = entry.getKey();
        SolverBenchmarkResult newResult = entry.getValue();
        if (nameCountMap.get(oldResult.name) > 1) {
            newResult.name = oldResult.name + " (" + oldResult.getPlannerBenchmarkResult().getName() + ")";
        } else {
            newResult.name = oldResult.name;
        }
    }
    return mergeMap;
}
From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java
public void testEntrySetRemove() {
    IdentityHashMap hashMap = new IdentityHashMap();
    hashMap.put("A", "B");
    IdentityHashMap dummy = new IdentityHashMap();
    dummy.put("A", "b");
    Entry bogus = (Entry) dummy.entrySet().iterator().next();
    Set entrySet = hashMap.entrySet();
    // the bogus entry maps "A" to a different value, so nothing may be removed
    boolean removed = entrySet.remove(bogus);
    assertFalse(removed);
    assertEquals("B", hashMap.get("A"));
}
From source file:org.archive.crawler.restlet.JobRelatedResource.java
/**
 * Get a map giving object beanNames.
 *
 * @return map from object to beanName
 */
private IdentityHashMap<Object, String> getBeanToNameMap() {
    if (beanToNameMap == null) {
        beanToNameMap = new IdentityHashMap<Object, String>();
        for (Object entryObj : cj.getJobContext().getBeansOfType(Object.class).entrySet()) {
            Map.Entry<?, ?> entry = (Map.Entry<?, ?>) entryObj;
            beanToNameMap.put(entry.getValue(), (String) entry.getKey());
        }
    }
    return beanToNameMap;
}
From source file:org.jamocha.rating.fraj.RatingProvider.java
private double rateBetaWithoutExistentials(final StatisticsProvider statisticsProvider,
        final PathNodeFilterSet toRate,
        final Map<Set<PathFilterList>, List<Pair<List<Set<PathFilterList>>, List<PathFilter>>>> componentToJoinOrder,
        final Map<Path, Set<PathFilterList>> pathToPreNetworkComponents) {
    final IdentityHashMap<Set<PathFilterList>, Data> preNetworkComponentToData = new IdentityHashMap<>();
    for (final Set<PathFilterList> comp : componentToJoinOrder.keySet()) {
        preNetworkComponentToData.put(comp, statisticsProvider.getData(comp));
    }
    final double tupleSize = preNetworkComponentToData.values().stream().mapToDouble(Data::getTupleSize).sum();
    final double tuplesPerPage = statisticsProvider.getPageSize() / tupleSize;
    final double rowCount = calcBetaUnfilteredSize(statisticsProvider, componentToJoinOrder,
            pathToPreNetworkComponents, componentToJoinOrder.keySet());
    // join size is needed twice per component, thus pre-calculate it
    final Map<Set<PathFilterList>, Double> preNetworkComponentToJoinSize = preNetworkComponentToData.keySet()
            .stream()
            .collect(toMap(Function.identity(),
                    component -> joinSize(statisticsProvider, component, componentToJoinOrder.get(component),
                            componentToJoinOrder.keySet(), pathToPreNetworkComponents)));
    final double finsert = preNetworkComponentToData.entrySet().stream()
            .mapToDouble(
                    entry -> entry.getValue().getFinsert() * preNetworkComponentToJoinSize.get(entry.getKey()))
            .sum();
    final double fdelete = preNetworkComponentToData.values().stream().mapToDouble(Data::getFdelete).sum();
    // publish information to statistics provider
    {
        final Set<PathFilterList> filters = new HashSet<>();
        componentToJoinOrder.keySet().forEach(filters::addAll);
        filters.add(toRate);
        statisticsProvider.setData(filters, new Data(finsert, fdelete, rowCount, tupleSize));
    }
    final double mxBeta = m(rowCount, tuplesPerPage);
    final double runtimeCost = preNetworkComponentToData.entrySet().stream().mapToDouble(entry -> {
        final Set<PathFilterList> component = entry.getKey();
        final Data data = entry.getValue();
        return data.getFinsert() * costPosInsVarI(statisticsProvider, component,
                componentToJoinOrder.get(component), componentToJoinOrder.keySet(),
                pathToPreNetworkComponents)
                + data.getFdelete() * (mxBeta + cardenas(mxBeta, preNetworkComponentToJoinSize.get(component)));
    }).sum();
    final double memoryCost = rowCount * tupleSize;
    return cpuAndMemCostCombiner.applyAsDouble(runtimeCost, memoryCost);
}
From source file:org.orekit.models.earth.tessellation.EllipsoidTessellator.java
/** Sample a zone of interest into a grid sample of {@link GeodeticPoint geodetic points}.
 * <p>
 * The created points will be entirely within the zone of interest.
 * </p>
 * @param zone zone of interest to sample
 * @param width grid sample cells width as a distance on surface (in meters)
 * @param length grid sample cells length as a distance on surface (in meters)
 * @return a list of lists of points sampling the zone of interest,
 * each sub-list corresponding to a part not connected to the other
 * parts (for example for islands)
 * @exception OrekitException if the zone cannot be sampled
 */
public List<List<GeodeticPoint>> sample(final SphericalPolygonsSet zone, final double width,
        final double length) throws OrekitException {

    final double splitWidth = width / quantization;
    final double splitLength = length / quantization;
    final Map<Mesh, List<GeodeticPoint>> map = new IdentityHashMap<Mesh, List<GeodeticPoint>>();
    final RegionFactory<Sphere2D> factory = new RegionFactory<Sphere2D>();

    SphericalPolygonsSet remaining = (SphericalPolygonsSet) zone.copySelf();
    S2Point inside = getInsidePoint(remaining);
    while (inside != null) {

        // find a mesh covering at least one connected part of the zone
        final List<Mesh.Node> mergingSeeds = new ArrayList<Mesh.Node>();
        Mesh mesh = new Mesh(ellipsoid, zone, aiming, splitLength, splitWidth, inside);
        mergingSeeds.add(mesh.getNode(0, 0));
        List<GeodeticPoint> sample = null;
        while (!mergingSeeds.isEmpty()) {

            // expand the mesh around the seed
            neighborExpandMesh(mesh, mergingSeeds, zone);

            // extract the sample from the mesh
            // this further expands the mesh so sample cells dimensions are multiples of quantization,
            // hence it must be performed here before checking meshes independence
            sample = extractSample(mesh, zone);

            // check the mesh is independent from existing meshes
            mergingSeeds.clear();
            for (final Map.Entry<Mesh, List<GeodeticPoint>> entry : map.entrySet()) {
                if (!factory.intersection(mesh.getCoverage(), entry.getKey().getCoverage()).isEmpty()) {
                    // the meshes are not independent, they intersect each other!
                    // merge the two meshes together
                    mesh = mergeMeshes(mesh, entry.getKey(), mergingSeeds);
                    map.remove(entry.getKey());
                    break;
                }
            }

        }

        // remove the part of the zone covered by the mesh
        remaining = (SphericalPolygonsSet) factory.difference(remaining, mesh.getCoverage());
        inside = getInsidePoint(remaining);

        map.put(mesh, sample);

    }

    // concatenate the lists from the independent meshes
    final List<List<GeodeticPoint>> sampleLists = new ArrayList<List<GeodeticPoint>>(map.size());
    for (final Map.Entry<Mesh, List<GeodeticPoint>> entry : map.entrySet()) {
        sampleLists.add(entry.getValue());
    }
    return sampleLists;

}
From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java
public void testEquals() {
    IdentityHashMap hashMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(hashMap);

    hashMap.put(KEY_KEY, VALUE_VAL);

    IdentityHashMap copyMap = (IdentityHashMap) hashMap.clone();
    assertTrue(hashMap.equals(copyMap));
    hashMap.put(VALUE_VAL, KEY_KEY);
    assertFalse(hashMap.equals(copyMap));
}
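This test works because clone() produces a map holding references to the very same key and value objects. IdentityHashMap.equals compares keys and values by reference, so, as its class javadoc warns, the Map.equals contract is only guaranteed to hold when both maps being compared are identity maps.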
From source file:com.google.uzaygezen.core.BoundedRollupTest.java
private static <V, K> Set<MapNode<K, V>> toIdentitySet(List<MapNode<K, V>> list) {
    Set<MapNode<K, V>> set = Collections.newSetFromMap(new IdentityHashMap<MapNode<K, V>, Boolean>());
    set.addAll(list);
    Assert.assertEquals(list.size(), set.size());
    return set;
}
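The JDK provides no IdentityHashSet, so wrapping an IdentityHashMap with Collections.newSetFromMap, as above, is the standard idiom for an identity-based Set. A minimal standalone sketch of the idiom (class and variable names are illustrative, not from the project above):

import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;

public class IdentitySetDemo {
    public static void main(String[] args) {
        // membership in this Set is decided by ==, not equals()
        Set<String> identitySet = Collections.newSetFromMap(new IdentityHashMap<String, Boolean>());
        String s1 = new String("x");
        String s2 = new String("x");
        identitySet.add(s1);
        identitySet.add(s2);
        System.out.println(identitySet.size()); // 2, even though s1.equals(s2)
    }
}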
From source file:ome.services.util.ServiceHandler.java
/**
 * public for testing purposes.
 */
public String getResultsString(Object o, IdentityHashMap<Object, String> cache) {
    if (o == null) {
        return "null";
    }
    if (cache == null) {
        cache = new IdentityHashMap<Object, String>();
    } else if (cache.containsKey(o)) {
        return cache.get(o);
    }
    if (o instanceof Collection) {
        int count = 0;
        StringBuilder sb = new StringBuilder(128);
        sb.append("(");
        Collection c = (Collection) o;
        for (Object obj : c) {
            if (count > 0) {
                sb.append(", ");
            }
            if (count > 2) {
                sb.append("... ");
                sb.append(c.size() - 3);
                sb.append(" more");
                break;
            }
            sb.append(obj);
            count++;
        }
        sb.append(")");
        return sb.toString();
    } else if (o instanceof Map) {
        Map map = (Map) o;
        int count = 0;
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        for (Object k : map.keySet()) {
            if (count > 0) {
                sb.append(", ");
            }
            if (count > 2) {
                sb.append("... ");
                sb.append(map.size() - 3);
                sb.append(" more");
                break;
            }
            sb.append(k);
            sb.append("=");
            // remember this map (keyed by identity) before recursing into its values,
            // so self-referential structures do not cause infinite recursion
            cache.put(o, o.getClass().getName() + ":" + System.identityHashCode(o));
            sb.append(getResultsString(map.get(k), cache));
            count++;
        }
        sb.append("}");
        return sb.toString();
    } else if (o.getClass().isArray()) {
        int length = Array.getLength(o);
        if (length == 0) {
            return "[]";
        }
        StringBuilder sb = new StringBuilder(128);
        sb.append("[");
        for (int i = 0; i < length; i++) {
            if (i != 0) {
                sb.append(", ");
            }
            if (i > 2) {
                sb.append("... ");
                sb.append(i - 2);
                sb.append(" more");
                break;
            }
            sb.append(Array.get(o, i));
        }
        sb.append("]");
        return sb.toString();
    } else {
        return o.toString();
    }
}