Example usage for java.util IdentityHashMap keySet

List of usage examples for java.util IdentityHashMap keySet

Introduction

On this page you can find example usage of java.util.IdentityHashMap.keySet().

Prototype

public Set<K> keySet() 

Document

Returns an identity-based set view of the keys contained in this map.
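
As a minimal sketch of what "identity-based" means (not taken from the examples below), the following puts two equal-but-distinct String keys into a map; the keySet() view keeps both, and its contains check uses reference equality rather than equals():

import java.util.IdentityHashMap;
import java.util.Set;

public class KeySetIdentityDemo {
    public static void main(String[] args) {
        IdentityHashMap<String, Integer> map = new IdentityHashMap<>();
        String a = new String("key");
        String b = new String("key"); // equals(a), but a different object

        map.put(a, 1);
        map.put(b, 2);

        Set<String> keys = map.keySet();
        System.out.println(keys.size());                       // 2: both references kept
        System.out.println(keys.contains(a));                  // true: same reference
        System.out.println(keys.contains(new String("key")));  // false: equal, not identical
    }
}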

Usage

From source file:Main.java

public static void main(String[] argv) throws Exception {
    IdentityHashMap<Object, Object> objMap = new IdentityHashMap<Object, Object>();

    Object o1 = new Integer(123); // two boxed instances with equal values...
    Object o2 = new Integer(123); // ...but distinct references
    objMap.put(o1, "first");
    objMap.put(o2, "from java2s.com"); // both entries kept: keys are compared with ==

    Object v1 = objMap.get(o1);
    System.out.println(v1);
    Object v2 = objMap.get(o2);
    System.out.println(v2);

    // create a set view
    Set<Object> nset = objMap.keySet();

    System.out.println("Set view is: " + nset);
}
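
Note that the set returned by keySet() is a live view backed by the map, so changes flow in both directions. A hedged continuation of the snippet above, reusing its variables:

    // removing a key through the view also removes its mapping from the map
    nset.remove(o1);
    System.out.println(objMap.containsKey(o1)); // false
    System.out.println(objMap.size());          // 1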

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

/**
 * Check the state of a newly constructed, empty IdentityHashMap.
 *
 * @param hashMap the newly constructed map to check
 */
private static void checkEmptyHashMapAssumptions(IdentityHashMap hashMap) {
    assertNotNull(hashMap);
    assertTrue(hashMap.isEmpty());

    assertNotNull(hashMap.values());
    assertTrue(hashMap.values().isEmpty());
    assertTrue(hashMap.values().size() == 0);

    assertNotNull(hashMap.keySet());
    assertTrue(hashMap.keySet().isEmpty());
    assertTrue(hashMap.keySet().size() == 0);

    assertNotNull(hashMap.entrySet());
    assertTrue(hashMap.entrySet().isEmpty());
    assertTrue(hashMap.entrySet().size() == 0);

    assertNotNull(hashMap.entrySet().iterator());
    assertFalse(hashMap.entrySet().iterator().hasNext());
}

From source file:com.google.gwt.dev.util.collect.IdentityHashSetTest.java

@SuppressWarnings("unchecked")
@Override
public Collection makeConfirmedCollection() {
    final java.util.IdentityHashMap map = new java.util.IdentityHashMap();
    return new AbstractSet() {
        @Override
        public boolean add(Object e) {
            return map.put(e, e) == null;
        }

        @Override
        public Iterator iterator() {
            return map.keySet().iterator();
        }

        @Override
        public int size() {
            return map.size();
        }
    };
}
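
This hand-rolls an identity-based Set on top of an IdentityHashMap. Since Java 6 the JDK can do the same in one line; a hedged equivalent:

import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Set;

// an identity-based set backed by an IdentityHashMap, straight from the JDK
Set<Object> identitySet = Collections.newSetFromMap(new IdentityHashMap<Object, Boolean>());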

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testKeySet() {
    IdentityHashMap hashMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(hashMap);

    Set keySet = hashMap.keySet();
    assertNotNull(keySet);
    assertTrue(keySet.isEmpty());
    assertTrue(keySet.size() == 0);

    hashMap.put(KEY_TEST_KEY_SET, VALUE_TEST_KEY_SET);

    assertTrue(keySet.size() == SIZE_ONE);
    assertTrue(keySet.contains(KEY_TEST_KEY_SET));
    assertFalse(keySet.contains(VALUE_TEST_KEY_SET));
    assertFalse(keySet.contains(KEY_TEST_KEY_SET.toUpperCase(Locale.ROOT)));
}
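
Two details worth noting: keySet was obtained before the put and still reflects it, because the set is a live view rather than a snapshot; and since lookups are identity-based, even an equals()-equal copy of the key would be missed. A hedged illustration, assuming KEY_TEST_KEY_SET is a String constant as the test suggests:

    String copy = new String(KEY_TEST_KEY_SET); // equal content, distinct object
    assertFalse(keySet.contains(copy));         // identity-based lookup misses it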

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testClone() {
    IdentityHashMap srcMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(srcMap);

    // Check empty clone behavior
    IdentityHashMap dstMap = (IdentityHashMap) srcMap.clone();
    assertNotNull(dstMap);
    assertEquals(dstMap.size(), srcMap.size());
    // assertTrue(dstMap.values().toArray().equals(srcMap.values().toArray()));
    assertTrue(dstMap.keySet().equals(srcMap.keySet()));
    assertTrue(dstMap.entrySet().equals(srcMap.entrySet()));

    // Check non-empty clone behavior
    srcMap.put(KEY_1, VALUE_1);
    srcMap.put(KEY_2, VALUE_2);
    srcMap.put(KEY_3, VALUE_3);
    dstMap = (IdentityHashMap) srcMap.clone();
    assertNotNull(dstMap);
    assertEquals(dstMap.size(), srcMap.size());

    assertTrue(dstMap.keySet().equals(srcMap.keySet()));

    assertTrue(dstMap.entrySet().equals(srcMap.entrySet()));
}
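
The keySet and entrySet comparisons succeed because clone() is shallow: the clone shares the original key and value references, which is exactly what the identity-based equality of these views requires. A hedged extra check making that explicit:

    // the clone holds the very same key references, not copies
    for (Object key : srcMap.keySet()) {
        assertTrue(dstMap.containsKey(key)); // same reference found in the clone
    }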

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testHashMapMap() {
    IdentityHashMap srcMap = new IdentityHashMap();
    assertNotNull(srcMap);
    checkEmptyHashMapAssumptions(srcMap);

    srcMap.put(INTEGER_1, INTEGER_11);
    srcMap.put(INTEGER_2, INTEGER_22);
    srcMap.put(INTEGER_3, INTEGER_33);

    IdentityHashMap hashMap = new IdentityHashMap(srcMap);
    assertFalse(hashMap.isEmpty());
    assertTrue(hashMap.size() == SIZE_THREE);

    Collection valColl = hashMap.values();
    assertTrue(valColl.contains(INTEGER_11));
    assertTrue(valColl.contains(INTEGER_22));
    assertTrue(valColl.contains(INTEGER_33));

    Collection keyColl = hashMap.keySet();
    assertTrue(keyColl.contains(INTEGER_1));
    assertTrue(keyColl.contains(INTEGER_2));
    assertTrue(keyColl.contains(INTEGER_3));
}
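
The copy constructor preserves identity semantics, so the contains checks above pass only because the very same INTEGER_* references are used; even the values() collection compares by reference in an IdentityHashMap. A hedged counter-example, assuming INTEGER_1 and INTEGER_11 are Integer constants (new Integer is used deliberately to force distinct instances):

    // an equal but distinct Integer is not found, by key or by value
    assertFalse(hashMap.containsKey(new Integer(INTEGER_1.intValue())));
    assertFalse(hashMap.values().contains(new Integer(INTEGER_11.intValue())));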

From source file:cdr.forms.SwordDepositHandler.java

private File makeZipFile(gov.loc.mets.DocumentRoot metsDocumentRoot,
        IdentityHashMap<DepositFile, String> filenames) {

    // Get the METS XML

    String metsXml = serializeMets(metsDocumentRoot);

    // Create the zip file

    File zipFile;

    try {
        zipFile = File.createTempFile("tmp", ".zip");
    } catch (IOException e) {
        throw new Error(e);
    }

    FileOutputStream fileOutput;

    try {
        fileOutput = new FileOutputStream(zipFile);
    } catch (FileNotFoundException e) {
        throw new Error(e);
    }

    ZipOutputStream zipOutput = new ZipOutputStream(fileOutput);

    try {

        ZipEntry entry;

        // Write the METS

        entry = new ZipEntry("mets.xml");
        zipOutput.putNextEntry(entry);

        PrintStream xmlPrintStream = new PrintStream(zipOutput);
        xmlPrintStream.print(metsXml);

        // Write files

        for (DepositFile file : filenames.keySet()) {

            if (!file.isExternal()) {

                entry = new ZipEntry(filenames.get(file));
                zipOutput.putNextEntry(entry);

                FileInputStream fileInput = new FileInputStream(file.getFile());

                byte[] buffer = new byte[1024];
                int length;

                while ((length = fileInput.read(buffer)) != -1)
                    zipOutput.write(buffer, 0, length);

                fileInput.close();

            }

        }

        zipOutput.finish();
        zipOutput.close();

        fileOutput.close();

    } catch (IOException e) {

        throw new Error(e);

    }

    return zipFile;

}
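
The manual buffer loop and unconditional close() calls can leak streams if an exception is thrown mid-copy. A hedged sketch of the same file-copy section using try-with-resources and java.nio (Java 7+); the METS entry is omitted for brevity:

    try (ZipOutputStream zip = new ZipOutputStream(new FileOutputStream(zipFile))) {
        for (DepositFile file : filenames.keySet()) {
            if (!file.isExternal()) {
                zip.putNextEntry(new ZipEntry(filenames.get(file)));
                Files.copy(file.getFile().toPath(), zip); // java.nio.file.Files
            }
        }
    } catch (IOException e) {
        throw new Error(e);
    }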

From source file:org.apache.kylin.storage.cache.DynamicCacheTest.java

@Test
public void basicTest() {

    final StorageContext context = new StorageContext();
    final List<TblColRef> groups = StorageMockUtils.buildGroups();
    final TblColRef partitionCol = groups.get(0);
    final List<FunctionDesc> aggregations = StorageMockUtils.buildAggregations();
    final TupleInfo tupleInfo = StorageMockUtils.newTupleInfo(groups, aggregations);

    SQLDigest sqlDigest = new SQLDigest("default.test_kylin_fact", null, null, Lists.<TblColRef>newArrayList(),
            groups, Lists.newArrayList(partitionCol), Lists.<TblColRef>newArrayList(), aggregations,
            new ArrayList<MeasureDesc>(), new ArrayList<SQLDigest.OrderEnum>());

    ITuple aTuple = new TsOnlyTuple(partitionCol, "2011-02-01");
    ITuple bTuple = new TsOnlyTuple(partitionCol, "2012-02-01");
    final List<ITuple> allTuples = Lists.newArrayList(aTuple, bTuple);

    //counts for verifying
    final AtomicInteger underlyingSEHitCount = new AtomicInteger(0);
    final List<Integer> returnedRowPerSearch = Lists.newArrayList();

    CacheFledgedDynamicQuery dynamicCache = new CacheFledgedDynamicQuery(new ICachableStorageQuery() {
        @Override
        public ITupleIterator search(StorageContext context, SQLDigest sqlDigest, TupleInfo returnTupleInfo) {
            Range<Long> tsRangeInQuery = TsConditionExtractor.extractTsCondition(partitionCol,
                    sqlDigest.filter);
            List<ITuple> ret = Lists.newArrayList();
            for (ITuple tuple : allTuples) {
                if (tsRangeInQuery.contains(Tuple.getTs(tuple, partitionCol))) {
                    ret.add(tuple);
                }
            }

            underlyingSEHitCount.incrementAndGet();
            returnedRowPerSearch.add(ret.size());

            return new SimpleTupleIterator(ret.iterator());
        }

        @Override
        public boolean isDynamic() {
            return true;
        }

        @Override
        public Range<Long> getVolatilePeriod() {
            return Ranges.greaterThan(DateFormat.stringToMillis("2011-02-01"));
        }

        @Override
        public String getStorageUUID() {
            return "111ca32a-a33e-4b69-12aa-0bb8b1f8c191";
        }
    }, partitionCol);

    sqlDigest.filter = StorageMockUtils.buildTs2010Filter(groups.get(0));
    ITupleIterator firstIterator = dynamicCache.search(context, sqlDigest, tupleInfo);
    IdentityHashMap<ITuple, Void> firstResults = new IdentityHashMap<>();
    while (firstIterator.hasNext()) {
        firstResults.put(firstIterator.next(), null);
    }
    firstIterator.close();

    sqlDigest.filter = StorageMockUtils.buildTs2011Filter(groups.get(0));
    ITupleIterator secondIterator = dynamicCache.search(context, sqlDigest, tupleInfo);
    IdentityHashMap<ITuple, Void> secondResults = new IdentityHashMap<>();
    while (secondIterator.hasNext()) {
        secondResults.put(secondIterator.next(), null);
    }
    secondIterator.close();

    Assert.assertEquals(2, firstResults.size());
    IdentityUtils.collectionReferenceEquals(firstResults.keySet(), secondResults.keySet());
    Assert.assertEquals(2, underlyingSEHitCount.get());
    Assert.assertEquals(Integer.valueOf(2), returnedRowPerSearch.get(0));
    Assert.assertEquals(Integer.valueOf(1), returnedRowPerSearch.get(1));
}
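
The IdentityHashMap<ITuple, Void> instances here serve purely as identity-based sets of result tuples. Since the test already depends on Guava, a hedged shorthand for the same collection:

    // Guava's identity set wraps an IdentityHashMap internally
    Set<ITuple> firstResults = Sets.newIdentityHashSet();
    while (firstIterator.hasNext()) {
        firstResults.add(firstIterator.next());
    }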

From source file:org.apache.pig.pen.LineageTrimmingVisitor.java

private Map<LOLoad, DataBag> PruneBaseDataConstrainedCoverage(Map<LOLoad, DataBag> baseData,
        LineageTracer lineage, Collection<IdentityHashSet<Tuple>> equivalenceClasses) {

    IdentityHashMap<Tuple, Collection<Tuple>> membershipMap = lineage.getMembershipMap();
    IdentityHashMap<Tuple, Double> lineageGroupWeights = lineage.getWeightedCounts(2f, 1);

    // compute a mapping from lineage group to the set of equivalence
    // classes covered by it
    // IdentityHashMap<Tuple, Set<Integer>> lineageGroupToEquivClasses = new
    // IdentityHashMap<Tuple, Set<Integer>>();
    IdentityHashMap<Tuple, Set<IdentityHashSet<Tuple>>> lineageGroupToEquivClasses = new IdentityHashMap<Tuple, Set<IdentityHashSet<Tuple>>>();
    for (IdentityHashSet<Tuple> equivClass : equivalenceClasses) {
        for (Object t : equivClass) {
            Tuple lineageGroup = lineage.getRepresentative((Tuple) t);
            // Set<Integer> entry =
            // lineageGroupToEquivClasses.get(lineageGroup);
            Set<IdentityHashSet<Tuple>> entry = lineageGroupToEquivClasses.get(lineageGroup);
            if (entry == null) {
                // entry = new HashSet<Integer>();
                entry = new HashSet<IdentityHashSet<Tuple>>();
                lineageGroupToEquivClasses.put(lineageGroup, entry);
            }
            // entry.add(equivClassId);
            entry.add(equivClass);
        }
    }

    // select lineage groups such that we cover all equivalence classes
    IdentityHashSet<Tuple> selectedLineageGroups = new IdentityHashSet<Tuple>();
    while (!lineageGroupToEquivClasses.isEmpty()) {
        // greedily find the lineage group with the best "score", where
        // score = # equiv classes covered / group weight
        double bestWeight = -1;
        Tuple bestLineageGroup = null;
        Set<IdentityHashSet<Tuple>> bestEquivClassesCovered = null;
        int bestNumEquivClassesCovered = 0;
        for (Tuple lineageGroup : lineageGroupToEquivClasses.keySet()) {
            double weight = lineageGroupWeights.get(lineageGroup);

            Set<IdentityHashSet<Tuple>> equivClassesCovered = lineageGroupToEquivClasses.get(lineageGroup);
            int numEquivClassesCovered = equivClassesCovered.size();

            if ((numEquivClassesCovered > bestNumEquivClassesCovered)
                    || (numEquivClassesCovered == bestNumEquivClassesCovered && weight < bestWeight)) {

                if (selectedLineageGroups.contains(lineageGroup)) {
                    bestLineageGroup = lineageGroup;
                    bestEquivClassesCovered = equivClassesCovered;
                    continue;
                }

                bestWeight = weight;
                bestLineageGroup = lineageGroup;
                bestNumEquivClassesCovered = numEquivClassesCovered;
                bestEquivClassesCovered = equivClassesCovered;
            }
        }
        // add the best-scoring lineage group to the set of ones we plan to
        // retain
        selectedLineageGroups.add(bestLineageGroup);

        // make copy of bestEquivClassesCovered (or else the code that
        // follows won't work correctly, because removing from the reference
        // set)
        Set<IdentityHashSet<Tuple>> toCopy = bestEquivClassesCovered;
        bestEquivClassesCovered = new HashSet<IdentityHashSet<Tuple>>();
        bestEquivClassesCovered.addAll(toCopy);

        // remove the classes we've now covered
        Collection<Tuple> toRemove = new LinkedList<Tuple>();
        for (Tuple lineageGroup : lineageGroupToEquivClasses.keySet()) {

            Set<IdentityHashSet<Tuple>> equivClasses = lineageGroupToEquivClasses.get(lineageGroup);

            for (Iterator<IdentityHashSet<Tuple>> it = equivClasses.iterator(); it.hasNext();) {
                IdentityHashSet<Tuple> equivClass = it.next();
                if (bestEquivClassesCovered.contains(equivClass)) {
                    it.remove();
                }
            }
            if (equivClasses.size() == 0)
                toRemove.add(lineageGroup);

        }
        for (Tuple removeMe : toRemove)
            lineageGroupToEquivClasses.remove(removeMe);
    }

    // revise baseData to only contain the tuples that are part of
    // selectedLineageGroups
    IdentityHashSet<Tuple> tuplesToRetain = new IdentityHashSet<Tuple>();
    for (Tuple lineageGroup : selectedLineageGroups) {
        Collection<Tuple> members = membershipMap.get(lineageGroup);
        for (Tuple t : members)
            tuplesToRetain.add(t);
    }

    Map<LOLoad, DataBag> newBaseData = new HashMap<LOLoad, DataBag>();
    for (LOLoad loadOp : baseData.keySet()) {
        DataBag data = baseData.get(loadOp);
        // DataBag newData = new DataBag();
        DataBag newData = BagFactory.getInstance().newDefaultBag();
        for (Iterator<Tuple> it = data.iterator(); it.hasNext();) {
            Tuple t = it.next();
            if (tuplesToRetain.contains(t))
                newData.add(t);
        }
        newBaseData.put(loadOp, newData);
    }

    return newBaseData;
}
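
The toRemove list exists because entries cannot be removed from lineageGroupToEquivClasses while iterating its keySet() without a ConcurrentModificationException. A hedged alternative that removes in place through the entry-set iterator, which IdentityHashMap supports:

    // safe in-place removal: deletions go through the iterator itself
    for (Iterator<Map.Entry<Tuple, Set<IdentityHashSet<Tuple>>>> it =
            lineageGroupToEquivClasses.entrySet().iterator(); it.hasNext();) {
        if (it.next().getValue().isEmpty()) {
            it.remove();
        }
    }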

From source file:org.jamocha.rating.fraj.RatingProvider.java

private double rateBetaWithoutExistentials(final StatisticsProvider statisticsProvider,
        final PathNodeFilterSet toRate,
        final Map<Set<PathFilterList>, List<Pair<List<Set<PathFilterList>>, List<PathFilter>>>> componentToJoinOrder,
        final Map<Path, Set<PathFilterList>> pathToPreNetworkComponents) {
    final IdentityHashMap<Set<PathFilterList>, Data> preNetworkComponentToData = new IdentityHashMap<>();
    for (final Set<PathFilterList> comp : componentToJoinOrder.keySet()) {
        preNetworkComponentToData.put(comp, statisticsProvider.getData(comp));
    }
    final double tupleSize = preNetworkComponentToData.values().stream().mapToDouble(Data::getTupleSize).sum();
    final double tuplesPerPage = statisticsProvider.getPageSize() / tupleSize;
    final double rowCount = calcBetaUnfilteredSize(statisticsProvider, componentToJoinOrder,
            pathToPreNetworkComponents, componentToJoinOrder.keySet());
    // joinsize is needed twice per component, thus pre-calculate it
    final Map<Set<PathFilterList>, Double> preNetworkComponentToJoinSize = preNetworkComponentToData.keySet()
            .stream()
            .collect(toMap(Function.identity(),
                    component -> joinSize(statisticsProvider, component, componentToJoinOrder.get(component),
                            componentToJoinOrder.keySet(), pathToPreNetworkComponents)));
    final double finsert = preNetworkComponentToData.entrySet().stream()
            .mapToDouble(
                    entry -> entry.getValue().getFinsert() * preNetworkComponentToJoinSize.get(entry.getKey()))
            .sum();
    final double fdelete = preNetworkComponentToData.values().stream().mapToDouble(Data::getFdelete).sum();
    // publish information to statistics provider
    {
        final Set<PathFilterList> filters = new HashSet<>();
        componentToJoinOrder.keySet().forEach(filters::addAll);
        filters.add(toRate);
        statisticsProvider.setData(filters, new Data(finsert, fdelete, rowCount, tupleSize));
    }
    final double mxBeta = m(rowCount, tuplesPerPage);
    final double runtimeCost = preNetworkComponentToData.entrySet().stream().mapToDouble(entry -> {
        final Set<PathFilterList> component = entry.getKey();
        final Data data = entry.getValue();
        return data.getFinsert()
                * costPosInsVarI(statisticsProvider, component, componentToJoinOrder.get(component),
                        componentToJoinOrder.keySet(), pathToPreNetworkComponents)
                + data.getFdelete() * (mxBeta + cardenas(mxBeta, preNetworkComponentToJoinSize.get(component)));
    }).sum();
    final double memoryCost = rowCount * tupleSize;
    return cpuAndMemCostCombiner.applyAsDouble(runtimeCost, memoryCost);
}
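
One subtlety in the collector above: Collectors.toMap builds a HashMap by default, so the identity-keyed semantics of preNetworkComponentToData do not carry over to preNetworkComponentToJoinSize. If equals()-equal components are possible, the four-argument overload accepts a map supplier; a hedged sketch, with score as a hypothetical stand-in for the joinSize call:

    final Map<Set<PathFilterList>, Double> joinSizes = preNetworkComponentToData.keySet()
            .stream()
            .collect(toMap(Function.identity(),
                    component -> score(component), // hypothetical stand-in for joinSize(...)
                    (a, b) -> a,                   // merge function, required by this overload
                    IdentityHashMap::new));        // keeps identity-based keying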