Example usage for java.util IdentityHashMap remove

Introduction

On this page you can find example usages of java.util.IdentityHashMap.remove.

Prototype

public V remove(Object key) 

Document

Removes the mapping for this key from this map if present.
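
Note that IdentityHashMap compares keys with reference equality (==) rather than equals(), so remove succeeds only when passed the very key instance stored in the map. A minimal sketch of this behavior (class and variable names are illustrative):

import java.util.IdentityHashMap;

public class IdentityRemoveDemo {
    public static void main(String[] args) {
        IdentityHashMap<String, String> map = new IdentityHashMap<String, String>();
        String key = new String("k");
        map.put(key, "value");

        // An equals()-equal but distinct key does not match under identity semantics.
        System.out.println(map.remove(new String("k"))); // null; the entry survives

        // Removing with the original instance succeeds and returns the mapped value.
        System.out.println(map.remove(key)); // value
    }
}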

Usage

From source file:Main.java

import java.util.IdentityHashMap;

public class Main {
    public static void main(String[] args) {

        IdentityHashMap<Integer, String> ihmap = new IdentityHashMap<Integer, String>();

        ihmap.put(1, "from");
        ihmap.put(2, "java2s.com");
        ihmap.put(3, "tutorial");

        System.out.println("Value of ihmap before: " + ihmap);

        // Remove the mapping for key 2. This works because autoboxing of small
        // int literals reuses cached Integer instances, so put(2, ...) and
        // remove(2) see the same key object.
        ihmap.remove(2);

        System.out.println("Value of ihmap after remove: " + ihmap);
    }
}
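
This example only works because autoboxing of int literals in the range -128 to 127 is guaranteed to reuse cached Integer instances. Outside that range each boxing may create a fresh object, and remove silently fails to match. A sketch of the pitfall (cache behavior beyond 127 is implementation-dependent, hence the hedged comments):

import java.util.IdentityHashMap;

public class BoxingPitfall {
    public static void main(String[] args) {
        IdentityHashMap<Integer, String> map = new IdentityHashMap<Integer, String>();
        map.put(1000, "boxed");
        // 1000 is outside the guaranteed Integer cache, so this second
        // autoboxing usually yields a different instance and nothing matches.
        System.out.println(map.remove(1000)); // typically null
        System.out.println(map);              // typically {1000=boxed}; the entry survives
    }
}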

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

/**
 * Test method for 'java.util.IdentityHashMap.remove(Object)'.
 */
public void testRemove() {
    IdentityHashMap hashMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(hashMap);

    assertNull(hashMap.remove(null));
    hashMap.put(null, VALUE_TEST_REMOVE);
    assertNotNull(hashMap.remove(null));

    hashMap.put(KEY_TEST_REMOVE, VALUE_TEST_REMOVE);
    assertEquals(hashMap.remove(KEY_TEST_REMOVE), VALUE_TEST_REMOVE);
    assertNull(hashMap.remove(KEY_TEST_REMOVE));
}

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testIsEmpty() {
    IdentityHashMap srcMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(srcMap);

    IdentityHashMap dstMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(dstMap);

    dstMap.putAll(srcMap);
    assertTrue(dstMap.isEmpty());

    dstMap.put(KEY_KEY, VALUE_VAL);
    assertFalse(dstMap.isEmpty());

    dstMap.remove(KEY_KEY);
    assertTrue(dstMap.isEmpty());
    assertEquals(dstMap.size(), 0);
}

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testKeysConflict() {
    IdentityHashMap hashMap = new IdentityHashMap();

    hashMap.put(STRING_ZERO_KEY, STRING_ZERO_VALUE);
    hashMap.put(INTEGER_ZERO_KEY, INTEGER_ZERO_VALUE);
    hashMap.put(ODD_ZERO_KEY, ODD_ZERO_VALUE);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), INTEGER_ZERO_VALUE);
    assertEquals(hashMap.get(ODD_ZERO_KEY), ODD_ZERO_VALUE);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    hashMap.remove(INTEGER_ZERO_KEY);
    assertEquals(hashMap.get(ODD_ZERO_KEY), ODD_ZERO_VALUE);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    hashMap.remove(ODD_ZERO_KEY);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    assertEquals(hashMap.get(ODD_ZERO_KEY), null);
    assertEquals(hashMap.get(STRING_ZERO_KEY), STRING_ZERO_VALUE);
    hashMap.remove(STRING_ZERO_KEY);
    assertEquals(hashMap.get(INTEGER_ZERO_KEY), null);
    assertEquals(hashMap.get(ODD_ZERO_KEY), null);
    assertEquals(hashMap.get(STRING_ZERO_KEY), null);
    assertEquals(hashMap.size(), 0);
}

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

/**
 * Test method for 'java.util.IdentityHashMap.size()'.
 */
public void testSize() {
    IdentityHashMap hashMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(hashMap);

    // Test size behavior on put
    assertEquals(hashMap.size(), SIZE_ZERO);
    hashMap.put(KEY_1, VALUE_1);
    assertEquals(hashMap.size(), SIZE_ONE);
    hashMap.put(KEY_2, VALUE_2);
    assertEquals(hashMap.size(), SIZE_TWO);
    hashMap.put(KEY_3, VALUE_3);
    assertEquals(hashMap.size(), SIZE_THREE);

    // Test size behavior on remove
    hashMap.remove(KEY_1);
    assertEquals(hashMap.size(), SIZE_TWO);
    hashMap.remove(KEY_2);
    assertEquals(hashMap.size(), SIZE_ONE);
    hashMap.remove(KEY_3);
    assertEquals(hashMap.size(), SIZE_ZERO);

    // Test size behavior on putAll
    hashMap.put(KEY_1, VALUE_1);
    hashMap.put(KEY_2, VALUE_2);
    hashMap.put(KEY_3, VALUE_3);
    IdentityHashMap srcMap = new IdentityHashMap(hashMap);
    hashMap.putAll(srcMap);
    assertEquals(hashMap.size(), SIZE_THREE);

    // Test size behavior on clear
    hashMap.clear();
    assertEquals(hashMap.size(), SIZE_ZERO);
}

From source file:de.codesourcery.eve.skills.ui.model.impl.MarketGroupTreeModelBuilder.java

private FilteringTreeModel createTreeModel(boolean populateAllNodes) {
    long time = -System.currentTimeMillis();

    final IdentityHashMap<MarketGroup, ITreeNode> nodes = new IdentityHashMap<MarketGroup, ITreeNode>();

    // construct tree
    final List<MarketGroup> marketGroups = dataModel.getLeafMarketGroups();
    System.out.println("createTreeModel( populateAll = " + populateAllNodes + "): Filtering "
            + marketGroups.size() + " leaf market groups");

    //      int debugCount=0;
    for (MarketGroup marketGroup : marketGroups) {
        //         System.out.print(".");
        //         if ( (debugCount++ % 60 ) == 0 ) {
        //            System.out.println();
        //         }

        final ITreeNode node = getOrCreateTreeNode(marketGroup, nodes);
        if (populateAllNodes) {
            final List<InventoryType> members = getMembers(marketGroup);

            if (!members.isEmpty()) {
                for (InventoryType type : members) {
                    node.addChild(new DefaultTreeNode(type));
                }
            } else {
                nodes.remove(marketGroup);
                continue;
            }
        }

        if (marketGroup.getParent() != null) {
            MarketGroup current = marketGroup;
            while (current != null) {
                final ITreeNode toAdd = getOrCreateTreeNode(current, nodes);
                if (current.getParent() != null) {
                    ITreeNode parent = getOrCreateTreeNode(current.getParent(), nodes);
                    boolean add = true;
                    for (ITreeNode child : parent.getChildren()) {
                        if (ObjectUtils.equals(child.getValue(), current)) {
                            add = false;
                            break;
                        }
                    }
                    if (add) {
                        parent.addChild(toAdd);
                    }
                }
                current = current.getParent();
            }
        }
    }

    System.out.println("createTreeModel( populateAll = " + populateAllNodes + "): Initial tree creation took "
            + (time + System.currentTimeMillis()) + " ms");

    final ITreeNode root = new DefaultTreeNode();
    // convert all nodes without children to LazyTreeNode instances 
    for (ITreeNode node : nodes.values()) {
        final MarketGroup g = (MarketGroup) node.getValue();
        if (g.getParent() == null) { // top-level market group, add to root node
            root.addChild(wrapIfLeafNode(node));
        } else {
            wrapIfLeafNode(node);
        }
    }

    final FilteringTreeModel model = new FilteringTreeModel(new DefaultTreeModel(root));

    // sort tree nodes alphabetically
    final Comparator<ITreeNode> COMPARATOR = new Comparator<ITreeNode>() {
        @Override
        public int compare(ITreeNode o1, ITreeNode o2) {
            if (o1.getValue() instanceof MarketGroup && o2.getValue() instanceof MarketGroup) {
                final MarketGroup g1 = (MarketGroup) o1.getValue();
                final MarketGroup g2 = (MarketGroup) o2.getValue();
                return g1.getName().compareTo(g2.getName());
            } else if (o1.getValue() instanceof InventoryType && o2.getValue() instanceof InventoryType) {
                final InventoryType g1 = (InventoryType) o1.getValue();
                final InventoryType g2 = (InventoryType) o2.getValue();
                return g1.getName().compareTo(g2.getName());
            }
            throw new RuntimeException(
                    "Internal error, unhandled node values: " + o1.getValue() + " / " + o2.getValue());
        }
    };
    model.sortChildren(root, COMPARATOR, true);

    time += System.currentTimeMillis();
    System.out.println("createTreeModel( populateAll = " + populateAllNodes + ") took " + time + " ms");
    return model;
}
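
In this example, nodes.remove(marketGroup) evicts the node that was just created for a leaf market group with no members, so empty groups never make it into the finished tree; the IdentityHashMap doubles as a build-time cache keyed by object identity.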

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testEntrySet() {
    IdentityHashMap hashMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(hashMap);

    Set entrySet = hashMap.entrySet();
    assertNotNull(entrySet);

    // Check that the entry set looks right
    hashMap.put(KEY_TEST_ENTRY_SET, VALUE_TEST_ENTRY_SET_1);
    entrySet = hashMap.entrySet();
    assertEquals(entrySet.size(), SIZE_ONE);
    Iterator itSet = entrySet.iterator();
    Map.Entry entry = (Map.Entry) itSet.next();
    assertEquals(entry.getKey(), KEY_TEST_ENTRY_SET);
    assertEquals(entry.getValue(), VALUE_TEST_ENTRY_SET_1);

    // Check that entries in the entrySet are updated correctly on overwrites
    hashMap.put(KEY_TEST_ENTRY_SET, VALUE_TEST_ENTRY_SET_2);
    entrySet = hashMap.entrySet();
    assertEquals(entrySet.size(), SIZE_ONE);
    itSet = entrySet.iterator();
    entry = (Map.Entry) itSet.next();
    assertEquals(entry.getKey(), KEY_TEST_ENTRY_SET);
    assertEquals(entry.getValue(), VALUE_TEST_ENTRY_SET_2);

    // Check that entries are updated on removes
    hashMap.remove(KEY_TEST_ENTRY_SET);
    checkEmptyHashMapAssumptions(hashMap);
}
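
The final remove here also demonstrates that entrySet() returns a live view backed by the map: removing the key through the map empties the previously obtained set. A minimal sketch (names are illustrative):

import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Set;

public class EntrySetView {
    public static void main(String[] args) {
        IdentityHashMap<String, String> map = new IdentityHashMap<String, String>();
        String key = "k";
        map.put(key, "v");

        Set<Map.Entry<String, String>> view = map.entrySet();
        System.out.println(view.size()); // 1
        map.remove(key);                 // the view reflects the removal
        System.out.println(view.size()); // 0
    }
}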

From source file:ca.uhn.fhir.util.FhirTerser.java

private void visit(IdentityHashMap<Object, Object> theStack, IBaseResource theResource, IBase theElement,
        List<String> thePathToElement, BaseRuntimeChildDefinition theChildDefinition,
        BaseRuntimeElementDefinition<?> theDefinition, IModelVisitor theCallback) {
    List<String> pathToElement = addNameToList(thePathToElement, theChildDefinition);

    if (theStack.put(theElement, theElement) != null) {
        return;
    }

    theCallback.acceptElement(theResource, theElement, pathToElement, theChildDefinition, theDefinition);

    BaseRuntimeElementDefinition<?> def = theDefinition;
    if (def.getChildType() == ChildTypeEnum.CONTAINED_RESOURCE_LIST) {
        def = myContext.getElementDefinition(theElement.getClass());
    }

    if (theElement instanceof IBaseReference) {
        IBaseResource target = ((IBaseReference) theElement).getResource();
        if (target != null) {
            if (target.getIdElement().hasIdPart() == false || target.getIdElement().isLocal()) {
                RuntimeResourceDefinition targetDef = myContext.getResourceDefinition(target);
                visit(theStack, target, target, pathToElement, null, targetDef, theCallback);
            }
        }
    }

    switch (def.getChildType()) {
    case ID_DATATYPE:
    case PRIMITIVE_XHTML_HL7ORG:
    case PRIMITIVE_XHTML:
    case PRIMITIVE_DATATYPE:
        // These are primitive types
        break;
    case RESOURCE:
    case RESOURCE_BLOCK:
    case COMPOSITE_DATATYPE: {
        BaseRuntimeElementCompositeDefinition<?> childDef = (BaseRuntimeElementCompositeDefinition<?>) def;
        for (BaseRuntimeChildDefinition nextChild : childDef.getChildrenAndExtension()) {

            List<?> values = nextChild.getAccessor().getValues(theElement);
            if (values != null) {
                for (Object nextValueObject : values) {
                    IBase nextValue;
                    try {
                        nextValue = (IBase) nextValueObject;
                    } catch (ClassCastException e) {
                        String s = "Found instance of " + nextValueObject.getClass()
                                + " - Did you set a field value to the incorrect type? Expected "
                                + IBase.class.getName();
                        throw new ClassCastException(s);
                    }
                    if (nextValue == null) {
                        continue;
                    }
                    if (nextValue.isEmpty()) {
                        continue;
                    }
                    BaseRuntimeElementDefinition<?> childElementDef;
                    childElementDef = nextChild.getChildElementDefinitionByDatatype(nextValue.getClass());

                    if (childElementDef == null) {
                        childElementDef = myContext.getElementDefinition(nextValue.getClass());
                    }

                    if (nextChild instanceof RuntimeChildDirectResource) {
                        // Don't descend into embedded resources
                        theCallback.acceptElement(theResource, nextValue, null, nextChild, childElementDef);
                    } else {
                        visit(theStack, theResource, nextValue, pathToElement, nextChild, childElementDef,
                                theCallback);
                    }
                }
            }
        }
        break;
    }
    case CONTAINED_RESOURCES: {
        BaseContainedDt value = (BaseContainedDt) theElement;
        for (IResource next : value.getContainedResources()) {
            def = myContext.getResourceDefinition(next);
            visit(theStack, next, next, pathToElement, null, def, theCallback);
        }
        break;
    }
    case CONTAINED_RESOURCE_LIST:
    case EXTENSION_DECLARED:
    case UNDECL_EXT: {
        throw new IllegalStateException("state should not happen: " + def.getChildType());
    }
    }

    theStack.remove(theElement);

}
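
The put-on-entry, remove-on-exit pairing above uses the IdentityHashMap as an identity-based guard against cycles: an element is blocked from being revisited only while it is on the current recursion path, so legitimate repeat visits along other paths still happen. A generic sketch of the pattern, with hypothetical Node and visit names (not the HAPI FHIR API):

import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;

public class CycleSafeVisitor {
    static class Node {
        final List<Node> children = new ArrayList<Node>();
    }

    static void visit(IdentityHashMap<Object, Object> stack, Node node) {
        if (stack.put(node, node) != null) {
            return; // node is already on the current path: a cycle
        }
        try {
            System.out.println("visiting " + node);
            for (Node child : node.children) {
                visit(stack, child);
            }
        } finally {
            stack.remove(node); // leaving the path re-enables future visits
        }
    }

    public static void main(String[] args) {
        Node a = new Node();
        a.children.add(a); // a self-cycle
        visit(new IdentityHashMap<Object, Object>(), a); // terminates cleanly
    }
}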

From source file:org.apache.hadoop.hive.ql.exec.tez.WorkloadManager.java

private void handleUpdateErrorOnMasterThread(WmTezSession session, int failedEndpointVersion,
        IdentityHashMap<WmTezSession, GetRequest> toReuse, WmThreadSyncWork syncWork,
        HashSet<String> poolsToRedistribute) {
    // First, check if the registry has been updated since the error, and skip the error if
    // we have received new, valid registry info (TODO: externally, add a grace period for this?).
    Ref<Integer> endpointVersion = new Ref<>(-1);
    AmPluginInfo info = session.getAmPluginInfo(endpointVersion);
    if (info != null && endpointVersion.value > failedEndpointVersion) {
        LOG.info("Ignoring an update error; endpoint information has been updated to {}", info);
        return;
    }
    GetRequest reuseRequest = toReuse.remove(session);
    if (reuseRequest != null) {
        // This session is bad, so don't allow reuse; just convert it to normal get.
        reuseRequest.sessionToReuse = null;
    }

    // We are assuming the update-error AM is bad and just try to kill it.
    RemoveSessionResult rr = checkAndRemoveSessionFromItsPool(session, poolsToRedistribute, null, true);
    switch (rr) {
    case OK:
    case NOT_FOUND:
        // Regardless whether it was removed successfully or after failing to remove, restart it.
        // Since we just restart this from under the user, mark it so we handle it properly when
        // the user tries to actually use this session and fails, proceeding to return/destroy it.
        session.setIsIrrelevantForWm("Failed to update resource allocation");
        // We assume AM might be bad so we will not try to kill the query here; just scrap the AM.
        // TODO: propagate this error to TezJobMonitor somehow? Without using killQuery
        syncWork.toRestartInUse.add(session);
        break;
    case IGNORE:
        return; // An update error for some session that was actually already killed by us.
    default:
        throw new AssertionError("Unknown state " + rr);
    }
}
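
Note how toReuse.remove(session) doubles as a test-and-clear: it deletes any pending reuse request for the session and returns it in one call, so the caller can both detect and invalidate the request. A minimal sketch of the idiom (names are illustrative):

import java.util.IdentityHashMap;

public class TestAndClear {
    public static void main(String[] args) {
        IdentityHashMap<Object, String> pending = new IdentityHashMap<Object, String>();
        Object session = new Object();
        pending.put(session, "reuse-request");

        // remove returns the mapped value (or null) and deletes the entry at once.
        String request = pending.remove(session);
        if (request != null) {
            System.out.println("cleared " + request);
        }
        System.out.println(pending.containsKey(session)); // false
    }
}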

From source file:org.apache.pig.pen.LineageTrimmingVisitor.java

private Map<LOLoad, DataBag> PruneBaseDataConstrainedCoverage(Map<LOLoad, DataBag> baseData,
        LineageTracer lineage, Collection<IdentityHashSet<Tuple>> equivalenceClasses) {

    IdentityHashMap<Tuple, Collection<Tuple>> membershipMap = lineage.getMembershipMap();
    IdentityHashMap<Tuple, Double> lineageGroupWeights = lineage.getWeightedCounts(2f, 1);

    // compute a mapping from lineage group to the set of equivalence
    // classes covered by it
    // IdentityHashMap<Tuple, Set<Integer>> lineageGroupToEquivClasses = new
    // IdentityHashMap<Tuple, Set<Integer>>();
    IdentityHashMap<Tuple, Set<IdentityHashSet<Tuple>>> lineageGroupToEquivClasses = new IdentityHashMap<Tuple, Set<IdentityHashSet<Tuple>>>();
    for (IdentityHashSet<Tuple> equivClass : equivalenceClasses) {
        for (Object t : equivClass) {
            Tuple lineageGroup = lineage.getRepresentative((Tuple) t);
            // Set<Integer> entry =
            // lineageGroupToEquivClasses.get(lineageGroup);
            Set<IdentityHashSet<Tuple>> entry = lineageGroupToEquivClasses.get(lineageGroup);
            if (entry == null) {
                // entry = new HashSet<Integer>();
                entry = new HashSet<IdentityHashSet<Tuple>>();
                lineageGroupToEquivClasses.put(lineageGroup, entry);
            }
            // entry.add(equivClassId);
            entry.add(equivClass);
        }
    }

    // select lineage groups such that we cover all equivalence classes
    IdentityHashSet<Tuple> selectedLineageGroups = new IdentityHashSet<Tuple>();
    while (!lineageGroupToEquivClasses.isEmpty()) {
        // greedily find the lineage group with the best "score", where
        // score = # equiv classes covered / group weight
        double bestWeight = -1;
        Tuple bestLineageGroup = null;
        Set<IdentityHashSet<Tuple>> bestEquivClassesCovered = null;
        int bestNumEquivClassesCovered = 0;
        for (Tuple lineageGroup : lineageGroupToEquivClasses.keySet()) {
            double weight = lineageGroupWeights.get(lineageGroup);

            Set<IdentityHashSet<Tuple>> equivClassesCovered = lineageGroupToEquivClasses.get(lineageGroup);
            int numEquivClassesCovered = equivClassesCovered.size();

            if ((numEquivClassesCovered > bestNumEquivClassesCovered)
                    || (numEquivClassesCovered == bestNumEquivClassesCovered && weight < bestWeight)) {

                if (selectedLineageGroups.contains(lineageGroup)) {
                    bestLineageGroup = lineageGroup;
                    bestEquivClassesCovered = equivClassesCovered;
                    continue;
                }

                bestWeight = weight;
                bestLineageGroup = lineageGroup;
                bestNumEquivClassesCovered = numEquivClassesCovered;
                bestEquivClassesCovered = equivClassesCovered;
            }
        }
        // add the best-scoring lineage group to the set of ones we plan to
        // retain
        selectedLineageGroups.add(bestLineageGroup);

        // make copy of bestEquivClassesCovered (or else the code that
        // follows won't work correctly, because removing from the reference
        // set)
        Set<IdentityHashSet<Tuple>> toCopy = bestEquivClassesCovered;
        bestEquivClassesCovered = new HashSet<IdentityHashSet<Tuple>>();
        bestEquivClassesCovered.addAll(toCopy);

        // remove the classes we've now covered
        Collection<Tuple> toRemove = new LinkedList<Tuple>();
        for (Tuple lineageGroup : lineageGroupToEquivClasses.keySet()) {

            Set<IdentityHashSet<Tuple>> equivClasses = lineageGroupToEquivClasses.get(lineageGroup);

            for (Iterator<IdentityHashSet<Tuple>> it = equivClasses.iterator(); it.hasNext();) {
                IdentityHashSet<Tuple> equivClass = it.next();
                if (bestEquivClassesCovered.contains(equivClass)) {
                    it.remove();
                }
            }
            if (equivClasses.size() == 0)
                toRemove.add(lineageGroup);

        }
        for (Tuple removeMe : toRemove)
            lineageGroupToEquivClasses.remove(removeMe);
    }

    // revise baseData to only contain the tuples that are part of
    // selectedLineageGroups
    IdentityHashSet<Tuple> tuplesToRetain = new IdentityHashSet<Tuple>();
    for (Tuple lineageGroup : selectedLineageGroups) {
        Collection<Tuple> members = membershipMap.get(lineageGroup);
        for (Tuple t : members)
            tuplesToRetain.add(t);
    }

    Map<LOLoad, DataBag> newBaseData = new HashMap<LOLoad, DataBag>();
    for (LOLoad loadOp : baseData.keySet()) {
        DataBag data = baseData.get(loadOp);
        // DataBag newData = new DataBag();
        DataBag newData = BagFactory.getInstance().newDefaultBag();
        for (Iterator<Tuple> it = data.iterator(); it.hasNext();) {
            Tuple t = it.next();
            if (tuplesToRetain.contains(t))
                newData.add(t);
        }
        newBaseData.put(loadOp, newData);
    }

    return newBaseData;
}
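
Throughout this example the Identity* collections are doing real work: Pig tuples may compare equal by value, but lineage tracking has to treat each physical tuple as distinct, so lineageGroupToEquivClasses.remove(removeMe) removes exactly the group object selected by the greedy cover, never a merely equal one.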