Example usage for java.util.IdentityHashMap IdentityHashMap()

Introduction

On this page you can find example usage for the java.util.IdentityHashMap no-argument constructor, IdentityHashMap().

Prototype

public IdentityHashMap() 

Document

Constructs a new, empty identity hash map with a default expected maximum size (21).
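
A minimal, self-contained sketch of what this constructor gives you (the class name IdentityHashMapDemo is made up for illustration): an empty map whose keys and values are compared by reference identity (==) rather than by equals().

import java.util.IdentityHashMap;
import java.util.Map;

public class IdentityHashMapDemo {
    public static void main(String[] args) {
        // Empty map, sized for the default expected maximum of 21 mappings.
        Map<String, Integer> map = new IdentityHashMap<String, Integer>();

        String a = new String("key");
        String b = new String("key"); // equals(a), but a distinct object

        map.put(a, 1);
        map.put(b, 2);

        // Keys are compared with ==, not equals(), so both entries remain.
        System.out.println(map.size()); // 2
        System.out.println(map.get(a)); // 1
        System.out.println(map.get(b)); // 2
    }
}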

Usage

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testClone() {
    IdentityHashMap srcMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(srcMap);

    // Check empty clone behavior
    IdentityHashMap dstMap = (IdentityHashMap) srcMap.clone();
    assertNotNull(dstMap);
    assertEquals(dstMap.size(), srcMap.size());
    // assertTrue(dstMap.values().toArray().equals(srcMap.values().toArray()));
    assertTrue(dstMap.keySet().equals(srcMap.keySet()));
    assertTrue(dstMap.entrySet().equals(srcMap.entrySet()));

    // Check non-empty clone behavior
    srcMap.put(KEY_1, VALUE_1);
    srcMap.put(KEY_2, VALUE_2);
    srcMap.put(KEY_3, VALUE_3);
    dstMap = (IdentityHashMap) srcMap.clone();
    assertNotNull(dstMap);
    assertEquals(dstMap.size(), srcMap.size());

    assertTrue(dstMap.keySet().equals(srcMap.keySet()));

    assertTrue(dstMap.entrySet().equals(srcMap.entrySet()));
}

From source file:sf.net.experimaestro.manager.plans.Operator.java

/**
 * Copy the operator
 *
 * @param deep Deep copy
 */
final public Operator copy(boolean deep) {
    return copy(deep, new IdentityHashMap<Object, Object>());
}
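
The rest of the Operator class is not shown in this listing. As a rough sketch of the general pattern (the Node type below is made up for illustration, standing in for Operator), an identity-keyed map from original to copy lets a deep copy reuse the clone of any object it has already visited, so shared sub-graphs stay shared and cycles do not recurse forever:

import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical node type, used only to illustrate the visited-map pattern.
class Node {
    final String label;
    final List<Node> children = new ArrayList<Node>();

    Node(String label) {
        this.label = label;
    }

    Node copy() {
        // Same idea as Operator.copy(deep): start each copy with a fresh identity map.
        return copy(new IdentityHashMap<Object, Object>());
    }

    Node copy(Map<Object, Object> copies) {
        // Look up by reference: two distinct nodes are never confused, even if equal.
        Node existing = (Node) copies.get(this);
        if (existing != null) {
            return existing;
        }
        Node clone = new Node(label);
        copies.put(this, clone); // register before recursing, so cycles terminate
        for (Node child : children) {
            clone.children.add(child.copy(copies));
        }
        return clone;
    }
}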

From source file:de.codesourcery.eve.skills.ui.model.impl.MarketGroupTreeModelBuilder.java

private FilteringTreeModel createTreeModel(boolean populateAllNodes) {
    long time = -System.currentTimeMillis();

    final IdentityHashMap<MarketGroup, ITreeNode> nodes = new IdentityHashMap<MarketGroup, ITreeNode>();

    // construct tree
    final List<MarketGroup> marketGroups = dataModel.getLeafMarketGroups();
    System.out.println("createTreeModel( populateAll = " + populateAllNodes + "): Filtering "
            + marketGroups.size() + " leaf market groups");

    //      int debugCount=0;
    for (MarketGroup marketGroup : marketGroups) {
        //         System.out.print(".");
        //         if ( (debugCount++ % 60 ) == 0 ) {
        //            System.out.println();
        //         }

        final ITreeNode node = getOrCreateTreeNode(marketGroup, nodes);
        if (populateAllNodes) {
            final List<InventoryType> members = getMembers(marketGroup);

            if (!members.isEmpty()) {
                for (InventoryType type : members) {
                    node.addChild(new DefaultTreeNode(type));
                }
            } else {
                nodes.remove(marketGroup);
                continue;
            }
        }

        if (marketGroup.getParent() != null) {
            MarketGroup current = marketGroup;
            while (current != null) {
                final ITreeNode toAdd = getOrCreateTreeNode(current, nodes);
                if (current.getParent() != null) {
                    ITreeNode parent = getOrCreateTreeNode(current.getParent(), nodes);
                    boolean add = true;
                    for (ITreeNode child : parent.getChildren()) {
                        if (ObjectUtils.equals(child.getValue(), current)) {
                            add = false;
                            break;
                        }
                    }
                    if (add) {
                        parent.addChild(toAdd);
                    }
                }
                current = current.getParent();
            }
        }
    }

    System.out.println("createTreeModel( populateAll = " + populateAllNodes + "): Initial tree creation took "
            + (time + System.currentTimeMillis()) + " ms");

    final ITreeNode root = new DefaultTreeNode();
    // convert all nodes without children to LazyTreeNode instances 
    for (ITreeNode node : nodes.values()) {
        final MarketGroup g = (MarketGroup) node.getValue();
        if (g.getParent() == null) { // top-level market group, add to root node
            root.addChild(wrapIfLeafNode(node));
        } else {
            wrapIfLeafNode(node);
        }
    }

    final FilteringTreeModel model = new FilteringTreeModel(new DefaultTreeModel(root));

    // sort tree nodes alphabetically
    final Comparator<ITreeNode> COMPARATOR = new Comparator<ITreeNode>() {
        @Override
        public int compare(ITreeNode o1, ITreeNode o2) {
            if (o1.getValue() instanceof MarketGroup && o2.getValue() instanceof MarketGroup) {
                final MarketGroup g1 = (MarketGroup) o1.getValue();
                final MarketGroup g2 = (MarketGroup) o2.getValue();
                return g1.getName().compareTo(g2.getName());
            } else if (o1.getValue() instanceof InventoryType && o2.getValue() instanceof InventoryType) {
                final InventoryType g1 = (InventoryType) o1.getValue();
                final InventoryType g2 = (InventoryType) o2.getValue();
                return g1.getName().compareTo(g2.getName());
            }
            throw new RuntimeException(
                    "Internal error,unhandled node values: " + o1.getValue() + " / " + o2.getValue());
        }
    };
    model.sortChildren(root, COMPARATOR, true);

    time += System.currentTimeMillis();
    System.out.println("createTreeModel( populateAll = " + populateAllNodes + ") took " + time + " ms");
    return model;
}
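
The getOrCreateTreeNode helper used above is not part of this listing. Its likely shape, reduced to a standalone sketch (IdentityNodeCache, K and N are invented names standing in for the builder, MarketGroup and ITreeNode), is an identity-keyed get-or-create cache, so each MarketGroup instance maps to exactly one tree node no matter how many times it is revisited:

import java.util.IdentityHashMap;
import java.util.Map;
import java.util.function.Function;

// Sketch of an identity-keyed "get or create" cache in the spirit of getOrCreateTreeNode.
class IdentityNodeCache<K, N> {
    private final Map<K, N> nodes = new IdentityHashMap<K, N>();
    private final Function<K, N> factory;

    IdentityNodeCache(Function<K, N> factory) {
        this.factory = factory;
    }

    N getOrCreate(K key) {
        // Each distinct key instance gets exactly one node, even if two keys
        // happen to be equals() to each other.
        N node = nodes.get(key);
        if (node == null) {
            node = factory.apply(key);
            nodes.put(key, node);
        }
        return node;
    }
}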

From source file:net.datenwerke.sandbox.SandboxLoader.java

/**
 * Initializes this classloader with the config provided by the SandboxContext
 *
 * @param context
 */
public void init(SandboxContext context) {
    securityManager.checkPermission(new SandboxRuntimePermission("initSandboxLoader"));

    /* name */
    this.name = context.getName();

    /* jars */
    if (null != context.getWhitelistedJars() && !context.getWhitelistedJars().isEmpty())
        whitelistedUcp = new URLClassPath(context.getWhitelistedJars().toArray(new URL[] {}));
    else
        whitelistedUcp = null;

    if (null != context.getJarsForApplicationLoader() && !context.getJarsForApplicationLoader().isEmpty())
        bypassUcp = new URLClassPath(context.getJarsForApplicationLoader().toArray(new URL[] {}));
    else
        bypassUcp = null;

    /* load configuration */
    classesToLoadWithParent = new HashSet<String>(context.getClassesForApplicationLoader());
    classesToLoadWithParent.addAll(BYPASSCLASSES);
    classesByPrefixToLoadWithParent = new HashSet<String>(context.getClassPrefixesForApplicationLoader());

    classesToLoadDirectly = new HashSet<String>(context.getClassesForSandboxLoader());
    classesByPrefixToLoadDirectly = new HashSet<String>(context.getClassPrefixesForSandboxLoader());

    /* subloaders */
    this.hasSubloaders = !context.getSubLoaderContextByClassMap().isEmpty()
            || !context.getSubLoaderContextByClassPrefixMap().isEmpty()
            || !context.getSubLoaderContextByJar().isEmpty();
    if (hasSubloaders) {
        IdentityHashMap<SandboxContext, SandboxLoader> loaderMap = new IdentityHashMap<SandboxContext, SandboxLoader>();

        for (Entry<String, SandboxContext> e : context.getSubLoaderContextByClassMap().entrySet())
            subLoaderCache.put(e.getKey(), initSubLoader(loaderMap, e.getValue()));
        for (Entry<String, SandboxContext> e : context.getSubLoaderContextByClassPrefixMap().entrySet())
            subLoaderPrefixCache.put(e.getKey(), initSubLoader(loaderMap, e.getValue()));
        for (Entry<URL, SandboxContext> e : context.getSubLoaderContextByJar().entrySet())
            subLoaderByJar.put(new URLClassPath(new URL[] { e.getKey() }),
                    initSubLoader(loaderMap, e.getValue()));
    }

    /* debug */
    this.debug = context.isDebug();

    this.codesource = context.getCodesource();
    if (null == this.codesource)
        this.codesource = DEFAULT_CODESOURCE_PREFIX.concat("/")
                .concat(null == name || "".equals(name) ? "default" : name).concat("/");

    this.removeFinalizers = context.isRemoveFinalizers();

    this.enhancer = context.getLoaderEnhancer();

    /* store context */
    this.context = context;
}

From source file:ca.uhn.fhir.util.FhirTerser.java

public List<ResourceReferenceInfo> getAllResourceReferences(final IBaseResource theResource) {
    final ArrayList<ResourceReferenceInfo> retVal = new ArrayList<ResourceReferenceInfo>();
    BaseRuntimeElementCompositeDefinition<?> def = myContext.getResourceDefinition(theResource);
    visit(new IdentityHashMap<Object, Object>(), theResource, theResource, null, null, def,
            new IModelVisitor() {
                @Override
                public void acceptElement(IBaseResource theOuterResource, IBase theElement,
                        List<String> thePathToElement, BaseRuntimeChildDefinition theChildDefinition,
                        BaseRuntimeElementDefinition<?> theDefinition) {
                    if (theElement == null || theElement.isEmpty()) {
                        return;
                    }
                    if (IBaseReference.class.isAssignableFrom(theElement.getClass())) {
                        retVal.add(new ResourceReferenceInfo(myContext, theOuterResource, thePathToElement,
                                (IBaseReference) theElement));
                    }
                }
            });
    return retVal;
}

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testContainsKey() {
    IdentityHashMap hashMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(hashMap);

    assertFalse(hashMap.containsKey(KEY_TEST_CONTAINS_KEY));
    hashMap.put(KEY_TEST_CONTAINS_KEY, VALUE_TEST_CONTAINS_KEY);
    assertTrue(hashMap.containsKey(KEY_TEST_CONTAINS_KEY));
    assertFalse(hashMap.containsKey(VALUE_TEST_CONTAINS_DOES_NOT_EXIST));

    assertFalse(hashMap.containsKey(null));
    hashMap.put(null, VALUE_TEST_CONTAINS_KEY);
    assertTrue(hashMap.containsKey(null));
}

From source file:com.thesmartweb.swebrank.Moz.java

/**
 * Method that captures the various Moz metrics for the provided urls (with the help of the samples at https://github.com/seomoz/SEOmozAPISamples)
 * and ranks them accordingly
 * @param links the urls to analyze
 * @param top_count the amount of results to keep when we rerank the results according to their value of a specific Moz metric
 * @param moz_threshold the threshold to the Moz value to use
 * @param moz_threshold_option flag if we are going to use threshold in the Moz value or not
 * @param mozMetrics list that contains which metric to use for Moz: 1st place is Page Authority, 2nd external mozRank, 3rd mozTrust, 4th Domain Authority and 5th MozRank (the default)
 * @param config_path path that has the config files with the api keys and secret for Moz
 * @return an array with the links sorted according to their moz values
 */
public String[] perform(String[] links, int top_count, Double moz_threshold, Boolean moz_threshold_option,
        List<Boolean> mozMetrics, String config_path) {
    //=====short codes for the metrics 
    long upa = 34359738368L;//page authority
    long pda = 68719476736L;//domain authority
    long uemrp = 1048576;//mozrank external equity
    long utrp = 131072;//moztrust 
    long fmrp = 32768;//mozrank subdomain
    long umrp = 16384;//mozrank
    System.gc();
    System.out.println("into Moz");
    Double[] mozRanks = new Double[links.length];
    DataManipulation textualmanipulation = new DataManipulation();
    for (int i = 0; i < links.length; i++) {
        if (links[i] != null) {
            if (!textualmanipulation.StructuredFileCheck(links[i])) {
                try {
                    Thread.sleep(10000);
                    URLMetricsService urlMetricsservice;
                    urlMetricsservice = authenticate(config_path);
                    String objectURL = links[i].substring(0, links[i].length());
                    Gson gson = new Gson();
                    if (mozMetrics.get(1)) {//Domain Authority
                        String response = urlMetricsservice.getUrlMetrics(objectURL, pda);
                        UrlResponse res = gson.fromJson(response, UrlResponse.class);
                        System.gc();
                        if (res != null && !(response.equalsIgnoreCase("{}"))) {
                            String mozvalue_string = res.getPda();
                            mozRanks[i] = Double.parseDouble(mozvalue_string);
                        } else {
                            mozRanks[i] = Double.parseDouble("0");
                        }
                    } else if (mozMetrics.get(2)) {//External MozRank
                        String response = urlMetricsservice.getUrlMetrics(objectURL, uemrp);
                        UrlResponse res = gson.fromJson(response, UrlResponse.class);
                        System.gc();
                        if (res != null && !(response.equalsIgnoreCase("{}"))) {
                            String mozvalue_string = res.getUemrp();
                            mozRanks[i] = Double.parseDouble(mozvalue_string);
                        } else {
                            mozRanks[i] = Double.parseDouble("0");
                        }
                    } else if (mozMetrics.get(3)) {//MozRank
                        String response = urlMetricsservice.getUrlMetrics(objectURL, umrp);
                        UrlResponse res = gson.fromJson(response, UrlResponse.class);
                        System.gc();
                        if (res != null && !(response.equalsIgnoreCase("{}"))) {
                            String mozvalue_string = res.getUmrp();
                            mozRanks[i] = Double.parseDouble(mozvalue_string);
                        } else {
                            mozRanks[i] = Double.parseDouble("0");
                        }
                    } else if (mozMetrics.get(4)) {//MozTrust
                        String response = urlMetricsservice.getUrlMetrics(objectURL, utrp);
                        UrlResponse res = gson.fromJson(response, UrlResponse.class);
                        System.gc();
                        if (res != null && !(response.equalsIgnoreCase("{}"))) {
                            String mozvalue_string = res.getUtrp();
                            mozRanks[i] = Double.parseDouble(mozvalue_string);
                        } else {
                            mozRanks[i] = Double.parseDouble("0");
                        }
                    } else if (mozMetrics.get(5)) {//Page Authority
                        String response = urlMetricsservice.getUrlMetrics(objectURL, upa);
                        UrlResponse res = gson.fromJson(response, UrlResponse.class);
                        System.gc();
                        if (res != null && !(response.equalsIgnoreCase("{}"))) {
                            String mozvalue_string = res.getUpa();
                            mozRanks[i] = Double.parseDouble(mozvalue_string);
                        } else {
                            mozRanks[i] = Double.parseDouble("0");
                        }
                    } else if (mozMetrics.get(6)) {//subdomain MozRank
                        String response = urlMetricsservice.getUrlMetrics(objectURL, fmrp);
                        UrlResponse res = gson.fromJson(response, UrlResponse.class);
                        System.gc();
                        if (res != null && !(response.equalsIgnoreCase("{}"))) {
                            String mozvalue_string = res.getFmrp();
                            mozRanks[i] = Double.parseDouble(mozvalue_string);
                        } else {
                            mozRanks[i] = Double.parseDouble("0");
                        }
                    }
                } catch (InterruptedException | JsonSyntaxException | NumberFormatException ex) {
                    System.out.println("exception moz:" + ex.toString());
                    mozRanks[i] = Double.parseDouble("0");
                    String[] links_out = null;
                    return links_out;
                }
            } else {
                mozRanks[i] = Double.parseDouble("0");
            }
        }
    }
    try {//ranking of the urls according to their moz score
         //get the scores to a list
        System.out.println("I am goint to rank the scores of Moz");
        System.gc();
        List<Double> seomozRanks_scores_list = Arrays.asList(mozRanks);
        //create a hashmap in order to map the scores with the indexes
        System.gc();
        IdentityHashMap<Double, Integer> originalIndices = new IdentityHashMap<Double, Integer>();
        //copy the original scores list
        System.gc();
        for (int i = 0; i < seomozRanks_scores_list.size(); i++) {
            originalIndices.put(seomozRanks_scores_list.get(i), i);
            System.gc();
        }
        //sort the scores
        List<Double> sorted_seomozRanks_scores = new ArrayList<Double>();
        System.gc();
        sorted_seomozRanks_scores.addAll(seomozRanks_scores_list);
        System.gc();
        sorted_seomozRanks_scores.removeAll(Collections.singleton(null));
        System.gc();
        if (!sorted_seomozRanks_scores.isEmpty()) {
            Collections.sort(sorted_seomozRanks_scores, Collections.reverseOrder());
        }
        //get the original indexes
        //the max amount of results
        int[] origIndex = new int[150];
        if (!sorted_seomozRanks_scores.isEmpty()) {
            //if we want to take the top scores(for example top 10)
            if (!moz_threshold_option) {
                origIndex = new int[top_count];
                for (int i = 0; i < top_count; i++) {
                    Double score = sorted_seomozRanks_scores.get(i);
                    System.gc();
                    // Lookup original index efficiently
                    origIndex[i] = originalIndices.get(score);
                }
            }
            //if we have a threshold
            else if (moz_threshold_option) {
                int j = 0;
                int counter = 0;
                while (j < sorted_seomozRanks_scores.size()) {
                    if (sorted_seomozRanks_scores.get(j).compareTo(moz_threshold) >= 0) {
                        counter++;
                    }
                    j++;
                }
                origIndex = new int[counter];
                for (int k = 0; k < origIndex.length - 1; k++) {
                    System.gc();
                    Double score = sorted_seomozRanks_scores.get(k);
                    origIndex[k] = originalIndices.get(score);
                }
            }
        }
        String[] links_out = new String[origIndex.length];
        for (int jj = 0; jj < origIndex.length; jj++) {
            System.gc();
            links_out[jj] = links[origIndex[jj]];
        }
        System.gc();
        System.out.println("I have ranked the scores of moz");
        return links_out;
    } catch (Exception ex) {
        System.out.println("exception moz list" + ex.toString());
        //Logger.getLogger(Moz.class.getName()).log(Level.SEVERE, null, ex);
        String[] links_out = null;
        return links_out;
    }
}
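
A detail worth calling out in the snippet above is the IdentityHashMap<Double, Integer>: every score is a separately boxed Double object, and the sorted copy of the list holds those same references, so an identity lookup recovers each link's original index even when two links have numerically equal scores (an ordinary HashMap keyed on Double would collapse equal scores into a single entry). A standalone sketch of that trick, with made-up scores:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;

public class IdentityRankingSketch {
    public static void main(String[] args) {
        // Deprecated constructor used deliberately so the two 5.0 scores are distinct objects.
        Double[] scores = { new Double(5.0), new Double(9.0), new Double(5.0) };

        Map<Double, Integer> originalIndices = new IdentityHashMap<Double, Integer>();
        for (int i = 0; i < scores.length; i++) {
            originalIndices.put(scores[i], i);
        }

        // Sorting a copy keeps the same Double references, so identity lookups
        // still find each object's original position.
        List<Double> sorted = new ArrayList<Double>(Arrays.asList(scores));
        Collections.sort(sorted, Collections.reverseOrder());

        for (Double score : sorted) {
            // The two equal 5.0 scores map back to their own indices (0 and 2),
            // which a plain HashMap keyed by value could not distinguish.
            System.out.println(score + " came from index " + originalIndices.get(score));
        }
    }
}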

From source file:org.eclipse.php.internal.core.ast.rewrite.ASTRewriteAnalyzer.java

/**
 * Constructor for ASTRewriteAnalyzer.
 * 
 * @param scanner
 *            An {@link AstLexer} scanner.
 * @param document
 *            The IDocument that contains the content of the compilation
 *            unit to rewrite.
 * @param lineInfo
 *            line information for the content of the compilation unit to
 *            rewrite.
 * @param rootEdit
 *            the edit to add all generated edits to
 * @param eventStore
 *            the event store containing the description of changes
 * @param nodeInfos
 *            annotations to nodes, such as if a node is a string
 *            placeholder or a copy target
 * @param comments
 *            list of comments of the compilation unit to rewrite (elements
 *            of type <code>Comment</code>) or <code>null</code>.
 * @param options
 *            the current options (formatting/compliance) or
 *            <code>null</code>.
 * @param extendedSourceRangeComputer
 *            the source range computer to use
 */
public ASTRewriteAnalyzer(AstLexer scanner, IDocument document, LineInformation lineInfo, String lineDelim,
        TextEdit rootEdit, RewriteEventStore eventStore, NodeInfoStore nodeInfos, List comments, Map options,
        TargetSourceRangeComputer extendedSourceRangeComputer) {
    this.scanner = scanner;
    this.eventStore = eventStore;
    this.document = document;
    this.content = document.get().toCharArray();
    this.lineInfo = lineInfo;
    this.nodeInfos = nodeInfos;
    this.tokenScanner = null;
    this.currentEdit = rootEdit;
    this.sourceCopyInfoToEdit = new IdentityHashMap();
    this.sourceCopyEndNodes = new Stack();

    this.formatter = new ASTRewriteFormatter(document, nodeInfos, eventStore, options, lineDelim,
            scanner.getPHPVersion(), true);

    this.extendedSourceRangeComputer = extendedSourceRangeComputer;
    this.lineCommentEndOffsets = new LineCommentEndOffsets(comments);
}

From source file:com.google.gwt.emultest.java.util.IdentityHashMapTest.java

public void testContainsValue() {
    IdentityHashMap hashMap = new IdentityHashMap();
    checkEmptyHashMapAssumptions(hashMap);

    assertFalse("check contains of empty map", hashMap.containsValue(VALUE_TEST_CONTAINS_KEY));
    hashMap.put(KEY_TEST_CONTAINS_VALUE, VALUE_TEST_CONTAINS_KEY);
    assertTrue("check contains of map with element", hashMap.containsValue(VALUE_TEST_CONTAINS_KEY));
    assertFalse("check contains of map other element",
            hashMap.containsValue(VALUE_TEST_CONTAINS_DOES_NOT_EXIST));

    if (useNullValue()) {
        assertFalse(hashMap.containsValue(null));
    }
    hashMap.put(KEY_TEST_CONTAINS_VALUE, null);
    assertTrue(hashMap.containsValue(null));
}

From source file:org.apache.pig.pen.LineageTrimmingVisitor.java

private Map<LOLoad, DataBag> PruneBaseDataConstrainedCoverage(Map<LOLoad, DataBag> baseData,
        LineageTracer lineage, Collection<IdentityHashSet<Tuple>> equivalenceClasses) {

    IdentityHashMap<Tuple, Collection<Tuple>> membershipMap = lineage.getMembershipMap();
    IdentityHashMap<Tuple, Double> lineageGroupWeights = lineage.getWeightedCounts(2f, 1);

    // compute a mapping from lineage group to the set of equivalence
    // classes covered by it
    // IdentityHashMap<Tuple, Set<Integer>> lineageGroupToEquivClasses = new
    // IdentityHashMap<Tuple, Set<Integer>>();
    IdentityHashMap<Tuple, Set<IdentityHashSet<Tuple>>> lineageGroupToEquivClasses = new IdentityHashMap<Tuple, Set<IdentityHashSet<Tuple>>>();
    for (IdentityHashSet<Tuple> equivClass : equivalenceClasses) {
        for (Object t : equivClass) {
            Tuple lineageGroup = lineage.getRepresentative((Tuple) t);
            // Set<Integer> entry =
            // lineageGroupToEquivClasses.get(lineageGroup);
            Set<IdentityHashSet<Tuple>> entry = lineageGroupToEquivClasses.get(lineageGroup);
            if (entry == null) {
                // entry = new HashSet<Integer>();
                entry = new HashSet<IdentityHashSet<Tuple>>();
                lineageGroupToEquivClasses.put(lineageGroup, entry);
            }
            // entry.add(equivClassId);
            entry.add(equivClass);
        }
    }

    // select lineage groups such that we cover all equivalence classes
    IdentityHashSet<Tuple> selectedLineageGroups = new IdentityHashSet<Tuple>();
    while (!lineageGroupToEquivClasses.isEmpty()) {
        // greedily find the lineage group with the best "score", where
        // score = # equiv classes covered / group weight
        double bestWeight = -1;
        Tuple bestLineageGroup = null;
        Set<IdentityHashSet<Tuple>> bestEquivClassesCovered = null;
        int bestNumEquivClassesCovered = 0;
        for (Tuple lineageGroup : lineageGroupToEquivClasses.keySet()) {
            double weight = lineageGroupWeights.get(lineageGroup);

            Set<IdentityHashSet<Tuple>> equivClassesCovered = lineageGroupToEquivClasses.get(lineageGroup);
            int numEquivClassesCovered = equivClassesCovered.size();

            if ((numEquivClassesCovered > bestNumEquivClassesCovered)
                    || (numEquivClassesCovered == bestNumEquivClassesCovered && weight < bestWeight)) {

                if (selectedLineageGroups.contains(lineageGroup)) {
                    bestLineageGroup = lineageGroup;
                    bestEquivClassesCovered = equivClassesCovered;
                    continue;
                }

                bestWeight = weight;
                bestLineageGroup = lineageGroup;
                bestNumEquivClassesCovered = numEquivClassesCovered;
                bestEquivClassesCovered = equivClassesCovered;
            }
        }
        // add the best-scoring lineage group to the set of ones we plan to
        // retain
        selectedLineageGroups.add(bestLineageGroup);

        // make copy of bestEquivClassesCovered (or else the code that
        // follows won't work correctly, because removing from the reference
        // set)
        Set<IdentityHashSet<Tuple>> toCopy = bestEquivClassesCovered;
        bestEquivClassesCovered = new HashSet<IdentityHashSet<Tuple>>();
        bestEquivClassesCovered.addAll(toCopy);

        // remove the classes we've now covered
        Collection<Tuple> toRemove = new LinkedList<Tuple>();
        for (Tuple lineageGroup : lineageGroupToEquivClasses.keySet()) {

            Set<IdentityHashSet<Tuple>> equivClasses = lineageGroupToEquivClasses.get(lineageGroup);

            for (Iterator<IdentityHashSet<Tuple>> it = equivClasses.iterator(); it.hasNext();) {
                IdentityHashSet<Tuple> equivClass = it.next();
                if (bestEquivClassesCovered.contains(equivClass)) {
                    it.remove();
                }
            }
            if (equivClasses.size() == 0)
                toRemove.add(lineageGroup);

        }
        for (Tuple removeMe : toRemove)
            lineageGroupToEquivClasses.remove(removeMe);
    }

    // revise baseData to only contain the tuples that are part of
    // selectedLineageGroups
    IdentityHashSet<Tuple> tuplesToRetain = new IdentityHashSet<Tuple>();
    for (Tuple lineageGroup : selectedLineageGroups) {
        Collection<Tuple> members = membershipMap.get(lineageGroup);
        for (Tuple t : members)
            tuplesToRetain.add(t);
    }

    Map<LOLoad, DataBag> newBaseData = new HashMap<LOLoad, DataBag>();
    for (LOLoad loadOp : baseData.keySet()) {
        DataBag data = baseData.get(loadOp);
        // DataBag newData = new DataBag();
        DataBag newData = BagFactory.getInstance().newDefaultBag();
        for (Iterator<Tuple> it = data.iterator(); it.hasNext();) {
            Tuple t = it.next();
            if (tuplesToRetain.contains(t))
                newData.add(t);
        }
        newBaseData.put(loadOp, newData);
    }

    return newBaseData;
}