Example usage for com.google.common.collect Multimap removeAll

List of usage examples for com.google.common.collect Multimap removeAll

Introduction

In this page you can find the example usage for com.google.common.collect Multimap removeAll.

Prototype

Collection<V> removeAll(@Nullable Object key);

Source Link

Document

Removes all values associated with the given key, returning the collection of removed values (empty if the key was absent).

Usage

From source file:com.b2international.snowowl.snomed.datastore.index.change.RelationshipChangeProcessor.java

@Override
public void process(ICDOCommitChangeSet commitChangeSet, RevisionSearcher searcher) throws IOException {
    // Collect the reference set membership changes of this commit, keyed by the
    // ID of the relationship each change refers to.
    final Multimap<String, RefSetMemberChange> referringRefSets = memberChangeProcessor.process(commitChangeSet,
            searcher);

    // Drop index revisions of relationships detached (deleted) in this commit.
    deleteRevisions(SnomedRelationshipIndexEntry.class,
            commitChangeSet.getDetachedComponents(SnomedPackage.Literals.RELATIONSHIP));

    // Index newly created relationships while remembering their IDs, so that
    // member changes targeting them can be excluded below. (The previous code
    // built a full Map<String, Relationship> of which only the key set was
    // ever used, and iterated the new components a second time to index them.)
    final Set<String> newRelationshipIds = newHashSet();
    for (Relationship relationship : commitChangeSet.getNewComponents(Relationship.class)) {
        newRelationshipIds.add(relationship.getId());
        indexNewRevision(relationship.cdoID(), SnomedRelationshipIndexEntry.builder(relationship).build());
    }

    // Relationships whose CDO objects were modified in this commit, keyed by ID.
    final Map<String, Relationship> changedRelationshipsById = StreamSupport
            .stream(commitChangeSet.getDirtyComponents(Relationship.class).spliterator(), false)
            .collect(Collectors.toMap(relationship -> relationship.getId(), relationship -> relationship));

    // A relationship needs re-indexing if it changed itself, or if a referring
    // reference set member changed — except brand-new relationships, which were
    // already indexed above.
    final Set<String> changedRelationshipIds = newHashSet(changedRelationshipsById.keySet());
    final Set<String> referencedRelationshipIds = newHashSet(referringRefSets.keySet());
    referencedRelationshipIds.removeAll(newRelationshipIds);
    changedRelationshipIds.addAll(referencedRelationshipIds);

    // Load the current index revisions of all affected relationships in one query.
    final Query<SnomedRelationshipIndexEntry> query = Query.select(SnomedRelationshipIndexEntry.class)
            .where(SnomedRelationshipIndexEntry.Expressions.ids(changedRelationshipIds))
            .limit(changedRelationshipIds.size()).build();

    final Hits<SnomedRelationshipIndexEntry> changedRelationshipHits = searcher.search(query);
    final ImmutableMap<String, SnomedRelationshipIndexEntry> changedRelationshipRevisionsById = Maps
            .uniqueIndex(changedRelationshipHits, ComponentUtils.<String>getIdFunction());

    for (final String id : changedRelationshipIds) {
        final SnomedRelationshipIndexEntry currentDoc = changedRelationshipRevisionsById.get(id);
        if (currentDoc == null) {
            throw new IllegalStateException(
                    String.format("Current relationship revision should not be null for %s", id));
        }

        // Base the new document on the dirty CDO object when one exists,
        // otherwise on the current index revision.
        final Relationship relationship = changedRelationshipsById.get(id);
        final Builder doc;
        if (relationship != null) {
            doc = SnomedRelationshipIndexEntry.builder(relationship);
        } else {
            doc = SnomedRelationshipIndexEntry.builder(currentDoc);
        }

        // removeAll(id) both fetches and clears the pending member changes for
        // this relationship before they are applied to the document.
        final Collection<String> currentMemberOf = currentDoc.getMemberOf();
        final Collection<String> currentActiveMemberOf = currentDoc.getActiveMemberOf();
        new ReferenceSetMembershipUpdater(referringRefSets.removeAll(id), currentMemberOf,
                currentActiveMemberOf).update(doc);

        indexChangedRevision(currentDoc.getStorageKey(), doc.build());
    }
}

From source file:com.google.devtools.build.lib.query2.SkyQueryEnvironment.java

/**
 * Returns FileValue keys for which there may be relevant (from the perspective of {@link
 * #getRBuildFiles}) FileValues in the graph corresponding to the given {@code pathFragments},
 * which are assumed to be file paths.
 *
 * <p>To do this, we emulate the {@link ContainingPackageLookupFunction} logic: for each given
 * file path, we look for the nearest ancestor directory (starting with its parent directory), if
 * any, that has a package. The {@link PackageLookupValue} for this package tells us the package
 * root that we should use for the {@link RootedPath} for the {@link FileValue} key.
 *
 * <p>Note that there may not be nodes in the graph corresponding to the returned SkyKeys.
 */
Collection<SkyKey> getSkyKeysForFileFragments(Iterable<PathFragment> pathFragments)
        throws InterruptedException {
    Set<SkyKey> result = new HashSet<>();
    // Maps each directory currently being probed to the original file path(s)
    // that led us to it; seeded with each file mapped to itself.
    Multimap<PathFragment, PathFragment> currentToOriginal = ArrayListMultimap.create();
    for (PathFragment fragment : pathFragments) {
        currentToOriginal.put(fragment, fragment);
    }
    // Walk up the directory tree level by level until every path is resolved
    // or runs out of ancestors.
    while (!currentToOriginal.isEmpty()) {
        Multimap<SkyKey, PathFragment> lookupKeyToOriginal = ArrayListMultimap.create();
        Multimap<SkyKey, PathFragment> lookupKeyToCurrent = ArrayListMultimap.create();
        for (Entry<PathFragment, PathFragment> mapping : currentToOriginal.entries()) {
            PathFragment currentPath = mapping.getKey();
            PathFragment originalPath = mapping.getValue();
            for (SkyKey lookupKey : getPkgLookupKeysForFile(originalPath, currentPath)) {
                lookupKeyToOriginal.put(lookupKey, originalPath);
                lookupKeyToCurrent.put(lookupKey, currentPath);
            }
        }
        Map<SkyKey, SkyValue> lookupValues = graph.getSuccessfulValues(lookupKeyToOriginal.keySet());
        for (Map.Entry<SkyKey, SkyValue> lookupEntry : lookupValues.entrySet()) {
            SkyKey lookupKey = lookupEntry.getKey();
            PackageLookupValue lookupValue = (PackageLookupValue) lookupEntry.getValue();
            if (lookupValue.packageExists()) {
                // A containing package was found: emit FileValue keys for every
                // original file that resolved through this package lookup.
                Collection<PathFragment> originalFiles = lookupKeyToOriginal.get(lookupKey);
                Preconditions.checkState(!originalFiles.isEmpty(), lookupEntry);
                for (PathFragment fileName : originalFiles) {
                    result.add(FileValue.key(RootedPath.toRootedPath(lookupValue.getRoot(), fileName)));
                }
                // These paths are resolved; stop probing their ancestors.
                for (PathFragment currentPath : lookupKeyToCurrent.get(lookupKey)) {
                    currentToOriginal.removeAll(currentPath);
                }
            }
        }
        // Move every unresolved path one directory up for the next iteration.
        Multimap<PathFragment, PathFragment> nextLevel = ArrayListMultimap.create();
        for (PathFragment unresolved : currentToOriginal.keySet()) {
            PathFragment parent = unresolved.getParentDirectory();
            if (parent != null) {
                nextLevel.putAll(parent, currentToOriginal.get(unresolved));
            }
        }
        currentToOriginal = nextLevel;
    }
    return result;
}

From source file:org.sosy_lab.cpachecker.cfa.postprocessing.global.singleloop.CFASingleLoopTransformation.java

/**
 * Simplify the new graph by removing empty subgraphs and dummy edges.
 *
 * @param pStartNode the start node of the new control flow automaton.
 * @param pNewPredecessorsToPC the mapping of program counter value assignment predecessors to program counter values. Must be mutable.
 * @param pNewSuccessorsToPC the mapping of program counter value assumption successors to program counter values. Must be mutable.
 * @param pImmutableNewSuccessorsToPC bidirectional mapping of program counter values to successor nodes; only its inverse view is read here.
 * @param pGlobalNewToOld mapping of nodes used to resolve the successor of a dummy edge — presumably new-to-old node copies; confirm with callers.
 * @throws InterruptedException if a shutdown has been requested by the registered shutdown notifier.
 */
private void simplify(CFANode pStartNode, Multimap<Integer, CFANode> pNewPredecessorsToPC,
        Map<Integer, CFANode> pNewSuccessorsToPC, BiMap<Integer, CFANode> pImmutableNewSuccessorsToPC,
        Map<CFANode, CFANode> pGlobalNewToOld) throws InterruptedException {
    // Inverse view: program counter value previously assigned to a successor node.
    Map<CFANode, Integer> pcToNewSuccessors = pImmutableNewSuccessorsToPC.inverse();

    // Phase 1: collapse subgraphs that consist of nothing but a single dummy edge,
    // redirecting their predecessors onto the preceding program counter value.
    // The key set is copied because the multimap is mutated inside the loop.
    for (int replaceablePCValue : new ArrayList<>(pNewPredecessorsToPC.keySet())) {
        this.shutdownNotifier.shutdownIfNecessary();
        CFANode newSuccessor = pNewSuccessorsToPC.get(replaceablePCValue);
        // Copy the value view as well, for the same reason.
        List<CFANode> tailsOfRedundantSubgraph = new ArrayList<>(pNewPredecessorsToPC.get(replaceablePCValue));
        for (CFANode tailOfRedundantSubgraph : tailsOfRedundantSubgraph) {
            Integer precedingPCValue;
            CFAEdge dummyEdge;
            // If a subgraph consists only of a dummy edge, eliminate it completely.
            // NOTE: this condition deliberately relies on short-circuit evaluation
            // and the embedded assignments to dummyEdge and precedingPCValue; the
            // branch body reads both. Do not reorder the conjuncts.
            if (tailOfRedundantSubgraph.getNumEnteringEdges() == 1
                    && isDummyEdge(dummyEdge = tailOfRedundantSubgraph.getEnteringEdge(0))
                    && dummyEdge.getPredecessor().getNumEnteringEdges() == 0
                    && (precedingPCValue = pcToNewSuccessors.get(dummyEdge.getPredecessor())) != null) {
                // Remove the redundant PC value and re-home all of its
                // predecessors onto the preceding PC value.
                Integer predToRemove = pcToNewSuccessors.get(newSuccessor);
                CFANode removed = pNewSuccessorsToPC.remove(predToRemove);
                assert removed == newSuccessor;
                for (CFANode removedPredecessor : pNewPredecessorsToPC.removeAll(predToRemove)) {
                    pNewPredecessorsToPC.put(precedingPCValue, removedPredecessor);
                }
                // The tail itself is part of the eliminated subgraph, not a predecessor.
                pNewPredecessorsToPC.remove(precedingPCValue, tailOfRedundantSubgraph);
                pNewSuccessorsToPC.remove(precedingPCValue);
                pNewSuccessorsToPC.put(precedingPCValue, newSuccessor);
            }
        }
    }
    // Phase 2: remove the remaining dummy edges found from the start node,
    // splicing the graph back together around each removed edge.
    for (CFAEdge oldDummyEdge : findEdges(DUMMY_EDGE_PREDICATE, pStartNode)) {
        this.shutdownNotifier.shutdownIfNecessary();
        CFANode successor = pGlobalNewToOld.get(oldDummyEdge.getSuccessor());
        for (CFAEdge edge : CFAUtils.enteringEdges(successor).toList()) {
            if (isDummyEdge(edge)) {
                removeFromNodes(edge);
                CFANode predecessor = edge.getPredecessor();
                /*
                 * If the subgraph is entered by a dummy edge adjust the program
                 * counter successor.
                 */
                Integer precedingPCValue;
                if (predecessor.getNumEnteringEdges() == 0
                        && (precedingPCValue = pcToNewSuccessors.get(predecessor)) != null) {
                    pcToNewSuccessors.remove(predecessor);
                    pNewSuccessorsToPC.remove(precedingPCValue);
                    pNewSuccessorsToPC.put(precedingPCValue, edge.getSuccessor());
                } else {
                    /*
                     * If the dummy edge is somewhere in between, replace its
                     * predecessor by its successor in the graph.
                     */
                    for (CFAEdge edgeEnteringPredecessor : CFAUtils.enteringEdges(predecessor).toList()) {
                        removeFromNodes(edgeEnteringPredecessor);
                        edgeEnteringPredecessor = copyCFAEdgeWithNewNodes(edgeEnteringPredecessor,
                                edgeEnteringPredecessor.getPredecessor(), successor, pGlobalNewToOld);
                        addToNodes(edgeEnteringPredecessor);
                    }
                }
            }
        }
    }
}

From source file:org.openmicroscopy.shoola.agents.measurement.view.MeasurementViewerComponent.java

/**
 * Implemented as specified by the {@link MeasurementViewer} interface.
 * @see MeasurementViewer#tagSelectedFigures()
 */
public void tagSelectedFigures(List<AnnotationData> tags) {
    Collection<Figure> figures = view.getSelectedFiguresFromTables();
    if (CollectionUtils.isEmpty(figures)) {
        return;
    }
    // Collect the shapes behind the selected figures.
    List<ROIShape> shapes = new ArrayList<ROIShape>();
    for (Figure figure : figures) {
        shapes.add(((ROIFigure) figure).getROIShape());
    }
    if (CollectionUtils.isEmpty(shapes))
        return;

    // Index the requested tags by ID (key -1 gathers not-yet-persisted tags).
    Multimap<Long, AnnotationData> requestedTags = ArrayListMultimap.create();
    for (AnnotationData annotation : tags) {
        requestedTags.put(annotation.getId(), annotation);
    }

    // Gather the saved shape data objects and the tags currently linked to them.
    List<DataObject> objects = new ArrayList<DataObject>();
    Map<Long, AnnotationData> existingTags = new HashMap<Long, AnnotationData>();
    for (ROIShape shape : shapes) {
        ShapeData shapeData = shape.getData();
        if (shapeData != null && shapeData.getId() > 0) {
            objects.add(shapeData);
            StructuredDataResults results = (StructuredDataResults) shape.getFigure()
                    .getAttribute(AnnotationKeys.TAG);
            if (results != null && CollectionUtils.isNotEmpty(results.getTags())) {
                for (TagAnnotationData tag : results.getTags()) {
                    if (!existingTags.containsKey(tag.getId())) {
                        existingTags.put(tag.getId(), tag);
                    }
                }
            }
        }
    }
    if (objects.isEmpty()) {
        UserNotifier un = MeasurementAgent.getRegistry().getUserNotifier();
        un.notifyInfo("ROI Annotations", "You must save the ROI before annotating it.");
        return;
    }

    //Now we prepare the list of annotations to add or remove
    List<AnnotationData> toAdd = new ArrayList<AnnotationData>();
    List<Object> toRemove = new ArrayList<Object>();
    // Tags without a persisted ID are always additions; removeAll both fetches
    // and clears them from the requested-tag multimap.
    if (CollectionUtils.isNotEmpty(requestedTags.get(-1L))) {
        toAdd.addAll(requestedTags.removeAll(-1L));
    }
    // Requested tags not yet linked to the shapes are additions.
    for (Entry<Long, AnnotationData> requested : requestedTags.entries()) {
        if (!existingTags.containsKey(requested.getKey())) {
            toAdd.add(requested.getValue());
        }
    }
    // Linked tags that were not requested this time are removals.
    for (Entry<Long, AnnotationData> existing : existingTags.entrySet()) {
        if (!requestedTags.containsKey(existing.getKey())) {
            toRemove.add(existing.getValue());
        }
    }
    model.fireAnnotationSaving(objects, toAdd, toRemove);
}

From source file:ai.grakn.graql.internal.reasoner.atom.binary.Relation.java

/**
 * Computes pairings between the relation players (castings) of this (child) relation
 * and those of the given parent relation, based on role and type compatibility.
 * Each returned pair is (child casting, parent casting).
 *
 * @param parentAtom the parent relation whose castings are matched against this one's
 * @return the set of established (child, parent) relation player pairings
 */
private Set<Pair<RelationPlayer, RelationPlayer>> getRelationPlayerMappings(Relation parentAtom) {
    Set<Pair<RelationPlayer, RelationPlayer>> rolePlayerMappings = new HashSet<>();

    //establish compatible castings for each parent casting
    Multimap<RelationPlayer, RelationPlayer> compatibleMappings = HashMultimap.create();
    // NOTE(review): return value discarded — presumably called for a caching
    // side effect; confirm, otherwise this line is dead code.
    parentAtom.getRoleRelationPlayerMap();
    Multimap<RoleType, RelationPlayer> childRoleRPMap = getRoleRelationPlayerMap();
    Map<Var, Type> parentVarTypeMap = parentAtom.getParentQuery().getVarTypeMap();
    Map<Var, Type> childVarTypeMap = this.getParentQuery().getVarTypeMap();

    Set<RoleType> relationRoles = new HashSet<>(getType().asRelationType().relates());
    Set<RoleType> childRoles = new HashSet<>(childRoleRPMap.keySet());

    // For each parent casting with an explicit role, find the child castings it
    // could map to and record them in compatibleMappings.
    parentAtom.getRelationPlayers().stream().filter(prp -> prp.getRoleType().isPresent()).forEach(prp -> {
        // Non-null here: the filter above guarantees the role type is present.
        VarPatternAdmin parentRoleTypeVar = prp.getRoleType().orElse(null);
        TypeLabel parentRoleTypeLabel = parentRoleTypeVar.getTypeLabel().orElse(null);

        //TODO take into account indirect roles
        RoleType parentRole = parentRoleTypeLabel != null ? graph().getType(parentRoleTypeLabel) : null;

        if (parentRole != null) {
            boolean isMetaRole = Schema.MetaSchema.isMetaLabel(parentRole.getLabel());
            Var parentRolePlayer = prp.getRolePlayer().getVarName();
            Type parentType = parentVarTypeMap.get(parentRolePlayer);

            // A meta role matches any child role; otherwise only sub-roles of
            // the parent role are candidates.
            Set<RoleType> compatibleChildRoles = isMetaRole ? childRoles
                    : Sets.intersection(new HashSet<>(parentRole.subTypes()), childRoles);

            // Narrow the candidates further by the role player's type, if known.
            if (parentType != null) {
                boolean isMetaType = Schema.MetaSchema.isMetaLabel(parentType.getLabel());
                Set<RoleType> typeRoles = isMetaType ? childRoles : new HashSet<>(parentType.plays());

                //incompatible type
                if (Sets.intersection(relationRoles, typeRoles).isEmpty())
                    compatibleChildRoles = new HashSet<>();
                else {
                    compatibleChildRoles = compatibleChildRoles.stream().filter(
                            rc -> Schema.MetaSchema.isMetaLabel(rc.getLabel()) || typeRoles.contains(rc))
                            .collect(toSet());
                }
            }

            // Record every child casting in a compatible role whose player type
            // does not clash with the parent player's type.
            compatibleChildRoles.stream().filter(childRoleRPMap::containsKey).forEach(r -> {
                Collection<RelationPlayer> childRPs = parentType != null
                        ? childRoleRPMap.get(r).stream().filter(rp -> {
                            Var childRolePlayer = rp.getRolePlayer().getVarName();
                            Type childType = childVarTypeMap.get(childRolePlayer);
                            return childType == null || !checkTypesDisjoint(parentType, childType);
                        }).collect(toSet())
                        : childRoleRPMap.get(r);

                childRPs.forEach(rp -> compatibleMappings.put(prp, rp));
            });
        }
    });

    //self-consistent procedure until no non-empty mappings present
    // Greedily pick the best remaining mapping each round. The three stacked
    // stable sorts make the LAST comparator primary (fewest alternatives),
    // then same-ID predicates, then equal types, as tie-breakers in reverse order.
    while (compatibleMappings.asMap().values().stream().filter(s -> !s.isEmpty()).count() > 0) {
        Map.Entry<RelationPlayer, RelationPlayer> entry = compatibleMappings.entries().stream()
                //prioritise mappings with equivalent types and unambiguous mappings
                .sorted(Comparator.comparing(e -> {
                    Type parentType = parentVarTypeMap.get(e.getKey().getRolePlayer().getVarName());
                    Type childType = childVarTypeMap.get(e.getValue().getRolePlayer().getVarName());
                    return !(parentType != null && childType != null && parentType.equals(childType));
                }))
                //prioritise mappings with same var substitution (id predicates)
                .sorted(Comparator.comparing(e -> {
                    IdPredicate parentId = parentAtom.getIdPredicates().stream()
                            .filter(p -> p.getVarName().equals(e.getKey().getRolePlayer().getVarName()))
                            .findFirst().orElse(null);
                    IdPredicate childId = getIdPredicates().stream()
                            .filter(p -> p.getVarName().equals(e.getValue().getRolePlayer().getVarName()))
                            .findFirst().orElse(null);
                    return !(parentId != null && childId != null
                            && parentId.getPredicate().equals(childId.getPredicate()));
                })).sorted(Comparator.comparing(e -> compatibleMappings.get(e.getKey()).size())).findFirst()
                .orElse(null);

        // Non-null: the loop condition guarantees at least one entry exists.
        RelationPlayer parentCasting = entry.getKey();
        RelationPlayer childCasting = entry.getValue();

        rolePlayerMappings.add(new Pair<>(childCasting, parentCasting));
        // Retire both castings: drop all alternatives for the parent casting and
        // remove the child casting from every other parent's candidate list.
        compatibleMappings.removeAll(parentCasting);
        compatibleMappings.values().remove(childCasting);

    }
    return rolePlayerMappings;
}