Example usage for com.google.common.collect Multimap putAll

Introduction

On this page you can find example usages of com.google.common.collect.Multimap.putAll.

Prototype

boolean putAll(Multimap<? extends K, ? extends V> multimap);

Document

Stores all key-value pairs of multimap in this multimap, in the order returned by multimap.entries().
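
Before the project examples below, here is a minimal self-contained sketch of the method's behavior; the class name PutAllDemo and the sample keys and values are illustrative only. It shows that the boolean result reports whether the receiving multimap changed, that duplicate key-value pairs collapse under a HashMultimap, and that an ArrayListMultimap keeps them.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;

public class PutAllDemo {
    public static void main(String[] args) {
        Multimap<String, String> source = ImmutableMultimap.of(
                "fruit", "apple",
                "fruit", "apple", // duplicate entry
                "fruit", "pear");

        // Set semantics: the duplicate "fruit" -> "apple" pair collapses.
        Multimap<String, String> asSet = HashMultimap.create();
        boolean changed = asSet.putAll(source);
        System.out.println(changed);                     // true - the multimap changed
        System.out.println(asSet.get("fruit").size());   // 2

        // List semantics: every entry is kept, in entries() order.
        Multimap<String, String> asList = ArrayListMultimap.create();
        asList.putAll(source);
        System.out.println(asList.get("fruit").size()); // 3

        // Re-adding the same pairs to the set-based multimap is a no-op.
        System.out.println(asSet.putAll(source));        // false
    }
}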

Usage

From source file: org.apache.crunch.impl.mr.plan.MSCRPlanner.java

public MRExecutor plan(Class<?> jarClass, Configuration conf) throws IOException {

    DotfileUtil dotfileUtil = new DotfileUtil(jarClass, conf);

    // Generate the debug lineage dotfiles (if configuration is enabled)
    dotfileUtil.buildLineageDotfile(outputs);

    Map<PCollectionImpl<?>, Set<Target>> targetDeps = Maps.newTreeMap(DEPTH_COMPARATOR);
    for (PCollectionImpl<?> pcollect : outputs.keySet()) {
        targetDeps.put(pcollect, pcollect.getTargetDependencies());
    }

    Multimap<Target, JobPrototype> assignments = HashMultimap.create();

    while (!targetDeps.isEmpty()) {
        Set<Target> allTargets = Sets.newHashSet();
        for (PCollectionImpl<?> pcollect : targetDeps.keySet()) {
            allTargets.addAll(outputs.get(pcollect));
        }
        GraphBuilder graphBuilder = new GraphBuilder();

        // Walk the current plan tree and build a graph in which the vertices are
        // sources, targets, and GBK operations.
        Set<PCollectionImpl<?>> currentStage = Sets.newHashSet();
        for (PCollectionImpl<?> output : targetDeps.keySet()) {
            Set<Target> deps = Sets.intersection(allTargets, targetDeps.get(output));
            if (deps.isEmpty()) {
                graphBuilder.visitOutput(output);
                currentStage.add(output);
            }
        }

        Graph baseGraph = graphBuilder.getGraph();
        boolean hasInputs = false;
        for (Vertex v : baseGraph) {
            if (v.isInput()) {
                hasInputs = true;
                break;
            }
        }
        if (!hasInputs) {
            LOG.warn("No input sources for pipeline, nothing to do...");
            return new MRExecutor(conf, jarClass, outputs, toMaterialize, appendedTargets, pipelineCallables);
        }

        // Create a new graph that splits up dependent GBK nodes.
        Graph graph = prepareFinalGraph(baseGraph);

        // Break the graph up into connected components.
        List<List<Vertex>> components = graph.connectedComponents();

        // Generate the debug graph dotfiles (if configuration is enabled)
        dotfileUtil.buildBaseGraphDotfile(outputs, graph);
        dotfileUtil.buildSplitGraphDotfile(outputs, graph, components);

        // For each component, we will create one or more job prototypes,
        // depending on its profile.
        // For dependency handling, we only need to care about which
        // job prototype a particular GBK is assigned to.
        Multimap<Vertex, JobPrototype> newAssignments = HashMultimap.create();
        for (List<Vertex> component : components) {
            newAssignments.putAll(constructJobPrototypes(component));
        }

        // Add in the job dependency information here.
        for (Map.Entry<Vertex, JobPrototype> e : newAssignments.entries()) {
            JobPrototype current = e.getValue();
            for (Vertex parent : graph.getParents(e.getKey())) {
                for (JobPrototype parentJobProto : newAssignments.get(parent)) {
                    current.addDependency(parentJobProto);
                }
            }
        }

        ImmutableMultimap<Target, JobPrototype> previousStages = ImmutableMultimap.copyOf(assignments);
        for (Map.Entry<Vertex, JobPrototype> e : newAssignments.entries()) {
            if (e.getKey().isOutput()) {
                PCollectionImpl<?> pcollect = e.getKey().getPCollection();
                JobPrototype current = e.getValue();

                // Add in implicit dependencies via SourceTargets that are read into memory
                for (Target pt : pcollect.getTargetDependencies()) {
                    for (JobPrototype parentJobProto : assignments.get(pt)) {
                        current.addDependency(parentJobProto);
                    }
                }

                // Add this to the set of output assignments
                for (Target t : outputs.get(pcollect)) {
                    assignments.put(t, e.getValue());
                }
            } else {
                Source source = e.getKey().getSource();
                if (source != null && source instanceof Target) {
                    JobPrototype current = e.getValue();
                    Collection<JobPrototype> parentJobPrototypes = previousStages.get((Target) source);
                    if (parentJobPrototypes != null) {
                        for (JobPrototype parentJobProto : parentJobPrototypes) {
                            current.addDependency(parentJobProto);
                        }
                    }
                }
            }
        }

        // Remove completed outputs and mark materialized output locations
        // for subsequent job processing.
        for (PCollectionImpl<?> output : currentStage) {
            if (toMaterialize.containsKey(output)) {
                MaterializableIterable mi = toMaterialize.get(output);
                if (mi.isSourceTarget()) {
                    output.materializeAt((SourceTarget) mi.getSource());
                }
            }
            targetDeps.remove(output);
        }
    }

    // Finally, construct the jobs from the prototypes and return.
    MRExecutor exec = new MRExecutor(conf, jarClass, outputs, toMaterialize, appendedTargets,
            pipelineCallables);

    // Generate the debug Plan dotfiles
    dotfileUtil.buildPlanDotfile(exec, assignments, pipeline, lastJobID);

    for (JobPrototype proto : Sets.newHashSet(assignments.values())) {
        exec.addJob(proto.getCrunchJob(jarClass, conf, pipeline, lastJobID));
    }

    // Generate the debug RTNode dotfiles (if configuration is enabled)
    dotfileUtil.buildRTNodesDotfile(exec);

    // Attach the dotfiles to the MRExecutor context
    dotfileUtil.addDotfilesToContext(exec);

    return exec;
}
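
The planner above folds each component's assignments into one multimap with putAll and then walks entries() to wire job dependencies. A stripped-down sketch of that merge-then-iterate pattern, using String and Integer as stand-ins for the Crunch Vertex and JobPrototype types:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.List;
import java.util.Map;

class MergeAssignments {
    // Stand-in for constructJobPrototypes(component): one fabricated prototype id per vertex.
    static Multimap<String, Integer> prototypesFor(List<String> component) {
        Multimap<String, Integer> m = HashMultimap.create();
        for (String vertex : component) {
            m.put(vertex, component.size());
        }
        return m;
    }

    static void plan(List<List<String>> components) {
        Multimap<String, Integer> newAssignments = HashMultimap.create();
        for (List<String> component : components) {
            // putAll folds each per-component multimap into the combined view.
            newAssignments.putAll(prototypesFor(component));
        }
        // entries() exposes every (vertex, prototype) pair for dependency wiring.
        for (Map.Entry<String, Integer> e : newAssignments.entries()) {
            System.out.println(e.getKey() + " -> job " + e.getValue());
        }
    }
}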

From source file: uk.ac.ebi.metabolomes.webservices.EUtilsWebServiceConnection.java

/**
 * Queries the NCBI EUtils web service through elink.fcgi to retrieve all the associations that exist in a database
 * (dbTo) for the identifiers provided for an initial, different database (dbFrom). The one-to-many associations are
 * stored in the returned Multimap. No more than 5,000 ids should be submitted at once.
 *
 * @param dbFromIds    the list of string identifiers to search for
 * @param dbFrom       the database in Entrez corresponding to those identifiers
 * @param dbTo         the database where we want to find hits
 * @param addTerm      optional extra query parameter name; ignored when null
 * @param addTermValue optional extra query parameter value; ignored when null
 * @return             multimap with all the one-to-many associations, fromDB identifiers (keys) -to- toDB identifiers (values)
 * @throws WebServiceException if more than the allowed number of entries is submitted
 */
public Multimap<String, String> getDBToIDsFromDBFromIDs(List<String> dbFromIds, EntrezDB dbFrom, EntrezDB dbTo,
        String addTerm, String addTermValue) throws WebServiceException {
    checkNumberOfSubmittedEntries(dbFromIds);
    WebResource webRes = client.resource(baseURL + "elink.fcgi");
    MultivaluedMap queryParams = new MultivaluedMapImpl();
    queryParams.add("dbfrom", dbFrom.toString());
    queryParams.add("db", dbTo.toString());
    if (addTerm != null && addTermValue != null)
        queryParams.add(addTerm, addTermValue);
    for (String id : dbFromIds) {
        queryParams.add("id", id);
    }

    ClientResponse resp = submitPost(webRes, queryParams);

    if (resp.getStatus() != 200) {
        throw new RuntimeException("Failed : HTTP error code : " + resp.getStatus());
    }
    Multimap<String, String> res = HashMultimap.create();
    // the mapping should be one-to-many, but there should not be any replicated entries
    ELinkXMLResponseParser elinkXMLResponseParser = new ELinkXMLResponseParser();
    try {
        res.putAll(elinkXMLResponseParser.parseLinkSetBlock(resp.getEntityInputStream()));
    } catch (XMLStreamException ex) {
        LOGGER.warn("Could not parse output XML adequately...", ex);
    }

    return res;
}
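
The shape worth noting here: the parser builds and returns its own Multimap, and the caller folds it in with putAll inside the try block, so a parse failure leaves the result empty rather than half-populated. A hedged sketch of that pattern (parseBlock is a hypothetical parser, not the ELinkXMLResponseParser API):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;

class ParseAndMerge {
    // Hypothetical parser: returns a fully built multimap or throws.
    static Multimap<String, String> parseBlock(String payload) throws Exception {
        if (payload == null) {
            throw new Exception("nothing to parse");
        }
        return ImmutableMultimap.of("CID123", "ChEBI:15422");
    }

    static Multimap<String, String> collect(String payload) {
        Multimap<String, String> res = HashMultimap.create();
        try {
            // putAll only runs once parseBlock has produced a complete multimap,
            // so a parse error never leaves res half-populated.
            res.putAll(parseBlock(payload));
        } catch (Exception ex) {
            // log and fall through with an empty result, as the example above does
        }
        return res;
    }
}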

From source file: org.sosy_lab.cpachecker.cpa.bam.BAMCPAStatistics.java

private void exportAllReachedSets(final Path superArgFile, final PathTemplate indexedFile,
        final UnmodifiableReachedSet mainReachedSet) {

    if (superArgFile != null) {

        final Set<UnmodifiableReachedSet> allReachedSets = new HashSet<>();
        allReachedSets.addAll(cache.getAllCachedReachedStates());
        allReachedSets.add(mainReachedSet);

        final Set<ARGState> rootStates = new HashSet<>();
        final Multimap<ARGState, ARGState> connections = HashMultimap.create();

        for (final UnmodifiableReachedSet reachedSet : allReachedSets) {
            ARGState rootState = (ARGState) reachedSet.getFirstState();
            rootStates.add(rootState);
            Multimap<ARGState, ARGState> localConnections = HashMultimap.create();
            getConnections(rootState, localConnections);
            connections.putAll(localConnections);

            // dump small graph
            writeArg(indexedFile.getPath(((ARGState) reachedSet.getFirstState()).getStateId()),
                    localConnections, Collections.singleton((ARGState) reachedSet.getFirstState()));
        }

        // dump super-graph
        writeArg(superArgFile, connections, rootStates);
    }
}

From source file: org.onehippo.cms7.essentials.dashboard.packaging.DefaultInstructionPackage.java

@Override
public Multimap<MessageGroup, ? extends Restful> getInstructionsMessages(final PluginContext context) {
    final Instructions myInstructions = getInstructions();
    if (myInstructions == null) {
        return ArrayListMultimap.create();

    }
    final Set<InstructionSet> instructionSets = myInstructions.getInstructionSets();
    final InstructionExecutor executor = new PluginInstructionExecutor();
    final Set<String> myGroupNames = groupNames();
    final Multimap<MessageGroup, Restful> instructionsMessages = ArrayListMultimap.create();
    for (InstructionSet instructionSet : instructionSets) {
        final Set<String> groups = instructionSet.getGroups();
        for (String group : groups) {
            // execute only our group(s)
            if (myGroupNames.contains(group)) {
                final Multimap<MessageGroup, Restful> instr = executor.getInstructionsMessages(instructionSet,
                        context);
                instructionsMessages.putAll(instr);

            } else {
                log.debug("Skipping instruction group for name: [{}]", group);
            }
        }
    }
    return instructionsMessages;
}
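
The choice of ArrayListMultimap matters here: repeated putAll calls append, so the same message can legitimately appear once per instruction set that produced it. A small sketch of that accumulation, with plain strings standing in for MessageGroup and Restful:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;

class AccumulateMessages {
    public static void main(String[] args) {
        Multimap<String, String> fromSetA = ImmutableMultimap.of("EXECUTE", "created file x");
        Multimap<String, String> fromSetB = ImmutableMultimap.of("EXECUTE", "created file x");

        Multimap<String, String> messages = ArrayListMultimap.create();
        messages.putAll(fromSetA);
        messages.putAll(fromSetB);

        // List semantics keep both occurrences; a HashMultimap would collapse them.
        System.out.println(messages.get("EXECUTE").size()); // 2
    }
}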

From source file: org.apache.tez.history.parser.datamodel.DagInfo.java

/**
 * Get the containers used for this DAG.
 *
 * @return Multimap<Container, TaskAttemptInfo> with task attempt details for every container
 */
public final Multimap<Container, TaskAttemptInfo> getContainersToTaskAttemptMapping() {
    List<VertexInfo> vertexInfoList = getVertices();
    Multimap<Container, TaskAttemptInfo> containerMapping = LinkedHashMultimap.create();

    for (VertexInfo vertexInfo : vertexInfoList) {
        containerMapping.putAll(vertexInfo.getContainersMapping());
    }
    return Multimaps.unmodifiableMultimap(containerMapping);
}
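
This method shows an aggregate-then-freeze idiom: per-vertex mappings are merged with putAll into a LinkedHashMultimap (which preserves insertion order) and callers receive an unmodifiable view. A sketch with placeholder String types instead of Container and TaskAttemptInfo:

import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
import java.util.List;

class AggregateAndFreeze {
    // Stand-in for vertexInfo.getContainersMapping().
    static Multimap<String, String> containersFor(String vertex) {
        return ImmutableMultimap.of("container-1", vertex + ":attempt_0");
    }

    static Multimap<String, String> mapping(List<String> vertices) {
        Multimap<String, String> merged = LinkedHashMultimap.create();
        for (String vertex : vertices) {
            merged.putAll(containersFor(vertex));
        }
        // Callers get a read-only view; writes would throw UnsupportedOperationException.
        return Multimaps.unmodifiableMultimap(merged);
    }
}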

From source file: com.palantir.atlasdb.keyvalue.impl.TieredKeyValueService.java

@Override
public Multimap<Cell, Long> getAllTimestamps(final String tableName, final Set<Cell> cells,
        final long timestamp) {
    if (isNotTiered(tableName)) {
        return primary.getAllTimestamps(tableName, cells, timestamp);
    }
    Multimap<Cell, Long> primaryResults = primary.getAllTimestamps(tableName, cells, timestamp);
    Multimap<Cell, Long> results = HashMultimap.create(secondary.getAllTimestamps(tableName, cells, timestamp));
    results.putAll(primaryResults);
    return results;
}
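
The tiered lookup seeds a mutable copy from the secondary tier via HashMultimap.create(multimap), then overlays the primary tier with putAll; under set semantics a timestamp present in both tiers is stored once. A minimal sketch with fabricated cell names and timestamps:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;

class TieredMerge {
    public static void main(String[] args) {
        Multimap<String, Long> secondary = ImmutableMultimap.of("cellA", 10L, "cellA", 20L);
        Multimap<String, Long> primary = ImmutableMultimap.of("cellA", 20L, "cellA", 30L);

        // Copy-constructor seeds the result; putAll overlays the other tier.
        Multimap<String, Long> results = HashMultimap.create(secondary);
        results.putAll(primary);

        // Set semantics: 20L from both tiers is stored once -> {10, 20, 30}.
        System.out.println(results.get("cellA").size()); // 3
    }
}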

From source file: brooklyn.entity.nosql.cassandra.CassandraFabricImpl.java

protected Multimap<String, Entity> calculateDatacenterUsage() {
    Multimap<String, Entity> result = LinkedHashMultimap.<String, Entity>create();
    for (CassandraDatacenter member : Iterables.filter(getMembers(), CassandraDatacenter.class)) {
        Multimap<String, Entity> memberUsage = member.getAttribute(CassandraDatacenter.DATACENTER_USAGE);
        if (memberUsage != null)
            result.putAll(memberUsage);
    }
    return result;
}

From source file: org.eclipse.xtext.xtext.OverriddenValueInspector.java

@Override
public Boolean caseAlternatives(Alternatives object) {
    Multimap<String, AbstractElement> prevAssignedFeatures = assignedFeatures;
    Multimap<String, AbstractElement> mergedAssignedFeatures = LinkedHashMultimap.create();
    Set<AbstractRule> prevPermanentlyVisited = permanentlyVisited;
    Set<AbstractRule> mergedPermanentlyVisited = Sets.newHashSet();
    boolean allAborted = true;
    for (AbstractElement element : object.getElements()) {
        assignedFeatures = newMultimap(prevAssignedFeatures);
        permanentlyVisited = Sets.newHashSet(prevPermanentlyVisited);
        if (!doSwitch(element)) {
            allAborted = false;
        }
        mergedAssignedFeatures.putAll(assignedFeatures);
        mergedPermanentlyVisited.addAll(prevPermanentlyVisited);
    }
    if (GrammarUtil.isOptionalCardinality(object)) {
        mergedAssignedFeatures.putAll(prevAssignedFeatures);
    }
    assignedFeatures = mergedAssignedFeatures;
    if (!allAborted && GrammarUtil.isMultipleCardinality(object)) {
        prevAssignedFeatures = assignedFeatures;
        for (AbstractElement element : object.getElements()) {
            assignedFeatures = newMultimap(prevAssignedFeatures);
            permanentlyVisited = Sets.newHashSet(prevPermanentlyVisited);
            doSwitch(element);
            mergedAssignedFeatures.putAll(assignedFeatures);
        }
        assignedFeatures = mergedAssignedFeatures;
    }
    permanentlyVisited = mergedPermanentlyVisited;
    return Boolean.FALSE;
}
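
The inspector explores each alternative against a fresh copy of the assigned-features multimap and folds every branch back together with putAll; copy in, merge out is what keeps the branches independent. A compact sketch of that pattern (copyOf mirrors the inspector's newMultimap helper, which is not shown above):

import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import java.util.List;

class BranchAndMerge {
    // Hypothetical copy helper, mirroring the inspector's newMultimap(...).
    static <K, V> Multimap<K, V> copyOf(Multimap<K, V> source) {
        return LinkedHashMultimap.create(source);
    }

    static Multimap<String, String> explore(Multimap<String, String> assigned,
                                            List<String> alternatives) {
        Multimap<String, String> merged = LinkedHashMultimap.create();
        for (String alt : alternatives) {
            // Each branch mutates its own copy, so siblings cannot see each other's writes.
            Multimap<String, String> branch = copyOf(assigned);
            branch.put(alt, "feature-assigned-in-" + alt);
            merged.putAll(branch);
        }
        return merged; // the union of what every alternative assigned
    }
}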

From source file: org.apache.shindig.gadgets.http.HttpResponse.java

/**
 * Construct an HttpResponse from a builder (called by HttpResponseBuilder.create).
 */
HttpResponse(HttpResponseBuilder builder) {
    httpStatusCode = builder.getHttpStatusCode();
    Multimap<String, String> headerCopy = HttpResponse.newHeaderMultimap();

    // Always safe, HttpResponseBuilder won't modify the body.
    responseBytes = builder.getResponse();

    // Copy headers after builder.getResponse(), since that can modify Content-Type.
    headerCopy.putAll(builder.getHeaders());

    Map<String, String> metadataCopy = Maps.newHashMap(builder.getMetadata());
    metadata = Collections.unmodifiableMap(metadataCopy);

    // We want to modify the headers to ensure that the proper Content-Type and Date headers
    // have been set. This allows us to avoid these expensive calculations from the cache.
    date = getAndUpdateDate(headerCopy);
    encoding = getAndUpdateEncoding(headerCopy, responseBytes);
    headers = Multimaps.unmodifiableMultimap(headerCopy);
}

From source file: edu.buaa.satla.analysis.core.algorithm.PredicatedAnalysisAlgorithm.java

private Precision buildInitialPrecision(Collection<Precision> precisions, Precision initialPrecision)
        throws InterruptedException, RefinementFailedException {
    if (precisions.size() == 0) {
        return initialPrecision;
    }

    Multimap<Pair<CFANode, Integer>, AbstractionPredicate> locationInstancePreds = HashMultimap.create();
    Multimap<CFANode, AbstractionPredicate> localPreds = HashMultimap.create();
    Multimap<String, AbstractionPredicate> functionPreds = HashMultimap.create();
    Collection<AbstractionPredicate> globalPreds = new HashSet<>();

    Collection<PredicatePrecision> seenPrecisions = new HashSet<>();

    // add initial precision
    PredicatePrecision predPrec = Precisions.extractPrecisionByType(initialPrecision, PredicatePrecision.class);
    locationInstancePreds.putAll(predPrec.getLocationInstancePredicates());
    localPreds.putAll(predPrec.getLocalPredicates());
    functionPreds.putAll(predPrec.getFunctionPredicates());
    globalPreds.addAll(predPrec.getGlobalPredicates());

    seenPrecisions.add(predPrec);

    // add further precision information obtained during refinement
    for (Precision nextPrec : precisions) {
        predPrec = Precisions.extractPrecisionByType(nextPrec, PredicatePrecision.class);

        shutdownNotifier.shutdownIfNecessary();

        if (!seenPrecisions.contains(predPrec)) {
            seenPrecisions.add(predPrec);
            locationInstancePreds.putAll(predPrec.getLocationInstancePredicates());
            localPreds.putAll(predPrec.getLocalPredicates());
            functionPreds.putAll(predPrec.getFunctionPredicates());
            globalPreds.addAll(predPrec.getGlobalPredicates());
        }
    }

    // construct new predicate precision
    PredicatePrecision newPredPrec = new PredicatePrecision(locationInstancePreds, localPreds, functionPreds,
            globalPreds);

    try {
        // ensure that refinement fails if the same path is encountered twice and the precision was not refined on that path
        if (repeatedFailure && noNewPredicates(oldPrecision, newPredPrec)) {
            throw new RefinementFailedException(Reason.RepeatedCounterexample, pathToFailure);
        }
    } catch (SolverException e) {
        throw new RefinementFailedException(Reason.InterpolationFailed, pathToFailure, e);
    }

    return Precisions.replaceByType(initialPrecision, newPredPrec, PredicatePrecision.class);
}
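
The refinement code merges many precision objects by folding their predicate multimaps together with putAll, skipping precisions it has already seen. A distilled sketch of that dedup-and-merge loop, with String multimaps standing in for the PredicatePrecision getters:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class MergePrecisions {
    static Multimap<String, String> merge(List<Multimap<String, String>> precisions) {
        Multimap<String, String> localPreds = HashMultimap.create();
        Set<Multimap<String, String>> seen = new HashSet<>();
        for (Multimap<String, String> prec : precisions) {
            // Skip precisions already merged; putAll would be a no-op for a
            // HashMultimap anyway, but the seen-set avoids the wasted work.
            if (seen.add(prec)) {
                localPreds.putAll(prec);
            }
        }
        return localPreds;
    }

    public static void main(String[] args) {
        Multimap<String, String> p = ImmutableMultimap.of("main", "x > 0");
        System.out.println(merge(List.of(p, p)).size()); // 1
    }
}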