Example usage for com.google.common.collect Multimap asMap

Introduction

This page lists example usages of the com.google.common.collect.Multimap method asMap, drawn from open source projects.

Prototype

Map<K, Collection<V>> asMap();

Document

Returns a view of this multimap as a Map from each distinct key to the nonempty collection of that key's associated values.
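
As a quick illustration before the full examples (a minimal sketch, not taken from any of the projects below), asMap() returns a live view: changes made through the view write through to the multimap.

import java.util.Collection;
import java.util.Map;

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class AsMapDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> multimap = ArrayListMultimap.create();
        multimap.put("a", 1);
        multimap.put("a", 2);
        multimap.put("b", 3);

        // The view maps each distinct key to its nonempty collection of values.
        Map<String, Collection<Integer>> view = multimap.asMap();
        System.out.println(view); // typically {a=[1, 2], b=[3]} (key order is not guaranteed)

        // The view is live: removing a key through it also updates the multimap.
        view.remove("b");
        System.out.println(multimap.containsKey("b")); // false
    }
}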

Usage

From source file:com.facebook.presto.accumulo.index.IndexLookup.java
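
Here asMap() groups the scan ranges by column constraint; one batch scan is issued per constraint, and the per-column results are intersected via retainAll to produce the final row ID ranges.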

private List<Range> getIndexRanges(String indexTable,
        Multimap<AccumuloColumnConstraint, Range> constraintRanges, Collection<Range> rowIDRanges,
        Authorizations auths) throws TableNotFoundException {
    Set<Range> finalRanges = null;
    // For each column/constraint pair
    for (Entry<AccumuloColumnConstraint, Collection<Range>> constraintEntry : constraintRanges.asMap()
            .entrySet()) {
        // Create a batch scanner against the index table, setting the ranges
        BatchScanner scanner = connector.createBatchScanner(indexTable, auths, 10);
        scanner.setRanges(constraintEntry.getValue());

        // Fetch the column family for this specific column
        Text family = new Text(getIndexColumnFamily(constraintEntry.getKey().getFamily().getBytes(UTF_8),
                constraintEntry.getKey().getQualifier().getBytes(UTF_8)).array());
        scanner.fetchColumnFamily(family);

        // For each entry in the scanner
        Text tmpQualifier = new Text();
        Set<Range> columnRanges = new HashSet<>();
        for (Entry<Key, Value> entry : scanner) {
            entry.getKey().getColumnQualifier(tmpQualifier);

            // Add to our column ranges if it is in one of the row ID ranges
            if (inRange(tmpQualifier, rowIDRanges)) {
                columnRanges.add(new Range(tmpQualifier));
            }
        }

        LOG.debug("Retrieved %d ranges for column %s", columnRanges.size(), constraintEntry.getKey().getName());

        // If finalRanges is null, we have not yet added any column ranges
        if (finalRanges == null) {
            finalRanges = new HashSet<>();
            finalRanges.addAll(columnRanges);
        } else {
            // Retain only the row IDs for this column that have already been added
            // This is your set intersection operation!
            finalRanges.retainAll(columnRanges);
        }

        // Close the scanner
        scanner.close();
    }

    // Return the final ranges for all constraint pairs
    if (finalRanges != null) {
        return ImmutableList.copyOf(finalRanges);
    } else {
        return ImmutableList.of();
    }
}

From source file:net.myrrix.online.eval.AbstractEvaluator.java
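
This example iterates asMap().entrySet() to handle each user's preferences as one collection, writing a training split to a file and keeping the remainder as test data.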

private Multimap<Long, RecommendedItem> split(File dataDir, File trainingFile, double trainPercentage,
        double evaluationPercentage, RescorerProvider provider) throws IOException {

    DataFileContents dataFileContents = readDataFile(dataDir, evaluationPercentage, provider);

    Multimap<Long, RecommendedItem> data = dataFileContents.getData();
    log.info("Read data for {} users from input; splitting...", data.size());

    Multimap<Long, RecommendedItem> testData = ArrayListMultimap.create();
    Writer trainingOut = IOUtils.buildGZIPWriter(trainingFile);
    try {

        Iterator<Map.Entry<Long, Collection<RecommendedItem>>> it = data.asMap().entrySet().iterator();
        while (it.hasNext()) {

            Map.Entry<Long, Collection<RecommendedItem>> entry = it.next();
            long userID = entry.getKey();
            List<RecommendedItem> userPrefs = Lists.newArrayList(entry.getValue());
            it.remove();

            if (isSplitTestByPrefValue()) {
                // Sort low to high, leaving high values at end for testing as "relevant" items
                Collections.sort(userPrefs, ByValueAscComparator.INSTANCE);
            }
            // else leave sorted in time order

            int numTraining = FastMath.max(1, (int) (trainPercentage * userPrefs.size()));
            for (RecommendedItem rec : userPrefs.subList(0, numTraining)) {
                trainingOut.write(Long.toString(userID));
                trainingOut.write(DELIMITER);
                trainingOut.write(Long.toString(rec.getItemID()));
                trainingOut.write(DELIMITER);
                trainingOut.write(Float.toString(rec.getValue()));
                trainingOut.write('\n');
            }

            for (RecommendedItem rec : userPrefs.subList(numTraining, userPrefs.size())) {
                testData.put(userID, rec);
            }

        }

        // All tags go in training data
        for (Map.Entry<String, RecommendedItem> entry : dataFileContents.getItemTags().entries()) {
            trainingOut.write(entry.getKey());
            trainingOut.write(DELIMITER);
            trainingOut.write(Long.toString(entry.getValue().getItemID()));
            trainingOut.write(DELIMITER);
            trainingOut.write(Float.toString(entry.getValue().getValue()));
            trainingOut.write('\n');
        }
        for (Map.Entry<String, RecommendedItem> entry : dataFileContents.getUserTags().entries()) {
            trainingOut.write(Long.toString(entry.getValue().getItemID()));
            trainingOut.write(DELIMITER);
            trainingOut.write(entry.getKey());
            trainingOut.write(DELIMITER);
            trainingOut.write(Float.toString(entry.getValue().getValue()));
            trainingOut.write('\n');
        }

    } finally {
        trainingOut.close(); // Propagate close failures: a failed close may mean the output was not fully written
    }

    log.info("{} users in test data", testData.size());

    return testData;
}

From source file:eu.itesla_project.online.tools.RunImpactAnalysisOnStateTool.java
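
Here Multimaps.index builds a multimap of security indexes keyed by contingency ID, and asMap() drives the rendering of one table row per contingency.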

@Override
public void run(CommandLine line) throws Exception {
    String workflowId = line.getOptionValue("workflow");
    Integer stateId = Integer.valueOf(line.getOptionValue("state"));
    Set<String> contingencyIds = null;
    if (line.hasOption("contingencies")) {
        contingencyIds = Sets.newHashSet(line.getOptionValue("contingencies").split(","));
    }
    System.out.println("loading state " + stateId + " of workflow " + workflowId + " from the online db ...");
    OnlineConfig config = OnlineConfig.load();
    OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create();
    // load the network
    Network network = onlinedb.getState(workflowId, stateId);
    if (network != null) {
        ComputationManager computationManager = new LocalComputationManager();
        ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
                .newInstance().create();
        SimulatorFactory simulatorFactory = config.getSimulatorFactoryClass().newInstance();
        Stabilization stabilization = simulatorFactory.createStabilization(network, computationManager, 0);
        ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(network, computationManager, 0,
                contingencyDb);
        Map<String, Object> initContext = new HashMap<>();
        SimulationParameters simulationParameters = SimulationParameters.load();
        stabilization.init(simulationParameters, initContext);
        impactAnalysis.init(simulationParameters, initContext);
        System.out.println("running stabilization simulation...");
        StabilizationResult sr = stabilization.run();
        System.out.println("stabilization status: " + sr.getStatus());
        if (sr.getStatus() == StabilizationStatus.COMPLETED) {
            System.out.println("running impact analysis...");
            ImpactAnalysisResult iar = impactAnalysis.run(sr.getState(), contingencyIds);

            Table table = new Table(1 + SecurityIndexType.values().length, BorderStyle.CLASSIC_WIDE);
            table.addCell("Contingency");
            for (SecurityIndexType securityIndexType : SecurityIndexType.values()) {
                table.addCell(securityIndexType.toString());
            }

            Multimap<String, SecurityIndex> securityIndexesPerContingency = Multimaps
                    .index(iar.getSecurityIndexes(), new Function<SecurityIndex, String>() {
                        @Override
                        public String apply(SecurityIndex securityIndex) {
                            return securityIndex.getId().getContingencyId();
                        }
                    });
            for (Map.Entry<String, Collection<SecurityIndex>> entry : securityIndexesPerContingency.asMap()
                    .entrySet()) {
                String contingencyId = entry.getKey();

                table.addCell(contingencyId);

                Map<SecurityIndexType, Boolean> ok = new EnumMap<>(SecurityIndexType.class);
                for (SecurityIndex securityIndex : entry.getValue()) {
                    ok.put(securityIndex.getId().getSecurityIndexType(), securityIndex.isOk());
                }

                for (SecurityIndexType securityIndexType : SecurityIndexType.values()) {
                    Boolean b = ok.get(securityIndexType);
                    String str;
                    if (b == null) {
                        str = "NA";
                    } else {
                        str = b ? "OK" : "NOK";
                    }
                    table.addCell(str);
                }
            }
            System.out.println(table.render());
        } else {
            System.out.println("Error running stabilization -  metrics = " + sr.getMetrics());
        }
    } else {
        System.out.println("no state " + stateId + " of workflow " + workflowId + " stored in the online db");
    }
    onlinedb.close();
}

From source file:org.napile.compiler.lang.resolve.processors.TypeHierarchyResolver.java
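
asMap() exposes, per type constructor, the full collection of substituted types, so that inconsistent type parameter values across supertypes can be detected and reported.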

private void checkSupertypesForConsistency() {
    for (MutableClassDescriptor mutableClassDescriptor : topologicalOrder) {
        Multimap<TypeConstructor, NapileType> multimap = SubstitutionUtils
                .buildDeepSubstitutionMultimap(mutableClassDescriptor.getDefaultType());
        for (Map.Entry<TypeConstructor, Collection<NapileType>> entry : multimap.asMap().entrySet()) {
            Collection<NapileType> projections = entry.getValue();
            if (projections.size() > 1) {
                TypeConstructor typeConstructor = entry.getKey();
                DeclarationDescriptor declarationDescriptor = typeConstructor.getDeclarationDescriptor();
                assert declarationDescriptor instanceof TypeParameterDescriptor : declarationDescriptor;
                TypeParameterDescriptor typeParameterDescriptor = (TypeParameterDescriptor) declarationDescriptor;

                // Immediate arguments of supertypes cannot be projected
                Set<NapileType> conflictingTypes = Sets.newLinkedHashSet();
                for (NapileType projection : projections) {
                    conflictingTypes.add(projection);
                }

                if (conflictingTypes.size() > 1) {
                    DeclarationDescriptor containingDeclaration = typeParameterDescriptor
                            .getContainingDeclaration();
                    assert containingDeclaration instanceof ClassDescriptor : containingDeclaration;
                    NapileClassLike psiElement = (NapileClassLike) BindingTraceUtil
                            .classDescriptorToDeclaration(trace, mutableClassDescriptor);
                    NapileElement extendTypeListElement = psiElement.getSuperTypesElement();
                    assert extendTypeListElement != null;
                    trace.report(Errors.INCONSISTENT_TYPE_PARAMETER_VALUES.on(extendTypeListElement,
                            typeParameterDescriptor, (ClassDescriptor) containingDeclaration,
                            conflictingTypes));
                }
            }
        }
    }
}

From source file:org.openflexo.dg.action.ReinjectDocx.java
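
Here asMap() lets the reinjection walk each EditionPatternInstance together with the collection of parsed docx elements that target it.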

private void reinjectEPI(IParsedDocx parsedDocx) {
    Multimap<EditionPatternInstance, IParsedFlexoEPI> epis = lookUpEditionPatternInstances(parsedDocx);
    Multimap<EditionPatternInstance, IParsedFlexoEPI> episToReinject = removeConflictingParsedDocX(epis);
    for (Entry<EditionPatternInstance, Collection<IParsedFlexoEPI>> e : episToReinject.asMap().entrySet()) {
        EditionPatternInstance epi = e.getKey();
        for (IParsedFlexoEPI parsedFlexoEPI : e.getValue()) {
            setSecondaryProgress(FlexoLocalization.localizedForKey("reinjecting_edition_pattern_value") + " "
                    + parsedFlexoEPI.getValue());
            boolean result = epi.setBindingValue(parsedFlexoEPI.getBindingPath(), parsedFlexoEPI.getValue());
            if (result) {
                numberOfEPIUpdated++;
            }
        }
    }
}

From source file:com.android.build.gradle.internal.incremental.StringSwitch.java
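
asMap() converts the hash-code-to-string buckets into map entries that can be sorted by hash before emitting the lookup switch.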

/**
 * Emit code for a string switch for the given string classifier.
 *
 * switch(s.hashCode()) {
 *   case 192: visitCase(s);
 *   case 312: visitCase(s);
 *   case 1024:
 *     if (s.equals("collided_method1")) {
 *         visit(s);
 *     } else if (s.equals("collided_method2")) {
 *         visit(s);
 *     }
 *     visitDefault();
 *   default:
 *     visitDefault();
 * }
 *
 **/
private void visitClassifier(GeneratorAdapter mv, Set<String> strings) {
    visitString();
    visitHashMethod(mv);

    // Group strings by hash code.
    Multimap<Integer, String> buckets = Multimaps.index(strings, HASH_METHOD);
    List<Map.Entry<Integer, Collection<String>>> sorted = Ordering.natural()
            .onResultOf(new Function<Map.Entry<Integer, Collection<String>>, Integer>() {
                @Override
                public Integer apply(Map.Entry<Integer, Collection<String>> entry) {
                    return entry.getKey();
                }
            }).immutableSortedCopy(buckets.asMap().entrySet());

    int[] sortedHashes = new int[sorted.size()];
    List<String>[] sortedCases = new List[sorted.size()];
    int index = 0;
    for (Map.Entry<Integer, Collection<String>> entry : sorted) {
        sortedHashes[index] = entry.getKey();
        sortedCases[index] = Lists.newCopyOnWriteArrayList(entry.getValue());
        index++;
    }

    // Label for each hash and for default.
    Label[] labels = new Label[sorted.size()];
    Label defaultLabel = new Label();
    for (int i = 0; i < sorted.size(); ++i) {
        labels[i] = new Label();
    }

    // Create a switch that dispatches to each label based on the hash code of the string.
    mv.visitLookupSwitchInsn(defaultLabel, sortedHashes, labels);

    // Create the cases.
    for (int i = 0; i < sorted.size(); ++i) {
        mv.visitLabel(labels[i]);
        visitx(mv, sortedCases[i]);
    }
    mv.visitLabel(defaultLabel);
    visitDefault();
}

From source file:dodola.anole.lib.StringSwitch.java
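
This example is nearly identical to the previous one; asMap() again turns the hash buckets into sortable entries for the generated switch.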

/**
 * Emit code for a string switch for the given string classifier.
 *
 * switch(s.hashCode()) {
 *   case 192: visitCase(s);
 *   case 312: visitCase(s);
 *   case 1024:
 *     if (s.equals("collided_method1")) {
 *         visit(s);
 *     } else if (s.equals("collided_method2")) {
 *         visit(s);
 *     }
 *     visitDefault();
 *   default:
 *     visitDefault();
 * }
 *
 **/
private void visitClassifier(GeneratorAdapter mv, Set<String> strings) {
    visitString();
    visitHashMethod(mv);

    // Group strings by hash code.
    Multimap<Integer, String> buckets = Multimaps.index(strings, hashMethod);
    List<Map.Entry<Integer, Collection<String>>> sorted = Ordering.natural()
            .onResultOf(new Function<Map.Entry<Integer, Collection<String>>, Integer>() {
                @Override
                public Integer apply(Map.Entry<Integer, Collection<String>> entry) {
                    return entry.getKey();
                }
            }).immutableSortedCopy(buckets.asMap().entrySet());

    int[] sortedHashes = new int[sorted.size()];
    List<String>[] sortedCases = new List[sorted.size()];
    int index = 0;
    for (Map.Entry<Integer, Collection<String>> entry : sorted) {
        sortedHashes[index] = entry.getKey();
        sortedCases[index] = Lists.newCopyOnWriteArrayList(entry.getValue());
        index++;
    }

    // Label for each hash and for default.
    Label[] labels = new Label[sorted.size()];
    Label defaultLabel = new Label();
    for (int i = 0; i < sorted.size(); ++i) {
        labels[i] = new Label();
    }

    // Create a switch that dispatches to each label based on the hash code of the string.
    mv.visitLookupSwitchInsn(defaultLabel, sortedHashes, labels);

    // Create the cases.
    for (int i = 0; i < sorted.size(); ++i) {
        mv.visitLabel(labels[i]);
        visitx(mv, sortedCases[i]);
    }
    mv.visitLabel(defaultLabel);
    visitDefault();
}

From source file:org.dllearner.utilities.sparql.RedundantTypeTriplePatternRemover.java
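
asMap() groups the rdf:type triple patterns by subject, so that for each subject any type already implied by another asserted type can be removed as redundant.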

@Override
public void visit(ElementPathBlock el) {
    // get all rdf:type triple patterns
    Multimap<Node, Triple> subject2TypeTriples = HashMultimap.create();
    for (Iterator<TriplePath> iterator = el.patternElts(); iterator.hasNext();) {
        TriplePath t = iterator.next();
        if (t.isTriple() && t.getPredicate().matches(RDF.type.asNode())) {
            subject2TypeTriples.put(t.getSubject(), t.asTriple());
        }
    }

    // check for semantically redundant triple patterns
    Set<Triple> redundantTriples = new HashSet<>();
    for (Entry<Node, Collection<Triple>> entry : subject2TypeTriples.asMap().entrySet()) {
        Collection<Triple> triples = entry.getValue();

        // get all super classes
        Set<Node> superClasses = new HashSet<>();
        for (Triple triple : triples) {
            Node cls = triple.getObject();
            superClasses.addAll(getSuperClasses(cls));
        }

        for (Triple triple : triples) {
            Node cls = triple.getObject();
            if (superClasses.contains(cls)) {
                redundantTriples.add(triple);
            }
        }
    }

    // remove redundant triple patterns
    for (Iterator<TriplePath> iterator = el.patternElts(); iterator.hasNext();) {
        TriplePath t = iterator.next();
        if (t.isTriple() && redundantTriples.contains(t.asTriple())) {
            iterator.remove();
        }
    }
}

From source file:com.palantir.atlasdb.cleaner.Scrubber.java
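
Here asMap() yields the full collection of cells to scrub per table, which is then partitioned into batches for deletion.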

private void scrubCells(TransactionManager txManager, Multimap<String, Cell> tableNameToCells,
        long scrubTimestamp, Transaction.TransactionType transactionType) {
    for (Entry<String, Collection<Cell>> entry : tableNameToCells.asMap().entrySet()) {
        String tableName = entry.getKey();
        if (log.isInfoEnabled()) {
            log.info("Attempting to immediately scrub " + entry.getValue().size() + " cells from table "
                    + tableName);
        }
        for (List<Cell> cells : Iterables.partition(entry.getValue(), batchSizeSupplier.get())) {
            Multimap<Cell, Long> timestampsToDelete = HashMultimap.create(
                    keyValueService.getAllTimestamps(tableName, ImmutableSet.copyOf(cells), scrubTimestamp));
            for (Cell cell : ImmutableList.copyOf(timestampsToDelete.keySet())) {
                // Don't scrub garbage collection sentinels
                timestampsToDelete.remove(cell, Value.INVALID_VALUE_TIMESTAMP);
            }
            // If transactionType == TransactionType.AGGRESSIVE_HARD_DELETE this might
            // force other transactions to abort or retry
            deleteCellsAtTimestamps(txManager, tableName, timestampsToDelete, transactionType);
        }
        if (log.isInfoEnabled()) {
            log.info("Immediately scrubbed " + entry.getValue().size() + " cells from table " + tableName);
        }
    }
}

From source file:feign.examples.AWSSignatureVersion4.java
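
asMap() is used to emit each lowercase header name once, joining all of its values with commas, while building the AWS V4 canonical request.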

private String canonicalString(RequestTemplate input, Multimap<String, String> sortedLowercaseHeaders) {
    StringBuilder canonicalRequest = new StringBuilder();
    // HTTPRequestMethod + '\n' +
    canonicalRequest.append(input.method()).append('\n');

    // CanonicalURI + '\n' +
    canonicalRequest.append(URI.create(input.url()).getPath()).append('\n');

    // CanonicalQueryString + '\n' +
    canonicalRequest.append(input.queryLine().substring(1));
    canonicalRequest.append('\n');

    // CanonicalHeaders + '\n' +
    for (Entry<String, Collection<String>> entry : sortedLowercaseHeaders.asMap().entrySet()) {
        canonicalRequest.append(entry.getKey()).append(':').append(Joiner.on(',').join(entry.getValue()))
                .append('\n');
    }
    canonicalRequest.append('\n');

    // SignedHeaders + '\n' +
    canonicalRequest.append(Joiner.on(',').join(sortedLowercaseHeaders.keySet())).append('\n');

    // HexEncode(Hash(Payload))
    if (input.body() != null) {
        canonicalRequest.append(
                base16().lowerCase().encode(sha256().hashString(input.body(), UTF_8).asBytes()));
    } else {
        canonicalRequest.append(EMPTY_STRING_HASH);
    }
    return canonicalRequest.toString();
}