Example usage for com.google.common.collect Multimap keySet

Introduction

On this page you can find example usage for com.google.common.collect Multimap keySet.

Prototype

Set<K> keySet();

Document

Returns a view collection of all distinct keys contained in this multimap.
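
Before the full examples, here is a minimal, self-contained sketch (the class name and data are made up) showing the two properties the examples below rely on: keySet() holds each key once no matter how many values it maps to, and it is a live view, so removing a key from it removes all of that key's entries.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class KeySetDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 10);
        scores.put("alice", 12);
        scores.put("bob", 7);

        // Two distinct keys, even though "alice" is mapped to two values.
        System.out.println(scores.keySet().size()); // 2

        // keySet() is a view backed by the multimap: removing a key here
        // removes every entry for that key from the multimap itself.
        scores.keySet().remove("alice");
        System.out.println(scores); // {bob=[7]}
    }
}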

Usage

From source file:com.google.template.soy.jssrc.internal.JsSrcMain.java

/**
 * Generates JS source files given a Soy parse tree, an options object, an optional bundle of
 * translated messages, and information on where to put the output files.
 *
 * @param soyTree The Soy parse tree to generate JS source code for.
 * @param jsSrcOptions The compilation options relevant to this backend.
 * @param locale The current locale that we're generating JS for, or null if not applicable.
 * @param msgBundle The bundle of translated messages, or null to use the messages from the Soy
 *     source.
 * @param outputPathFormat The format string defining how to build the output file path
 *     corresponding to an input file path.
 * @param inputPathsPrefix The input path prefix, or empty string if none.
 * @throws SoySyntaxException If a syntax error is found.
 * @throws IOException If there is an error in opening/writing an output JS file.
 */
public void genJsFiles(SoyFileSetNode soyTree, SoyJsSrcOptions jsSrcOptions, @Nullable String locale,
        @Nullable SoyMsgBundle msgBundle, String outputPathFormat, String inputPathsPrefix)
        throws SoySyntaxException, IOException {

    List<String> jsFileContents = genJsSrc(soyTree, jsSrcOptions, msgBundle);

    ImmutableList<SoyFileNode> srcsToCompile = ImmutableList
            .copyOf(Iterables.filter(soyTree.getChildren(), SoyFileNode.MATCH_SRC_FILENODE));

    if (srcsToCompile.size() != jsFileContents.size()) {
        throw new AssertionError(String.format("Expected to generate %d code chunk(s), got %d",
                srcsToCompile.size(), jsFileContents.size()));
    }

    Multimap<String, Integer> outputs = MainEntryPointUtils.mapOutputsToSrcs(locale, outputPathFormat,
            inputPathsPrefix, srcsToCompile);

    for (String outputFilePath : outputs.keySet()) {
        Writer out = Files.newWriter(new File(outputFilePath), UTF_8);
        try {
            boolean isFirst = true;
            for (int inputFileIndex : outputs.get(outputFilePath)) {
                if (isFirst) {
                    isFirst = false;
                } else {
                    // Concatenating JS files is not safe unless we know that the last statement from one
                    // couldn't combine with the first statement of the next.  Inserting a semicolon will
                    // prevent this from happening.
                    out.write("\n;\n");
                }
                out.write(jsFileContents.get(inputFileIndex));
            }
        } finally {
            out.close();
        }
    }
}
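
The output loop above iterates outputs.keySet() and then calls outputs.get(outputFilePath) for each key. A hedged alternative, shown as a stand-alone sketch rather than a change to the Soy code, is to walk asMap().entrySet(), which hands each key to the loop together with its value collection.

import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.Map;

public class AsMapSketch {
    public static void main(String[] args) {
        Multimap<String, Integer> outputs = ImmutableListMultimap.of(
                "a.js", 0, "a.js", 1, "b.js", 2);

        // Equivalent to iterating keySet() and calling get(key) for each key,
        // but without the second lookup.
        for (Map.Entry<String, Collection<Integer>> entry : outputs.asMap().entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}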

From source file:org.hudsonci.plugins.vault.install.PackageSelector.java

public boolean matches(final Package pkg, final NodeContext context) {
    assert pkg != null;
    assert context != null;

    Multimap<String, String> props = pkg.getProperties();

    // No properties, matches anything
    if (props.isEmpty()) {
        return true;
    }

    Map<String, String> attrs = context.getAttributes();
    for (String key : props.keySet()) {
        String found = attrs.get(key);

        boolean matched = false;

        // See if the found value matches at least one of the expected values for this key
        for (String expect : props.get(key)) {
            if (expect.equalsIgnoreCase(found)) {
                matched = true;
                break;
            }
        }

        // If none of the values matched, we don't match
        if (!matched) {
            return false;
        }
    }

    // If we get this far, all property keys have matched
    return true;
}
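
The inner loop over props.get(key) exists because the comparison is case-insensitive. If exact string equality were sufficient (an assumption, not how PackageSelector actually behaves), Multimap.containsEntry would do the check in a single call; a minimal sketch with made-up data:

import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;

public class ContainsEntrySketch {
    public static void main(String[] args) {
        Multimap<String, String> props = ImmutableMultimap.of("os", "linux", "os", "windows");

        // Exact-match check: true only if "os" is mapped to exactly "linux".
        System.out.println(props.containsEntry("os", "linux")); // true
        System.out.println(props.containsEntry("os", "LINUX")); // false, hence the
                                                                // case-insensitive loop above
    }
}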

From source file:org.sonar.server.rule.RuleUpdater.java

private void updateOrInsertActiveRuleParams(DbSession dbSession, RuleParamDto ruleParamDto,
        Multimap<ActiveRuleDto, ActiveRuleParamDto> activeRuleParamsByActiveRule) {
    activeRuleParamsByActiveRule.keySet().forEach(new UpdateOrInsertActiveRuleParams(dbSession, dbClient,
            ruleParamDto, activeRuleParamsByActiveRule));
}

From source file:org.sourcepit.maven.dependency.model.aether.DependencyModelBuildingGraphTransformer.java

private void handleArtifacts(final Multimap<ArtifactKey, DependencyNode> keyToNodes) {
    for (final ArtifactKey artifactKey : keyToNodes.keySet()) {
        final Collection<DependencyNode> nodes = keyToNodes.get(artifactKey);
        final DependencyNode node = nodes.iterator().next();
        final Artifact artifact = node.getDependency().getArtifact();
        final boolean referenced = referencedArtifacts.contains(artifactKey);
        handleArtifacts(keyToNodes, nodes, artifact, referenced);
    }
}

From source file:com.google.template.soy.incrementaldomsrc.IncrementalDomSrcMain.java

/**
 * Generates Incremental DOM JS source files given a Soy parse tree, an options object, an
 * optional bundle of translated messages, and information on where to put the output files.
 *
 * @param soyTree The Soy parse tree to generate JS source code for.
 * @param jsSrcOptions The compilation options relevant to this backend.
 * @param outputPathFormat The format string defining how to build the output file path
 *     corresponding to an input file path.
 * @throws SoySyntaxException If a syntax error is found.
 * @throws IOException If there is an error in opening/writing an output JS file.
 */
public void genJsFiles(SoyFileSetNode soyTree, SoyJsSrcOptions jsSrcOptions, String outputPathFormat)
        throws SoySyntaxException, IOException {

    List<String> jsFileContents = genJsSrc(soyTree, jsSrcOptions);

    ImmutableList<SoyFileNode> srcsToCompile = ImmutableList
            .copyOf(Iterables.filter(soyTree.getChildren(), SoyFileNode.MATCH_SRC_FILENODE));

    if (srcsToCompile.size() != jsFileContents.size()) {
        throw new AssertionError(String.format("Expected to generate %d code chunk(s), got %d",
                srcsToCompile.size(), jsFileContents.size()));
    }

    Multimap<String, Integer> outputs = MainEntryPointUtils.mapOutputsToSrcs(null /* locale */,
            outputPathFormat, "" /* inputPathsPrefix */, srcsToCompile);

    for (String outputFilePath : outputs.keySet()) {
        Writer out = Files.newWriter(new File(outputFilePath), UTF_8);
        try {
            boolean isFirst = true;
            for (int inputFileIndex : outputs.get(outputFilePath)) {
                if (isFirst) {
                    isFirst = false;
                } else {
                    // Concatenating JS files is not safe unless we know that the last statement from one
                    // couldn't combine with the first statement of the next.  Inserting a semicolon will
                    // prevent this from happening.
                    out.write("\n;\n");
                }
                out.write(jsFileContents.get(inputFileIndex));
            }
        } finally {
            out.close();
        }
    }
}

From source file:com.facebook.buck.android.SmartDexingStep.java

@Override
public String getDescription(ExecutionContext context) {
    StringBuilder b = new StringBuilder();
    b.append(getShortName());
    b.append(' ');

    Multimap<Path, Path> outputToInputs = outputToInputsSupplier.get();
    for (Path output : outputToInputs.keySet()) {
        b.append("-out ");
        b.append(output.toString());
        b.append("-in ");
        Joiner.on(':').appendTo(b,
                Iterables.transform(outputToInputs.get(output), Functions.toStringFunction()));
    }

    return b.toString();
}

From source file:org.apache.cassandra.dht.BootStrapper.java

/** get potential sources for each range, ordered by proximity (as determined by EndpointSnitch) */
Multimap<Range, InetAddress> getRangesWithSources(String table) {
    assert tokenMetadata.sortedTokens().size() > 0;
    final AbstractReplicationStrategy strat = Table.open(table).getReplicationStrategy();
    Collection<Range> myRanges = strat.getPendingAddressRanges(tokenMetadata, token, address);

    Multimap<Range, InetAddress> myRangeAddresses = ArrayListMultimap.create();
    Multimap<Range, InetAddress> rangeAddresses = strat.getRangeAddresses(tokenMetadata);
    for (Range myRange : myRanges) {
        for (Range range : rangeAddresses.keySet()) {
            if (range.contains(myRange)) {
                List<InetAddress> preferred = DatabaseDescriptor.getEndpointSnitch()
                        .getSortedListByProximity(address, rangeAddresses.get(range));
                myRangeAddresses.putAll(myRange, preferred);
                break;
            }
        }
        assert myRangeAddresses.keySet().contains(myRange);
    }
    return myRangeAddresses;
}
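
The assertion in this method checks membership with myRangeAddresses.keySet().contains(myRange); Multimap.containsKey expresses the same check directly. A tiny stand-alone sketch, with the Range and InetAddress types replaced by strings purely for illustration:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class ContainsKeySketch {
    public static void main(String[] args) {
        Multimap<String, String> rangeAddresses = ArrayListMultimap.create();
        rangeAddresses.put("r1", "10.0.0.1");

        // Both checks give the same result; containsKey states the intent directly.
        System.out.println(rangeAddresses.keySet().contains("r1")); // true
        System.out.println(rangeAddresses.containsKey("r1"));       // true
    }
}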

From source file:org.eclipse.b3.build.engine.B3BuildEngineResource.java

private void processFunctions(Multimap<IFunction, String> functions) {
    EList<EObject> content = getContents();
    for (IFunction f : functions.keySet()) {
        content.add(f);
        for (String name : functions.get(f))
            functionMap.put(name, f);
    }

}

From source file:org.sonar.core.issue.tracking.BlockRecognizer.java

/**
 * If base source code is available, then detect code moves through block hashes.
 * Only the issues associated to a line can be matched here.
 */
void match(Input<RAW> rawInput, Input<BASE> baseInput, Tracking<RAW, BASE> tracking) {
    BlockHashSequence rawHashSequence = rawInput.getBlockHashSequence();
    BlockHashSequence baseHashSequence = baseInput.getBlockHashSequence();

    Multimap<Integer, RAW> rawsByLine = groupByLine(tracking.getUnmatchedRaws(), rawHashSequence);
    Multimap<Integer, BASE> basesByLine = groupByLine(tracking.getUnmatchedBases(), baseHashSequence);
    Map<Integer, HashOccurrence> occurrencesByHash = new HashMap<>();

    for (Integer line : basesByLine.keySet()) {
        int hash = baseHashSequence.getBlockHashForLine(line);
        HashOccurrence hashOccurrence = occurrencesByHash.get(hash);
        if (hashOccurrence == null) {
            // first occurrence in base
            hashOccurrence = new HashOccurrence();
            hashOccurrence.baseLine = line;
            hashOccurrence.baseCount = 1;
            occurrencesByHash.put(hash, hashOccurrence);
        } else {
            hashOccurrence.baseCount++;
        }
    }

    for (Integer line : rawsByLine.keySet()) {
        int hash = rawHashSequence.getBlockHashForLine(line);
        HashOccurrence hashOccurrence = occurrencesByHash.get(hash);
        if (hashOccurrence != null) {
            hashOccurrence.rawLine = line;
            hashOccurrence.rawCount++;
        }
    }

    for (HashOccurrence hashOccurrence : occurrencesByHash.values()) {
        if (hashOccurrence.baseCount == 1 && hashOccurrence.rawCount == 1) {
            // Guaranteed that baseLine has been moved to rawLine, so we can map all issues on baseLine to all issues on rawLine
            map(rawsByLine.get(hashOccurrence.rawLine), basesByLine.get(hashOccurrence.baseLine), tracking);
            basesByLine.removeAll(hashOccurrence.baseLine);
            rawsByLine.removeAll(hashOccurrence.rawLine);
        }
    }

    // Check if remaining number of lines exceeds threshold. It avoids processing too many combinations.
    if (basesByLine.keySet().size() * rawsByLine.keySet().size() >= 250_000) {
        return;
    }

    List<LinePair> possibleLinePairs = Lists.newArrayList();
    for (Integer baseLine : basesByLine.keySet()) {
        for (Integer rawLine : rawsByLine.keySet()) {
            int weight = lengthOfMaximalBlock(baseInput.getLineHashSequence(), baseLine,
                    rawInput.getLineHashSequence(), rawLine);
            possibleLinePairs.add(new LinePair(baseLine, rawLine, weight));
        }
    }
    Collections.sort(possibleLinePairs, LinePairComparator.INSTANCE);
    for (LinePair linePair : possibleLinePairs) {
        // High probability that baseLine has been moved to rawLine, so we can map all issues on baseLine to all issues on rawLine
        map(rawsByLine.get(linePair.rawLine), basesByLine.get(linePair.baseLine), tracking);
    }
}
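
The threshold guard above multiplies basesByLine.keySet().size() by rawsByLine.keySet().size(), so it counts distinct lines on each side. For contrast, a small sketch (with made-up data) of how keySet().size() differs from keys().size() and size() on a multimap:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class KeyCountSketch {
    public static void main(String[] args) {
        Multimap<Integer, String> issuesByLine = ArrayListMultimap.create();
        issuesByLine.put(1, "issue-a");
        issuesByLine.put(1, "issue-b");
        issuesByLine.put(5, "issue-c");

        System.out.println(issuesByLine.keySet().size()); // 2 distinct lines
        System.out.println(issuesByLine.keys().size());   // 3, one per key-value pair
        System.out.println(issuesByLine.size());          // 3 entries in total
    }
}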

From source file:com.eucalyptus.cloudwatch.common.internal.domain.metricdata.MetricManager.java

public static List<Collection<MetricStatistics>> getManyMetricStatistics(
        List<GetMetricStatisticsParams> getMetricStatisticsParamses) {
    if (getMetricStatisticsParamses == null)
        throw new IllegalArgumentException("getMetricStatisticsParamses can not be null");
    Date now = new Date();
    Map<GetMetricStatisticsParams, Collection<MetricStatistics>> resultMap = Maps.newHashMap();
    Multimap<Class, GetMetricStatisticsParams> hashGroupMap = LinkedListMultimap.create();
    for (GetMetricStatisticsParams getMetricStatisticsParams : getMetricStatisticsParamses) {
        if (getMetricStatisticsParams == null)
            throw new IllegalArgumentException("getMetricStatisticsParams can not be null");
        getMetricStatisticsParams.validate(now);
        Class metricEntityClass = MetricEntityFactory.getClassForEntitiesGet(
                getMetricStatisticsParams.getMetricType(), getMetricStatisticsParams.getDimensionHash());
        hashGroupMap.put(metricEntityClass, getMetricStatisticsParams);
    }
    for (Class metricEntityClass : hashGroupMap.keySet()) {
        try (final TransactionResource db = Entities.transactionFor(metricEntityClass)) {
            // set some global criteria to start (for narrowing?)
            Date minDate = null;
            Date maxDate = null;
            Junction disjunction = Restrictions.disjunction();
            Map<GetMetricStatisticsParams, TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>> multiAggregationMap = Maps
                    .newHashMap();
            for (GetMetricStatisticsParams getMetricStatisticsParams : hashGroupMap.get(metricEntityClass)) {
                multiAggregationMap.put(getMetricStatisticsParams,
                        new TreeMap<GetMetricStatisticsAggregationKey, MetricStatistics>(
                                GetMetricStatisticsAggregationKey.COMPARATOR_WITH_NULLS.INSTANCE));
                Junction conjunction = Restrictions.conjunction();
                conjunction = conjunction
                        .add(Restrictions.lt("timestamp", getMetricStatisticsParams.getEndTime()));
                conjunction = conjunction
                        .add(Restrictions.ge("timestamp", getMetricStatisticsParams.getStartTime()));
                conjunction = conjunction
                        .add(Restrictions.eq("accountId", getMetricStatisticsParams.getAccountId()));
                conjunction = conjunction
                        .add(Restrictions.eq("metricName", getMetricStatisticsParams.getMetricName()));
                conjunction = conjunction
                        .add(Restrictions.eq("namespace", getMetricStatisticsParams.getNamespace()));
                conjunction = conjunction.add(
                        Restrictions.eq("dimensionHash", hash(getMetricStatisticsParams.getDimensionMap())));
                if (getMetricStatisticsParams.getUnits() != null) {
                    conjunction = conjunction
                            .add(Restrictions.eq("units", getMetricStatisticsParams.getUnits()));
                }
                disjunction = disjunction.add(conjunction);
                if (minDate == null || getMetricStatisticsParams.getStartTime().before(minDate)) {
                    minDate = getMetricStatisticsParams.getStartTime();
                }
                if (maxDate == null || getMetricStatisticsParams.getEndTime().after(maxDate)) {
                    maxDate = getMetricStatisticsParams.getEndTime();
                }
            }
            Criteria criteria = Entities.createCriteria(metricEntityClass);
            criteria = criteria.add(Restrictions.lt("timestamp", maxDate));
            criteria = criteria.add(Restrictions.ge("timestamp", minDate));
            criteria = criteria.add(disjunction);

            ProjectionList projectionList = Projections.projectionList();
            projectionList.add(Projections.max("sampleMax"));
            projectionList.add(Projections.min("sampleMin"));
            projectionList.add(Projections.sum("sampleSize"));
            projectionList.add(Projections.sum("sampleSum"));
            projectionList.add(Projections.groupProperty("units"));
            projectionList.add(Projections.groupProperty("timestamp"));
            projectionList.add(Projections.groupProperty("accountId"));
            projectionList.add(Projections.groupProperty("metricName"));
            projectionList.add(Projections.groupProperty("metricType"));
            projectionList.add(Projections.groupProperty("namespace"));
            projectionList.add(Projections.groupProperty("dimensionHash"));
            criteria.setProjection(projectionList);
            criteria.addOrder(Order.asc("timestamp"));

            ScrollableResults results = criteria.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
            while (results.next()) {
                MetricEntity me = getMetricEntity(results);
                for (GetMetricStatisticsParams getMetricStatisticsParams : hashGroupMap
                        .get(metricEntityClass)) {
                    if (metricDataMatches(getMetricStatisticsParams, me)) {
                        Map<GetMetricStatisticsAggregationKey, MetricStatistics> aggregationMap = multiAggregationMap
                                .get(getMetricStatisticsParams);
                        GetMetricStatisticsAggregationKey key = new GetMetricStatisticsAggregationKey(me,
                                getMetricStatisticsParams.getStartTime(), getMetricStatisticsParams.getPeriod(),
                                getMetricStatisticsParams.getDimensionHash());
                        MetricStatistics item = new MetricStatistics(me,
                                getMetricStatisticsParams.getStartTime(), getMetricStatisticsParams.getPeriod(),
                                getMetricStatisticsParams.getDimensions());
                        if (!aggregationMap.containsKey(key)) {
                            aggregationMap.put(key, item);
                        } else {
                            MetricStatistics totalSoFar = aggregationMap.get(key);
                            totalSoFar.setSampleMax(Math.max(item.getSampleMax(), totalSoFar.getSampleMax()));
                            totalSoFar.setSampleMin(Math.min(item.getSampleMin(), totalSoFar.getSampleMin()));
                            totalSoFar.setSampleSize(totalSoFar.getSampleSize() + item.getSampleSize());
                            totalSoFar.setSampleSum(totalSoFar.getSampleSum() + item.getSampleSum());
                        }
                    }
                }
            }
            for (GetMetricStatisticsParams getMetricStatisticsParams : multiAggregationMap.keySet()) {
                resultMap.put(getMetricStatisticsParams,
                        multiAggregationMap.get(getMetricStatisticsParams).values());
            }
        }
    }
    List<Collection<MetricStatistics>> resultList = Lists.newArrayList();
    for (GetMetricStatisticsParams getMetricStatisticsParams : getMetricStatisticsParamses) {
        if (resultMap.get(getMetricStatisticsParams) == null) {
            resultList.add(new ArrayList<MetricStatistics>());
        } else {
            resultList.add(resultMap.get(getMetricStatisticsParams));
        }
    }
    return resultList;
}
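
The loop near the top of this method that fills hashGroupMap is a hand-written group-by, which keySet() then iterates one group at a time. As a hedged aside, Guava's Multimaps.index builds the same kind of grouping from a key function; the sketch below uses a made-up key function (the first character of a string) rather than MetricEntityFactory.getClassForEntitiesGet.

import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Multimaps;

public class IndexSketch {
    public static void main(String[] args) {
        ImmutableList<String> params = ImmutableList.of("cpu", "disk", "dns");

        // Group elements by a derived key (here: first letter, purely illustrative).
        ImmutableListMultimap<Character, String> byFirstLetter = Multimaps.index(params,
                new Function<String, Character>() {
                    @Override
                    public Character apply(String s) {
                        return s.charAt(0);
                    }
                });

        for (Character key : byFirstLetter.keySet()) {
            System.out.println(key + " -> " + byFirstLetter.get(key));
        }
    }
}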