Example usage for com.google.common.collect Multimap values

List of usage examples for com.google.common.collect Multimap values

Introduction

In this page you can find the example usage for com.google.common.collect Multimap values.

Prototype

Collection<V> values();

Source Link

Document

Returns a view collection containing the value from each key-value pair contained in this multimap, without collapsing duplicates (so values().size() == size()).

Usage

From source file:org.sonar.plugins.csharp.CSharpSensor.java

/**
 * Imports a Roslyn SARIF report into the sensor context.
 *
 * <p>Builds a rule-key-to-repository lookup from the active Roslyn rules,
 * then hands the report off to the SARIF parser with a callback that
 * records issues against the sensor context.
 *
 * @param reportPath path of the SARIF report file to parse
 * @param context sensor context that receives the parsed issues
 * @throws IllegalArgumentException if the same rule key is declared by
 *         more than one rule repository
 */
private static void importRoslynReport(String reportPath, final SensorContext context) {
    Multimap<String, RuleKey> activeRules = RoslynProfileExporter.activeRoslynRulesByPartialRepoKey(
            context.activeRules().findAll().stream().map(ActiveRule::ruleKey).collect(toList()));

    // Map each Roslyn rule key to the repository declaring it; duplicates are an error.
    final Map<String, String> repositoryKeyByRoslynRuleKey = new HashMap<>();
    for (RuleKey ruleKey : activeRules.values()) {
        String clashingRepository = repositoryKeyByRoslynRuleKey.put(ruleKey.rule(), ruleKey.repository());
        if (clashingRepository != null) {
            throw new IllegalArgumentException("Rule keys must be unique, but \"" + ruleKey.rule()
                    + "\" is defined in both the \"" + clashingRepository + "\" and \""
                    + ruleKey.repository() + "\" rule repositories.");
        }
    }

    SarifParserCallback callback = new SarifParserCallbackImplementation(context, repositoryKeyByRoslynRuleKey);
    SarifParserFactory.create(new File(reportPath)).accept(callback);
}

From source file:com.google.api.gbase.client.ConversionUtil.java

/**
 * Creates a {@link GoogleBaseAttribute} of type
 * {@link GoogleBaseAttributeType#GROUP} and initializes it using the
 * current state of the object. Sub-attributes of group with type
 * GROUP or GROUP's derived types are ignored.
 * /*from   w w w. j a v  a  2  s .com*/
 * @param name attribute name
 * @param group attribute value
 * @return a new {@link com.google.api.gbase.client.GoogleBaseAttribute}
 */
public static GoogleBaseAttribute createAttribute(String name, Group group) {
    GoogleBaseAttribute attribute = new GoogleBaseAttribute(name, GoogleBaseAttributeType.GROUP);
    Multimap<String, GoogleBaseAttribute> subAttributes = group.getAllSubAttributes();
    for (GoogleBaseAttribute attr : subAttributes.values()) {
        GoogleBaseAttributeType type = attr.getType();
        if (!GoogleBaseAttributeType.GROUP.equals(type)
                && !GoogleBaseAttributeType.GROUP.equals(type.getSupertype())
                && (attr.hasValue() || attr.hasSubElements())) {
            attribute.addSubAttribute(attr);
        }
    }
    return attribute;
}

From source file:org.opendaylight.controller.config.yang.store.impl.ExtenderYangTracker.java

/**
 * Parses the yang files referenced by {@code multimap} into a snapshot.
 *
 * @param mbeParser parser used to process the yang input streams
 * @param multimap yang file URLs grouped by the bundle that provides them
 * @return a snapshot holding the parsed module entries
 * @throws YangStoreException if any of the files fail to parse
 */
private static YangStoreSnapshotImpl createSnapshot(MbeParser mbeParser, Multimap<Bundle, URL> multimap)
        throws YangStoreException {
    try {
        YangStoreSnapshotImpl yangStoreSnapshot = mbeParser.parseYangFiles(fromUrlsToInputStreams(multimap));
        // Multimap guarantees values().size() == size(), so ask the multimap directly
        // instead of materializing the values view.
        logger.trace("{} module entries parsed successfully from {} yang files",
                yangStoreSnapshot.countModuleMXBeanEntries(), multimap.size());
        return yangStoreSnapshot;
    } catch (RuntimeException e) {
        throw new YangStoreException("Unable to parse yang files from following URLs: " + multimap, e);
    }
}

From source file:org.apache.hadoop.hive.ql.exec.OperatorUtils.java

/**
 * Given an operator and a set of classes, it returns the number of operators it finds
 * upstream that instantiate any of the given classes.
 *
 * @param start the start operator
 * @param classes the set of classes
 * @return the number of distinct matching operators found upstream
 */
public static int countOperatorsUpstream(Operator<?> start, Set<Class<? extends Operator<?>>> classes) {
    Multimap<Class<? extends Operator<?>>, Operator<?>> ops = classifyOperatorsUpstream(start, classes);
    // The same operator may match several classes; de-duplicating through a set
    // counts each operator exactly once (replaces the manual add-and-count loop).
    return new HashSet<Operator<?>>(ops.values()).size();
}

From source file:com.android.sdklib.repository.local.Update.java

/**
 * Computes which remote packages are updates to locally installed packages
 * and which remote packages are entirely new.
 *
 * @param localPkgs the packages currently installed
 * @param remotePkgs the available remote packages, keyed by package type
 * @return the update result with updates and new packages filled in
 */
public static UpdateResult computeUpdates(@NonNull LocalPkgInfo[] localPkgs,
        @NonNull Multimap<PkgType, RemotePkgInfo> remotePkgs) {

    UpdateResult result = new UpdateResult();
    Set<RemotePkgInfo> updates = Sets.newTreeSet();

    // Pass 1: find updates to locally installed packages.
    for (LocalPkgInfo local : localPkgs) {
        RemotePkgInfo update = findUpdate(local, remotePkgs, result);
        if (update != null) {
            updates.add(update);
        }
    }

    // Pass 2: find remote packages that are not installed and not updates.
    for (RemotePkgInfo remote : remotePkgs.values()) {
        // A package already identified as an update is not new.
        if (updates.contains(remote)) {
            continue;
        }
        IPkgDesc remoteDesc = remote.getDesc();
        boolean isNew = true;
        for (LocalPkgInfo local : localPkgs) {
            IPkgDesc localDesc = local.getDesc();
            // Same as an installed package, or an update for one: not new.
            if (remoteDesc.compareTo(localDesc) == 0 || remoteDesc.isUpdateFor(localDesc)) {
                isNew = false;
                break;
            }
        }
        if (isNew) {
            result.addNewPkgs(remote);
        }
    }

    return result;
}

From source file:org.deephacks.confit.internal.jpa.JpaBean.java

/**
 * Recursively gathers the references reachable from {@code predecessors}
 * into {@code query}, bounded by the remaining recursion depth.
 *
 * @param predecessors bean ids whose outgoing references are collected
 * @param query accumulator for the discovered references
 * @param level remaining recursion depth; recursion stops once exhausted
 */
private static void collectRefs(Set<BeanId> predecessors, JpaBeanQueryAssembler query, int level) {
    if (--level < 0) {
        return;
    }
    Multimap<BeanId, JpaRef> successors = JpaRef.findReferences(predecessors);
    if (!successors.isEmpty()) {
        query.addRefs(predecessors);
    }
    // Only recurse into successors we haven't already visited, to break
    // circular reference chains.
    Set<BeanId> notYetVisited = new HashSet<>();
    for (JpaRef ref : successors.values()) {
        BeanId target = ref.getTarget();
        if (!query.contains(target)) {
            notYetVisited.add(target);
        }
    }
    if (!notYetVisited.isEmpty()) {
        collectRefs(notYetVisited, query, level);
    }
    query.addRefs(successors);
}

From source file:com.liveramp.hank.storage.cueball.Cueball.java

/**
 * Distributes partitions across the configured data directories as evenly
 * as possible, assigning sorted partition numbers to sorted directories.
 *
 * @param configurator source of the configured data directories
 * @param partitionNumbers the partitions to distribute
 * @return an assignment mapping each partition number to a directory
 */
public static DiskPartitionAssignment getDataDirectoryAssignments(DataDirectoriesConfigurator configurator,
        Collection<Integer> partitionNumbers) {

    ArrayList<String> dataDirs = new ArrayList<String>(configurator.getDataDirectories());
    Collections.sort(dataDirs);

    LinkedList<Integer> remaining = new LinkedList<>(partitionNumbers);
    Collections.sort(remaining);

    //  TODO we can make this dynamic based on disk size, but not urgent
    double targetPerDisk = (double) partitionNumbers.size() / dataDirs.size();

    Multimap<String, Integer> assigned = HashMultimap.create();
    for (String dataDir : dataDirs) {
        // Keep the cumulative count on track with the ideal share for the
        // number of disks used so far (Multimap.size() == values().size()).
        int quota = (int) Math.ceil(targetPerDisk * (assigned.keySet().size() + 1)) - assigned.size();
        for (int i = 0; i < quota && !remaining.isEmpty(); i++) {
            assigned.put(dataDir, remaining.pop());
        }
    }

    // Invert to a partition -> directory map.
    Map<Integer, String> byPartition = Maps.newHashMap();
    for (Map.Entry<String, Integer> entry : assigned.entries()) {
        byPartition.put(entry.getValue(), entry.getKey());
    }

    return new DiskPartitionAssignment(byPartition);
}

From source file:org.deephacks.tools4j.config.internal.core.jpa.JpaBean.java

/**
 * Recursively collects all references reachable from {@code predecessors}
 * into {@code query}.
 *
 * <p>NOTE(review): {@code query.addRefs(successors)} only runs after the
 * recursive call returns, so {@code query.contains(...)} may not yet see
 * the successors discovered at this level while recursing; confirm this
 * cannot recurse without bound on circular reference chains (a
 * depth-limited variant of this method exists).
 *
 * @param predecessors bean ids whose outgoing references are collected
 * @param query accumulator for the discovered references
 */
private static void collectRefs(Set<BeanId> predecessors, JpaBeanQueryAssembler query) {
    Multimap<BeanId, JpaRef> successors = JpaRef.findReferences(predecessors);
    if (successors.size() > 0) {
        query.addRefs(predecessors);
    }
    // only recurse into successors we haven't already visited, to break circular references
    Set<BeanId> unvisitedSuccessors = new HashSet<BeanId>();
    for (JpaRef successor : successors.values()) {
        if (!query.contains(successor.getTarget())) {
            unvisitedSuccessors.add(successor.getTarget());
        }
    }
    if (unvisitedSuccessors.size() != 0) {
        // we have reached the end and found all successors
        collectRefs(unvisitedSuccessors, query);
    }
    query.addRefs(successors);
}

From source file:org.jboss.errai.idea.plugin.ui.TemplateUtil.java

/**
 * Collects every data-field tag declared in the template file, using a
 * per-file cache keyed on the file's modification stamp.
 *
 * @param templateFile the template file whose data fields are looked up
 * @param rootTag the root tag to search under
 * @param includeRoot if false, fields outside {@code rootTag}'s subtree are dropped
 * @return the data fields, keyed by field name
 */
@NotNull
public static Multimap<String, TemplateDataField> findAllDataFieldTags(final PsiFile templateFile,
        final XmlTag rootTag, final boolean includeRoot) {
    final Multimap<String, TemplateDataField> cached = Util
            .getOrCreateCache(dataFieldsCacheKey, templateFile, new CacheProvider<DataFieldCacheHolder>() {
                @Override
                public DataFieldCacheHolder provide() {
                    // Recompute the fields and stamp the cache entry with the
                    // file's current modification time.
                    final Multimap<String, TemplateDataField> tags = findAllDataFieldTags(rootTag, includeRoot);
                    return new DataFieldCacheHolder(templateFile.getModificationStamp(), tags);
                }

                @Override
                public boolean isCacheValid(DataFieldCacheHolder holder) {
                    return holder.getTime() == templateFile.getModificationStamp();
                }
            }).getValue();

    // Copy so the cached multimap is never mutated by the filtering below.
    final Multimap<String, TemplateDataField> dataFields = HashMultimap.create(cached);
    final PsiElement rootElement = rootTag.getOriginalElement();

    // Unless the root itself is included, keep only fields inside the root's subtree.
    for (Iterator<TemplateDataField> it = dataFields.values().iterator(); it.hasNext();) {
        TemplateDataField field = it.next();
        final PsiElement originalElement = field.getTag().getOriginalElement();
        if (!includeRoot && !Util.isChild(originalElement, rootElement)) {
            it.remove();
        }
    }
    return dataFields;
}

From source file:com.torodb.torod.db.postgresql.meta.routines.DeleteDocuments.java

/**
 * Deletes the documents identified by {@code didsByStructure} from the
 * given collection schema.
 *
 * @param configuration jOOQ configuration used to build the DSL context
 * @param colSchema schema of the collection being modified
 * @param didsByStructure document ids grouped by their document structure
 * @return the number of root documents deleted
 * @throws SQLException if a delete statement fails
 */
public static int execute(Configuration configuration, CollectionSchema colSchema,
        Multimap<DocStructure, Integer> didsByStructure) throws SQLException {
    TableProvider tableProvider = new TableProvider(colSchema);
    DSLContext dsl = DSL.using(configuration);

    // For each structure, resolve its sub-document tables (reusing one set)
    // and delete the matching sub-documents.
    Set<SubDocTable> structureTables = Sets.newHashSet();
    for (DocStructure structure : didsByStructure.keySet()) {
        structureTables.clear();
        structure.accept(tableProvider, structureTables);
        executeDeleteSubDocuments(dsl, structureTables, didsByStructure.get(structure));
    }

    // Finally remove the root entries for every affected document id.
    Set<Integer> dids = Sets.newHashSet(didsByStructure.values());
    return executeDeleteRoots(dsl, colSchema, dids);
}