Example usage for com.google.common.collect Multimap get

Introduction

This page collects example usages of com.google.common.collect.Multimap#get.

Prototype

Collection<V> get(@Nullable K key);

Document

Returns a view collection of the values associated with key in this multimap, if any.
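
Before the real-world examples, here is a minimal, self-contained sketch of the behavior described above (class and variable names are illustrative, not taken from any of the sources below): get returns a live view that reads and writes through to the multimap, and a missing key yields an empty collection rather than null.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;

public class MultimapGetExample {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 90);
        scores.put("alice", 85);

        // get(key) returns a live view of the values mapped to the key
        Collection<Integer> aliceScores = scores.get("alice");
        System.out.println(aliceScores); // [90, 85]

        // writes to the view are reflected in the multimap
        aliceScores.add(70);
        System.out.println(scores.containsEntry("alice", 70)); // true

        // a missing key yields an empty collection, never null
        System.out.println(scores.get("bob").isEmpty()); // true
    }
}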

Usage

From source file:com.dangdang.ddframe.rdb.sharding.merger.groupby.GroupByResultSet.java

private Collection<GroupByValue> reduce(final Multimap<GroupByKey, GroupByValue> mappedResult)
        throws SQLException {
    List<GroupByValue> result = new ArrayList<>(mappedResult.values().size() * columnLabels.size());
    for (GroupByKey key : mappedResult.keySet()) {
        Collection<GroupByValue> each = mappedResult.get(key);
        GroupByValue reduceResult = new GroupByValue();
        for (int i = 0; i < columnLabels.size(); i++) {
            int index = i + 1;
            Optional<AggregationColumn> aggregationColumn = findAggregationColumn(index);
            Comparable<?> value = null;
            if (aggregationColumn.isPresent()) {
                value = aggregate(aggregationColumn.get(), index, each);
            }
            value = null == value ? each.iterator().next().getValue(new ResultSetQueryIndex(index)) : value;
            reduceResult.put(index, columnLabels.get(i), value);
        }
        if (orderByColumns.isEmpty()) {
            reduceResult.addGroupByColumns(groupByColumns);
        } else {
            reduceResult.addOrderColumns(orderByColumns);
        }
        result.add(reduceResult);
    }
    Collections.sort(result);
    log.trace("Reduced result: {}", result);
    return result;
}

From source file:org.coode.existentialtree.model.OWLRelationHierarchyProvider.java

public Set<OWLIndividual> getChildren(OWLIndividual individual) {
    Multimap<OWLObjectPropertyExpression, OWLIndividual> values = EntitySearcher
            .getObjectPropertyValues(individual, ontologies);

    Set<OWLIndividual> children = new HashSet<OWLIndividual>();

    for (OWLObjectPropertyExpression p : values.keySet()) {
        if (propAndDescendants.isEmpty() || propAndDescendants.contains(p)) {
            children.addAll(values.get(p));
        }
    }
    return children;
}

From source file:org.gradle.cache.internal.WrapperDistributionCleanupAction.java

public boolean execute(@Nonnull CleanupProgressMonitor progressMonitor) {
    long maximumTimestamp = Math.max(0, System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1));
    Set<GradleVersion> usedVersions = this.usedGradleVersions.getUsedGradleVersions();
    Multimap<GradleVersion, File> checksumDirsByVersion = determineChecksumDirsByVersion();
    for (GradleVersion version : checksumDirsByVersion.keySet()) {
        if (!usedVersions.contains(version) && version.compareTo(GradleVersion.current()) < 0) {
            deleteDistributions(checksumDirsByVersion.get(version), maximumTimestamp, progressMonitor);
        } else {
            progressMonitor.incrementSkipped(checksumDirsByVersion.get(version).size());
        }
    }
    return true;
}

From source file:edu.umn.msi.tropix.client.search.impl.TropixSearchClientImpl.java

private List<GridData> transform(final Data[] inputData, final String serviceUrl) {
    final Data[] data = inputData == null ? new Data[0] : inputData;
    final List<GridData> results = new ArrayList<GridData>(data.length);
    Collections.transform(Arrays.asList(data), getConversionFunction(serviceUrl), results);
    final Multimap<String, GridData> gridDataByOwnerMap = HashMultimap.create();
    for (final GridData gridData : results) {
        gridDataByOwnerMap.put(gridData.getOwnerId(), gridData);
    }
    for (final GridUser user : gridUserIterable) {
        if (gridDataByOwnerMap.containsKey(user.getGridId())) {
            for (final GridData gridData : gridDataByOwnerMap.get(user.getGridId())) {
                gridData.setUserName(user.toString());
            }
        }
    }
    return results;
}

From source file:org.apache.hadoop.hbase.snapshot.TakeSnapshotUtils.java

/**
 * Verify that all the expected logs got referenced
 * @param fs filesystem where the logs live
 * @param logsDir original logs directory
 * @param serverNames names of the servers involved in the snapshot
 * @param snapshot description of the snapshot being taken
 * @param snapshotLogDir directory for logs in the snapshot
 * @throws IOException
 */
public static void verifyAllLogsGotReferenced(FileSystem fs, Path logsDir, Set<String> serverNames,
        SnapshotDescription snapshot, Path snapshotLogDir) throws IOException {
    assertTrue(snapshot, "Logs directory doesn't exist in snapshot", fs.exists(logsDir));
    // for each of the server log dirs, make sure it matches the main directory
    Multimap<String, String> snapshotLogs = getMapOfServersAndLogs(fs, snapshotLogDir, serverNames);
    Multimap<String, String> realLogs = getMapOfServersAndLogs(fs, logsDir, serverNames);
    if (realLogs != null) {
        assertNotNull(snapshot, "No server logs added to snapshot", snapshotLogs);
    } else {
        assertNull(snapshot, "Snapshotted server logs that don't exist", snapshotLogs);
        // no original logs, so there is nothing left to compare
        return;
    }

    // check the number of servers
    Set<Entry<String, Collection<String>>> serverEntries = realLogs.asMap().entrySet();
    Set<Entry<String, Collection<String>>> snapshotEntries = snapshotLogs.asMap().entrySet();
    assertEquals(snapshot, "Not the same number of snapshot and original server logs directories",
            serverEntries.size(), snapshotEntries.size());

    // verify we snapshotted each of the log files
    for (Entry<String, Collection<String>> serverLogs : serverEntries) {
        // if the server is not in the snapshot, skip checking its logs
        if (!serverNames.contains(serverLogs.getKey()))
            continue;
        Collection<String> snapshotServerLogs = snapshotLogs.get(serverLogs.getKey());
        assertNotNull(snapshot, "Snapshots missing logs for server:" + serverLogs.getKey(), snapshotServerLogs);

        // check each of the log files
        assertEquals(snapshot, "Didn't reference all the log files for server:" + serverLogs.getKey(),
                serverLogs.getValue().size(), snapshotServerLogs.size());
        for (String log : serverLogs.getValue()) {
            assertTrue(snapshot, "Snapshot logs didn't include " + log, snapshotServerLogs.contains(log));
        }
    }
}

From source file:org.gradle.model.internal.manage.schema.extract.ImplTypeSchemaExtractionStrategySupport.java

private <R> List<ModelProperty<?>> extractPropertySchemas(ModelSchemaExtractionContext<R> extractionContext,
        Multimap<String, Method> methodsByName) {
    List<ModelProperty<?>> properties = Lists.newArrayList();
    Set<Method> handledMethods = Sets.newHashSet();

    for (String methodName : methodsByName.keySet()) {
        Collection<Method> methods = methodsByName.get(methodName);

        List<Method> overloadedMethods = getOverloadedMethods(methods);
        if (overloadedMethods != null) {
            handleOverloadedMethods(extractionContext, overloadedMethods);
            continue;
        }

        if (methodName.startsWith("get") && !methodName.equals("get")) {
            PropertyAccessorExtractionContext getterContext = new PropertyAccessorExtractionContext(methods,
                    isGetterDefinedInManagedType(extractionContext, methodName, methods));

            Character getterPropertyNameFirstChar = methodName.charAt(3);
            if (!Character.isUpperCase(getterPropertyNameFirstChar)) {
                handleInvalidGetter(extractionContext, getterContext,
                        "the 4th character of the getter method name must be an uppercase character");
                continue;
            }

            String propertyNameCapitalized = methodName.substring(3);
            String propertyName = StringUtils.uncapitalize(propertyNameCapitalized);
            String setterName = "set" + propertyNameCapitalized;
            Collection<Method> setterMethods = methodsByName.get(setterName);
            PropertyAccessorExtractionContext setterContext = !setterMethods.isEmpty()
                    ? new PropertyAccessorExtractionContext(setterMethods)
                    : null;

            ModelProperty<?> property = extractPropertySchema(extractionContext, propertyName, getterContext,
                    setterContext, handledMethods);
            if (property != null) {
                properties.add(property);

                handledMethods.addAll(getterContext.getDeclaringMethods());
                if (setterContext != null) {
                    handledMethods.addAll(setterContext.getDeclaringMethods());
                }
            }
        }
    }

    validateAllNecessaryMethodsHandled(extractionContext, methodsByName.values(), handledMethods);
    return properties;
}

From source file:net.diogobohm.timed.api.ui.domain.builder.OverviewBuilder.java

public Overview build(Date startDate, Date endDate, List<Task> tasks) {
    Date safeEndDate = getSafeEndDate(endDate);
    List<DayTaskList> dayTasks = Lists.newArrayList();
    Multimap<LocalDate, Task> dayTaskMap = createDayTaskMap(tasks);
    LocalDate startDay = LocalDate.fromDateFields(startDate);
    LocalDate endDay = LocalDate.fromDateFields(safeEndDate);

    for (LocalDate curDay = startDay; !curDay.isAfter(endDay); curDay = curDay.plusDays(1)) {
        List<Task> curDayTasks = Lists.newArrayList();

        if (dayTaskMap.containsKey(curDay)) {
            curDayTasks.addAll(dayTaskMap.get(curDay));
        }

        dayTasks.add(new DayTaskList(curDay.toDate(), new TaskList(curDayTasks)));
    }

    return new Overview(dayTasks);
}

From source file:com.google.template.soy.pysrc.internal.PySrcMain.java

/**
 * Generates Python source files given a Soy parse tree, an options object, and information on
 *  where to put the output files.
 *
 * @param soyTree The Soy parse tree to generate Python source code for.
 * @param pySrcOptions The compilation options relevant to this backend.
 * @param outputPathFormat The format string defining how to build the output file path
 *     corresponding to an input file path.
 * @param inputPathsPrefix The input path prefix, or empty string if none.
 * @throws SoySyntaxException If a syntax error is found.
 * @throws IOException If there is an error in opening/writing an output Python file.
 */
public void genPyFiles(SoyFileSetNode soyTree, SoyPySrcOptions pySrcOptions, String outputPathFormat,
        String inputPathsPrefix) throws SoySyntaxException, IOException {

    List<String> pyFileContents = genPySrc(soyTree, pySrcOptions);

    ImmutableList<SoyFileNode> srcsToCompile = ImmutableList
            .copyOf(Iterables.filter(soyTree.getChildren(), SoyFileNode.MATCH_SRC_FILENODE));

    if (srcsToCompile.size() != pyFileContents.size()) {
        throw new AssertionError(String.format("Expected to generate %d code chunk(s), got %d",
                srcsToCompile.size(), pyFileContents.size()));
    }

    Multimap<String, Integer> outputs = MainEntryPointUtils.mapOutputsToSrcs(null, outputPathFormat,
            inputPathsPrefix, srcsToCompile);

    for (String outputFilePath : outputs.keySet()) {
        Writer out = Files.newWriter(new File(outputFilePath), Charsets.UTF_8);
        try {
            for (int inputFileIndex : outputs.get(outputFilePath)) {
                out.write(pyFileContents.get(inputFileIndex));
            }
        } finally {
            out.close();
        }
    }
}

From source file:com.rackspacecloud.blueflood.io.datastax.DPreaggregatedMetricsRW.java

private void insertMetricsInBatch(Multimap<Locator, IMetric> map, Granularity granularity) {
    BatchStatement batch = new BatchStatement(BatchStatement.Type.UNLOGGED);

    for (Locator locator : map.keySet()) {
        for (IMetric metric : map.get(locator)) {
            RollupType rollupType = metric.getRollupType();

            DAbstractMetricIO io = rollupTypeToIO.get(rollupType);
            BoundStatement boundStatement = io.getBoundStatementForMetric(metric, granularity);
            batch.add(boundStatement);

            if (granularity == Granularity.FULL) {
                Instrumentation.markFullResPreaggregatedMetricWritten();
            }

            if (!LocatorCache.getInstance().isLocatorCurrentInBatchLayer(locator)) {
                LocatorCache.getInstance().setLocatorCurrentInBatchLayer(locator);
                batch.add(locatorIO.getBoundStatementForLocator(locator));
            }

            // if we are recording delayed metrics, we may need to do an
            // extra insert
            if (isRecordingDelayedMetrics) {
                BoundStatement bs = getBoundStatementForMetricIfDelayed(metric);
                if (bs != null) {
                    batch.add(bs);
                }
            }
        }
    }
    LOG.trace(String.format("insert preaggregated batch statement size=%d", batch.size()));

    try {
        DatastaxIO.getSession().execute(batch);
    } catch (Exception ex) {
        Instrumentation.markWriteError();
        LOG.error(String.format("error writing batch of %d preaggregated metrics", batch.size()), ex);
    }
}

From source file:org.eclipse.b3.build.engine.B3BuildEngineResource.java

private void processFunctions(Multimap<IFunction, String> functions) {
    EList<EObject> content = getContents();
    for (IFunction f : functions.keySet()) {
        content.add(f);
        for (String name : functions.get(f))
            functionMap.put(name, f);
    }
}