Example usage for com.google.common.collect Multimap values

Introduction

This page lists example usages of the values() method of com.google.common.collect.Multimap.

Prototype

Collection<V> values();

Document

Returns a view collection containing the value from each key-value pair contained in this multimap, without collapsing duplicates (so values().size() == size()).
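
For orientation, here is a minimal self-contained sketch (not from any of the projects below; the class and variable names are hypothetical) showing the two properties the documentation calls out: duplicates are not collapsed, and the returned collection is a live view of the multimap.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class MultimapValuesDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 10);
        scores.put("bob", 10); // duplicate value under a different key
        scores.put("alice", 7);

        // Duplicates are kept, so values().size() == size().
        System.out.println(scores.values().size()); // 3
        System.out.println(scores.size());          // 3

        // values() is a view: changes to the multimap show through.
        scores.removeAll("bob");
        System.out.println(scores.values()); // [10, 7] (iteration order may vary)
    }
}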

Usage

From source file:com.palantir.atlasdb.keyvalue.impl.ValidatingQueryRewritingKeyValueService.java

@Override
public void putWithTimestamps(String tableName, Multimap<Cell, Value> cellValues)
        throws KeyAlreadyExistsException {
    if (cellValues.isEmpty()) {
        return;
    }
    Validate.isTrue(!tableName.equals(TransactionConstants.TRANSACTION_TABLE), TRANSACTION_ERROR);

    long lastTimestamp = -1;
    boolean allAtSameTimestamp = true;
    for (Value value : cellValues.values()) {
        long timestamp = value.getTimestamp();
        Validate.isTrue(timestamp != Long.MAX_VALUE);
        Validate.isTrue(timestamp >= 0);
        if (lastTimestamp != -1 && timestamp != lastTimestamp) {
            allAtSameTimestamp = false;
        }
        lastTimestamp = timestamp;
    }

    if (allAtSameTimestamp) {
        Multimap<Cell, byte[]> cellValuesWithStrippedTimestamp = Multimaps.transformValues(cellValues,
                Value.GET_VALUE);

        Map<Cell, byte[]> putMap = Maps.transformValues(cellValuesWithStrippedTimestamp.asMap(),
                new Function<Collection<byte[]>, byte[]>() {

                    @Override
                    public byte[] apply(Collection<byte[]> input) {
                        try {
                            return Iterables.getOnlyElement(input);
                        } catch (IllegalArgumentException e) {
                            log.error(
                                    "Application tried to put multiple same-cell values in at same timestamp; attempting to perform last-write-wins, but ordering is not guaranteed.");
                            return Iterables.getLast(input);
                        }
                    }

                });

        put(tableName, putMap, lastTimestamp);
        return;
    }
    delegate.putWithTimestamps(tableName, cellValues);
}
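
The example above collapses a Multimap<Cell, byte[]> into a Map<Cell, byte[]> by taking the only value per key and falling back to the last value on a conflict. Here is a stripped-down sketch of the same collapse (hypothetical String keys and values, not Atlas-specific):

import com.google.common.base.Function;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.Map;

public class CollapseMultimapDemo {
    public static void main(String[] args) {
        Multimap<String, String> updates = ArrayListMultimap.create();
        updates.put("row1", "a");
        updates.put("row2", "b");
        updates.put("row1", "c"); // conflicting second write to row1

        // Collapse each key's values to a single value, preferring the
        // only element and falling back to the last on a conflict.
        Map<String, String> collapsed = Maps.transformValues(updates.asMap(),
                new Function<Collection<String>, String>() {
                    @Override
                    public String apply(Collection<String> input) {
                        try {
                            return Iterables.getOnlyElement(input);
                        } catch (IllegalArgumentException e) {
                            // More than one value: last-write-wins.
                            return Iterables.getLast(input);
                        }
                    }
                });

        System.out.println(collapsed); // {row1=c, row2=b} (key order not guaranteed)
    }
}

Note that Maps.transformValues returns a lazy view, so the Function runs each time a value is read, just as in the original example.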

From source file:com.google.gapid.views.Formatter.java

/**
 * @return an empty list if not a constant, a single value for constants, and multiple values for bitfields.
 */
public static Collection<Constant> findConstant(SnippetObject obj, Primitive type) {
    final ConstantSet constants = ConstantSet.lookup(type);
    if (constants == null || constants.getEntries().length == 0) {
        return Collections.emptyList();
    }

    // first, try and find exact match
    List<Constant> byValue = constants.getByValue(obj.getObject());
    if (byValue != null && byValue.size() != 0) {
        if (byValue.size() == 1) {
            // perfect, we have just 1 match
            return byValue;
        }
        // try and find the best match
        Labels labels = Labels.fromSnippets(obj.getSnippets());
        Constant result = disambiguate(byValue, labels);
        return result == null ? Collections.emptyList() : ImmutableList.of(result);
    }

    // we could not find an exact match,
    // but for a number, maybe we can find a combination of constants that match (bit flags)
    Object value = obj.getObject();
    if (!(value instanceof Number) || value instanceof Double || value instanceof Float) {
        return Collections.emptyList();
    }

    long valueNumber = ((Number) value).longValue();
    long leftToFind = valueNumber;
    Multimap<Number, Constant> resultMap = ArrayListMultimap.create();

    for (Constant constant : constants.getEntries()) {
        long constantValue = ((Number) constant.getValue()).longValue();
        if (Long.bitCount(constantValue) == 1 && (valueNumber & constantValue) != 0) {
            resultMap.put(constantValue, constant);
            leftToFind &= ~constantValue; // remove bit
        }
    }

    // we did not find enough flags to cover this constant
    if (leftToFind != 0) {
        return Collections.emptyList();
    }

    // we found exactly 1 of each constant to cover the whole value
    if (resultMap.keySet().size() == resultMap.size()) {
        return resultMap.values();
    }

    // we have more than 1 matching constant per flag, so we need to disambiguate
    Labels labels = Labels.fromSnippets(obj.getSnippets());
    for (Number key : resultMap.keySet()) {
        Collection<Constant> flagConstants = resultMap.get(key);
        if (flagConstants.size() == 1) {
            // perfect, we only have 1 value for this
            continue;
        }

        Constant con = disambiguate(flagConstants, labels);
        if (con != null) {
            // we have several values, but we found 1 to use
            resultMap.replaceValues(key, ImmutableList.of(con));
        } else {
            // we have several values and we don't know what one to use
            return Collections.emptyList();
        }
    }
    // assert all constants are disambiguated now
    assert resultMap.keySet().size() == resultMap.size();
    return resultMap.values();
}
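
A pattern worth noting in the example above: resultMap maps each single-bit flag value to its candidate constants, so once every key holds exactly one constant, values() yields the flat collection of flags that together compose the original number. A stripped-down sketch of that decomposition (hypothetical flag names, not from the source):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import java.util.Map;

public class BitFlagDecompositionDemo {
    public static void main(String[] args) {
        // Known single-bit flags, e.g. from a constant set.
        Map<String, Long> flags = Map.of("READ", 1L, "WRITE", 2L, "EXEC", 4L);

        long value = 5L; // READ | EXEC
        long leftToFind = value;
        Multimap<Long, String> matches = ArrayListMultimap.create();

        for (Map.Entry<String, Long> flag : flags.entrySet()) {
            long flagValue = flag.getValue();
            // Only consider single-bit flags that are set in the value.
            if (Long.bitCount(flagValue) == 1 && (value & flagValue) != 0) {
                matches.put(flagValue, flag.getKey());
                leftToFind &= ~flagValue; // mark this bit as covered
            }
        }

        if (leftToFind == 0) {
            // values() flattens the per-bit matches into the flag names.
            System.out.println(matches.values()); // e.g. [READ, EXEC] (key order not guaranteed)
        }
    }
}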

From source file:org.eclipse.xtext.serializer.sequencer.ContextFinder.java

@Override
public Set<ISerializationContext> findByContents(EObject semanticObject,
        Iterable<ISerializationContext> contextCandidates) {
    if (semanticObject == null)
        throw new NullPointerException();

    initConstraints();

    Multimap<IConstraint, ISerializationContext> constraints;
    if (contextCandidates != null)
        constraints = getConstraints(semanticObject, contextCandidates);
    else
        constraints = getConstraints(semanticObject);

    if (constraints.size() < 2)
        return Sets.newLinkedHashSet(constraints.values());

    for (IConstraint cand : Lists.newArrayList(constraints.keySet()))
        if (!isValidValueQuantity(cand, semanticObject))
            constraints.removeAll(cand);

    if (constraints.size() < 2)
        return Sets.newLinkedHashSet(constraints.values());

    LinkedHashSet<ISerializationContext> result = Sets.newLinkedHashSet(constraints.values());
    for (EStructuralFeature feat : semanticObject.eClass().getEAllStructuralFeatures()) {
        if (transientValueUtil.isTransient(semanticObject, feat) != ValueTransient.NO)
            continue;
        if (feat.isMany() && ((List<?>) semanticObject.eGet(feat)).isEmpty())
            continue;
        Multimap<AbstractElement, ISerializationContext> assignments = collectAssignments(constraints, feat);
        Set<AbstractElement> assignedElements = findAssignedElements(semanticObject, feat, assignments);
        Set<ISerializationContext> keep = Sets.newHashSet();
        for (AbstractElement ele : assignedElements)
            keep.addAll(assignments.get(ele));
        result.retainAll(keep);
    }
    return result;
}

From source file:org.gerryai.htn.simple.constraint.validation.impl.SimpleConstraintValidator.java

@Override
public final void apply(Map<Term, Term> substitution) {

    // Build a map of tasks to their replacements
    Multimap<Task, Task> taskReplacementMap = HashMultimap.create();
    for (Task task : tasks) {
        Task newTask = task.applyToCopy(substitution);
        taskReplacementMap.put(task, newTask);
    }
    // Replace the existing tasks in this builder
    tasks = new HashSet<Task>(taskReplacementMap.values());

    // Build the sets of replacement constraints
    beforeConstraints = applyReplace(beforeConstraints, substitution, taskReplacementMap);
    afterConstraints = applyReplace(afterConstraints, substitution, taskReplacementMap);
    betweenConstraints = applyReplace(betweenConstraints, substitution, taskReplacementMap);
    precedenceConstraints = applyReplace(precedenceConstraints, substitution, taskReplacementMap);

}

From source file:org.apache.hadoop.hive.ql.exec.tez.HiveSplitGenerator.java

@SuppressWarnings("unchecked")
@Override
public List<Event> initialize() throws Exception {
    // Setup the map work for this thread. Pruning modified the work instance to potentially remove
    // partitions. The same work instance must be used when generating splits.
    Utilities.setMapWork(jobConf, work);
    try {
        boolean sendSerializedEvents = conf
                .getBoolean("mapreduce.tez.input.initializer.serialize.event.payload", true);

        // perform dynamic partition pruning
        if (pruner != null) {
            pruner.prune();
        }

        InputSplitInfoMem inputSplitInfo = null;
        boolean generateConsistentSplits = HiveConf.getBoolVar(conf,
                HiveConf.ConfVars.HIVE_TEZ_GENERATE_CONSISTENT_SPLITS);
        LOG.info("GenerateConsistentSplitsInHive=" + generateConsistentSplits);
        String realInputFormatName = conf.get("mapred.input.format.class");
        boolean groupingEnabled = userPayloadProto.getGroupingEnabled();
        if (groupingEnabled) {
            // Need to instantiate the realInputFormat
            InputFormat<?, ?> inputFormat = (InputFormat<?, ?>) ReflectionUtils
                    .newInstance(JavaUtils.loadClass(realInputFormatName), jobConf);

            int totalResource = 0;
            int taskResource = 0;
            int availableSlots = 0;
            // FIXME. Do the right thing Luke.
            if (getContext() == null) {
                // for now, totalResource = taskResource for llap
                availableSlots = 1;
            }

            if (getContext() != null) {
                totalResource = getContext().getTotalAvailableResource().getMemory();
                taskResource = getContext().getVertexTaskResource().getMemory();
                availableSlots = totalResource / taskResource;
            }

            if (HiveConf.getLongVar(conf, HiveConf.ConfVars.MAPREDMINSPLITSIZE, 1) <= 1) {
                // broken configuration from mapred-default.xml
                final long blockSize = conf.getLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY,
                        DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT);
                final long minGrouping = conf.getLong(TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_MIN_SIZE,
                        TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_MIN_SIZE_DEFAULT);
                final long preferredSplitSize = Math.min(blockSize / 2, minGrouping);
                HiveConf.setLongVar(jobConf, HiveConf.ConfVars.MAPREDMINSPLITSIZE, preferredSplitSize);
                LOG.info("The preferred split size is " + preferredSplitSize);
            }

            // Create the un-grouped splits
            float waves = conf.getFloat(TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_WAVES,
                    TezMapReduceSplitsGrouper.TEZ_GROUPING_SPLIT_WAVES_DEFAULT);

            // Raw splits
            InputSplit[] splits = inputFormat.getSplits(jobConf, (int) (availableSlots * waves));
            // Sort the splits, so that subsequent grouping is consistent.
            Arrays.sort(splits, new InputSplitComparator());
            LOG.info("Number of input splits: " + splits.length + ". " + availableSlots + " available slots, "
                    + waves + " waves. Input format is: " + realInputFormatName);

            if (work.getIncludedBuckets() != null) {
                splits = pruneBuckets(work, splits);
            }

            Multimap<Integer, InputSplit> groupedSplits = splitGrouper.generateGroupedSplits(jobConf, conf,
                    splits, waves, availableSlots, splitLocationProvider);
            // And finally return them in a flat array
            InputSplit[] flatSplits = groupedSplits.values().toArray(new InputSplit[0]);
            LOG.info("Number of split groups: " + flatSplits.length);

            List<TaskLocationHint> locationHints = splitGrouper.createTaskLocationHints(flatSplits,
                    generateConsistentSplits);

            inputSplitInfo = new InputSplitInfoMem(flatSplits, locationHints, flatSplits.length, null, jobConf);
        } else {
            // no need for grouping and the target #of tasks.
            // This code path should never be triggered at the moment. If grouping is disabled,
            // DAGUtils uses MRInputAMSplitGenerator.
            // If this is used in the future - make sure to disable grouping in the payload, if it isn't already disabled
            throw new RuntimeException(
                    "HiveInputFormat does not support non-grouped splits, InputFormatName is: "
                            + realInputFormatName);
            // inputSplitInfo = MRInputHelpers.generateInputSplitsToMem(jobConf, false, 0);
        }

        return createEventList(sendSerializedEvents, inputSplitInfo);
    } finally {
        Utilities.clearWork(jobConf);
    }
}

From source file:com.candy.middle.FinReportDownload.java

/**
 * write to database
 * @param reports 
 */
private void writeToDB(String symbol, Multimap<REPORT_TYPE, Report> reports, String dateStr) {
    for (Report rp : reports.values()) {
        if (!fdDbProc.writeData(symbol, rp.getYear(), rp.getQuarter(), rp.getReportType().ordinal(), dateStr,
                rp.getNameValues())) {
            System.out.println("ERROR - write financial report to DB (" + rp.getReportTypeStr() + " )");
        }
    }
}

From source file:org.caltoopia.frontend.ui.launch.FindTopNetworks.java

public List<String> find(List<String> paths) {
    ResourceSet resourceSet = new ResourceSetImpl();

    // Scan the paths for actors & networks.
    List<String> files = new ArrayList<String>();

    for (String path : paths) {
        getActors(files, new File(path));
    }

    Multimap<String, URI> uris = HashMultimap.create();

    for (int i = 0; i < files.size(); i++) {
        uris.put(files.get(i), URI.createFileURI(files.get(i)));
    }

    ContainersStateFactory containersStateFactory = new ContainersStateFactory();
    IAllContainersState containersState = containersStateFactory.getContainersState(files, uris);
    resourceSet.eAdapters().add(new DelegatingIAllContainerAdapter(containersState));

    Collection<URI> values = Sets.newHashSet(uris.values());
    for (URI uri : values) {
        resourceSet.createResource(uri);
    }

    List<Resource> resources = Lists.newArrayList(resourceSet.getResources());

    for (Resource res : resources) {
        try {
            res.load(null);
            doSwitch(res.getContents().get(0));
        } catch (Exception x) {
            System.err.println("[FindTopNetworks] error: " + x.getMessage());
        }
    }

    return topNetworks;
}

From source file:eu.numberfour.n4js.ui.organize.imports.N4JSOrganizeImportsHandler.java

@Override
public Object execute(ExecutionEvent event) throws ExecutionException {

    Collection<?> callingMenus = HandlerUtil.getActiveMenus(event);
    // "#TextEditorContext" is the defined plugin.xml
    boolean fromTextContext = (callingMenus != null && callingMenus.contains("#TextEditorContext"));
    boolean fromShortCut = (callingMenus == null || callingMenus.isEmpty());

    XtextEditor editor = EditorUtils.getActiveXtextEditor(event);
    boolean haveActiveEditor = editor != null;

    ISelection selection = HandlerUtil.getCurrentSelection(event);
    boolean nonEmptyStructuredSelection = (selection != null && selection instanceof IStructuredSelection
            && !selection.isEmpty());

    if (haveActiveEditor && (fromTextContext || fromShortCut)) {
        organizeEditor(editor);
    } else if (nonEmptyStructuredSelection) {
        // probably called on a tree-selection in the package-manager or whatever view shows the project-structure:
        // organize files and folders:
        // for each selection entry collect files:
        Multimap<IProject, IFile> projectFiles = collectFiles((IStructuredSelection) selection);

        HashSet<IFile> filesInSet = new HashSet<>(projectFiles.values());
        List<IFile> filesAsList = new ArrayList<>(filesInSet);

        if (filesAsList.isEmpty()) {
            return null;
        }

        // Query unsaved
        IWorkbench wbench = PlatformUI.getWorkbench();
        IWorkbenchWindow activeWorkbenchWindow = wbench.getActiveWorkbenchWindow();
        boolean allSaved = wbench.saveAll(activeWorkbenchWindow, activeWorkbenchWindow, null, true);
        if (!allSaved) {
            return null;
        }

        Shell shell = HandlerUtil.getActiveShell(event);

        IRunnableWithProgress op = new IRunnableWithProgress() {
            @Override
            public void run(IProgressMonitor mon) throws InvocationTargetException, InterruptedException {
                int totalWork = filesAsList.size();
                mon.beginTask("Organize imports.", totalWork);
                for (int i = 0; !mon.isCanceled() && i < filesAsList.size(); i++) {
                    IFile currentFile = filesAsList.get(i);
                    mon.setTaskName("Organize imports." + " - File (" + (i + 1) + " of " + totalWork + ")");
                    try {
                        mon.subTask(currentFile.getName());
                        doOrganizeImports(currentFile, new SubProgressMonitor(mon, 1));

                    } catch (CoreException | RuntimeException e) {
                        String msg = "Exception in file " + currentFile.getFullPath().toString() + ".";
                        LOGGER.error(msg, e);
                        if (errorDialogWithStackTrace(msg + " Hit OK to continue.", e)) {
                            // - logged anyway
                        } else {
                            throw new InvocationTargetException(e);
                        }
                    }
                }
                if (mon.isCanceled()) {
                    throw new InterruptedException();
                }
            }
        };

        try {
            new ProgressMonitorDialog(shell).run(true, true, op);
        } catch (InvocationTargetException e) {
            throw new ExecutionException("Error during organizing imports", e);
        } catch (InterruptedException e) {
            // user cancelled, ok
        }

    }

    return null;
}
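
Several examples on this page (ContextFinder, FindTopNetworks, and this handler) immediately copy values() into a Set. Because values() keeps duplicates, this is the usual way to deduplicate when the same value may appear under several keys. A trivial sketch (hypothetical names):

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.HashSet;
import java.util.Set;

public class DeduplicateValuesDemo {
    public static void main(String[] args) {
        Multimap<String, String> projectFiles = HashMultimap.create();
        projectFiles.put("projectA", "shared.n4js");
        projectFiles.put("projectB", "shared.n4js"); // same file under two keys
        projectFiles.put("projectA", "main.n4js");

        System.out.println(projectFiles.values().size()); // 3, duplicates kept

        Set<String> unique = new HashSet<>(projectFiles.values());
        System.out.println(unique.size()); // 2, duplicates collapsed
    }
}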

From source file:gg.pistol.sweeper.core.Analyzer.java

/**
 * Compute the analysis.
 *
 * @return the set of all {@link DuplicateGroup}s sorted decreasingly by size.
 */
NavigableSet<DuplicateGroup> analyze(Collection<? extends Resource> targetResources,
        SweeperOperationListener listener) throws SweeperAbortException {
    Preconditions.checkNotNull(targetResources);
    Preconditions.checkNotNull(listener);
    Preconditions.checkArgument(!targetResources.isEmpty());

    log.trace("Computing the analysis for the resources {}.", targetResources);
    analyzing = true;
    deleting = false;
    abortAnalysis.set(false);
    OperationTrackingListener trackingListener = new OperationTrackingListener(listener);

    // The number of total targets (including the ROOT target) calculated at the beginning (before sizing the targets)
    // by traverseResources().
    MutableInteger totalTargets = new MutableInteger(0);

    rootTarget = traverseResources(targetResources, totalTargets, trackingListener);
    Collection<TargetImpl> sized = computeSize(rootTarget, totalTargets.intValue(), trackingListener);
    Multimap<Long, TargetImpl> sizeDups = filterDuplicateSize(sized);

    computeHash(sizeDups.values(), trackingListener);
    Multimap<String, TargetImpl> hashDups = filterDuplicateHash(sizeDups.values());

    count = computeCount(rootTarget, hashDups);
    NavigableSet<DuplicateGroup> duplicates = createDuplicateGroups(hashDups);
    analyzing = false;
    return duplicates;
}

From source file:com.ikanow.infinit.e.harvest.extraction.document.HarvestStatus_Standalone.java

/**
 * getLogMessages
 * Returns a list of up to 20 errors (e.g. encountered when parsing JavaScript) for
 * a source, sorted by frequency in descending order
 * @return
 */
private StringBuffer getLogMessages(boolean bReset) {
    if ((null != _messages) && (_messages.size() > 0)) {
        StringBuffer messagesString = new StringBuffer();

        // Create multimap to store errors in, reverse the order of key (error message) and
        // value (count) to sort on error count
        Multimap<Integer, String> mm = TreeMultimap.create();
        for (java.util.Map.Entry<String, Integer> entry : _messages.entrySet()) {
            StringBuffer msg = new StringBuffer(entry.getKey()).append(" (Occurrences: ")
                    .append(entry.getValue()).append(')');
            mm.put(-entry.getValue(), msg.toString());
        }

        // Write the error messages to a Collection<String>
        Collection<String> messages = mm.values();

        // Append up to the top 20 messages to our StringBuffer and return
        int messageCount = 1;
        for (String s : messages) {
            if (messageCount > 1) {
                messagesString.append('\n');
            }
            messagesString.append(s);
            messageCount++;
            if (messageCount > 20)
                break;
        }
        if (bReset) {
            _messages.clear();
        }
        return messagesString;
    } else {
        return null;
    }
}
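
The last example relies on an idiom worth spelling out: TreeMultimap iterates its keys in ascending order, so storing negated counts as keys makes values() yield messages from most to least frequent. A minimal sketch of the same idea (hypothetical messages, not from the source above):

import com.google.common.collect.Multimap;
import com.google.common.collect.TreeMultimap;
import java.util.Map;

public class FrequencySortDemo {
    public static void main(String[] args) {
        // message -> occurrence count
        Map<String, Integer> counts = Map.of("timeout", 5, "parse error", 12, "io error", 5);

        // Negate the count so that TreeMultimap's ascending key order
        // becomes descending frequency order.
        Multimap<Integer, String> byFrequency = TreeMultimap.create();
        for (Map.Entry<String, Integer> e : counts.entrySet()) {
            byFrequency.put(-e.getValue(), e.getKey());
        }

        // values() walks keys in ascending (-12, -5) order,
        // i.e. most frequent message first.
        System.out.println(byFrequency.values()); // [parse error, io error, timeout]
    }
}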