Example usage for com.google.common.collect Multimap keySet

Introduction

This page collects example usage of com.google.common.collect.Multimap#keySet from open-source projects.

Prototype

Set<K> keySet();

Document

Returns a view collection of all distinct keys contained in this multimap. Changes to the returned set update the underlying multimap, and vice versa; adding to the returned set, however, is not possible.
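
A minimal, self-contained sketch of those view semantics (assuming only Guava on the classpath; the class and variable names are illustrative, not taken from the projects below):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Set;

public class MultimapKeySetDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 1);
        scores.put("alice", 2);
        scores.put("bob", 3);

        Set<String> keys = scores.keySet();
        System.out.println(keys); // [alice, bob] -- each distinct key appears once

        // The key set is a live view: removing a key drops all of its entries.
        keys.remove("alice");
        System.out.println(scores.size()); // 1
    }
}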

Usage

From source file:io.prestosql.execution.scheduler.SourcePartitionedScheduler.java

private Set<RemoteTask> assignSplits(Multimap<Node, Split> splitAssignment,
        Multimap<Node, Lifespan> noMoreSplitsNotification) {
    ImmutableSet.Builder<RemoteTask> newTasks = ImmutableSet.builder();

    ImmutableSet<Node> nodes = ImmutableSet.<Node>builder().addAll(splitAssignment.keySet())
            .addAll(noMoreSplitsNotification.keySet()).build();
    for (Node node : nodes) {
        // source partitioned tasks can only receive broadcast data; otherwise it would have a different distribution
        ImmutableMultimap<PlanNodeId, Split> splits = ImmutableMultimap.<PlanNodeId, Split>builder()
                .putAll(partitionedNode, splitAssignment.get(node)).build();
        ImmutableMultimap.Builder<PlanNodeId, Lifespan> noMoreSplits = ImmutableMultimap.builder();
        if (noMoreSplitsNotification.containsKey(node)) {
            noMoreSplits.putAll(partitionedNode, noMoreSplitsNotification.get(node));
        }
        newTasks.addAll(stage.scheduleSplits(node, splits, noMoreSplits.build()));
    }
    return newTasks.build();
}
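
As an aside, the same union of key sets could be taken without copying by using Guava's Sets.union view; a sketch, assuming neither multimap is modified while the union is iterated:

    Set<Node> nodes = Sets.union(splitAssignment.keySet(), noMoreSplitsNotification.keySet());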

From source file:com.palantir.atlasdb.cleaner.Scrubber.java

/**
 * @return number of cells read from _scrub table
 */
private int scrubSomeCells(SortedMap<Long, Multimap<String, Cell>> scrubTimestampToTableNameToCell,
        final TransactionManager txManager, long maxScrubTimestamp) {

    // Don't call expensive toString() if trace logging is off
    if (log.isTraceEnabled()) {
        log.trace("Attempting to scrub cells: " + scrubTimestampToTableNameToCell);
    }

    if (log.isInfoEnabled()) {
        int numCells = 0;
        Set<String> tables = Sets.newHashSet();
        for (Multimap<String, Cell> v : scrubTimestampToTableNameToCell.values()) {
            tables.addAll(v.keySet());
            numCells += v.size();
        }
        log.info("Attempting to scrub " + numCells + " cells from tables " + tables);
    }

    if (scrubTimestampToTableNameToCell.size() == 0) {
        return 0; // No cells left to scrub
    }

    Multimap<Long, Cell> toRemoveFromScrubQueue = HashMultimap.create();

    int numCellsReadFromScrubTable = 0;
    List<Future<Void>> scrubFutures = Lists.newArrayList();
    for (Map.Entry<Long, Multimap<String, Cell>> entry : scrubTimestampToTableNameToCell.entrySet()) {
        final long scrubTimestamp = entry.getKey();
        final Multimap<String, Cell> tableNameToCell = entry.getValue();

        numCellsReadFromScrubTable += tableNameToCell.size();

        long commitTimestamp = getCommitTimestampRollBackIfNecessary(scrubTimestamp, tableNameToCell);
        if (commitTimestamp >= maxScrubTimestamp) {
            // We cannot scrub this yet because not all transactions can read this value.
            continue;
        } else if (commitTimestamp != TransactionConstants.FAILED_COMMIT_TS) {
            // This is CRITICAL; don't scrub if the hard delete transaction didn't actually finish
            // (we still remove it from the _scrub table with the call to markCellsAsScrubbed though),
            // or else we could cause permanent data loss if the hard delete transaction failed after
            // queuing cells to scrub but before successfully committing
            for (final List<Entry<String, Cell>> batch : Iterables.partition(tableNameToCell.entries(),
                    batchSizeSupplier.get())) {
                final Multimap<String, Cell> batchMultimap = HashMultimap.create();
                for (Entry<String, Cell> e : batch) {
                    batchMultimap.put(e.getKey(), e.getValue());
                }
                scrubFutures.add(exec.submit(new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        scrubCells(txManager, batchMultimap, scrubTimestamp,
                                aggressiveScrub ? TransactionType.AGGRESSIVE_HARD_DELETE
                                        : TransactionType.HARD_DELETE);
                        return null;
                    }
                }));
            }
        }
        toRemoveFromScrubQueue.putAll(scrubTimestamp, tableNameToCell.values());
    }

    for (Future<Void> future : scrubFutures) {
        Futures.getUnchecked(future);
    }

    Multimap<Cell, Long> cellToScrubTimestamp = HashMultimap.create();
    scrubberStore.markCellsAsScrubbed(Multimaps.invertFrom(toRemoveFromScrubQueue, cellToScrubTimestamp),
            batchSizeSupplier.get());

    if (log.isTraceEnabled()) {
        log.trace("Finished scrubbing cells: " + scrubTimestampToTableNameToCell);
    }

    if (log.isInfoEnabled()) {
        Set<String> tables = Sets.newHashSet();
        for (Multimap<String, Cell> v : scrubTimestampToTableNameToCell.values()) {
            tables.addAll(v.keySet());
        }
        long minTimestamp = Collections.min(scrubTimestampToTableNameToCell.keySet());
        long maxTimestamp = Collections.max(scrubTimestampToTableNameToCell.keySet());
        log.info("Finished scrubbing " + numCellsReadFromScrubTable + " cells at "
                + scrubTimestampToTableNameToCell.size() + " timestamps (" + minTimestamp + "..." + maxTimestamp
                + ") from tables " + tables);
    }

    return numCellsReadFromScrubTable;
}
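
Multimaps.invertFrom, used above to flip the scrub queue from timestamp-to-cell into cell-to-timestamp, copies every (key, value) entry of the source into the destination as (value, key). A toy illustration with placeholder values:

    Multimap<String, Integer> byName = HashMultimap.create();
    byName.put("a", 1);
    byName.put("b", 1);
    Multimap<Integer, String> byValue = Multimaps.invertFrom(byName, HashMultimap.<Integer, String>create());
    // byValue.get(1) now contains [a, b]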

From source file:org.apache.drill.exec.memory.Accountor.java

public void close() {
    // remove the fragment context and reset fragment limits whenever an allocator closes
    if (parent != null && parent.parent == null) {
        logger.debug("Fragment " + fragmentStr + "  accountor being closed");
        removeFragmentContext(fragmentContext);
    }
    resetFragmentLimits();

    if (ENABLE_ACCOUNTING && !buffers.isEmpty()) {
        StringBuffer sb = new StringBuffer();
        sb.append("Attempted to close accountor with ");
        sb.append(buffers.size());
        sb.append(" buffer(s) still allocated");
        if (handle != null) {
            sb.append("for QueryId: ");
            sb.append(QueryIdHelper.getQueryId(handle.getQueryId()));
            sb.append(", MajorFragmentId: ");
            sb.append(handle.getMajorFragmentId());
            sb.append(", MinorFragmentId: ");
            sb.append(handle.getMinorFragmentId());
        }
        sb.append(".\n");

        Multimap<DebugStackTrace, DebugStackTrace> multi = LinkedListMultimap.create();
        for (DebugStackTrace t : buffers.values()) {
            multi.put(t, t);
        }

        for (DebugStackTrace entry : multi.keySet()) {
            Collection<DebugStackTrace> allocs = multi.get(entry);

            sb.append("\n\n\tTotal ");
            sb.append(allocs.size());
            sb.append(" allocation(s) of byte size(s): ");
            for (DebugStackTrace alloc : allocs) {
                sb.append(alloc.size);
                sb.append(", ");
            }

            sb.append("at stack location:\n");
            entry.addToString(sb);
        }
        if (!buffers.isEmpty()) {
            IllegalStateException e = new IllegalStateException(sb.toString());
            if (errorOnLeak) {
                throw e;
            } else {
                logger.warn("Memory leaked.", e);
            }
        }
    }

    remainder.close();

}
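
The keySet()-then-get() loop over multi above could equivalently be written against Multimap.asMap(), which pairs each distinct key with its value collection directly; a sketch:

    for (Map.Entry<DebugStackTrace, Collection<DebugStackTrace>> entry : multi.asMap().entrySet()) {
        Collection<DebugStackTrace> allocs = entry.getValue();
        // ... append the per-stack-trace summary as above ...
    }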

From source file:io.nuun.kernel.core.internal.scanner.disk.ClasspathScannerDisk.java

@Override
public void scanClasspathForTypeRegex(final String typeName, final Callback callback) {
    //        Reflections reflections = new Reflections(configurationBuilder().addUrls(computeUrls()).setScanners(new TypesScanner()));
    queue(new ScannerCommand() {
        @Override
        public void execute(Reflections reflections) {
            Store store = reflections.getStore();
            Multimap<String, String> multimap = store.get(TypesScanner.class);
            Collection<String> collectionOfString = new HashSet<String>();
            for (String loopKey : multimap.keySet()) {
                if (loopKey.matches(typeName)) {
                    collectionOfString.add(loopKey);
                }
            }

            Collection<Class<?>> types = null;

            if (collectionOfString.size() > 0) {
                types = toClasses(collectionOfString);
            } else {
                types = Collections.emptySet();
            }
            callback.callback(postTreatment(types));

        }

        @Override
        public Scanner scanner() {
            return new TypesScanner();
        }

    });

}
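
The manual regex filter over multimap.keySet() could also be expressed with Guava's Multimaps.filterKeys; a sketch, reusing the enclosing method's typeName:

    Multimap<String, String> matching = Multimaps.filterKeys(multimap, new Predicate<String>() {
        @Override
        public boolean apply(String key) {
            return key.matches(typeName);
        }
    });
    Collection<String> collectionOfString = new HashSet<String>(matching.keySet());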

From source file:org.apache.cassandra.db.compaction.LeveledCompactionStrategy.java

public ScannerList getScanners(Collection<SSTableReader> sstables, Range<Token> range) {
    Multimap<Integer, SSTableReader> byLevel = ArrayListMultimap.create();
    for (SSTableReader sstable : sstables) {
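        // Multimap.get(key) returns a live view, so add() stores the entry;
        // byLevel.put(sstable.getSSTableLevel(), sstable) is the more direct equivalent.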
        byLevel.get(sstable.getSSTableLevel()).add(sstable);
    }

    List<ISSTableScanner> scanners = new ArrayList<ISSTableScanner>(sstables.size());
    try {
        for (Integer level : byLevel.keySet()) {
            // level can be -1 when sstables are added to Tracker but not to LeveledManifest
            // since we don't know which level those sstables belong to yet, we simply do the same as for L0 sstables.
            if (level <= 0) {
                // L0 makes no guarantees about overlapping-ness.  Just create a direct scanner for each
                for (SSTableReader sstable : byLevel.get(level))
                    scanners.add(sstable.getScanner(range, CompactionManager.instance.getRateLimiter()));
            } else {
                // Create a LeveledScanner that only opens one sstable at a time, in sorted order
                List<SSTableReader> intersecting = LeveledScanner.intersecting(byLevel.get(level), range);
                if (!intersecting.isEmpty())
                    scanners.add(new LeveledScanner(intersecting, range));
            }
        }
    } catch (Throwable t) {
        try {
            new ScannerList(scanners).close();
        } catch (Throwable t2) {
            t.addSuppressed(t2);
        }
        throw t;
    }

    return new ScannerList(scanners);
}

From source file:com.android.tools.lint.checks.ResourceCycleDetector.java

private void findCycles(@NonNull Context context, @NonNull ResourceType type,
        @NonNull Multimap<String, String> map) {
    Set<String> visiting = Sets.newHashSetWithExpectedSize(map.size());
    Set<String> seen = Sets.newHashSetWithExpectedSize(map.size());
    for (String from : map.keySet()) {
        if (seen.contains(from)) {
            continue;
        }
        List<String> chain = dfs(map, from, visiting);
        if (chain != null && chain.size() > 2) { // size 1 chains are handled directly
            seen.addAll(chain);
            Collections.reverse(chain);
            if (mChains == null) {
                mChains = Maps.newEnumMap(ResourceType.class);
                mLocations = Maps.newEnumMap(ResourceType.class);
                context.getDriver().requestRepeat(this, Scope.RESOURCE_FILE_SCOPE);
            }
            List<List<String>> list = mChains.get(type);
            if (list == null) {
                list = Lists.newArrayList();
                mChains.put(type, list);
            }
            list.add(chain);
        }
    }
}

From source file:eu.nubomedia.network.benchmark.UserSession.java

public String getCsv(Multimap<String, Object> multimap, boolean orderKeys) throws IOException {
    StringWriter writer = new StringWriter();
    NumberFormat numberFormat = new DecimalFormat("##.###");

    // Header
    boolean first = true;
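    // Copying the keys into a TreeSet sorts the CSV columns; otherwise the
    // multimap's own keySet() iteration order is used as-is.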
    Set<String> keySet = orderKeys ? new TreeSet<String>(multimap.keySet()) : multimap.keySet();
    for (String key : keySet) {
        if (!first) {
            writer.append(',');
        }
        writer.append(key);
        first = false;
    }
    writer.append('\n');

    // Values
    int i = 0;
    boolean moreValues;
    do {
        moreValues = false;
        first = true;
        for (String key : keySet) {
            Object[] array = multimap.get(key).toArray();
            boolean hasValue = i < array.length;
            // Track whether any column still has a value left; overwriting
            // moreValues with only the current column's check would stop the
            // loop as soon as the last column ran out, truncating longer ones.
            moreValues = moreValues || hasValue;
            if (hasValue) {
                if (!first) {
                    writer.append(',');
                }
                writer.append(numberFormat.format(array[i]));
            }
            first = false;
        }
        i++;
        if (moreValues) {
            writer.append('\n');
        }
    } while (moreValues);

    writer.flush();
    writer.close();

    return writer.toString();
}

From source file:org.eclipse.xtext.serializer.sequencer.ContextFinder.java

@Override
public Set<ISerializationContext> findByContents(EObject semanticObject,
        Iterable<ISerializationContext> contextCandidates) {
    if (semanticObject == null)
        throw new NullPointerException();

    initConstraints();

    Multimap<IConstraint, ISerializationContext> constraints;
    if (contextCandidates != null)
        constraints = getConstraints(semanticObject, contextCandidates);
    else
        constraints = getConstraints(semanticObject);

    if (constraints.size() < 2)
        return Sets.newLinkedHashSet(constraints.values());

    for (IConstraint cand : Lists.newArrayList(constraints.keySet()))
        if (!isValidValueQuantity(cand, semanticObject))
            constraints.removeAll(cand);

    if (constraints.size() < 2)
        return Sets.newLinkedHashSet(constraints.values());

    LinkedHashSet<ISerializationContext> result = Sets.newLinkedHashSet(constraints.values());
    for (EStructuralFeature feat : semanticObject.eClass().getEAllStructuralFeatures()) {
        if (transientValueUtil.isTransient(semanticObject, feat) != ValueTransient.NO)
            continue;
        if (feat.isMany() && ((List<?>) semanticObject.eGet(feat)).isEmpty())
            continue;
        Multimap<AbstractElement, ISerializationContext> assignments = collectAssignments(constraints, feat);
        Set<AbstractElement> assignedElements = findAssignedElements(semanticObject, feat, assignments);
        Set<ISerializationContext> keep = Sets.newHashSet();
        for (AbstractElement ele : assignedElements)
            keep.addAll(assignments.get(ele));
        result.retainAll(keep);
    }
    return result;
}

From source file:org.obiba.onyx.quartz.core.engine.questionnaire.util.builder.QuestionBuilder.java

/**
 * Look for the {@link Category} with the given name in the current {@link Questionnaire}, add it (create it if
 * necessary) to the current {@link Question}, make it the current category.
 *
 * @param name
 * @return
 */
public CategoryBuilder withSharedCategory(String name) {
    Multimap<Category, Question> map = QuestionnaireFinder.getInstance(questionnaire).findCategories(name);
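    // keySet() holds each distinct category once, so its size counts categories
    // with this name regardless of how many questions share them.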
    if (map.keySet().size() > 1) {
        throw invalidSharedCategoryNameUnicityException(name);
    } else if (map.keySet().isEmpty()) {
        return withCategory(name);
    } else {
        return CategoryBuilder.createQuestionCategory(this, map.keySet().iterator().next());
    }
}

From source file:carskit.alg.cars.transformation.prefiltering.ExactFiltering.java

@Override
protected Map<Measure, Double> evalRankings() throws Exception {

    HashMap<Integer, HashMultimap<Integer, Integer>> cuiList = rateDao.getCtxUserList(testMatrix, binThold);
    HashMap<Integer, HashMultimap<Integer, Integer>> cuiList_train = rateDao.getCtxUserList(trainMatrix);
    int capacity = cuiList.keySet().size();

    // initialization capacity to speed up
    List<Double> ds5 = new ArrayList<>(isDiverseUsed ? capacity : 0);
    List<Double> ds10 = new ArrayList<>(isDiverseUsed ? capacity : 0);

    List<Double> precs5 = new ArrayList<>(capacity);
    List<Double> precs10 = new ArrayList<>(capacity);
    List<Double> recalls5 = new ArrayList<>(capacity);
    List<Double> recalls10 = new ArrayList<>(capacity);
    List<Double> aps = new ArrayList<>(capacity);
    List<Double> rrs = new ArrayList<>(capacity);
    List<Double> aucs = new ArrayList<>(capacity);
    List<Double> ndcgs = new ArrayList<>(capacity);

    // candidate items for all users: here only training items
    // use HashSet instead of ArrayList to speed up removeAll() and contains(): HashSet is O(1) per lookup, ArrayList is O(n).
    Set<Integer> candItems = rateDao.getItemList(trainMatrix);

    List<String> preds = null;
    String toFile = null;
    int numTopNRanks = numRecs < 0 ? 10 : numRecs;
    if (isResultsOut) {
        preds = new ArrayList<String>(1500);
        preds.add(
                "# userId: recommendations in (itemId, ranking score) pairs, where a correct recommendation is denoted by symbol *."); // optional: file header
        toFile = workingPath + String.format("%s-top-%d-items%s.txt", algoName, numTopNRanks, foldInfo); // the output-file name
        FileIO.deleteFile(toFile); // delete possibly old files
    }

    if (verbose)
        Logs.debug("{}{} has candidate items: {}", algoName, foldInfo, candItems.size());

    // ignore items for all users: most popular items
    if (numIgnore > 0) {
        List<Map.Entry<Integer, Integer>> itemDegs = new ArrayList<>();
        for (Integer j : candItems) {
            itemDegs.add(new AbstractMap.SimpleImmutableEntry<Integer, Integer>(j,
                    rateDao.getRatingCountByItem(trainMatrix, j)));
        }
        Lists.sortList(itemDegs, true);
        int k = 0;
        for (Map.Entry<Integer, Integer> deg : itemDegs) {

            // ignore these items from candidate items
            candItems.remove(deg.getKey());
            if (++k >= numIgnore)
                break;
        }
    }

    // for each context
    for (int ctx : cuiList.keySet()) {

        Multimap<Integer, Integer> uis = cuiList.get(ctx);

        int u_capacity = uis.keySet().size();

        List<Double> c_ds5 = new ArrayList<>(isDiverseUsed ? u_capacity : 0);
        List<Double> c_ds10 = new ArrayList<>(isDiverseUsed ? u_capacity : 0);

        List<Double> c_precs5 = new ArrayList<>(u_capacity);
        List<Double> c_precs10 = new ArrayList<>(u_capacity);
        List<Double> c_recalls5 = new ArrayList<>(u_capacity);
        List<Double> c_recalls10 = new ArrayList<>(u_capacity);
        List<Double> c_aps = new ArrayList<>(u_capacity);
        List<Double> c_rrs = new ArrayList<>(u_capacity);
        List<Double> c_aucs = new ArrayList<>(u_capacity);
        List<Double> c_ndcgs = new ArrayList<>(u_capacity);

        HashMultimap<Integer, Integer> uList_train = (cuiList_train.containsKey(ctx)) ? cuiList_train.get(ctx)
                : HashMultimap.<Integer, Integer>create();

        // for each ctx, we build a 2D rating matrix -- only users and items
        this.sm = null;
        userCorrs = null;
        userMeans = null;

        carskit.data.structure.SparseMatrix UIM = getUIMatrix(ctx);
        this.sm = UIM;
        userCorrs = buildCorrs(true, UIM);
        userMeans = new DenseVector(numUsers);
        for (int u = 0; u < numUsers; u++) {
            SparseVector uv = UIM.row(u);
            userMeans.set(u, uv.getCount() > 0 ? uv.mean() : globalMean);
        }

        // for each user
        for (int u : uis.keySet()) {

            if (verbose && ((u + 1) % 100 == 0))
                Logs.debug("{}{} evaluates progress: {} / {}", algoName, foldInfo, u + 1, capacity);

            // number of candidate items for all users
            int numCands = candItems.size();

            // get positive items from test matrix
            Collection<Integer> posItems = uis.get(u);
            List<Integer> correctItems = new ArrayList<>();

            // intersect with the candidate items
            for (Integer j : posItems) {
                if (candItems.contains(j))
                    correctItems.add(j);
            }

            if (correctItems.size() == 0)
                continue; // no testing data for user u

            // remove rated items from candidate items
            Set<Integer> ratedItems = (uList_train.containsKey(u)) ? uList_train.get(u)
                    : new HashSet<Integer>();

            // predict the ranking scores (unordered) of all candidate items
            List<Map.Entry<Integer, Double>> itemScores = new ArrayList<>(Lists.initSize(candItems));
            for (final Integer j : candItems) {
                if (!ratedItems.contains(j)) {
                    final double rank = ranking(u, j, ctx);
                    if (!Double.isNaN(rank)) {
                        if (rank > binThold)
                            itemScores.add(new AbstractMap.SimpleImmutableEntry<Integer, Double>(j, rank));
                    }
                } else {
                    numCands--;
                }
            }

            if (itemScores.size() == 0)
                continue; // no recommendations available for user u

            // order the ranking scores from highest to lowest: List to preserve orders
            Lists.sortList(itemScores, true);
            List<Map.Entry<Integer, Double>> recomd = (numRecs <= 0 || itemScores.size() <= numRecs)
                    ? itemScores
                    : itemScores.subList(0, numRecs);

            List<Integer> rankedItems = new ArrayList<>();
            StringBuilder sb = new StringBuilder();
            int count = 0;
            for (Map.Entry<Integer, Double> kv : recomd) {
                Integer item = kv.getKey();
                rankedItems.add(item);

                if (isResultsOut && count < numTopNRanks) {
                    // restore back to the original item id
                    sb.append("(").append(rateDao.getItemId(item));

                    if (posItems.contains(item))
                        sb.append("*"); // indicating correct recommendation

                    sb.append(", ").append(kv.getValue().floatValue()).append(")");

                    if (++count >= numTopNRanks)
                        break;
                    if (count < numTopNRanks)
                        sb.append(", ");
                }
            }

            int numDropped = numCands - rankedItems.size();
            double AUC = Measures.AUC(rankedItems, correctItems, numDropped);
            double AP = Measures.AP(rankedItems, correctItems);
            double nDCG = Measures.nDCG(rankedItems, correctItems);
            double RR = Measures.RR(rankedItems, correctItems);

            List<Integer> cutoffs = Arrays.asList(5, 10);
            Map<Integer, Double> precs = Measures.PrecAt(rankedItems, correctItems, cutoffs);
            Map<Integer, Double> recalls = Measures.RecallAt(rankedItems, correctItems, cutoffs);

            c_precs5.add(precs.get(5));
            c_precs10.add(precs.get(10));
            c_recalls5.add(recalls.get(5));
            c_recalls10.add(recalls.get(10));

            c_aucs.add(AUC);
            c_aps.add(AP);
            c_rrs.add(RR);
            c_ndcgs.add(nDCG);

            // diversity
            if (isDiverseUsed) {
                double d5 = diverseAt(rankedItems, 5);
                double d10 = diverseAt(rankedItems, 10);

                c_ds5.add(d5);
                c_ds10.add(d10);
            }

            // output predictions
            if (isResultsOut) {
                // restore back to the original user id
                preds.add(rateDao.getUserId(u) + ", " + rateDao.getContextSituationFromInnerId(ctx) + ": "
                        + sb.toString());
                if (preds.size() >= 1000) {
                    FileIO.writeList(toFile, preds, true);
                    preds.clear();
                }
            }
        } // end of the per-user loop for this context

        // aggregate metrics for this context, averaged over its users
        ds5.add(isDiverseUsed ? Stats.mean(c_ds5) : 0.0);
        ds10.add(isDiverseUsed ? Stats.mean(c_ds10) : 0.0);
        precs5.add(Stats.mean(c_precs5));
        precs10.add(Stats.mean(c_precs10));
        recalls5.add(Stats.mean(c_recalls5));
        recalls10.add(Stats.mean(c_recalls10));
        aucs.add(Stats.mean(c_aucs));
        ndcgs.add(Stats.mean(c_ndcgs));
        aps.add(Stats.mean(c_aps));
        rrs.add(Stats.mean(c_rrs));
    }
    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

    // write results out first
    if (isResultsOut && preds.size() > 0) {
        FileIO.writeList(toFile, preds, true);
        Logs.debug("{}{} has writeen item recommendations to {}", algoName, foldInfo, toFile);
    }

    // measure the performance
    Map<Measure, Double> measures = new HashMap<>();
    measures.put(Measure.D5, isDiverseUsed ? Stats.mean(ds5) : 0.0);
    measures.put(Measure.D10, isDiverseUsed ? Stats.mean(ds10) : 0.0);
    measures.put(Measure.Pre5, Stats.mean(precs5));
    measures.put(Measure.Pre10, Stats.mean(precs10));
    measures.put(Measure.Rec5, Stats.mean(recalls5));
    measures.put(Measure.Rec10, Stats.mean(recalls10));
    measures.put(Measure.AUC, Stats.mean(aucs));
    measures.put(Measure.NDCG, Stats.mean(ndcgs));
    measures.put(Measure.MAP, Stats.mean(aps));
    measures.put(Measure.MRR, Stats.mean(rrs));

    return measures;
}