Example usage for com.google.common.collect Multimap keySet

Introduction

On this page you can find example usage for com.google.common.collect Multimap keySet.

Prototype

Set<K> keySet();

Document

Returns a view collection of all distinct keys contained in this multimap.
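
For orientation, here is a minimal, self-contained sketch (not taken from the indexed projects; the class name is illustrative) showing this behavior with Guava's ArrayListMultimap: keySet() yields each distinct key once, and the returned set is a live view of the multimap.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Set;

public class MultimapKeySetExample {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 90);
        scores.put("alice", 85);
        scores.put("bob", 70);

        // Each distinct key appears exactly once, even though "alice" maps to two values.
        Set<String> keys = scores.keySet();
        System.out.println(keys);        // e.g. [bob, alice] (iteration order is not guaranteed)
        System.out.println(keys.size()); // 2

        // keySet() is a view: a key added to the multimap afterwards
        // is visible through the previously obtained set.
        scores.put("carol", 95);
        System.out.println(keys.contains("carol")); // true
    }
}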

Usage

From source file:com.torodb.backend.AbstractReadInterface.java

@Override
@SuppressFBWarnings(value = { "OBL_UNSATISFIED_OBLIGATION",
        "ODR_OPEN_DATABASE_RESOURCE" }, justification = "ResultSet is wrapped in a Cursor<Tuple2<Integer, KVValue<?>>>. It's "
                + "iterated and closed in caller code")
public Cursor<Tuple2<Integer, KvValue<?>>> getCollectionDidsAndProjectionWithFieldsIn(DSLContext dsl,
        MetaDatabase metaDatabase, MetaCollection metaCol, MetaDocPart metaDocPart,
        Multimap<MetaField, KvValue<?>> valuesMultimap) throws SQLException {
    assert metaDatabase.getMetaCollectionByIdentifier(metaCol.getIdentifier()) != null;
    assert metaCol.getMetaDocPartByIdentifier(metaDocPart.getIdentifier()) != null;
    assert valuesMultimap.keySet().stream()
            .allMatch(metafield -> metaDocPart.getMetaFieldByIdentifier(metafield.getIdentifier()) != null);

    Stream<Tuple2<MetaField, Collection<KvValue<?>>>> valuesBatchStream = valuesMultimap.asMap().entrySet()
            .stream().map(e -> new Tuple2<MetaField, Collection<KvValue<?>>>(e.getKey(), e.getValue()));
    if (valuesMultimap.asMap().entrySet().stream().anyMatch(e -> e.getValue().size() > 500)) {
        valuesBatchStream = valuesBatchStream.flatMap(e -> Seq.seq(e.v2.stream()).zipWithIndex()
                .groupBy(t -> t.v2 / 500).entrySet().stream().map(se -> toValuesMap(e.v1, se)));
    }
    Stream<Cursor<Tuple2<Integer, KvValue<?>>>> didProjectionCursorStream = valuesBatchStream
            .map(Unchecked.function(mapBatch -> getCollectionDidsAndProjectionWithFieldsInBatch(dsl,
                    metaDatabase, metaCol, metaDocPart, mapBatch.v1, mapBatch.v2)));
    Stream<Tuple2<Integer, KvValue<?>>> didProjectionStream = didProjectionCursorStream
            .flatMap(cursor -> cursor.getRemaining().stream());

    return new IteratorCursor<>(didProjectionStream.iterator());
}

From source file:org.eclipse.acceleo.internal.ide.ui.actions.DoNotGenerateFilesAction.java

/**
 * {@inheritDoc}
 * 
 * @see org.eclipse.ui.IActionDelegate#run(org.eclipse.jface.action.IAction)
 */
public void run(IAction action) {
    List<IResource> resources = new ArrayList<IResource>();

    // Compute all the resources that are selected
    Iterator<?> iterator = this.structuredSelection.iterator();
    while (iterator.hasNext()) {
        Object next = iterator.next();
        if (next instanceof IResource) {
            resources.add((IResource) next);
        }
    }

    final List<IFile> files = new ArrayList<IFile>();

    // Find all the files that they contain
    for (IResource iResource : resources) {
        if (iResource instanceof IFile) {
            files.add((IFile) iResource);
        } else if (iResource instanceof IContainer) {
            IContainer iContainer = (IContainer) iResource;
            try {
                iContainer.accept(new IResourceVisitor() {

                    public boolean visit(IResource resource) throws CoreException {
                        if (resource instanceof IFile) {
                            files.add((IFile) resource);
                        }
                        return true;
                    }
                });
            } catch (CoreException e) {
                AcceleoUIActivator.log(e, true);
            }
        }
    }

    final Multimap<IProject, IFile> projects2files = ArrayListMultimap.create();
    for (IFile iFile : files) {
        projects2files.put(iFile.getProject(), iFile);
    }

    IRunnableWithProgress runnableWithProgress = new IRunnableWithProgress() {
        public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
            Set<IProject> projects = projects2files.keySet();
            for (IProject iProject : projects) {
                StringBuffer buffer = new StringBuffer();
                Collection<IFile> filesNotToGenerate = projects2files.get(iProject);
                for (IFile fileNotToGenerate : filesNotToGenerate) {
                    buffer.append(fileNotToGenerate.getFullPath() + System.getProperty("line.separator")); //$NON-NLS-1$
                }
                try {
                    IFile doNotGenerateFile = iProject.getFile(IAcceleoConstants.DO_NOT_GENERATE_FILENAME);
                    if (doNotGenerateFile.exists()) {
                        doNotGenerateFile.setContents(new ByteArrayInputStream(buffer.toString().getBytes()),
                                true, true, monitor);
                    } else {
                        doNotGenerateFile.create(new ByteArrayInputStream(buffer.toString().getBytes()), true,
                                monitor);
                    }
                } catch (CoreException e) {
                    AcceleoUIActivator.log(e, true);
                }
            }
        }
    };

    try {
        PlatformUI.getWorkbench().getActiveWorkbenchWindow().run(true, true, runnableWithProgress);
    } catch (InvocationTargetException e) {
        AcceleoUIActivator.log(e, true);
    } catch (InterruptedException e) {
        AcceleoUIActivator.log(e, true);
    }
}

From source file:org.crypto.sse.TSet.java

public static void constructEMMPar(final byte[] key1, final byte[] key2, final byte[] keyENC,
        final Multimap<String, String> lookup, final Multimap<String, String> encryptedIdToRealId)
        throws InterruptedException, ExecutionException, IOException {

    // Instantiation of B buckets in the secure inverted index
    // Initialize of the free set

    // Determination of the bucketSize B
    bucketSize = lookup.size() * spaceOverhead;
    int count = 2;
    for (int j = 1; j < 1000; j++) {
        if (bucketSize > Math.pow(2, count)) {
            count = 2 * j;
        } else {
            break;
        }
    }

    bucketSize = (int) Math.pow(2, count);

    for (int i = 0; i < bucketSize; i++) {
        secureIndex.add(new ArrayList<Record>());
        free.add(new ArrayList<Integer>());
        // For each bucket initialize to S sub-buckets
        for (int j = 0; j < subBucketSize; j++) {
            // initialize all buckets with random values
            secureIndex.get(i).add(new Record(new byte[16], new byte[16]));
            free.get(i).add(j);
        }
    }

    List<String> listOfKeyword = new ArrayList<String>(lookup.keySet());
    int threads = 0;
    if (Runtime.getRuntime().availableProcessors() > listOfKeyword.size()) {
        threads = listOfKeyword.size();
    } else {
        threads = Runtime.getRuntime().availableProcessors();
    }

    ExecutorService service = Executors.newFixedThreadPool(threads);
    ArrayList<String[]> inputs = new ArrayList<String[]>(threads);

    for (int i = 0; i < threads; i++) {
        String[] tmp;
        if (i == threads - 1) {
            tmp = new String[listOfKeyword.size() / threads + listOfKeyword.size() % threads];
            for (int j = 0; j < listOfKeyword.size() / threads + listOfKeyword.size() % threads; j++) {
                tmp[j] = listOfKeyword.get((listOfKeyword.size() / threads) * i + j);
            }
        } else {
            tmp = new String[listOfKeyword.size() / threads];
            for (int j = 0; j < listOfKeyword.size() / threads; j++) {

                tmp[j] = listOfKeyword.get((listOfKeyword.size() / threads) * i + j);
            }
        }
        inputs.add(i, tmp);
    }

    List<Future<Integer>> futures = new ArrayList<Future<Integer>>();
    for (final String[] input : inputs) {
        Callable<Integer> callable = new Callable<Integer>() {
            public Integer call() throws Exception {

                int output = setup(key1, key2, keyENC, input, lookup, encryptedIdToRealId);
                return 1;
            }
        };
        futures.add(service.submit(callable));
    }

    service.shutdown();

}

From source file:com.streamsets.pipeline.stage.destination.kudu.KuduTarget.java

private void writeBatch(Batch batch) throws StageException {
    Multimap<String, Record> partitions = ELUtils.partitionBatchByExpression(tableNameEval, tableNameVars,
            tableNameTemplate, batch);

    KuduSession session = Preconditions.checkNotNull(kuduSession, KUDU_SESSION);

    for (String tableName : partitions.keySet()) {
        Map<String, Record> keyToRecordMap = new HashMap<>();
        Iterator<Record> it = partitions.get(tableName).iterator();

        // if table doesn't exist, send records to the error handler and continue
        KuduTable table;
        try {
            table = kuduTables.getUnchecked(tableName);
        } catch (UncheckedExecutionException ex) {
            while (it.hasNext()) {
                errorRecordHandler.onError(new OnRecordErrorException(it.next(), Errors.KUDU_01, tableName));
            }
            continue;
        }

        Optional<KuduRecordConverter> kuduRecordConverter = createKuduRecordConverter(table);
        if (!kuduRecordConverter.isPresent()) {
            throw new StageException(Errors.KUDU_11);
        }
        KuduRecordConverter recordConverter = kuduRecordConverter.get();

        try {
            while (it.hasNext()) {
                try {
                    Record record = it.next();
                    Insert insert = table.newInsert();
                    PartialRow row = insert.getRow();
                    recordConverter.convert(record, row);
                    keyToRecordMap.put(insert.getRow().stringifyRowKey(), record);
                    session.apply(insert);
                } catch (OnRecordErrorException onRecordError) {
                    errorRecordHandler.onError(onRecordError);
                }
            }
            List<RowError> rowErrors = Collections.emptyList();
            List<OperationResponse> responses = session.flush(); // can return null
            if (responses != null) {
                rowErrors = OperationResponse.collectErrors(responses);
            }
            // log ALL errors then process them
            for (RowError error : rowErrors) {
                LOG.warn(Errors.KUDU_03.getMessage(), error.toString());
            }
            for (RowError error : rowErrors) {
                Insert insert = (Insert) error.getOperation();
                // TODO SDC-2701 - support update on duplicate key
                if ("ALREADY_PRESENT".equals(error.getStatus())) {
                    // duplicate row key
                    String rowKey = insert.getRow().stringifyRowKey();
                    Record record = keyToRecordMap.get(rowKey);
                    errorRecordHandler.onError(new OnRecordErrorException(record, Errors.KUDU_08, rowKey));
                } else {
                    throw new StageException(Errors.KUDU_03, error.toString());
                }
            }
        } catch (Exception ex) {
            LOG.error(Errors.KUDU_03.getMessage(), ex.toString(), ex);
            throw throwStageException(ex);
        }
    }
}

From source file:org.sonar.java.filters.FilterVerifier.java

public static void verify(String filename, JavaIssueFilter filter, CodeVisitor... extraCodeVisitors) {
    // set the component to the filter
    filter.setComponentKey(filename);

    IssueCollector issueCollector = new IssueCollector();
    ArrayList<CodeVisitor> codeVisitors = Lists.<CodeVisitor>newArrayList(filter, issueCollector);

    // instantiate the rules filtered by the filter
    codeVisitors.addAll(instantiateRules(filter.filteredRules()));

    for (CodeVisitor codeVisitor : extraCodeVisitors) {
        codeVisitors.add(codeVisitor);
    }

    Collection<File> classpath = FileUtils.listFiles(new File("target/test-jars"),
            new String[] { "jar", "zip" }, true);
    VisitorsBridgeForTests visitorsBridge = new VisitorsBridgeForTests(codeVisitors,
            Lists.newArrayList(classpath), null);
    JavaAstScanner.scanSingleFileForTests(new File(filename), visitorsBridge);
    VisitorsBridgeForTests.TestJavaFileScannerContext testJavaFileScannerContext = visitorsBridge
            .lastCreatedTestContext();

    Multimap<Integer, String> issuesByLines = HashMultimap.create();
    for (AnalyzerMessage analyzerMessage : testJavaFileScannerContext.getIssues()) {
        Integer issueLine = analyzerMessage.getLine();
        String ruleKey = AnnotationUtils.getAnnotation(analyzerMessage.getCheck().getClass(), Rule.class).key();
        FilterableIssue issue = mock(FilterableIssue.class);
        when(issue.ruleKey()).thenReturn(RuleKey.of("repo", ruleKey));
        when(issue.componentKey()).thenReturn(filename);
        when(issue.line()).thenReturn(issueLine);

        if (issueCollector.rejectedIssuesLines.contains(issueLine)) {

            assertThat(filter.accept(issue)).overridingErrorMessage("Line #" + issueLine
                    + " has been marked with 'NoIssue' but issue of rule '" + ruleKey + "' has been accepted!")
                    .isFalse();
        } else if (issueCollector.acceptedIssuesLines.contains(issueLine)) {
            // force check on accepted issues
            assertThat(filter.accept(issue)).overridingErrorMessage(
                    "Line #" + issueLine + " has been marked with 'WithIssue' but no issue have been raised!")
                    .isTrue();
        } else {
            issuesByLines.put(issueLine, ruleKey);
        }
    }

    if (!issuesByLines.isEmpty()) {
        List<Integer> lines = Lists.newArrayList(issuesByLines.keySet());
        Collections.sort(lines);
        StringBuilder builder = new StringBuilder();
        for (Integer line : lines) {
            builder.append("\n#" + line + ": " + issuesByLines.get(line).toString());
        }

        Fail.fail("The following lines have not been marked with 'WithIssue' or 'NoIssue' and raised issues:"
                + builder.toString());
    }
}

From source file:moavns.EstruturasVizinhanca.java

public Solucao eliminarRedundancia(Solucao solucao) {
    Multimap<Float, Integer> colunas = TreeMultimap.create();
    for (Coluna coluna : solucao.getColunas()) {
        Float custo = coluna.getCusto();
        colunas.put((custo * (-1)), coluna.getNome());
    }
    Solucao testarsolucao = new Solucao(solucao);
    for (Float chave : colunas.keySet()) {
        Iterator iterador = colunas.get(chave).iterator();
        while (iterador.hasNext()) {
            Solucao testar = new Solucao(testarsolucao);
            Coluna maiorcoluna = testarsolucao.getLinhasX().get(iterador.next());
            //System.out.println(maiorcoluna.getNome());
            testar.getLinhasCobertas().clear();
            cobrirLinhas(maiorcoluna, testar);
            if (testar.getLinhasCobertas().size() == testar.getQtdeLinhas()) {
                String newstring = MOAVNS.transformaSolucao(testar);
                if (!MOAVNS.solucoes.contains(newstring)) {
                    testar.getColunas().remove(maiorcoluna);
                    testar.setCustototal(testar.getCustototal() - maiorcoluna.getCusto());
                    testarsolucao = testar;
                }
            }
        }
    }

    return testarsolucao;
}

From source file:cuchaz.enigma.gui.components.ClassSelector.java

public void setClasses(Collection<ClassEntry> classEntries) {
    if (classEntries == null) {
        setModel(null);
        return;
    }

    // build the package names
    Map<String, ClassSelectorPackageNode> packages = Maps.newHashMap();
    for (ClassEntry classEntry : classEntries) {
        packages.put(classEntry.getPackageName(), null);
    }

    // sort the packages
    List<String> sortedPackageNames = Lists.newArrayList(packages.keySet());
    Collections.sort(sortedPackageNames, (a, b) -> {
        // I can never keep this rule straight when writing these damn things...
        // a < b => -1, a == b => 0, a > b => +1

        String[] aparts = a.split("/");
        String[] bparts = b.split("/");
        for (int i = 0; true; i++) {
            if (i >= aparts.length) {
                return -1;
            } else if (i >= bparts.length) {
                return 1;
            }

            int result = aparts[i].compareTo(bparts[i]);
            if (result != 0) {
                return result;
            }
        }
    });

    // create the root node and the package nodes
    DefaultMutableTreeNode root = new DefaultMutableTreeNode();
    for (String packageName : sortedPackageNames) {
        ClassSelectorPackageNode node = new ClassSelectorPackageNode(packageName);
        packages.put(packageName, node);
        root.add(node);
    }

    // put the classes into packages
    Multimap<String, ClassEntry> packagedClassEntries = ArrayListMultimap.create();
    for (ClassEntry classEntry : classEntries) {
        packagedClassEntries.put(classEntry.getPackageName(), classEntry);
    }

    // build the class nodes
    for (String packageName : packagedClassEntries.keySet()) {
        // sort the class entries
        List<ClassEntry> classEntriesInPackage = Lists.newArrayList(packagedClassEntries.get(packageName));
        Collections.sort(classEntriesInPackage, m_comparator);

        // create the nodes in order
        for (ClassEntry classEntry : classEntriesInPackage) {
            ClassSelectorPackageNode node = packages.get(packageName);
            node.add(new ClassSelectorClassNode(classEntry));
        }
    }

    // finally, update the tree control
    setModel(new DefaultTreeModel(root));
}

From source file:eu.numberfour.filechecker.FileChecker.java

private boolean run(Path... repoPaths) {
    System.out.println("=====================================================================================");

    final AtomicInteger count = new AtomicInteger(0);
    final AtomicInteger ignored = new AtomicInteger(0);
    final AtomicInteger checked = new AtomicInteger(0);
    final Map<Path, Report> validFiles = new LinkedHashMap<>();
    final Map<Path, Report> invalidFiles = new LinkedHashMap<>();
    final Map<Path, Throwable> erroneousFiles = new LinkedHashMap<>();

    try {
        for (Path repoPath : repoPaths) {
            System.out.println("Asserting file integrity in " + repoPath);
            final Set<Path> thirdPartyFiles = readListOfThirdPartyFiles(repoPath);
            System.out.print("Checking files ...");
            Files.walk(repoPath).forEachOrdered((path) -> {
                File file = path.toFile();
                String pathStr = getCanonicalPath(file);

                count.incrementAndGet();

                if (isIgnored(path, pathStr)) {

                    ignored.incrementAndGet();

                } else {

                    checked.incrementAndGet();

                    try {

                        final String content = new String(Files.readAllBytes(path), StandardCharsets.UTF_8);
                        final Report report = check(path, content, thirdPartyFiles.contains(path));
                        (report.problems.isEmpty() ? validFiles : invalidFiles).put(path, report);

                    } catch (Throwable th) {
                        erroneousFiles.put(path, th);
                        // do not abort entirely, continue with next file
                    }
                }
            });
            System.out.println(" done.");
        }
    } catch (IOException e) {
        System.out.println("ERROR while walking folder tree:");
        e.printStackTrace();
        System.out.println("ABORTING");
        return false;
    }

    System.out.println("-------------------------------------------------------------------------------------");
    if (!invalidFiles.isEmpty()) {
        final Multimap<String, Path> pathsPerError = LinkedHashMultimap.create();
        invalidFiles.values().stream().forEachOrdered(
                (r) -> r.problems.stream().forEachOrdered((err) -> pathsPerError.put(err, r.path)));
        for (String err : pathsPerError.keySet()) {
            final Collection<Path> paths = pathsPerError.get(err);
            System.out.println("PROBLEM \"" + err + "\" in " + paths.size() + " files:");
            for (Path path : paths) {
                System.out.println("    " + path);
            }
        }
    } else {
        System.out.println("No problems.");
    }
    System.out.println("-------------------------------------------------------------------------------------");
    System.out.println("Checked " + checked + " files (" + ignored + " ignored; " + count + " total).");
    System.out.println("Valid files: " + validFiles.size());
    System.out.println("Invalid files: " + invalidFiles.size());
    System.out.println("Erroneous files: " + erroneousFiles.size());
    System.out.println("=====================================================================================");
    return invalidFiles.isEmpty() && erroneousFiles.isEmpty();
}

From source file:brooklyn.entity.nosql.cassandra.CassandraFabricImpl.java

@Override
public void init() {
    super.init();

    if (!getConfigRaw(CassandraDatacenter.SEED_SUPPLIER, true).isPresentAndNonNull())
        setConfig(CassandraDatacenter.SEED_SUPPLIER, getSeedSupplier());

    // track members
    addPolicy(PolicySpec.create(MemberTrackingPolicy.class).displayName("Cassandra Fabric Tracker")
            .configure("group", this));

    // Track first node's startup
    subscribeToMembers(this, CassandraDatacenter.FIRST_NODE_STARTED_TIME_UTC, new SensorEventListener<Long>() {
        @Override
        public void onEvent(SensorEvent<Long> event) {
            Long oldval = getAttribute(CassandraDatacenter.FIRST_NODE_STARTED_TIME_UTC);
            Long newval = event.getValue();
            if (oldval == null && newval != null) {
                setAttribute(CassandraDatacenter.FIRST_NODE_STARTED_TIME_UTC, newval);
                for (CassandraDatacenter member : Iterables.filter(getMembers(), CassandraDatacenter.class)) {
                    ((EntityInternal) member).setAttribute(CassandraDatacenter.FIRST_NODE_STARTED_TIME_UTC,
                            newval);
                }
            }
        }
    });

    // Track the datacenters for this cluster
    subscribeToMembers(this, CassandraDatacenter.DATACENTER_USAGE,
            new SensorEventListener<Multimap<String, Entity>>() {
                @Override
                public void onEvent(SensorEvent<Multimap<String, Entity>> event) {
                    Multimap<String, Entity> usage = calculateDatacenterUsage();
                    setAttribute(DATACENTER_USAGE, usage);
                    setAttribute(DATACENTERS, usage.keySet());
                }
            });
    subscribe(this, DynamicGroup.MEMBER_REMOVED, new SensorEventListener<Entity>() {
        @Override
        public void onEvent(SensorEvent<Entity> event) {
            Multimap<String, Entity> usage = calculateDatacenterUsage();
            setAttribute(DATACENTER_USAGE, usage);
            setAttribute(DATACENTERS, usage.keySet());
        }
    });
}

From source file:org.jenkinsci.plugins.all_changes.AllChangesAction.java

/**
 * Returns all changes which contribute to a build.
 *
 * @param build the build whose contributing changes should be collected
 * @return a multimap from each change entry to the builds that contain it
 */
public Multimap<ChangeLogSet.Entry, AbstractBuild> getAllChanges(AbstractBuild build) {
    Set<AbstractBuild> builds = getContributingBuilds(build);
    Multimap<String, ChangeLogSet.Entry> changes = ArrayListMultimap.create();
    for (AbstractBuild changedBuild : builds) {
        ChangeLogSet<ChangeLogSet.Entry> changeSet = changedBuild.getChangeSet();
        for (ChangeLogSet.Entry entry : changeSet) {
            changes.put(entry.getCommitId() + entry.getMsgAnnotated() + entry.getTimestamp(), entry);
        }
    }
    Multimap<ChangeLogSet.Entry, AbstractBuild> change2Build = HashMultimap.create();
    for (String changeKey : changes.keySet()) {
        ChangeLogSet.Entry change = changes.get(changeKey).iterator().next();
        for (ChangeLogSet.Entry entry : changes.get(changeKey)) {
            change2Build.put(change, entry.getParent().build);
        }
    }
    return change2Build;
}