Example usage for com.google.common.collect Multimap values

Introduction

On this page you can find usage examples for com.google.common.collect.Multimap#values(), collected from open-source projects.

Prototype

Collection<V> values();

Document

Returns a view collection containing the value from each key-value pair contained in this multimap, without collapsing duplicates (so values().size() == size()).
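
The returned collection is a live view: later changes to the multimap show through it, and duplicate values are kept. A minimal, self-contained sketch of these semantics (ArrayListMultimap is used here for illustration; any Multimap implementation behaves the same way):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;

public class MultimapValuesDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 10);
        scores.put("alice", 10); // duplicate value is kept
        scores.put("bob", 7);

        Collection<Integer> values = scores.values();
        // duplicates are not collapsed, so the sizes match
        System.out.println(values.size() == scores.size()); // true -> 3 == 3

        // the collection is a view: later writes to the multimap are visible
        scores.put("carol", 4);
        System.out.println(values.size()); // 4

        // removing through the view removes one backing key-value pair
        values.remove(10);
        System.out.println(scores.get("alice")); // [10]
    }
}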

Usage

From source file: org.apache.pulsar.broker.loadbalance.impl.SimpleLoadManagerImpl.java

private boolean isBrokerAvailableForRebalancing(String bundleName, long maxLoadLevel) {
    NamespaceName namespaceName = new NamespaceName(
            LoadManagerShared.getNamespaceNameFromBundleName(bundleName));
    Map<Long, Set<ResourceUnit>> availableBrokers = sortedRankings.get();
    // this does not have "http://" in front, hacky but no time to pretty up
    Multimap<Long, ResourceUnit> brokers = getFinalCandidates(namespaceName, availableBrokers);

    for (ResourceUnit underloadedRU : brokers.values()) {
        LoadReport currentLoadReport = currentLoadReports.get(underloadedRU);
        if (isBelowLoadLevel(currentLoadReport.getSystemResourceUsage(), maxLoadLevel)) {
            return true;
        }
    }
    return false;
}

From source file: com.giaybac.traprange.PDFTableExtractor.java

public List<Table> extract() {
    List<Table> retVal = new ArrayList<>();
    Multimap<Integer, Range<Integer>> pageIdNLineRangesMap = LinkedListMultimap.create();
    Multimap<Integer, TextPosition> pageIdNTextsMap = LinkedListMultimap.create();
    try {
        this.document = this.password != null ? PDDocument.load(inputStream, this.password)
                : PDDocument.load(inputStream);
        for (int pageId = 0; pageId < document.getNumberOfPages(); pageId++) {
            if (!exceptedPages.contains(pageId)
                    && (extractedPages.isEmpty() || extractedPages.contains(pageId))) {
                List<TextPosition> texts = extractTextPositions(pageId);//sorted by .getY() ASC
                //extract line ranges
                List<Range<Integer>> lineRanges = getLineRanges(pageId, texts);
                //extract column ranges
                List<TextPosition> textsByLineRanges = getTextsByLineRanges(lineRanges, texts);

                pageIdNLineRangesMap.putAll(pageId, lineRanges);
                pageIdNTextsMap.putAll(pageId, textsByLineRanges);
            }
        }
        //Calculate columnRanges
        List<Range<Integer>> columnRanges = getColumnRanges(pageIdNTextsMap.values());
        for (int pageId : pageIdNTextsMap.keySet()) {
            Table table = buildTable(pageId, (List) pageIdNTextsMap.get(pageId),
                    (List) pageIdNLineRangesMap.get(pageId), columnRanges);
            retVal.add(table);
            //debug
            logger.debug("Found " + table.getRows().size() + " row(s) and " + columnRanges.size()
                    + " column(s) of a table in page " + pageId);
        }
    } catch (IOException ex) {
        throw new RuntimeException("Parse pdf file fail", ex);
    } finally {
        if (this.document != null) {
            try {
                this.document.close();
            } catch (IOException ex) {
                logger.error(null, ex);
            }
        }
    }
    //return
    return retVal;
}
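
Note the raw (List) casts when building each table: they succeed because LinkedListMultimap.get returns a List, but declaring the two maps as ListMultimap instead of Multimap would make the casts unnecessary.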

From source file: com.palantir.atlasdb.cleaner.Scrubber.java

void scrubImmediately(final TransactionManager txManager,
        final Multimap<String, Cell> tableNameToCell, final long scrubTimestamp, final long commitTimestamp) {
    if (log.isInfoEnabled()) {
        log.info("Scrubbing a total of " + tableNameToCell.size() + " cells immediately.");
    }

    // Note that if the background scrub thread is also running at the same time, it will try to scrub
    // the same cells as the current thread (since these cells were queued for scrubbing right before
    // the hard delete transaction committed; while this is unfortunate (because it means we will be
    // doing more work than necessary), the behavior is still correct
    long nextImmutableTimestamp;
    while ((nextImmutableTimestamp = immutableTimestampSupplier.get()) < commitTimestamp) {
        try {
            if (log.isInfoEnabled()) {
                log.info(String.format(
                        "Sleeping because immutable timestamp %d has not advanced to at least commit timestamp %d",
                        nextImmutableTimestamp, commitTimestamp));
            }
            Thread.sleep(AtlasDbConstants.SCRUBBER_RETRY_DELAY_MILLIS);
        } catch (InterruptedException e) {
            log.error("Interrupted while waiting for immutableTimestamp to advance past commitTimestamp", e);
        }
    }

    List<Future<Void>> scrubFutures = Lists.newArrayList();
    for (List<Entry<String, Cell>> batch : Iterables.partition(tableNameToCell.entries(),
            batchSizeSupplier.get())) {
        final Multimap<String, Cell> batchMultimap = HashMultimap.create();
        for (Entry<String, Cell> e : batch) {
            batchMultimap.put(e.getKey(), e.getValue());
        }

        final Callable<Void> c = new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                if (log.isInfoEnabled()) {
                    log.info("Scrubbing " + batchMultimap.size() + " cells immediately.");
                }

                // Here we don't need to check scrub timestamps because we guarantee that scrubImmediately is called
                // AFTER the transaction commits
                scrubCells(txManager, batchMultimap, scrubTimestamp, TransactionType.AGGRESSIVE_HARD_DELETE);

                Multimap<Cell, Long> cellToScrubTimestamp = HashMultimap.create();

                cellToScrubTimestamp = Multimaps.invertFrom(
                        Multimaps.index(batchMultimap.values(), Functions.constant(scrubTimestamp)),
                        cellToScrubTimestamp);

                scrubberStore.markCellsAsScrubbed(cellToScrubTimestamp, batchSizeSupplier.get());

                if (log.isInfoEnabled()) {
                    log.info("Completed scrub immediately.");
                }
                return null;
            }
        };
        if (!inScrubThread.get()) {
            scrubFutures.add(exec.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    inScrubThread.set(true);
                    c.call();
                    return null;
                }
            }));
        } else {
            try {
                c.call();
            } catch (Exception e) {
                throw Throwables.throwUncheckedException(e);
            }
        }
    }

    for (Future<Void> future : scrubFutures) {
        try {
            future.get();
        } catch (InterruptedException e) {
            throw Throwables.throwUncheckedException(e);
        } catch (ExecutionException e) {
            throw Throwables.rewrapAndThrowUncheckedException(e);
        }
    }
}
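
The Multimaps.index / Multimaps.invertFrom pair above tags every cell in the batch (obtained through values()) with the constant scrub timestamp and then flips the mapping around. A minimal sketch of that idiom in isolation (plain String cells stand in for AtlasDB's Cell type):

import com.google.common.base.Functions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;

public class InvertFromSketch {
    public static void main(String[] args) {
        // table name -> cell, as in the batch multimap above
        Multimap<String, String> tableNameToCell = HashMultimap.create();
        tableNameToCell.put("table1", "cellA");
        tableNameToCell.put("table2", "cellB");

        long scrubTimestamp = 42L;

        // index every cell under the constant timestamp, then invert to cell -> timestamp
        Multimap<String, Long> cellToScrubTimestamp = Multimaps.invertFrom(
                Multimaps.index(tableNameToCell.values(), Functions.constant(scrubTimestamp)),
                HashMultimap.<String, Long>create());

        System.out.println(cellToScrubTimestamp); // {cellA=[42], cellB=[42]} (key order may vary)
    }
}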

From source file: com.b2international.snowowl.snomed.datastore.converter.SnomedConceptConverter.java

private void expandDescendants(List<SnomedConcept> results, final Set<String> conceptIds, String descendantKey,
        boolean stated) {
    if (!expand().containsKey(descendantKey)) {
        return;
    }

    final Options expandOptions = expand().get(descendantKey, Options.class);
    final boolean direct = checkDirect(expandOptions);

    try {

        final ExpressionBuilder expression = Expressions.builder();
        expression.filter(active());
        final ExpressionBuilder descendantFilter = Expressions.builder();
        if (stated) {
            descendantFilter.should(statedParents(conceptIds));
            if (!direct) {
                descendantFilter.should(statedAncestors(conceptIds));
            }
        } else {
            descendantFilter.should(parents(conceptIds));
            if (!direct) {
                descendantFilter.should(ancestors(conceptIds));
            }
        }
        expression.filter(descendantFilter.build());

        final Query<SnomedConceptDocument> query = Query.select(SnomedConceptDocument.class)
                .where(expression.build()).limit(Integer.MAX_VALUE).build();

        final RevisionSearcher searcher = context().service(RevisionSearcher.class);
        final Hits<SnomedConceptDocument> hits = searcher.search(query);

        if (hits.getTotal() < 1) {
            final SnomedConcepts descendants = new SnomedConcepts(0, 0);
            for (SnomedConcept concept : results) {
                if (stated) {
                    concept.setStatedDescendants(descendants);
                } else {
                    concept.setDescendants(descendants);
                }
            }
            return;
        }

        // in case of only one match and limit zero, use shortcut instead of loading all IDs and components
        // XXX won't work if number of results is greater than one, either use custom ConceptSearch or figure out how to expand descendants effectively
        final int limit = getLimit(expandOptions);
        if (conceptIds.size() == 1 && limit == 0) {
            for (SnomedConcept concept : results) {
                final SnomedConcepts descendants = new SnomedConcepts(0, hits.getTotal());
                if (stated) {
                    concept.setStatedDescendants(descendants);
                } else {
                    concept.setDescendants(descendants);
                }
            }
            return;
        }

        final Multimap<String, String> descendantsByAncestor = TreeMultimap.create();
        for (SnomedConceptDocument hit : hits) {
            final Set<String> parentsAndAncestors = newHashSet();
            if (stated) {
                parentsAndAncestors.addAll(LongSets.toStringSet(hit.getStatedParents()));
                if (!direct) {
                    parentsAndAncestors.addAll(LongSets.toStringSet(hit.getStatedAncestors()));
                }
            } else {
                parentsAndAncestors.addAll(LongSets.toStringSet(hit.getParents()));
                if (!direct) {
                    parentsAndAncestors.addAll(LongSets.toStringSet(hit.getAncestors()));
                }
            }

            parentsAndAncestors.retainAll(conceptIds);
            for (String ancestor : parentsAndAncestors) {
                descendantsByAncestor.put(ancestor, hit.getId());
            }
        }

        final Collection<String> componentIds = newHashSet(descendantsByAncestor.values());

        if (limit > 0 && !componentIds.isEmpty()) {
            // query descendants again
            final SnomedConcepts descendants = SnomedRequests.prepareSearchConcept().all().filterByActive(true)
                    .filterByIds(componentIds).setLocales(locales())
                    .setExpand(expandOptions.get("expand", Options.class)).build().execute(context());

            final Map<String, SnomedConcept> descendantsById = newHashMap();
            descendantsById.putAll(Maps.uniqueIndex(descendants, ID_FUNCTION));
            for (SnomedConcept concept : results) {
                final Collection<String> descendantIds = descendantsByAncestor.get(concept.getId());
                final List<SnomedConcept> currentDescendants = FluentIterable.from(descendantIds).limit(limit)
                        .transform(Functions.forMap(descendantsById)).toList();
                final SnomedConcepts descendantConcepts = new SnomedConcepts(currentDescendants, null, null,
                        limit, descendantIds.size());
                if (stated) {
                    concept.setStatedDescendants(descendantConcepts);
                } else {
                    concept.setDescendants(descendantConcepts);
                }
            }
        } else {
            for (SnomedConcept concept : results) {
                final Collection<String> descendantIds = descendantsByAncestor.get(concept.getId());
                final SnomedConcepts descendants = new SnomedConcepts(limit, descendantIds.size());
                if (stated) {
                    concept.setStatedDescendants(descendants);
                } else {
                    concept.setDescendants(descendants);
                }
            }
        }

    } catch (IOException e) {
        throw SnowowlRuntimeException.wrap(e);
    }
}
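
Note how newHashSet(descendantsByAncestor.values()) copies the values view into a set: a concept that descends from several of the requested ancestors appears once per ancestor in the view, and the copy deduplicates the IDs before the bulk search.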

From source file: com.b2international.snowowl.snomed.validation.detail.SnomedValidationIssueDetailExtension.java

private void extendConceptIssueLabels(BranchContext context, Collection<ValidationIssue> issues) {
    final RevisionSearcher searcher = context.service(RevisionSearcher.class);

    final List<ValidationIssue> conceptIssues = issues.stream()
            .filter(issue -> SnomedTerminologyComponentConstants.CONCEPT_NUMBER == issue.getAffectedComponent()
                    .getTerminologyComponentId())
            .collect(Collectors.toList());

    if (conceptIssues.isEmpty()) {
        return;
    }

    final Multimap<String, ValidationIssue> issuesByConceptId = Multimaps.index(conceptIssues,
            issue -> issue.getAffectedComponent().getComponentId());

    final Set<String> synonymIds = SnomedRequests.prepareGetSynonyms().build().execute(context).stream()
            .map(SnomedConcept::getId).collect(Collectors.toSet());

    final Multimap<String, String> affectedComponentLabelsByConcept = HashMultimap.create();

    searcher.scroll(Query.select(String[].class).from(SnomedDescriptionIndexEntry.class)
            .fields(SnomedDescriptionIndexEntry.Fields.CONCEPT_ID, SnomedDescriptionIndexEntry.Fields.TERM)
            .where(Expressions.builder().filter(SnomedDescriptionIndexEntry.Expressions.active())
                    .filter(SnomedDescriptionIndexEntry.Expressions.concepts(issuesByConceptId.keySet()))
                    .filter(SnomedDescriptionIndexEntry.Expressions.types(ImmutableSet.<String>builder()
                            .add(Concepts.FULLY_SPECIFIED_NAME).addAll(synonymIds).build()))
                    .build())
            .limit(SCROLL_SIZE).build()).forEach(hits -> {
                for (String[] hit : hits) {
                    affectedComponentLabelsByConcept.put(hit[0], hit[1]);
                }
            });

    if (!affectedComponentLabelsByConcept.isEmpty()) {
        issuesByConceptId.values().forEach(issue -> {
            final Collection<String> labels = affectedComponentLabelsByConcept
                    .get(issue.getAffectedComponent().getComponentId());
            issue.setAffectedComponentLabels(ImmutableList.copyOf(labels));
        });
    }
}
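
Because issuesByConceptId is built with Multimaps.index, its values() view contains every element of conceptIssues exactly once, so the final loop labels each issue. A small illustration of that property of index (hypothetical data):

import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Multimaps;

import java.util.Arrays;
import java.util.List;

public class IndexValuesSketch {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("apple", "avocado", "banana");
        ImmutableListMultimap<Character, String> byInitial = Multimaps.index(words, w -> w.charAt(0));
        // values() preserves every indexed element, in the original order
        System.out.println(byInitial.values()); // [apple, avocado, banana]
    }
}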

From source file: com.b2international.snowowl.snomed.datastore.converter.SnomedConceptConverter.java

private void expandAncestors(List<SnomedConcept> results, Set<String> conceptIds, String key, boolean stated) {
    if (!expand().containsKey(key)) {
        return;
    }

    final Options expandOptions = expand().get(key, Options.class);
    final boolean direct = checkDirect(expandOptions);

    final Multimap<String, String> ancestorsByDescendant = TreeMultimap.create();

    final LongToStringFunction toString = new LongToStringFunction();
    for (SnomedConcept concept : results) {
        final long[] parentIds = stated ? concept.getStatedParentIds() : concept.getParentIds();
        if (parentIds != null) {
            for (long parent : parentIds) {
                if (IComponent.ROOT_IDL != parent) {
                    ancestorsByDescendant.put(concept.getId(), toString.apply(parent));
                }
            }
        }
        if (!direct) {
            final long[] ancestorIds = stated ? concept.getStatedAncestorIds() : concept.getAncestorIds();
            if (ancestorIds != null) {
                for (long ancestor : ancestorIds) {
                    if (IComponent.ROOT_IDL != ancestor) {
                        ancestorsByDescendant.put(concept.getId(), toString.apply(ancestor));
                    }
                }
            }
        }
    }

    final int limit = getLimit(expandOptions);

    final Collection<String> componentIds = newHashSet(ancestorsByDescendant.values());

    if (limit > 0 && !componentIds.isEmpty()) {
        final SnomedConcepts ancestors = SnomedRequests.prepareSearchConcept().all().filterByActive(true)
                .filterByIds(componentIds).setLocales(locales())
                .setExpand(expandOptions.get("expand", Options.class)).build().execute(context());

        final Map<String, SnomedConcept> ancestorsById = newHashMap();
        ancestorsById.putAll(Maps.uniqueIndex(ancestors, ID_FUNCTION));
        for (SnomedConcept concept : results) {
            final Collection<String> ancestorIds = ancestorsByDescendant.get(concept.getId());
            final List<SnomedConcept> conceptAncestors = FluentIterable.from(ancestorIds).limit(limit)
                    .transform(Functions.forMap(ancestorsById)).toList();
            final SnomedConcepts ancestorConcepts = new SnomedConcepts(conceptAncestors, null, null, limit,
                    ancestorIds.size());
            if (stated) {
                concept.setStatedAncestors(ancestorConcepts);
            } else {
                concept.setAncestors(ancestorConcepts);
            }
        }
    } else {
        for (SnomedConcept concept : results) {
            final Collection<String> ancestorIds = ancestorsByDescendant.get(concept.getId());
            final SnomedConcepts ancestors = new SnomedConcepts(limit, ancestorIds.size());
            if (stated) {
                concept.setStatedAncestors(ancestors);
            } else {
                concept.setAncestors(ancestors);
            }
        }
    }
}

From source file: com.flexive.core.search.genericSQL.GenericSQLDataFilter.java

/**
 * Builds an 'AND' condition.
 *
 * @param sb the StringBuilder to use
 * @param br the brace
 * @throws FxSqlSearchException if the build failed
 */
private void buildAnd(StringBuilder sb, Brace br) throws FxSqlSearchException {
    // Start AND
    if (br.size() > 1) {
        final Multimap<String, ConditionTableInfo> tables = getUsedContentTables(br, true);
        // for "AND" we can only optimize when ALL flatstorage conditions are not multi-lang and on the same level,
        // i.e. that table must have exactly one flat-storage entry, and we cannot optimize if an IS NULL is present
        if (tables.size() == 1 && tables.values().iterator().next().isFlatStorage()
                && !tables.values().iterator().next().isMultiLang() && !containsIsNullCondition(br)) {
            sb.append(getOptimizedFlatStorageSubquery(br, tables.keySet().iterator().next(), true));
            return;
        }
        if (tables.size() == 1 && tables.keys().iterator().next().equals(DatabaseConst.TBL_CONTENT)) {
            // combine main table selects into a single one
            sb.append("(SELECT id,ver," + getEmptyLanguage() + " as lang FROM " + DatabaseConst.TBL_CONTENT
                    + " cd" + " WHERE " + getOptimizedMainTableConditions(br, "cd") + ")");
            return;
        }
        // check if there are two or more flat storage queries in the same level that can be grouped
        try {
            final Brace grouped = br.groupConditions(new Brace.GroupFunction() {
                @Override
                public Object apply(Condition cond) {
                    try {
                        return getPropertyInfo(cond);
                    } catch (FxSqlSearchException e) {
                        throw e.asRuntimeException();
                    }
                }
            });
            if (grouped != br) {
                // reorg happened - process new version
                if (LOG.isTraceEnabled()) {
                    LOG.trace("AND statement reorganized, new statement: " + grouped);
                }
                buildAnd(sb, grouped);
                return;
            }
        } catch (SqlParserException e) {
            throw new FxSqlSearchException(e);
        }
    }
    int pos = 0;
    final StringBuilder combinedConditions = new StringBuilder();
    int firstId = -1;
    for (BraceElement be : br.getElements()) {
        if (pos == 0) {
            firstId = be.getId();
            // TODO: do we need .lang here?
            sb.append(("(SELECT tbl" + firstId + ".id,tbl" + firstId + ".ver,tbl" + firstId + ".lang FROM\n"));
        } else {
            sb.append(",");
            combinedConditions.append((pos > 1) ? " AND " : " ").append("tbl").append(firstId).append(".id=tbl")
                    .append(be.getId()).append(".id AND ").append("tbl").append(firstId).append(".ver=tbl")
                    .append(be.getId()).append(".ver AND ").append("(tbl").append(firstId)
                    .append(".lang=0 or tbl").append(firstId).append(".lang IS NULL OR ").append("tbl")
                    .append(be.getId()).append(".lang=0 OR tbl").append(be.getId()).append(".lang IS NULL OR ")
                    .append("tbl").append(firstId).append(".lang=tbl").append(be.getId()).append(".lang)");
        }

        if (be instanceof Condition) {
            sb.append(getConditionSubQuery(br.getStatement(), (Condition) be));
        } else if (be instanceof Brace) {
            build(sb, (Brace) be);
        } else {
            throw new FxSqlSearchException(LOG, "ex.sqlSearch.filter.invalidBrace", be);
        }
        sb.append(" tbl").append(be.getId()).append("\n");
        pos++;
    }
    // Where links the tables together
    sb.append(" WHERE ");
    sb.append(combinedConditions);
    // Close AND
    sb.append(")");
}
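
Both early returns hinge on tables.size() == 1: that guarantee is what lets tables.values().iterator().next() (evaluated twice in the flat-storage check) and tables.keys().iterator().next() all resolve to the same single entry.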

From source file: eu.itesla_project.simulation.ImpactAnalysisTool.java

@Override
public void run(CommandLine line) throws Exception {
    ComponentDefaultConfig config = new ComponentDefaultConfig();
    String caseFormat = line.getOptionValue("case-format");
    Path caseDir = Paths.get(line.getOptionValue("case-dir"));
    String caseBaseName = null;
    if (line.hasOption("case-basename")) {
        caseBaseName = line.getOptionValue("case-basename");
    }
    final Set<String> contingencyIds = line.hasOption("contingencies")
            ? Sets.newHashSet(line.getOptionValue("contingencies").split(","))
            : null;
    Path outputCsvFile = null;
    if (line.hasOption("output-csv-file")) {
        outputCsvFile = Paths.get(line.getOptionValue("output-csv-file"));
    }

    try (ComputationManager computationManager = new LocalComputationManager()) {

        ContingenciesProvider contingenciesProvider = config
                .findFactoryImplClass(ContingenciesProviderFactory.class).newInstance().create();
        SimulatorFactory simulatorFactory = config.findFactoryImplClass(SimulatorFactory.class).newInstance();

        Importer importer = Importers.getImporter(caseFormat, computationManager);
        if (importer == null) {
            throw new ITeslaException("Format " + caseFormat + " not supported");
        }

        if (caseBaseName != null) {

            Multimap<String, SecurityIndex> securityIndexesPerContingency = runImpactAnalysis(caseDir,
                    caseBaseName, contingencyIds, importer, computationManager, simulatorFactory,
                    contingenciesProvider);

            if (securityIndexesPerContingency != null) {
                if (outputCsvFile == null) {
                    prettyPrint(securityIndexesPerContingency);
                } else {
                    writeCsv(securityIndexesPerContingency, outputCsvFile);
                }
            }
        } else {
            if (outputCsvFile == null) {
                throw new RuntimeException(
                        "In case of multiple impact analyses, only ouput to csv file is supported");
            }
            Map<String, Map<SecurityIndexId, SecurityIndex>> securityIndexesPerCase = new LinkedHashMap<>();
            Importers.importAll(caseDir, importer, false, network -> {
                try {
                    Multimap<String, SecurityIndex> securityIndexesPerContingency = runImpactAnalysis(network,
                            contingencyIds, computationManager, simulatorFactory, contingenciesProvider);
                    if (securityIndexesPerContingency == null) {
                        securityIndexesPerCase.put(network.getId(), null);
                    } else {
                        Map<SecurityIndexId, SecurityIndex> securityIndexesPerId = securityIndexesPerContingency
                                .values().stream().collect(Collectors.toMap(SecurityIndex::getId, e -> e));
                        securityIndexesPerCase.put(network.getId(), securityIndexesPerId);
                    }
                } catch (Exception e) {
                    LOGGER.error(e.toString(), e);
                }
            }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + "..."));

            writeCsv(securityIndexesPerCase, outputCsvFile);
        }
    }
}
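
In the multi-case branch, securityIndexesPerContingency.values() feeds a stream that reindexes every SecurityIndex by its ID; note that Collectors.toMap throws an IllegalStateException if two indexes ever share an ID.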

From source file: eu.itesla_project.modules.simulation.ImpactAnalysisTool.java

@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    String caseFormat = line.getOptionValue("case-format");
    Path caseDir = Paths.get(line.getOptionValue("case-dir"));
    String caseBaseName = null;
    if (line.hasOption("case-basename")) {
        caseBaseName = line.getOptionValue("case-basename");
    }
    final Set<String> contingencyIds = line.hasOption("contingencies")
            ? Sets.newHashSet(line.getOptionValue("contingencies").split(","))
            : null;
    Path outputCsvFile = null;
    if (line.hasOption("output-csv-file")) {
        outputCsvFile = Paths.get(line.getOptionValue("output-csv-file"));
    }

    try (ComputationManager computationManager = new LocalComputationManager()) {

        DynamicDatabaseClientFactory ddbFactory = config.getDynamicDbClientFactoryClass().newInstance();
        ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
                .newInstance().create();
        SimulatorFactory simulatorFactory = config.getSimulatorFactoryClass().newInstance();

        Importer importer = Importers.getImporter(caseFormat, computationManager);
        if (importer == null) {
            throw new ITeslaException("Format " + caseFormat + " not supported");
        }

        if (caseBaseName != null) {

            Multimap<String, SecurityIndex> securityIndexesPerContingency = runImpactAnalysis(caseDir,
                    caseBaseName, contingencyIds, importer, computationManager, simulatorFactory, ddbFactory,
                    contingencyDb);

            if (securityIndexesPerContingency != null) {
                if (outputCsvFile == null) {
                    prettyPrint(securityIndexesPerContingency);
                } else {
                    writeCsv(securityIndexesPerContingency, outputCsvFile);
                }
            }
        } else {
            if (outputCsvFile == null) {
                throw new RuntimeException(
                        "In case of multiple impact analyses, only ouput to csv file is supported");
            }
            Map<String, Map<SecurityIndexId, SecurityIndex>> securityIndexesPerCase = new LinkedHashMap<>();
            Importers.importAll(caseDir, importer, false, network -> {
                try {
                    Multimap<String, SecurityIndex> securityIndexesPerContingency = runImpactAnalysis(network,
                            contingencyIds, computationManager, simulatorFactory, ddbFactory, contingencyDb);
                    if (securityIndexesPerContingency == null) {
                        securityIndexesPerCase.put(network.getId(), null);
                    } else {
                        Map<SecurityIndexId, SecurityIndex> securityIndexesPerId = securityIndexesPerContingency
                                .values().stream().collect(Collectors.toMap(SecurityIndex::getId, e -> e));
                        securityIndexesPerCase.put(network.getId(), securityIndexesPerId);
                    }
                } catch (Exception e) {
                    LOGGER.error(e.toString(), e);
                }
            }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + "..."));

            writeCsv(securityIndexesPerCase, outputCsvFile);
        }
    }
}

From source file: com.b2international.snowowl.snomed.core.ecl.SnomedEclRefinementEvaluator.java

/**
 * Evaluates partial results coming from a binary operator's left and right side within attribute group based refinements.
 * @param groupCardinality - the cardinality to check
 * @param groupOperator - the operator to use (AND or OR, aka {@link Sets#intersection(Set, Set)} or {@link Sets#union(Set, Set)})
 * @return a function that can be chained via {@link Promise#then(Function)} to evaluate partial results when they are available
 */
private Function<List<Object>, Collection<Property>> evalParts(final Range<Long> groupCardinality,
        BinaryOperator<Set<Integer>> groupOperator) {
    return input -> {
        final Collection<Property> left = (Collection<Property>) input.get(0);
        final Collection<Property> right = (Collection<Property>) input.get(1);

        final Collection<Property> matchingAttributes = newHashSet();

        // group left and right side by source ID
        final Multimap<String, Property> leftRelationshipsBySource = Multimaps.index(left,
                Property::getObjectId);
        final Multimap<String, Property> rightRelationshipsBySource = Multimaps.index(right,
                Property::getObjectId);

        // check that each ID has the required number of groups with left and right relationships
        for (String sourceConcept : Iterables.concat(leftRelationshipsBySource.keySet(),
                rightRelationshipsBySource.keySet())) {
            final Multimap<Integer, Property> validGroups = ArrayListMultimap.create();

            final Collection<Property> leftSourceRelationships = leftRelationshipsBySource.get(sourceConcept);
            final Collection<Property> rightSourceRelationships = rightRelationshipsBySource.get(sourceConcept);

            final Multimap<Integer, Property> leftRelationshipsByGroup = Multimaps
                    .index(leftSourceRelationships, Property::getGroup);
            final Multimap<Integer, Property> rightRelationshipsByGroup = Multimaps
                    .index(rightSourceRelationships, Property::getGroup);

            for (Integer group : groupOperator.apply(leftRelationshipsByGroup.keySet(),
                    rightRelationshipsByGroup.keySet())) {
                validGroups.get(group).addAll(leftRelationshipsByGroup.get(group));
                validGroups.get(group).addAll(rightRelationshipsByGroup.get(group));
            }

            if (groupCardinality.contains((long) validGroups.keySet().size())) {
                matchingAttributes.addAll(validGroups.values());
            }
        }
        return matchingAttributes;
    };
}
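
The cardinality check is applied to the number of valid groups per source concept; when it passes, validGroups.values() flattens every group's members into matchingAttributes. The flattening behaviour in isolation (hypothetical data):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;

public class GroupFlattenSketch {
    public static void main(String[] args) {
        Multimap<Integer, String> validGroups = ArrayListMultimap.create();
        validGroups.putAll(1, Arrays.asList("p1", "p2"));
        validGroups.putAll(2, Arrays.asList("p3"));
        // values() concatenates the members of every group
        System.out.println(validGroups.values()); // [p1, p2, p3] (order follows key iteration)
    }
}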