Example usage for com.google.common.collect Multimap values

List of usage examples for com.google.common.collect Multimap values

Introduction

On this page you can find usage examples for com.google.common.collect Multimap values().

Prototype

Collection<V> values();

Document

Returns a view collection containing the value from each key-value pair contained in this multimap, without collapsing duplicates (so values().size() == size()).
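
For orientation before the project examples, here is a minimal, self-contained sketch of that contract; the class name and data are invented for illustration:

import java.util.Collection;

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class MultimapValuesDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = ArrayListMultimap.create();
        scores.put("alice", 1);
        scores.put("alice", 1); // duplicate key-value pair is kept by ArrayListMultimap
        scores.put("bob", 2);

        Collection<Integer> values = scores.values();
        System.out.println(values.size()); // 3 == scores.size(); duplicates are not collapsed

        // values() is a live view: removing through it also updates the multimap
        values.remove(1);
        System.out.println(scores.size()); // 2
    }
}

Note that a HashMultimap would collapse the duplicate pair at insertion time, so values() would then report only two elements.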

Usage

From source file:com.palantir.atlasdb.cleaner.Scrubber.java

/**
 * @return number of cells read from _scrub table
 */
private int scrubSomeCells(SortedMap<Long, Multimap<String, Cell>> scrubTimestampToTableNameToCell,
        final TransactionManager txManager, long maxScrubTimestamp) {

    // Don't call expensive toString() if trace logging is off
    if (log.isTraceEnabled()) {
        log.trace("Attempting to scrub cells: " + scrubTimestampToTableNameToCell);
    }

    if (log.isInfoEnabled()) {
        int numCells = 0;
        Set<String> tables = Sets.newHashSet();
        for (Multimap<String, Cell> v : scrubTimestampToTableNameToCell.values()) {
            tables.addAll(v.keySet());
            numCells += v.size();
        }
        log.info("Attempting to scrub " + numCells + " cells from tables " + tables);
    }

    if (scrubTimestampToTableNameToCell.size() == 0) {
        return 0; // No cells left to scrub
    }

    Multimap<Long, Cell> toRemoveFromScrubQueue = HashMultimap.create();

    int numCellsReadFromScrubTable = 0;
    List<Future<Void>> scrubFutures = Lists.newArrayList();
    for (Map.Entry<Long, Multimap<String, Cell>> entry : scrubTimestampToTableNameToCell.entrySet()) {
        final long scrubTimestamp = entry.getKey();
        final Multimap<String, Cell> tableNameToCell = entry.getValue();

        numCellsReadFromScrubTable += tableNameToCell.size();

        long commitTimestamp = getCommitTimestampRollBackIfNecessary(scrubTimestamp, tableNameToCell);
        if (commitTimestamp >= maxScrubTimestamp) {
            // We cannot scrub this yet because not all transactions can read this value.
            continue;
        } else if (commitTimestamp != TransactionConstants.FAILED_COMMIT_TS) {
            // This is CRITICAL; don't scrub if the hard delete transaction didn't actually finish
            // (we still remove it from the _scrub table with the call to markCellsAsScrubbed though),
            // or else we could cause permanent data loss if the hard delete transaction failed after
            // queuing cells to scrub but before successfully committing
            for (final List<Entry<String, Cell>> batch : Iterables.partition(tableNameToCell.entries(),
                    batchSizeSupplier.get())) {
                final Multimap<String, Cell> batchMultimap = HashMultimap.create();
                for (Entry<String, Cell> e : batch) {
                    batchMultimap.put(e.getKey(), e.getValue());
                }
                scrubFutures.add(exec.submit(new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        scrubCells(txManager, batchMultimap, scrubTimestamp,
                                aggressiveScrub ? TransactionType.AGGRESSIVE_HARD_DELETE
                                        : TransactionType.HARD_DELETE);
                        return null;
                    }
                }));
            }
        }
        toRemoveFromScrubQueue.putAll(scrubTimestamp, tableNameToCell.values());
    }

    for (Future<Void> future : scrubFutures) {
        Futures.getUnchecked(future);
    }

    Multimap<Cell, Long> cellToScrubTimestamp = HashMultimap.create();
    scrubberStore.markCellsAsScrubbed(Multimaps.invertFrom(toRemoveFromScrubQueue, cellToScrubTimestamp),
            batchSizeSupplier.get());

    if (log.isTraceEnabled()) {
        log.trace("Finished scrubbing cells: " + scrubTimestampToTableNameToCell);
    }

    if (log.isInfoEnabled()) {
        Set<String> tables = Sets.newHashSet();
        for (Multimap<String, Cell> v : scrubTimestampToTableNameToCell.values()) {
            tables.addAll(v.keySet());
        }
        long minTimestamp = Collections.min(scrubTimestampToTableNameToCell.keySet());
        long maxTimestamp = Collections.max(scrubTimestampToTableNameToCell.keySet());
        log.info("Finished scrubbing " + numCellsReadFromScrubTable + " cells at "
                + scrubTimestampToTableNameToCell.size() + " timestamps (" + minTimestamp + "..." + maxTimestamp
                + ") from tables " + tables);
    }

    return numCellsReadFromScrubTable;
}

From source file:org.opennms.features.topology.plugins.topo.linkd.internal.LldpLinkStatusProvider.java

@Override
protected List<EdgeAlarmStatusSummary> getEdgeAlarmSummaries(List<Integer> linkIds) {
    List<LldpLink> links = m_lldpLinkDao.findLinksForIds(linkIds);

    Multimap<String, EdgeAlarmStatusSummary> summaryMap = HashMultimap.create();
    for (LldpLink sourceLink : links) {

        OnmsNode sourceNode = sourceLink.getNode();
        LldpElement sourceElement = sourceNode.getLldpElement();

        for (LldpLink targetLink : links) {
            OnmsNode targetNode = targetLink.getNode();
            LldpElement targetLldpElement = targetNode.getLldpElement();

            // Compare the remote link data to the target node's LLDP element data
            boolean portIdsMatch = sourceLink.getLldpRemPortId().equals(targetLink.getLldpPortId())
                    && targetLink.getLldpRemPortId().equals(sourceLink.getLldpPortId());
            boolean portDescrsMatch = sourceLink.getLldpRemPortDescr().equals(targetLink.getLldpPortDescr())
                    && targetLink.getLldpRemPortDescr().equals(sourceLink.getLldpPortDescr());
            boolean chassisIdsMatch = sourceLink.getLldpRemChassisId().equals(targetLldpElement.getLldpChassisId())
                    && targetLink.getLldpRemChassisId().equals(sourceElement.getLldpChassisId());
            boolean sysnamesMatch = sourceLink.getLldpRemSysname().equals(targetLldpElement.getLldpSysname())
                    && targetLink.getLldpRemSysname().equals(sourceElement.getLldpSysname());
            boolean portIdSubTypesMatch = sourceLink.getLldpRemPortIdSubType() == targetLink.getLldpPortIdSubType()
                    && targetLink.getLldpRemPortIdSubType() == sourceLink.getLldpPortIdSubType();

            if (portIdsMatch && portDescrsMatch && chassisIdsMatch && sysnamesMatch && portIdSubTypesMatch) {

                summaryMap.put(sourceNode.getNodeId() + ":" + sourceLink.getLldpPortIfindex(),
                        new EdgeAlarmStatusSummary(sourceLink.getId(), targetLink.getId(), null));

            }
        }
    }

    List<OnmsAlarm> alarms = getLinkDownAlarms();

    for (OnmsAlarm alarm : alarms) {
        String key = alarm.getNodeId() + ":" + alarm.getIfIndex();
        if (summaryMap.containsKey(key)) {
            Collection<EdgeAlarmStatusSummary> summaries = summaryMap.get(key);
            for (EdgeAlarmStatusSummary summary : summaries) {
                summary.setEventUEI(alarm.getUei());
            }

        }

    }
    return new ArrayList<EdgeAlarmStatusSummary>(summaryMap.values());
}

From source file:com.b2international.snowowl.snomed.validation.detail.SnomedValidationIssueDetailExtension.java

private void extendRelationshipIssueLabels(BranchContext context, Collection<ValidationIssue> issues) {
    final RevisionSearcher searcher = context.service(RevisionSearcher.class);

    final List<ValidationIssue> relationshipIssues = issues.stream()
            .filter(issue -> SnomedTerminologyComponentConstants.RELATIONSHIP_NUMBER == issue
                    .getAffectedComponent().getTerminologyComponentId())
            .collect(Collectors.toList());

    if (relationshipIssues.isEmpty()) {
        return;
    }

    final Multimap<String, ValidationIssue> issuesByRelationshipId = Multimaps.index(relationshipIssues,
            issue -> issue.getAffectedComponent().getComponentId());

    final Set<String> synonymIds = SnomedRequests.prepareGetSynonyms().build().execute(context).stream()
            .map(SnomedConcept::getId).collect(Collectors.toSet());

    final Set<String> conceptsToFetch = newHashSet();

    searcher.scroll(Query.select(String[].class).from(SnomedRelationshipIndexEntry.class)
            .fields(SnomedRelationshipIndexEntry.Fields.SOURCE_ID, SnomedRelationshipIndexEntry.Fields.TYPE_ID,
                    SnomedRelationshipIndexEntry.Fields.DESTINATION_ID)
            .where(SnomedRelationshipIndexEntry.Expressions.ids(issuesByRelationshipId.keySet()))
            .limit(SCROLL_SIZE).build()).forEach(hits -> {
                for (String[] hit : hits) {
                    conceptsToFetch.add(hit[0]);
                    conceptsToFetch.add(hit[1]);
                    conceptsToFetch.add(hit[2]);
                }
            });

    final Multimap<String, String> affectedComponentLabelsByConcept = HashMultimap.create();

    searcher.scroll(Query.select(String[].class).from(SnomedDescriptionIndexEntry.class)
            .fields(SnomedDescriptionIndexEntry.Fields.CONCEPT_ID, SnomedDescriptionIndexEntry.Fields.TERM)
            .where(Expressions.builder().filter(SnomedDescriptionIndexEntry.Expressions.active())
                    .filter(SnomedDescriptionIndexEntry.Expressions.concepts(conceptsToFetch))
                    .filter(SnomedDescriptionIndexEntry.Expressions.types(ImmutableSet.<String>builder()
                            .add(Concepts.FULLY_SPECIFIED_NAME).addAll(synonymIds).build()))
                    .build())
            .limit(SCROLL_SIZE).build()).forEach(hits -> {
                for (String[] hit : hits) {
                    affectedComponentLabelsByConcept.put(hit[0], hit[1]);
                }
            });

    if (!affectedComponentLabelsByConcept.isEmpty()) {
        issuesByRelationshipId.values().forEach(issue -> {
            final Collection<String> labels = affectedComponentLabelsByConcept
                    .get(issue.getAffectedComponent().getComponentId());
            issue.setAffectedComponentLabels(ImmutableList.copyOf(labels));
        });
    }
}

From source file:org.gradle.model.internal.manage.schema.extract.ImplTypeSchemaExtractionStrategySupport.java

private <R> List<ModelProperty<?>> extractPropertySchemas(ModelSchemaExtractionContext<R> extractionContext,
        Multimap<String, Method> methodsByName) {
    List<ModelProperty<?>> properties = Lists.newArrayList();
    Set<Method> handledMethods = Sets.newHashSet();

    for (String methodName : methodsByName.keySet()) {
        Collection<Method> methods = methodsByName.get(methodName);

        List<Method> overloadedMethods = getOverloadedMethods(methods);
        if (overloadedMethods != null) {
            handleOverloadedMethods(extractionContext, overloadedMethods);
            continue;
        }

        if (methodName.startsWith("get") && !methodName.equals("get")) {
            PropertyAccessorExtractionContext getterContext = new PropertyAccessorExtractionContext(methods,
                    isGetterDefinedInManagedType(extractionContext, methodName, methods));

            char getterPropertyNameFirstChar = methodName.charAt(3);
            if (!Character.isUpperCase(getterPropertyNameFirstChar)) {
                handleInvalidGetter(extractionContext, getterContext,
                        "the 4th character of the getter method name must be an uppercase character");
                continue;
            }

            String propertyNameCapitalized = methodName.substring(3);
            String propertyName = StringUtils.uncapitalize(propertyNameCapitalized);
            String setterName = "set" + propertyNameCapitalized;
            Collection<Method> setterMethods = methodsByName.get(setterName);
            PropertyAccessorExtractionContext setterContext = !setterMethods.isEmpty()
                    ? new PropertyAccessorExtractionContext(setterMethods)
                    : null;

            ModelProperty<?> property = extractPropertySchema(extractionContext, propertyName, getterContext,
                    setterContext, handledMethods);
            if (property != null) {
                properties.add(property);

                handledMethods.addAll(getterContext.getDeclaringMethods());
                if (setterContext != null) {
                    handledMethods.addAll(setterContext.getDeclaringMethods());
                }
            }
        }
    }

    validateAllNecessaryMethodsHandled(extractionContext, methodsByName.values(), handledMethods);
    return properties;
}

From source file:com.github.rinde.rinsim.ui.SimulationViewer.java

void panelsLayout(Multimap<Integer, PanelRenderer> panels) {
    if (panels.isEmpty()) {
        createContent(this);
    } else {

        final SashForm vertical = new SashForm(this, SWT.VERTICAL | SWT.SMOOTH);
        vertical.setLayout(new FillLayout());

        final int topHeight = configurePanels(vertical, panels.removeAll(SWT.TOP));

        final SashForm horizontal = new SashForm(vertical, SWT.HORIZONTAL | SWT.SMOOTH);
        horizontal.setLayout(new FillLayout());

        final int leftWidth = configurePanels(horizontal, panels.removeAll(SWT.LEFT));

        // create canvas
        createContent(horizontal);

        final int rightWidth = configurePanels(horizontal, panels.removeAll(SWT.RIGHT));
        final int bottomHeight = configurePanels(vertical, panels.removeAll(SWT.BOTTOM));

        final int canvasHeight = size.y - topHeight - bottomHeight;
        if (topHeight > 0 && bottomHeight > 0) {
            vertical.setWeights(varargs(topHeight, canvasHeight, bottomHeight));
        } else if (topHeight > 0) {
            vertical.setWeights(varargs(topHeight, canvasHeight));
        } else if (bottomHeight > 0) {
            vertical.setWeights(varargs(canvasHeight, bottomHeight));
        }

        final int canvasWidth = size.x - leftWidth - rightWidth;
        if (leftWidth > 0 && rightWidth > 0) {
            horizontal.setWeights(varargs(leftWidth, canvasWidth, rightWidth));
        } else if (leftWidth > 0) {
            horizontal.setWeights(varargs(leftWidth, canvasWidth));
        } else if (rightWidth > 0) {
            horizontal.setWeights(varargs(canvasWidth, rightWidth));
        }

        checkState(panels.isEmpty(), "Invalid preferred position set for panels: %s", panels.values());
    }
}

From source file:edu.cmu.lti.oaqa.baseqa.passage.rerank.scorers.LuceneInMemoryPassageScorer.java

@Override
public void prepare(JCas jcas) throws AnalysisEngineProcessException {
    uri2conf2score = HashBasedTable.create();
    uri2conf2rank = HashBasedTable.create();
    // index
    List<Passage> passages = TypeUtil.getRankedPassages(jcas);
    RAMDirectory index = new RAMDirectory();
    try (IndexWriter writer = new IndexWriter(index, new IndexWriterConfig(analyzer))) {
        for (Passage passage : passages) {
            Document doc = new Document();
            doc.add(new StringField("uri", TypeUtil.getUriOffsets(passage, ":"), Field.Store.YES));
            doc.add(new TextField("text", passage.getText(), Field.Store.NO));
            writer.addDocument(doc);
        }
        writer.close(); // flush buffered documents so the reader opened next sees them all
        reader = DirectoryReader.open(index);
        searcher = new IndexSearcher(reader);
    } catch (IOException e) {
        throw new AnalysisEngineProcessException(e);
    }
    // queries
    List<String> tokens = TypeUtil.getOrderedTokens(jcas).stream().map(Token::getCoveredText)
            .map(QueryParser::escape).filter(name -> !name.isEmpty() && !stoplist.contains(name.toLowerCase()))
            .collect(toList());
    Multimap<String, String> ctype2names = HashMultimap.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        Set<String> ctypes = TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .collect(toSet());
        String cnames = TypeUtil.getConceptNames(concept).stream()
                .map(LuceneInMemoryPassageScorer::normalizeQuoteName).distinct().collect(joining(" "));
        ctypes.stream().filter(t -> !FORBIDDEN_CTYPES.contains(t))
                .forEach(ctype -> ctype2names.put(ctype, cnames));
    }
    Multimap<String, String> ctypepre2names = HashMultimap.create();
    ctype2names.asMap().entrySet().forEach(e -> ctypepre2names.putAll(e.getKey().split(":")[0], e.getValue()));
    Multimap<String, String> ctype2mentions = HashMultimap.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        Set<String> ctypes = TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .collect(toSet());
        String cmentions = TypeUtil.getConceptMentions(concept).stream().map(ConceptMention::getMatchedName)
                .map(LuceneInMemoryPassageScorer::normalizeQuoteName).distinct().collect(joining(" "));
        ctypes.stream().filter(t -> !FORBIDDEN_CTYPES.contains(t))
                .forEach(ctype -> ctype2mentions.put(ctype, cmentions));
    }
    Multimap<String, String> ctypepre2mentions = HashMultimap.create();
    ctype2mentions.asMap().entrySet()
            .forEach(e -> ctypepre2mentions.putAll(e.getKey().split(":")[0], e.getValue()));
    LOG.debug("Query strings");
    ExecutorService service = Executors.newCachedThreadPool();
    // execute against all tokens
    service.submit(() -> {
        String concatTokens = String.join(" ", tokens);
        LOG.debug(" - Concatenated tokens: {}", concatTokens);
        search(concatTokens, "tokens_concatenated@all");
    });
    // execute against concatenated concept names
    service.submit(() -> {
        String concatCnames = String.join(" ", ctype2names.values());
        LOG.debug(" - Concatenated concept names: {}", concatCnames);
        search(concatCnames, "cnames_concatenated@all");
    });
    // execute against concatenated concept mentions
    service.submit(() -> {
        String concatCmentions = String.join(" ", ctype2mentions.values());
        LOG.debug(" - Concatenated concept mentions: {}", concatCmentions);
        search(concatCmentions, "cmentions_concatenated@all");
    });
    // execute against concept names for each concept
    service.submit(() -> {
        for (String cnames : ImmutableSet.copyOf(ctype2names.values())) {
            LOG.debug(" - Concatenated concept names: {}", cnames);
            search(cnames, "cnames_individual@all");
        }
    });
    // execute against concept names for each concept type
    service.submit(() -> {
        for (String ctype : ctype2names.keySet()) {
            String concatCnames = String.join(" ", ctype2names.get(ctype));
            LOG.debug(" - Concatenated concept names for {}: {}", ctype, concatCnames);
            search(concatCnames, "cnames@" + ctype + "@all");
        }
    });
    // execute against concept names for each concept type prefix
    service.submit(() -> {
        for (String ctypepre : ctypepre2names.keySet()) {
            String concatCnames = String.join(" ", ctypepre2names.get(ctypepre));
            LOG.debug(" - Concatenated concept names for {}: {}", ctypepre, concatCnames);
            search(concatCnames, "cnames@" + ctypepre + "@all");
        }
    });
    // execute against concept mentions for each concept
    service.submit(() -> {
        for (String cmentions : ImmutableSet.copyOf(ctype2mentions.values())) {
            LOG.debug(" - Concatenated concept mentions: {}", cmentions);
            search(cmentions, "cmentions_individual@all");
        }
    });
    // execute against concept mentions for each concept type
    service.submit(() -> {
        for (String ctype : ctype2mentions.keySet()) {
            String concatCmentions = String.join(" ", ctype2mentions.get(ctype));
            LOG.debug(" - Concatenated concept mentions for {}: {}", ctype, concatCmentions);
            search(concatCmentions, "cmentions@" + ctype + "@all");
        }
    });
    // execute against concept mentions for each concept type prefix
    service.submit(() -> {
        for (String ctypepre : ctypepre2mentions.keySet()) {
            String concatCmentions = String.join(" ", ctypepre2mentions.get(ctypepre));
            LOG.debug(" - Concatenated concept mentions for {}: {}", ctypepre, concatCmentions);
            search(concatCmentions, "cmentions@" + ctypepre + "@all");
        }
    });
    service.shutdown();
    try {
        service.awaitTermination(1, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
        throw new AnalysisEngineProcessException(e);
    }
    confs = uri2conf2score.columnKeySet();
}

From source file:org.artifactory.build.BuildServiceImpl.java

@Override
public Set<ArtifactoryBuildArtifact> getBuildArtifactsFileInfos(Build build, boolean useFallBack,
        String sourceRepo) {
    AqlBase.AndClause and = and();
    log.debug("Executing Artifacts search for build {}:{}", build.getName(), build.getNumber());
    if (StringUtils.isNotBlank(sourceRepo)) {
        log.debug("Search limited to repo: {}", sourceRepo);
        and.append(AqlApiItem.repo().equal(sourceRepo));
    }
    and.append(AqlApiItem.property().property("build.name", AqlComparatorEnum.equals, build.getName()));
    and.append(AqlApiItem.property().property("build.number", AqlComparatorEnum.equals, build.getNumber()));
    AqlBase buildArtifactsQuery = AqlApiItem.create().filter(and);

    AqlEagerResult<AqlBaseFullRowImpl> aqlResult = aqlService.executeQueryEager(buildArtifactsQuery);
    log.debug("Search returned {} artifacts", aqlResult.getSize());
    Multimap<String, Artifact> buildArtifacts = BuildServiceUtils.getBuildArtifacts(build);
    log.debug("This build contains {} artifacts (taken from build info)", buildArtifacts.size());
    List<String> virtualRepoKeys = getVirtualRepoKeys();
    Set<ArtifactoryBuildArtifact> matchedArtifacts = matchArtifactsToFileInfos(aqlResult.getResults(),
            buildArtifacts, virtualRepoKeys);
    log.debug("Matched {} build artifacts to actual paths returned by search", matchedArtifacts.size());

    //buildArtifacts contains all remaining artifacts that weren't matched - match them with the weak search
    //only if indicated and if such remaining unmatched artifacts still exist in the map.
    if (useFallBack && !buildArtifacts.isEmpty()) {
        log.debug("Unmatched artifacts exist and 'use weak match fallback' flag is lit - executing weak match");
        Set<ArtifactoryBuildArtifact> weaklyMatchedArtifacts = matchUnmatchedArtifactsNonStrict(build,
                sourceRepo, buildArtifacts, virtualRepoKeys);
        log.debug("Weak match has matched {} additional artifacts", weaklyMatchedArtifacts);
        matchedArtifacts.addAll(weaklyMatchedArtifacts);
    }
    //Lastly, populate matchedArtifacts with all remaining unmatched artifacts with null values to help users of
    //this function know if all build artifacts were found.
    log.debug("{} artifacts were not matched to actual paths", buildArtifacts.size());
    for (Artifact artifact : buildArtifacts.values()) {
        matchedArtifacts.add(new ArtifactoryBuildArtifact(artifact, null));
    }
    return matchedArtifacts;
}

From source file:io.redlink.sdk.impl.analysis.model.RDFStructureParser.java

private Enhancement parseTextAnnotation(String taUri, RepositoryConnection conn, Queue<String> toParse,
        Multimap<Enhancement, String> relations) throws RepositoryException, EnhancementParserException {

    TextAnnotation enhancement = new TextAnnotation();
    String textAnnotationQuery = "PREFIX fise: <http://fise.iks-project.eu/ontology/> \n"
            + "PREFIX dct: <http://purl.org/dc/terms/> \n"
            + "PREFIX entityhub: <http://stanbol.apache.org/ontology/entityhub/entityhub#> \n"
            + "SELECT * { \n"
            + "  OPTIONAL { <" + taUri + "> fise:confidence ?confidence } \n"
            + "  OPTIONAL { <" + taUri + "> dct:language ?language } \n"
            + "  OPTIONAL { <" + taUri + "> fise:start ?start ; fise:end ?end } \n"
            + "  OPTIONAL { <" + taUri + "> dct:type ?type } \n"
            + "  OPTIONAL { <" + taUri + "> dct:relation ?relation } \n"
            + "  OPTIONAL { <" + taUri + "> fise:selection-context ?selectionContext } \n"
            + "  OPTIONAL { <" + taUri + "> fise:selected-text ?selectedText } \n"
            + "}";
    try {
        TupleQueryResult textAnnotationResults = conn
                .prepareTupleQuery(QueryLanguage.SPARQL, textAnnotationQuery).evaluate();

        int i = 0; // only the first result row carries the full enhancement data
        while (textAnnotationResults.hasNext()) {
            BindingSet result = textAnnotationResults.next();
            if (i == 0) {
                setEnhancementData(enhancement, result);
                if (result.hasBinding("start")) {
                    enhancement
                            .setStarts(Integer.parseInt(result.getBinding("start").getValue().stringValue()));
                    enhancement.setEnds(Integer.parseInt(result.getBinding("end").getValue().stringValue()));
                }
                if (result.hasBinding("relation")) {
                    String nextRelationUri = result.getBinding("relation").getValue().stringValue();
                    if (!relations.values().contains(nextRelationUri))
                        toParse.add(nextRelationUri);
                    relations.put(enhancement, nextRelationUri);
                }
                if (result.hasBinding("selectionContext")) {
                    enhancement.setSelectionContext(
                            result.getBinding("selectionContext").getValue().stringValue());
                }
                if (result.hasBinding("selectedText")) {
                    Binding selectedText = result.getBinding("selectedText");
                    enhancement.setSelectedText(selectedText.getValue().stringValue());
                    if (!result.hasBinding("language") && (selectedText.getValue() instanceof Literal))
                        enhancement.setLanguage(((Literal) selectedText.getValue()).getLanguage());
                }
            } else {
                if (result.hasBinding("relation")) {
                    String nextRelationUri = result.getBinding("relation").getValue().stringValue();
                    if (!relations.values().contains(nextRelationUri))
                        toParse.add(nextRelationUri);
                    relations.put(enhancement, nextRelationUri);
                }
            }

            i++;
        }
    } catch (QueryEvaluationException | MalformedQueryException e) {
        throw new EnhancementParserException("Error parsing text annotation with URI: " + taUri, e);
    }

    return enhancement;
}

From source file:com.zimbra.cs.store.file.BlobConsistencyChecker.java

/**
 * Reconciles blobs against the files in the given directory and adds any inconsistencies
 * to the current result set.
 */
private void check(short volumeId, String blobDirPath, Multimap<Integer, BlobInfo> blobsById)
        throws IOException {
    Multimap<Integer, BlobInfo> revisions = HashMultimap.create();
    File blobDir = new File(blobDirPath);
    File[] files = blobDir.listFiles();
    if (files == null) {
        files = new File[0];
    }
    log.info("Comparing %d items to %d files in %s.", blobsById.size(), files.length, blobDirPath);
    for (File file : files) {
        // Parse id and mod_content value from filename.
        Matcher matcher = PAT_BLOB_FILENAME.matcher(file.getName());
        int itemId = 0;
        int modContent = 0;
        if (matcher.matches()) {
            itemId = Integer.parseInt(matcher.group(1));
            modContent = Integer.parseInt(matcher.group(2));
        }

        BlobInfo blob = null;
        if (blobsById.containsKey(itemId)) {
            Iterator<BlobInfo> iterator = blobsById.get(itemId).iterator();
            while (iterator.hasNext()) {
                BlobInfo tempBlob = iterator.next();
                if (tempBlob.modContent == modContent) {
                    blob = tempBlob;
                    revisions.put(itemId, tempBlob);
                    iterator.remove();
                }
            }
        }

        if (blob == null) {
            BlobInfo unexpected = new BlobInfo();
            unexpected.volumeId = volumeId;
            unexpected.path = file.getAbsolutePath();
            unexpected.fileSize = file.length();
            results.unexpectedBlobs.put(itemId, unexpected);
        } else {
            blob.fileSize = file.length();
            blob.fileModContent = modContent;
            if (reportUsedBlobs) {
                results.usedBlobs.put(blob.itemId, blob);
            }
            if (checkSize) {
                blob.fileDataSize = getDataSize(file, blob.dbSize);
                if (blob.dbSize != blob.fileDataSize) {
                    results.incorrectSize.put(blob.itemId, blob);
                }
            }
        }
    }

    // Any remaining items have missing blobs.
    for (BlobInfo blob : blobsById.values()) {
        results.missingBlobs.put(blob.itemId, blob);
    }

    // Reclassify incorrect revisions for items that support a single revision.
    // If exactly one blob with a given itemId appears in both missingBlobs and unexpectedBlobs,
    // and no blob with that itemId appears in revisions, it is categorized as an incorrect revision.
    Iterator<Integer> keyIterator = results.missingBlobs.keySet().iterator();
    while (keyIterator.hasNext()) {
        int itemId = keyIterator.next();
        List<BlobInfo> missingBlobs = new ArrayList<BlobInfo>(results.missingBlobs.get(itemId));
        List<BlobInfo> unexpectedBlobs = new ArrayList<BlobInfo>(results.unexpectedBlobs.get(itemId));
        if (missingBlobs.size() == 1 && unexpectedBlobs.size() == 1 && revisions.get(itemId).size() == 0) {
            BlobInfo incorrectRevision = new BlobInfo();
            BlobInfo missingBlob = missingBlobs.get(0);
            incorrectRevision.itemId = missingBlob.itemId;
            incorrectRevision.modContent = missingBlob.modContent;
            incorrectRevision.dbSize = missingBlob.dbSize;
            incorrectRevision.volumeId = missingBlob.volumeId;

            BlobInfo unexpectedBlob = unexpectedBlobs.get(0);
            incorrectRevision.path = unexpectedBlob.path;
            incorrectRevision.fileSize = unexpectedBlob.fileSize;
            incorrectRevision.fileModContent = unexpectedBlob.fileModContent;

            results.incorrectModContent.put(incorrectRevision.itemId, incorrectRevision);
            keyIterator.remove();
            results.unexpectedBlobs.removeAll(itemId);
        }
    }
}

From source file:edu.cmu.lti.oaqa.baseqa.document.rerank.scorers.LuceneDocumentScorer.java

@Override
public void prepare(JCas jcas) throws AnalysisEngineProcessException {
    uri2conf2score = HashBasedTable.create();
    uri2conf2rank = HashBasedTable.create();
    List<String> tokens = TypeUtil.getOrderedTokens(jcas).stream().map(Token::getCoveredText)
            .map(QueryParser::escape).filter(name -> !name.isEmpty() && !stoplist.contains(name.toLowerCase()))
            .collect(toList());
    Multimap<String, String> ctype2names = HashMultimap.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        Set<String> ctypes = TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .collect(toSet());
        String cnames = TypeUtil.getConceptNames(concept).stream().map(LuceneDocumentScorer::normalizeQuoteName)
                .distinct().collect(joining(" "));
        ctypes.stream().filter(t -> !FORBIDDEN_CTYPES.contains(t))
                .forEach(ctype -> ctype2names.put(ctype, cnames));
    }
    Multimap<String, String> ctypepre2names = HashMultimap.create();
    ctype2names.asMap().entrySet().forEach(e -> ctypepre2names.putAll(e.getKey().split(":")[0], e.getValue()));
    Multimap<String, String> ctype2mentions = HashMultimap.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        Set<String> ctypes = TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .collect(toSet());
        String cmentions = TypeUtil.getConceptMentions(concept).stream().map(ConceptMention::getMatchedName)
                .map(LuceneDocumentScorer::normalizeQuoteName).distinct().collect(joining(" "));
        ctypes.stream().filter(t -> !FORBIDDEN_CTYPES.contains(t))
                .forEach(ctype -> ctype2mentions.put(ctype, cmentions));
    }
    Multimap<String, String> ctypepre2mentions = HashMultimap.create();
    ctype2mentions.asMap().entrySet()
            .forEach(e -> ctypepre2mentions.putAll(e.getKey().split(":")[0], e.getValue()));
    LOG.debug("Query strings");
    ExecutorService service = Executors.newCachedThreadPool();
    // execute against all tokens
    service.submit(() -> {
        String concatTokens = String.join(" ", tokens);
        LOG.debug(" - Concatenated tokens: {}", concatTokens);
        for (String field : fields) {
            searchInField(concatTokens, field, "tokens_concatenated@" + field);
        }
        searchAllField(concatTokens, "tokens_concatenated@all");
    });
    // execute against concatenated concept names
    service.submit(() -> {
        String concatCnames = String.join(" ", ctype2names.values());
        LOG.debug(" - Concatenated concept names: {}", concatCnames);
        for (String field : fields) {
            searchInField(concatCnames, field, "cnames_concatenated@" + field);
        }
        searchAllField(concatCnames, "cnames_concatenated@all");
    });
    // execute against concatenated concept mentions
    service.submit(() -> {
        String concatCmentions = String.join(" ", ctype2mentions.values());
        LOG.debug(" - Concatenated concept mentions: {}", concatCmentions);
        for (String field : fields) {
            searchInField(concatCmentions, field, "cmentions_concatenated@" + field);
        }
        searchAllField(concatCmentions, "cmentions_concatenated@all");
    });
    // execute against concept names for each concept
    service.submit(() -> {
        for (String cnames : ImmutableSet.copyOf(ctype2names.values())) {
            LOG.debug(" - Concatenated concept names: {}", cnames);
            for (String field : fields) {
                searchInField(cnames, field, "cnames_individual@" + field);
            }
            searchAllField(cnames, "cnames_individual@all");
        }
    });
    // execute against concept names for each concept type
    service.submit(() -> {
        for (String ctype : ctype2names.keySet()) {
            String concatCnames = String.join(" ", ctype2names.get(ctype));
            LOG.debug(" - Concatenated concept names for {}: {}", ctype, concatCnames);
            for (String field : fields) {
                searchInField(concatCnames, field, "cnames@" + ctype + "@" + field);
            }
            searchAllField(concatCnames, "cnames@" + ctype + "@all");
        }
    });
    // execute against concept names for each concept type prefix
    service.submit(() -> {
        for (String ctypepre : ctypepre2names.keySet()) {
            String concatCnames = String.join(" ", ctypepre2names.get(ctypepre));
            LOG.debug(" - Concatenated concept names for {}: {}", ctypepre, concatCnames);
            for (String field : fields) {
                searchInField(concatCnames, field, "cnames@" + ctypepre + "@" + field);
            }
            searchAllField(concatCnames, "cnames@" + ctypepre + "@all");
        }
    });
    // execute against concept mentions for each concept
    service.submit(() -> {
        for (String cmentions : ImmutableSet.copyOf(ctype2mentions.values())) {
            LOG.debug(" - Concatenated concept mentions: {}", cmentions);
            for (String field : fields) {
                searchInField(cmentions, field, "cmentions_individual@" + field);
            }
            searchAllField(cmentions, "cmentions_individual@all");
        }
    });
    // execute against concept mentions for each concept type
    service.submit(() -> {
        for (String ctype : ctype2mentions.keySet()) {
            String concatCmentions = String.join(" ", ctype2mentions.get(ctype));
            LOG.debug(" - Concatenated concept mentions for {}: {}", ctype, concatCmentions);
            for (String field : fields) {
                searchInField(concatCmentions, field, "cmentions@" + ctype + "@" + field);
            }
            searchAllField(concatCmentions, "cmentions@" + ctype + "@all");
        }
    });
    // execute against concept mentions for each concept type prefix
    service.submit(() -> {
        for (String ctypepre : ctypepre2mentions.keySet()) {
            String concatCmentions = String.join(" ", ctypepre2mentions.get(ctypepre));
            LOG.debug(" - Concatenated concept mentions for {}: {}", ctypepre, concatCmentions);
            for (String field : fields) {
                searchInField(concatCmentions, field, "cmentions@" + ctypepre + "@" + field);
            }
            searchAllField(concatCmentions, "cmentions@" + ctypepre + "@all");
        }
    });
    service.shutdown();
    try {
        service.awaitTermination(1, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
        throw new AnalysisEngineProcessException(e);
    }
    confs = new HashSet<>(uri2conf2score.columnKeySet()); // to avoid ConcurrentModificationException
}