Example usage for com.google.common.collect Multimap values

List of usage examples for com.google.common.collect Multimap values

Introduction

In this page you can find the example usage for com.google.common.collect Multimap values.

Prototype

Collection<V> values();

Source Link

Document

Returns a view collection containing the value from each key-value pair contained in this multimap, without collapsing duplicates (so values().size() == size()).

Usage

From source file:edu.cmu.lti.oaqa.baseqa.concept.rerank.scorers.LuceneConceptScorer.java

@Override
public void prepare(JCas jcas) throws AnalysisEngineProcessException {
    uri2conf2score = HashBasedTable.create();
    uri2conf2rank = HashBasedTable.create();
    // Query-escaped tokens from the CAS in document order, minus stoplist words.
    List<String> tokens = TypeUtil.getOrderedTokens(jcas).stream().map(Token::getCoveredText)
            .map(QueryParser::escape).filter(name -> !name.isEmpty() && !stoplist.contains(name.toLowerCase()))
            .collect(toList());
    // concept type abbreviation -> space-joined distinct names, one entry per concept
    Multimap<String, String> ctype2names = HashMultimap.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        Set<String> ctypes = TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .collect(toSet());
        String cnames = TypeUtil.getConceptNames(concept).stream().map(LuceneConceptScorer::normalizeQuoteName)
                .distinct().collect(joining(" "));
        ctypes.stream().filter(t -> !FORBIDDEN_CTYPES.contains(t))
                .forEach(ctype -> ctype2names.put(ctype, cnames));
    }
    // concept type prefix (text before the first ':') -> names
    Multimap<String, String> ctypepre2names = HashMultimap.create();
    ctype2names.asMap().entrySet().forEach(e -> ctypepre2names.putAll(e.getKey().split(":")[0], e.getValue()));
    // concept type abbreviation -> space-joined distinct matched mention texts
    Multimap<String, String> ctype2mentions = HashMultimap.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        Set<String> ctypes = TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .collect(toSet());
        String cmentions = TypeUtil.getConceptMentions(concept).stream().map(ConceptMention::getMatchedName)
                .map(LuceneConceptScorer::normalizeQuoteName).distinct().collect(joining(" "));
        ctypes.stream().filter(t -> !FORBIDDEN_CTYPES.contains(t))
                .forEach(ctype -> ctype2mentions.put(ctype, cmentions));
    }
    // concept type prefix -> mentions.
    // FIX: derive from ctype2mentions; the original iterated over the (still empty)
    // ctypepre2mentions map itself, so the prefix/mention searches never saw any data.
    Multimap<String, String> ctypepre2mentions = HashMultimap.create();
    ctype2mentions.asMap().entrySet()
            .forEach(e -> ctypepre2mentions.putAll(e.getKey().split(":")[0], e.getValue()));
    LOG.debug("Query strings");
    ExecutorService service = Executors.newCachedThreadPool();
    // execute against all tokens
    service.submit(() -> {
        String concatTokens = String.join(" ", tokens);
        LOG.debug(" - Concatenated tokens: {}", concatTokens);
        for (String field : fields) {
            searchInField(concatTokens, field, "tokens_concatenated@" + field);
        }
        searchAllField(concatTokens, "tokens_concatenated@all");
    });
    // execute against concatenated concept names
    service.submit(() -> {
        String concatCnames = String.join(" ", ctype2names.values());
        LOG.debug(" - Concatenated concept names: {}", concatCnames);
        for (String field : fields) {
            searchInField(concatCnames, field, "cnames_concatenated@" + field);
        }
        searchAllField(concatCnames, "cnames_concatenated@all");
    });
    // execute against concatenated concept mentions
    service.submit(() -> {
        String concatCmentions = String.join(" ", ctype2mentions.values());
        LOG.debug(" - Concatenated concept mentions: {}", concatCmentions);
        for (String field : fields) {
            searchInField(concatCmentions, field, "cmentions_concatenated@" + field);
        }
        // FIX: label was "cmentions_concatenated@" (missing "all"), inconsistent
        // with every other searchAllField call in this method.
        searchAllField(concatCmentions, "cmentions_concatenated@all");
    });
    // execute against concept names for each concept
    service.submit(() -> {
        for (String cnames : ImmutableSet.copyOf(ctype2names.values())) {
            LOG.debug(" - Concatenated concept names: {}", cnames);
            for (String field : fields) {
                searchInField(cnames, field, "cnames_individual@" + field);
            }
            searchAllField(cnames, "cnames_individual@all");
        }
    });
    // execute against concept names for each concept type
    service.submit(() -> {
        for (String ctype : ctype2names.keySet()) {
            String concatCnames = String.join(" ", ctype2names.get(ctype));
            LOG.debug(" - Concatenated concept names for {}: {}", ctype, concatCnames);
            for (String field : fields) {
                searchInField(concatCnames, field, "cnames@" + ctype + "@" + field);
            }
            searchAllField(concatCnames, "cnames@" + ctype + "@all");
        }
    });
    // execute against concept names for each concept type prefix
    service.submit(() -> {
        for (String ctypepre : ctypepre2names.keySet()) {
            String concatCnames = String.join(" ", ctypepre2names.get(ctypepre));
            LOG.debug(" - Concatenated concept names for {}: {}", ctypepre, concatCnames);
            for (String field : fields) {
                searchInField(concatCnames, field, "cnames@" + ctypepre + "@" + field);
            }
            searchAllField(concatCnames, "cnames@" + ctypepre + "@all");
        }
    });
    // execute against concept mentions for each concept
    service.submit(() -> {
        for (String cmentions : ImmutableSet.copyOf(ctype2mentions.values())) {
            LOG.debug(" - Concatenated concept mentions: {}", cmentions);
            for (String field : fields) {
                searchInField(cmentions, field, "cmentions_individual@" + field);
            }
            searchAllField(cmentions, "cmentions_individual@all");
        }
    });
    // execute against concept mentions for each concept type
    service.submit(() -> {
        for (String ctype : ctype2mentions.keySet()) {
            String concatCmentions = String.join(" ", ctype2mentions.get(ctype));
            LOG.debug(" - Concatenated concept mentions for {}: {}", ctype, concatCmentions);
            for (String field : fields) {
                searchInField(concatCmentions, field, "cmentions@" + ctype + "@" + field);
            }
            searchAllField(concatCmentions, "cmentions@" + ctype + "@all");
        }
    });
    // execute against concept mentions for each concept type prefix
    service.submit(() -> {
        for (String ctypepre : ctypepre2mentions.keySet()) {
            String concatCmentions = String.join(" ", ctypepre2mentions.get(ctypepre));
            LOG.debug(" - Concatenated concept mentions for {}: {}", ctypepre, concatCmentions);
            for (String field : fields) {
                searchInField(concatCmentions, field, "cmentions@" + ctypepre + "@" + field);
            }
            searchAllField(concatCmentions, "cmentions@" + ctypepre + "@all");
        }
    });
    service.shutdown();
    try {
        // NOTE(review): the boolean result is ignored — tasks still running after
        // one minute are silently abandoned; confirm that is acceptable here.
        service.awaitTermination(1, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve interrupt status for callers
        throw new AnalysisEngineProcessException(e);
    }
    confs = uri2conf2score.columnKeySet();
}

From source file:com.cloudant.sync.datastore.DatastoreImpl.java

/**
 * Removes from {@code revisions} every (document id, revision id) pair that is
 * already present in the datastore.
 *
 * @param db        database to run the lookup against
 * @param revisions a multimap from document id to a set of revision ids; it is
 *                  modified in place for performance reasons
 * @throws DatastoreException if the underlying query fails
 */
void revsDiffBatch(SQLDatabase db, Multimap<String, String> revisions) throws DatastoreException {

    final int docIdCount = revisions.keySet().size();
    final int revIdCount = revisions.size();

    final String sql = String.format(
            "SELECT docs.docid, revs.revid FROM docs, revs "
                    + "WHERE docs.doc_id = revs.doc_id AND docs.docid IN (%s) AND revs.revid IN (%s) "
                    + "ORDER BY docs.docid",
            DatabaseUtils.makePlaceholders(docIdCount),
            DatabaseUtils.makePlaceholders(revIdCount));

    // Bind all document ids first, then all revision ids, matching the order
    // of the two IN clauses above.
    String[] docIds = revisions.keySet().toArray(new String[docIdCount]);
    String[] revIds = revisions.values().toArray(new String[revIdCount]);
    String[] args = new String[docIdCount + revIdCount];
    System.arraycopy(docIds, 0, args, 0, docIdCount);
    System.arraycopy(revIds, 0, args, docIdCount, revIdCount);

    Cursor cursor = null;
    try {
        cursor = db.rawQuery(sql, args);
        while (cursor.moveToNext()) {
            // Each returned row is a pair the datastore already knows about.
            String docId = cursor.getString(0);
            String revId = cursor.getString(1);
            revisions.remove(docId, revId);
        }
    } catch (SQLException e) {
        throw new DatastoreException(e);
    } finally {
        DatabaseUtils.closeCursorQuietly(cursor);
    }
}

From source file:de.hzi.helmholtz.Compare.PathwayComparisonWithModules.java

/**
 * Greedily pairs query genes with target genes whose multisets of domain
 * functions match exactly (same functions, same multiplicities), records a
 * combined score per pair, removes the matched genes from working copies of
 * both pathways, and recurses into {@code SubsetIdentification} on whatever
 * genes remain unmatched.
 *
 * <p>Side effects: populates the instance-level maps
 * {@code ScoreFunctionMatchMisMatch}, {@code ScoreStatusMatchMisMatch} and
 * {@code ScoreSubstrateMatchMisMatch}, and rebuilds both position bimaps
 * before recursing.
 *
 * @param firstPathway  the query pathway
 * @param secondPathway the target pathway
 * @param newSourceGeneIdToPositionMap gene id -> 1-based position for the query pathway
 * @param newTargetGeneIdToPositionMap gene id -> 1-based position for the target pathway
 * @param Yes 0 to report pairs as query->target, otherwise target->query
 * @return scores (highest first) mapped to "geneA->geneB" pair strings
 */
public Multimap<Double, String> SubsetsMatching(final PathwayWithModules firstPathway,
        final PathwayWithModules secondPathway, BiMap<Integer, Integer> newSourceGeneIdToPositionMap,
        BiMap<Integer, Integer> newTargetGeneIdToPositionMap, int Yes) {
    // Scores sorted descending; ties on score keep pair strings in natural order.
    Multimap<Double, String> resultPerfect = TreeMultimap.create(Ordering.natural().reverse(),
            Ordering.natural());
    PathwayWithModules firstPathwayCopy = new PathwayWithModules(firstPathway);// Copy of the Query pathway
    PathwayWithModules secondPathwayCopy = new PathwayWithModules(secondPathway);// Copy of the Target pathway'
    // PathwayWithModules secondPathwayCopy1 = new PathwayWithModules(secondPathway);
    int currentQueryGene = 0;
    Iterator<ModuleGene> sourceGeneIt = firstPathway.moduleGeneIterator();
    List<Integer> QueryToRemove = new ArrayList<Integer>();
    List<Integer> TargetToRemove = new ArrayList<Integer>();
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++;
        ModuleGene queryGene = sourceGeneIt.next();

        int currentTargetGene = 0;
        // Flatten the query gene's modules into function/activity/substrate lists.
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Module m : queryGene.getModule()) {
            for (Domain d : m.getDomains()) {
                qfunction.add(d.getDomainFunctionString());
                qfunctionList.add(d.getDomainFunctionString());
                qactivity.add(d.getStatus().toString());
                qsubstrate.add(d.getSubstrates());
            }
        }
        Iterator<ModuleGene> targetGeneIt = secondPathway.moduleGeneIterator();

        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            ModuleGene targetGene = targetGeneIt.next();
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tfunctionList = new ArrayList<String>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (Module m : targetGene.getModule()) {
                for (Domain d : m.getDomains()) {
                    tfunctionList.add(d.getDomainFunctionString());
                    tfunction.add(d.getDomainFunctionString());
                    tactivity.add(d.getStatus().toString());
                    tsubstrate.add(d.getSubstrates());
                }
            }
            // Equal intersection size on both sides means the two multisets are identical.
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            if (DomainsCovered.size() == qfunction.size() && DomainsCovered.size() == tfunction.size()) {
                Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                        tactivity);
                Multimap<String, Integer> Functionscores = ArrayListMultimap.create();

                // Levenshtein distance over the ordered function lists: any
                // reordering at all is flattened to a single transposition flag.
                int TranspositionDomains = LevenshteinDistance.computeLevenshteinDistance(qfunctionList,
                        tfunctionList);
                if (TranspositionDomains > 0) {
                    TranspositionDomains = 1;
                }

                Functionscores.put(qfunction.size() + "-0", TranspositionDomains);
                Multimap<Double, Multimap<String, Integer>> substratescore = myFunction
                        .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
                // First key of each sorted score map is taken as the score for that aspect.
                Object activityScore = activityscores.asMap().keySet().toArray()[0];
                Object substrateScore = substratescore.asMap().keySet().toArray()[0];
                // Weighted combination (2.9 for the exact domain match, 0.05 each for
                // activity and substrate), averaged and rounded to two decimals.
                double finalScore = Math
                        .round((((2.9 * 1.0) + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                                + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                        / 100.0;
                String ConvertedGeneIDs = "";
                if (Yes == 0) {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                            newSourceGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentTargetGene),
                                    newTargetGeneIdToPositionMap);
                } else {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentTargetGene),
                            newTargetGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                    newSourceGeneIdToPositionMap);
                }
                resultPerfect.put(finalScore, ConvertedGeneIDs);
                ScoreFunctionMatchMisMatch.put(ConvertedGeneIDs, Functionscores);
                ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
                ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescore.values());

                TargetToRemove.add(currentTargetGene);
                QueryToRemove.add(currentQueryGene);
            }
        }

    }
    // NOTE(review): positions were collected above, then genes are removed one by
    // one here. If removeGene() is positional, each removal shifts later
    // positions — confirm removeGene() removes by id rather than by index.
    for (int i : TargetToRemove) {
        secondPathwayCopy.removeGene(i);
    }
    for (int i : QueryToRemove) {
        firstPathwayCopy.removeGene(i);
    }
    if (firstPathwayCopy.size() > 0 && secondPathwayCopy.size() > 0) {
        // Re-construct the bimaps
        newSourceGeneIdToPositionMap = HashBiMap.create();
        int temp = 0;
        for (ModuleGene e : firstPathwayCopy.getModulegenes()) {
            temp = temp + 1;
            newSourceGeneIdToPositionMap.put(e.getGeneId(), temp);
        }
        newTargetGeneIdToPositionMap = HashBiMap.create();
        temp = 0;
        for (ModuleGene e : secondPathwayCopy.getModulegenes()) {
            temp = temp + 1;
            newTargetGeneIdToPositionMap.put(e.getGeneId(), temp);
        }
        // Recurse on the genes that had no perfect domain-function match.
        resultPerfect.putAll(SubsetIdentification(firstPathwayCopy, secondPathwayCopy,
                newSourceGeneIdToPositionMap, newTargetGeneIdToPositionMap, Yes));
    }
    System.out.println(resultPerfect);
    return resultPerfect;
}

From source file:io.redlink.sdk.impl.analysis.model.RDFStructureParser.java

/**
 * Loads the entity annotation identified by {@code eaUri} from the repository.
 *
 * <p>Relation URIs discovered on the annotation are appended to {@code toParse}
 * (unless already queued via {@code relations}) and recorded in
 * {@code relations}; entity types are accumulated across all result rows.
 *
 * @param eaUri     URI of the fise:EntityAnnotation to parse
 * @param conn      open repository connection used to evaluate the query
 * @param toParse   queue of relation URIs still to be parsed (appended to)
 * @param relations map of already-known enhancement -> relation URIs (appended to)
 * @return the populated {@link EntityAnnotation}
 * @throws EnhancementParserException if the query cannot be evaluated or parsed
 */
private EntityAnnotation parseEntityAnnotation(String eaUri, RepositoryConnection conn, Queue<String> toParse,
        Multimap<Enhancement, String> relations) throws RepositoryException, EnhancementParserException {

    EntityAnnotation enhancement = new EntityAnnotation();
    // FIX: the original query was malformed SPARQL — it dropped the '<' before
    // the second eaUri occurrence and never closed the SELECT group. Each
    // property is now an independent OPTIONAL pattern, matching how the result
    // bindings are consumed individually below.
    String entityAnnotationsQuery = "PREFIX fise: <http://fise.iks-project.eu/ontology/> \n"
            + "PREFIX dct: <http://purl.org/dc/terms/> \n"
            + "PREFIX entityhub: <http://stanbol.apache.org/ontology/entityhub/entityhub#> \n"
            + "SELECT * { \n"
            + "  OPTIONAL { <" + eaUri + "> fise:confidence ?confidence } \n"
            + "  OPTIONAL { <" + eaUri + "> dct:language ?language } \n"
            + "  OPTIONAL { <" + eaUri + "> dct:relation ?relation } \n"
            + "  OPTIONAL { <" + eaUri + "> fise:entity-label ?entityLabel } \n"
            + "  OPTIONAL { <" + eaUri + "> fise:entity-reference ?entityReference } \n"
            + "  OPTIONAL { <" + eaUri + "> fise:entity-type ?entityType } \n"
            + "  OPTIONAL { <" + eaUri + "> entityhub:site ?site } \n"
            + "}";
    try {
        TupleQueryResult entityAnnotationsResults = conn
                .prepareTupleQuery(QueryLanguage.SPARQL, entityAnnotationsQuery).evaluate();

        int i = 0;
        while (entityAnnotationsResults.hasNext()) {
            BindingSet result = entityAnnotationsResults.next();
            if (i == 0) {
                // First row carries the scalar properties of the annotation.
                setEnhancementData(enhancement, result);
                if (result.hasBinding("entityLabel")) {
                    Binding entityLabel = result.getBinding("entityLabel");
                    enhancement.setEntityLabel(entityLabel.getValue().stringValue());
                    // Fall back to the label literal's language tag when no
                    // explicit dct:language binding is present.
                    if (!result.hasBinding("language") && (entityLabel.getValue() instanceof Literal))
                        enhancement.setLanguage(((Literal) entityLabel.getValue()).getLanguage());

                }
                if (result.hasBinding("site")) {
                    enhancement.setDataset(result.getBinding("site").getValue().stringValue());
                }
                if (result.hasBinding("entityReference")) {
                    enhancement.setEntityReference(
                            parseEntity(conn, result.getBinding("entityReference").getValue().stringValue(),
                                    enhancement.getDataset()));
                }
                if (result.hasBinding("relation")) {
                    String nextRelationUri = result.getBinding("relation").getValue().stringValue();
                    // Only queue relations not already scheduled for parsing.
                    if (!relations.containsValue(nextRelationUri))
                        toParse.add(nextRelationUri);
                    relations.put(enhancement, nextRelationUri);
                }
                if (result.hasBinding("entityType")) {
                    Collection<String> types = new HashSet<String>();
                    types.add(result.getBinding("entityType").getValue().stringValue());
                    enhancement.setEntityTypes(types);
                }
            } else {
                // Subsequent rows only contribute additional relations/types.
                if (result.hasBinding("relation")) {
                    String nextRelationUri = result.getBinding("relation").getValue().stringValue();
                    Collection<String> eRelations = relations.get(enhancement);
                    if (!eRelations.contains(nextRelationUri)) {
                        if (!relations.containsValue(nextRelationUri))
                            toParse.add(nextRelationUri);
                        relations.put(enhancement, nextRelationUri);
                    }
                }

                if (result.hasBinding("entityType")) {
                    String nextType = result.getBinding("entityType").getValue().stringValue();
                    if (!enhancement.getEntityTypes().contains(nextType))
                        enhancement.getEntityTypes().add(nextType);
                }
            }

            i++;
        }

    } catch (QueryEvaluationException | MalformedQueryException e) {
        throw new EnhancementParserException("Error parsing entity annotation with URI: " + eaUri, e);
    }

    return enhancement;
}

From source file:com.android.ide.common.layout.grid.GridModel.java

/**
 * Update the model to account for the given nodes getting deleted. The nodes
 * are not actually deleted by this method; that is assumed to be performed by the
 * caller. Instead this method performs whatever model updates are necessary to
 * preserve the grid structure.
 *
 * <p>Concretely: removes spacer views for rows/columns left empty by the
 * deletion, renumbers the remaining cells (collapsing runs of now-unused
 * rows/columns), merges consecutive spacer cells into single spacers sized to
 * the combined span, and finally updates the declared row/column counts.
 *
 * @param nodes the nodes to be deleted
 */
public void onDeleted(@NonNull List<INode> nodes) {
    if (nodes.size() == 0) {
        return;
    }

    // Attempt to clean up spacer objects for any newly-empty rows or columns
    // as the result of this deletion

    Set<INode> deleted = new HashSet<INode>();

    for (INode child : nodes) {
        // We don't care about deletion of spacers
        String fqcn = child.getFqcn();
        if (fqcn.equals(FQCN_SPACE) || fqcn.equals(FQCN_SPACE_V7)) {
            continue;
        }
        deleted.add(child);
    }

    // Partition the surviving children: real views record which rows/columns
    // are still occupied; spacers are indexed by their row/column for merging.
    Set<Integer> usedColumns = new HashSet<Integer>(actualColumnCount);
    Set<Integer> usedRows = new HashSet<Integer>(actualRowCount);
    Multimap<Integer, ViewData> columnSpacers = ArrayListMultimap.create(actualColumnCount, 2);
    Multimap<Integer, ViewData> rowSpacers = ArrayListMultimap.create(actualRowCount, 2);
    Set<ViewData> removedViews = new HashSet<ViewData>();

    for (ViewData view : mChildViews) {
        if (deleted.contains(view.node)) {
            removedViews.add(view);
        } else if (view.isColumnSpacer()) {
            columnSpacers.put(view.column, view);
        } else if (view.isRowSpacer()) {
            rowSpacers.put(view.row, view);
        } else {
            usedColumns.add(Integer.valueOf(view.column));
            usedRows.add(Integer.valueOf(view.row));
        }
    }

    if (usedColumns.size() == 0 || usedRows.size() == 0) {
        // No more views - just remove all the spacers
        for (ViewData spacer : columnSpacers.values()) {
            layout.removeChild(spacer.node);
        }
        for (ViewData spacer : rowSpacers.values()) {
            layout.removeChild(spacer.node);
        }
        mChildViews.clear();
        actualColumnCount = 0;
        declaredColumnCount = 2;
        actualRowCount = 0;
        declaredRowCount = UNDEFINED;
        // Reset the layout to the default declared column count of 2.
        setGridAttribute(layout, ATTR_COLUMN_COUNT, 2);

        return;
    }

    // Determine columns to introduce spacers into:
    // This is tricky; I should NOT combine spacers if there are cells tied to
    // individual ones

    // TODO: Invalidate column sizes too! Otherwise repeated updates might get confused!
    // Similarly, inserts need to do the same!

    // Produce map of old column numbers to new column numbers
    // Collapse regions of consecutive space and non-space ranges together
    int[] columnMap = new int[actualColumnCount + 1]; // +1: Easily handle columnSpans as well
    int newColumn = 0;
    boolean prevUsed = usedColumns.contains(0);
    for (int column = 1; column < actualColumnCount; column++) {
        boolean used = usedColumns.contains(column);
        // A new output column starts on every used column and on each
        // used/unused transition (so one unused run maps to one column).
        if (used || prevUsed != used) {
            newColumn++;
            prevUsed = used;
        }
        columnMap[column] = newColumn;
    }
    newColumn++;
    columnMap[actualColumnCount] = newColumn;
    assert columnMap[0] == 0;

    // Same collapsing scheme for rows.
    int[] rowMap = new int[actualRowCount + 1]; // +1: Easily handle rowSpans as well
    int newRow = 0;
    prevUsed = usedRows.contains(0);
    for (int row = 1; row < actualRowCount; row++) {
        boolean used = usedRows.contains(row);
        if (used || prevUsed != used) {
            newRow++;
            prevUsed = used;
        }
        rowMap[row] = newRow;
    }
    newRow++;
    rowMap[actualRowCount] = newRow;
    assert rowMap[0] == 0;

    // Adjust column and row numbers to account for deletions: for a given cell, if it
    // is to the right of a deleted column, reduce its column number, and if it only
    // spans across the deleted column, reduce its column span.
    for (ViewData view : mChildViews) {
        if (removedViews.contains(view)) {
            continue;
        }
        int newColumnStart = columnMap[Math.min(columnMap.length - 1, view.column)];
        // Gracefully handle rogue/invalid columnSpans in the XML
        int newColumnEnd = columnMap[Math.min(columnMap.length - 1, view.column + view.columnSpan)];
        if (newColumnStart != view.column) {
            view.column = newColumnStart;
            setGridAttribute(view.node, ATTR_LAYOUT_COLUMN, view.column);
        }

        int columnSpan = newColumnEnd - newColumnStart;
        if (columnSpan != view.columnSpan) {
            if (columnSpan >= 1) {
                view.columnSpan = columnSpan;
                setColumnSpanAttribute(view.node, view.columnSpan);
            } // else: merging spacing columns together
        }

        int newRowStart = rowMap[Math.min(rowMap.length - 1, view.row)];
        int newRowEnd = rowMap[Math.min(rowMap.length - 1, view.row + view.rowSpan)];
        if (newRowStart != view.row) {
            view.row = newRowStart;
            setGridAttribute(view.node, ATTR_LAYOUT_ROW, view.row);
        }

        int rowSpan = newRowEnd - newRowStart;
        if (rowSpan != view.rowSpan) {
            if (rowSpan >= 1) {
                view.rowSpan = rowSpan;
                setRowSpanAttribute(view.node, view.rowSpan);
            } // else: merging spacing rows together
        }
    }

    // Merge spacers (and add spacers for newly empty columns)
    int start = 0;
    while (start < actualColumnCount) {
        // Find next unused span
        while (start < actualColumnCount && usedColumns.contains(start)) {
            start++;
        }
        if (start == actualColumnCount) {
            break;
        }
        assert !usedColumns.contains(start);
        // Find the next span of unused columns and produce a SINGLE
        // spacer for that range (unless it's a zero-sized columns)
        int end = start + 1;
        for (; end < actualColumnCount; end++) {
            if (usedColumns.contains(end)) {
                break;
            }
        }

        // Add up column sizes
        int width = getColumnWidth(start, end - start);

        // Find all spacers: the first one found should be moved to the start column
        // and assigned to the full height of the columns, and
        // the column count reduced by the corresponding amount

        // TODO: if width = 0, fully remove

        boolean isFirstSpacer = true;
        for (int column = start; column < end; column++) {
            // NOTE(review): Multimap.get never returns null, so the null check
            // below is redundant (kept for byte-identical behavior).
            Collection<ViewData> spacers = columnSpacers.get(column);
            if (spacers != null && !spacers.isEmpty()) {
                // Avoid ConcurrentModificationException since we're inserting into the
                // map within this loop (always at a different index, but the map doesn't
                // know that)
                spacers = new ArrayList<ViewData>(spacers);
                for (ViewData spacer : spacers) {
                    if (isFirstSpacer) {
                        // Keep the first spacer: move it to the start of the run
                        // and widen it to cover the whole merged range.
                        isFirstSpacer = false;
                        spacer.column = columnMap[start];
                        setGridAttribute(spacer.node, ATTR_LAYOUT_COLUMN, spacer.column);
                        if (end - start > 1) {
                            // Compute a merged width for all the spacers (not needed if
                            // there's just one spacer; it should already have the correct width)
                            int columnWidthDp = mRulesEngine.pxToDp(width);
                            spacer.node.setAttribute(ANDROID_URI, ATTR_LAYOUT_WIDTH,
                                    String.format(VALUE_N_DP, columnWidthDp));
                        }
                        columnSpacers.put(start, spacer);
                    } else {
                        removedViews.add(spacer); // Mark for model removal
                        layout.removeChild(spacer.node);
                    }
                }
            }
        }

        if (isFirstSpacer) {
            // No spacer: create one
            int columnWidthDp = mRulesEngine.pxToDp(width);
            addSpacer(layout, -1, UNDEFINED, columnMap[start], columnWidthDp, DEFAULT_CELL_HEIGHT);
        }

        start = end;
    }
    actualColumnCount = newColumn;
    //if (usedColumns.contains(newColumn)) {
    //    // TODO: This may be totally wrong for right aligned content!
    //    actualColumnCount++;
    //}

    // Merge spacers for rows (mirror of the column merge above)
    start = 0;
    while (start < actualRowCount) {
        // Find next unused span
        while (start < actualRowCount && usedRows.contains(start)) {
            start++;
        }
        if (start == actualRowCount) {
            break;
        }
        assert !usedRows.contains(start);
        // Find the next span of unused rows and produce a SINGLE
        // spacer for that range (unless it's a zero-sized rows)
        int end = start + 1;
        for (; end < actualRowCount; end++) {
            if (usedRows.contains(end)) {
                break;
            }
        }

        // Add up row sizes
        int height = getRowHeight(start, end - start);

        // Find all spacers: the first one found should be moved to the start row
        // and assigned to the full height of the rows, and
        // the row count reduced by the corresponding amount

        // TODO: if width = 0, fully remove

        boolean isFirstSpacer = true;
        for (int row = start; row < end; row++) {
            Collection<ViewData> spacers = rowSpacers.get(row);
            if (spacers != null && !spacers.isEmpty()) {
                // Avoid ConcurrentModificationException since we're inserting into the
                // map within this loop (always at a different index, but the map doesn't
                // know that)
                spacers = new ArrayList<ViewData>(spacers);
                for (ViewData spacer : spacers) {
                    if (isFirstSpacer) {
                        isFirstSpacer = false;
                        spacer.row = rowMap[start];
                        setGridAttribute(spacer.node, ATTR_LAYOUT_ROW, spacer.row);
                        if (end - start > 1) {
                            // Compute a merged width for all the spacers (not needed if
                            // there's just one spacer; it should already have the correct height)
                            int rowHeightDp = mRulesEngine.pxToDp(height);
                            spacer.node.setAttribute(ANDROID_URI, ATTR_LAYOUT_HEIGHT,
                                    String.format(VALUE_N_DP, rowHeightDp));
                        }
                        rowSpacers.put(start, spacer);
                    } else {
                        removedViews.add(spacer); // Mark for model removal
                        layout.removeChild(spacer.node);
                    }
                }
            }
        }

        if (isFirstSpacer) {
            // No spacer: create one
            int rowWidthDp = mRulesEngine.pxToDp(height);
            addSpacer(layout, -1, rowMap[start], UNDEFINED, DEFAULT_CELL_WIDTH, rowWidthDp);
        }

        start = end;
    }
    actualRowCount = newRow;
    //        if (usedRows.contains(newRow)) {
    //            actualRowCount++;
    //        }

    // Update the model: remove removed children from the view data list
    if (removedViews.size() <= 2) {
        mChildViews.removeAll(removedViews);
    } else {
        // Many removals: rebuild the list instead of repeated removeAll scans.
        List<ViewData> remaining = new ArrayList<ViewData>(mChildViews.size() - removedViews.size());
        for (ViewData view : mChildViews) {
            if (!removedViews.contains(view)) {
                remaining.add(view);
            }
        }
        mChildViews = remaining;
    }

    // Update the final column and row declared attributes
    if (declaredColumnCount != UNDEFINED) {
        declaredColumnCount = actualColumnCount;
        setGridAttribute(layout, ATTR_COLUMN_COUNT, actualColumnCount);
    }
    if (declaredRowCount != UNDEFINED) {
        declaredRowCount = actualRowCount;
        setGridAttribute(layout, ATTR_ROW_COUNT, actualRowCount);
    }
}

From source file:de.hzi.helmholtz.Compare.PathwayComparisonWithModules.java

/**
 * Greedy one-to-many matching: scores each query gene of {@code firstPathway} against
 * combinations of target genes of {@code secondPathway}.
 * <p>
 * For every query gene the domain function / activity-status / substrate features are
 * flattened over its modules; target genes are bucketed by domain-difference count, a small
 * candidate set is selected greedily from the smallest-difference buckets, and every
 * combination of the candidates is scored with a weighted mix of function, activity and
 * substrate similarity (weights 2.9 / 0.05 / 0.05, divided by 3, rounded to 2 decimals).
 * <p>
 * Side effects: appends per-pair detail scores to {@code ScoreFunctionMatchMisMatch},
 * {@code ScoreStatusMatchMisMatch} and {@code ScoreSubstrateMatchMisMatch}.
 *
 * @param firstPathway  query pathway (iterated via its module genes)
 * @param secondPathway target pathway
 * @param newSourceGeneIdToPositionMap maps query gene ids to 1-based positions; used by
 *        {@code reconstructWithGeneId} to render readable gene ids
 * @param newTargetGeneIdToPositionMap same mapping for the target pathway
 * @param Yes direction flag: 0 renders "query->target", anything else "target->query"
 * @return multimap from final score (descending) to rendered "a->b" mapping strings
 */
public Multimap<Double, String> SubsetIdentification(PathwayWithModules firstPathway,
        PathwayWithModules secondPathway, BiMap<Integer, Integer> newSourceGeneIdToPositionMap,
        BiMap<Integer, Integer> newTargetGeneIdToPositionMap, int Yes) {
    // Final scores, highest first; for equal scores all mappings are kept (values sorted).
    Multimap<Double, String> result = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural());

    Iterator<ModuleGene> sourceGeneIt = firstPathway.moduleGeneIterator();
    int currentQueryGene = 0;
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++; // 1-based position of the current query gene
        ModuleGene queryGene = sourceGeneIt.next();
        // Candidate target genes keyed by domain-difference count, ascending (best first).
        Multimap<Integer, String> resultr = TreeMultimap.create(Ordering.natural(), Ordering.natural());
        int currentTargetGene = 0;
        // Query-gene features, flattened over all of its modules' domains.
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Module m : queryGene.getModule()) {
            for (Domain d : m.getDomains()) {
                qfunction.add(d.getDomainFunctionString());
                qfunctionList.add(d.getDomainFunctionString());
                qactivity.add(d.getStatus().toString());
                qsubstrate.add(d.getSubstrates());
            }
        }
        List<String> TargenesSelected = new ArrayList<String>();
        Iterator<ModuleGene> targetGeneIt = secondPathway.moduleGeneIterator();
        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            ModuleGene targetGene = targetGeneIt.next();
            // Same feature extraction for the current target gene.
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            List<String> tfunctionList = new ArrayList<String>();
            Iterator<Module> mIter = targetGene.moduleIterator();
            while (mIter.hasNext()) {
                Module m = mIter.next();
                Iterator<Domain> dIter = m.domainIterator();
                while (dIter.hasNext()) {
                    Domain d = dIter.next();
                    tfunction.add(d.getDomainFunctionString());
                    tfunctionList.add(d.getDomainFunctionString());
                    tactivity.add(d.getStatus().toString());
                    tsubstrate.add(d.getSubstrates());
                }
            }
            // Multiset intersection counts common domain functions with multiplicity.
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            int Differences = Math.max(Math.abs(DomainsCovered.size() - tfunction.size()),
                    Math.abs(DomainsCovered.size() - qfunction.size()));
            // Large targets fully covered by the query are selected outright;
            // all others become ranked candidates.
            if (DomainsCovered.size() == tfunction.size() && tfunction.size() > 4) {
                TargenesSelected.add(Integer.toString(currentTargetGene));
            } else {
                resultr.put(Differences, Integer.toString(currentTargetGene));
            }

        }
        int count = 0;
        if (resultr.size() > 0) {
            // Greedily pull candidates from the smallest-difference buckets until at
            // least 2 targets are selected or all candidates are already covered.
            while (TargenesSelected.size() < 2) {
                Multiset<String> k = LinkedHashMultiset.create(resultr.values());
                Multiset<String> t = LinkedHashMultiset.create(TargenesSelected);
                Multiset<String> Covered = Multisets.intersection(k, t);
                if (Covered.size() == k.size()) {
                    break;
                }

                try {
                    // keySet().toArray()[count] walks the ascending difference keys;
                    // once count exceeds the key count the thrown
                    // ArrayIndexOutOfBoundsException is deliberately ignored and the
                    // loop terminates via the coverage check above.
                    TargenesSelected.addAll(
                            resultr.get(Integer.parseInt(resultr.keySet().toArray()[count].toString())));
                } catch (Exception ds) {
                }
                count = count + 1;
            }
        }
        // //System.out.println(TargenesSelected);
        //  Permutation perm = new Permutation();
        //  List<String> perms = perm.run(TargenesSelected);
        // Enumerate all combinations of the selected targets.
        // NOTE(review): the (10, 10) bounds presumably cap combination sizes — confirm
        // against CombinationGenerator.
        CombinationGenerator c = new CombinationGenerator(10, 10);
        List<String> perms = c.GenerateAllPossibleCombinations(TargenesSelected);
        myFunction sim = new myFunction();
        double score = 0;
        String targetIdentified = "";
        List<ModuleGene> targetGenesList = secondPathway.getModulegenes();
        for (String permu : perms) {
            // permu looks like "[i, j, ...]"; strip brackets and merge the referenced
            // target genes' domain features into one virtual gene.
            String[] values = permu.replace("[", "").replace("]", "").split(",");
            List<String> mergedTargetgenes = new ArrayList<String>();
            List<Integer> ToRemove = new ArrayList<Integer>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (String j : values) {
                ToRemove.add(Integer.parseInt(j.trim()));
                for (Module m : targetGenesList.get(Integer.parseInt(j.trim()) - 1).getModule()) {
                    for (Domain i : m.getDomains()) {
                        mergedTargetgenes.add(i.getDomainFunctionString());
                        tactivity.add(i.getStatus().toString());
                        tsubstrate.add(i.getSubstrates());
                    }
                }
            }
            // Each calculate() returns a score -> detail multimap; the single key of
            // asMap() is the aggregate score for that feature.
            Multimap<Double, Multimap<String, Integer>> FunctionScores = sim.calculate(qfunctionList,
                    mergedTargetgenes);
            Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                    tactivity);
            Multimap<Double, Multimap<String, Integer>> substratescores = myFunction
                    .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
            Object FunctionScore = FunctionScores.asMap().keySet().toArray()[0];
            Object activityScore = activityscores.asMap().keySet().toArray()[0];
            Object substrateScore = substratescores.asMap().keySet().toArray()[0];

            // Weighted mix (weights sum to 3.0), rounded to 2 decimal places.
            double finalScore = Math
                    .round((((2.9 * Double.parseDouble(FunctionScore.toString().trim()))
                            + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                            + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                    / 100.0;
            targetIdentified = permu.replace(",", "+");
            String ConvertedGeneIDs = "";
            if (Yes == 0) {
                ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                        newSourceGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                                newTargetGeneIdToPositionMap);
            } else {
                ConvertedGeneIDs = reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                        newTargetGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                newSourceGeneIdToPositionMap);
            }
            // String ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene), newSourceGeneIdToPositionMap) + "->" + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""), newTargetGeneIdToPositionMap);

            result.put(finalScore, ConvertedGeneIDs);

            // Record per-pair detail scores in the shared bookkeeping maps.
            ScoreFunctionMatchMisMatch.putAll(ConvertedGeneIDs, FunctionScores.values());
            ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
            ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescores.values());

        }

    }
    return result;
}

From source file:de.hzi.helmholtz.Compare.PathwayComparisonUsingModules.java

/**
 * First pass of the pathway comparison: finds "perfect" gene pairs — query/target modules
 * whose domain-function multisets are identical — scores them, removes them from working
 * copies of both pathways, and hands the leftovers to {@link #SubsetIdentification} for
 * one-to-many matching.
 * <p>
 * Side effects: records per-pair detail scores in {@code ScoreFunctionMatchMisMatch},
 * {@code ScoreStatusMatchMisMatch} and {@code ScoreSubstrateMatchMisMatch}. The two bimap
 * parameters are reassigned locally for the recursive call; callers' maps are untouched.
 *
 * @param firstPathway  query pathway (not modified; a copy is pruned)
 * @param secondPathway target pathway (not modified; a copy is pruned)
 * @param newSourceGeneIdToPositionMap maps query module ids to 1-based positions
 * @param newTargetGeneIdToPositionMap same for the target pathway
 * @param Yes direction flag: 0 renders "query->target", anything else "target->query"
 * @return multimap from final score (descending) to rendered "a->b" mapping strings,
 *         including the results of the recursive subset identification
 */
public Multimap<Double, String> SubsetsMatching(final PathwayUsingModules firstPathway,
        final PathwayUsingModules secondPathway, BiMap<String, Integer> newSourceGeneIdToPositionMap,
        BiMap<String, Integer> newTargetGeneIdToPositionMap, int Yes) {
    Multimap<Double, String> resultPerfect = TreeMultimap.create(Ordering.natural().reverse(),
            Ordering.natural());
    PathwayUsingModules firstPathwayCopy = new PathwayUsingModules(firstPathway);// Copy of the Query pathway
    PathwayUsingModules secondPathwayCopy = new PathwayUsingModules(secondPathway);// Copy of the Target pathway'
    // PathwayUsingModules secondPathwayCopy1 = new PathwayUsingModules(secondPathway);
    int currentQueryGene = 0;
    Iterator<Module> sourceGeneIt = firstPathway.geneIterator();
    List<String> QueryToRemove = new ArrayList<String>();
    List<String> TargetToRemove = new ArrayList<String>();
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++;
        Module queryGene = sourceGeneIt.next();

        int currentTargetGene = 0;
        // Query-gene features collected over its domains.
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Domain d : queryGene.getDomains()) {
            qfunction.add(d.getDomainFunctionString());
            qfunctionList.add(d.getDomainFunctionString());
            qactivity.add(d.getStatus().toString());
            qsubstrate.add(d.getSubstrates());
        }
        Iterator<Module> targetGeneIt = secondPathway.geneIterator();

        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            Module targetGene = targetGeneIt.next();
            // Same feature extraction for the current target gene.
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tfunctionList = new ArrayList<String>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (Domain d : targetGene.getDomains()) {
                tfunctionList.add(d.getDomainFunctionString());
                tfunction.add(d.getDomainFunctionString());
                tactivity.add(d.getStatus().toString());
                tsubstrate.add(d.getSubstrates());
            }
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            // Perfect match: both function multisets are fully covered by the intersection.
            if (DomainsCovered.size() == qfunction.size() && DomainsCovered.size() == tfunction.size()) {
                Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                        tactivity);
                Multimap<String, Integer> Functionscores = ArrayListMultimap.create();

                // Functions match as multisets; Levenshtein over the ordered lists only
                // flags whether any transposition exists (clamped to 0/1).
                int TranspositionDomains = LevenshteinDistance.computeLevenshteinDistance(qfunctionList,
                        tfunctionList);
                if (TranspositionDomains > 0) {
                    TranspositionDomains = 1;
                }

                Functionscores.put(qfunction.size() + "-0", TranspositionDomains);
                Multimap<Double, Multimap<String, Integer>> substratescore = myFunction
                        .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
                Object activityScore = activityscores.asMap().keySet().toArray()[0];
                Object substrateScore = substratescore.asMap().keySet().toArray()[0];
                // Function component is a perfect 1.0 here; weights 2.9/0.05/0.05 over 3,
                // rounded to 2 decimal places.
                double finalScore = Math
                        .round((((2.9 * 1.0) + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                                + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                        / 100.0;
                String ConvertedGeneIDs = "";
                if (Yes == 0) {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                            newSourceGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentTargetGene),
                                    newTargetGeneIdToPositionMap);
                } else {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentTargetGene),
                            newTargetGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                    newSourceGeneIdToPositionMap);
                }
                resultPerfect.put(finalScore, ConvertedGeneIDs);
                ScoreFunctionMatchMisMatch.put(ConvertedGeneIDs, Functionscores);
                ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
                ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescore.values());

                // Remember matched modules so they are pruned from the working copies.
                TargetToRemove.add(reconstructWithGeneId(Integer.toString(currentTargetGene),
                        newTargetGeneIdToPositionMap));
                QueryToRemove.add(reconstructWithGeneId(Integer.toString(currentQueryGene),
                        newSourceGeneIdToPositionMap));
            }
        }

    }
    for (String i : TargetToRemove) {
        secondPathwayCopy.removeModule(i);
    }
    for (String i : QueryToRemove) {
        firstPathwayCopy.removeModule(i);
    }
    if (firstPathwayCopy.size() > 0 && secondPathwayCopy.size() > 0) {
        // Re-construct the bimaps
        newSourceGeneIdToPositionMap = HashBiMap.create();
        int temp = 0;
        for (Module e : firstPathwayCopy.getModules()) {
            temp = temp + 1;
            newSourceGeneIdToPositionMap.put(e.getModuleId(), temp);
        }
        newTargetGeneIdToPositionMap = HashBiMap.create();
        temp = 0;
        for (Module e : secondPathwayCopy.getModules()) {
            temp = temp + 1;
            newTargetGeneIdToPositionMap.put(e.getModuleId(), temp);
        }
        // Match the remaining (non-perfect) genes one-to-many.
        resultPerfect.putAll(SubsetIdentification(firstPathwayCopy, secondPathwayCopy,
                newSourceGeneIdToPositionMap, newTargetGeneIdToPositionMap, Yes));
    }
    ////System.out.println(resultPerfect);
    return resultPerfect;
}

From source file:com.puppetlabs.geppetto.validation.impl.DirectoryValidatorImpl.java

/**
 * Loads and validates every Ruby file in {@code rbFiles}, collecting diagnostics and —
 * when {@code options.isExtractDocs()} is set — extracting type/provider/function
 * documentation and attaching it to the module whose directory contains the file.
 *
 * @param moduleInfos metadata per module; values are keyed here by their parent directory
 *        so extracted docs can be routed to the owning module
 * @param ticker progress monitor; each file accounts for 2 units of work
 */
private void loadRubyFiles(Multimap<ModuleName, MetadataInfo> moduleInfos, SubMonitor ticker) {
    // Load all ruby
    Multimap<String, Provider> allProviders = ArrayListMultimap.create();
    Map<File, Type> allTypes = Maps.newHashMap();
    Map<File, Function> allFunctions = Maps.newHashMap();
    for (File f : rbFiles) {
        try {
            // Skip "Rakefile.rb" or it will be processed twice (but still tick x2:
            // once for validate and once for load - as this is included in work-count).
            // equalsIgnoreCase avoids the locale-sensitive default toLowerCase()
            // (a Turkish default locale maps 'I' to dotless 'ı' and breaks the match).
            if (f.getName().equalsIgnoreCase("rakefile.rb")) {
                worked(ticker, 2);
                continue;
            }

            URI uri = URI.createFileURI(f.getPath());
            // Files the Ruby PPTP loader ignores are only validated, not loaded.
            if (PptpRubyResource.detectLoadType(uri) == LoadType.IGNORED) {
                validateRubyFile(f, ticker);
                continue;
            }

            Resource r = ppRunner.loadResource(new FileInputStream(f), uri);
            if (options.isValidationCandidate(f)) {
                // Surface both errors and warnings produced while loading.
                for (org.eclipse.emf.ecore.resource.Resource.Diagnostic diag : r.getErrors())
                    if (diag instanceof RubyIssueDiagnostic)
                        addRubyIssueDiagnostic(((RubyIssueDiagnostic) diag).getIssue(), f);
                for (org.eclipse.emf.ecore.resource.Resource.Diagnostic diag : r.getWarnings())
                    if (diag instanceof RubyIssueDiagnostic)
                        addRubyIssueDiagnostic(((RubyIssueDiagnostic) diag).getIssue(), f);

                if (options.isExtractDocs())
                    // Bucket the loaded contents for the doc-extraction pass below.
                    for (EObject c : r.getContents()) {
                        if (c instanceof Type)
                            allTypes.put(f, (Type) c);
                        else if (c instanceof Provider) {
                            Provider p = (Provider) c;
                            allProviders.put(p.getTypeName(), p);
                        } else if (c instanceof Function)
                            allFunctions.put(f, (Function) c);
                    }
            }

            if (options.isCheckReferences())
                rememberRootInResource(r);
            worked(ticker, 1);
        } catch (Exception e) {
            // Best-effort per file: one bad file must not abort the whole directory scan.
            addExceptionDiagnostic("Internal Error: Exception while processing file: " + f.getName(), e);
        }
    }

    if (options.isExtractDocs()) {
        // Key all MetadataInfo with their respective module directory
        Map<java.nio.file.Path, MetadataInfo> mdInfos = Maps.newHashMap();
        for (MetadataInfo mdInfo : moduleInfos.values())
            mdInfos.put(mdInfo.getFile().getParentFile().toPath(), mdInfo);

        // Convert each parsed Type to the forge model and attach it (plus its providers)
        // to the first module whose directory is a prefix of the type's file path.
        for (Map.Entry<File, Type> typeEntry : allTypes.entrySet()) {
            Type t = typeEntry.getValue();
            com.puppetlabs.geppetto.forge.model.Type ft = new com.puppetlabs.geppetto.forge.model.Type();
            ft.setDoc(t.getDocumentation());
            ft.setName(t.getName());
            ft.setParameters(convertToNamedTypeItems(t.getParameters()));
            ft.setProperties(convertToNamedTypeItems(t.getProperties()));
            ft.setProviders(convertToNamedTypeItems(allProviders.get(t.getName())));

            java.nio.file.Path typePath = typeEntry.getKey().toPath();
            for (Map.Entry<java.nio.file.Path, MetadataInfo> mdEntry : mdInfos.entrySet())
                if (typePath.startsWith(mdEntry.getKey())) {
                    mdEntry.getValue().addType(ft);
                    break;
                }
        }
        // Same routing for extracted functions.
        for (Map.Entry<File, Function> functionEntry : allFunctions.entrySet()) {
            Function f = functionEntry.getValue();
            NamedDocItem ff = new NamedDocItem();
            ff.setDoc(f.getDocumentation());
            ff.setName(f.getName());
            java.nio.file.Path typePath = functionEntry.getKey().toPath();
            for (Map.Entry<java.nio.file.Path, MetadataInfo> mdEntry : mdInfos.entrySet())
                if (typePath.startsWith(mdEntry.getKey())) {
                    mdEntry.getValue().addFunction(ff);
                    break;
                }
        }
    }
}

From source file:com.google.devtools.build.android.AndroidResourceProcessor.java

/**
 * Loads the R.txt symbol tables of the given libraries (keyed by package name into
 * {@code libMap}) plus the app's primary R.txt, parsing manifests and symbol files on a
 * shared thread pool.
 *
 * @param libraries symbol/manifest providers for the dependent libraries
 * @param appPackageName the application's own package; libraries matching it are skipped
 *        because the final app resource IDs come from {@code primaryRTxt}
 * @param primaryRTxt path to the binary's own R.txt (may not exist)
 * @param libMap out-parameter: receives one loaded SymbolLoader per foreign package
 * @return the loader for the primary R.txt, or null when that file does not exist
 * @throws IOException when manifest parsing or symbol loading fails
 */
@Nullable
public SymbolLoader loadResourceSymbolTable(List<SymbolFileProvider> libraries, String appPackageName,
        Path primaryRTxt, Multimap<String, SymbolLoader> libMap) throws IOException {
    // The reported processor count may exceed real capacity on shared hardware, and the
    // work is partly I/O-bound anyway, so halve it (never below one worker).
    int workerCount = Math.max(1, Runtime.getRuntime().availableProcessors() / 2);
    ListeningExecutorService executor = MoreExecutors
            .listeningDecorator(Executors.newFixedThreadPool(workerCount));
    try (Closeable closer = ExecutorServiceCloser.createWith(executor)) {
        // Parse every library manifest concurrently to discover its package name.
        Map<SymbolFileProvider, ListenableFuture<String>> manifestJobs = new HashMap<>();
        for (final SymbolFileProvider library : libraries) {
            manifestJobs.put(library, executor.submit(new PackageParsingTask(library.getManifest())));
        }
        Map<SymbolFileProvider, String> packageNames = new HashMap<>();
        try {
            for (Map.Entry<SymbolFileProvider, ListenableFuture<String>> job : manifestJobs.entrySet()) {
                packageNames.put(job.getKey(), job.getValue().get());
            }
        } catch (InterruptedException | ExecutionException e) {
            throw new IOException("Failed to load package name: ", e);
        }
        // Pair each foreign package with its symbol file, when one exists.
        for (SymbolFileProvider library : libraries) {
            String packageName = packageNames.get(library);
            if (appPackageName.equals(packageName)) {
                // The app's own resource IDs are stored in the primaryRTxt file.
                continue;
            }
            File symbolFile = library.getSymbolFile();
            if (symbolFile.isFile()) {
                // A missing file simply means the library declares no resources.
                libMap.put(packageName, new SymbolLoader(symbolFile, stdLogger));
            }
        }
        // Even with no libraries, the binary itself may define resources, so always
        // attempt to load the primary R.txt.
        SymbolLoader fullSymbolValues = null;
        File primaryRTxtFile = primaryRTxt.toFile();
        if (primaryRTxtFile.isFile()) {
            fullSymbolValues = new SymbolLoader(primaryRTxtFile, stdLogger);
        }
        // Parse all collected symbol tables in parallel and wait for completion.
        List<ListenableFuture<?>> loadJobs = new ArrayList<>();
        Iterable<SymbolLoader> loaders = fullSymbolValues == null ? libMap.values()
                : Iterables.concat(libMap.values(), ImmutableList.of(fullSymbolValues));
        for (final SymbolLoader loader : loaders) {
            loadJobs.add(executor.submit(new SymbolLoadingTask(loader)));
        }
        try {
            Futures.allAsList(loadJobs).get();
        } catch (InterruptedException | ExecutionException e) {
            throw new IOException("Failed to load SymbolFile: ", e);
        }
        return fullSymbolValues;
    }
}

From source file:de.hzi.helmholtz.Compare.PathwayComparisonUsingModules.java

/**
 * Greedy one-to-many matching: scores each query module of {@code firstPathway} against
 * combinations of target modules of {@code secondPathway}.
 * <p>
 * Target modules are bucketed by domain-difference count; a candidate set whose size
 * depends on the pathway sizes is selected greedily from the smallest-difference buckets;
 * every combination of the candidates is scored with a weighted mix of function, activity
 * and substrate similarity (weights 2.9 / 0.05 / 0.05, divided by 3, rounded to 2 decimals).
 * <p>
 * Side effects: appends per-pair detail scores to {@code ScoreFunctionMatchMisMatch},
 * {@code ScoreStatusMatchMisMatch} and {@code ScoreSubstrateMatchMisMatch}.
 *
 * @param firstPathway  query pathway
 * @param secondPathway target pathway
 * @param newSourceGeneIdToPositionMap maps query module ids to 1-based positions; used by
 *        {@code reconstructWithGeneId} to render readable ids
 * @param newTargetGeneIdToPositionMap same mapping for the target pathway
 * @param Yes direction flag: 0 renders "query->target", anything else "target->query"
 * @return multimap from final score (descending) to rendered "a->b" mapping strings
 */
public Multimap<Double, String> SubsetIdentification(PathwayUsingModules firstPathway,
        PathwayUsingModules secondPathway, BiMap<String, Integer> newSourceGeneIdToPositionMap,
        BiMap<String, Integer> newTargetGeneIdToPositionMap, int Yes) {
    // Final scores, highest first; for equal scores all mappings are kept (values sorted).
    Multimap<Double, String> result = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural());

    Iterator<Module> sourceGeneIt = firstPathway.geneIterator();
    int currentQueryGene = 0;
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++; // 1-based position of the current query module
        Module queryGene = sourceGeneIt.next();
        // Candidate target modules keyed by domain-difference count, ascending (best first).
        Multimap<Integer, String> resultr = TreeMultimap.create(Ordering.natural(), Ordering.natural());
        int currentTargetGene = 0;
        // Query-module features collected over its domains.
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Domain d : queryGene.getDomains()) {
            qfunction.add(d.getDomainFunctionString());
            qfunctionList.add(d.getDomainFunctionString());
            qactivity.add(d.getStatus().toString());
            qsubstrate.add(d.getSubstrates());
        }
        List<String> TargenesSelected = new ArrayList<String>();
        Iterator<Module> targetGeneIt = secondPathway.geneIterator();
        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            Module targetGene = targetGeneIt.next();
            // Same feature extraction for the current target module.
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            List<String> tfunctionList = new ArrayList<String>();
            Iterator<Domain> dIter = targetGene.domainIterator();
            while (dIter.hasNext()) {
                Domain d = dIter.next();
                tfunction.add(d.getDomainFunctionString());
                tfunctionList.add(d.getDomainFunctionString());
                tactivity.add(d.getStatus().toString());
                tsubstrate.add(d.getSubstrates());
            }
            // Multiset intersection counts common domain functions with multiplicity.
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            int Differences = Math.max(Math.abs(DomainsCovered.size() - tfunction.size()),
                    Math.abs(DomainsCovered.size() - qfunction.size()));
            // Large targets fully covered by the query are selected outright;
            // all others become ranked candidates.
            if (DomainsCovered.size() == tfunction.size() && tfunction.size() > 4) {
                TargenesSelected.add(Integer.toString(currentTargetGene));
            } else {
                resultr.put(Differences, Integer.toString(currentTargetGene));
            }

        }
        int count = 0;
        if (resultr.size() > 0) {
            // Candidate-set size depends on the pathway sizes: larger pathways keep the
            // combination space smaller; mid-size pathways allow more candidates.
            int tsize = 0;
            if ((firstPathway.size() > 8 && firstPathway.size() < 10)
                    || (secondPathway.size() > 8 && secondPathway.size() < 10)) {
                tsize = 2;
            } else if ((firstPathway.size() > 2 && firstPathway.size() < 8)
                    && (secondPathway.size() > 2 && secondPathway.size() < 8)) {
                tsize = 4;
            } else {
                tsize = 1;
            }
            // Greedily pull candidates from the smallest-difference buckets until tsize
            // targets are selected or all candidates are already covered.
            while (TargenesSelected.size() < tsize) {
                Multiset<String> k = LinkedHashMultiset.create(resultr.values());
                Multiset<String> t = LinkedHashMultiset.create(TargenesSelected);
                Multiset<String> Covered = Multisets.intersection(k, t);
                if (Covered.size() == k.size()) {
                    break;
                }

                try {
                    // keySet().toArray()[count] walks the ascending difference keys; once
                    // count exceeds the key count the ArrayIndexOutOfBoundsException is
                    // deliberately swallowed and the loop exits via the coverage check.
                    TargenesSelected.addAll(
                            resultr.get(Integer.parseInt(resultr.keySet().toArray()[count].toString())));
                } catch (Exception ds) {
                }
                count = count + 1;
            }
        }
        // ////System.out.println(TargenesSelected);
        //  Permutation perm = new Permutation();
        //  List<String> perms = perm.run(TargenesSelected);
        // Enumerate all combinations of the selected targets.
        // NOTE(review): the (10, 10) bounds presumably cap combination sizes — confirm
        // against CombinationGenerator.
        CombinationGenerator c = new CombinationGenerator(10, 10);
        List<String> perms = c.GenerateAllPossibleCombinations(TargenesSelected);
        myFunction sim = new myFunction();
        double score = 0;
        String targetIdentified = "";
        List<Module> targetGenesList = secondPathway.getModules();
        for (String permu : perms) {
            // permu looks like "[i, j, ...]"; strip brackets and merge the referenced
            // target modules' domain features into one virtual module.
            String[] values = permu.replace("[", "").replace("]", "").split(",");
            List<String> mergedTargetgenes = new ArrayList<String>();
            List<Integer> ToRemove = new ArrayList<Integer>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (String j : values) {
                ToRemove.add(Integer.parseInt(j.trim()));
                for (Domain i : targetGenesList.get(Integer.parseInt(j.trim()) - 1).getDomains()) {

                    mergedTargetgenes.add(i.getDomainFunctionString());
                    tactivity.add(i.getStatus().toString());
                    tsubstrate.add(i.getSubstrates());
                }
            }
            // Each calculate() returns a score -> detail multimap; the single key of
            // asMap() is the aggregate score for that feature.
            Multimap<Double, Multimap<String, Integer>> FunctionScores = sim.calculate(qfunctionList,
                    mergedTargetgenes);
            Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                    tactivity);
            Multimap<Double, Multimap<String, Integer>> substratescores = myFunction
                    .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
            Object FunctionScore = FunctionScores.asMap().keySet().toArray()[0];
            Object activityScore = activityscores.asMap().keySet().toArray()[0];
            Object substrateScore = substratescores.asMap().keySet().toArray()[0];

            // Weighted mix (weights sum to 3.0), rounded to 2 decimal places.
            double finalScore = Math
                    .round((((2.9 * Double.parseDouble(FunctionScore.toString().trim()))
                            + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                            + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                    / 100.0;
            targetIdentified = permu.replace(",", "+");
            String ConvertedGeneIDs = "";
            if (Yes == 0) {
                ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                        newSourceGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                                newTargetGeneIdToPositionMap);
            } else {
                ConvertedGeneIDs = reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""),
                        newTargetGeneIdToPositionMap) + "->"
                        + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                newSourceGeneIdToPositionMap);
            }
            // String ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene), newSourceGeneIdToPositionMap) + "->" + reconstructWithGeneId(targetIdentified.replace("[", "").replace("]", ""), newTargetGeneIdToPositionMap);

            result.put(finalScore, ConvertedGeneIDs);

            // Record per-pair detail scores in the shared bookkeeping maps.
            ScoreFunctionMatchMisMatch.putAll(ConvertedGeneIDs, FunctionScores.values());
            ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
            ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescores.values());

        }

    }
    return result;
}