Example usage for java.util Set retainAll

List of usage examples for java.util Set retainAll

Introduction

On this page you can find example usage for java.util Set retainAll.

Prototype

boolean retainAll(Collection<?> c);

Document

Retains only the elements in this set that are contained in the specified collection (optional operation).
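
For orientation, here is a minimal, self-contained sketch of that contract (the class and variable names are illustrative, not taken from the examples below): retainAll removes every element of this set that is not contained in the argument collection, and its boolean result reports whether the set changed.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RetainAllSketch {
    public static void main(String[] args) {
        Set<String> letters = new HashSet<>(Arrays.asList("A", "B", "C"));
        Set<String> keep = new HashSet<>(Arrays.asList("B", "C", "D"));

        // Keep only the elements of letters that also appear in keep
        boolean changed = letters.retainAll(keep);

        System.out.println(changed); // true, because "A" was removed
        System.out.println(letters); // [B, C] (HashSet iteration order is not guaranteed)
    }
}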

Usage

From source file:Main.java

public static void main(String[] argv) throws Exception {
    Set<String> set1 = new HashSet<>();
    Set<String> set2 = new HashSet<>();

    // Keep only the elements of set1 that are also in set2
    set1.retainAll(set2);

    // Remove all elements from a set
    set1.clear();
}

From source file:MainClass.java

public static void main(String[] args) {

    // Create two sets.
    Set<String> s1 = new HashSet<>();
    s1.add("A");
    s1.add("B");
    s1.add("C");

    Set<String> s2 = new HashSet<>();
    s2.add("A");
    s2.add("B");

    Set<String> union = new TreeSet<>(s1);
    union.addAll(s2); // now contains the union

    // print is a small helper defined elsewhere in this class
    print("union", union);

    Set<String> intersect = new TreeSet<>(s1);
    intersect.retainAll(s2); // now contains only the elements found in both sets

    print("intersection", intersect);

}

From source file:SetStuff.java

public static void main(String[] args) {

    // Create two sets.
    Set<String> s1 = new HashSet<>();
    s1.add("Ian Darwin");
    s1.add("Bill Dooley");
    s1.add("Jesse James");

    Set<String> s2 = new HashSet<>();
    s2.add("Ian Darwin");
    s2.add("Doolin' Dalton");

    Set<String> union = new TreeSet<>(s1);
    union.addAll(s2); // now contains the union

    print("union", union);

    Set<String> intersect = new TreeSet<>(s1);
    intersect.retainAll(s2); // now contains only the names present in both sets

    print("intersection", intersect);

}
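
Note that retainAll modifies the set it is called on, which is why both examples above first copy s1 into a new TreeSet: the copy is what gets reduced to the intersection, while s1 keeps all of its elements (and TreeSet keeps the printed output sorted). A minimal restatement of that pattern, reusing the s1 and s2 built in the example above:

    Set<String> intersect = new TreeSet<>(s1); // defensive copy; s1 itself is left untouched
    boolean changed = intersect.retainAll(s2); // true if any element was dropped from the copy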

From source file:ubic.pubmedgate.resolve.focusedAnalysis.CreateNIFBulkDump.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {

    // training or unsupervised
    boolean allComp = true;
    // if training then check for accepted
    Model modelLoad = ModelFactory.createDefaultModel();
    String fileProperty;

    if (allComp) {
        fileProperty = "resolve.Lexicon.resolution.RDF.allComp";
        modelLoad.read(new FileInputStream(Config.config.getString(fileProperty)), null);
    }

    fileProperty = "resolve.Lexicon.resolution.RDF";
    modelLoad.read(new FileInputStream(Config.config.getString(fileProperty)), null);

    boolean reason = true;
    EvaluationRDFModel model = new EvaluationRDFModel(modelLoad, reason);

    int count = 0;
    int rejected = 0;
    String result = "pmid,mention,concept";
    log.info("total number of pmids:" + model.getPMIDs().size());
    Set<Resource> NIFSTDConcepts = model.getNIFSTDConcepts();
    Set<Resource> allTerms = model.getTerms();
    int passedPMIDs = 0;
    StopWatch s = new StopWatch();
    s.start();
    Set<Resource> PMIDs = model.getPMIDs();
    for (Resource pmid : PMIDs) {

        log.info("Time per PMID:" + (s.getTime() / ++passedPMIDs) + " processed:" + passedPMIDs + " of "
                + PMIDs.size());

        String pmidID = pmid.getURI().substring(pmid.getURI().lastIndexOf(':') + 1);
        log.info(pmidID);
        // PMID -> Mention -> Concept
        Set<Resource> mentions = model.getMentionsInPMID(pmid);
        for (Resource mention : mentions) {
            Set<Resource> terms = model.getLinkedResources(mention);
            // these are all linked resources until filtered down to terms
            terms.retainAll(allTerms);

            String mentionString = JenaUtil.getLabel(mention);
            for (Resource term : terms) {
                Set<Resource> concepts = model.getConceptsFromTerms(term);
                String termString = JenaUtil.getLabel(term);

                concepts.retainAll(NIFSTDConcepts);

                for (Resource concept : concepts) {
                    if (!model.rejected(mention, concept)) {
                        String conceptLabel = JenaUtil.getLabel(concept);
                        String conceptURI = "http://ontology.neuinfo.org/NIF/BiomaterialEntities/NIF-GrossAnatomy.owl#"
                                + concept.getLocalName();
                        log.info("  " + mentionString + "->" + termString + "->" + conceptLabel + "  "
                                + conceptURI);
                        // write mention, concept and pubmed abstract
                        result += "\r\n" + pmidID + ",\"" + mentionString + "\"," + conceptURI;
                        count++;
                    } else {
                        rejected++;
                    }
                }
            }
        }
    }
    log.info("Number printed:" + count);
    log.info("Number rejected:" + rejected);
    FileTools.stringToFile(result,
            new File(Config.config.getString("whitetext.resolve.results.folder") + "forNIFAll.txt"));

}

From source file:ubic.BAMSandAllen.RankedGeneListLoader.java

public static void main(String[] args) throws Exception {

    // RankedGeneListLoader jesseIdea = new RankedGeneListLoader(
    // "/grp/java/workspace/BAMSandAllen/data/rankedGenes/ranked once/Incoming.partialcon.genes.txt" );

    boolean removeNonExp = true;
    boolean useVirtual = true;
    boolean keepSign = true;

    // ERG, gene info top ten results varies for direction
    Direction direction = AnalyzeBAMSandAllenGenes.Direction.ANYDIRECTION;

    ConnectivityAndAllenExpressionMatrixPair pair = ExpressionMatrixPairFactory
            .connectivityAndExpression(direction, useVirtual, removeNonExp);
    // ConnectivityAndAllenExpressionMatrixPair pair = ExpressionMatrixPairFactory.connectivityPartial( direction,
    // false, RegressMatrix.CONNECTIVITY, useVirtual, removeNonExp, true );

    // need a partialcon pair
    // jesseIdea.writeCorrelationBasedData( pair );
    // System.exit( 1 );
    // pair.applyGeneFilter( new PrefixGeneFilter( "Drd" ) );
    // log.info( "Genes:" + Util.getUniqueGenes( pair.getMatrixBDataRows() ) );
    // log.info( pair.getCorrelation() );
    // pair.writeImages();
    // System.exit( 1 );

    // processDirectoryNoMissing( "/grp/java/workspace/BAMSandAllen/data/rankedGenes/direct increasing", pair );

    // processDirectoryNoMissing( "/grp/java/workspace/BAMSandAllen/data/rankedGenes/near final nobed", pair );
    processDirectoryNoMissing("/grp/java/workspace/BAMSandAllen/data/rankedGenes/LitCon", pair);
    System.exit(1);

    // LOOGenesInOrder.out.partialcon.txt.329.0.014448.topGenes.txt
    // LOOGenesInOrder.in.partialcon.txt.410.0.018005.topGenes.txt
    // LOOGenesInOrder.space.7.txt.435.0.019093.topGenes.txt

    /*
     * RankedGeneListLoader aLook = new RankedGeneListLoader(
     * "/grp/java/workspace/BAMSandAllen/data/rankedGenes/near final
     * nobed/LOOGenesInOrder.space.7.txt.435.0.019093.topGenes.txt" );
     */
    // ConnectivityAndAllenExpressionMatrixPair.NewEnergies.incoming.rOrder
    // ConnectivityAndAllenExpressionMatrixPair.NewEnergies.space.rOrder
    // ConnectivityAndAllenExpressionMatrixPair.NewEnergies.outgoing.rOrder
    // LOOGenesInOrder.out.partialcon.txt.329.0.014448.topGenes.txt
    // LOOGenesInOrder.in.partialcon.txt.410.0.018005.topGenes.txt
    // LOOGenesInOrder.space.7.txt.435.0.019093.topGenes.txt
    RankedGeneListLoader aLook = new RankedGeneListLoader(
            "/grp/java/workspace/BAMSandAllen/data/rankedGenes/near final nobed/LOOGenesInOrder.out.partialcon.txt.329.0.014448.topGenes.txt");
    pair.setMatrixBDataRows(aLook.lines);
    // log.info( pair.getCorrelation() );
    // pair.writeRMatrices();
    // pair.writeImages();
    pair.orderDataRows(aLook.lines);
    log.info(pair.getCorrelation());
    System.exit(1);
    pair.writeRMatrices();
    pair.writeImages();

    ExploreRegionNames explore = new ExploreRegionNames(pair);
    StringToStringSetMap parents = explore.getParents();

    String focusRegion = "Midbrain";
    Set<String> ROIs = parents.get(focusRegion);
    // some may have no expression data
    ROIs.retainAll(pair.getAllenDataColNames());
    pair.removeAllenCols(ROIs);
    // Hindbrain
    // Interbrain
    // Midbrain
    // Cerebrum
    pair.run();

    log.info(focusRegion + ":" + pair.getCorrelation());
    pair.test(1000);

    System.exit(1);

    List<String> lines = aLook.lines;
    lines.remove(0);
    lines.remove(1);
    lines.remove(2);
    lines.remove(3);
    lines.remove(4);
    lines.remove(5);
    lines.remove(6);
    lines.remove(7);
    lines.remove(8);
    lines.remove(9);
    pair.setMatrixBDataRows(aLook.lines);
    log.info(pair.getCorrelation());
    log.info(lines.size());

    // processDirectoryNoMissing( "/grp/java/workspace/BAMSandAllen/data/rankedGenes/for Ray", pair );
    // processDirectoryNoMissing( "/grp/java/workspace/BAMSandAllen/data/rankedGenes/near final 962 regions", pair
    // );
    // processDirectoryNoMissing( "/grp/java/workspace/BAMSandAllen/data/rankedGenes/decreasing after lab meeting",
    // pair, keepSign );

}

From source file:ubic.pubmedgate.resolve.focusedAnalysis.PickNeuroLexRegions.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // has human species
    // has spec to gen match to NIFSTD
    // no rejections
    // sort by occurrences
    // get PMID references

    // common.add( "species:ncbi:9606" ); // human

    Model modelLoad = ModelFactory.createDefaultModel();
    String fileProperty;
    fileProperty = "resolve.Lexicon.resolution.RDF.allComp";
    modelLoad.read(new FileInputStream(Config.config.getString(fileProperty)), null);
    log.info(modelLoad.size());
    fileProperty = "resolve.Lexicon.resolution.RDF";
    modelLoad.read(new FileInputStream(Config.config.getString(fileProperty)), null);
    log.info(modelLoad.size());

    boolean reason = true;
    EvaluationRDFModel model = new EvaluationRDFModel(modelLoad, reason);
    Set<Resource> humanMentions = model.getMentionsForSpecies("9606");
    Set<Resource> commonSpeciesMentions = new HashSet<Resource>();
    Set<String> commonSpecies = SpeciesCounter.getCommonSpeciesLinStrings();
    Set<Resource> commonSpeciesPMIDs = new HashSet<Resource>();

    for (String common : commonSpecies) {
        common = common.substring(common.lastIndexOf(":") + 1);
        Set<Resource> mentionsForSpecies = model.getMentionsForSpecies(common);
        commonSpeciesMentions.addAll(mentionsForSpecies);

        commonSpeciesPMIDs.addAll(model.getPMIDsForSpecies(common));

    }

    log.info("Common mentions:" + commonSpeciesMentions.size());
    commonSpeciesMentions.retainAll(model.getMatchedMentions());

    log.info("Common pmids:" + commonSpeciesPMIDs.size());

    log.info("Matched common mentions:" + commonSpeciesMentions.size());

    log.info("Human mentions:" + humanMentions.size());
    Set<Resource> BIRNConcepts = model.getNIFSTDConcepts();
    Set<Resource> NNConcepts = model.getNNConcepts();
    humanMentions.retainAll(model.getMatchedMentions());

    log.info("Matched Human mentions:" + humanMentions.size());

    Set<Resource> finalRegionMentions = new HashSet<Resource>();
    int occuranceThreshold = 0;
    int count = 0;
    int nnLinks = 0;
    int birnConcepts = 0;
    ParamKeeper keeper = new ParamKeeper();

    ParamKeeper finalKeeper = new ParamKeeper();

    for (Resource commonMention : commonSpeciesMentions) {
        Set<Resource> concepts = model.getMentionEvaluations(commonMention);
        // there are going to be two of these properties - get the larger? should have the number of unseen occurrences
        int occurances = commonMention.getProperty(Vocabulary.number_of_occurances).getInt();
        // int abstractCount = commonMention.getProperty( Vocabulary.number_of_abstracts ).getInt();
        Set<Resource> pmids = model.getPMIDs(commonMention);
        Set<Resource> commonPmids = (Set<Resource>) Util.intersect(commonSpeciesPMIDs, pmids);
        // filter pmids for common species
        // if the pmid has link to one of common

        // StmtIterator statements = neuromention.listProperties( Vocabulary.in_PMID );
        // Resource taxon = model.createResource( "http://bio2rdf.org/taxon:" + species );
        // ResIterator resIt = model.listResourcesWithProperty( Vocabulary.mentions_species, taxon );

        int abstractCount = pmids.size();
        int commonAbstractCount = commonPmids.size();

        for (Resource concept : concepts) {
            if (BIRNConcepts.contains(concept)) {
                birnConcepts++;
                // how do I know it's spec to gen? -need to create automatic evaluations!
                if (model.specToGen(commonMention, concept) && commonAbstractCount > 1) {
                    Map<String, String> params = new HashMap<String, String>();
                    params.put("mention URI", commonMention.getURI());
                    String mentionLabel = JenaUtil.getLabel(commonMention);
                    mentionLabel = mentionLabel.substring(0, 1).toUpperCase() + mentionLabel.substring(1);
                    params.put("mention label", mentionLabel);
                    params.put("parent concept URI", concept.getURI());
                    params.put("parent concept label", "\"" + JenaUtil.getLabel(concept) + "\"");
                    params.put("abstract count", abstractCount + "");
                    params.put("common abstract count", commonAbstractCount + "");

                    params.put("PubMed link",
                            "HYPERLINK(\"" + model.getNCBIPMIDLink(commonPmids) + "\",\"PubMed Link\")");

                    count++;
                    finalRegionMentions.add(commonMention);

                    log.info(abstractCount + " " + commonAbstractCount + " " + JenaUtil.getLabel(commonMention)
                            + " < " + JenaUtil.getLabel(concept));

                    // TODO get birnlex IDs? - are they exact?
                    if (Util.intersectSize(NNConcepts, concepts) > 0) {
                        nnLinks++;
                        params.put("hasNNlink", "true");
                    } else {
                        params.put("hasNNlink", "false");
                    }

                    keeper.addParamInstance(params);

                    Map<String, String> finalParams = new HashMap<String, String>();
                    finalParams.put("Label", mentionLabel);
                    finalParams.put("Is part of", JenaUtil.getLabel(concept));
                    finalParams.put("Has role", "Brain Subdivisions based on automated term selection");
                    finalParams.put("Id", "");
                    String pmidComaString = "\"" + model.getComaSepPMIDs(commonPmids, 140) + "\"";
                    finalParams.put("PMID", pmidComaString);
                    finalKeeper.addParamInstance(finalParams);
                }
            }
        }
    }
    log.info("Number printed:" + count);
    log.info("Number in set:" + finalRegionMentions.size());
    log.info("Has NN links:" + nnLinks);
    log.info("Birn concepts:" + birnConcepts);

    keeper.writeExcel(Config.config.getString("whitetext.resolve.results.folder") + "forNeuroLex.xls");
    finalKeeper
            .writeExcel(Config.config.getString("whitetext.resolve.results.folder") + "forNeuroLexFinal.xls");

    // printMentionCountForPaper( finalRegionMentions, true );
    // printMentionCountForPaper( finalRegionMentions, false );
}

From source file:Main.java

// Returns a new, modifiable set containing only the elements present in both collections.
public static <T> Set<T> intersect(Collection<T> l1, Collection<T> l2) {
    Set<T> nl = new HashSet<T>(l1);
    nl.retainAll(l2);
    return nl;
}

From source file:Main.java

public static <E> Set<E> intersection(final Set<E> set1, final Set<E> set2) {
    Set<E> set = new HashSet<>(set1);
    set.retainAll(set2);
    return Collections.unmodifiableSet(set);
}
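
Unlike the intersect helper above, this variant wraps its result in Collections.unmodifiableSet, so callers receive a read-only view of the intersection. A hypothetical call (set1 and set2 stand for any two sets of strings):

    Set<String> common = intersection(set1, set2);
    common.add("X"); // throws UnsupportedOperationException: the returned view cannot be modified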

From source file:Main.java

public static void performIntersection(Set<String> s1, Set<String> s2) {
    Set<String> s1Intersections2 = new HashSet<>(s1);
    s1Intersections2.retainAll(s2);
    System.out.println("s1 intersection  s2: " + s1Intersections2);
}

From source file:Main.java

/**
 * Returns the intersection of the given Collections.
 *
 * @param c1  the first Collection.
 * @param c2  the second Collection.
 * @param <T> the type.
 * @return the intersection of the Collections.
 */
public static <T> Collection<T> intersection(Collection<T> c1, Collection<T> c2) {
    Set<T> set1 = new HashSet<>(c1);
    set1.retainAll(new HashSet<>(c2));
    return set1;
}
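
A hypothetical call to this helper, with illustrative inputs (Arrays.asList and the element values are assumptions, not part of the original source):

    Collection<String> common = intersection(Arrays.asList("A", "B", "C"), Arrays.asList("B", "C", "D"));
    System.out.println(common); // prints [B, C] in some order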