Example usage for java.util Collection clear

List of usage examples for java.util Collection clear

Introduction

On this page you can find example usage for java.util Collection clear.

Prototype

void clear();

Source Link

Document

Removes all of the elements from this collection (optional operation).
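
Before the project examples below, here is a minimal, self-contained sketch of the basic behaviour (the class and variable names are illustrative only, not taken from any of the sources on this page): clear() removes every element in place, so size() drops to 0 and isEmpty() returns true, while the collection object itself remains usable.

import java.util.ArrayList;
import java.util.Collection;

public class CollectionClearExample {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<>();
        names.add("alpha");
        names.add("beta");
        System.out.println(names.size());    // prints 2

        names.clear();                        // removes all elements (optional operation)

        System.out.println(names.isEmpty()); // prints true
        names.add("gamma");                   // the emptied collection can still be used
    }
}

Note that clear() is an optional operation: fixed-size or immutable collections, such as those returned by Arrays.asList() or List.of(), throw UnsupportedOperationException instead of emptying themselves.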

Usage

From source file:ubic.gemma.core.loader.expression.arrayDesign.ArrayDesignSequenceProcessingServiceImpl.java

@Override
public Collection<BioSequence> processArrayDesign(ArrayDesign arrayDesign, InputStream sequenceIdentifierFile,
        String[] databaseNames, String blastDbHome, Taxon taxon, boolean force, FastaCmd fc)
        throws IOException {
    this.checkForCompositeSequences(arrayDesign);

    Map<String, String> probe2acc = this.parseAccessionFile(sequenceIdentifierFile);

    // values that were not found
    Collection<String> notFound = new HashSet<>(probe2acc.values());

    // the actual values to search for (with version numbers)
    Collection<String> accessionsToFetch = new HashSet<>(probe2acc.values());

    // only 1 taxon should be on array design if taxon not supplied on command line
    taxon = this.validateTaxon(taxon, arrayDesign);

    /*
     * Fill in sequences from BLAST databases.
     */
    int numSwitched = 0;
    if (fc == null)
        fc = new SimpleFastaCmd();

    Collection<BioSequence> retrievedSequences = this.searchBlastDbs(databaseNames, blastDbHome, notFound, fc);

    // map of accessions to sequence.
    Map<String, BioSequence> found = this.findOrUpdateSequences(accessionsToFetch, retrievedSequences, taxon,
            force);

    Collection<BioSequence> finalResult = new HashSet<>(retrievedSequences);

    // replace the sequences.
    numSwitched = this.replaceSequences(arrayDesign, probe2acc, numSwitched, found);

    notFound = this.getUnFound(notFound, found);

    if (!notFound.isEmpty() && taxon != null) {

        Collection<String> stillLooking = new HashSet<>(notFound);
        notFound.clear();

        /*
         * clear the version number.
         */
        for (String accession : stillLooking) {
            notFound.remove(accession);
            accession = accession.replaceFirst("\\.\\d+$", "");
            notFound.add(accession);
        }
        assert notFound.size() > 0;
        /*
         * See if they're already in Gemma. This is good for sequences that are not in genbank but have been loaded
         * previously.
         */
        found = this.findLocalSequences(notFound, taxon);
        finalResult.addAll(found.values());

        numSwitched = this.replaceSequences(arrayDesign, probe2acc, numSwitched, found);
        notFound = this.getUnFound(notFound, found);
    }

    if (!notFound.isEmpty()) {
        this.logMissingSequences(arrayDesign, notFound);
    }

    ArrayDesignSequenceProcessingServiceImpl.log
            .info(numSwitched + " composite sequences had their biologicalCharacteristics changed");

    arrayDesignService.update(arrayDesign);

    arrayDesignReportService.generateArrayDesignReport(arrayDesign.getId());
    return finalResult;

}

From source file:org.apache.openjpa.jdbc.meta.ReverseMappingTool.java

/**
 * Generate mappings and class code for the current schema group.
 */
public void run() {
    // map base classes first
    Schema[] schemas = getSchemaGroup().getSchemas();
    Table[] tables;
    for (int i = 0; i < schemas.length; i++) {
        tables = schemas[i].getTables();
        for (int j = 0; j < tables.length; j++)
            if (isBaseTable(tables[j]))
                mapBaseClass(tables[j]);
    }

    // map vertical subclasses
    Set subs = null;
    for (int i = 0; i < schemas.length; i++) {
        tables = schemas[i].getTables();
        for (int j = 0; j < tables.length; j++) {
            if (!_tables.containsKey(tables[j]) && getSecondaryType(tables[j], false) == TABLE_SUBCLASS) {
                if (subs == null)
                    subs = new HashSet();
                subs.add(tables[j]);
            }
        }
    }
    if (subs != null)
        mapSubclasses(subs);

    // map fields in the primary tables of the classes
    ClassMapping cls;
    for (Iterator itr = _tables.values().iterator(); itr.hasNext();) {
        cls = (ClassMapping) itr.next();
        mapColumns(cls, cls.getTable(), null, false);
    }

    // map association tables, join tables, and secondary tables
    for (int i = 0; i < schemas.length; i++) {
        tables = schemas[i].getTables();
        for (int j = 0; j < tables.length; j++)
            if (!_tables.containsKey(tables[j]))
                mapTable(tables[j], getSecondaryType(tables[j], false));
    }

    // map discriminators and versions, make sure identity type is correct,
    // set simple field column java types, and ref schema components so
    // we can tell what is unmapped
    FieldMapping[] fields;
    for (Iterator itr = _tables.values().iterator(); itr.hasNext();) {
        cls = (ClassMapping) itr.next();
        cls.refSchemaComponents();
        if (cls.getDiscriminator().getStrategy() == null)
            getStrategyInstaller().installStrategy(cls.getDiscriminator());
        cls.getDiscriminator().refSchemaComponents();
        if (cls.getVersion().getStrategy() == null)
            getStrategyInstaller().installStrategy(cls.getVersion());
        cls.getVersion().refSchemaComponents();

        // double-check identity type; if it was set for builtin identity
        // it might have to switch to std application identity if pk field
        // not compatible
        if (cls.getPCSuperclass() == null && cls.getIdentityType() == ClassMapping.ID_APPLICATION) {
            if (cls.getPrimaryKeyFields().length == 0)
                throw new MetaDataException(_loc.get("no-pk-fields", cls));
            if (cls.getObjectIdType() == null || (cls.isOpenJPAIdentity() && !isBuiltinIdentity(cls)))
                setObjectIdType(cls);
        } else if (cls.getIdentityType() == ClassMapping.ID_DATASTORE)
            cls.getPrimaryKeyColumns()[0].setJavaType(JavaTypes.LONG);

        // set java types for simple fields;
        fields = cls.getDeclaredFieldMappings();
        for (int i = 0; i < fields.length; i++) {
            fields[i].refSchemaComponents();
            setColumnJavaType(fields[i]);
            setColumnJavaType(fields[i].getElementMapping());
        }
    }

    // set the java types of foreign key columns; we couldn't do this
    // earlier because we rely on the linked-to columns to do it
    for (Iterator itr = _tables.values().iterator(); itr.hasNext();) {
        cls = (ClassMapping) itr.next();
        setForeignKeyJavaType(cls.getJoinForeignKey());

        fields = cls.getDeclaredFieldMappings();
        for (int i = 0; i < fields.length; i++) {
            setForeignKeyJavaType(fields[i].getJoinForeignKey());
            setForeignKeyJavaType(fields[i].getForeignKey());
            setForeignKeyJavaType(fields[i].getElementMapping().getForeignKey());
        }
    }

    // allow customizer to map unmapped tables, and warn about anything
    // that ends up unmapped
    Column[] cols;
    Collection unmappedCols = new ArrayList(5);
    for (int i = 0; i < schemas.length; i++) {
        tables = schemas[i].getTables();
        for (int j = 0; j < tables.length; j++) {
            unmappedCols.clear();
            cols = tables[j].getColumns();
            for (int k = 0; k < cols.length; k++)
                if (cols[k].getRefCount() == 0)
                    unmappedCols.add(cols[k]);

            if (unmappedCols.size() == cols.length) {
                if (_custom == null || !_custom.unmappedTable(tables[j]))
                    _log.info(_loc.get("unmap-table", tables[j]));
            } else if (unmappedCols.size() > 0)
                _log.info(_loc.get("unmap-cols", tables[j], unmappedCols));
        }
    }
    if (_custom != null)
        _custom.close();

    // resolve mappings
    for (Iterator itr = _tables.values().iterator(); itr.hasNext();)
        ((ClassMapping) itr.next()).resolve(MODE_META | MODE_MAPPING);
}

From source file:eu.stratosphere.nephele.services.memorymanager.spi.DefaultMemoryManager.java

@Override
public <T extends MemorySegment> void release(Collection<T> segments) {

    // sanity checks
    if (segments == null) {
        return;
    }

    // -------------------- BEGIN CRITICAL SECTION -------------------
    synchronized (this.lock) {
        if (this.isShutDown) {
            throw new IllegalStateException("Memory manager has been shut down.");
        }

        final Iterator<T> segmentsIterator = segments.iterator();

        AbstractInvokable lastOwner = null;
        Set<DefaultMemorySegment> segsForOwner = null;

        // go over all segments
        while (segmentsIterator.hasNext()) {

            final MemorySegment seg = segmentsIterator.next();
            if (seg.isFreed()) {
                continue;
            }

            final DefaultMemorySegment defSeg = (DefaultMemorySegment) seg;
            final AbstractInvokable owner = defSeg.owner;

            try {
                // get the list of segments by this owner only if it is a different owner than for
                // the previous one (or it is the first segment)
                if (lastOwner != owner) {
                    lastOwner = owner;
                    segsForOwner = this.allocatedSegments.get(owner);
                }

                // remove the segment from the list
                if (segsForOwner != null) {
                    segsForOwner.remove(defSeg);
                    if (segsForOwner.isEmpty()) {
                        this.allocatedSegments.remove(owner);
                    }
                }
            } catch (Throwable t) {
                LOG.error("Error removing book-keeping reference to allocated memory segment.", t);
            } finally {
                // release the memory in any case
                byte[] buffer = defSeg.destroy();
                this.freeSegments.add(buffer);
            }
        }

        segments.clear();
    }
    // -------------------- END CRITICAL SECTION -------------------
}

From source file:ubic.gemma.analysis.service.ArrayDesignAnnotationServiceImpl.java

@Override
public int generateAnnotationFile(Writer writer,
        Map<CompositeSequence, Collection<BioSequence2GeneProduct>> genesWithSpecificity, OutputType ty)
        throws IOException {

    int compositeSequencesProcessed = 0;
    int simple = 0;
    int empty = 0;
    int complex = 0;
    Collection<OntologyTerm> goTerms = new LinkedHashSet<OntologyTerm>();
    Set<String> genes = new LinkedHashSet<String>();
    Set<String> geneDescriptions = new LinkedHashSet<String>();
    Set<String> geneIds = new LinkedHashSet<String>();
    Set<String> ncbiIds = new LinkedHashSet<String>();

    Map<Gene, Collection<VocabCharacteristic>> goMappings = getGOMappings(genesWithSpecificity);

    for (CompositeSequence cs : genesWithSpecificity.keySet()) {

        Collection<BioSequence2GeneProduct> geneclusters = genesWithSpecificity.get(cs);

        if (++compositeSequencesProcessed % 2000 == 0 && log.isInfoEnabled()) {
            log.info("Processed " + compositeSequencesProcessed + "/" + genesWithSpecificity.size()
                    + " compositeSequences " + empty + " empty; " + simple + " simple; " + complex
                    + " complex;");
        }

        if (geneclusters.isEmpty()) {
            writeAnnotationLine(writer, cs.getName(), "", "", null, "", "");
            empty++;
            continue;
        }

        if (geneclusters.size() == 1) {
            // common case, do it quickly.
            BioSequence2GeneProduct b2g = geneclusters.iterator().next();
            Gene g = b2g.getGeneProduct().getGene();
            goTerms = getGoTerms(g, goMappings.get(g), ty);
            String gemmaId = g.getId() == null ? "" : g.getId().toString();
            String ncbiId = g.getNcbiGeneId() == null ? "" : g.getNcbiGeneId().toString();
            writeAnnotationLine(writer, cs.getName(), g.getOfficialSymbol(), g.getOfficialName(), goTerms,
                    gemmaId, ncbiId);
            simple++;
            continue;
        }

        goTerms.clear();
        genes.clear();
        geneDescriptions.clear();
        geneIds.clear();
        ncbiIds.clear();

        for (BioSequence2GeneProduct bioSequence2GeneProduct : geneclusters) {

            Gene g = bioSequence2GeneProduct.getGeneProduct().getGene();

            genes.add(g.getOfficialSymbol());
            geneDescriptions.add(g.getOfficialName());
            geneIds.add(g.getId().toString());
            Integer ncbiGeneId = g.getNcbiGeneId();
            if (ncbiGeneId != null) {
                ncbiIds.add(ncbiGeneId.toString());
            }
            goTerms.addAll(getGoTerms(g, goMappings.get(g), ty));

        }

        String geneString = StringUtils.join(genes, "|");
        String geneDescriptionString = StringUtils.join(geneDescriptions, "|");
        String geneIdsString = StringUtils.join(geneIds, "|");
        String ncbiIdsString = StringUtils.join(ncbiIds, "|");
        writeAnnotationLine(writer, cs.getName(), geneString, geneDescriptionString, goTerms, geneIdsString,
                ncbiIdsString);
        complex++;

    }
    writer.close();
    return compositeSequencesProcessed;
}

From source file:ubic.gemma.persistence.service.expression.experiment.ExpressionExperimentDaoImpl.java

@Override
public Map<ExpressionExperiment, FactorValue> findByFactorValues(Collection<FactorValue> fvs) {

    if (fvs.isEmpty())
        return new HashMap<>();

    //language=HQL
    final String queryString = "select distinct ee, f from ExpressionExperiment ee "
            + " join ee.experimentalDesign ed join ed.experimentalFactors ef join ef.factorValues f"
            + " where f in (:fvs) group by ee";
    Map<ExpressionExperiment, FactorValue> results = new HashMap<>();
    Collection<FactorValue> batch = new HashSet<>();
    for (FactorValue o : fvs) {
        batch.add(o);
        if (batch.size() == ExpressionExperimentDaoImpl.BATCH_SIZE) {

            //noinspection unchecked
            List<Object> r2 = this.getSessionFactory().getCurrentSession().createQuery(queryString)
                    .setParameterList("fvs", batch).list();
            for (Object o1 : r2) {
                Object[] a = (Object[]) o1;
                results.put((ExpressionExperiment) a[0], (FactorValue) a[1]);
            }

            batch.clear();
        }
    }

    if (batch.size() > 0) {

        //noinspection unchecked
        List<Object> r2 = this.getSessionFactory().getCurrentSession().createQuery(queryString)
                .setParameterList("fvs", batch).list();
        for (Object o1 : r2) {
            Object[] a = (Object[]) o1;
            results.put((ExpressionExperiment) a[0], (FactorValue) a[1]);
        }

    }

    return results;

}

From source file:eu.dime.ps.semantic.service.impl.PimoService.java

public URI getPIMOTypeForOccurrenceNIEType(org.ontoware.rdf2go.model.node.Resource resource) {
    URI pimoType = null;
    URI possibleClass = null;

    Set<URI> occTypes = new HashSet<URI>();
    ClosableIterator<? extends Statement> it = tripleStore.findStatements(Variable.ANY, resource, RDF.type,
            Variable.ANY);
    while (it.hasNext()) {
        occTypes.add(it.next().getObject().asURI());
    }
    it.close();

    // no type was found
    if (occTypes.isEmpty()) {
        pimoType = PIMO.Thing;
    }

    if (pimoType == null) {
        for (URI occType : occTypes) {
            if (occType.equals(NCAL.Event)) {
                pimoType = PIMO.SocialEvent;
                break;
            } else if (occType.equals(NCO.PersonContact)) {
                pimoType = PIMO.Person;
                break;
            }
        }
    }

    if (pimoType == null) {
        for (URI occType : occTypes) {
            // we can use the mapping of classes
            Collection<URI> types = getPossiblePIMOTypeForNIEType(occType);
            if (possibleClass == null && !types.isEmpty())
                possibleClass = types.iterator().next();
            if (types.size() == 1) {
                // the best case is when there is only one possible
                // PIMO type for this thing.
                pimoType = types.iterator().next();
                break;
            }
            types.clear();
        }
    }

    if (pimoType == null) {
        for (URI occType : occTypes) {
            // if the type of the information element is a subclass of a type of the pimo,
            // we can use this type from the information element
            ClosableIterator<? extends Statement> isupertypes = tripleStore.findStatements(Variable.ANY,
                    occType, RDFS.subClassOf, Variable.ANY);
            while (isupertypes.hasNext()) {
                URI supertype = (URI) isupertypes.next().getObject();
                if (supertype.toString().startsWith(PIMO.NS_PIMO.toString())) {
                    pimoType = occType;
                    break;
                }
            }
        }
    }

    if (pimoType == null) {
        // assign the first class which we found
        pimoType = possibleClass;
    }

    return pimoType;
}

From source file:eu.dime.ps.semantic.service.impl.PimoService.java

/**
 * Creates a Thing from a given occurrence. 
 * The URI of the occurrence will be added as groundingOccurrence.
 * All identifiers from the occurrence, which can be found in the pimo will be added
 * as statements in the model.
 * 
 * @param occurrence the occurrence from which to create a resource
 * @param label the name of the resource, used as label and part of the uri
 * @param ofClass the URI of the class of the created resource
 * @return the URI of the created resource
 * @throws OntologyInvalidException if the passed URI is not defined as
 *       class in the pimo.
 */
public URI createResourceFromGroundingOccurrence(URI occurrence, String label, URI ofClass)
        throws OntologyInvalidException {
    // detect label and class
    URI clazz = ofClass;
    Set<URI> occTypes = new HashSet<URI>();

    if (clazz == null) {
        ClosableIterator<? extends Statement> it = tripleStore.findStatements(Variable.ANY, occurrence,
                RDF.type, Variable.ANY);
        while (it.hasNext()) {
            occTypes.add(it.next().getObject().asURI());
        }
        it.close();
        // no type was found
        if (occTypes.isEmpty()) {
            clazz = PIMO.Thing;
        }
    }

    URI candidateClass = null;
    if (clazz == null) {
        for (URI occType : occTypes) {
            // can use the mapping of classes
            Collection<URI> types = getPossiblePIMOTypeForNIEType(occType);
            if (candidateClass == null && !types.isEmpty())
                candidateClass = types.iterator().next();
            if (types.size() == 1) {
                // the best case is when there is only one possible
                // PIMO type for this thing.
                clazz = types.iterator().next();
                break;
            }
            types.clear();
        }
    }
    if (clazz == null) {
        for (URI occType : occTypes) {
            // first check whether there is a type which has the same name;
            // if the labels are the same, it is likely that
            // these are equivalent types.
            String occTypeLabel = ResourceUtils.guessPreferredLabel(tripleStore, occType);
            List<URI> candidates = new LinkedList<URI>();
            ClosableIterator<? extends Statement> statements = null;
            try {
                Literal rdfsLabel = new PlainLiteralImpl(occTypeLabel);
                for (statements = tripleStore.findStatements(PIMO.NS_PIMO, Variable.ANY, RDFS.label,
                        rdfsLabel); statements.hasNext();) {
                    candidates.add(statements.next().getSubject().asURI());
                }
            } finally {
                // guard against findStatements failing before the iterator is assigned
                if (statements != null) {
                    statements.close();
                }
            }
            if (candidateClass == null && !candidates.isEmpty())
                candidateClass = candidates.iterator().next();
            // check whether the result contains more than one type.
            // if this is the case, we can't say anything about which
            // type is the better one.
            if (candidates.size() == 1) {
                clazz = candidates.iterator().next();
                break;
            }
        }
    }
    if (clazz == null) {
        for (URI occType : occTypes) {
            // if the type of the information element is a subclass of a type of the pimo,
            // we can use this type from the information element
            ClosableIterator<? extends Statement> isupertypes = tripleStore.findStatements(Variable.ANY,
                    occType, RDFS.subClassOf, Variable.ANY);
            while (isupertypes.hasNext()) {
                URI supertype = (URI) isupertypes.next().getObject();
                if (supertype.toString().startsWith(PIMO.NS_PIMO.toString())) {
                    clazz = occType;
                    break;
                }
            }
            isupertypes.close();
        }
    }

    if (clazz == null)
        clazz = candidateClass; // assign the first class which we found
    if (clazz == null)
        clazz = PIMO.Topic;

    /**
     * find a proper label for the new thing based on the resource
     */
    if (label == null || label.equals("")) {
        label = ResourceUtils.guessPreferredLabel(tripleStore, occurrence);
    }

    // create thing
    return createResourceFromGroundingOccurrence(occurrence, tripleStore, label, clazz);
}

From source file:org.apache.ojb.broker.QueryTest.java

/**
 * Simple ReportQuery returning rows with 3 columns of Person
 * needs SQL paging
 */
public void testReportQueryWithStartAndEnd() {
    Criteria crit = new Criteria();
    Collection results = new Vector();
    crit.addLike("firstname", "%o%");
    ReportQueryByCriteria q = QueryFactory.newReportQuery(Person.class, crit);
    q.setAttributes(new String[] { "id", "firstname", "count(*)" });
    q.addGroupBy(new String[] { "id", "firstname" });

    q.setStartAtIndex(3);
    q.setEndAtIndex(5);
    Iterator iter = broker.getReportQueryIteratorByQuery(q);
    assertNotNull(iter);
    while (iter.hasNext()) {
        results.add(iter.next());
    }
    assertTrue(results.size() == 3);

    results.clear();
    q.setStartAtIndex(1);
    q.setEndAtIndex(5);
    iter = broker.getReportQueryIteratorByQuery(q);
    assertNotNull(iter);
    while (iter.hasNext()) {
        results.add(iter.next());
    }
    assertTrue(results.size() == 5);
}

From source file:org.apache.openjpa.persistence.XMLPersistenceMetaDataSerializer.java

/**
 * Serialize cascades.
 */
private void serializeCascades(ValueMetaData vmd) throws SAXException {
    Collection<String> cascades = null;
    if (vmd.getCascadePersist() == ValueMetaData.CASCADE_IMMEDIATE) {
        if (cascades == null)
            cascades = new ArrayList<String>();
        cascades.add("cascade-persist");
    }
    if (vmd.getCascadeAttach() == ValueMetaData.CASCADE_IMMEDIATE) {
        if (cascades == null)
            cascades = new ArrayList<String>();
        cascades.add("cascade-merge");
    }
    if (vmd.getCascadeDelete() == ValueMetaData.CASCADE_IMMEDIATE) {
        if (cascades == null)
            cascades = new ArrayList<String>();
        cascades.add("cascade-remove");
    }
    if (vmd.getCascadeRefresh() == ValueMetaData.CASCADE_IMMEDIATE) {
        if (cascades == null)
            cascades = new ArrayList<String>();
        cascades.add("cascade-refresh");
    }
    if (vmd.getCascadeDetach() == ValueMetaData.CASCADE_IMMEDIATE) {
        if (cascades == null)
            cascades = new ArrayList<String>();
        cascades.add("cascade-detach");
    }
    if (cascades != null && cascades.size() == 5) // ALL
    {
        cascades.clear();
        cascades.add("cascade-all");
    }
    if (cascades != null) {
        startElement("cascade");
        for (String cascade : cascades) {
            startElement(cascade);
            endElement(cascade);
        }
        endElement("cascade");
    }
}