Example usage for java.util Collection removeAll

List of usage examples for java.util Collection removeAll

Introduction

On this page you can find usage examples for java.util.Collection.removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes all of this collection's elements that are also contained in the specified collection (optional operation).
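
Before the real-world snippets below, here is a minimal, self-contained sketch of removeAll (the class name and values are invented for illustration). removeAll returns true if the receiving collection changed as a result of the call, and unmodifiable collections may throw UnsupportedOperationException instead.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;

public class RemoveAllExample {
    public static void main(String[] args) {
        // Wrap in an ArrayList; Arrays.asList alone returns a fixed-size list.
        Collection<String> colors = new ArrayList<>(Arrays.asList("red", "green", "blue", "green"));

        // Remove every element that also appears in the argument collection.
        boolean changed = colors.removeAll(Arrays.asList("green", "yellow"));

        System.out.println(changed); // true -- both "green" entries were removed
        System.out.println(colors);  // [red, blue]
    }
}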

Usage

From source file:ubic.gemma.ontology.OntologyServiceImpl.java

@Override
public void removeBioMaterialStatement(Collection<Long> characterIds, Collection<Long> bmIdList) {

    log.debug("Vocab Characteristic: " + characterIds);
    log.debug("biomaterial ID List: " + bmIdList);

    Collection<BioMaterial> bms = bioMaterialService.loadMultiple(bmIdList);

    for (BioMaterial bm : bms) {

        Collection<Characteristic> current = bm.getCharacteristics();
        if (current == null)
            continue;

        Collection<Characteristic> found = new HashSet<Characteristic>();

        for (Characteristic characteristic : current) {
            if (characterIds.contains(characteristic.getId()))
                found.add(characteristic);

        }
        if (found.size() == 0)
            continue;

        current.removeAll(found);

        for (Characteristic characteristic : found) {
            log.info("Removing characteristic from " + bm + " : " + characteristic);
        }

        bm.setCharacteristics(current);
        bioMaterialService.update(bm);

    }

    for (Long id : characterIds) {
        characteristicService.delete(id);
    }
}

From source file:chibi.gemmaanalysis.cli.deprecated.BioSequenceCleanupCli.java

@Override
protected Exception doWork(String[] args) {

    Exception err = processCommandLine(args);
    if (err != null)
        return err;

    Collection<ArrayDesign> ads = new HashSet<ArrayDesign>();
    if (!this.arrayDesignsToProcess.isEmpty()) {
        ads.addAll(this.arrayDesignsToProcess);
    } else if (file != null) {
        try (InputStream is = new FileInputStream(file);
                BufferedReader br = new BufferedReader(new InputStreamReader(is));) {

            String id = null;
            Collection<Long> ids = new HashSet<Long>();
            while ((id = br.readLine()) != null) {
                if (StringUtils.isBlank(id)) {
                    continue;
                }
                ids.add(Long.parseLong(id));
            }

            Collection<BioSequence> bioSequences = bss.loadMultiple(ids);
            bioSequences = bss.thaw(bioSequences);
            processSequences(bioSequences);
            return null;
        } catch (Exception e) {
            return e;
        }
    } else {
        ads = this.arrayDesignService.loadAll();
    }

    for (ArrayDesign design : ads) {
        log.info(design);
        design = unlazifyArrayDesign(design);

        Collection<BioSequence> bioSequences = new HashSet<BioSequence>();

        for (CompositeSequence cs : design.getCompositeSequences()) {
            if (cs == null)
                continue;
            if (cs.getBiologicalCharacteristic() == null)
                continue;
            bioSequences.add(cs.getBiologicalCharacteristic());
        }

        processSequences(bioSequences); // fast.

        log.info("Phase II starting");

        // ///////////////////////////////
        // Second phase: make sure composite sequences don't refer to sequences that have duplicates based on name,
        // using stricter equality criteria.
        int i = 0;
        for (CompositeSequence cs : design.getCompositeSequences()) {

            if (++i % 500 == 0) {
                log.info("Processing: " + i + "/" + bioSequences.size() + " sequences");
            }

            BioSequence anchorSeq = cs.getBiologicalCharacteristic();
            if (anchorSeq == null) {
                continue;
            }
            Collection<BioSequence> seqs = bss.findByName(anchorSeq.getName());

            // no evidence of duplicates?
            if (seqs.size() == 1) {
                continue;
            }

            seqs.remove(anchorSeq);

            seqs = this.bss.thaw(seqs);

            // ensure this group really does contain all duplicates.
            if (log.isDebugEnabled())
                log.debug("Examining set of " + seqs.size() + " possible duplicates of " + anchorSeq);

            Collection<BioSequence> notDuplicate = new HashSet<BioSequence>();
            for (BioSequence candidateForRemoval : seqs) {
                if (log.isDebugEnabled())
                    log.debug("   Examining: " + candidateForRemoval);
                assert !candidateForRemoval.equals(anchorSeq) : candidateForRemoval + " equals " + anchorSeq;
                if (!this.equals(anchorSeq, candidateForRemoval)) {
                    notDuplicate.add(candidateForRemoval);
                } else {
                    if (log.isDebugEnabled())
                        log.debug("    Duplicate: " + anchorSeq + " " + candidateForRemoval);
                }
            }

            seqs.removeAll(notDuplicate);

            for (BioSequence toChange : seqs) {
                if (log.isDebugEnabled())
                    log.debug("Processing " + toChange);
                if (!this.equals(anchorSeq, toChange)) {
                    throw new IllegalStateException(
                            "Sequences weren't equal " + anchorSeq + " and " + toChange);
                }
                switchAndDeleteExtra(anchorSeq, toChange);

            }
        }
    }

    return null;

}

From source file:org.lockss.test.LockssTestCase.java

/** Assert that a collection cannot be modified, <i>i.e.</i>, that all of
 * the following methods, plus the collection's iterator().remove()
 * method, throw UnsupportedOperationException: add(), addAll(), clear(),
 * remove(), removeAll(), retainAll() */

public static void assertUnmodifiable(Collection coll) {
    List list = ListUtil.list("bar");
    try {
        coll.add("foo");
        fail("add() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.addAll(list);
        fail("addAll() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.clear();
        fail("clear() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.remove("foo");
        fail("remove() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.removeAll(list);
        fail("removeAll() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    try {
        coll.retainAll(list);
        fail("retainAll() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
    Iterator iter = coll.iterator();
    iter.next();
    try {
        iter.remove();
        fail("iterator().remove() didn't throw");
    } catch (UnsupportedOperationException e) {
    }
}

From source file:org.dllearner.utilities.QueryUtils.java

/**
 * Removes triple patterns of form (s rdf:type A) if there exists a
 * triple pattern (s rdf:type B) such that the underlying
 * knowledge base entails (B rdfs:subClassOf A).
 * @param qef the query execution factory
 * @param query the query
 */
public void filterOutGeneralTypes(QueryExecutionFactory qef, Query query) {
    // extract all rdf:type triple patterns
    Set<Triple> typeTriplePatterns = extractTriplePatternsWithPredicate(query, RDF.type.asNode());

    // group by subject
    Multimap<Node, Triple> subject2TriplePatterns = HashMultimap.create();
    for (Triple tp : typeTriplePatterns) {
        subject2TriplePatterns.put(tp.getSubject(), tp);
    }

    // keep the most specific types for each subject
    for (Node subject : subject2TriplePatterns.keySet()) {
        Collection<Triple> triplePatterns = subject2TriplePatterns.get(subject);
        Collection<Triple> triplesPatterns2Remove = new HashSet<>();

        for (Triple tp : triplePatterns) {
            if (!triplesPatterns2Remove.contains(tp)) {
                // get all super classes for the triple object
                Set<Node> superClasses = getSuperClasses(qef, tp.getObject());

                // remove triple patterns that have one of the super classes as object
                for (Triple tp2 : triplePatterns) {
                    if (tp2 != tp && superClasses.contains(tp2.getObject())) {
                        triplesPatterns2Remove.add(tp2);
                    }
                }
            }
        }

        // remove triple patterns
        triplePatterns.removeAll(triplesPatterns2Remove);
    }
}

From source file:uk.ac.ebi.intact.util.protein.ProteinServiceImpl.java

/**
 * Update an existing intact protein's annotations.
 * <p/>
 * That includes all Xrefs, Aliases, and splice variants.
 *
 * @param protein        the intact protein to update.
 * @param uniprotProtein the uniprot protein used for data input.
 */
private void updateProtein(Protein protein, UniprotProtein uniprotProtein) throws ProteinServiceException {
    List<Protein> proteins = new ArrayList<Protein>();

    // check that both proteins carry the same organism information
    if (!UpdateBioSource(protein, uniprotProtein.getOrganism())) {
        return;
    }

    // Fullname
    String fullname = uniprotProtein.getDescription();
    if (fullname != null && fullname.length() > 250) {
        if (log.isDebugEnabled()) {
            log.debug("Truncating fullname to the first 250 first chars.");
        }
        fullname = fullname.substring(0, 250);
    }
    protein.setFullName(fullname);

    // Shortlabel
    protein.setShortLabel(generateProteinShortlabel(uniprotProtein));

    // Xrefs -- all but UniProt's, as they are supposed to be up-to-date at this stage.
    XrefUpdaterReport reports = XrefUpdaterUtils.updateAllXrefs(protein, uniprotProtein, databaseName2mi,
            IntactContext.getCurrentInstance().getDataContext(), processor,
            new TreeSet<InteractorXref>(new InteractorXrefComparator()),
            new TreeSet<UniprotXref>(new UniprotXrefComparator(databaseName2mi)));

    uniprotServiceResult.addXrefUpdaterReport(reports);

    // Aliases
    AliasUpdaterUtils.updateAllAliases(protein, uniprotProtein,
            IntactContext.getCurrentInstance().getDataContext(), processor);

    // Sequence
    updateProteinSequence(protein, uniprotProtein.getSequence(), uniprotProtein.getCrc64());

    // Persist changes
    DaoFactory daoFactory = IntactContext.getCurrentInstance().getDataContext().getDaoFactory();
    ProteinDao pdao = daoFactory.getProteinDao();
    pdao.update((ProteinImpl) protein);

    ///////////////////////////////
    // Update Splice Variants and feature chains

    // search intact
    // splice variants with no 'no-uniprot-update'
    Collection<ProteinImpl> spliceVariantsAndChains = pdao.getSpliceVariants(protein);

    // feature chains
    spliceVariantsAndChains.addAll(pdao.getProteinChains(protein));

    // We create a copy of the collection that holds the protein transcripts, because findMatches removes protein
    // transcripts from the collection when a match is found. Without the copy, the first run would find the matches
    // and create the protein transcripts correctly, but it would also delete them from the collection, so on a
    // second run the splice variants would no longer be linked to the uniprotProtein and therefore would not be
    // updated correctly.
    Collection<UniprotProteinTranscript> variantsClone = new ArrayList<UniprotProteinTranscript>();

    variantsClone.addAll(uniprotProtein.getSpliceVariants());
    variantsClone.addAll(uniprotProtein.getFeatureChains());

    for (UniprotProteinTranscript transcript : variantsClone) {
        proteins.addAll(createOrUpdateProteinTranscript(transcript, uniprotProtein, protein));
    }

    if (!proteins.containsAll(spliceVariantsAndChains)) {

        if (proteins.size() < spliceVariantsAndChains.size()) {
            for (Object protNotUpdated : CollectionUtils.subtract(spliceVariantsAndChains, proteins)) {
                Protein prot = (Protein) protNotUpdated;

                if (prot.getActiveInstances().size() == 0) {
                    deleteProtein(prot);

                    uniprotServiceResult.addMessage(
                            "The protein " + getProteinDescription(prot) + " is a protein transcript of "
                                    + getProteinDescription(protein) + " in IntAct but not in Uniprot."
                                    + " As it is not part of any interactions in IntAct we have deleted it.");

                } else if (ProteinUtils.isFromUniprot(prot)) {
                    uniprotServiceResult.addError(
                            UniprotServiceResult.SPLICE_VARIANT_IN_INTACT_BUT_NOT_IN_UNIPROT,
                            "In Intact the protein " + getProteinDescription(prot)
                                    + " is a protein transcript of protein " + getProteinDescription(protein)
                                    + " but in Uniprot it is not the case. As it is part of interactions in IntAct we couldn't "
                                    + "delete it.");
                }
            }
        } else {
            Collection<Protein> spliceVariantsNotUpdated = new ArrayList<Protein>(spliceVariantsAndChains);
            spliceVariantsNotUpdated.removeAll(CollectionUtils.intersection(spliceVariantsAndChains, proteins));

            for (Protein protNotUpdated : spliceVariantsNotUpdated) {

                if (protNotUpdated.getActiveInstances().size() == 0) {
                    deleteProtein(protNotUpdated);

                    uniprotServiceResult.addMessage("The protein " + getProteinDescription(protNotUpdated)
                            + " is a protein transcript of " + getProteinDescription(protein)
                            + " in IntAct but not in Uniprot."
                            + " As it is not part of any interactions in IntAct we have deleted it.");

                } else if (ProteinUtils.isFromUniprot(protNotUpdated)) {
                    uniprotServiceResult.addError(
                            UniprotServiceResult.SPLICE_VARIANT_IN_INTACT_BUT_NOT_IN_UNIPROT,
                            "In Intact the protein " + getProteinDescription(protNotUpdated)
                                    + " is a protein transcript of protein " + getProteinDescription(protein)
                                    + " but in Uniprot it is not the case. As it is part of interactions in IntAct we couldn't "
                                    + "delete it.");
                }
            }
        }
    }

    //        Collection<ProteinTranscriptMatch> matches = findMatches( variants, variantsClone) );
    /*Collection<ProteinTranscriptMatch> matches = findMatches( spliceVariantsAndChains, variantsClone );
    for ( ProteinTranscriptMatch match : matches ) {
            
    if ( match.isSuccessful() ) {
        // update
        final UniprotProteinTranscript variant = match.getUniprotTranscript();
        final Protein intactProtein = match.getIntactProtein();
            
        if (ProteinUtils.isFromUniprot(intactProtein)){
            updateProteinTranscript(intactProtein, protein, variant, uniprotProtein );
        }
            
        if (variant.getSequence() != null || (variant.getSequence() == null && variant.isNullSequenceAllowed())) {
            proteins.add(intactProtein);
        }
            
    } else if ( match.hasNoIntact() ) {
            
        // TODO in the case of a global update, and the user requested splice variants without interactions to be deleted,
        // TODO we don't create splice variants when they are missing as they wouldn't have interactions anyways.
        // NOTE: this does not apply say in our curation environment as the users want to see imported SV so they can choose them
        // TODO test this
        final ProteinUpdateProcessorConfig config = ProteinUpdateContext.getInstance().getConfig();
        final boolean globalProteinUpdate = config.isGlobalProteinUpdate();
        final boolean deleteProteinTranscript = config.isDeleteProteinTranscriptWithoutInteractions();
            
        if( ! globalProteinUpdate && !deleteProteinTranscript) {
            // create shallow
            Protein intactTranscript = createMinimalisticProteinTranscript( match.getUniprotTranscript(),
                    protein.getAc(),
                    protein.getBioSource(),
                    uniprotProtein );
            // update
            final UniprotProteinTranscript uniprotTranscript = match.getUniprotTranscript();
            updateProteinTranscript( intactTranscript, protein, uniprotTranscript, uniprotProtein);
            
            proteinCreated(intactTranscript);
            
            if (uniprotTranscript.getSequence() != null || (uniprotTranscript.getSequence() == null && uniprotTranscript.isNullSequenceAllowed())) {
                proteins.add(intactTranscript);
            }
        }
            
    } else {
        Protein intactProteinTranscript = match.getIntactProtein();
            
        if(intactProteinTranscript.getActiveInstances().size() == 0){
            deleteProtein(intactProteinTranscript);
            
            uniprotServiceResult.addMessage("The protein " + getProteinDescription(intactProteinTranscript) +
                    " is a protein transcript of " + getProteinDescription(protein) + " in IntAct but not in Uniprot." +
                    " As it is not part of any interactions in IntAct we have deleted it."  );
            
        }else if (ProteinUtils.isFromUniprot(intactProteinTranscript)){
            uniprotServiceResult.addError(UniprotServiceResult.SPLICE_VARIANT_IN_INTACT_BUT_NOT_IN_UNIPROT,
                    "In Intact the protein "+ getProteinDescription(intactProteinTranscript) +
                            " is a protein transcript of protein "+ getProteinDescription(protein)+
                            " but in Uniprot it is not the case. As it is part of interactions in IntAct we couldn't " +
                            "delete it.");
        }
    }
    }*/
}

From source file:uk.ac.ebi.intact.editor.controller.curate.AnnotatedObjectController.java

public void removeAlias(String alias, String aliasMI, Collection<Alias> aliases) {

    // modify if exists
    Collection<Alias> existingAliases = AliasUtils.collectAllAliasesHavingType(aliases, aliasMI, alias);
    aliases.removeAll(existingAliases);
    setUnsavedChanges(true);
}

From source file:org.intalio.deploy.deployment.impl.DeploymentServiceImpl.java

@ManagedAttribute
public String getMissingComponents() {
    Collection<String> missing = new HashSet<String>();
    missing.addAll(_requiredComponentManagers);
    missing.removeAll(_componentManagers.keySet());
    return String.valueOf(missing);
}

From source file:ubic.gemma.expression.experiment.service.ExpressionExperimentServiceImpl.java

@Override
public Collection<Long> getUntroubled(Collection<Long> ids) {
    Collection<Long> firstPass = this.expressionExperimentDao.getUntroubled(ids);

    /*
     * Now check the array designs.
     */
    Map<ArrayDesign, Collection<Long>> ads = this.expressionExperimentDao.getArrayDesignsUsed(firstPass);
    Collection<Long> troubled = new HashSet<Long>();
    for (ArrayDesign a : ads.keySet()) {
        if (a.getStatus().getTroubled()) {
            troubled.addAll(ads.get(a));
        }
    }

    firstPass.removeAll(troubled);

    return firstPass;
}

From source file:com.univocity.app.DataUpdateTest.java

private boolean isExpectedDiscrepancy(String entityName, String[] fieldNames, boolean inserting,
        Collection<String> ids) {

    Map<String, String> sr25Rows = getMapOfRows(entityName, fieldNames, true);
    Map<String, String> sr26Rows = getMapOfRows(entityName, fieldNames, false);

    List<String> newRows = new ArrayList<String>();
    List<String> updatedRows = new ArrayList<String>();
    List<String> unnecessaryUpdates = new ArrayList<String>();

    Set<String> disconsider = new HashSet<String>();

    for (String rowId : ids) {
        String r25 = sr25Rows.get(rowId);
        String r26 = sr26Rows.get(rowId);

        assertFalse(r25 == null && r26 == null); //can't have a rowID from nowhere.

        if (!inserting && r25 != null && r26 != null) { //it is an update
            if (r25.equals(r26)) {
                disconsider.add(rowId);
                unnecessaryUpdates.add(r26 + " - no changes since SR25");
            } else {
                disconsider.add(rowId);
                updatedRows.add(r26 + " - changed from SR25: " + r25);
            }
        }

        if (inserting && r25 == null && r26 != null) { //new row in SR26
            disconsider.add(rowId);
            newRows.add(r26);
        }

    }

    printDiscrepantRows(entityName, newRows, "new rows");
    printDiscrepantRows(entityName, updatedRows, "updated rows");
    printDiscrepantRows(entityName, unnecessaryUpdates, "unnecessary updates");

    ids.removeAll(disconsider);
    return ids.isEmpty();
}

From source file:org.kuali.rice.krad.service.impl.KNSLegacyDataAdapterImpl.java

protected <T> Collection<T> filterCurrentDataObjects(Class<T> dataObjectClass, Collection<T> unfiltered,
        Map<String, String> formProps) {
    if (InactivatableFromTo.class.isAssignableFrom(dataObjectClass)) {
        Boolean currentSpecifier = lookupCriteriaCurrentSpecifier(formProps);
        if (currentSpecifier != null) {
            List<InactivatableFromTo> onlyCurrent = KRADServiceLocator.getInactivateableFromToService()
                    .filterOutNonCurrent(new ArrayList(unfiltered),
                            new Date(LookupUtils.getActiveDateTimestampForCriteria(formProps).getTime()));
            if (currentSpecifier) {
                return (Collection<T>) onlyCurrent;
            } else {
                unfiltered.removeAll(onlyCurrent);
                return unfiltered;
            }
        }
    }
    return unfiltered;
}