Example usage for org.apache.commons.collections CollectionUtils subtract

List of usage examples for org.apache.commons.collections CollectionUtils subtract

Introduction

On this page you can find example usage for org.apache.commons.collections CollectionUtils subtract.

Prototype

public static Collection subtract(final Collection a, final Collection b) 

Document

Returns a new Collection containing a - b. The cardinality of each element e in the returned Collection will be the cardinality of e in a minus the cardinality of e in b, or zero, whichever is greater.
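
A minimal, self-contained sketch of this behavior (hypothetical demo class; Commons Collections 3.x, whose API is untyped):

import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;

// Hypothetical demo class, not part of the examples below.
public class SubtractDemo {
    public static void main(String[] args) {
        List<String> a = Arrays.asList("a", "a", "b", "c");
        List<String> b = Arrays.asList("a", "c", "d");

        // Each element of b cancels at most one occurrence in a; "d" is ignored.
        Collection<?> difference = CollectionUtils.subtract(a, b);

        System.out.println(difference); // prints [a, b]
    }
}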

Usage

From source file:uk.ac.ebi.intact.dataexchange.cvutils.CvUpdater.java

/**
 * Starts the creation and update of CVs using the provided CvObject list
 *
 * @param allValidCvs           List of all valid CVs
 * @param annotationInfoDataset A separate dataset specific to IntAct
 * @return An object containing some statistics about the update
 */
@Transactional
@IntactFlushMode(FlushModeType.COMMIT)
public CvUpdaterStatistics createOrUpdateCVs(List<CvDagObject> allValidCvs,
        AnnotationInfoDataset annotationInfoDataset) {

    if (allValidCvs == null) {
        throw new IllegalArgumentException("You must give a non null collection of CvDagObject");
    }
    if (annotationInfoDataset == null) {
        throw new IllegalArgumentException("You must give a non null AnnotationInfoDataset");
    }

    List<CvDagObject> alreadyExistingObsoleteCvList = new ArrayList<CvDagObject>();
    List<CvDagObject> orphanCvList = dealWithOrphans(allValidCvs, alreadyExistingObsoleteCvList);

    if (log.isDebugEnabled()) {
        log.debug("Orphan count: " + orphanCvList.size());
        log.debug("AlreadyExisting cvs annotated with Obsolete: " + alreadyExistingObsoleteCvList.size());
    }

    // first step: remove the orphan CVs that do not exist in the database
    List<CvDagObject> cleanedList = (List<CvDagObject>) CollectionUtils.subtract(allValidCvs, orphanCvList);
    if (log.isDebugEnabled())
        log.debug("Size of CV list after removing orphans: " + cleanedList.size());
    // second step: remove the orphan CVs that already exist in the database
    cleanedList = (List<CvDagObject>) CollectionUtils.subtract(cleanedList, alreadyExistingObsoleteCvList);

    if (log.isDebugEnabled())
        log.debug("Size of CV list after removing obsolete terms: " + cleanedList.size());

    CorePersister corePersister = persisterHelper.getCorePersister();
    corePersister.setUpdateWithoutAcEnabled(true);

    updateCVsUsingAnnotationDataset(cleanedList, annotationInfoDataset, corePersister);

    CvObject[] cvObjects = cleanedList.toArray(new CvObject[cleanedList.size()]);

    corePersister.saveOrUpdate(cvObjects);

    PersisterStatistics persisterStats = corePersister.getStatistics();

    addCvObjectsToUpdaterStats(persisterStats, stats);

    if (log.isDebugEnabled()) {
        log.debug("Persisted: " + persisterStats);
        log.debug("Processed: " + processed.size());
        log.debug(stats);
    }

    return stats;
}
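
Note that subtract is declared to return a raw Collection; the casts to List<CvDagObject> above rely on the Commons Collections 3.x implementation returning an ArrayList. A hypothetical variant of the first step that avoids the cast (at the cost of a copy, and still an unchecked generic conversion) would be:

    // Hypothetical alternative to the cast above: copy the result into a typed list.
    List<CvDagObject> cleanedList =
            new ArrayList<CvDagObject>(CollectionUtils.subtract(allValidCvs, orphanCvList));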

From source file:uk.ac.ebi.intact.dataexchange.cvutils.model.CvObjectOntologyBuilderTest.java

@Test
public void build_subset_drugable() throws Exception {

    //OBOSession oboSession = OboUtils.createOBOSessionFromDefault( "1.51" );
    OBOSession oboSession = OboUtils
            .createOBOSession(CvObjectOntologyBuilderTest.class.getResource("/ontologies/psi-mi25-1_51.obo"));
    log.debug(oboSession.getObjects().size());

    CvObjectOntologyBuilder ontologyBuilder = new CvObjectOntologyBuilder(oboSession);

    OboCategory oboCatDrug = new OboCategory(OboCategory.DRUGABLE);
    OboCategory oboCatPsi = new OboCategory(OboCategory.PSI_MI_SLIM);

    Collection<IdentifiedObject> testRoot = ontologyBuilder.getRootOBOObjects(oboCatDrug);

    for (IdentifiedObject identObj : testRoot) {
        if (log.isDebugEnabled())
            log.debug("ID: " + identObj.getID());
    }

    OBOObject drugableObj = (OBOObject) oboSession.getObject("MI:0686");//drugable
    OBOObject psiObj = (OBOObject) oboSession.getObject("MI:0439");//psi-mi

    Assert.assertTrue(ontologyBuilder.checkIfCategorySubset(drugableObj, oboCatDrug));
    Assert.assertTrue(ontologyBuilder.checkIfCategorySubset(drugableObj, oboCatPsi));

    Assert.assertTrue(ontologyBuilder.checkIfCategorySubset(psiObj, oboCatPsi));
    Assert.assertFalse(ontologyBuilder.checkIfCategorySubset(psiObj, oboCatDrug));

    /*  log.info( "--------------------------------------------" );
            
    List<CvDagObject> allCvs = ontologyBuilder.getAllCvs();
    log.info( "All cvs size " + allCvs.size() );
            
    log.info( "--------------------------------------------" );
    */

    List<CvDagObject> allDrugableCvs = ontologyBuilder.getAllCvs(oboCatDrug);
    if (log.isDebugEnabled()) {
        log.debug("Drug subset size " + allDrugableCvs.size());
    }

    Collection<String> drugablemis = new ArrayList<String>();
    for (CvDagObject cvDag : allDrugableCvs) {
        if (log.isDebugEnabled())
            log.debug(cvDag.getIdentifier() + " -> " + cvDag.getShortLabel());
        OBOObject drugable = (OBOObject) oboSession.getObject(cvDag.getIdentifier());
        Assert.assertTrue(ontologyBuilder.checkIfCategorySubset(drugable, oboCatDrug));
        drugablemis.add(drugable.getID());

    }

    if (log.isDebugEnabled()) {
        log.debug("drugablemis size " + drugablemis.size());
    }

    Collection<String> crossCheck = crossCheckFromOBOFile(OboCategory.DRUGABLE);

    if (log.isDebugEnabled())
        log.debug("crossCheckFromOBOFile().size() " + crossCheck.size());

    Collection<String> difference = CollectionUtils.subtract(crossCheck, drugablemis);

    if (log.isDebugEnabled())
        log.debug("difference size " + difference.size());
    for (String diff : difference) {
        if (log.isDebugEnabled())
            log.debug("diff MI: " + diff);
    }

    //Only the root term MI:0000 should be in the difference
    Assert.assertEquals(1, difference.size());

}

From source file:uk.ac.ebi.intact.dataexchange.cvutils.model.CvObjectOntologyBuilderTest.java

@Test
public void build_subset_psi() throws Exception {

    //OBOSession oboSession = OboUtils.createOBOSessionFromDefault( "1.51" );
    OBOSession oboSession = OboUtils
            .createOBOSession(CvObjectOntologyBuilderTest.class.getResource("/ontologies/psi-mi25-1_51.obo"));
    if (log.isDebugEnabled())
        log.debug(oboSession.getObjects().size());

    CvObjectOntologyBuilder ontologyBuilder = new CvObjectOntologyBuilder(oboSession);
    OBOObject psiObj = (OBOObject) oboSession.getObject("MI:0005");//psi-mi

    OboCategory oboCatPsi = new OboCategory(OboCategory.PSI_MI_SLIM);
    Assert.assertTrue(ontologyBuilder.checkIfCategorySubset(psiObj, oboCatPsi));

    List<CvDagObject> allPsimiCvs = ontologyBuilder.getAllCvs(oboCatPsi);
    Collection<String> psiMis = new ArrayList<String>();

    for (CvDagObject cvDag : allPsimiCvs) {

        OBOObject psimi = (OBOObject) oboSession.getObject(cvDag.getMiIdentifier());
        Assert.assertTrue(ontologyBuilder.checkIfCategorySubset(psimi, oboCatPsi));
        psiMis.add(psimi.getID());
    }

    if (log.isDebugEnabled())
        log.debug("PSI-MI slim subset size " + allPsimiCvs.size());
    if (log.isDebugEnabled())
        log.debug("PSI-MIs  size " + psiMis.size());

    Collection<String> crossCheck = crossCheckFromOBOFile(OboCategory.PSI_MI_SLIM);

    if (log.isDebugEnabled())
        log.debug("crossCheckFromOBOFile().size() " + crossCheck.size());
    Collection<String> difference = CollectionUtils.subtract(crossCheck, psiMis);
    log.info("difference size " + difference.size());

    for (String diff : difference) {
        if (log.isDebugEnabled())
            log.debug("diff MI: " + diff);
    }

    //Only the root term MI:0000 should be in the difference
    Assert.assertEquals(1, difference.size());

}

From source file:uk.ac.ebi.intact.dataexchange.psimi.xml.converter.shared.ExperimentConverter.java

public void extractPublicationAnnotationsAndXrefsAbsentFromExperiment(Publication pub,
        ExperimentDescription expDesc, Experiment exp) {
    AnnotationConverterConfig configAnnotation = ConverterContext.getInstance().getAnnotationConfig();

    if (pub.getAnnotations().isEmpty() && pub.getXrefs().isEmpty()) {
        return;
    }

    for (Annotation attr : pub.getAnnotations()) {
        if (!configAnnotation.isExcluded(attr.getCvTopic())) {
            Attribute attribute = annotationConverter.intactToPsi(attr);

            if (!expDesc.getAttributes().contains(attribute)) {
                expDesc.getAttributes().add(attribute);
            }
        }
    }

    if (!pub.getXrefs().isEmpty()) {
        Set<DbReference> convertedRefs = PsiConverterUtils.toDbReferences(pub.getXrefs(),
                publicationXrefConverter);

        if (expDesc.getXref() == null) {
            Set<DbReference> existingDbRefs = new HashSet();

            if (expDesc.getBibref() != null && expDesc.getBibref().getXref() != null) {
                existingDbRefs.addAll(expDesc.getBibref().getXref().getAllDbReferences());
            }
            Collection<DbReference> disjunction = CollectionUtils.subtract(convertedRefs, existingDbRefs);
            if (!disjunction.isEmpty()) {
                Iterator<DbReference> iteratorDb = disjunction.iterator();

                Xref expRef = new Xref(iteratorDb.next());

                while (iteratorDb.hasNext()) {
                    expRef.getSecondaryRef().add(iteratorDb.next());
                }

                expDesc.setXref(expRef);
            }
        } else {
            Xref expRef = expDesc.getXref();

            Set<DbReference> existingDbRefs = new HashSet(expRef.getAllDbReferences());

            if (expDesc.getBibref() != null && expDesc.getBibref().getXref() != null) {
                existingDbRefs.addAll(expDesc.getBibref().getXref().getAllDbReferences());
            }

            Collection<DbReference> disjunction = CollectionUtils.subtract(convertedRefs, existingDbRefs);

            if (!disjunction.isEmpty()) {
                if (expRef.getPrimaryRef() == null) {
                    Iterator<DbReference> iteratorDb = disjunction.iterator();
                    expRef.setPrimaryRef(iteratorDb.next());

                    while (iteratorDb.hasNext()) {
                        expRef.getSecondaryRef().add(iteratorDb.next());
                    }
                } else {
                    expRef.getSecondaryRef().addAll(disjunction);
                }
            }
        }
    }
}
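
Despite the variable name disjunction, subtract here computes the one-sided difference convertedRefs minus existingDbRefs, so only references that are not yet present get added. A stripped-down, hypothetical sketch of that idempotent-add pattern, reusing the names from the else branch above:

    // Hypothetical sketch: add only the converted references that are not already present.
    Collection<DbReference> missing = CollectionUtils.subtract(convertedRefs, existingDbRefs);
    if (!missing.isEmpty()) {
        expRef.getSecondaryRef().addAll(missing);
    }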

From source file:uk.ac.ebi.intact.dataexchange.psimi.xml.converter.util.IntactConverterUtils.java

public static Component newComponent(Institution institution, Participant participant,
        uk.ac.ebi.intact.model.Interaction interaction) {

    Interactor interactor = new InteractorConverter(institution).psiToIntact(participant.getInteractor());

    BiologicalRole psiBioRole = participant.getBiologicalRole();
    if (psiBioRole == null) {
        psiBioRole = PsiConverterUtils.createUnspecifiedBiologicalRole();
    }
    CvBiologicalRole biologicalRole = new BiologicalRoleConverter(institution).psiToIntact(psiBioRole);

    if (participant.getExperimentalRoles().size() > 1) {
        throw new UnsupportedConversionException(
                "Cannot convert participants with more than one experimental role: " + participant);
    }

    // only the first experimental role
    Collection<ExperimentalRole> roles = new ArrayList<ExperimentalRole>(2);

    if (participant.getExperimentalRoles().isEmpty()) {
        if (log.isWarnEnabled())
            log.warn("Participant without experimental roles: " + participant);

        roles.add(PsiConverterUtils.createUnspecifiedExperimentalRole());
    } else {
        roles = participant.getExperimentalRoles();
    }

    Collection<CvExperimentalRole> intactExpRoles = new ArrayList<CvExperimentalRole>(roles.size());

    for (ExperimentalRole role : roles) {
        CvExperimentalRole experimentalRole = new ExperimentalRoleConverter(institution).psiToIntact(role);
        intactExpRoles.add(experimentalRole);
    }

    Component component = new Component(institution, interaction, interactor, intactExpRoles.iterator().next(),
            biologicalRole);

    // author confidence annotations to migrate to componentConfidences later
    Collection<Attribute> annotationConfidencesToMigrate = extractAuthorConfidencesFrom(
            participant.getAttributes());

    // all other attributes will be converted into annotations
    Collection<Attribute> attributesToConvert = CollectionUtils.subtract(participant.getAttributes(),
            annotationConfidencesToMigrate);

    IntactConverterUtils.populateNames(participant.getNames(), component);
    IntactConverterUtils.populateXref(participant.getXref(), component,
            new XrefConverter<ComponentXref>(institution, ComponentXref.class));
    IntactConverterUtils.populateAnnotations(attributesToConvert, component, institution);

    component.setExperimentalRoles(intactExpRoles);

    FeatureConverter featureConverter = new FeatureConverter(institution);

    for (psidev.psi.mi.xml.model.Feature psiFeature : participant.getFeatures()) {
        Feature feature = featureConverter.psiToIntact(psiFeature);
        component.getBindingDomains().add(feature);
        feature.setComponent(component);

        if (interactor instanceof Polymer) {
            Polymer polymer = (Polymer) interactor;
            String sequence = polymer.getSequence();

            if (sequence != null) {
                for (Range r : feature.getRanges()) {

                    r.prepareSequence(polymer.getSequence());
                }
            }
        }
    }

    for (ParticipantIdentificationMethod pim : participant.getParticipantIdentificationMethods()) {
        ParticipantIdentificationMethodConverter pimConverter = new ParticipantIdentificationMethodConverter(
                institution);
        CvIdentification cvIdentification = pimConverter.psiToIntact(pim);
        component.getParticipantDetectionMethods().add(cvIdentification);
    }

    for (ExperimentalPreparation expPrep : participant.getExperimentalPreparations()) {
        CvObjectConverter<CvExperimentalPreparation, ExperimentalPreparation> epConverter = new CvObjectConverter<CvExperimentalPreparation, ExperimentalPreparation>(
                institution, CvExperimentalPreparation.class, ExperimentalPreparation.class);
        CvExperimentalPreparation cvExpPrep = epConverter.psiToIntact(expPrep);
        component.getExperimentalPreparations().add(cvExpPrep);
    }

    if (!participant.getHostOrganisms().isEmpty()) {
        HostOrganism hostOrganism = participant.getHostOrganisms().iterator().next();
        Organism organism = new Organism();
        organism.setNcbiTaxId(hostOrganism.getNcbiTaxId());
        organism.setNames(hostOrganism.getNames());
        organism.setCellType(hostOrganism.getCellType());
        organism.setCompartment(hostOrganism.getCompartment());
        organism.setTissue(hostOrganism.getTissue());

        BioSource bioSource = new OrganismConverter(institution).psiToIntact(organism);
        component.setExpressedIn(bioSource);
    }

    ParticipantConfidenceConverter confConverter = new ParticipantConfidenceConverter(institution);
    for (psidev.psi.mi.xml.model.Confidence psiConfidence : participant.getConfidenceList()) {
        ComponentConfidence confidence = confConverter.psiToIntact(psiConfidence);
        component.addConfidence(confidence);
    }
    for (Attribute authorConf : annotationConfidencesToMigrate) {

        String value = authorConf.getValue();
        ComponentConfidence confidence = confConverter.newConfidenceInstance(value);

        CvConfidenceType cvConfType = new CvConfidenceType();
        cvConfType.setOwner(confConverter.getInstitution());
        cvConfType.setShortLabel(AUTHOR_SCORE);
        confidence.setCvConfidenceType(cvConfType);

        component.addConfidence(confidence);
    }

    ParticipantParameterConverter paramConverter = new ParticipantParameterConverter(institution);
    for (psidev.psi.mi.xml.model.Parameter psiParameter : participant.getParameters()) {
        ComponentParameter parameter = paramConverter.psiToIntact(psiParameter);
        component.addParameter(parameter);
    }

    return component;
}

From source file:uk.ac.ebi.intact.dataexchange.psimi.xml.converter.util.PsiConverterUtils.java

public static void populateXref(AnnotatedObject<?, ?> annotatedObject, XrefContainer xrefContainer,
        XrefConverter converter) {

    // ac - create a xref to the institution db
    String ac = annotatedObject.getAc();
    boolean containsAcXref = false;
    DbReference acRef = null;

    Collection<? extends uk.ac.ebi.intact.model.Xref> xrefs;
    if (converter.isCheckInitializedCollections()) {
        xrefs = IntactCore.ensureInitializedXrefs(annotatedObject);
    } else {
        xrefs = annotatedObject.getXrefs();
    }

    if (ac != null) {
        for (uk.ac.ebi.intact.model.Xref xref : xrefs) {
            if (annotatedObject.getAc().equals(xref.getPrimaryId())) {
                containsAcXref = true;
                break;
            }
        }

        if (!containsAcXref) {
            String dbMi = null;
            String db = null;

            // calculate the owner of the interaction, based on the AC prefix first,
            // then in the defaultInstitutionForACs if passed to the ConverterContext or,
            // finally to the Institution in the source section of the PSI-XML
            if (ac.startsWith("EBI")) {
                dbMi = Institution.INTACT_REF;
                db = Institution.INTACT.toLowerCase();
            } else if (ac.startsWith("MINT")) {
                dbMi = Institution.MINT_REF;
                db = Institution.MINT.toLowerCase();
            } else if (ConverterContext.getInstance().getDefaultInstitutionForAcs() != null) {
                Institution defaultInstitution = ConverterContext.getInstance().getDefaultInstitutionForAcs();
                dbMi = converter.calculateInstitutionPrimaryId(defaultInstitution);
                db = defaultInstitution.getShortLabel().toLowerCase();
            } else {
                dbMi = converter.getInstitutionPrimaryId();
                db = converter.getInstitution().getShortLabel().toLowerCase();
            }

            acRef = new DbReference(db, dbMi, ac, CvXrefQualifier.IDENTITY, CvXrefQualifier.IDENTITY_MI_REF);
        }
    }

    if (acRef == null && xrefs.isEmpty()) {
        return;
    }

    Xref xref = xrefContainer.getXref();

    if (xref == null) {
        xref = new Xref();
    }

    Collection<DbReference> dbRefs = toDbReferences(annotatedObject, xrefs, converter);

    // normally the primary reference is the identity reference, but for bibliographic references
    // it is the primary-reference and it does not contain secondary refs
    if (xrefContainer instanceof Bibref) {
        DbReference primaryRef = getPrimaryReference(dbRefs, CvDatabase.PUBMED_MI_REF);

        if (primaryRef != null) {
            xref.setPrimaryRef(primaryRef);
        } else {
            primaryRef = getPrimaryReference(dbRefs, CvDatabase.DOI_MI_REF);

            if (primaryRef != null) {
                xref.setPrimaryRef(primaryRef);

                if (log.isWarnEnabled())
                    log.warn("Primary-reference (refTypeAc=" + CvXrefQualifier.PRIMARY_REFERENCE_MI_REF + ") "
                            + " found in " + xrefContainer.getClass().getSimpleName() + ": " + xrefContainer
                            + ", located at: "
                            + ConverterContext.getInstance().getLocation().getCurrentLocation()
                                    .pathFromRootAsString()
                            + " is neither a reference to Pubmed (dbAc=" + CvDatabase.PUBMED_MI_REF
                            + ") nor a DOI (dbAc=" + CvDatabase.DOI_MI_REF + ")");

            } else {
                primaryRef = getPrimaryReference(dbRefs);

                if (primaryRef != null) {
                    xref.setPrimaryRef(primaryRef);
                } else {
                    if (log.isWarnEnabled())
                        log.warn("No primary-reference (refTypeAc=" + CvXrefQualifier.PRIMARY_REFERENCE_MI_REF
                                + ") " + " could be found in " + xrefContainer.getClass().getSimpleName() + ": "
                                + xrefContainer + ", located at: " + ConverterContext.getInstance()
                                        .getLocation().getCurrentLocation().pathFromRootAsString());
                }
            }

            // add the secondary xrefs
            xref.getSecondaryRef().addAll(CollectionUtils.subtract(dbRefs, Arrays.asList(primaryRef)));
        }
    } else {
        // remove the primary ref from the collection if it is a experiment
        // so we don't have the same ref in the bibref and the xref sections
        if (annotatedObject instanceof Experiment) {
            DbReference bibref = getPrimaryReference(dbRefs, CvDatabase.PUBMED_MI_REF);

            if (bibref == null) {
                bibref = getPrimaryReference(dbRefs, CvDatabase.DOI_MI_REF);
            }

            if (bibref != null) {
                dbRefs.remove(bibref);
            }
        }

        DbReference primaryRef = getIdentity(dbRefs, acRef);

        if (primaryRef == null) {
            primaryRef = getPrimaryReference(dbRefs);
        }

        xref.setPrimaryRef(primaryRef);

        // remove the primary ref
        // from the collection and add the rest as secondary refs
        dbRefs.remove(primaryRef);

        for (DbReference secDbRef : dbRefs) {
            if (!xref.getSecondaryRef().contains(secDbRef)) {
                xref.getSecondaryRef().add(secDbRef);
            }
        }
    }

    if (xref.getPrimaryRef() != null) {
        xrefContainer.setXref(xref);
    }
}

From source file:uk.ac.ebi.intact.dbupdate.prot.actions.impl.DuplicatesFixerImpl.java

/**
 * Merges the duplicates; the interactions are moved (but not the cross references, as those will be deleted)
 * @param duplicates
 */
protected Protein merge(List<Protein> duplicates,
        Map<String, Collection<Component>> proteinsNeedingPartialMerge, DuplicatesFoundEvent evt,
        boolean isSequenceChanged) {
    ProteinUpdateProcessorConfig config = ProteinUpdateContext.getInstance().getConfig();
    ProteinUpdateErrorFactory errorfactory = config.getErrorFactory();

    DaoFactory factory = evt.getDataContext().getDaoFactory();

    // calculate the original protein (the oldest is kept as original)
    Protein originalProt = calculateOriginalProtein(duplicates);
    // set the protein kept from the merge
    evt.setReferenceProtein(originalProt);

    // the merge can be done without looking at the sequence of the duplicates
    if (!isSequenceChanged) {
        // move the interactions from the rest of proteins to the original
        for (Protein duplicate : duplicates) {

            // don't process the original protein with itself
            if (!duplicate.getAc().equals(originalProt.getAc())) {

                // move the interactions
                Set<String> movedInteractions = ProteinTools.moveInteractionsBetweenProteins(originalProt,
                        duplicate, evt.getDataContext(), (ProteinUpdateProcessor) evt.getSource(),
                        evt.getPrimaryUniprotAc());

                // report the interactions to move
                reportMovedInteraction(duplicate, movedInteractions, evt);

                // add the intact secondary references
                Collection<InteractorXref> addedXRef = ProteinTools.addIntactSecondaryReferences(originalProt,
                        duplicate, factory);

                // update the protein transcripts if necessary
                Collection<String> updatedTranscripts = ProteinTools.updateProteinTranscripts(factory,
                        originalProt, duplicate);

                evt.getMovedXrefs().put(duplicate.getAc(), addedXRef);
                evt.getUpdatedTranscripts().put(duplicate.getAc(), updatedTranscripts);

                // the duplicate will be deleted
                //factory.getProteinDao().update((ProteinImpl) duplicate);

                // and delete the duplicate if no active instances are attached to it
                if (duplicate.getActiveInstances().isEmpty()) {
                    ProteinEvent protEvt = new ProteinEvent(evt.getSource(), evt.getDataContext(), duplicate,
                            "Duplicate of " + originalProt.getAc());
                    protEvt.setUniprotIdentity(evt.getPrimaryUniprotAc());
                    deleteProtein(protEvt);
                } else {
                    throw new ProcessorException("The duplicate " + duplicate.getAc() + " still has "
                            + duplicate.getActiveInstances().size() + " active instances and should not.");
                }
            }
        }
    }
    // before merging, we need to check the feature conflicts because the sequence needs to be updated
    else {
        // even if the ranges were not shifted, the sequence has been updated
        evt.setHasShiftedRanges(true);
        ProteinUpdateProcessor processor = (ProteinUpdateProcessor) evt.getSource();

        // move the interactions from the rest of proteins to the original
        for (Protein duplicate : duplicates) {
            // sequence of the duplicate
            String sequence = duplicate.getSequence();

            // don't process the original protein with itself
            if (!duplicate.getAc().equals(originalProt.getAc())) {

                // we have feature conflicts for this protein which cannot be merged and becomes deprecated
                if (proteinsNeedingPartialMerge.containsKey(duplicate.getAc())) {
                    ProteinUpdateError impossibleMerge = errorfactory.createImpossibleMergeError(
                            duplicate.getAc(), originalProt.getAc(), evt.getPrimaryUniprotAc(),
                            "the duplicated protein has "
                                    + proteinsNeedingPartialMerge.get(duplicate.getAc()).size()
                                    + " components with range conflicts. The protein is now deprecated.");
                    processor.fireOnProcessErrorFound(new UpdateErrorEvent(processor, evt.getDataContext(),
                            impossibleMerge, duplicate, evt.getPrimaryUniprotAc()));

                    // add no-uniprot-update and caution
                    Collection<Annotation> addedAnnotations = addAnnotationsForBadParticipant(duplicate,
                            originalProt.getAc(), factory);
                    // components to let on the current protein
                    Collection<Component> componentToFix = proteinsNeedingPartialMerge.get(duplicate.getAc());
                    // components without conflicts to move on the original protein
                    Collection<Component> componentToMove = CollectionUtils
                            .subtract(duplicate.getActiveInstances(), componentToFix);

                    Set<String> movedInteractions = Collections.EMPTY_SET;
                    // move components without conflicts
                    if (!componentToMove.isEmpty()) {
                        movedInteractions = ComponentTools.moveComponents(originalProt, duplicate,
                                evt.getDataContext(), processor, componentToMove, evt.getPrimaryUniprotAc());
                    }

                    // report the interactions to move before moving them
                    reportMovedInteraction(duplicate, movedInteractions, evt);

                    evt.getAddedAnnotations().put(duplicate.getAc(), addedAnnotations);

                    // the sequence is not updated because of range conflicts
                    //double relativeConservation = computesRequenceConservation(sequence, evt.getUniprotSequence());
                    // if the sequence in uniprot is different than the one of the duplicate, need to update the sequence and shift the ranges
                    //processor.fireOnProteinSequenceChanged(new ProteinSequenceChangeEvent(processor, evt.getDataContext(), duplicate, sequence, evt.getPrimaryUniprotAc(), evt.getUniprotSequence(), evt.getUniprotCrc64(), relativeConservation));

                    // update duplicate which will be kept because of range conflicts
                    factory.getProteinDao().update((ProteinImpl) duplicate);
                }
                // we don't have feature conflicts, we can merge the proteins normally
                else {

                    // move the interactions
                    Set<String> movedInteractions = ProteinTools.moveInteractionsBetweenProteins(originalProt,
                            duplicate, evt.getDataContext(), processor, evt.getPrimaryUniprotAc());

                    // report the interactions to move before moving them
                    reportMovedInteraction(duplicate, movedInteractions, evt);

                    // the duplicate will be deleted, add intact secondary references
                    Collection<InteractorXref> addedXRef = ProteinTools
                            .addIntactSecondaryReferences(originalProt, duplicate, factory);
                    evt.getMovedXrefs().put(duplicate.getAc(), addedXRef);

                    // if the sequence in uniprot is different than the one of the duplicate, need to update the sequence and shift the ranges
                    if (ProteinTools.isSequenceChanged(sequence, evt.getUniprotSequence())) {
                        double relativeConservation = computesRequenceConservation(sequence,
                                evt.getUniprotSequence());
                        processor.fireOnProteinSequenceChanged(new ProteinSequenceChangeEvent(processor,
                                evt.getDataContext(), duplicate, sequence, evt.getPrimaryUniprotAc(),
                                evt.getUniprotSequence(), evt.getUniprotCrc64(), relativeConservation));
                    }
                }

                // update isoforms and feature chains
                Collection<String> updatedTranscripts = ProteinTools.updateProteinTranscripts(factory,
                        originalProt, duplicate);

                evt.getUpdatedTranscripts().put(duplicate.getAc(), updatedTranscripts);

                // and delete the duplicate if no active instances are still attached to it
                if (duplicate.getActiveInstances().isEmpty()) {
                    ProteinEvent protEvt = new ProteinEvent(evt.getSource(), evt.getDataContext(), duplicate,
                            "Duplicate of " + originalProt.getAc());
                    protEvt.setUniprotIdentity(evt.getPrimaryUniprotAc());
                    deleteProtein(protEvt);
                } else {
                    log.trace("The duplicate " + duplicate.getAc() + " still have "
                            + duplicate.getActiveInstances().size()
                            + " active instances and cannot be deleted.");
                }
            }
        }
    }

    return originalProt;
}

From source file:uk.ac.ebi.intact.editor.controller.bulk.BulkAnnotationController.java

public void addBulkAnnotation(ActionEvent evt) {
    Class aoClass = null;
    try {
        aoClass = Thread.currentThread().getContextClassLoader().loadClass(aoClassName);

        try {
            getBulkOperations().getIntactDao().getUserContext().setUser(getCurrentUser());
            updatedAcs = getBulkOperations().addAnnotation(new DefaultAnnotation(this.topic, this.value), acs,
                    aoClass, replaceIfTopicExists);
        } catch (SynchronizerException e) {
            addErrorMessage("Cannot add annotation " + this.topic, e.getCause() + ": " + e.getMessage());
        } catch (FinderException e) {
            addErrorMessage("Cannot add annotation " + this.topic, e.getCause() + ": " + e.getMessage());
        } catch (PersisterException e) {
            addErrorMessage("Cannot add annotation " + this.topic, e.getCause() + ": " + e.getMessage());
        } catch (Throwable e) {
            addErrorMessage("Cannot add annotation " + this.topic, e.getCause() + ": " + e.getMessage());
        }

        if (acs.length > 0 && updatedAcs.length == 0) {
            addErrorMessage("Operation failed. The acs may not exist in the database",
                    "None of the ACs could be updated (do they exist?)");
            couldNotUpdateAcs = acs;
        } else if (acs.length != updatedAcs.length) {
            List<String> acsList = Arrays.asList(acs);
            List<String> updatedAcsList = Arrays.asList(updatedAcs);

            Collection<String> couldNotUpdateList = CollectionUtils.subtract(acsList, updatedAcsList);
            couldNotUpdateAcs = couldNotUpdateList.toArray(new String[couldNotUpdateList.size()]);

            addWarningMessage("Finished with warnings", updatedAcs.length + " objects were updated, "
                    + couldNotUpdateAcs.length + " objects couldn't be updated (do they exist?)");
        } else {
            addInfoMessage("Operation successful", updatedAcs.length + " objects were updated");
        }

    } catch (ClassNotFoundException e) {
        addErrorMessage("Could not find class: " + aoClassName, e.getMessage());
    }
}

From source file:uk.ac.ebi.intact.editor.controller.dbmanager.ImportJobController.java

public void load(ComponentSystemEvent event) {

    if (!FacesContext.getCurrentInstance().isPostback()) {

        log.debug("Load job summary");
        List<JobInstance> existingJobs1 = getJobInstances("interactionMixImport");
        List<JobInstance> existingJobs2 = getJobInstances("complexImport");
        this.runningJobEvidence = getRunningJobExecutions("interactionMixImport");
        this.runningJobComplex = getRunningJobExecutions("complexImport");
        this.completedJobComplex = new ArrayList<JobExecution>();
        this.completedJobEvidence = new ArrayList<JobExecution>();

        for (JobInstance jobEvidence : existingJobs1) {
            List<JobExecution> allExecutions = getJobExecutions(jobEvidence.getId());
            this.completedJobEvidence.addAll(CollectionUtils.subtract(allExecutions, runningJobEvidence));
        }
        for (JobInstance jobComplex : existingJobs2) {
            List<JobExecution> allExecutions = getJobExecutions(jobComplex.getId());
            this.completedJobComplex.addAll(CollectionUtils.subtract(allExecutions, runningJobComplex));
        }
    }
}

From source file:uk.ac.ebi.intact.util.protein.ProteinServiceImpl.java

/**
 * Update an existing IntAct protein's annotations.
 * <p/>
 * That includes all Xrefs, Aliases, and splice variants.
 *
 * @param protein        the intact protein to update.
 * @param uniprotProtein the uniprot protein used for data input.
 */
private void updateProtein(Protein protein, UniprotProtein uniprotProtein) throws ProteinServiceException {
    List<Protein> proteins = new ArrayList<Protein>();

    // check that both proteins carry the same organism information
    if (!UpdateBioSource(protein, uniprotProtein.getOrganism())) {
        return;
    }

    // Fullname
    String fullname = uniprotProtein.getDescription();
    if (fullname != null && fullname.length() > 250) {
        if (log.isDebugEnabled()) {
            log.debug("Truncating fullname to the first 250 first chars.");
        }
        fullname = fullname.substring(0, 250);
    }
    protein.setFullName(fullname);

    // Shortlabel
    protein.setShortLabel(generateProteinShortlabel(uniprotProtein));

    // Xrefs -- but UniProt's as they are supposed to be up-to-date at this stage.
    XrefUpdaterReport reports = XrefUpdaterUtils.updateAllXrefs(protein, uniprotProtein, databaseName2mi,
            IntactContext.getCurrentInstance().getDataContext(), processor,
            new TreeSet<InteractorXref>(new InteractorXrefComparator()),
            new TreeSet<UniprotXref>(new UniprotXrefComparator(databaseName2mi)));

    uniprotServiceResult.addXrefUpdaterReport(reports);

    // Aliases
    AliasUpdaterUtils.updateAllAliases(protein, uniprotProtein,
            IntactContext.getCurrentInstance().getDataContext(), processor);

    // Sequence
    updateProteinSequence(protein, uniprotProtein.getSequence(), uniprotProtein.getCrc64());

    // Persist changes
    DaoFactory daoFactory = IntactContext.getCurrentInstance().getDataContext().getDaoFactory();
    ProteinDao pdao = daoFactory.getProteinDao();
    pdao.update((ProteinImpl) protein);

    ///////////////////////////////
    // Update Splice Variants and feature chains

    // search intact
    // splice variants with no 'no-uniprot-update'
    Collection<ProteinImpl> spliceVariantsAndChains = pdao.getSpliceVariants(protein);

    // feature chains
    spliceVariantsAndChains.addAll(pdao.getProteinChains(protein));

    // We create a copy of the collection that holds the protein transcripts because findMatches removes protein
    // transcripts from the collection when a match is found. Without the copy, the first run would find the matches
    // and create the protein transcripts correctly, but the transcripts would also be removed from the collection,
    // so on a second run the splice variants would no longer be linked to the uniprotProtein and would therefore
    // not be correctly updated.
    Collection<UniprotProteinTranscript> variantsClone = new ArrayList<UniprotProteinTranscript>();

    variantsClone.addAll(uniprotProtein.getSpliceVariants());
    variantsClone.addAll(uniprotProtein.getFeatureChains());

    for (UniprotProteinTranscript transcript : variantsClone) {
        proteins.addAll(createOrUpdateProteinTranscript(transcript, uniprotProtein, protein));
    }

    if (!proteins.containsAll(spliceVariantsAndChains)) {

        if (proteins.size() < spliceVariantsAndChains.size()) {
            for (Object protNotUpdated : CollectionUtils.subtract(spliceVariantsAndChains, proteins)) {
                Protein prot = (Protein) protNotUpdated;

                if (prot.getActiveInstances().size() == 0) {
                    deleteProtein(prot);

                    uniprotServiceResult.addMessage(
                            "The protein " + getProteinDescription(prot) + " is a protein transcript of "
                                    + getProteinDescription(protein) + " in IntAct but not in Uniprot."
                                    + " As it is not part of any interactions in IntAct we have deleted it.");

                } else if (ProteinUtils.isFromUniprot(prot)) {
                    uniprotServiceResult.addError(
                            UniprotServiceResult.SPLICE_VARIANT_IN_INTACT_BUT_NOT_IN_UNIPROT,
                            "In Intact the protein " + getProteinDescription(prot)
                                    + " is a protein transcript of protein " + getProteinDescription(protein)
                                    + " but in Uniprot it is not the case. As it is part of interactions in IntAct we couldn't "
                                    + "delete it.");
                }
            }
        } else {
            Collection<Protein> spliceVariantsNotUpdated = new ArrayList<Protein>(spliceVariantsAndChains);
            spliceVariantsNotUpdated.removeAll(CollectionUtils.intersection(spliceVariantsAndChains, proteins));

            for (Protein protNotUpdated : spliceVariantsNotUpdated) {

                if (protNotUpdated.getActiveInstances().size() == 0) {
                    deleteProtein(protNotUpdated);

                    uniprotServiceResult.addMessage("The protein " + getProteinDescription(protNotUpdated)
                            + " is a protein transcript of " + getProteinDescription(protein)
                            + " in IntAct but not in Uniprot."
                            + " As it is not part of any interactions in IntAct we have deleted it.");

                } else if (ProteinUtils.isFromUniprot(protNotUpdated)) {
                    uniprotServiceResult.addError(
                            UniprotServiceResult.SPLICE_VARIANT_IN_INTACT_BUT_NOT_IN_UNIPROT,
                            "In Intact the protein " + getProteinDescription(protNotUpdated)
                                    + " is a protein transcript of protein " + getProteinDescription(protein)
                                    + " but in Uniprot it is not the case. As it is part of interactions in IntAct we couldn't "
                                    + "delete it.");
                }
            }
        }
    }

    //        Collection<ProteinTranscriptMatch> matches = findMatches( variants, variantsClone) );
    /*Collection<ProteinTranscriptMatch> matches = findMatches( spliceVariantsAndChains, variantsClone );
    for ( ProteinTranscriptMatch match : matches ) {
            
    if ( match.isSuccessful() ) {
        // update
        final UniprotProteinTranscript variant = match.getUniprotTranscript();
        final Protein intactProtein = match.getIntactProtein();
            
        if (ProteinUtils.isFromUniprot(intactProtein)){
            updateProteinTranscript(intactProtein, protein, variant, uniprotProtein );
        }
            
        if (variant.getSequence() != null || (variant.getSequence() == null && variant.isNullSequenceAllowed())) {
            proteins.add(intactProtein);
        }
            
    } else if ( match.hasNoIntact() ) {
            
        // TODO in the case of a global update, and the user requested splice variants without interactions to be deleted,
        // TODO we don't create splice variants when they are missing as they wouldn't have interactions anyways.
        // NOTE: this does not apply say in our curation environment as the users want to see imported SV so they can choose them
        // TODO test this
        final ProteinUpdateProcessorConfig config = ProteinUpdateContext.getInstance().getConfig();
        final boolean globalProteinUpdate = config.isGlobalProteinUpdate();
        final boolean deleteProteinTranscript = config.isDeleteProteinTranscriptWithoutInteractions();
            
        if( ! globalProteinUpdate && !deleteProteinTranscript) {
            // create shallow
            Protein intactTranscript = createMinimalisticProteinTranscript( match.getUniprotTranscript(),
                    protein.getAc(),
                    protein.getBioSource(),
                    uniprotProtein );
            // update
            final UniprotProteinTranscript uniprotTranscript = match.getUniprotTranscript();
            updateProteinTranscript( intactTranscript, protein, uniprotTranscript, uniprotProtein);
            
            proteinCreated(intactTranscript);
            
            if (uniprotTranscript.getSequence() != null || (uniprotTranscript.getSequence() == null && uniprotTranscript.isNullSequenceAllowed())) {
                proteins.add(intactTranscript);
            }
        }
            
    } else {
        Protein intactProteinTranscript = match.getIntactProtein();
            
        if(intactProteinTranscript.getActiveInstances().size() == 0){
            deleteProtein(intactProteinTranscript);
            
            uniprotServiceResult.addMessage("The protein " + getProteinDescription(intactProteinTranscript) +
                    " is a protein transcript of " + getProteinDescription(protein) + " in IntAct but not in Uniprot." +
                    " As it is not part of any interactions in IntAct we have deleted it."  );
            
        }else if (ProteinUtils.isFromUniprot(intactProteinTranscript)){
            uniprotServiceResult.addError(UniprotServiceResult.SPLICE_VARIANT_IN_INTACT_BUT_NOT_IN_UNIPROT,
                    "In Intact the protein "+ getProteinDescription(intactProteinTranscript) +
                            " is a protein transcript of protein "+ getProteinDescription(protein)+
                            " but in Uniprot it is not the case. As it is part of interactions in IntAct we couldn't " +
                            "delete it.");
        }
    }
    }*/
}