Example usage for java.util LinkedHashSet addAll

List of usage examples for java.util LinkedHashSet addAll

Introduction

On this page you can find usage examples for java.util LinkedHashSet addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
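
Before the real-world examples below, here is a minimal self-contained sketch (not taken from any of the listed source files) illustrating the documented behaviour: elements that are already present are skipped, the insertion order of first occurrences is preserved, and the boolean return value reports whether the set changed.

import java.util.Arrays;
import java.util.LinkedHashSet;

public class LinkedHashSetAddAllDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<String>(Arrays.asList("a", "b"));

        // "b" and "a" are already present and are ignored; "c" and "d" are appended
        boolean changed = set.addAll(Arrays.asList("b", "c", "a", "d"));

        System.out.println(changed); // true, because "c" and "d" were added
        System.out.println(set); // [a, b, c, d] - first-insertion order is kept

        // Adding only elements that are already present leaves the set
        // unchanged, so addAll returns false
        System.out.println(set.addAll(Arrays.asList("a", "b"))); // false
    }
}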

Usage

From source file:org.jahia.services.content.JCRPublicationService.java

public void publishByInfoList(final List<PublicationInfo> publicationInfos, final String sourceWorkspace,
        final String destinationWorkspace, boolean checkPermissions, final List<String> comments)
        throws RepositoryException {
    LinkedHashSet<String> allIds = new LinkedHashSet<String>();

    for (PublicationInfo publicationInfo : publicationInfos) {
        allIds.addAll(publicationInfo.getAllUuids(false, false, false));
        for (PublicationInfo subtree : publicationInfo.getAllReferences()) {
            allIds.addAll(subtree.getAllUuids(false, false, false));
        }
    }
    publish(new ArrayList<String>(allIds), sourceWorkspace, destinationWorkspace, checkPermissions, comments);
}

From source file:uk.ac.ebi.intact.dataexchange.cvutils.model.CvObjectOntologyBuilder.java

/**
 * The main method which converts an OBOObject to a CvObject.
 */
public <T extends CvObject> T toCvObject(OBOObject oboObj, OboCategory... categories) {
    T cvObject;
    Class<T> cvClass = null;
    try {

        if (log.isTraceEnabled())
            log.trace("ID    ->" + oboObj.getID() + "   Name ->" + oboObj.getName());

        /* First check whether it has more than one parent.
         * If true, get all the children and add each of them to
         * map4misWithMoreParent with a new LinkedHashSet copied
         * from the parent's set.
         */

        if (this.isHavingMoreThanOneParent(oboObj.getID())) {

            String[] children = getChildren4MisWithMoreParent(oboObj);

            for (String child : children) {
                Set<String> linkedSet4child = map4misWithMoreParent.get(oboObj.getID());

                if (linkedSet4child != null) {
                    LinkedHashSet<String> newLinkedSet4child = new LinkedHashSet<String>();
                    newLinkedSet4child.addAll(linkedSet4child);

                    map4misWithMoreParent.put(child, newLinkedSet4child);
                }
            }

            /*
             * map4misWithMoreParent maps the MI id to a LinkedHashSet
             * containing all the root parent classes. As we have to create
             * CV terms for each root, each iteration takes one root, assigns
             * it to the CV class and removes it from the set; otherwise we
             * would not know which parent class had already been assigned.
             */
            String rootclass = null;
            Set<String> linkedSet = map4misWithMoreParent.get(oboObj.getID());
            if (linkedSet != null && linkedSet.size() > 0) {
                rootclass = (String) linkedSet.toArray()[0];
                cvClass = getRootClass(rootclass);
            }

            if (rootclass != null) {
                linkedSet.remove(rootclass);
            }

            if (log.isTraceEnabled()) {
                log.trace("More than One Parent True " + oboObj.getID());
            }
            /*
             * If it has more than one parent but cvClass is still null, the
             * parents come from the same root class (e.g. 3 parents but only
             * 2 root classes), so on the third iteration the set is already
             * exhausted and null is returned.
             */
            if (cvClass == null) {
                cvClass = findCvClassforMI(oboObj.getID());
                if (cvClass != null) {
                    String processedKey_ = createCvKey(cvClass, oboObj.getID());

                    if (processed.containsKey(processedKey_)) {
                        return (T) processed.get(processedKey_);
                    }
                }
            }

        } else {
            //find the CvClass for any given MI identifier
            cvClass = findCvClassforMI(oboObj.getID());
        }

        //if CvClass is null then CvTopic.class is taken as default
        if (cvClass == null) {
            cvClass = (Class<T>) CvTopic.class;
        }
        if (log.isTraceEnabled())
            log.trace("cvClass ->" + cvClass.getName());

        //Check whether the given MI term has already been converted to a CV object; if so, return the cached CvObject
        String processedKey = createCvKey(cvClass, oboObj.getID());

        if (processed.containsKey(processedKey)) {
            return (T) processed.get(processedKey);
        }

        final Institution institution = IntactContext.getCurrentInstance().getInstitution();

        //Short label: look for an EXACT PSI-MI-short synonym in the OBO 1.2 synonym tag
        String shortLabel = calculateShortLabel(oboObj);
        cvObject = CvObjectUtils.createCvObject(institution, cvClass, null, shortLabel);
        if (log.isTraceEnabled())
            log.trace("shortLabel     ->" + shortLabel);

        //Identity xref is added to all the cvs
        cvObject.addXref(createIdentityXref(cvObject, oboObj.getID()));
        cvObject.setIdentifier(oboObj.getID());
        cvObject.setFullName(oboObj.getName());

        /********************************
         *Database and Qualifier Cv
         ********************************/
        Set<Dbxref> defDbXrefSet = oboObj.getDefDbxrefs();
        Object[] dbxrefArray = defDbXrefSet.toArray();

        //check if Unique Resid
        boolean uniqueResid = checkIfUniqueResid(dbxrefArray);

        if (dbxrefArray != null) {
            CvObjectXref xref;
            //more than one dbxreference
            //add the first one
            boolean firstDatabasexref = false;
            int firstPubmedIndex = getPubmedIndex(dbxrefArray);

            //add  xrefs
            for (int i = 0; i < dbxrefArray.length; i++) {

                if (i == firstPubmedIndex) {
                    firstDatabasexref = true;
                }
                Dbxref defDbxref = (Dbxref) dbxrefArray[i];
                CvTermXref cvtermXref = addQualifiersForOtherDbXreferences(defDbxref, firstDatabasexref,
                        uniqueResid);
                if (cvtermXref != null) {
                    xref = toXref(cvObject, cvtermXref.getId(), cvtermXref.getQualifier(),
                            cvtermXref.getDatabase());
                    if (xref != null) {
                        cvObject.addXref(xref);
                    }
                }
            }
        } else {
            log.debug("No dbxreference");
        }

        /********************************
         *Definitions
         ********************************/

        // definition
        if (oboObj.getDefinition() != null) {

            String definition = oboObj.getDefinition();

            if (definition.contains("\n")) {
                String[] defArray = definition.split("\n");
                String prefixString = "";
                String suffixString = "";

                if (defArray.length == 2) {
                    prefixString = defArray[0];
                    suffixString = defArray[1];
                } else if (defArray.length > 2) {
                    prefixString = defArray[0];

                    for (int i = 1; i < defArray.length; i++) {
                        if (i == 1) {
                            suffixString = defArray[i];
                        } else {
                            suffixString = suffixString + "\n" + defArray[i];
                        }
                    }
                }

                if (suffixString.startsWith("OBSOLETE") || oboObj.isObsolete()) {

                    Annotation annot = toAnnotation(CvTopic.OBSOLETE, suffixString);
                    if (annot != null) {
                        cvObject.addAnnotation(annot);
                    }
                    CvTopic definitionTopicDef = CvObjectUtils.createCvObject(institution, CvTopic.class, null,
                            CvTopic.DEFINITION);
                    cvObject.addAnnotation(new Annotation(institution, definitionTopicDef, prefixString));

                } else if (suffixString.startsWith("http")) {

                    Annotation annot = toAnnotation(CvTopic.URL, suffixString);
                    if (annot != null) {
                        cvObject.addAnnotation(annot);
                    }
                    CvTopic definitionTopicDef = CvObjectUtils.createCvObject(institution, CvTopic.class, null,
                            CvTopic.DEFINITION);
                    cvObject.addAnnotation(new Annotation(institution, definitionTopicDef, prefixString));

                } else {
                    if (log.isDebugEnabled())
                        log.debug(" Line Break in Definition--special case  MI: " + oboObj.getID()
                                + "  Defintion:  " + oboObj.getDefinition());
                    CvTopic definitionTopic = CvObjectUtils.createCvObject(institution, CvTopic.class, null,
                            CvTopic.DEFINITION);
                    cvObject.addAnnotation(
                            new Annotation(institution, definitionTopic, oboObj.getDefinition()));
                }

            } else {

                CvTopic definitionTopic = CvObjectUtils.createCvObject(institution, CvTopic.class, null,
                        CvTopic.DEFINITION);
                cvObject.addAnnotation(new Annotation(institution, definitionTopic, oboObj.getDefinition()));
            }
        } //end of definition

        /********************************
         *XREF ANNOTATIONS
         ********************************/
        Set<Dbxref> dbxrefSet = oboObj.getDbxrefs();
        for (Dbxref dbxref : dbxrefSet) {

            String xref = dbxref.toString();
            if (xref.contains(CvTopic.XREF_VALIDATION_REGEXP)) {
                int firstIndex = xref.indexOf('"');
                int lastIndex = xref.lastIndexOf('"');

                String annotationText = firstIndex > -1 && lastIndex > -1
                        ? xref.substring(firstIndex + 1, lastIndex)
                        : xref;
                Annotation annot = toAnnotation(CvTopic.XREF_VALIDATION_REGEXP, annotationText);
                if (annot != null) {
                    cvObject.addAnnotation(annot);
                }
            } else if (xref.contains(CvTopic.SEARCH_URL)) {
                Annotation annot = toAnnotation(CvTopic.SEARCH_URL, dbxref.getDesc());
                if (annot != null) {
                    cvObject.addAnnotation(annot);
                }
            }
        }

        /********************************
         * comment
         ********************************/
        if (oboObj.getComment() != null && oboObj.getComment().length() > 0) {

            Annotation annot = toAnnotation(CvTopic.COMMENT, oboObj.getComment());
            if (annot != null) {
                cvObject.addAnnotation(annot);
            }
        } //end comment

        /********************************
         * Alias
         ********************************/
        Set<Synonym> syn = oboObj.getSynonyms();
        CvObjectAlias alias_;
        for (Synonym aSyn : syn) {
            SynonymType synCat = aSyn.getSynonymType();
            if (synCat != null && synCat.getID() != null
                    && synCat.getID().equalsIgnoreCase(CvObjectOntologyBuilder.ALIAS_IDENTIFIER)) {
                String aliasName = aSyn.getText();
                alias_ = (CvObjectAlias) toAlias(cvObject, aliasName);
                cvObject.addAlias(alias_);
            }
        }

        processed.put(processedKey, cvObject);

        if (log.isTraceEnabled())
            log.trace("--Processed size " + processed.size());

        if (cvObject instanceof CvDagObject) {
            Collection<Link> childLinks = oboObj.getChildren();

            for (Link childLink1 : childLinks) {

                CvDagObject dagObject = (CvDagObject) cvObject;

                OBOObject childObj = (OBOObject) childLink1.getChild();

                //check for subset
                if (categories == null || categories.length == 0) {
                    dagObject.addChild((CvDagObject) toCvObject(childObj, categories));

                } else {
                    for (OboCategory category : categories) {
                        for (TermSubset oboCat : childObj.getSubsets()) {
                            if (category.getName().equalsIgnoreCase(oboCat.getName())) {
                                if (log.isTraceEnabled()) {
                                    log.trace("Adding child after subset check: " + childObj.getID() + "   "
                                            + childObj.getName());
                                }

                                dagObject.addChild((CvDagObject) toCvObject(childObj, categories));
                            }
                        }
                    }
                }
            }
        }

    } catch (Exception ex) {
        throw new IntactException("Exception converting to CvObject from OBOObject: " + oboObj.getID(), ex);
    }

    return cvObject;
}

From source file:se.unlogic.hierarchy.core.servlets.CoreServlet.java

private void appendLinks(Document doc, ForegroundModuleResponse moduleResponse) {

    Element links = doc.createElement("links");
    doc.getDocumentElement().appendChild(links);

    LinkedHashSet<LinkTag> linkSet = new LinkedHashSet<LinkTag>();

    if (moduleResponse.getLinks() != null) {
        linkSet.addAll(moduleResponse.getLinks());
    }

    if (moduleResponse.getBackgroundModuleResponses() != null) {

        for (BackgroundModuleResponse backgroundModuleResponse : moduleResponse
                .getBackgroundModuleResponses()) {

            if (backgroundModuleResponse.getLinks() != null) {

                linkSet.addAll(backgroundModuleResponse.getLinks());
            }
        }
    }

    XMLUtils.append(doc, links, linkSet);
}

From source file:se.unlogic.hierarchy.core.servlets.CoreServlet.java

private void appendScripts(Document doc, ForegroundModuleResponse moduleResponse) {

    Element scripts = doc.createElement("scripts");
    doc.getDocumentElement().appendChild(scripts);

    LinkedHashSet<ScriptTag> scriptsSet = new LinkedHashSet<ScriptTag>();

    if (moduleResponse.getScripts() != null) {

        scriptsSet.addAll(moduleResponse.getScripts());
    }

    if (moduleResponse.getBackgroundModuleResponses() != null) {

        for (BackgroundModuleResponse backgroundModuleResponse : moduleResponse
                .getBackgroundModuleResponses()) {

            if (backgroundModuleResponse.getScripts() != null) {

                scriptsSet.addAll(backgroundModuleResponse.getScripts());
            }
        }
    }

    XMLUtils.append(doc, scripts, scriptsSet);
}

From source file:cross.io.InputDataFactory.java

/**
 * Create a collection of files from the given string resource paths.
 *
 * @param input the string resource paths
 * @return a collection of files
 */
@Override
public Collection<File> getInputFiles(String[] input) {
    LinkedHashSet<File> files = new LinkedHashSet<>();
    for (String inputString : input) {
        log.debug("Processing input string {}", inputString);
        //separate wildcards from plain files
        String name = FilenameUtils.getName(inputString);
        boolean isWildcard = name.contains("?") || name.contains("*");
        String fullPath = FilenameUtils.getFullPath(inputString);
        File path = new File(fullPath);
        File baseDirFile = new File(this.basedir);
        if (!baseDirFile.exists()) {
            throw new ExitVmException("Input base directory '" + baseDirFile + "' does not exist!");
        }
        if (!baseDirFile.isDirectory()) {
            throw new ExitVmException("Input base directory '" + baseDirFile + "' is not a directory!");
        }
        log.debug("Path is absolute: {}", path.isAbsolute());
        //identify absolute and relative files
        if (!path.isAbsolute()) {
            log.info("Resolving relative file against basedir: {}", this.basedir);
            path = new File(this.basedir, fullPath);
        }
        //normalize filenames
        fullPath = FilenameUtils.normalize(path.getAbsolutePath());
        log.debug("After normalization: {}", fullPath);
        IOFileFilter dirFilter = this.recurse ? TrueFileFilter.INSTANCE : null;
        if (isWildcard) {
            log.debug("Using wildcard matcher for {}", name);
            files.addAll(FileUtils.listFiles(new File(fullPath),
                    new WildcardFileFilter(name, IOCase.INSENSITIVE), dirFilter));
        } else {
            log.debug("Using name for {}", name);
            File f = new File(fullPath, name);
            if (!f.exists()) {
                throw new ExitVmException("Input file '" + f + "' does not exist!");
            }
            files.add(f);
        }
    }
    return files;
}

From source file:istata.service.StataService.java

/**
 * produce a list with possible sidebar suggestions for the current context
 *
 * @param filter
 * @param pos
 * @param from
 * @param to
 * @return
 */
public List<ContentLine> suggest(String filter, int pos, int from, int to) {
    LinkedHashSet<ContentLine> res = new LinkedHashSet<ContentLine>();

    ArrayList<ContentLine> rescmd = new ArrayList<ContentLine>();
    {
        int i = 0;
        for (ContentLine cl : cmdRepository.findAll()) {
            if (cl.getContent().startsWith(filter)) {
                ContentLine srl = new ContentLine();
                Map<String, Object> model = new HashMap<String, Object>();
                model.put("cmd", cl);
                model.put("from", from);
                model.put("to", to);

                String text = VelocityEngineUtils.mergeTemplateIntoString(velocityEngine, "items/cmd.vm",
                        "UTF-8", model);

                srl.setContent(text);
                srl.setLine(i++);
                rescmd.add(srl);
            }
        }
    }

    Collections.reverse(rescmd);

    res.addAll(rescmd.subList(0, Math.min(10, rescmd.size())));

    List<ContentLine> out = new ArrayList<ContentLine>();

    try {
        IStata stata = stataFactory.getInstance();

        /*
         * get files
         */
        Collection<ContentLine> filesNames = filteredFiles(filter, pos, from, to);
        res.addAll(filesNames);

        /*
         * get VARS; this should probably be a method call
         */

        // current token
        StringBuilder token = new StringBuilder("");
        StringBuilder rest = new StringBuilder(filter);
        int p = (pos == -1 || pos > filter.length()) ? filter.length() : pos;
        char ch = 'x';
        while (p > 0 && (CharUtils.isAsciiAlphanumeric(ch = filter.charAt(p - 1)) || ch == '_')) {
            token.insert(0, ch);
            rest.deleteCharAt(p - 1);
            p--;
        }

        // remove rest of potential token
        while (rest.length() > 0 && p > 0 && p < rest.length()
                && (CharUtils.isAsciiAlphanumeric(rest.charAt(p)) || rest.charAt(p) == '_')) {
            rest.deleteCharAt(p);
        }

        String t = token.toString();

        List<StataVar> list = new ArrayList<StataVar>();
        List<StataVar> listfull = stata.getVars("", false);
        if (t.length() > 0) {
            for (StataVar sv : listfull) {
                if (sv.getName().startsWith(t)) {
                    list.add(sv);
                }
            }
        } else {
            list = listfull;
        }

        for (int i = 0; i < list.size(); i++) {
            ContentLine srl = new ContentLine();
            srl.setLine(i + 100);
            String vname = list.get(i).getName();
            String cl = new StringBuilder(rest).insert(p, " ").insert(p, vname).toString();
            try {
                String cc = URLEncoder.encode(cl, "UTF-8");
                Map<String, Object> model = new HashMap<String, Object>();
                model.put("var", vname);
                model.put("repl", cc);
                model.put("focuspos", p + 1 + vname.length());
                model.put("from", from);
                model.put("to", to);

                String text = VelocityEngineUtils.mergeTemplateIntoString(velocityEngine, "items/var.vm",
                        "UTF-8", model);

                srl.setContent(text);
                res.add(srl);
            } catch (UnsupportedEncodingException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    } catch (StataNotRunningException e) {
        ContentLine srl = new ContentLine();
        srl.setLine(1);
        srl.setContent(
                "<div class='list-group-item sidebaritem error' >" + "Stata not running, you can try to start "
                        + "an instance by clicking " + "<a target='_blank' href='/start'>here</a>" + "</div>");
        out.add(srl);
    } catch (StataBusyException e1) {
        ContentLine srl = new ContentLine();
        srl.setLine(1);
        srl.setContent("<div class='list-group-item sidebaritem error' >"
                + "Stata appears to by busy or not running, you can try to "
                + "start a new instance by clicking " + "<a target='_blank' href='/start'>here</a> "
                + "or wait for the current job to complete</div>");
        out.add(srl);
    }

    out.addAll(res);
    return out;
}

From source file:se.unlogic.hierarchy.core.servlets.CoreServlet.java

private void appendScripts(Document doc, List<BackgroundModuleResponse> moduleResponses) {

    if (moduleResponses != null) {

        Element scripts = doc.createElement("scripts");
        doc.getDocumentElement().appendChild(scripts);

        LinkedHashSet<ScriptTag> scriptsSet = new LinkedHashSet<ScriptTag>();

        for (BackgroundModuleResponse backgroundModuleResponse : moduleResponses) {

            if (backgroundModuleResponse.getScripts() != null) {

                scriptsSet.addAll(backgroundModuleResponse.getScripts());
            }
        }

        XMLUtils.append(doc, scripts, scriptsSet);
    }
}

From source file:org.mskcc.cbio.importer.converter.internal.ConverterImpl.java

/**
 * Generates case lists for the given portal.
 *
  * @param portal String//w  w w.j a v  a2s. c  om
 * @throws Exception
 */
@Override
public void generateCaseLists(String portal) throws Exception {

    if (LOG.isInfoEnabled()) {
        LOG.info("generateCaseLists()");
    }

    // check args
    if (portal == null) {
        throw new IllegalArgumentException("portal must not be null");
    }

    // get portal metadata
    PortalMetadata portalMetadata = config.getPortalMetadata(portal).iterator().next();
    if (portalMetadata == null) {
        if (LOG.isInfoEnabled()) {
            LOG.info("convertData(), cannot find PortalMetadata, returning");
        }
        return;
    }

    // get CaseListMetadata
    Collection<CaseListMetadata> caseListMetadatas = config.getCaseListMetadata(Config.ALL);

    // iterate over all cancer studies
    for (CancerStudyMetadata cancerStudyMetadata : config.getCancerStudyMetadata(portalMetadata.getName())) {
        // iterate over case lists
        for (CaseListMetadata caseListMetadata : caseListMetadatas) {
            if (LOG.isInfoEnabled()) {
                LOG.info("generateCaseLists(), processing cancer study: " + cancerStudyMetadata
                        + ", case list: " + caseListMetadata.getCaseListFilename());
            }
            // how many staging files are we working with?
            String[] stagingFilenames = null;
            // setup union/intersection bools
            boolean unionCaseList = caseListMetadata.getStagingFilenames()
                    .contains(CaseListMetadata.CASE_LIST_UNION_DELIMITER);
            boolean intersectionCaseList = caseListMetadata.getStagingFilenames()
                    .contains(CaseListMetadata.CASE_LIST_INTERSECTION_DELIMITER);
            // union (like all cases)
            if (unionCaseList) {
                stagingFilenames = caseListMetadata.getStagingFilenames()
                        .split("\\" + CaseListMetadata.CASE_LIST_UNION_DELIMITER);
            }
            // intersection (like complete or cna-seq)
            else if (intersectionCaseList) {
                stagingFilenames = caseListMetadata.getStagingFilenames()
                        .split("\\" + CaseListMetadata.CASE_LIST_INTERSECTION_DELIMITER);
            }
            // just a single staging file
            else {
                stagingFilenames = new String[] { caseListMetadata.getStagingFilenames() };
            }
            if (LOG.isInfoEnabled()) {
                LOG.info("generateCaseLists(), stagingFilenames: "
                        + java.util.Arrays.toString(stagingFilenames));
            }
            // this is the set we will pass to writeCaseListFile
            LinkedHashSet<String> caseSet = new LinkedHashSet<String>();
            // this indicates the number of staging files processed -
            // used to verify that an intersection should be written
            int numStagingFilesProcessed = 0;
            for (String stagingFilename : stagingFilenames) {
                if (LOG.isInfoEnabled()) {
                    LOG.info("generateCaseLists(), processing stagingFile: " + stagingFilename);
                }
                // compute the case set
                List<String> caseList = fileUtils.getCaseListFromStagingFile(caseIDs, portalMetadata,
                        cancerStudyMetadata, stagingFilename);
                // we may not have this datatype in study
                if (caseList.size() == 0) {
                    if (LOG.isInfoEnabled()) {
                        LOG.info("generateCaseLists(), stagingFileHeader is empty: " + stagingFilename
                                + ", skipping...");
                    }
                    continue;
                }
                // intersection 
                if (intersectionCaseList) {
                    if (caseSet.isEmpty()) {
                        caseSet.addAll(caseList);
                    } else {
                        caseSet.retainAll(caseList);
                    }
                }
                // otherwise union or single staging (treat the same)
                else {
                    caseSet.addAll(caseList);
                }
                ++numStagingFilesProcessed;
            }
            // write the case list file (don't make empty case lists)
            if (caseSet.size() > 0) {
                if (LOG.isInfoEnabled()) {
                    LOG.info("generateCaseLists(), calling writeCaseListFile()...");
                }
                // do not write out complete cases file unless we've processed all the files required
                if (intersectionCaseList && (numStagingFilesProcessed != stagingFilenames.length)) {
                    if (LOG.isInfoEnabled()) {
                        LOG.info(
                                "generateCaseLists(), number of staging files processed != number staging files required for cases_complete.txt, skipping call to writeCaseListFile()...");
                    }
                    continue;
                }
                fileUtils.writeCaseListFile(portalMetadata, cancerStudyMetadata, caseListMetadata,
                        caseSet.toArray(new String[0]));
            } else if (LOG.isInfoEnabled()) {
                LOG.info("generateCaseLists(), caseSet.size() <= 0, skipping call to writeCaseListFile()...");
            }
            // if union, write out the cancer study metadata file
            if (caseSet.size() > 0 && caseListMetadata.getCaseListFilename().equals(ALL_CASES_FILENAME)) {
                if (LOG.isInfoEnabled()) {
                    LOG.info(
                            "generateCaseLists(), processed all cases list, we can now update cancerStudyMetadata file()...");
                }
                fileUtils.writeCancerStudyMetadataFile(portalMetadata, cancerStudyMetadata, caseSet.size());
            }
        }
    }

}

From source file:se.unlogic.hierarchy.core.servlets.CoreServlet.java

private void appendLinks(Document doc, List<BackgroundModuleResponse> moduleResponses) {

    if (moduleResponses != null) {

        Element links = doc.createElement("links");
        doc.getDocumentElement().appendChild(links);

        LinkedHashSet<LinkTag> linkSet = new LinkedHashSet<LinkTag>();

        // Only add links from background modules that are not previously added
        for (BackgroundModuleResponse moduleResponse : moduleResponses) {

            if (moduleResponse.getLinks() != null) {

                linkSet.addAll(moduleResponse.getLinks());
            }
        }

        XMLUtils.append(doc, links, linkSet);
    }
}

From source file:org.apache.ws.scout.registry.BusinessLifeCycleManagerImpl.java

protected BulkResponse deleteOperation(Collection<Key> keys, String op) throws JAXRException {
    if (keys == null)
        throw new JAXRException("Keys provided to " + op + " are null");

    //Now we need to convert the collection into a vector for juddi
    BulkResponseImpl bulk = new BulkResponseImpl();
    String[] keyarr = new String[keys.size()];
    Result[] keyResultArr;

    LinkedHashSet<Key> coll = new LinkedHashSet<Key>();
    Collection<Exception> exceptions = new ArrayList<Exception>();

    try {
        Iterator iter = keys.iterator();
        int currLoc = 0;
        while (iter.hasNext()) {
            Key key = (Key) iter.next();
            keyarr[currLoc] = key.getId();
            currLoc++;
        }
        // Execute the delete operation against the registry
        DispositionReport bd = (DispositionReport) executeOperation(keyarr, op);
        List<Result> resultList = bd.getResult();
        keyResultArr = new Result[resultList.size()];
        resultList.toArray(keyResultArr);

        log.debug("After deleting Business. Obtained vector size:" + keyResultArr != null ? keyResultArr.length
                : 0);
        for (int i = 0; keyResultArr != null && i < keyResultArr.length; i++) {
            Result result = (Result) keyResultArr[i];
            int errno = result.getErrno();
            if (errno == 0) {
                coll.addAll(keys);
            } else {
                ErrInfo errinfo = result.getErrInfo();
                DeleteException de = new DeleteException(errinfo.getErrCode() + ":" + errinfo.getValue());
                bulk.setStatus(JAXRResponse.STATUS_FAILURE);
                exceptions.add(de);
            }
        }
    } catch (RegistryException regExcept) {

        /*
         * jUDDI (and probably others) throw an exception on any fault in
         * the transaction with the registry, so we don't get any partial
         * success
         */
        DeleteException de = new DeleteException(regExcept.getFaultCode() + ":" + regExcept.getFaultString(),
                regExcept);

        bulk.setStatus(JAXRResponse.STATUS_FAILURE);
        exceptions.add(de);
    } catch (JAXRException tran) {
        exceptions.add(new JAXRException("Apache JAXR Impl:", tran));
        bulk.setStatus(JAXRResponse.STATUS_FAILURE);
    }

    bulk.setCollection(coll);
    bulk.setExceptions(exceptions);

    return bulk;
}