Example usage for java.util LinkedList addAll

List of usage examples for java.util LinkedList addAll

Introduction

This page presents usage examples for java.util LinkedList addAll.

Prototype

public boolean addAll(Collection<? extends E> c) 

Document

Appends all of the elements in the specified collection to the end of this list, in the order that they are returned by the specified collection's iterator.
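
As a quick, self-contained illustration of the prototype above (a minimal sketch using only the JDK, not taken from any of the projects below), addAll appends the collection's elements in the order its iterator returns them and returns true when the list changed:

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class LinkedListAddAllDemo {
    public static void main(String[] args) {
        LinkedList<String> list = new LinkedList<String>(Arrays.asList("a", "b"));
        List<String> more = Arrays.asList("c", "d");

        // Appends "c" and "d" to the end of the list, in iterator order.
        boolean changed = list.addAll(more);

        System.out.println(changed); // true
        System.out.println(list);    // [a, b, c, d]
    }
}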

Usage

From source file:org.nuxeo.ecm.core.opencmis.impl.server.CMISQLtoNXQL.java

/**
 * Gets the NXQL from a CMISQL query.
 */
public String getNXQL(String cmisql, NuxeoCmisService service, Map<String, PropertyDefinition<?>> typeInfo,
        boolean searchAllVersions) throws QueryParseException {
    this.typeInfo = typeInfo;
    boolean searchLatestVersion = !searchAllVersions;
    TypeManagerImpl typeManager = service.repository.getTypeManager();
    coreSession = service.coreSession;

    query = new QueryObject(typeManager);
    CmisQueryWalker walker = null;
    try {
        walker = QueryUtil.getWalker(cmisql);
        walker.setDoFullTextParse(false);
        walker.query(query, new AnalyzingWalker());
    } catch (RecognitionException e) {
        String msg;
        if (walker == null) {
            msg = e.getMessage();
        } else {
            msg = "Line " + e.line + ":" + e.charPositionInLine + " "
                    + walker.getErrorMessage(e, walker.getTokenNames());
        }
        throw new QueryParseException(msg, e);
    } catch (QueryParseException e) {
        throw e;
    } catch (Exception e) {
        throw new QueryParseException(e.getMessage() + " for query: " + cmisql, e);
    }
    if (query.getTypes().size() != 1 && query.getJoinedSecondaryTypes() == null) {
        throw new QueryParseException("JOINs not supported in query: " + cmisql);
    }

    fromType = query.getMainFromName();
    BaseTypeId fromBaseTypeId = fromType.getBaseTypeId();

    // now resolve column selectors to actual database columns
    for (CmisSelector sel : query.getSelectReferences()) {
        recordSelectSelector(sel);
    }
    for (CmisSelector sel : query.getJoinReferences()) {
        ColumnReference col = ((ColumnReference) sel);
        if (col.getTypeDefinition().getBaseTypeId() == BaseTypeId.CMIS_SECONDARY) {
            // ignore reference to ON FACET.cmis:objectId
            continue;
        }
        recordSelector(sel, JOIN);
    }
    for (CmisSelector sel : query.getWhereReferences()) {
        recordSelector(sel, WHERE);
    }
    for (SortSpec spec : query.getOrderBys()) {
        recordSelector(spec.getSelector(), ORDER_BY);
    }

    addSystemColumns();

    List<String> whereClauses = new ArrayList<String>();

    // what to select (result columns)

    String what = StringUtils.join(realColumns.values(), ", ");

    // determine relevant primary types

    String nxqlFrom;
    if (fromBaseTypeId == CMIS_RELATIONSHIP) {
        if (fromType.getParentTypeId() == null) {
            nxqlFrom = NXQL_RELATION;
        } else {
            nxqlFrom = fromType.getId();
        }
    } else {
        nxqlFrom = NXQL_DOCUMENT;
        List<String> types = new ArrayList<String>();
        if (fromType.getParentTypeId() != null) {
            // don't add abstract root types
            types.add(fromType.getId());
        }
        LinkedList<TypeDefinitionContainer> typesTodo = new LinkedList<TypeDefinitionContainer>();
        typesTodo.addAll(typeManager.getTypeDescendants(fromType.getId(), -1, Boolean.TRUE));
        // recurse to get all subtypes
        TypeDefinitionContainer tc;
        while ((tc = typesTodo.poll()) != null) {
            types.add(tc.getTypeDefinition().getId());
            typesTodo.addAll(tc.getChildren());
        }
        if (types.isEmpty()) {
            // shouldn't happen
            types = Collections.singletonList("__NOSUCHTYPE__");
        }
        // build clause
        StringBuilder pt = new StringBuilder();
        pt.append(NXQL.ECM_PRIMARYTYPE);
        pt.append(" IN (");
        for (Iterator<String> it = types.iterator(); it.hasNext();) {
            pt.append(QUOTE);
            pt.append(it.next());
            pt.append(QUOTE);
            if (it.hasNext()) {
                pt.append(", ");
            }
        }
        pt.append(")");
        whereClauses.add(pt.toString());
    }

    // lifecycle not deleted filter

    if (skipDeleted) {
        whereClauses
                .add(String.format("%s <> '%s'", NXQL.ECM_LIFECYCLESTATE, LifeCycleConstants.DELETED_STATE));
    }

    // searchAllVersions filter

    if (searchLatestVersion && fromBaseTypeId == CMIS_DOCUMENT) {
        whereClauses.add(String.format("%s = 1", NXQL.ECM_ISLATESTVERSION));
    }

    // no proxies

    whereClauses.add(String.format("%s = 0", NXQL.ECM_ISPROXY));

    // WHERE clause

    Tree whereNode = walker.getWherePredicateTree();
    boolean distinct = false;
    if (whereNode != null) {
        GeneratingWalker generator = new GeneratingWalker();
        generator.walkPredicate(whereNode);
        whereClauses.add(generator.buf.toString());
        distinct = generator.distinct;
    }

    // ORDER BY clause

    List<String> orderbys = new ArrayList<String>();
    for (SortSpec spec : query.getOrderBys()) {
        String orderby;
        CmisSelector sel = spec.getSelector();
        if (sel instanceof ColumnReference) {
            orderby = (String) sel.getInfo();
        } else {
            orderby = NXQL.ECM_FULLTEXT_SCORE;
        }
        if (!spec.ascending) {
            orderby += " DESC";
        }
        orderbys.add(orderby);
    }

    // create the whole select

    String where = StringUtils.join(whereClauses, " AND ");
    String nxql = "SELECT " + (distinct ? "DISTINCT " : "") + what + " FROM " + nxqlFrom + " WHERE " + where;
    if (!orderbys.isEmpty()) {
        nxql += " ORDER BY " + StringUtils.join(orderbys, ", ");
    }
    // System.err.println("CMIS: " + statement);
    // System.err.println("NXQL: " + nxql);
    return nxql;
}
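
The typesTodo loop above is a common LinkedList idiom: addAll seeds and extends a FIFO work queue while poll drains it, giving a breadth-first walk over the type tree. A minimal sketch of the same pattern, with a hypothetical Node type standing in for TypeDefinitionContainer (illustration only, not part of the Nuxeo sources):

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class BreadthFirstCollect {

    // Hypothetical tree node used only for this sketch.
    static class Node {
        final String id;
        final List<Node> children;

        Node(String id, Node... children) {
            this.id = id;
            this.children = Arrays.asList(children);
        }
    }

    static List<String> collectIds(Node root) {
        List<String> ids = new LinkedList<String>();
        LinkedList<Node> todo = new LinkedList<Node>();
        todo.add(root);
        Node n;
        // poll() returns null once the queue is empty, ending the loop
        while ((n = todo.poll()) != null) {
            ids.add(n.id);
            todo.addAll(n.children); // enqueue all children at the tail
        }
        return ids;
    }

    public static void main(String[] args) {
        Node root = new Node("root", new Node("a", new Node("a1")), new Node("b"));
        System.out.println(collectIds(root)); // [root, a, b, a1]
    }
}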

From source file:org.trnltk.morphology.contextless.rootfinder.RootFinderChain.java

public List<Root> findRootsForPartialInput(TurkishSequence partialInput, TurkishSequence input) {
    LinkedList<Root> roots = new LinkedList<Root>();
    for (RootFinderChainItem rootFinderChainItem : rootFinderChainItems) {
        final RootFinder rootFinder = rootFinderChainItem.getRootFinder();
        final RootFinderPolicy rootFinderPolicy = rootFinderChainItem.getRootFinderPolicy();

        if (!rootFinder.handles(partialInput, input))
            continue;
        final Collection<? extends Root> rootsForPartialInput = rootFinder
                .findRootsForPartialInput(partialInput, input);
        if (CollectionUtils.isNotEmpty(rootsForPartialInput)) {
            // roots must be at the beginning of the partial input
            for (Root rootForPartialInput : rootsForPartialInput) {
                final boolean isPartialInputValid = rootValidator.isValid(rootForPartialInput, partialInput);
                if (!isPartialInputValid)
                    throw new IllegalArgumentException("Invalid root " + rootForPartialInput.toString()
                            + " for partial input " + partialInput);
            }
            roots.addAll(rootsForPartialInput);
        }

        if (RootFinderPolicy.CONTINUE_ON_CHAIN.equals(rootFinderPolicy))
            //noinspection UnnecessaryContinue
            continue;
        else if (RootFinderPolicy.STOP_CHAIN_WHEN_INPUT_IS_HANDLED.equals(rootFinderPolicy))
            break;
        else
            throw new IllegalStateException();
    }
    return roots;
}

From source file:org.apache.fop.layoutmgr.list.ListItemLayoutManager.java

private List getCombinedKnuthElementsForListItem(List<ListElement> labelElements,
        List<ListElement> bodyElements, LayoutContext context) {
    // Copy elements to array lists to improve element access performance
    List[] elementLists = { new ArrayList<ListElement>(labelElements),
            new ArrayList<ListElement>(bodyElements) };
    int[] fullHeights = { ElementListUtils.calcContentLength(elementLists[0]),
            ElementListUtils.calcContentLength(elementLists[1]) };
    int[] partialHeights = { 0, 0 };
    int[] start = { -1, -1 };
    int[] end = { -1, -1 };

    int totalHeight = Math.max(fullHeights[0], fullHeights[1]);
    int step;
    int addedBoxHeight = 0;
    Keep keepWithNextActive = Keep.KEEP_AUTO;

    LinkedList<ListElement> returnList = new LinkedList<ListElement>();
    while ((step = getNextStep(elementLists, start, end, partialHeights)) > 0) {

        if (end[0] + 1 == elementLists[0].size()) {
            keepWithNextActive = keepWithNextActive.compare(keepWithNextPendingOnLabel);
        }
        if (end[1] + 1 == elementLists[1].size()) {
            keepWithNextActive = keepWithNextActive.compare(keepWithNextPendingOnBody);
        }

        // compute penalty height and box height
        int penaltyHeight = step + getMaxRemainingHeight(fullHeights, partialHeights) - totalHeight;

        //Additional penalty height from penalties in the source lists
        int additionalPenaltyHeight = 0;
        int stepPenalty = 0;
        KnuthElement endEl = (KnuthElement) elementLists[0].get(end[0]);
        if (endEl instanceof KnuthPenalty) {
            additionalPenaltyHeight = endEl.getWidth();
            stepPenalty = Math.max(stepPenalty, endEl.getPenalty());
        }
        endEl = (KnuthElement) elementLists[1].get(end[1]);
        if (endEl instanceof KnuthPenalty) {
            additionalPenaltyHeight = Math.max(additionalPenaltyHeight, endEl.getWidth());
            stepPenalty = Math.max(stepPenalty, endEl.getPenalty());
        }

        int boxHeight = step - addedBoxHeight - penaltyHeight;
        penaltyHeight += additionalPenaltyHeight; //Add AFTER calculating boxHeight!

        // collect footnote information
        // TODO this should really not be done like this. ListItemLM should remain as
        // footnote-agnostic as possible
        LinkedList<FootnoteBodyLayoutManager> footnoteList = null;
        ListElement el;
        for (int i = 0; i < elementLists.length; i++) {
            for (int j = start[i]; j <= end[i]; j++) {
                el = (ListElement) elementLists[i].get(j);
                if (el instanceof KnuthBlockBox && ((KnuthBlockBox) el).hasAnchors()) {
                    if (footnoteList == null) {
                        footnoteList = new LinkedList<FootnoteBodyLayoutManager>();
                    }
                    footnoteList.addAll(((KnuthBlockBox) el).getFootnoteBodyLMs());
                }
            }
        }

        // add the new elements
        addedBoxHeight += boxHeight;
        ListItemPosition stepPosition = new ListItemPosition(this, start[0], end[0], start[1], end[1]);
        if (footnoteList == null) {
            returnList.add(new KnuthBox(boxHeight, stepPosition, false));
        } else {
            returnList.add(new KnuthBlockBox(boxHeight, footnoteList, stepPosition, false));
        }

        if (addedBoxHeight < totalHeight) {
            Keep keep = keepWithNextActive.compare(getKeepTogether());
            int p = stepPenalty;
            if (p > -KnuthElement.INFINITE) {
                p = Math.max(p, keep.getPenalty());
            }
            returnList.add(new BreakElement(stepPosition, penaltyHeight, p, keep.getContext(), context));
        }
    }

    return returnList;
}
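
The footnoteList handling above pairs addAll with lazy initialization: the LinkedList is only created when the first matching element appears, then extended with addAll on every later match. A stripped-down sketch of that idiom (the gather helper is hypothetical, for illustration only):

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

public class LazyAccumulate {

    // Illustration only: lazily create the result list on the first non-empty batch.
    static List<String> gather(List<List<String>> batches) {
        LinkedList<String> result = null;
        for (List<String> batch : batches) {
            if (!batch.isEmpty()) {
                if (result == null) {
                    result = new LinkedList<String>();
                }
                result.addAll(batch); // append the whole batch at the tail
            }
        }
        return result; // null means nothing was collected, as in the FOP code
    }

    public static void main(String[] args) {
        List<List<String>> batches = Arrays.asList(
                Arrays.asList("fn1"), Collections.<String>emptyList(), Arrays.asList("fn2", "fn3"));
        System.out.println(gather(batches)); // [fn1, fn2, fn3]
    }
}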

From source file:com.mirth.connect.donkey.server.channel.RecoveryTask.java

private Void doCall() throws Exception {
    StorageSettings storageSettings = channel.getStorageSettings();
    Long maxMessageId = null;
    // The number of messages that were attempted to be recovered
    long attemptedMessages = 0L;
    // The number of messages that were successfully recovered
    long recoveredMessages = 0L;

    // The buffer size for each sub-task
    int sourceBufferSize = 1;
    int unfinishedBufferSize = 10;
    int pendingBufferSize = 10;
    // The minimum message Id that can be retrieved for the next query.
    long sourceMinMessageId = 0L;
    long unfinishedMinMessageId = 0L;
    long pendingMinMessageId = 0L;
    // The completed status of each sub-task
    boolean sourceComplete = false;
    boolean unfinishedComplete = false;
    boolean pendingComplete = false;
    // The queue buffer for each sub-task
    LinkedList<ConnectorMessage> sourceConnectorMessages = new LinkedList<ConnectorMessage>();
    LinkedList<Message> unfinishedMessages = new LinkedList<Message>();
    LinkedList<Message> pendingMessages = new LinkedList<Message>();

    do {
        ThreadUtils.checkInterruptedStatus();
        DonkeyDao dao = channel.getDaoFactory().getDao();

        try {
            if (maxMessageId == null) {
                // Cache the max messageId of the channel to be used in the query below
                maxMessageId = dao.getMaxMessageId(channel.getChannelId());
            }

            if (!sourceComplete && sourceConnectorMessages.isEmpty()) {
                // Fill the buffer
                sourceConnectorMessages
                        .addAll(dao.getConnectorMessages(channel.getChannelId(), channel.getServerId(), 0,
                                Status.RECEIVED, 0, sourceBufferSize, sourceMinMessageId, maxMessageId));

                // Mark the sub-task as completed if no messages were retrieved by the query to prevent the query from running again
                if (sourceConnectorMessages.isEmpty()) {
                    sourceComplete = true;
                } else {
                    /*
                     * If the source queue is on, these messages are usually ignored. Therefore
                     * we only retrieve one of these messages until we know for sure that we'll
                     * need to recover them.
                     */
                    sourceBufferSize = 100;
                }
            }

            if (!unfinishedComplete && unfinishedMessages.isEmpty()) {
                // Fill the buffer
                unfinishedMessages.addAll(dao.getUnfinishedMessages(channel.getChannelId(),
                        channel.getServerId(), unfinishedBufferSize, unfinishedMinMessageId));

                // Mark the sub-task as completed if no messages were retrieved by the query to prevent the query from running again
                if (unfinishedMessages.isEmpty()) {
                    unfinishedComplete = true;
                }
            }

            if (!pendingComplete && pendingMessages.isEmpty()) {
                // Fill the buffer
                pendingMessages.addAll(dao.getPendingConnectorMessages(channel.getChannelId(),
                        channel.getServerId(), pendingBufferSize, pendingMinMessageId));

                // Mark the sub-task as completed if no messages were retrieved by the query to prevent the query from running again
                if (pendingMessages.isEmpty()) {
                    pendingComplete = true;
                }
            }
        } finally {
            dao.close();
        }

        // Retrieve the first message of each sub-task
        ConnectorMessage sourceConnectorMessage = sourceConnectorMessages.peekFirst();
        Message unfinishedMessage = unfinishedMessages.peekFirst();
        Message pendingMessage = pendingMessages.peekFirst();

        if (!storageSettings.isMessageRecoveryEnabled()) {
            sourceComplete = true;
            unfinishedComplete = true;
            pendingComplete = true;
            if (unfinishedMessage != null || pendingMessage != null || (sourceConnectorMessage != null
                    && channel.getSourceConnector().isRespondAfterProcessing())) {
                logger.info("Incomplete messages found for channel " + channel.getName() + " ("
                        + channel.getChannelId()
                        + ") but message storage settings do not support recovery. Skipping recovery task.");
            }
        } else {
            Long messageId = null;

            try {
                /*
                 * Perform a 3-way merge. The sub-task that has the lowest messageId will be
                 * executed first. However it is possible for the unfinishedMessage and
                 * pendingMessage to have the same messageId. In these cases the unfinished
                 * sub-task should be executed and the pending sub-task should be ignored
                 */
                if (sourceConnectorMessage != null
                        && (unfinishedMessage == null
                                || sourceConnectorMessage.getMessageId() < unfinishedMessage.getMessageId())
                        && (pendingMessage == null
                                || sourceConnectorMessage.getMessageId() < pendingMessage.getMessageId())) {
                    if (!channel.getSourceConnector().isRespondAfterProcessing() && unfinishedComplete
                            && pendingComplete) {
                        /*
                         * If the other two sub-tasks are completed already and the source queue
                         * is enabled for this channel, then there is no need to continue
                         * recovering source RECEIVED messages because they will be picked up by
                         * the source queue.
                         */
                        sourceComplete = true;
                    } else {
                        // Store the messageId so we can log it out if an exception occurs
                        messageId = sourceConnectorMessage.getMessageId();
                        // Remove the message from the buffer and update the minMessageId
                        sourceMinMessageId = sourceConnectorMessages.pollFirst().getMessageId() + 1;

                        if (attemptedMessages++ == 0) {
                            logger.info("Starting message recovery for channel " + channel.getName() + " ("
                                    + channel.getChannelId() + "). Incomplete messages found.");
                        }

                        // Execute the recovery process for this message
                        channel.process(sourceConnectorMessage, true);
                        // Use this to decrement the queue size
                        channel.getSourceQueue().decrementSize();
                        // Increment the number of successfully recovered messages
                        recoveredMessages++;
                    }
                } else if (unfinishedMessage != null && (pendingMessage == null
                        || unfinishedMessage.getMessageId() <= pendingMessage.getMessageId())) {
                    // Store the messageId so we can log it out if an exception occurs
                    messageId = unfinishedMessage.getMessageId();
                    // Remove the message from the buffer and update the minMessageId
                    unfinishedMinMessageId = unfinishedMessages.pollFirst().getMessageId() + 1;

                    // If the unfinishedMessage and pendingMessage have the same messageId, remove the pendingMessage from the buffer
                    if (pendingMessage != null
                            && unfinishedMessage.getMessageId() == pendingMessage.getMessageId()) {
                        pendingMinMessageId = pendingMessages.pollFirst().getMessageId() + 1;
                        pendingMessage = pendingMessages.peekFirst();
                    }

                    if (attemptedMessages++ == 0) {
                        logger.info("Starting message recovery for channel " + channel.getName() + " ("
                                + channel.getChannelId() + "). Incomplete messages found.");
                    }

                    // Execute the recovery process for this message
                    recoverUnfinishedMessage(unfinishedMessage);
                    // Increment the number of successfully recovered messages
                    recoveredMessages++;
                } else if (pendingMessage != null) {
                    // Store the messageId so we can log it out if an exception occurs
                    messageId = pendingMessage.getMessageId();
                    // Remove the message from the buffer and update the minMessageId
                    pendingMinMessageId = pendingMessages.pollFirst().getMessageId() + 1;

                    if (attemptedMessages++ == 0) {
                        logger.info("Starting message recovery for channel " + channel.getName() + " ("
                                + channel.getChannelId() + "). Incomplete messages found.");
                    }

                    // Execute the recovery process for this message
                    recoverPendingMessage(pendingMessage);
                    // Increment the number of successfully recovered messages
                    recoveredMessages++;
                }
            } catch (InterruptedException e) {
                // This should only occur if a halt was requested, so stop the entire recovery task
                throw e;
            } catch (Exception e) {
                /*
                 * If an exception occurs we skip the message and log an error. This is to
                 * prevent one bad exception or message from locking the entire channel.
                 * 
                 * If a non-Exception gets thrown (OutOfMemoryError, etc.) then it will
                 * intentionally not be caught here and the recovery task will be stopped.
                 */
                logger.error("Failed to recover message " + messageId + " for channel " + channel.getName()
                        + " (" + channel.getChannelId() + "): \n" + ExceptionUtils.getStackTrace(e));
            }
        }
    } while (!unfinishedComplete || !pendingComplete || !sourceComplete);

    if (attemptedMessages > 0) {
        logger.info("Completed message recovery for channel " + channel.getName() + " ("
                + channel.getChannelId() + "). Successfully recovered " + recoveredMessages + " out of "
                + attemptedMessages + " messages.");
    }

    return null;
}
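
The recovery task above treats each LinkedList as a small paging buffer: when a buffer runs dry, addAll refills it from a query, and peekFirst/pollFirst consume it one message at a time while the minimum id advances. A reduced sketch of that refill pattern (fetchPage is a hypothetical stand-in for the DAO calls):

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

public class PagedBuffer {
    private static final int PAGE_SIZE = 3;
    private static final long TOTAL = 7L;

    // Hypothetical stand-in for a DAO query: up to PAGE_SIZE ids starting at minId.
    static List<Long> fetchPage(long minId) {
        List<Long> page = new ArrayList<Long>();
        for (long id = minId; id < minId + PAGE_SIZE && id < TOTAL; id++) {
            page.add(id);
        }
        return page;
    }

    public static void main(String[] args) {
        LinkedList<Long> buffer = new LinkedList<Long>();
        long minId = 0L;
        boolean complete = false;

        while (!complete || !buffer.isEmpty()) {
            if (!complete && buffer.isEmpty()) {
                buffer.addAll(fetchPage(minId)); // refill the buffer in one call
                if (buffer.isEmpty()) {
                    complete = true;             // nothing left to fetch
                    continue;
                }
            }
            Long id = buffer.pollFirst();        // consume from the head
            minId = id + 1;                      // next query starts after this id
            System.out.println("processing " + id);
        }
    }
}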

From source file:org.apache.zeppelin.interpreter.InterpreterSettingManager.java

private List<String> getNoteInterpreterSettingBinding(String noteId) {
    LinkedList<String> bindings = new LinkedList<>();
    synchronized (interpreterSettings) {
        List<String> settingIds = interpreterBindings.get(noteId);
        if (settingIds != null) {
            bindings.addAll(settingIds);
        }
    }
    return bindings;
}

From source file:solidbase.core.UpgradeFile.java

/**
 * Retrieves all versions that are reachable from the given source version. The current version is also considered.
 *
 * @param source The source version.
 * @param targeting Already targeting a specific version.
 * @param downgradesAllowed Allow downgrades.
 * @param result This set gets filled with all versions that are reachable from the given source version.
 */
protected void collectReachableVersions(String source, String targeting, boolean downgradesAllowed,
        Set<String> result) {
    if (!this.versions.contains(source))
        throw new FatalException("The current database version "
                + StringUtils.defaultString(source, "<no version>")
                + " is not available in the upgrade file. Maybe this version is deprecated or the wrong upgrade file is used.");

    if (targeting == null)
        result.add(source); // The source is reachable

    Collection<UpgradeSegment> segments = this.segments.get(source); // Get all segments with the given source
    if (segments == null)
        return;

    // Queue contains segments that await processing
    LinkedList<UpgradeSegment> queue = new LinkedList<UpgradeSegment>();

    // Fill queue with segments
    if (targeting != null) {
        for (UpgradeSegment segment : segments)
            if (targeting.equals(segment.getTarget()))
                queue.add(segment); // Add segment to the end of the list
        if (queue.isEmpty())
            throw new FatalException("The database is incompletely upgraded to version " + targeting
                    + ", but that version is not reachable from version "
                    + StringUtils.defaultString(source, "<no version>"));
    } else
        queue.addAll(segments);

    // Process the queue
    while (!queue.isEmpty()) {
        UpgradeSegment segment = queue.removeFirst(); // pop() is not available in java 5
        if (!result.contains(segment.getTarget())) // Already there?
            if (downgradesAllowed || !segment.isDowngrade()) // Downgrades allowed?
            {
                result.add(segment.getTarget());
                if (!segment.isOpen()) // Stop when segment is open.
                {
                    segments = this.segments.get(segment.getTarget()); // Add the next to the queue
                    if (segments != null)
                        queue.addAll(segments); // Add segments to the end of the list
                }
            }
    }
}

From source file:edu.ur.ir.user.service.DefaultUserFileSystemService.java

/** 
 * @see edu.ur.ir.user.UserFileSystemService#getAllFoldersNotInChildFolders(java.util.List, 
 * java.lang.Long, java.lang.Long)
 */
public List<PersonalFolder> getAllFoldersNotInChildFolders(List<Long> folders, Long userId,
        Long parentFolderId) {

    HashMap<Long, LinkedList<PersonalFolder>> foldersGroupedByRoot = new HashMap<Long, LinkedList<PersonalFolder>>();
    LinkedList<Long> rootFolderIds = new LinkedList<Long>();
    // get the folder id's
    for (Long folderId : folders) {
        // find the folder
        PersonalFolder p = getPersonalFolder(folderId, false);

        // see if we already have a group for its root folder; if not, add it
        LinkedList<PersonalFolder> folderGroup = foldersGroupedByRoot.get(p.getTreeRoot().getId());
        if (folderGroup == null) {
            rootFolderIds.add(p.getTreeRoot().getId());
            folderGroup = new LinkedList<PersonalFolder>();
            folderGroup.add(p);
            foldersGroupedByRoot.put(p.getTreeRoot().getId(), folderGroup);
        } else {
            folderGroup.add(p);
        }
    }

    // for each set of excluded folders for a root folder owned by the user execute the query.
    Iterator<Long> rootFolderIterator = foldersGroupedByRoot.keySet().iterator();
    LinkedList<PersonalFolder> availableFolders = new LinkedList<PersonalFolder>();

    while (rootFolderIterator.hasNext()) {
        Long rootFolderId = rootFolderIterator.next();
        List<PersonalFolder> folderGroup = foldersGroupedByRoot.get(rootFolderId);
        availableFolders
                .addAll(personalFolderDAO.getAllFoldersNotInChildFolders(folderGroup, userId, rootFolderId));

        if (log.isDebugEnabled()) {
            log.debug("Current available folders");
            for (PersonalFolder f : availableFolders) {
                log.debug("Folder id = " + f.getId());
            }
        }
    }

    // all other root folders can be added
    if (log.isDebugEnabled()) {
        for (Long id : rootFolderIds) {
            log.debug("Adding the following id " + id);
        }
    }
    availableFolders.addAll(personalFolderDAO.getAllOtherRootFolders(rootFolderIds, userId));
    return availableFolders;

}

From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java

/**
 * @param start
 * @param factors
 * @return list of factors, sorted from simplest (fewest values among the biomaterials passed in) to least
 *         simple
 */
public static List<ExperimentalFactor> orderFactorsByExperimentalDesign(List<BioMaterial> start,
        Collection<ExperimentalFactor> factors) {

    if (factors == null || factors.isEmpty()) {
        log.warn("No factors supplied for sorting");
        return new LinkedList<ExperimentalFactor>();
    }

    LinkedList<ExperimentalFactor> sortedFactors = new LinkedList<ExperimentalFactor>();
    Collection<ExperimentalFactor> factorsToTake = new HashSet<ExperimentalFactor>(factors);
    while (!factorsToTake.isEmpty()) {
        ExperimentalFactor simplest = chooseSimplestFactor(start, factorsToTake);
        if (simplest == null) {
            // none of the factors have more than one factor value. One-sided t-tests ...

            /*
             * This assertion isn't right -- we now allow this, though we can only have ONE such constant factor.
             * See bug 2390. Unless we are dealing with a subset, in which case there can be any number of constant
             * factors within the subset.
             */
            // assert factors.size() == 1 :
            // "It's possible to have just one factor value, but only if there is only one factor.";

            sortedFactors.addAll(factors);
            return sortedFactors;
        }
        sortedFactors.addLast(simplest);
        factorsToTake.remove(simplest);
    }

    return sortedFactors;
}

From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java

/**
 * @param start
 * @param factors
 * @return list of factors, sorted from simplest (fewest values among the biomaterials passed in) to least
 *         simple
 */
public static List<ExperimentalFactor> orderFactorsByExperimentalDesignVO(List<BioMaterialValueObject> start,
        Collection<ExperimentalFactor> factors) {

    if (factors == null || factors.isEmpty()) {
        log.warn("No factors supplied for sorting");
        return new LinkedList<ExperimentalFactor>();
    }

    LinkedList<ExperimentalFactor> sortedFactors = new LinkedList<ExperimentalFactor>();
    Collection<ExperimentalFactor> factorsToTake = new HashSet<ExperimentalFactor>(factors);
    while (!factorsToTake.isEmpty()) {
        ExperimentalFactor simplest = chooseSimplestFactorVO(start, factorsToTake);
        if (simplest == null) {
            // none of the factors have more than one factor value. One-sided t-tests ...

            /*
             * This assertion isn't right -- we now allow this, though we can only have ONE such constant factor.
             * See bug 2390. Unless we are dealing with a subset, in which case there can be any number of constant
             * factors within the subset.
             */
            // assert factors.size() == 1 :
            // "It's possible to have just one factor value, but only if there is only one factor.";

            sortedFactors.addAll(factors);
            return sortedFactors;
        }
        sortedFactors.addLast(simplest);
        factorsToTake.remove(simplest);
    }

    return sortedFactors;
}

From source file:org.obm.push.mail.MailBackendImpl.java

@Override
public List<ItemChange> fetch(UserDataRequest udr, CollectionId collectionId, List<ServerId> itemIds,
        SyncCollectionOptions collectionOptions) throws ProcessingEmailException {

    LinkedList<ItemChange> fetchs = new LinkedList<ItemChange>();
    Map<CollectionId, Collection<Long>> emailUids = getEmailUidByCollectionId(itemIds);
    try {
        Folder folder = folderSnapshotDao.get(udr.getUser(), udr.getDevice(), collectionId);
        for (Entry<CollectionId, Collection<Long>> entry : emailUids.entrySet()) {
            Collection<Long> uids = entry.getValue();
            fetchs.addAll(fetchItems(udr, folder, uids, collectionOptions));
        }
    } catch (CollectionNotFoundException e) {
        logger.error("fetchItems : collection {} not found !", collectionId);
    }
    return fetchs;
}