Example usage for java.util LinkedList isEmpty

List of usage examples for java.util LinkedList isEmpty

Introduction

On this page you can find example usages of java.util.LinkedList.isEmpty().

Prototype

boolean isEmpty();

Source Link

Document

Returns true if this list contains no elements.

Usage

From source file:org.commonjava.maven.ext.io.PomIO.java

/**
 * Discovers the POM hierarchy reachable from {@code topPom} by breadth-first traversal:
 * parents are followed via {@code <relativePath>} (only while they remain under the
 * top-level directory) and children via {@code <modules>} entries. Poms whose key cannot
 * be determined are skipped as templates. After the walk, the top-level parent and any
 * pom whose parent was not encountered are flagged as inheritance roots.
 *
 * @param topPom the pom file to start peeking from
 * @return all peeked poms, in discovery order
 * @throws ManipulationException if reading or canonicalising any pom file fails
 */
private List<PomPeek> peekAtPomHierarchy(final File topPom) throws ManipulationException {
    final List<PomPeek> peeked = new ArrayList<>();

    try {
        // FIFO work queue of pom files still to inspect.
        final LinkedList<File> pendingPoms = new LinkedList<>();
        pendingPoms.add(topPom.getCanonicalFile());

        // Parents resolving outside this directory are treated as external and not followed.
        final String topDir = topPom.getAbsoluteFile().getParentFile().getCanonicalPath();

        // Guards against re-processing a pom reachable via multiple paths.
        final Set<File> seen = new HashSet<>();

        File topLevelParent = topPom;

        while (!pendingPoms.isEmpty()) {
            final File pom = pendingPoms.removeFirst();
            seen.add(pom);

            logger.debug("PEEK: " + pom);

            final PomPeek peek = new PomPeek(pom);
            final ProjectVersionRef key = peek.getKey();
            if (key != null) {
                peeked.add(peek);

                final File dir = pom.getParentFile();

                // Follow the declared parent pom via its relativePath, if any.
                final String relPath = peek.getParentRelativePath();
                if (relPath != null) {
                    logger.debug("Found parent relativePath: " + relPath + " in pom: " + pom);
                    File parent = new File(dir, relPath);
                    if (parent.isDirectory()) {
                        // relativePath may point at a directory; assume a pom.xml inside it.
                        parent = new File(parent, "pom.xml");
                    }

                    parent = parent.getCanonicalFile();
                    if (parent.getParentFile().getCanonicalPath().startsWith(topDir) && parent.exists()
                            && !seen.contains(parent) && !pendingPoms.contains(parent)) {
                        // Last qualifying parent found during the walk becomes the candidate root.
                        topLevelParent = parent;
                        logger.debug("Possible top level parent " + parent);
                        pendingPoms.add(parent);
                    } else {
                        logger.debug("Skipping reference to non-existent parent relativePath: '" + relPath
                                + "' in: " + pom);
                    }
                }

                // Enqueue every declared module (child) pom that exists and is unseen.
                final Set<String> modules = peek.getModules();
                if (modules != null && !modules.isEmpty()) {
                    for (final String module : modules) {
                        logger.debug("Found module: " + module + " in pom: " + pom);

                        File modPom = new File(dir, module);
                        if (modPom.isDirectory()) {
                            modPom = new File(modPom, "pom.xml");
                        }

                        if (modPom.exists() && !seen.contains(modPom) && !pendingPoms.contains(modPom)) {
                            pendingPoms.addLast(modPom);
                        } else {
                            logger.debug(
                                    "Skipping reference to non-existent module: '" + module + "' in: " + pom);
                        }
                    }
                }
            } else {
                // No key could be read; the pom is treated as a template and ignored.
                logger.debug("Skipping " + pom + " as its a template file.");
            }
        }

        // Keys of everything peeked, used by the standalone-pom check below.
        final HashSet<ProjectVersionRef> projectrefs = new HashSet<>();

        for (final PomPeek p : peeked) {
            projectrefs.add(p.getKey());

            if (p.getPom().equals(topLevelParent)) {
                logger.debug("Setting top level parent to " + p.getPom() + " :: " + p.getKey());
                p.setInheritanceRoot(true);
            }
        }

        // A pom with no parent, or whose parent lies outside this hierarchy, is a root too.
        for (final PomPeek p : peeked) {
            if (p.getParentKey() == null || !seenThisParent(projectrefs, p.getParentKey())) {
                logger.debug("Found a standalone pom " + p.getPom() + " :: " + p.getKey());
                p.setInheritanceRoot(true);
            }
        }
    } catch (final IOException e) {
        throw new ManipulationException("Problem peeking at POMs.", e);
    }

    return peeked;
}

From source file:com.twitter.distributedlog.service.balancer.ClusterBalancer.java

/**
 * Moves streams from the host at index {@code hostIdxMoveFrom} to the host at index
 * {@code hostIdxMoveTo}: the source is drained down to {@code moveFromLowWaterMark}
 * streams, and a target already holding {@code moveToHighWaterMark} streams is skipped
 * by walking {@code hostIdxMoveTo} downwards. The move stops when the target index
 * would meet or cross the source index.
 *
 * NOTE(review): assumes {@code hosts} is ordered so indices between "from" and "to"
 * are valid move targets — confirm against the caller.
 *
 * @param rateLimiter optional limiter acquired once per stream moved
 */
void moveStreams(List<Host> hosts, AtomicInteger hostIdxMoveFrom, int moveFromLowWaterMark,
        AtomicInteger hostIdxMoveTo, int moveToHighWaterMark, Optional<RateLimiter> rateLimiter) {
    // Sanity-check the index window: both in range, "from" strictly before "to".
    if (hostIdxMoveFrom.get() < 0 || hostIdxMoveFrom.get() >= hosts.size() || hostIdxMoveTo.get() < 0
            || hostIdxMoveTo.get() >= hosts.size() || hostIdxMoveFrom.get() >= hostIdxMoveTo.get()) {
        return;
    }

    if (logger.isDebugEnabled()) {
        logger.debug(
                "Moving streams : hosts = {}, from = {}, to = {} : from_low_water_mark = {}, to_high_water_mark = {}",
                new Object[] { hosts, hostIdxMoveFrom.get(), hostIdxMoveTo.get(), moveFromLowWaterMark,
                        moveToHighWaterMark });
    }

    Host hostMoveFrom = hosts.get(hostIdxMoveFrom.get());
    int numStreamsOnFromHost = hostMoveFrom.streams.size();
    if (numStreamsOnFromHost <= moveFromLowWaterMark) {
        // do nothing: source already at or below the low water mark
        return;
    }

    int numStreamsToMove = numStreamsOnFromHost - moveFromLowWaterMark;
    // Shuffle so the subset of streams moved is chosen at random.
    LinkedList<String> streamsToMove = new LinkedList<String>(hostMoveFrom.streams);
    Collections.shuffle(streamsToMove);

    if (logger.isDebugEnabled()) {
        logger.debug("Try to move {} streams from host {} : streams = {}",
                new Object[] { numStreamsToMove, hostMoveFrom.address, streamsToMove });
    }

    while (numStreamsToMove-- > 0 && !streamsToMove.isEmpty()) {
        if (rateLimiter.isPresent()) {
            rateLimiter.get().acquire();
        }

        // pick a host to move to; walk the target index down past full hosts
        Host hostMoveTo = hosts.get(hostIdxMoveTo.get());
        while (hostMoveTo.streams.size() >= moveToHighWaterMark) {
            int hostIdx = hostIdxMoveTo.decrementAndGet();
            logger.info("move to host : {}, from {}", hostIdx, hostIdxMoveFrom.get());
            if (hostIdx <= hostIdxMoveFrom.get()) {
                // ran out of targets: every candidate is at its high water mark
                return;
            } else {
                hostMoveTo = hosts.get(hostIdx);
                if (logger.isDebugEnabled()) {
                    logger.debug("Target host to move moved to host {} @ {}", hostIdx, hostMoveTo);
                }
            }
        }

        // pick a stream
        String stream = streamsToMove.remove();

        // move the stream; only update bookkeeping if the move actually succeeded
        if (moveStream(stream, hostMoveFrom, hostMoveTo)) {
            hostMoveFrom.streams.remove(stream);
            hostMoveTo.streams.add(stream);
        }
    }

}

From source file:org.apache.distributedlog.service.balancer.ClusterBalancer.java

/**
 * Rebalances streams between two hosts: drains the host at index {@code hostIdxMoveFrom}
 * down to {@code moveFromLowWaterMark} streams, moving each stream to the host at index
 * {@code hostIdxMoveTo}. A target already at {@code moveToHighWaterMark} is skipped by
 * walking the target index downwards; the move stops once it would meet or cross the
 * source index.
 *
 * @param rateLimiter optional limiter acquired once per stream moved
 */
void moveStreams(List<Host> hosts, AtomicInteger hostIdxMoveFrom, int moveFromLowWaterMark,
        AtomicInteger hostIdxMoveTo, int moveToHighWaterMark, Optional<RateLimiter> rateLimiter) {
    // Guard: both indices must be in range and "from" strictly before "to".
    final int fromIdx = hostIdxMoveFrom.get();
    final int toIdx = hostIdxMoveTo.get();
    final boolean indexOutOfRange = fromIdx < 0 || fromIdx >= hosts.size() || toIdx < 0 || toIdx >= hosts.size();
    if (indexOutOfRange || fromIdx >= toIdx) {
        return;
    }

    if (logger.isDebugEnabled()) {
        logger.debug(
                "Moving streams : hosts = {}, from = {}, to = {} :"
                        + " from_low_water_mark = {}, to_high_water_mark = {}",
                new Object[] { hosts, fromIdx, toIdx, moveFromLowWaterMark, moveToHighWaterMark });
    }

    final Host source = hosts.get(fromIdx);
    final int sourceLoad = source.streams.size();
    if (sourceLoad <= moveFromLowWaterMark) {
        // Source already at or below its low water mark; nothing to drain.
        return;
    }

    // Randomize which streams get moved.
    int remaining = sourceLoad - moveFromLowWaterMark;
    final LinkedList<String> candidates = new LinkedList<String>(source.streams);
    Collections.shuffle(candidates);

    if (logger.isDebugEnabled()) {
        logger.debug("Try to move {} streams from host {} : streams = {}",
                new Object[] { remaining, source.address, candidates });
    }

    while (remaining-- > 0 && !candidates.isEmpty()) {
        if (rateLimiter.isPresent()) {
            rateLimiter.get().acquire();
        }

        // Re-read the target index each round: it may have been walked down below.
        Host target = hosts.get(hostIdxMoveTo.get());
        while (target.streams.size() >= moveToHighWaterMark) {
            final int hostIdx = hostIdxMoveTo.decrementAndGet();
            logger.info("move to host : {}, from {}", hostIdx, hostIdxMoveFrom.get());
            if (hostIdx <= hostIdxMoveFrom.get()) {
                // All candidate targets are full; stop rebalancing.
                return;
            }
            target = hosts.get(hostIdx);
            if (logger.isDebugEnabled()) {
                logger.debug("Target host to move moved to host {} @ {}", hostIdx, target);
            }
        }

        final String stream = candidates.remove();

        // Only update bookkeeping when the move actually succeeded.
        if (moveStream(stream, source, target)) {
            source.streams.remove(stream);
            target.streams.add(stream);
        }
    }

}

From source file:org.springframework.core.convert.support.GenericConversionService.java

/**
 * Locates a converter for the given source/target pair by walking the source type's
 * hierarchy and, for each visited class, asking for a target-side match.
 * The LinkedList is used as a FIFO queue (addFirst + removeLast), so the walk is
 * breadth-first.
 *
 * @param sourceType the type being converted from
 * @param targetType the type being converted to
 * @return a matching converter, or {@code null} if none applies
 */
private GenericConverter findConverterForClassPair(TypeDescriptor sourceType, TypeDescriptor targetType) {
    Class<?> sourceObjectType = sourceType.getObjectType();
    if (sourceObjectType.isInterface()) {
        // Interface source: breadth-first over the interface hierarchy only.
        LinkedList<Class<?>> candidates = new LinkedList<Class<?>>();
        candidates.addFirst(sourceObjectType);
        while (!candidates.isEmpty()) {
            Class<?> current = candidates.removeLast();
            if (logger.isTraceEnabled()) {
                logger.trace("Searching for converters indexed by sourceType [" + current.getName() + "]");
            }
            GenericConverter match = getMatchingConverterForTarget(sourceType, targetType,
                    getTargetConvertersForSource(current));
            if (match != null) {
                return match;
            }
            for (Class<?> ifc : current.getInterfaces()) {
                candidates.addFirst(ifc);
            }
        }
        // Interfaces exhausted: fall back to converters registered against Object.
        return getMatchingConverterForTarget(sourceType, targetType, getTargetConvertersForSource(Object.class));
    }

    // Class (non-interface) source: also follow superclasses and array component types.
    LinkedList<Class<?>> candidates = new LinkedList<Class<?>>();
    candidates.addFirst(sourceObjectType);
    while (!candidates.isEmpty()) {
        Class<?> current = candidates.removeLast();
        if (logger.isTraceEnabled()) {
            logger.trace("Searching for converters indexed by sourceType [" + current.getName() + "]");
        }
        GenericConverter match = getMatchingConverterForTarget(sourceType, targetType,
                getTargetConvertersForSource(current));
        if (match != null) {
            return match;
        }
        if (current.isArray()) {
            // For arrays, climb the component type's hierarchy (e.g. Integer[] -> Number[]).
            Class<?> componentType = ClassUtils.resolvePrimitiveIfNecessary(current.getComponentType());
            if (componentType.getSuperclass() != null) {
                candidates.addFirst(Array.newInstance(componentType.getSuperclass(), 0).getClass());
            } else if (componentType.isInterface()) {
                candidates.addFirst(Object[].class);
            }
        } else {
            // Enqueue implemented interface hierarchies, then the superclass.
            for (Class<?> ifc : current.getInterfaces()) {
                addInterfaceHierarchy(ifc, candidates);
            }
            if (current.getSuperclass() != null) {
                candidates.addFirst(current.getSuperclass());
            }
        }
    }
    return null;
}

From source file:org.springframework.core.convert.support.GenericConversionService.java

/**
 * Searches the given source-indexed converter map for a converter matching
 * {@code targetType}, walking the target type's hierarchy breadth-first
 * (the LinkedList acts as a FIFO queue via addFirst + removeLast). Interface
 * targets fall back to converters registered against {@code Object}; class
 * targets also follow array component hierarchies and superclasses.
 *
 * @param sourceType the type being converted from
 * @param targetType the type being converted to
 * @param converters candidate converters indexed by target class
 * @return a matching converter, or {@code null} if none applies
 */
private GenericConverter getMatchingConverterForTarget(TypeDescriptor sourceType, TypeDescriptor targetType,
        Map<Class<?>, MatchableConverters> converters) {

    Class<?> targetObjectType = targetType.getObjectType();
    if (targetObjectType.isInterface()) {
        LinkedList<Class<?>> classQueue = new LinkedList<Class<?>>();
        classQueue.addFirst(targetObjectType);
        while (!classQueue.isEmpty()) {
            Class<?> currentClass = classQueue.removeLast();
            if (logger.isTraceEnabled()) {
                logger.trace("and indexed by targetType [" + currentClass.getName() + "]");
            }
            MatchableConverters matchable = converters.get(currentClass);
            GenericConverter converter = matchConverter(matchable, sourceType, targetType);
            if (converter != null) {
                return converter;
            }
            // enqueue super-interfaces for the next round of the walk
            Class<?>[] interfaces = currentClass.getInterfaces();
            for (Class<?> ifc : interfaces) {
                classQueue.addFirst(ifc);
            }
        }
        if (logger.isTraceEnabled()) {
            logger.trace("and indexed by [java.lang.Object]");
        }
        // interfaces exhausted: last chance is a converter registered for Object
        return matchConverter(converters.get(Object.class), sourceType, targetType);
    } else {
        LinkedList<Class<?>> classQueue = new LinkedList<Class<?>>();
        classQueue.addFirst(targetObjectType);
        while (!classQueue.isEmpty()) {
            Class<?> currentClass = classQueue.removeLast();
            if (logger.isTraceEnabled()) {
                logger.trace("and indexed by targetType [" + currentClass.getName() + "]");
            }
            MatchableConverters matchable = converters.get(currentClass);
            GenericConverter converter = matchConverter(matchable, sourceType, targetType);
            if (converter != null) {
                return converter;
            }
            if (currentClass.isArray()) {
                // for arrays, climb the component type's hierarchy (e.g. Integer[] -> Number[])
                Class<?> componentType = ClassUtils
                        .resolvePrimitiveIfNecessary(currentClass.getComponentType());
                if (componentType.getSuperclass() != null) {
                    classQueue.addFirst(Array.newInstance(componentType.getSuperclass(), 0).getClass());
                } else if (componentType.isInterface()) {
                    classQueue.addFirst(Object[].class);
                }
            } else {
                // enqueue implemented interface hierarchies, then the superclass
                Class<?>[] interfaces = currentClass.getInterfaces();
                for (Class<?> ifc : interfaces) {
                    addInterfaceHierarchy(ifc, classQueue);
                }
                if (currentClass.getSuperclass() != null) {
                    classQueue.addFirst(currentClass.getSuperclass());
                }
            }
        }
        return null;
    }
}

From source file:org.jdto.util.expression.Expression.java

/**
 * Parses an infix expression into an evaluable term tree.
 *
 * Shunting-yard style conversion: operators are staged on a precedence stack while
 * operands are appended to a space-separated postfix string, which is then handed to
 * {@code parsePostfixExpr} to build the terms. Synchronized because it mutates the
 * shared {@code position} cursor used by {@code readToken}.
 *
 * @param expression the infix expression to parse
 * @return the root term of the parsed expression
 * @throws IllegalArgumentException if the expression cannot be fully parsed
 *         (e.g. unbalanced parentheses)
 */
private synchronized ExpressionTerm parseExpression(String expression) {

    position = 0;

    LinkedList<String> precedenceStack = new LinkedList<String>();

    //add the first imaginary parentheses.
    precedenceStack.push("(");

    //append a closing parenthesis to the expression, matching the imaginary one.
    expression = expression + ")";

    //the previous token.
    String token = null;

    StringBuilder postFix = new StringBuilder();

    // Walk the expression until the imaginary parenthesis closes or input runs out.
    while (!precedenceStack.isEmpty() && position < expression.length()) {

        //use the token from previous iteration
        token = readToken(token, expression);

        //if is a left parentheses
        if ("(".equals(token)) {
            precedenceStack.push(token);
            postFix.append(" "); //a separation
            continue;
        }

        //check if it is an operator
        Operator operator = Operator.getOperaorByString(token);
        if (operator != null) {
            postFix.append(" "); //add a seprarator char to the result.
            // pop stacked operators that take precedence over this one
            // (see Operator.precedence) before pushing it.
            while (operator.precedence(precedenceStack.peek())) {
                postFix.append(precedenceStack.pop());
                postFix.append(" ");
            }
            precedenceStack.push(token);
            continue;
        }

        //check if it is a right parenthesis
        if (")".equals(token)) {
            postFix.append(" "); //add a separator to the result.
            // flush stacked operators down to the matching "("
            while (!"(".equals(precedenceStack.peek())) {
                String stackElement = precedenceStack.pop();

                if (isOperator(stackElement)) {
                    postFix.append(stackElement);
                    postFix.append(" ");
                }
            }
            //remove the extra parenthesis
            precedenceStack.pop();
            continue;
        }

        //if everything else fails, just add the token to the postfix expr
        postFix.append(token);
        //and we're done with the loop here
    }

    //at this point we need to convert the postfix expression into terms.
    if (!precedenceStack.isEmpty()) {
        // a leftover "(" means the parentheses never balanced
        throw new IllegalArgumentException("Could not parse expression!");
    }

    return parsePostfixExpr(postFix.toString());
}

From source file:de.hasait.clap.CLAP.java

/**
 * Parses the given command line arguments into a result.
 *
 * Performs a breadth-first search over parse contexts: each context consumes tokens
 * through the root node, which may fork into several alternative follow-up contexts.
 * Contexts that consume every token are then validated; exactly one distinct valid
 * result must remain.
 *
 * @param pArgs the raw command line arguments
 * @return the single successful parse result
 * @throws CLAPException if some token cannot be parsed in any context, if all fully
 *         parsed contexts fail validation, or if more than one distinct result remains
 */
public CLAPResult parse(final String... pArgs) {
    final Set<CLAPParseContext> contextsWithInvalidToken = new HashSet<CLAPParseContext>();
    final List<CLAPParseContext> parsedContexts = new ArrayList<CLAPParseContext>();
    final LinkedList<CLAPParseContext> activeContexts = new LinkedList<CLAPParseContext>();
    activeContexts.add(new CLAPParseContext(this, pArgs));
    while (!activeContexts.isEmpty()) {
        final CLAPParseContext context = activeContexts.removeFirst();
        if (context.hasMoreTokens()) {
            final CLAPParseContext[] result = _root.parse(context);
            if (result != null) {
                for (final CLAPParseContext nextContext : result) {
                    activeContexts.add(nextContext);
                }
            } else {
                // no node could consume the current token
                contextsWithInvalidToken.add(context);
            }
        } else {
            parsedContexts.add(context);
        }
    }
    if (parsedContexts.isEmpty()) {
        // Report only the invalid tokens of the contexts that consumed the most arguments.
        int maxArgIndex = Integer.MIN_VALUE;
        final Set<String> invalidTokensOfBestContexts = new HashSet<String>();
        for (final CLAPParseContext context : contextsWithInvalidToken) {
            final int currentArgIndex = context.getCurrentArgIndex();
            if (currentArgIndex > maxArgIndex) {
                invalidTokensOfBestContexts.clear();
            }
            if (currentArgIndex >= maxArgIndex) {
                maxArgIndex = currentArgIndex;
                invalidTokensOfBestContexts.add(context.currentArg());
            }
        }
        throw new CLAPException(
                nls(NLSKEY_CLAP_ERROR_INVALID_TOKEN_LIST, StringUtils.join(invalidTokensOfBestContexts, ", "))); //$NON-NLS-1$
    }

    // Validate every fully-parsed context; collect results and per-context errors.
    final Map<CLAPParseContext, List<String>> contextErrorMessages = new HashMap<CLAPParseContext, List<String>>();
    final Set<CLAPResultImpl> results = new LinkedHashSet<CLAPResultImpl>();
    for (final CLAPParseContext context : parsedContexts) {
        final List<String> errorMessages = new ArrayList<String>();
        _root.validate(context, errorMessages);
        if (errorMessages.isEmpty()) {
            final CLAPResultImpl result = new CLAPResultImpl();
            _root.fillResult(context, result);
            results.add(result);
        } else {
            contextErrorMessages.put(context, errorMessages);
        }
    }

    if (results.isEmpty()) {
        // All contexts failed validation: report messages from those with the fewest errors.
        int minErrorMessages = Integer.MAX_VALUE;
        final List<String> errorMessagesOfBestContexts = new ArrayList<String>();
        for (final Entry<CLAPParseContext, List<String>> entry : contextErrorMessages.entrySet()) {
            final int countErrorMessages = entry.getValue().size();
            if (countErrorMessages < minErrorMessages) {
                errorMessagesOfBestContexts.clear();
            }
            if (countErrorMessages <= minErrorMessages) {
                minErrorMessages = countErrorMessages;
                errorMessagesOfBestContexts
                        .add(StringUtils.join(entry.getValue(), nls(NLSKEY_CLAP_ERROR_ERROR_MESSAGE_SPLIT)));
            }
        }
        throw new CLAPException(nls(NLSKEY_CLAP_ERROR_VALIDATION_FAILED,
                StringUtils.join(errorMessagesOfBestContexts, nls(NLSKEY_CLAP_ERROR_ERROR_MESSAGES_SPLIT))));
    }

    if (results.size() > 1) {
        // Distinct results from different parse paths: the command line is ambiguous.
        throw new CLAPException(nls(NLSKEY_CLAP_ERROR_AMBIGUOUS_RESULT));
    }

    return results.iterator().next();
}

From source file:library.memorymonitor.ProcfsBasedProcessTree.java

/**
 * Get the process-tree with latest state. If the root-process is not alive,
 * an empty tree will be returned.
 *
 * @return the process-tree with latest state.
 */
public ProcfsBasedProcessTree getProcessTree() {
    if (!pid.equals(deadPid)) {
        // Get the list of processes
        List<String> processList = getProcessList();

        Map<String, ProcessInfo> allProcessInfo = new HashMap<String, ProcessInfo>();

        // cache the processTree to get the age for processes
        Map<String, ProcessInfo> oldProcs = new HashMap<String, ProcessInfo>(processTree);
        processTree.clear();

        ProcessInfo me = null;
        for (String proc : processList) {
            // Get information for each process
            ProcessInfo pInfo = new ProcessInfo(proc);
            if (constructProcessInfo(pInfo, procfsDir) != null) {
                allProcessInfo.put(proc, pInfo);
                if (proc.equals(this.pid)) {
                    me = pInfo; // cache 'me'
                    processTree.put(proc, pInfo);
                }
            }
        }

        // root process no longer present: return with the (now empty) tree
        if (me == null) {
            return this;
        }

        // Add each process to its parent ("1" is skipped: it has no parent to attach to).
        for (Map.Entry<String, ProcessInfo> entry : allProcessInfo.entrySet()) {
            String pID = entry.getKey();
            if (!pID.equals("1")) {
                ProcessInfo pInfo = entry.getValue();
                ProcessInfo parentPInfo = allProcessInfo.get(pInfo.getPpid());
                if (parentPInfo != null) {
                    parentPInfo.addChild(pInfo);
                }
            }
        }

        // now start constructing the process-tree: BFS from 'me' over the child links
        LinkedList<ProcessInfo> pInfoQueue = new LinkedList<ProcessInfo>();
        pInfoQueue.addAll(me.getChildren());
        while (!pInfoQueue.isEmpty()) {
            ProcessInfo pInfo = pInfoQueue.remove();
            if (!processTree.containsKey(pInfo.getPid())) {
                processTree.put(pInfo.getPid(), pInfo);
            }
            pInfoQueue.addAll(pInfo.getChildren());
        }

        // update age values and compute the number of jiffies since last update
        for (Map.Entry<String, ProcessInfo> procs : processTree.entrySet()) {
            ProcessInfo oldInfo = oldProcs.get(procs.getKey());
            if (procs.getValue() != null) {
                procs.getValue().updateJiffy(oldInfo);
                if (oldInfo != null) {
                    procs.getValue().updateAge(oldInfo);
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            // Log.debug the ProcfsBasedProcessTree
            LOG.debug(this.toString());
        }
    }
    return this;
}

From source file:com.streamsets.pipeline.lib.jdbc.JdbcMultiRowRecordWriter.java

/**
 * Writes every record of one partition to the database using multi-row prepared
 * statements, batching at most {@code maxPrepStmtParameters} bound parameters per
 * statement.
 *
 * Fixes applied in review:
 * - DATETIME fields were bound via {@code java.sql.Date}, which carries only the date
 *   portion and silently dropped the time-of-day; they are now bound as
 *   {@code java.sql.Timestamp}.
 * - The PreparedStatement leaked on every exception path; it is now closed in a
 *   finally block.
 *
 * @param connection open JDBC connection to write through
 * @param partitions records grouped by partition key
 * @param partitionKey key selecting the partition to process
 * @param errorRecords accumulator for per-record errors (unused here; kept for interface)
 * @throws SQLException if statement creation or batch execution fails
 * @throws OnRecordErrorException if the column mapping is empty or a field cannot be bound
 */
@SuppressWarnings("unchecked")
private void processPartition(Connection connection, Multimap<Long, Record> partitions, Long partitionKey,
        List<OnRecordErrorException> errorRecords) throws SQLException, OnRecordErrorException {
    Collection<Record> partition = partitions.get(partitionKey);
    // Fetch the base insert query for this partition.
    SortedMap<String, String> columnsToParameters = getFilteredColumnsToParameters(getColumnsToParameters(),
            partition.iterator().next());

    // put all the records in a queue for consumption
    LinkedList<Record> queue = new LinkedList<>(partition);

    // compute number of rows per batch, bounded by the prepared-statement parameter limit
    if (columnsToParameters.isEmpty()) {
        throw new OnRecordErrorException(Errors.JDBCDEST_22);
    }
    int maxRowsPerBatch = maxPrepStmtParameters / columnsToParameters.size();

    PreparedStatement statement = null;

    // JDBC parameters are indexed starting with 1
    int paramIdx = 1;
    int rowCount = 0;
    try {
        while (!queue.isEmpty()) {
            // we're at the start of a batch
            if (statement == null) {
                // the next batch will have either the max number of records, or however many are left
                statement = generatePreparedStatement(columnsToParameters,
                        Math.min(maxRowsPerBatch, queue.size()), getTableName(), connection);
            }

            // process the next record into the current statement
            Record record = queue.removeFirst();
            for (String column : columnsToParameters.keySet()) {
                Field field = record.get(getColumnsToFields().get(column));
                Field.Type fieldType = field.getType();
                Object value = field.getValue();

                try {
                    switch (fieldType) {
                    case LIST:
                        List<Object> unpackedList = unpackList((List<Field>) value);
                        Array array = connection.createArrayOf(getSQLTypeName(fieldType),
                                unpackedList.toArray());
                        statement.setArray(paramIdx, array);
                        break;
                    case DATE:
                        // java.sql.Date carries the date portion only, which is correct for DATE
                        statement.setObject(paramIdx, new java.sql.Date(field.getValueAsDatetime().getTime()));
                        break;
                    case DATETIME:
                        // BUGFIX: was java.sql.Date, which truncated the time-of-day;
                        // Timestamp preserves it for DATETIME columns
                        statement.setObject(paramIdx,
                                new java.sql.Timestamp(field.getValueAsDatetime().getTime()));
                        break;
                    default:
                        statement.setObject(paramIdx, value, getColumnType(column));
                        break;
                    }
                } catch (SQLException e) {
                    LOG.error(Errors.JDBCDEST_23.getMessage(), column, fieldType.toString(), e);
                    throw new OnRecordErrorException(record, Errors.JDBCDEST_23, column, fieldType.toString());
                }
                ++paramIdx;
            }

            rowCount++;

            // check if we've filled up the current batch
            if (rowCount == maxRowsPerBatch) {
                // time to execute the current batch
                statement.addBatch();
                statement.executeBatch();
                statement.close();
                statement = null;

                // reset our counters
                rowCount = 0;
                paramIdx = 1;
            }
        }

        // flush the final, partially-filled batch; occurs whenever the partition size
        // isn't an exact multiple of maxRowsPerBatch
        if (statement != null) {
            statement.addBatch();
            statement.executeBatch();
        }
    } finally {
        // BUGFIX: close on error paths too, not only on the happy path
        if (statement != null) {
            statement.close();
        }
    }
}

From source file:com.zimbra.cs.mime.Mime.java

/**
 * Flattens the MIME part tree rooted at {@code root} into a list of {@code MPartInfo},
 * visiting parts depth-first (children are spliced onto the front of the work queue).
 * Multipart children receive IMAP-style dotted part names; an empty
 * multipart/appledouble is rewritten as applefile; and if the whole message turns out
 * to be a single empty multipart with a preamble, the preamble is promoted into a
 * synthetic text part.
 *
 * @param root the top-level MIME part to expand
 * @param defaultCharset charset used if the preamble is promoted to a text part
 */
private static List<MPartInfo> listParts(MimePart root, String defaultCharset)
        throws MessagingException, IOException {
    List<MPartInfo> parts = new ArrayList<MPartInfo>();

    LinkedList<MPartInfo> queue = new LinkedList<MPartInfo>();
    queue.add(generateMPartInfo(root, null, "", 0));

    MimeMultipart emptyMultipart = null;
    while (!queue.isEmpty()) {
        MPartInfo mpart = queue.removeFirst();
        MimePart mp = mpart.getMimePart();
        parts.add(mpart);

        String cts = mpart.mContentType;
        boolean isMultipart = cts.startsWith(MimeConstants.CT_MULTIPART_PREFIX);
        boolean isMessage = !isMultipart && cts.equals(MimeConstants.CT_MESSAGE_RFC822);

        if (isMultipart) {
            // IMAP part numbering is screwy: top-level multipart doesn't get a number
            String prefix = mpart.mPartName.length() > 0 ? (mpart.mPartName + '.') : "";
            if (mp instanceof MimeMessage) {
                mpart.mPartName = prefix + "TEXT";
            }
            MimeMultipart multi = getMultipartContent(mp, cts);
            if (multi != null) {
                if (multi.getCount() == 0 && LC.mime_promote_empty_multipart.booleanValue()) {
                    // remember the first empty multipart for possible preamble promotion below
                    if (emptyMultipart == null) {
                        emptyMultipart = multi;
                    }
                    if (MimeConstants.CT_MULTIPART_APPLEDOUBLE.equalsIgnoreCase(getContentType(mp))) {
                        ZimbraLog.misc.debug(
                                "appledouble with no children; assuming it is malformed and really applefile");
                        mpart.mContentType = mpart.mContentType.replace(MimeConstants.CT_MULTIPART_APPLEDOUBLE,
                                MimeConstants.CT_APPLEFILE);
                    }
                }
                mpart.mChildren = new ArrayList<MPartInfo>(multi.getCount());
                for (int i = 1; i <= multi.getCount(); i++) {
                    mpart.mChildren
                            .add(generateMPartInfo((MimePart) multi.getBodyPart(i - 1), mpart, prefix + i, i));
                }
                // prepend children so traversal stays depth-first
                queue.addAll(0, mpart.mChildren);
            }
        } else if (isMessage) {
            MimeMessage mm = getMessageContent(mp);
            if (mm != null) {
                // the attached message's info reuses this part's name
                MPartInfo child = generateMPartInfo(mm, mpart, mpart.mPartName, 0);
                queue.addFirst(child);
                mpart.mChildren = Arrays.asList(child);
            }
        } else {
            // nothing to do at this stage
        }
    }

    if (emptyMultipart != null && parts.size() == 1) {
        String text = emptyMultipart.getPreamble();
        if (!StringUtil.isNullOrEmpty(text)) {
            ZimbraLog.misc
                    .debug("single multipart with no children. promoting the preamble into a single text part");
            parts.remove(0);
            MPartInfo mpart = new MPartInfo();
            ZMimeBodyPart mp = new ZMimeBodyPart();
            mp.setText(text, defaultCharset);
            mpart.mPart = mp;
            mpart.mContentType = mp.getContentType();
            mpart.mDisposition = "";
            mpart.mPartName = "1";
            parts.add(mpart);
        }
    }

    return parts;
}