Example usage for java.util LinkedList isEmpty

List of usage examples for java.util LinkedList isEmpty

Introduction

On this page you can find example usage for java.util LinkedList isEmpty.

Prototype

boolean isEmpty();

Document

Returns true if this list contains no elements.
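
Before the full examples below, here is a minimal, self-contained sketch of the most common pattern on this page: using isEmpty() as the termination test while draining a LinkedList used as a work queue. The class and element names are illustrative only and do not come from the examples that follow.

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        // Use the LinkedList as a FIFO work queue.
        LinkedList<String> queue = new LinkedList<String>();
        queue.addLast("first");
        queue.addLast("second");
        queue.addLast("third");

        // isEmpty() returns true once every element has been removed,
        // so it is the natural loop condition for draining the queue.
        while (!queue.isEmpty()) {
            String task = queue.removeFirst();
            System.out.println("processing " + task);
        }

        System.out.println("queue is empty: " + queue.isEmpty()); // prints true
    }
}

Most of the examples below follow this shape: work is pushed onto the list with addLast() or addFirst() and polled off until isEmpty() reports true.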

Usage

From source file:com.multimedia.service.wallpaper.CmsWallpaperService.java

@Override
public long uploadWallpapers(User uploader, Long id_pages, StatusBean usb) {
    File upload_dir = new File(wallpaper_service.getUploadPath());
    OnlyFilesFilter filenameFilter = new OnlyFilesFilter();
    usb.setDone(0);
    usb.setTotal(scanFolder(upload_dir));
    //logger.debug("starting upload process id_pages="+id_pages);
    if (upload_dir.exists()) {
        File description_file;
        Long id_pages_cur;
        boolean pre_uploaded;
        Wallpaper wallpaper;
        LinkedList<File> files = new LinkedList<File>();
        files.addLast(upload_dir);

        Set<String> dimmensions_set = wallpaper_service.getDimmensions().keySet();

        int restart_count = 0;

        while (!files.isEmpty()) {
            File f = files.removeLast();
            pre_uploaded = false;
            //logger.debug("test file: '"+f.getAbsolutePath()+"'");
            if (f.isDirectory()) {
                //search for DESCRIPTION_FILE
                description_file = new File(f, DESCRIPTION_FILE);
                if (description_file.exists()) {
                    id_pages_cur = null;
                    try {
                        BufferedReader reader = new BufferedReader(
                                new InputStreamReader(new FileInputStream(description_file), "UTF-8"));
                        String line;
                        while ((line = reader.readLine()) != null) {
                            if (line.startsWith("id=")) {
                                id_pages_cur = Long.parseLong(line.substring(3), 10);
                            } else if (line.startsWith("pre_uploaded=true")) {
                                //means that this folder contains subfolders with pre uploaded images
                                //i.e. wallpapers are already resized and stored in the appropriate folders
                                //but they still must be checked
                                pre_uploaded = true;
                            }
                        }
                    } catch (IOException ex) {
                        logger.error("", ex);
                    }
                } else {
                    id_pages_cur = id_pages;
                }
                File[] files_temp = f.listFiles();
                for (File tmp : files_temp) {
                    if (tmp.isFile()) {
                        if (!tmp.getName().equals(DESCRIPTION_FILE) && id_pages_cur != null) {
                            wallpaper = new Wallpaper();
                            wallpaper.setUser(uploader);
                            wallpaper.setId_pages(id_pages_cur);
                            wallpaper.setActive(Boolean.TRUE);
                            wallpaper.setContent_file(tmp);

                            usb.setCur_name(tmp.getAbsolutePath());
                            logger.debug("normal file uploading: '" + tmp.getAbsolutePath() + "'");

                            if (insert(wallpaper)) {
                                tmp.delete();
                                usb.increaseDone(1);
                                restart_count++;
                                if (restart_count == UPLOAD_RESTART_COUNT) {
                                    restart_count = 0;
                                    wallpaper_service.restartTransaction();
                                }
                            }
                        } //else error
                    } else if (!pre_uploaded) {
                        files.addLast(tmp);
                    }
                }
                if (pre_uploaded) {
                    //uploading pre_uploaded files if any
                    File pre_uploaded_folder = new File(f, Utils.FULL_DIMMENSION_NAME);
                    if (pre_uploaded_folder.exists() && pre_uploaded_folder.isDirectory()) {
                        files_temp = pre_uploaded_folder.listFiles(filenameFilter);
                        for (File tmp : files_temp) {
                            wallpaper = new Wallpaper();
                            wallpaper.setUser(uploader);
                            wallpaper.setId_pages(id_pages_cur);
                            wallpaper.setActive(Boolean.TRUE);
                            wallpaper.setContent_file(tmp);

                            logger.debug("pre_uploaded file uploading: '" + tmp.getAbsolutePath() + "'");
                            if (insert(wallpaper, f)) {
                                Iterator<String> dimmensions = dimmensions_set.iterator();
                                while (dimmensions.hasNext()) {
                                    String dimmension = dimmensions.next();
                                    File pre_uploaded_image = new File(f,
                                            dimmension + File.separator + tmp.getName());
                                    if (!pre_uploaded_image.delete()) {
                                        pre_uploaded_image.deleteOnExit();
                                    }
                                }
                                usb.increaseDone(1);
                                restart_count++;
                                if (restart_count == UPLOAD_RESTART_COUNT) {
                                    restart_count = 0;
                                    wallpaper_service.restartTransaction();
                                }
                            }
                        }
                        //deleting pre_uploaded folder if it contains no images
                        if (pre_uploaded_folder.listFiles(filenameFilter).length == 0) {
                            FileUtils.deleteFiles(f, true);
                        }
                    }
                }
            }
        }
    }
    return usb.getDone();
}

From source file:org.fusesource.mop.MOP.java

private ArrayList<ArtifactId> parseArtifactList(LinkedList<String> values) throws UsageException {
    ArrayList<ArtifactId> rc = new ArrayList<ArtifactId>();
    assertNotEmpty(values);
    String value = values.removeFirst();
    ArtifactId id = parseArtifactId(value);
    rc.add(id);

    // Keep consuming artifact ids while values remain and the next token marks another artifact id.
    while (!values.isEmpty() && isAnotherArtifactId(values.getFirst())) {
        value = values.removeFirst().substring(1);
        id = parseArtifactId(value);
        rc.add(id);
    }

    return rc;

}

From source file:com.unboundid.scim2.common.utils.Parser.java

/**
 * Close a grouping of filters enclosed by parenthesis.
 *
 * @param operators The stack of operators tokens.
 * @param output The stack of output tokens.
 * @param isAtTheEnd Whether the end of the filter string was reached.
 * @return The last operator encountered that signaled the end of the group.
 * @throws BadRequestException If the filter string could not be parsed.
 */
private static String closeGrouping(final Stack<String> operators, final Stack<Filter> output,
        final boolean isAtTheEnd) throws BadRequestException {
    String operator = null;
    String repeatingOperator = null;
    LinkedList<Filter> components = new LinkedList<Filter>();

    // Iterate over the logical operators on the stack until either there are
    // no more operators or an opening parenthesis or a NOT operator is found.
    while (!operators.isEmpty()) {
        operator = operators.pop();
        if (operator.equals("(") || operator.equalsIgnoreCase(FilterType.NOT.getStringValue())) {
            if (isAtTheEnd) {
                throw BadRequestException.invalidFilter("Unexpected end of filter string");
            }
            break;
        }
        if (repeatingOperator == null) {
            repeatingOperator = operator;
        }
        if (!operator.equals(repeatingOperator)) {
            if (output.isEmpty()) {
                throw BadRequestException.invalidFilter("Unexpected end of filter string");
            }
            components.addFirst(output.pop());
            if (repeatingOperator.equalsIgnoreCase(FilterType.AND.getStringValue())) {
                output.push(Filter.and(components));
            } else {
                output.push(Filter.or(components));
            }
            components.clear();
            repeatingOperator = operator;
        }
        if (output.isEmpty()) {
            throw BadRequestException.invalidFilter("Unexpected end of filter string");
        }
        components.addFirst(output.pop());
    }

    if (repeatingOperator != null && !components.isEmpty()) {
        if (output.isEmpty()) {
            throw BadRequestException.invalidFilter("Unexpected end of filter string");
        }
        components.addFirst(output.pop());
        if (repeatingOperator.equalsIgnoreCase(FilterType.AND.getStringValue())) {
            output.push(Filter.and(components));
        } else {
            output.push(Filter.or(components));
        }
    }

    return operator;
}

From source file:org.eclipse.che.api.vfs.server.VirtualFileSystemImpl.java

@Path("replace/{path:.*}")
@Override
public void replace(@PathParam("path") String path, List<ReplacementSet> replacements,
        @QueryParam("lockToken") String lockToken)
        throws NotFoundException, ForbiddenException, ConflictException, ServerException {
    VirtualFile projectRoot = mountPoint.getVirtualFile(path);
    if (!projectRoot.isFolder()) {
        throw new ConflictException("Given path must be a project root folder.");
    }
    final Map<String, ReplacementContainer> changesPerFile = new HashMap<>();
    // fill changes matrix first
    for (final ReplacementSet replacement : replacements) {
        for (final String regex : replacement.getFiles()) {
            Pattern pattern = Pattern.compile(regex);
            ItemNode rootNode = getTree(projectRoot.getId(), -1, false, PropertyFilter.ALL_FILTER);
            LinkedList<ItemNode> q = new LinkedList<>();
            q.add(rootNode);
            while (!q.isEmpty()) {
                ItemNode node = q.pop();
                Item item = node.getItem();
                if (item.getItemType().equals(ItemType.FOLDER)) {
                    q.addAll(node.getChildren());
                } else if (item.getItemType().equals(ItemType.FILE)) {
                    // for cases like:  src/main/java/(.*)
                    String itemInternalPath = item.getPath().substring(projectRoot.getPath().length() + 1);
                    if (pattern.matcher(item.getName()).matches()
                            || pattern.matcher(itemInternalPath).matches()) {
                        ReplacementContainer container = (changesPerFile.get(item.getPath()) != null)
                                ? changesPerFile.get(item.getPath())
                                : new ReplacementContainer();
                        for (Variable variable : replacement.getEntries()) {
                            String replaceMode = variable.getReplacemode();
                            if (replaceMode == null || "variable_singlepass".equals(replaceMode)) {
                                container.getVariableProps().put(variable.getFind(), variable.getReplace());
                            } else if ("text_multipass".equals(replaceMode)) {
                                container.getTextProps().put(variable.getFind(), variable.getReplace());
                            }
                        }
                        changesPerFile.put(item.getPath(), container);
                    }
                }
            }
        }
    }
    //now apply changes matrix
    for (Map.Entry<String, ReplacementContainer> entry : changesPerFile.entrySet()) {
        try {
            if (entry.getValue().hasReplacements()) {
                ContentStream cs = mountPoint.getVirtualFile(entry.getKey()).getContent();
                String content = IoUtil.readAndCloseQuietly(cs.getStream());
                String modified = Deserializer.resolveVariables(content, entry.getValue().getVariableProps(),
                        false);
                for (Map.Entry<String, String> replacement : entry.getValue().getTextProps().entrySet()) {
                    if (modified.contains(replacement.getKey())) {
                        modified = modified.replace(replacement.getKey(), replacement.getValue());
                    }
                }
                //better to compare big strings by hash codes first
                if (!(content.hashCode() == modified.hashCode()) || !content.equals(modified)) {
                    mountPoint.getVirtualFile(entry.getKey()).updateContent(
                            new ByteArrayInputStream(modified.getBytes(StandardCharsets.UTF_8)), lockToken);
                }
            }
        } catch (IOException e) {
            LOG.warn(e.getMessage(), e);
        }
    }
}

From source file:com.moorestudio.seniorimageprocessing.SeniorSorter.java

public void sortImages() {
    LinkedList<Map.Entry<String, Long>> timestampList = new LinkedList<>(timestampData.entrySet());
    sort(timestampList, (x, y) -> x.getValue() > y.getValue() ? -1 : x.getValue().equals(y.getValue()) ? 0 : 1);
    // Sort in reverse so that the most recent timestamps are first.

    LinkedList<Map.Entry<File, Long>> imageDataList = new LinkedList<>(imageData.entrySet());
    sort(imageDataList, (x, y) -> x.getValue() > y.getValue() ? -1 : x.getValue().equals(y.getValue()) ? 0 : 1); // Sort in reverse so that the most recent timestamps are first.

    // For the gui update
    int idCount = imageDataList.size();

    //Take the first image and the first timestamp scan taken, which is last in the list, 
    //and sync the camera time to the timestamp time. Both are throwaways.
    if (!timestampList.isEmpty() && !imageDataList.isEmpty() && parent.syncTime) {
        Map.Entry<File, Long> iData = imageDataList.pollLast();
        Map.Entry<String, Long> tsData = timestampList.pollLast();

        //Make the offset
        cameraTimeOffset = tsData.getValue() - iData.getValue();
    }

    //assign images to the student for the current scan timestamp while the adjusted image time is still greater than that timestamp
    while (!timestampList.isEmpty() && !imageDataList.isEmpty()) {
        Map.Entry<File, Long> iData = imageDataList.peekFirst();
        Map.Entry<String, Long> tsData = timestampList.pollFirst();
        ArrayList<File> studentImages = new ArrayList<>();
        while (!imageDataList.isEmpty() && iData.getValue() + cameraTimeOffset > tsData.getValue()) {
            iData = imageDataList.pollFirst();
            studentImages.add(iData.getKey());
            iData = imageDataList.peekFirst();
            //update the GUI
            parent.addProgress((.125 / parent.numThreads) / idCount);
        }
        if (!studentImages.isEmpty()) {
            parent.addImagesToStudent(tsData.getKey(), studentImages);
        }
    }

    //add the unsorted images to the parent's unsorted queue
    for (Map.Entry<File, Long> entry : imageDataList) {
        parent.unsortedFiles.add(entry.getKey());
        //update the GUI
        parent.addProgress((.125 / parent.numThreads) / idCount);
    }
}

From source file:org.minig.imap.EncodedWord.java

/**
 * Takes a text in form of a CharSequence encoded in the given charset (e.g.
 * ISO-8859-1) and makes it US-ASCII compatible and RFC822 compatible for
 * the use as e.g. subject with special characters. <br>
 * This algorithm tries to achieve several goals when encoding: <li>never
 * encode a single character but try to encode whole words</li> <li>if two
 * words must be encoded and there are no more than 3 characters in between,
 * encode everything in one single encoded word</li> <li>an encoded word
 * must never be longer than 76 characters in total</li> <li>ensure that no
 * encodedWord is in a line-wrap (RFC822 advises having no more than 78
 * characters in a header line)</li>
 *
 * @param input
 *            the headerline
 * @param charset
 *            the used charset (e.g. ISO-8859-1)
 * @param type
 *            the encoding to be used
 * @return input encoded in EncodedWords
 */
public static StringBuilder encode(CharSequence input, Charset charset, int type) {
    StringBuilder result = new StringBuilder(input.length());
    LinkedList<int[]> words = new LinkedList<int[]>();
    String encodedWordPrototype;

    int maxLength;
    if (type == QUOTED_PRINTABLE) {
        encodedWordPrototype = "=?" + charset.displayName() + "?q?";
        maxLength = 75 - encodedWordPrototype.length() - 2;
    } else {
        encodedWordPrototype = "=?" + charset.displayName() + "?b?";
        maxLength = 75 - encodedWordPrototype.length() - 6;
    }

    // First find words which need to be encoded
    Matcher matcher = wordTokenizerPattern.matcher(input);
    float encodedChar = type == QUOTED_PRINTABLE ? 3.0f : 4.0f / 3.0f;
    float normalChar = type == QUOTED_PRINTABLE ? 1.0f : 4.0f / 3.0f;

    while (matcher.find()) {
        String word = matcher.group(1);
        float encodedLength = 0.0f;
        int start = matcher.start();
        int end = matcher.end();
        boolean mustEncode = false;

        for (int i = 0; i < word.length(); i++) {
            if (word.charAt(i) > 127) {
                encodedLength += encodedChar;
                mustEncode = true;
            } else {
                encodedLength += normalChar;
            }

            // Split if too long
            if (Math.ceil(encodedLength) > maxLength) {
                words.add(new int[] { start, start + i, maxLength });
                word = word.substring(i);

                start += i;
                i = 0;
                encodedLength = 0.0f;
                mustEncode = false;
            }
        }
        if (mustEncode)
            words.add(new int[] { start, end, (int) Math.ceil(encodedLength) });
    }

    // No need to create encodedWords
    if (words.isEmpty()) {
        return result.append(input);
    }

    // Second group them together if possible (see goals above)
    int[] last = null;
    for (int i = 0; i < words.size(); i++) {
        int[] act = words.get(i);
        if (last != null && (last[2] + act[2] + (act[0] - last[1]) * normalChar < maxLength)
                && (act[0] - last[1]) < 10) {
            words.remove(i--);
            last[1] = act[1];
            last[2] += act[2] + (act[0] - last[1]) * normalChar;
        } else {
            last = act;
        }
    }

    // Create encodedWords
    Iterator<int[]> it = words.iterator();
    int lastWordEnd = 0;
    while (it.hasNext()) {
        int[] act = it.next();

        // create encoded part
        CharSequence rawWord = input.subSequence(act[0], act[1]);
        CharSequence encodedPart;
        if (type == QUOTED_PRINTABLE) {
            // Replace <space> with _
            Matcher wsMatcher = whitespacePattern.matcher(rawWord);
            rawWord = wsMatcher.replaceAll("_");

            encodedPart = QuotedPrintable.encode(rawWord, charset);
        } else {
            encodedPart = Base64.encodeBase64String(charset.encode(CharBuffer.wrap(rawWord)).array());
        }

        result.append(input.subSequence(lastWordEnd, act[0]));
        result.append(encodedWordPrototype);
        result.append(encodedPart);
        result.append("?=");

        lastWordEnd = act[1];
    }
    result.append(input.subSequence(lastWordEnd, input.length()));

    return result;
}

From source file:com.mirth.connect.donkey.server.channel.RecoveryTask.java

private Void doCall() throws Exception {
    StorageSettings storageSettings = channel.getStorageSettings();
    Long maxMessageId = null;
    // The number of messages that were attempted to be recovered
    long attemptedMessages = 0L;
    // The number of messages that were successfully recovered
    long recoveredMessages = 0L;

    // The buffer size for each sub-task
    int sourceBufferSize = 1;
    int unfinishedBufferSize = 10;
    int pendingBufferSize = 10;
    // The minimum message Id that can be retrieved for the next query.
    long sourceMinMessageId = 0L;
    long unfinishedMinMessageId = 0L;
    long pendingMinMessageId = 0L;
    // The completed status of each sub-task
    boolean sourceComplete = false;
    boolean unfinishedComplete = false;
    boolean pendingComplete = false;
    // The queue buffer for each sub-task
    LinkedList<ConnectorMessage> sourceConnectorMessages = new LinkedList<ConnectorMessage>();
    LinkedList<Message> unfinishedMessages = new LinkedList<Message>();
    LinkedList<Message> pendingMessages = new LinkedList<Message>();

    do {
        ThreadUtils.checkInterruptedStatus();
        DonkeyDao dao = channel.getDaoFactory().getDao();

        try {
            if (maxMessageId == null) {
                // Cache the max messageId of the channel to be used in the query below
                maxMessageId = dao.getMaxMessageId(channel.getChannelId());
            }

            if (!sourceComplete && sourceConnectorMessages.isEmpty()) {
                // Fill the buffer
                sourceConnectorMessages
                        .addAll(dao.getConnectorMessages(channel.getChannelId(), channel.getServerId(), 0,
                                Status.RECEIVED, 0, sourceBufferSize, sourceMinMessageId, maxMessageId));

                // Mark the sub-task as completed if no messages were retrieved by the query to prevent the query from running again
                if (sourceConnectorMessages.isEmpty()) {
                    sourceComplete = true;
                } else {
                    /*
                     * If the source queue is on, these messages are usually ignored. Therefore
                     * we only retrieve one of these messages until we know for sure that we'll
                     * need to recover them.
                     */
                    sourceBufferSize = 100;
                }
            }

            if (!unfinishedComplete && unfinishedMessages.isEmpty()) {
                // Fill the buffer
                unfinishedMessages.addAll(dao.getUnfinishedMessages(channel.getChannelId(),
                        channel.getServerId(), unfinishedBufferSize, unfinishedMinMessageId));

                // Mark the sub-task as completed if no messages were retrieved by the query to prevent the query from running again
                if (unfinishedMessages.isEmpty()) {
                    unfinishedComplete = true;
                }
            }

            if (!pendingComplete && pendingMessages.isEmpty()) {
                // Fill the buffer
                pendingMessages.addAll(dao.getPendingConnectorMessages(channel.getChannelId(),
                        channel.getServerId(), pendingBufferSize, pendingMinMessageId));

                // Mark the sub-task as completed if no messages were retrieved by the query to prevent the query from running again
                if (pendingMessages.isEmpty()) {
                    pendingComplete = true;
                }
            }
        } finally {
            dao.close();
        }

        // Retrieve the first message of each sub-task
        ConnectorMessage sourceConnectorMessage = sourceConnectorMessages.peekFirst();
        Message unfinishedMessage = unfinishedMessages.peekFirst();
        Message pendingMessage = pendingMessages.peekFirst();

        if (!storageSettings.isMessageRecoveryEnabled()) {
            sourceComplete = true;
            unfinishedComplete = true;
            pendingComplete = true;
            if (unfinishedMessage != null || pendingMessage != null || (sourceConnectorMessage != null
                    && channel.getSourceConnector().isRespondAfterProcessing())) {
                logger.info("Incomplete messages found for channel " + channel.getName() + " ("
                        + channel.getChannelId()
                        + ") but message storage settings do not support recovery. Skipping recovery task.");
            }
        } else {
            Long messageId = null;

            try {
                /*
                 * Perform a 3-way merge. The sub-task that has the lowest messageId will be
                 * executed first. However it is possible for the unfinishedMessage and
                 * pendingMessage to have the same messageId. In these cases the unfinished
                 * sub-task should be executed and the pending sub-task should be ignored
                 */
                if (sourceConnectorMessage != null
                        && (unfinishedMessage == null
                                || sourceConnectorMessage.getMessageId() < unfinishedMessage.getMessageId())
                        && (pendingMessage == null
                                || sourceConnectorMessage.getMessageId() < pendingMessage.getMessageId())) {
                    if (!channel.getSourceConnector().isRespondAfterProcessing() && unfinishedComplete
                            && pendingComplete) {
                        /*
                         * If the other two sub-tasks are completed already and the source queue
                         * is enabled for this channel, then there is no need to continue
                         * recovering source RECEIVED messages because they will be picked up by
                         * the source queue.
                         */
                        sourceComplete = true;
                    } else {
                        // Store the messageId so we can log it out if an exception occurs
                        messageId = sourceConnectorMessage.getMessageId();
                        // Remove the message from the buffer and update the minMessageId
                        sourceMinMessageId = sourceConnectorMessages.pollFirst().getMessageId() + 1;

                        if (attemptedMessages++ == 0) {
                            logger.info("Starting message recovery for channel " + channel.getName() + " ("
                                    + channel.getChannelId() + "). Incomplete messages found.");
                        }

                        // Execute the recovery process for this message
                        channel.process(sourceConnectorMessage, true);
                        // Use this to decrement the queue size
                        channel.getSourceQueue().decrementSize();
                        // Increment the number of successfully recovered messages
                        recoveredMessages++;
                    }
                } else if (unfinishedMessage != null && (pendingMessage == null
                        || unfinishedMessage.getMessageId() <= pendingMessage.getMessageId())) {
                    // Store the messageId so we can log it out if an exception occurs
                    messageId = unfinishedMessage.getMessageId();
                    // Remove the message from the buffer and update the minMessageId
                    unfinishedMinMessageId = unfinishedMessages.pollFirst().getMessageId() + 1;

                    // If the unfinishedMessage and pendingMessage have the same messageId, remove the pendingMessage from the buffer
                    if (pendingMessage != null
                            && unfinishedMessage.getMessageId() == pendingMessage.getMessageId()) {
                        pendingMinMessageId = pendingMessages.pollFirst().getMessageId() + 1;
                        pendingMessage = pendingMessages.peekFirst();
                    }

                    if (attemptedMessages++ == 0) {
                        logger.info("Starting message recovery for channel " + channel.getName() + " ("
                                + channel.getChannelId() + "). Incomplete messages found.");
                    }

                    // Execute the recovery process for this message
                    recoverUnfinishedMessage(unfinishedMessage);
                    // Increment the number of successfully recovered messages
                    recoveredMessages++;
                } else if (pendingMessage != null) {
                    // Store the messageId so we can log it out if an exception occurs
                    messageId = pendingMessage.getMessageId();
                    // Remove the message from the buffer and update the minMessageId
                    pendingMinMessageId = pendingMessages.pollFirst().getMessageId() + 1;

                    if (attemptedMessages++ == 0) {
                        logger.info("Starting message recovery for channel " + channel.getName() + " ("
                                + channel.getChannelId() + "). Incomplete messages found.");
                    }

                    // Execute the recovery process for this message
                    recoverPendingMessage(pendingMessage);
                    // Increment the number of successfully recovered messages
                    recoveredMessages++;
                }
            } catch (InterruptedException e) {
                // This should only occur if a halt was requested so stop the entire recovery task
                throw e;
            } catch (Exception e) {
                /*
                 * If an exception occurs we skip the message and log an error. This is to
                 * prevent one bad exception or message from locking the entire channel.
                 * 
                 * If a non-Exception gets thrown (OutofMemoryError, etc) then it will
                 * intentionally not be caught here and the recovery task will be stopped.
                 */
                logger.error("Failed to recover message " + messageId + " for channel " + channel.getName()
                        + " (" + channel.getChannelId() + "): \n" + ExceptionUtils.getStackTrace(e));
            }
        }
    } while (!unfinishedComplete || !pendingComplete || !sourceComplete);

    if (attemptedMessages > 0) {
        logger.info("Completed message recovery for channel " + channel.getName() + " ("
                + channel.getChannelId() + "). Successfully recovered " + recoveredMessages + " out of "
                + attemptedMessages + " messages.");
    }

    return null;
}

From source file:com.datatorrent.stram.client.EventsAgent.java

public List<EventInfo> getLatestEvents(String appId, int limit) {
    LinkedList<EventInfo> result = new LinkedList<EventInfo>();
    String dir = getEventsDirectory(appId);
    if (dir == null) {
        return null;
    }
    long totalNumEvents = 0;
    IndexFileBufferedReader ifbr = null;
    LinkedList<Pair<String, Long>> partFiles = new LinkedList<Pair<String, Long>>();
    try {
        ifbr = new IndexFileBufferedReader(new InputStreamReader(
                stramAgent.getFileSystem().open(new Path(dir, FSPartFileCollection.INDEX_FILE))), dir);
        EventsIndexLine indexLine;
        while ((indexLine = (EventsIndexLine) ifbr.readIndexLine()) != null) {
            if (indexLine.isEndLine) {
                continue;
            }
            partFiles.add(new Pair<String, Long>(indexLine.partFile, indexLine.numEvents));
            totalNumEvents += indexLine.numEvents;
        }
    } catch (Exception ex) {
        LOG.warn("Got exception when reading events", ex);
        return result;
    } finally {
        IOUtils.closeQuietly(ifbr);
    }

    long offset = 0;
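    // Skip whole part files from the front while more than 'limit' events remain; once the next
    // file cannot be skipped entirely, compute the read offset into that first kept file.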
    while (totalNumEvents > limit && !partFiles.isEmpty()) {
        Pair<String, Long> head = partFiles.getFirst();
        if (totalNumEvents - head.second < limit) {
            offset = Math.max(0, totalNumEvents - limit);
            break;
        }
        totalNumEvents -= head.second;
        partFiles.removeFirst();
    }
    String lastProcessPartFile = null;
    for (Pair<String, Long> partFile : partFiles) {
        BufferedReader partBr = null;
        try {
            partBr = new BufferedReader(
                    new InputStreamReader(stramAgent.getFileSystem().open(new Path(dir, partFile.first))));
            processPartFile(partBr, null, null, offset, limit, result);
            offset = 0;
            lastProcessPartFile = partFile.first;
        } catch (Exception ex) {
            LOG.warn("Got exception when reading events", ex);
        } finally {
            IOUtils.closeQuietly(partBr);
        }
    }

    BufferedReader partBr = null;
    try {
        String extraPartFile = getNextPartFile(lastProcessPartFile);
        if (extraPartFile != null && limit > 0) {
            partBr = new BufferedReader(
                    new InputStreamReader(stramAgent.getFileSystem().open(new Path(dir, extraPartFile))));
            processPartFile(partBr, null, null, 0, Integer.MAX_VALUE, result);
        }
    } catch (Exception ex) {
        // ignore
    } finally {
        IOUtils.closeQuietly(partBr);
    }
    while (result.size() > limit) {
        result.removeFirst();
    }
    return result;
}

From source file:net.firejack.platform.core.store.AbstractStore.java

protected LinkedList<Criterion> addFilterCriterion(LinkedList<Criterion> criterionList,
        SpecifiedIdsFilter filter) {
    LinkedList<Criterion> resultCriterionList;
    Criterion filterCriterion = createFilterCriterion(filter);
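    // With no existing criteria, the filter criterion stands on its own; otherwise the existing
    // criteria are OR-ed together and AND-ed with the filter criterion.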
    if (criterionList.isEmpty()) {
        criterionList.add(filterCriterion);
        resultCriterionList = criterionList;
    } else {
        resultCriterionList = new LinkedList<Criterion>();
        Criterion criterion = null;
        for (Criterion cr : criterionList) {
            criterion = criterion == null ? cr : Restrictions.or(criterion, cr);
        }
        resultCriterionList.add(Restrictions.and(filterCriterion, criterion));
    }
    return resultCriterionList;
}

From source file:gate.creole.tokeniser.SimpleTokeniser.java

/**
 * Converts the finite state machine to a deterministic one.
 *
 * @param s
 */
private AbstractSet<FSMState> lambdaClosure(Set<FSMState> s) {

    //the stack/queue used by the algorithm
    LinkedList<FSMState> list = new LinkedList<FSMState>(s);

    //the set to be returned
    AbstractSet<FSMState> lambdaClosure = new HashSet<FSMState>(s);

    FSMState top;
    FSMState currentState;
    Set<FSMState> nextStates;
    Iterator<FSMState> statesIter;

    while (!list.isEmpty()) {
        top = list.removeFirst();
        nextStates = top.nextSet(null);

        if (null != nextStates) {
            statesIter = nextStates.iterator();

            while (statesIter.hasNext()) {
                currentState = statesIter.next();
                if (!lambdaClosure.contains(currentState)) {
                    lambdaClosure.add(currentState);
                    list.addFirst(currentState);
                } //if(!lambdaClosure.contains(currentState))
            } //while(statesIter.hasNext())

        } //if(null != nextStates)
    }
    return lambdaClosure;
}