Example usage for java.util LinkedList removeFirst

List of usage examples for java.util LinkedList removeFirst

Introduction

On this page you can find example usages of java.util.LinkedList removeFirst().

Prototype

public E removeFirst() 

Document

Removes and returns the first element from this list.
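
Before the project examples below, here is a minimal self-contained sketch of the call itself (class and variable names are illustrative only): removeFirst() removes and returns the current head of the list, and throws NoSuchElementException when the list is empty.

import java.util.LinkedList;

public class RemoveFirstDemo {
    public static void main(String[] args) {
        LinkedList<String> queue = new LinkedList<String>();
        queue.add("a");
        queue.add("b");

        // each call removes and returns the current head, so the list is consumed in FIFO order
        while (!queue.isEmpty()) {
            System.out.println(queue.removeFirst()); // prints "a" then "b"
        }

        // calling removeFirst() on an empty list throws java.util.NoSuchElementException
        // queue.removeFirst();
    }
}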

Usage

From source file:org.apache.fop.layoutmgr.AbstractBreaker.java

/**
 * Justifies the boxes and returns them as a new KnuthSequence.
 * @param blockList block list to justify
 * @param alg reference to the algorithm instance
 * @param availableBPD the available BPD
 * @return the effective list
 */
private BlockSequence justifyBoxes // CSOK: MethodLength
(BlockSequence blockList, PageBreakingAlgorithm alg, int availableBPD) {
    int optimalPageCount;
    alg.setConstantLineWidth(availableBPD);
    optimalPageCount = alg.findBreakingPoints(blockList, /*availableBPD,*/
            1, true, BreakingAlgorithm.ALL_BREAKS);
    log.debug("PLM> optimalPageCount= " + optimalPageCount);

    //
    ListIterator<KnuthElement> sequenceIterator = blockList.listIterator();
    ListIterator<PageBreakPosition> breakIterator = alg.getPageBreaks().listIterator();
    KnuthElement thisElement = null;
    PageBreakPosition thisBreak;
    int adjustedDiff; // difference already adjusted

    while (breakIterator.hasNext()) {
        thisBreak = breakIterator.next();
        if (log.isDebugEnabled()) {
            log.debug("| first page: break= " + thisBreak.getLeafPos() + " difference= " + thisBreak.difference
                    + " ratio= " + thisBreak.bpdAdjust);
        }
        adjustedDiff = 0;

        // glue and penalty items at the beginning of the page must
        // be ignored:
        // the first element returned by sequenceIterator.next()
        // inside the
        // while loop must be a box
        KnuthElement firstElement;
        while (sequenceIterator.hasNext()) {
            firstElement = sequenceIterator.next();
            if (!firstElement.isBox()) {
                log.debug("PLM> ignoring glue or penalty element " + "at the beginning of the sequence");
                if (firstElement.isGlue()) {
                    ((BlockLevelLayoutManager) firstElement.getLayoutManager())
                            .discardSpace((KnuthGlue) firstElement);
                }
            } else {
                break;
            }
        }
        sequenceIterator.previous();

        // scan the sub-sequence representing a page,
        // collecting information about potential adjustments
        MinOptMax lineNumberMaxAdjustment = MinOptMax.ZERO;
        MinOptMax spaceMaxAdjustment = MinOptMax.ZERO;
        LinkedList<KnuthGlue> blockSpacesList = new LinkedList<KnuthGlue>();
        LinkedList<KnuthGlue> unconfirmedList = new LinkedList<KnuthGlue>();
        LinkedList<KnuthGlue> adjustableLinesList = new LinkedList<KnuthGlue>();
        boolean bBoxSeen = false;
        while (sequenceIterator.hasNext() && sequenceIterator.nextIndex() <= thisBreak.getLeafPos()) {
            thisElement = sequenceIterator.next();
            if (thisElement.isGlue()) {
                // glue elements are used to represent adjustable
                // lines
                // and adjustable spaces between blocks
                KnuthGlue thisGlue = (KnuthGlue) thisElement;
                Adjustment adjustment = thisGlue.getAdjustmentClass();
                if (adjustment.equals(Adjustment.SPACE_BEFORE_ADJUSTMENT)
                        || adjustment.equals(Adjustment.SPACE_AFTER_ADJUSTMENT)) {
                    // potential space adjustment
                    // glue items before the first box or after the
                    // last one
                    // must be ignored
                    unconfirmedList.add(thisGlue);
                } else if (adjustment.equals(Adjustment.LINE_NUMBER_ADJUSTMENT)) {
                    // potential line number adjustment
                    lineNumberMaxAdjustment = lineNumberMaxAdjustment.plusMax(thisElement.getStretch());
                    lineNumberMaxAdjustment = lineNumberMaxAdjustment.minusMin(thisElement.getShrink());
                    adjustableLinesList.add(thisGlue);
                } else if (adjustment.equals(Adjustment.LINE_HEIGHT_ADJUSTMENT)) {
                    // potential line height adjustment
                }
            } else if (thisElement.isBox()) {
                if (!bBoxSeen) {
                    // this is the first box met in this page
                    bBoxSeen = true;
                } else {
                    while (!unconfirmedList.isEmpty()) {
                        // glue items in unconfirmedList were not after
                        // the last box
                        // in this page; they must be added to
                        // blockSpaceList
                        KnuthGlue blockSpace = unconfirmedList.removeFirst();
                        spaceMaxAdjustment = spaceMaxAdjustment.plusMax(blockSpace.getStretch());
                        spaceMaxAdjustment = spaceMaxAdjustment.minusMin(blockSpace.getShrink());
                        blockSpacesList.add(blockSpace);
                    }
                }
            }
        }
        log.debug("| line number adj= " + lineNumberMaxAdjustment);
        log.debug("| space adj      = " + spaceMaxAdjustment);

        if (thisElement.isPenalty() && thisElement.getWidth() > 0) {
            log.debug("  mandatory variation to the number of lines!");
            ((BlockLevelLayoutManager) thisElement.getLayoutManager())
                    .negotiateBPDAdjustment(thisElement.getWidth(), thisElement);
        }

        if (thisBreak.bpdAdjust != 0
                && (thisBreak.difference > 0 && thisBreak.difference <= spaceMaxAdjustment.getMax())
                || (thisBreak.difference < 0 && thisBreak.difference >= spaceMaxAdjustment.getMin())) {
            // modify only the spaces between blocks
            adjustedDiff += adjustBlockSpaces(blockSpacesList, thisBreak.difference,
                    (thisBreak.difference > 0 ? spaceMaxAdjustment.getMax() : -spaceMaxAdjustment.getMin()));
            log.debug("single space: "
                    + (adjustedDiff == thisBreak.difference || thisBreak.bpdAdjust == 0 ? "ok" : "ERROR"));
        } else if (thisBreak.bpdAdjust != 0) {
            adjustedDiff += adjustLineNumbers(adjustableLinesList, thisBreak.difference,
                    (thisBreak.difference > 0 ? lineNumberMaxAdjustment.getMax()
                            : -lineNumberMaxAdjustment.getMin()));
            adjustedDiff += adjustBlockSpaces(blockSpacesList, thisBreak.difference - adjustedDiff,
                    ((thisBreak.difference - adjustedDiff) > 0 ? spaceMaxAdjustment.getMax()
                            : -spaceMaxAdjustment.getMin()));
            log.debug("lines and space: "
                    + (adjustedDiff == thisBreak.difference || thisBreak.bpdAdjust == 0 ? "ok" : "ERROR"));

        }
    }

    // create a new sequence: the new elements will contain the
    // Positions
    // which will be used in the addAreas() phase
    BlockSequence effectiveList = new BlockSequence(blockList.getStartOn(), blockList.getDisplayAlign());
    effectiveList.addAll(getCurrentChildLM().getChangedKnuthElements(
            blockList.subList(0, blockList.size() - blockList.ignoreAtEnd), /* 0, */0));
    //effectiveList.add(new KnuthPenalty(0, -KnuthElement.INFINITE,
    // false, new Position(this), false));
    effectiveList.endSequence();

    ElementListObserver.observe(effectiveList, "breaker-effective", null);

    alg.getPageBreaks().clear(); //Why this?
    return effectiveList;
}
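
The removeFirst() call in justifyBoxes drains unconfirmedList into blockSpacesList once a later box confirms the pending glue items. Below is a stripped-down, self-contained sketch of that transfer idiom; plain integers stand in for KnuthGlue and the accumulator for MinOptMax, so this is not FOP code.

import java.util.LinkedList;

public class DrainSketch {
    public static void main(String[] args) {
        LinkedList<Integer> unconfirmed = new LinkedList<Integer>();
        LinkedList<Integer> confirmed = new LinkedList<Integer>();
        unconfirmed.add(3);
        unconfirmed.add(5);

        int total = 0;
        // move every pending item, head first, into the confirmed list
        while (!unconfirmed.isEmpty()) {
            Integer item = unconfirmed.removeFirst(); // remove the head, preserving FIFO order
            total += item;                            // accumulate, like spaceMaxAdjustment above
            confirmed.add(item);                      // append to the target list
        }
        System.out.println(total + " " + confirmed);  // 8 [3, 5]
    }
}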

From source file:org.dbpedia.spotlight.mediawiki.ModularParser.java

/**
 * Building a ContentElement; this function is called by all the other
 * parseContentElement(..) functions.
 */
private ContentElement parseContentElement(SpanManager sm, ContentElementParsingParameters cepp,
        LinkedList<Span> lineSpans, ContentElement result) {

    List<Link> localLinks = new ArrayList<Link>();
    List<Template> localTemplates = new ArrayList<Template>();

    List<Span> boldSpans = new ArrayList<Span>();
    List<Span> italicSpans = new ArrayList<Span>();
    sm.manageList(boldSpans);
    sm.manageList(italicSpans);

    List<Span> managedSpans = new ArrayList<Span>();
    sm.manageList(managedSpans);

    Span contentElementRange = new Span(lineSpans.getFirst().getStart(), lineSpans.getLast().getEnd()).trim(sm);
    managedSpans.add(contentElementRange);

    // set the SrcSpan
    if (calculateSrcSpans) {
        result.setSrcSpan(new SrcSpan(sm.getSrcPos(contentElementRange.getStart()),
                sm.getSrcPos(contentElementRange.getEnd())));
    }

    sm.manageList(lineSpans);
    while (!lineSpans.isEmpty()) {
        Span line = lineSpans.getFirst();

        parseBoldAndItalicSpans(sm, line, boldSpans, italicSpans);

        // External links
        parseExternalLinks(sm, line, "http://", managedSpans, localLinks, result);
        parseExternalLinks(sm, line, "https://", managedSpans, localLinks, result);
        parseExternalLinks(sm, line, "ftp://", managedSpans, localLinks, result);
        parseExternalLinks(sm, line, "mailto:", managedSpans, localLinks, result);

        // end of linewise operations
        lineSpans.removeFirst();
    }
    sm.removeManagedList(lineSpans);

    // Links
    int i;
    i = 0;
    while (i < cepp.linkSpans.size()) {
        if (contentElementRange.hits(cepp.linkSpans.get(i))) {
            Span linkSpan = cepp.linkSpans.remove(i);
            managedSpans.add(linkSpan);
            Link l = cepp.links.remove(i).setHomeElement(result);
            localLinks.add(l);
            if (!showImageText && l.getType() == Link.type.IMAGE) {
                // deletes the Image Text from the ContentElement Text.
                sm.delete(linkSpan);
            }
        } else {
            i++;
        }
    }

    // Templates
    //DBPedia - Spotlight. Removing the boiler plate logic from the wikitext
    //Commenting the Templates Logic
    /*
    i = 0;
    while (i < cepp.templateSpans.size()) {
        Span ts = cepp.templateSpans.get(i);
        if (contentElementRange.hits(ts)) {
            ResolvedTemplate rt = cepp.templates.remove(i);

            if (rt.getPostParseReplacement() != null) {
                sm.replace(ts, rt.getPostParseReplacement());
            }
            cepp.templateSpans.remove(i);

            Object parsedObject = rt.getParsedObject();
            if (parsedObject != null) {
                managedSpans.add(ts);

                Class parsedObjectClass = parsedObject.getClass();
                if (parsedObjectClass == Template.class) {
                    localTemplates.add((Template) parsedObject);
                } else if (parsedObjectClass == Link.class) {
                    localLinks.add(((Link) parsedObject).setHomeElement(result));
                } else {
                    localTemplates.add(rt.getTemplate());
                }
            }
        } else {
            i++;
        }
    }
    */
    // HTML/XML Tags
    i = 0;
    List<Span> tags = new ArrayList<Span>();
    while (i < cepp.tagSpans.size()) {
        Span s = cepp.tagSpans.get(i);
        if (contentElementRange.hits(s)) {
            cepp.tagSpans.remove(i);
            if (deleteTags) {
                sm.delete(s);
            } else {
                tags.add(s);
                managedSpans.add(s);
            }
        } else {
            i++;
        }
    }

    // noWiki
    i = 0;
    List<Span> localNoWikiSpans = new ArrayList<Span>();
    while (i < cepp.noWikiSpans.size()) {
        Span s = cepp.noWikiSpans.get(i);
        if (contentElementRange.hits(s)) {
            cepp.noWikiSpans.remove(i);
            sm.replace(s, cepp.noWikiStrings.remove(i));
            localNoWikiSpans.add(s);
            managedSpans.add(s);
        } else {
            i++;
        }
    }

    // MATH Tags
    i = 0;
    List<Span> mathSpans = new ArrayList<Span>();
    while (i < cepp.mathSpans.size()) {
        Span s = cepp.mathSpans.get(i);
        if (contentElementRange.hits(s)) {
            cepp.mathSpans.remove(i);

            if (showMathTagContent) {
                mathSpans.add(s);
                managedSpans.add(s);
                sm.replace(s, cepp.mathStrings.remove(i));
            } else {
                sm.delete(s);
            }
        } else {
            i++;
        }
    }

    result.setText(sm.substring(contentElementRange));

    // managed spans must be removed here and not earlier, because every
    // change in the SpanManager affects the Spans!
    sm.removeManagedList(boldSpans);
    sm.removeManagedList(italicSpans);
    sm.removeManagedList(managedSpans);

    // contentElementRange is still contained in managedSpans as well, hence the adjustment:
    final int adjust = -contentElementRange.getStart();
    for (Span s : boldSpans) {
        s.adjust(adjust);
    }
    for (Span s : italicSpans) {
        s.adjust(adjust);
    }
    for (Span s : managedSpans) {
        s.adjust(adjust);
    }

    result.setFormatSpans(FormatType.BOLD, boldSpans);
    result.setFormatSpans(FormatType.ITALIC, italicSpans);
    result.setFormatSpans(FormatType.TAG, tags);
    result.setFormatSpans(FormatType.MATH, mathSpans);
    result.setFormatSpans(FormatType.NOWIKI, localNoWikiSpans);

    result.setLinks(sortLinks(localLinks));
    result.setTemplates(sortTemplates(localTemplates));

    return result;
}
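
parseContentElement treats lineSpans as a work queue: the head is inspected with getFirst(), processed, and only then discarded with removeFirst(). Here is a self-contained sketch of that peek-then-pop loop; Strings stand in for Span objects, so this is not JWPL/Spotlight code.

import java.util.LinkedList;

public class WorkQueueSketch {
    public static void main(String[] args) {
        LinkedList<String> lines = new LinkedList<String>();
        lines.add("first line");
        lines.add("second line");

        while (!lines.isEmpty()) {
            String line = lines.getFirst();   // peek at the head without removing it
            System.out.println("processing: " + line);
            lines.removeFirst();              // done with this line, drop it from the queue
        }
    }
}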

From source file:org.eclipse.wb.tests.designer.core.util.ast.AstNodeUtilsTest.java

public void test_moveNode() throws Exception {
    TypeDeclaration typeDeclaration = createTypeDeclaration_TestC("int m_value;");
    FieldDeclaration fieldDeclaration = typeDeclaration.getFields()[0];
    // remember ranges
    final LinkedList<Integer> ranges = Lists.newLinkedList();
    fieldDeclaration.accept(new ASTVisitor() {
        @Override
        public void preVisit(ASTNode node) {
            ranges.add(node.getStartPosition());
            ranges.add(node.getLength());
        }
    });
    // do move
    int targetPosition = 1;
    final int delta = targetPosition - fieldDeclaration.getStartPosition();
    AstNodeUtils.moveNode(fieldDeclaration, targetPosition);
    // compare ranges
    fieldDeclaration.accept(new ASTVisitor() {
        @Override
        public void preVisit(ASTNode node) {
            assertEquals(ranges.removeFirst(), Integer.valueOf(node.getStartPosition() - delta));
            assertEquals(ranges.removeFirst(), Integer.valueOf(node.getLength()));
        }
    });
}
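
In this test the LinkedList acts as a recorder: positions are appended during the first AST pass and consumed with removeFirst() during the second, so the assertions compare values in visit order. The following self-contained sketch shows that record-and-replay pattern without the Eclipse AST classes.

import java.util.LinkedList;

public class RecordReplaySketch {
    public static void main(String[] args) {
        int[] data = {10, 20, 30};

        // first pass: record derived values in traversal order
        LinkedList<Integer> recorded = new LinkedList<Integer>();
        for (int value : data) {
            recorded.add(value * 2);
        }

        // second pass: removeFirst() hands the recorded values back in the same order
        for (int value : data) {
            int expected = recorded.removeFirst();
            if (expected != value * 2) {
                throw new AssertionError("mismatch for " + value);
            }
        }
        System.out.println("all recorded values matched");
    }
}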

From source file:com.redhat.persistence.oql.QFrame.java

private void render(LinkedList joins, List where, QFrame oroot, QFrame root, Set emitted) {
    // If the first non empty frame is outer we treat it as inner.
    if (m_outer && !joins.isEmpty()) {
        oroot = this;
    }

    Code table = null;
    if (m_table != null && m_duplicate == null) {
        table = new Code(m_table).add(" ").add(alias());
    } else if (m_tableExpr != null && m_duplicate == null) {
        table = m_tableExpr.emit(m_generator).add(" ").add(alias());
    }

    if (table != null) {
        joins.addFirst(JFrame.leaf(table, this, oroot));
    }

    List children = getChildren();
    for (int i = 0; i < children.size(); i++) {
        QFrame child = (QFrame) children.get(i);
        child.render(joins, where, oroot, root, emitted);
    }

    if (m_condition != null) {
        Code c = m_condition.emit(m_generator);
        if (!c.isTrue() && !emitted.contains(c)) {
            m_used.clear();
            frames(m_condition, m_used);
            boolean join = false;
            for (Iterator it = joins.iterator(); it.hasNext();) {
                JFrame frame = (JFrame) it.next();
                boolean modified = m_used.removeAll(frame.defined);
                if (m_used.isEmpty()) {
                    // We default to putting things in the where
                    // clause here because oracle won't resolve
                    // external variable references correctly when
                    // they appear in join conditions.
                    if (oroot.equals(root)) {
                        where.add(c);
                    } else if (frame.froot != null && oroot.equals(frame.froot)) {
                        frame.join = frame.join.add(" and ").add(c);
                    } else {
                        throw new IllegalStateException(
                                "unable to place condition: " + m_condition + " " + c + trace(joins));
                    }
                } else if (modified) {
                    join = true;
                    break;
                }
            }
            if (join) {
                JFrame right = (JFrame) joins.removeFirst();

                if (joins.isEmpty()) {
                    throw new IllegalStateException(
                            "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                }

                LinkedList skipped = null;
                JFrame left = (JFrame) joins.removeFirst();
                while (true) {
                    m_used.clear();
                    frames(m_condition, m_used);
                    m_used.removeAll(right.defined);
                    boolean cross = m_used.removeAll(left.defined);
                    if (m_used.isEmpty()) {
                        joins.addFirst(JFrame.join(left, right, c));
                        break;
                    } else if (joins.isEmpty()) {
                        throw new IllegalStateException(
                                "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                    } else if (cross) {
                        JFrame lefter = (JFrame) joins.removeFirst();
                        left = JFrame.cross(lefter, left);
                    } else {
                        if (skipped == null) {
                            skipped = new LinkedList();
                        }
                        skipped.addLast(left);
                        left = (JFrame) joins.removeFirst();
                    }
                }

                if (skipped != null) {
                    while (!skipped.isEmpty()) {
                        joins.addFirst(skipped.removeLast());
                    }
                }
            }

            emitted.add(c);
        }
    }
}
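
render() treats joins as a deque: frames are popped from the head with removeFirst() and the combined frame is pushed back with addFirst(). The sketch below is a deliberately simplified, self-contained illustration of that fold-from-the-head idiom; Strings stand in for JFrame objects and the real method's condition handling is omitted.

import java.util.LinkedList;

public class FoldFromHeadSketch {
    public static void main(String[] args) {
        LinkedList<String> joins = new LinkedList<String>();
        joins.addFirst("t1");
        joins.addFirst("t2");
        joins.addFirst("t3");

        // repeatedly pop the two head frames, combine them, and push the result back
        while (joins.size() > 1) {
            String right = joins.removeFirst();
            String left = joins.removeFirst();
            joins.addFirst("(" + left + " join " + right + ")");
        }
        System.out.println(joins.getFirst()); // (t1 join (t2 join t3))
    }
}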

From source file:org.myrian.persistence.oql.QFrame.java

private void render(LinkedList joins, List where, QFrame oroot, QFrame root, Set emitted) {
    // If the first non empty frame is outer we treat it as inner.
    if (m_outer && !joins.isEmpty()) {
        oroot = this;
    }

    Code table = null;
    if (m_table != null && m_duplicate == null) {
        table = new Code(m_table).add(" ").add(alias());
    } else if (m_tableExpr != null && m_duplicate == null) {
        table = m_tableExpr.emit(m_generator).add(" ").add(alias());
    }

    if (table != null) {
        joins.addFirst(JFrame.leaf(table, this, oroot));
    }

    List children = getChildren();
    for (int i = 0; i < children.size(); i++) {
        QFrame child = (QFrame) children.get(i);
        child.render(joins, where, oroot, root, emitted);
    }

    if (m_condition != null) {
        Code c = m_condition.emit(m_generator);
        if (!c.isTrue() && !emitted.contains(c)) {
            m_used.clear();
            frames(m_condition, m_used);
            boolean join = false;
            for (Iterator it = joins.iterator(); it.hasNext();) {
                JFrame frame = (JFrame) it.next();
                boolean modified = m_used.removeAll(frame.defined);
                if (m_used.isEmpty()) {
                    // We default to putting things in the where
                    // clause here because oracle won't resolve
                    // external variable references correctly when
                    // they appear in join conditions.
                    if (oroot.equals(root)) {
                        where.add(c);
                    } else if (frame.froot != null && oroot.equals(frame.froot)) {
                        frame.join = frame.join.add(" and ").add(c);
                    } else {
                        /*
                         * XXX rhs needs to make sure this is
                         * the right thing to do
                         */
                        where.add(c);
                    }
                } else if (modified) {
                    join = true;
                    break;
                }
            }
            if (join) {
                JFrame right = (JFrame) joins.removeFirst();

                if (joins.isEmpty()) {
                    throw new IllegalStateException(
                            "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                }

                LinkedList skipped = null;
                JFrame left = (JFrame) joins.removeFirst();
                while (true) {
                    m_used.clear();
                    frames(m_condition, m_used);
                    m_used.removeAll(right.defined);
                    boolean cross = m_used.removeAll(left.defined);
                    if (m_used.isEmpty()) {
                        joins.addFirst(JFrame.join(left, right, c));
                        break;
                    } else if (joins.isEmpty()) {
                        throw new IllegalStateException(
                                "unresolved variable in condition: " + m_condition + " " + c + trace(joins));
                    } else if (cross) {
                        JFrame lefter = (JFrame) joins.removeFirst();
                        left = JFrame.cross(lefter, left);
                    } else {
                        if (skipped == null) {
                            skipped = new LinkedList();
                        }
                        skipped.addLast(left);
                        left = (JFrame) joins.removeFirst();
                    }
                }

                if (skipped != null) {
                    while (!skipped.isEmpty()) {
                        joins.addFirst(skipped.removeLast());
                    }
                }
            }

            emitted.add(c);
        }
    }
}

From source file:com.datatorrent.stram.StreamingContainerManager.java

/**
 * Compute checkpoints required for a given operator instance to be recovered.
 * This is done by looking at checkpoints available for downstream dependencies first,
 * and then selecting the most recent available checkpoint that is smaller than downstream.
 *
 * @param operator Operator instance for which to find recovery checkpoint
 * @param ctx      Context into which to collect traversal info
 */
public void updateRecoveryCheckpoints(PTOperator operator, UpdateCheckpointsContext ctx) {
    if (operator.getRecoveryCheckpoint().windowId < ctx.committedWindowId.longValue()) {
        ctx.committedWindowId.setValue(operator.getRecoveryCheckpoint().windowId);
    }

    if (operator.getState() == PTOperator.State.ACTIVE && (ctx.currentTms
            - operator.stats.lastWindowIdChangeTms) > operator.stats.windowProcessingTimeoutMillis) {
        // if the checkpoint is ahead, then it is not blocked but waiting for activation (state-less recovery, at-most-once)
        if (ctx.committedWindowId.longValue() >= operator.getRecoveryCheckpoint().windowId) {
            LOG.debug("Marking operator {} blocked committed window {}, recovery window {}", operator,
                    Codec.getStringWindowId(ctx.committedWindowId.longValue()),
                    Codec.getStringWindowId(operator.getRecoveryCheckpoint().windowId));
            ctx.blocked.add(operator);
        }
    }

    // the most recent checkpoint eligible for recovery based on downstream state
    Checkpoint maxCheckpoint = Checkpoint.INITIAL_CHECKPOINT;

    Set<OperatorMeta> checkpointGroup = ctx.checkpointGroups.get(operator.getOperatorMeta());
    if (checkpointGroup == null) {
        checkpointGroup = Collections.singleton(operator.getOperatorMeta());
    }
    // find intersection of checkpoints that group can collectively move to
    TreeSet<Checkpoint> commonCheckpoints = new TreeSet<>(new Checkpoint.CheckpointComparator());
    synchronized (operator.checkpoints) {
        commonCheckpoints.addAll(operator.checkpoints);
    }
    Set<PTOperator> groupOpers = new HashSet<>(checkpointGroup.size());
    boolean pendingDeploy = operator.getState() == PTOperator.State.PENDING_DEPLOY;
    if (checkpointGroup.size() > 1) {
        for (OperatorMeta om : checkpointGroup) {
            Collection<PTOperator> operators = plan.getAllOperators(om);
            for (PTOperator groupOper : operators) {
                synchronized (groupOper.checkpoints) {
                    commonCheckpoints.retainAll(groupOper.checkpoints);
                }
                // visit all downstream operators of the group
                ctx.visited.add(groupOper);
                groupOpers.add(groupOper);
                pendingDeploy |= operator.getState() == PTOperator.State.PENDING_DEPLOY;
            }
        }
        // highest common checkpoint
        if (!commonCheckpoints.isEmpty()) {
            maxCheckpoint = commonCheckpoints.last();
        }
    } else {
        // without logical grouping, treat partitions as independent
        // this is especially important for parallel partitioning
        ctx.visited.add(operator);
        groupOpers.add(operator);
        maxCheckpoint = operator.getRecentCheckpoint();
        if (ctx.recovery && maxCheckpoint.windowId == Stateless.WINDOW_ID && operator.isOperatorStateLess()) {
            long currentWindowId = WindowGenerator.getWindowId(ctx.currentTms, this.vars.windowStartMillis,
                    this.getLogicalPlan().getValue(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS));
            maxCheckpoint = new Checkpoint(currentWindowId, 0, 0);
        }
    }

    // DFS downstream operators
    for (PTOperator groupOper : groupOpers) {
        for (PTOperator.PTOutput out : groupOper.getOutputs()) {
            for (PTOperator.PTInput sink : out.sinks) {
                PTOperator sinkOperator = sink.target;
                if (groupOpers.contains(sinkOperator)) {
                    continue; // downstream operator within group
                }
                if (!ctx.visited.contains(sinkOperator)) {
                    // downstream traversal
                    updateRecoveryCheckpoints(sinkOperator, ctx);
                }
                // recovery window id cannot move backwards
                // when dynamically adding new operators
                if (sinkOperator.getRecoveryCheckpoint().windowId >= operator
                        .getRecoveryCheckpoint().windowId) {
                    maxCheckpoint = Checkpoint.min(maxCheckpoint, sinkOperator.getRecoveryCheckpoint());
                }

                if (ctx.blocked.contains(sinkOperator)) {
                    if (sinkOperator.stats.getCurrentWindowId() == operator.stats.getCurrentWindowId()) {
                        // downstream operator is blocked by this operator
                        ctx.blocked.remove(sinkOperator);
                    }
                }
            }
        }
    }

    // find the common checkpoint that is <= downstream recovery checkpoint
    if (!commonCheckpoints.contains(maxCheckpoint)) {
        if (!commonCheckpoints.isEmpty()) {
            maxCheckpoint = Objects.firstNonNull(commonCheckpoints.floor(maxCheckpoint), maxCheckpoint);
        }
    }

    for (PTOperator groupOper : groupOpers) {
        // checkpoint frozen during deployment
        if (!pendingDeploy || ctx.recovery) {
            // remove previous checkpoints
            Checkpoint c1 = Checkpoint.INITIAL_CHECKPOINT;
            LinkedList<Checkpoint> checkpoints = groupOper.checkpoints;
            synchronized (checkpoints) {
                if (!checkpoints.isEmpty() && (checkpoints.getFirst()).windowId <= maxCheckpoint.windowId) {
                    c1 = checkpoints.getFirst();
                    Checkpoint c2;
                    while (checkpoints.size() > 1
                            && ((c2 = checkpoints.get(1)).windowId) <= maxCheckpoint.windowId) {
                        checkpoints.removeFirst();
                        //LOG.debug("Checkpoint to delete: operator={} windowId={}", operator.getName(), c1);
                        this.purgeCheckpoints.add(new Pair<PTOperator, Long>(groupOper, c1.windowId));
                        c1 = c2;
                    }
                } else {
                    if (ctx.recovery && checkpoints.isEmpty() && groupOper.isOperatorStateLess()) {
                        LOG.debug("Adding checkpoint for stateless operator {} {}", groupOper,
                                Codec.getStringWindowId(maxCheckpoint.windowId));
                        c1 = groupOper.addCheckpoint(maxCheckpoint.windowId, this.vars.windowStartMillis);
                    }
                }
            }
            //LOG.debug("Operator {} checkpoints: commit {} recent {}", new Object[] {operator.getName(), c1, operator.checkpoints});
            groupOper.setRecoveryCheckpoint(c1);
        } else {
            LOG.debug("Skipping checkpoint update {} during {}", groupOper, groupOper.getState());
        }
    }

}
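
updateRecoveryCheckpoints keeps each operator's checkpoints in ascending windowId order, and removeFirst() discards a head checkpoint whenever its successor is still at or below maxCheckpoint, so the head that survives is the newest eligible one. A reduced, self-contained sketch of that head-pruning loop follows; plain longs stand in for Checkpoint objects.

import java.util.LinkedList;

public class PruneHeadSketch {
    public static void main(String[] args) {
        LinkedList<Long> checkpoints = new LinkedList<Long>();
        checkpoints.add(100L);
        checkpoints.add(200L);
        checkpoints.add(300L);
        checkpoints.add(400L);
        long threshold = 300L; // stands in for maxCheckpoint.windowId

        // drop the head while the next entry is still <= the threshold
        while (checkpoints.size() > 1 && checkpoints.get(1) <= threshold) {
            checkpoints.removeFirst(); // the successor is still eligible, so the head is obsolete
        }
        System.out.println(checkpoints); // [300, 400]
    }
}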

From source file:elh.eus.absa.Features.java

/**
 * Check if the given word/lemma/ngram exists both in the ngram list and in the general or domain polarity
 * lexicons, and if so update the corresponding attributes in the feature vector.
 *
 * @param ngrams
 * @param fVector
 * @param prefix
 * @param toknumNgram
 * @param toknumPol
 * @param empty
 * @param ngram
 */
private void checkNgramsAndPolarLexicons(LinkedList<String> ngrams, double[] fVector, String prefix,
        int toknumNgram, int toknumPol, boolean empty, boolean ngram) {
    //System.err.println(Arrays.asList(ngrams).toString());
    // if empty is active it means that we are checking the end of the sentence and
    // the ngram list must be emptied
    if (empty) {
        // add ngrams to the feature vector
        while (!ngrams.isEmpty()) {
            String ng = featureFromArray(ngrams, prefix);
            //if the current lemma is in the ngram list activate the feature in the vector
            if (params.containsKey("lemmaNgrams")
                    && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))) {
                // add occurrence to feature vector (the function checks if the given ngram feature exists).
                addNumericToFeatureVector(ng, fVector, toknumNgram); //tokNum
            }

            ng = featureFromArray(ngrams, "");
            if (params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) {
                checkPolarityLexicons(ng, fVector, toknumPol, ngram);
            } //end polarity ngram checker

            ngrams.removeFirst();

        } //end ngram checking
    }
    // if empty is false search for all ngrams in the window
    else {
        // add ngrams to the feature vector
        for (int i = 0; i < ngrams.size(); i++) {
            String ng = featureFromArray(ngrams.subList(0, i + 1), prefix);
            //if the current lemma is in the ngram list activate the feature in the vector
            if (params.containsKey("lemmaNgrams")
                    && (!params.getProperty("lemmaNgrams").equalsIgnoreCase("0"))) {
                // add occurrence to feature vector (the function checks if the given ngram feature exists).
                addNumericToFeatureVector(ng, fVector, toknumNgram); //tokNum
            }

            ng = featureFromArray(ngrams.subList(0, i + 1), "");
            if (params.containsKey("polarLexiconGeneral") || params.containsKey("polarLexiconDomain")) {
                checkPolarityLexicons(ng, fVector, toknumPol, ngram);
            } //end polarity ngram checker
        } //end ngram checking                          
    }
}

From source file:org.gudy.azureus2.ui.console.commands.Priority.java

public void execute(String commandName, ConsoleInput console, CommandLine commandLine) {

    String tnumstr, fnumstr, newpriostr;
    int tnumber;
    DiskManagerFileInfo[] files;
    String[] sections;
    List args = commandLine.getArgList();
    LinkedList fs, fe;
    DownloadManager dm;

    if (args.isEmpty()) {
        console.out.println("Torrent # required!");
        return;
    } else {
        tnumstr = (String) args.remove(0);
    }
    if (args.isEmpty()) {
        console.out.println("File # required!");
        return;
    } else {
        fnumstr = (String) args.remove(0);
    }

    if ((console.torrents == null) || console.torrents.isEmpty()) {
        console.out.println("> Command 'prio': No torrents in list (try 'show torrents' first).");
        return;
    }

    try {
        tnumber = Integer.parseInt(tnumstr);
        if ((tnumber == 0) || (tnumber > console.torrents.size())) {
            console.out.println("> Command 'prio': Torrent #" + tnumber + " unknown.");
            return;
        }

        dm = (DownloadManager) console.torrents.get(tnumber - 1);
        files = dm.getDiskManagerFileInfo();
    } catch (Exception e) {
        e.printStackTrace();
        console.out.println("> Command 'prio': Torrent # '" + tnumstr + "' unknown.");
        return;
    }

    if (args.isEmpty()) {
        console.out.println("> Command 'prio': missing parameter for new priority");
        return;
    } else {
        newpriostr = (String) args.remove(0);
    }

    if (newpriostr.equalsIgnoreCase("normal")) {
        newprio = NORMAL;
    } else if (newpriostr.equalsIgnoreCase("high")) {
        newprio = HIGH;
    } else if (newpriostr.equalsIgnoreCase("dnd")) {
        newprio = DONOTDOWNLOAD;
    } else if (newpriostr.equalsIgnoreCase("del")) {
        newprio = DELETE;
    } else {
        console.out.println("> Command 'prio': unknown priority " + newpriostr);
        return;
    }

    if (fnumstr.equalsIgnoreCase("all")) {
        sections = new String[1];
        sections[0] = "1-" + files.length;
    } else
        sections = fnumstr.split(",");

    fs = new LinkedList();
    fe = new LinkedList();

    int dash, start, end;
    for (int i = 0; i < sections.length; i++) {
        try {
            if ((dash = sections[i].indexOf('-')) != -1) {
                start = Integer.parseInt(sections[i].substring(0, dash));
                end = Integer.parseInt(sections[i].substring(dash + 1));
            } else
                start = end = Integer.parseInt(sections[i]);
            if ((start == 0) || (end > files.length)) {
                console.out.println("> Command 'prio': Invalid file range " + sections[i]);
                return;
            }
            if (start > end) {
                console.out.println("> Command 'prio': Invalid file range '" + sections[i] + "'");
            }

            // -1 compensates for 0-based offsets
            fs.add(new Integer(start - 1));
            fe.add(new Integer(end - 1));
        } catch (Exception e) {
            console.out.println("> Command 'prio': File # '" + sections[i] + "' unknown.");
            return;
        }
    }

    //      console.out.println("DM was " + dm.getState());
    if ((newprio == DELETE) && (dm.getState() != DownloadManager.STATE_STOPPED)) {
        try {
            dm.stopIt(DownloadManager.STATE_STOPPED, false, false);
        } catch (Exception e) {
            console.out.println("Failed to stop torrent " + tnumber);
            return;
        }
    }

    //      console.out.println("DM is " + dm.getState());
    int nummod = 0;
    while (fs.size() > 0) {
        start = ((Integer) fs.removeFirst()).intValue();
        end = ((Integer) fe.removeFirst()).intValue();
        for (int i = start; i <= end; i++) {
            nummod++;
            // DEBUG
            //            console.out.println("Setting priority for file " + i + " to " + newprio);
            if (newprio == NORMAL) {
                files[i].setPriority(0);
                files[i].setSkipped(false);
            } else if (newprio == HIGH) {
                files[i].setPriority(1);
                files[i].setSkipped(false);
            } else if (newprio == DONOTDOWNLOAD) {
                files[i].setPriority(0);
                files[i].setSkipped(true);
            } else if (newprio == DELETE) {
                int st = files[i].getStorageType();
                int target_st = -1;
                if (st == DiskManagerFileInfo.ST_LINEAR) {
                    target_st = DiskManagerFileInfo.ST_COMPACT;
                } else if (st == DiskManagerFileInfo.ST_REORDER) {
                    target_st = DiskManagerFileInfo.ST_REORDER_COMPACT;
                }
                if (target_st != -1 && files[i].setStorageType(target_st)) {
                    files[i].setPriority(0);
                    files[i].setSkipped(true);
                } else {
                    console.out.println("> Command 'prio': Failed to delete file " + (i + 1));
                    nummod--;
                }
            }
        }
    }
    if ((newprio == DELETE) && (dm.getState() == DownloadManager.STATE_STOPPED)) {
        try {
            dm.stopIt(DownloadManager.STATE_QUEUED, false, false);
        } catch (Exception e) {
            console.out.println("Failed to restart torrent " + tnumber);
            return;
        }
    }

    //      console.out.println("DM is again " + dm.getState());

    console.out.println(nummod + " file(s) priority set to " + priostr[newprio - 1]);
}

From source file:elh.eus.absa.Features.java

/**
 *  Extract n-grams up to a certain length from a CoNLL tabulated format corpus.
 *
 * @param int length : which 'n' to use for 'n-grams'
 * @param string type (wf|lemma|pos): what type of ngrams we want to extract.
 * @param boolean save : save ngrams to file or not.
 * @return int success: return 1 if the process ended correctly
 */
private int extractNgramsTAB(int length, String type, List<String> discardPos, boolean save) {
    //System.err.println("ngram extraction Tab: _"+length+"_"+type);
    if (length == 0) {
        return 0;
    }

    for (String sent : corpus.getSentences().keySet()) {
        //System.err.println("ngram extraction, corpus sentences: "+corpus.getSentences().get(sent));           
        String[] tokens = corpus.getSentences().get(sent).split("\n");
        LinkedList<String> ngrams = new LinkedList<String>();
        for (String row : tokens) {
            String ngram = "";
            String[] fields = row.split("\t");
            String pos = "";
            switch (type) {
            case "wf":
                ngram = fields[0];
                break;
            case "lemma":
                if (fields.length > 1) {
                    ngram = fields[1];
                }
                if (fields.length > 2) {
                    pos = fields[2];
                }
                break;
            case "pos":
                if (fields.length > 2) {
                    ngram = fields[2];
                    switch (ngram.length()) {
                    case 0:
                        ngram = "-";
                        break;
                    case 1:
                        ngram = ngram.substring(0, 1);
                        break;
                    default:
                        ngram = ngram.substring(0, 2);
                        break;
                    }
                }
            }

            //if there is a blank line we assume the sentence has ended and we empty and re-initialize the n-gram list
            if (ngram.equals("")) {
                //empty n-gram list and add remaining n-grams to the feature list
                while (!ngrams.isEmpty()) {
                    String ng = featureFromArray(ngrams, type);
                    addNgram(type, ng);
                    ngrams.removeFirst();
                }
                continue;
            }

            if (ngrams.size() >= length) {
                ngrams.removeFirst();
            }

            //if no alphanumeric char is present discard the element as invalid ngram. Or if it has a PoS tag that
            //should be discarded
            String lCurrent = ngram;
            if ((!discardPos.contains(pos)) && (!ngram.matches("^[^\\p{L}\\p{M}\\p{Nd}\\p{InEmoticons}]+$"))
                    && (lCurrent.length() > 1)) {
                //standardize numeric values to NUMNUM lemma value
                //ngram.replaceFirst("^[0-9]$", "NUMNUM");
                if (!type.equalsIgnoreCase("pos")) {
                    ngrams.add(normalize(ngram, params.getProperty("normalization", "none")));
                } else {
                    ngrams.add(ngram);
                }
            }
            //certain punctuation marks are allowed as lemmas
            else if ((lCurrent.length() < 2) && (lCurrent.matches("[,;.?!]"))) {
                ngrams.add(lCurrent);
            }

            // add ngrams to the feature list
            for (int i = 0; i < ngrams.size(); i++) {
                String ng = featureFromArray(ngrams.subList(0, i + 1), type);
                addNgram(type, ng);
            }
        }
        //empty ngram list and add remaining ngrams to the feature list
        while (!ngrams.isEmpty()) {
            String ng = featureFromArray(ngrams, type);
            addNgram(type, ng);
            ngrams.removeFirst();
        }
    }
    return 1;
}
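
extractNgramsTAB keeps at most 'length' tokens in the ngrams list: once the list is full, removeFirst() slides the window forward before the next token is appended, and the remaining tokens are flushed the same way at sentence end. Below is a self-contained sketch of that bounded sliding window with plain tokens and none of the feature bookkeeping.

import java.util.LinkedList;

public class SlidingWindowSketch {
    public static void main(String[] args) {
        int length = 3; // maximum n-gram length
        String[] tokens = {"the", "quick", "brown", "fox", "jumps"};

        LinkedList<String> window = new LinkedList<String>();
        for (String token : tokens) {
            if (window.size() >= length) {
                window.removeFirst(); // evict the oldest token so the window never exceeds 'length'
            }
            window.add(token);
            System.out.println(window); // the prefixes of this window are the n-grams added at each step
        }
    }
}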

From source file:elh.eus.absa.Features.java

/**
 *  Extract n-grams up to a certain length from a CoNLL tabulated format string.
 *
 * @param String input : input tagged CoNLL string
 * @param int length : which 'n' to use for 'n-grams'
 * @param string type (wf|lemma|pos): what type of ngrams we want to extract.
 * @param boolean save : save ngrams to file or not.
 * @return int success: return 1 if the process ended correctly
 */
private int extractNgramsTABString(InputStream input, int length, String type, List<String> discardPos,
        boolean save) {
    //System.err.println("ngram extraction Tab: _"+length+"_"+type);
    if (length == 0) {
        return 0;
    }

    //System.err.println("ngram extraction, corpus sentences: "+corpus.getSentences().get(sent));                 
    //String[] tokens = input.split("\n");
    BufferedReader reader = new BufferedReader(new InputStreamReader(input));
    LinkedList<String> ngrams = new LinkedList<String>();
    String line;
    try {
        while ((line = reader.readLine()) != null) {
            String ngram = "";
            String[] fields = line.split("\\s");
            String pos = "";
            switch (type) {
            case "wf":
                ngram = fields[0];
                break;
            case "lemma":
                if (fields.length > 1) {
                    ngram = fields[1];
                }
                if (fields.length > 2) {
                    pos = fields[2];
                }
                break;
            case "pos":
                if (fields.length > 2) {
                    ngram = fields[2];
                    switch (ngram.length()) {
                    case 0:
                        ngram = "-";
                        break;
                    case 1:
                        ngram = ngram.substring(0, 1);
                        break;
                    default:
                        ngram = ngram.substring(0, 2);
                        break;
                    }
                }
            }

            //if there is a blank line we assume the sentence has ended and we empty and re-initialize the n-gram list
            if (ngram.equals("")) {
                //empty n-gram list and add remaining n-grams to the feature list
                while (!ngrams.isEmpty()) {
                    String ng = featureFromArray(ngrams, type);
                    addNgram(type, ng);
                    ngrams.removeFirst();
                }
                continue;
            }

            if (ngrams.size() >= length) {
                ngrams.removeFirst();
            }

            //if no alphanumeric char is present discard the element as invalid ngram. Or if it has a PoS tag that
            //should be discarded
            String lCurrent = ngram;
            if ((!discardPos.contains(pos)) && (!ngram.matches("^[^\\p{L}\\p{M}\\p{Nd}\\p{InEmoticons}]+$"))
                    && (lCurrent.length() > 1)) {
                //standardize numeric values to NUMNUM lemma value
                //ngram.replaceFirst("^[0-9]$", "NUMNUM");
                if (!type.equalsIgnoreCase("pos")) {
                    ngrams.add(normalize(ngram, params.getProperty("normalization", "none")));
                } else {
                    ngrams.add(ngram);
                }
            }
            //certain punctuation marks are allowed as lemmas
            else if ((lCurrent.length() < 2) && (lCurrent.matches("[,;.?!]"))) {
                ngrams.add(lCurrent);
            }

            // add ngrams to the feature list
            for (int i = 0; i < ngrams.size(); i++) {
                String ng = featureFromArray(ngrams.subList(0, i + 1), type);
                addNgram(type, ng);
            }
        }
    } catch (IOException e) {
        System.err.println("EliXa::Features::extractNgramsTABString - WARNING: Error reading tagged file, "
                + "ngram extraction may be only partial\n");
    }

    //empty ngram list and add remaining ngrams to the feature list
    while (!ngrams.isEmpty()) {
        String ng = featureFromArray(ngrams, type);
        addNgram(type, ng);
        ngrams.removeFirst();
    }

    return 1;
}