Example usage for java.util Stack pop

List of usage examples for java.util Stack pop

Introduction

On this page you can find example usages of java.util Stack pop.

Prototype

public synchronized E pop() 

Document

Removes the object at the top of this stack and returns that object as the value of this function.
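
Before the project excerpts, here is a minimal, self-contained sketch (not drawn from any of the projects below; the class name StackPopExample is purely illustrative) showing the basic pop() contract: the top element is removed and returned, and calling pop() on an empty stack throws EmptyStackException.

import java.util.EmptyStackException;
import java.util.Stack;

public class StackPopExample {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<>();
        stack.push("first");
        stack.push("second");

        // pop() removes and returns the most recently pushed element
        String top = stack.pop(); // "second"
        System.out.println(top);

        // drain the remaining elements in LIFO order
        while (!stack.isEmpty()) {
            System.out.println(stack.pop()); // "first"
        }

        // pop() on an empty stack throws EmptyStackException
        try {
            stack.pop();
        } catch (EmptyStackException e) {
            System.out.println("stack is empty");
        }
    }
}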

Usage

From source file:com.anite.zebra.hivemind.om.state.ZebraProcessInstance.java

/**
 * Looks for the first list of tasks that come from the child(ren) of this
 * process instance. This is used for finding the next screen. We don't do
 * this exhaustively as it could be very large. The first is good enough for
 * determining the next screen.
 */
@Transient
public List<ZebraTaskInstance> getFirstTasksFromAChildProcess() throws NestableException {

    Stack<ZebraProcessInstance> checkList = new Stack<ZebraProcessInstance>();
    checkList.push(this);
    while (!checkList.isEmpty()) {
        try {
            ZebraProcessInstance currentProcess = checkList.pop();
            List childProcesses = currentProcess.getRunningChildProcesses();
            for (Iterator it = childProcesses.iterator(); it.hasNext();) {
                ZebraProcessInstance child = (ZebraProcessInstance) it.next();
                List<ZebraTaskInstance> allTasks = child.getUsersTasks();
                if (!allTasks.isEmpty()) {
                    return allTasks;
                }
                checkList.push(child);
            }
        } catch (Exception e) {
            String emsg = "Failed to retrieve child processes";
            log.error(emsg, e);
            throw new NestableException(emsg, e);
        }
    }
    return new ArrayList<ZebraTaskInstance>();
}

From source file:org.pgptool.gui.encryption.implpgp.KeyFilesOperationsPgpImpl.java

@Override
public void exportPrivateKey(Key key, String targetFilePathname) {
    Preconditions.checkArgument(key != null && key.getKeyData() != null && key.getKeyInfo() != null,
            "Key must be providedand fully described");
    KeyDataPgp keyDataPgp = KeyDataPgp.get(key);
    Preconditions.checkArgument(keyDataPgp.getSecretKeyRing() != null, "KeyPair key wasn't provided");
    Preconditions.checkArgument(StringUtils.hasText(targetFilePathname), "targetFilePathname must be provided");
    Stack<OutputStream> os = new Stack<>();
    try {
        os.push(new FileOutputStream(targetFilePathname));
        if ("asc".equalsIgnoreCase(FilenameUtils.getExtension(targetFilePathname))) {
            os.push(new ArmoredOutputStream(os.peek()));
        }
        keyDataPgp.getSecretKeyRing().encode(os.peek());
        if (keyDataPgp.getPublicKeyRing() != null) {
            keyDataPgp.getPublicKeyRing().encode(os.peek());
        }
    } catch (Throwable t) {
        throw new RuntimeException(
                "Failed to export private key " + key.getKeyInfo().getUser() + " to " + targetFilePathname, t);
    } finally {
        while (!os.isEmpty()) {
            IoStreamUtils.safeClose(os.pop());
        }
    }
}

From source file:com.haulmont.cuba.core.app.EntityDiffManager.java

/**
 * Generate class difference for selected not null object
 *
 * @param diffObject   Object
 * @param firstValue   First value
 * @param secondValue  Second value
 * @param viewProperty View property
 * @param metaProperty Meta property
 * @param diffBranch   Diff branch
 * @return Property difference
 */
protected EntityPropertyDiff generateClassDiffFor(Object diffObject, @Nullable Object firstValue,
        @Nullable Object secondValue, ViewProperty viewProperty, MetaProperty metaProperty,
        Stack<Object> diffBranch) {
    // link
    boolean isLinkChange = !Objects.equals(firstValue, secondValue);
    isLinkChange = !(diffObject instanceof EmbeddableEntity) && isLinkChange;

    EntityClassPropertyDiff classPropertyDiff = new EntityClassPropertyDiff(firstValue, secondValue,
            metaProperty, isLinkChange);

    boolean isInternalChange = false;
    diffBranch.push(diffObject);

    List<EntityPropertyDiff> propertyDiffs = getPropertyDiffs(viewProperty.getView(), (Entity) firstValue,
            (Entity) secondValue, diffBranch);

    diffBranch.pop();

    if (!propertyDiffs.isEmpty()) {
        isInternalChange = true;
        classPropertyDiff.setPropertyDiffs(propertyDiffs);
    }

    if (isInternalChange || isLinkChange)
        return classPropertyDiff;
    else
        return null;
}

From source file:org.apache.flink.cep.nfa.sharedbuffer.SharedBuffer.java

/**
 * Returns all elements from the previous relation starting at the given entry.
 *
 * @param nodeId  id of the starting entry
 * @param version Version of the previous relation which shall be extracted
 * @return Collection of previous relations starting with the given value
 * @throws Exception Thrown if the system cannot access the state.
 */
public List<Map<String, List<EventId>>> extractPatterns(final NodeId nodeId, final DeweyNumber version)
        throws Exception {

    List<Map<String, List<EventId>>> result = new ArrayList<>();

    // stack to remember the current extraction states
    Stack<ExtractionState> extractionStates = new Stack<>();

    // get the starting shared buffer entry for the previous relation
    Lockable<SharedBufferNode> entryLock = entries.get(nodeId);

    if (entryLock != null) {
        SharedBufferNode entry = entryLock.getElement();
        extractionStates.add(new ExtractionState(Tuple2.of(nodeId, entry), version, new Stack<>()));

        // use a depth first search to reconstruct the previous relations
        while (!extractionStates.isEmpty()) {
            final ExtractionState extractionState = extractionStates.pop();
            // current path of the depth first search
            final Stack<Tuple2<NodeId, SharedBufferNode>> currentPath = extractionState.getPath();
            final Tuple2<NodeId, SharedBufferNode> currentEntry = extractionState.getEntry();

            // termination criterion
            if (currentEntry == null) {
                final Map<String, List<EventId>> completePath = new LinkedHashMap<>();

                while (!currentPath.isEmpty()) {
                    final NodeId currentPathEntry = currentPath.pop().f0;

                    String page = currentPathEntry.getPageName();
                    List<EventId> values = completePath.computeIfAbsent(page, k -> new ArrayList<>());
                    values.add(currentPathEntry.getEventId());
                }
                result.add(completePath);
            } else {

                // append state to the path
                currentPath.push(currentEntry);

                boolean firstMatch = true;
                for (SharedBufferEdge edge : currentEntry.f1.getEdges()) {
                    // we can only proceed if the current version is compatible to the version
                    // of this previous relation
                    final DeweyNumber currentVersion = extractionState.getVersion();
                    if (currentVersion.isCompatibleWith(edge.getDeweyNumber())) {
                        final NodeId target = edge.getTarget();
                        Stack<Tuple2<NodeId, SharedBufferNode>> newPath;

                        if (firstMatch) {
                            // for the first match we don't have to copy the current path
                            newPath = currentPath;
                            firstMatch = false;
                        } else {
                            newPath = new Stack<>();
                            newPath.addAll(currentPath);
                        }

                        extractionStates.push(new ExtractionState(
                                target != null ? Tuple2.of(target, entries.get(target).getElement()) : null,
                                edge.getDeweyNumber(), newPath));
                    }
                }
            }

        }
    }
    return result;
}

From source file:com.anite.antelope.zebra.om.AntelopeProcessInstance.java

/**
 * Looks for the first list of tasks that come from the child(ren) of this
 * process instance. This is used for finding the next screen. We don't do
 * this exhaustively as it could be very large. The first is good enough for
 * determining the next screen.
 */
public List getFirstTasksFromAChildProcess() throws NestableException {

    Stack checkList = new Stack();
    checkList.push(this);
    while (!checkList.isEmpty()) {
        try {
            AntelopeProcessInstance currentProcess = (AntelopeProcessInstance) checkList.pop();
            List childProcesses = currentProcess.getRunningChildProcesses();
            for (Iterator it = childProcesses.iterator(); it.hasNext();) {
                AntelopeProcessInstance child = (AntelopeProcessInstance) it.next();
                List allTasks = child.getUsersTasks();
                if (!allTasks.isEmpty()) {
                    return allTasks;
                }
                checkList.push(child);
            }
        } catch (Exception e) {
            String emsg = "Failed to retrieve child processes";
            log.error(emsg, e);
            throw new NestableException(emsg, e);
        }
    }
    return new ArrayList();
}

From source file:net.riezebos.thoth.commands.CommentCommand.java

protected Section parseSections(String body, String contextName, String fileName)
        throws ContentManagerException {
    CommentManager commentManager = getThothEnvironment().getCommentManager();
    Pattern sectionStartPattern = Pattern.compile(DETAILSTART + "(.*?)" + MARKER);
    Pattern sectionEndPattern = Pattern.compile(DETAILEND);

    Stack<Section> sections = new Stack<>();
    Section main = new Section(fileName);
    main.setComments(commentManager.getComments(contextName, fileName, null));
    sections.push(main);

    for (String line : body.split("\n")) {
        Matcher matcher = sectionStartPattern.matcher(line);
        if (matcher.find()) {
            String path = matcher.group(1);
            Section subSection = new Section(path);
            List<Comment> comments = commentManager.getComments(contextName, path, null);
            subSection.setComments(comments);
            sections.peek().addSection(subSection);
            sections.push(subSection);
        } else if (sectionEndPattern.matcher(line).find()) {
            sections.pop();
        } else
            sections.peek().addSection(line);
    }

    return main;
}

From source file:de.codesourcery.jasm16.compiler.Main.java

private int run(String[] args) throws Exception {
    final List<ICompilationUnit> units = new ArrayList<ICompilationUnit>();

    final Stack<String> arguments = new Stack<String>();
    for (String arg : args) {
        arguments.push(arg);
    }
    Collections.reverse(arguments);

    while (!arguments.isEmpty()) {
        final String arg = arguments.peek();
        if (arg.startsWith("-") || arg.startsWith("--")) {
            try {
                handleCommandlineOption(arg, arguments);
            } catch (NoSuchElementException e) {
                printError("Invalid command line, option " + arg + " lacks argument.");
                return 1;
            }
        } else {
            units.add(createCompilationUnit(arguments.pop()));
        }
    }

    if (verboseOutput) {
        printVersionInfo();
    }

    if (units.isEmpty()) {
        printError("No input files.");
        return 1;
    }

    setupCompiler(units);

    final ICompilationListener listener;
    if (printDebugStats || verboseOutput) {
        listener = new DebugCompilationListener(printDebugStats);
    } else {
        listener = new CompilationListener();
    }

    if (printSourceCode) {
        compiler.insertCompilerPhaseAfter(new CompilerPhase("format-code") {
            @Override
            protected void run(ICompilationUnit unit, ICompilationContext context) throws IOException {
                if (unit.getAST() != null) {
                    ASTUtils.visitInOrder(unit.getAST(), new FormattingVisitor(context));
                }
            };

        }, ICompilerPhase.PHASE_GENERATE_CODE);
    }

    // invoke compiler
    compiler.compile(units, listener);

    boolean hasErrors = false;
    for (ICompilationUnit unit : units) {
        if (unit.hasErrors()) {
            Misc.printCompilationErrors(unit, Misc.readSource(unit), printStackTraces);
            hasErrors = true;
        }
    }

    if (dumpObjectCode) {
        dumpObjectCode();
    }
    return hasErrors ? 1 : 0;
}

From source file:org.apache.tajo.engine.planner.rewrite.FilterPushDownRule.java

@Override
public LogicalNode visitWindowAgg(FilterPushDownContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
        WindowAggNode winAggNode, Stack<LogicalNode> stack) throws PlanningException {
    stack.push(winAggNode);
    super.visitWindowAgg(context, plan, block, winAggNode, stack);
    stack.pop();
    return winAggNode;
}

From source file:org.apache.flink.cep.nfa.SharedBuffer.java

private void internalRemove(final SharedBufferEntry<K, V> entry) {
    Stack<SharedBufferEntry<K, V>> entriesToRemove = new Stack<>();
    entriesToRemove.add(entry);

    while (!entriesToRemove.isEmpty()) {
        SharedBufferEntry<K, V> currentEntry = entriesToRemove.pop();

        if (currentEntry.getReferenceCounter() == 0) {
            currentEntry.remove();

            for (SharedBufferEdge<K, V> edge : currentEntry.getEdges()) {
                if (edge.getTarget() != null) {
                    edge.getTarget().decreaseReferenceCounter();
                    entriesToRemove.push(edge.getTarget());
                }
            }
        }
    }
}

From source file:org.apache.flink.cep.nfa.sharedbuffer.SharedBufferAccessor.java

/**
 * Returns all elements from the previous relation starting at the given entry.
 *
 * @param nodeId  id of the starting entry
 * @param version Version of the previous relation which shall be extracted
 * @return Collection of previous relations starting with the given value
 */
public List<Map<String, List<EventId>>> extractPatterns(final NodeId nodeId, final DeweyNumber version) {

    List<Map<String, List<EventId>>> result = new ArrayList<>();

    // stack to remember the current extraction states
    Stack<SharedBufferAccessor.ExtractionState> extractionStates = new Stack<>();

    // get the starting shared buffer entry for the previous relation
    Lockable<SharedBufferNode> entryLock = sharedBuffer.getEntry(nodeId);

    if (entryLock != null) {
        SharedBufferNode entry = entryLock.getElement();
        extractionStates.add(
                new SharedBufferAccessor.ExtractionState(Tuple2.of(nodeId, entry), version, new Stack<>()));

        // use a depth first search to reconstruct the previous relations
        while (!extractionStates.isEmpty()) {
            final SharedBufferAccessor.ExtractionState extractionState = extractionStates.pop();
            // current path of the depth first search
            final Stack<Tuple2<NodeId, SharedBufferNode>> currentPath = extractionState.getPath();
            final Tuple2<NodeId, SharedBufferNode> currentEntry = extractionState.getEntry();

            // termination criterion
            if (currentEntry == null) {
                final Map<String, List<EventId>> completePath = new LinkedHashMap<>();

                while (!currentPath.isEmpty()) {
                    final NodeId currentPathEntry = currentPath.pop().f0;

                    String page = currentPathEntry.getPageName();
                    List<EventId> values = completePath.computeIfAbsent(page, k -> new ArrayList<>());
                    values.add(currentPathEntry.getEventId());
                }
                result.add(completePath);
            } else {

                // append state to the path
                currentPath.push(currentEntry);

                boolean firstMatch = true;
                for (SharedBufferEdge edge : currentEntry.f1.getEdges()) {
                    // we can only proceed if the current version is compatible to the version
                    // of this previous relation
                    final DeweyNumber currentVersion = extractionState.getVersion();
                    if (currentVersion.isCompatibleWith(edge.getDeweyNumber())) {
                        final NodeId target = edge.getTarget();
                        Stack<Tuple2<NodeId, SharedBufferNode>> newPath;

                        if (firstMatch) {
                            // for the first match we don't have to copy the current path
                            newPath = currentPath;
                            firstMatch = false;
                        } else {
                            newPath = new Stack<>();
                            newPath.addAll(currentPath);
                        }

                        extractionStates.push(new SharedBufferAccessor.ExtractionState(
                                target != null ? Tuple2.of(target, sharedBuffer.getEntry(target).getElement())
                                        : null,
                                edge.getDeweyNumber(), newPath));
                    }
                }
            }

        }
    }
    return result;
}