Example usage for java.util Stack push

List of usage examples for java.util Stack push

Introduction

On this page you can find example usages of java.util Stack push.

Prototype

public E push(E item) 

Document

Pushes an item onto the top of this stack.
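
push returns the item that was pushed, so the result can be stored or the call chained. A minimal, self-contained sketch (the class name and string values are illustrative):

import java.util.Stack;

public class StackPushExample {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<>();

        // push returns the item that was pushed
        String pushed = stack.push("first");
        stack.push("second");

        System.out.println(pushed);       // first
        System.out.println(stack.peek()); // second (top of the stack, not removed)
        System.out.println(stack.pop());  // second (top of the stack, removed)
        System.out.println(stack.size()); // 1
    }
}

Since Stack extends Vector, push is synchronized; for new single-threaded code the JDK documentation recommends a Deque implementation such as ArrayDeque instead.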

Usage

From source file:org.apache.flink.cep.nfa.sharedbuffer.SharedBufferAccessor.java

/**
 * Returns all elements from the previous relation starting at the given entry.
 *
 * @param nodeId  id of the starting entry
 * @param version Version of the previous relation which shall be extracted
 * @return Collection of previous relations starting with the given value
 */
public List<Map<String, List<EventId>>> extractPatterns(final NodeId nodeId, final DeweyNumber version) {

    List<Map<String, List<EventId>>> result = new ArrayList<>();

    // stack to remember the current extraction states
    Stack<SharedBufferAccessor.ExtractionState> extractionStates = new Stack<>();

    // get the starting shared buffer entry for the previous relation
    Lockable<SharedBufferNode> entryLock = sharedBuffer.getEntry(nodeId);

    if (entryLock != null) {
        SharedBufferNode entry = entryLock.getElement();
        extractionStates.add(
                new SharedBufferAccessor.ExtractionState(Tuple2.of(nodeId, entry), version, new Stack<>()));

        // use a depth first search to reconstruct the previous relations
        while (!extractionStates.isEmpty()) {
            final SharedBufferAccessor.ExtractionState extractionState = extractionStates.pop();
            // current path of the depth first search
            final Stack<Tuple2<NodeId, SharedBufferNode>> currentPath = extractionState.getPath();
            final Tuple2<NodeId, SharedBufferNode> currentEntry = extractionState.getEntry();

            // termination criterion
            if (currentEntry == null) {
                final Map<String, List<EventId>> completePath = new LinkedHashMap<>();

                while (!currentPath.isEmpty()) {
                    final NodeId currentPathEntry = currentPath.pop().f0;

                    String page = currentPathEntry.getPageName();
                    List<EventId> values = completePath.computeIfAbsent(page, k -> new ArrayList<>());
                    values.add(currentPathEntry.getEventId());
                }
                result.add(completePath);
            } else {

                // append state to the path
                currentPath.push(currentEntry);

                boolean firstMatch = true;
                for (SharedBufferEdge edge : currentEntry.f1.getEdges()) {
                    // we can only proceed if the current version is compatible to the version
                    // of this previous relation
                    final DeweyNumber currentVersion = extractionState.getVersion();
                    if (currentVersion.isCompatibleWith(edge.getDeweyNumber())) {
                        final NodeId target = edge.getTarget();
                        Stack<Tuple2<NodeId, SharedBufferNode>> newPath;

                        if (firstMatch) {
                            // for the first match we don't have to copy the current path
                            newPath = currentPath;
                            firstMatch = false;
                        } else {
                            newPath = new Stack<>();
                            newPath.addAll(currentPath);
                        }

                        extractionStates.push(new SharedBufferAccessor.ExtractionState(
                                target != null ? Tuple2.of(target, sharedBuffer.getEntry(target).getElement())
                                        : null,
                                edge.getDeweyNumber(), newPath));
                    }
                }
            }

        }
    }
    return result;
}

From source file:com.espertech.esper.rowregex.EventRowRegexNFAView.java

private void print(List<RegexNFAState> states, PrintWriter writer, int indent,
        Stack<RegexNFAState> currentStack) {

    for (RegexNFAState state : states) {
        indent(writer, indent);
        if (currentStack.contains(state)) {
            writer.println("(self)");
        } else {
            writer.println(printState(state));

            currentStack.push(state);
            print(state.getNextStates(), writer, indent + 4, currentStack);
            currentStack.pop();
        }
    }
}

From source file:com.bluexml.xforms.generator.forms.Renderable.java

/**
 * Recursive render.
 * 
 * @param parentPath
 *            the parent path
 * @param parents
 *            the parents
 * @param renderedParents
 *            the rendered parents
 * @param isInIMultRepeater
 *            TODO
 * @return the rendered
 */
private Rendered recursiveRender(String parentPath, Stack<Renderable> parents, Stack<Rendered> renderedParents,
        boolean isInIMultRepeater) {
    boolean previous = XFormsGenerator.isRenderingWorkflow();

    // retrieve path for this renderable in this context
    Path path = getPath(parentPath, parents, renderedParents);
    // translate Path object into an absolute path
    String sPath = null;
    if (path.pathType == PathType.absolutePath) {
        sPath = path.path;
    } else {
        sPath = parentPath + path.path;
    }

    // real render
    Rendered rendered = render(sPath, parents, renderedParents, isInIMultRepeater);

    // recursive render
    parents.push(this);
    boolean childIsInIMultiple = isInIMultRepeater || isInlineMultipleRepeater(); // #1310

    if (this instanceof RenderableFormContainer) {
        XFormsGenerator.setRenderingWorkflow(isInWorkflowForm());
    }
    renderedParents.push(rendered);
    for (Renderable child : children) {
        if (child == null) {
            throw new RuntimeException("A null child was found. You probably forgot to reference a form.");
        }
        if (child.shouldRender(parents)) {
            Rendered renderedChild = child.recursiveRender(sPath, parents, renderedParents, childIsInIMultiple);
            rendered.addRendered(renderedChild, child);
        }
    }
    //
    if (this instanceof RenderableFormContainer) {
        XFormsGenerator.setRenderingWorkflow(previous);
    }

    renderedParents.pop();
    rendered.renderEnd();
    renderEnd(rendered);

    parents.pop();

    return rendered;
}

From source file:org.apache.tajo.engine.planner.rewrite.ProjectionPushDownRule.java

@Override
public LogicalNode visitTableSubQuery(Context upperContext, LogicalPlan plan, LogicalPlan.QueryBlock block,
        TableSubQueryNode node, Stack<LogicalNode> stack) throws PlanningException {
    Context childContext = new Context(plan, upperContext.requiredSet);
    stack.push(node);
    LogicalNode child = super.visitTableSubQuery(childContext, plan, block, node, stack);
    node.setSubQuery(child);
    stack.pop();

    Target[] targets;
    if (node.hasTargets()) {
        targets = node.getTargets();
    } else {
        targets = PlannerUtil.schemaToTargets(node.getOutSchema());
    }

    LinkedHashSet<Target> projectedTargets = Sets.newLinkedHashSet();
    for (Iterator<Target> it = getFilteredTarget(targets, upperContext.requiredSet); it.hasNext();) {
        Target target = it.next();
        upperContext.addExpr(target);
    }

    for (Iterator<Target> it = upperContext.targetListMgr.getFilteredTargets(upperContext.requiredSet); it
            .hasNext();) {
        Target target = it.next();

        if (LogicalPlanner.checkIfBeEvaluatedAtRelation(block, target.getEvalTree(), node)) {
            projectedTargets.add(target);
            upperContext.targetListMgr.markAsEvaluated(target);
        }
    }

    node.setTargets(projectedTargets.toArray(new Target[projectedTargets.size()]));
    LogicalPlanner.verifyProjectedFields(block, node);
    return node;
}

From source file:org.apache.atlas.repository.graph.GraphHelper.java

/**
 * Get the GUIDs and vertices for all composite entities owned/contained by the specified root entity AtlasVertex.
 * The graph is traversed from the root entity through to the leaf nodes of the containment graph.
 *
 * @param entityVertex the root entity vertex
 * @return set of VertexInfo for all composite entities
 * @throws AtlasException
 */
public Set<VertexInfo> getCompositeVertices(AtlasVertex entityVertex) throws AtlasException {
    Set<VertexInfo> result = new HashSet<>();
    Stack<AtlasVertex> vertices = new Stack<>();
    vertices.push(entityVertex);
    while (vertices.size() > 0) {
        AtlasVertex vertex = vertices.pop();
        String typeName = GraphHelper.getTypeName(vertex);
        String guid = GraphHelper.getGuid(vertex);
        Id.EntityState state = GraphHelper.getState(vertex);
        if (state == Id.EntityState.DELETED) {
            //If the reference vertex is marked for deletion, skip it
            continue;
        }
        result.add(new VertexInfo(guid, vertex, typeName));
        ClassType classType = typeSystem.getDataType(ClassType.class, typeName);
        for (AttributeInfo attributeInfo : classType.fieldMapping().fields.values()) {
            if (!attributeInfo.isComposite) {
                continue;
            }
            String edgeLabel = GraphHelper.getEdgeLabel(classType, attributeInfo);
            switch (attributeInfo.dataType().getTypeCategory()) {
            case CLASS:
                AtlasEdge edge = getEdgeForLabel(vertex, edgeLabel);
                if (edge != null && GraphHelper.getState(edge) == Id.EntityState.ACTIVE) {
                    AtlasVertex compositeVertex = edge.getInVertex();
                    vertices.push(compositeVertex);
                }
                break;
            case ARRAY:
                IDataType elementType = ((DataTypes.ArrayType) attributeInfo.dataType()).getElemType();
                DataTypes.TypeCategory elementTypeCategory = elementType.getTypeCategory();
                if (elementTypeCategory != TypeCategory.CLASS) {
                    continue;
                }
                Iterator<AtlasEdge> edges = getOutGoingEdgesByLabel(vertex, edgeLabel);
                if (edges != null) {
                    while (edges.hasNext()) {
                        edge = edges.next();
                        if (edge != null && GraphHelper.getState(edge) == Id.EntityState.ACTIVE) {
                            AtlasVertex compositeVertex = edge.getInVertex();
                            vertices.push(compositeVertex);
                        }
                    }
                }
                break;
            case MAP:
                DataTypes.MapType mapType = (DataTypes.MapType) attributeInfo.dataType();
                DataTypes.TypeCategory valueTypeCategory = mapType.getValueType().getTypeCategory();
                if (valueTypeCategory != TypeCategory.CLASS) {
                    continue;
                }
                String propertyName = GraphHelper.getQualifiedFieldName(classType, attributeInfo.name);
                List<String> keys = vertex.getProperty(propertyName, List.class);
                if (keys != null) {
                    for (String key : keys) {
                        String mapEdgeLabel = GraphHelper.getQualifiedNameForMapKey(edgeLabel, key);
                        edge = getEdgeForLabel(vertex, mapEdgeLabel);
                        if (edge != null && GraphHelper.getState(edge) == Id.EntityState.ACTIVE) {
                            AtlasVertex compositeVertex = edge.getInVertex();
                            vertices.push(compositeVertex);
                        }
                    }
                }
                break;
            default:
            }
        }
    }
    return result;
}

From source file:com.continusec.client.VerifiableLog.java

/**
 * Utility method for auditors that wish to audit the full content of a log, as well as the log operation.
 * This method will retrieve all entries in batch from the log, and ensure that the root hash in head can be confirmed to accurately represent the contents
 * of all of the log entries. If prev is not null, then additionally it is proven that the root hash in head is consistent with the root hash in prev.
 * @param prev a previous LogTreeHead representing the set of entries that have been previously audited. To indicate that the log has not previously been audited, pass null.
 * @param head the LogTreeHead up to which we wish to audit the log. Upon successful completion the caller should persist this for a future iteration.
 * @param auditor the caller should implement a LogAuditor, which is called sequentially for each log entry as it is encountered.
 * @param factory the factory to use for instantiating log entries. Typically this is one of {@link RawDataEntryFactory#getInstance()}, {@link JsonEntryFactory#getInstance()}, {@link RedactedJsonEntryFactory#getInstance()}.
 * @throws ContinusecException upon error
 */
public void verifyEntries(LogTreeHead prev, LogTreeHead head, VerifiableEntryFactory factory,
        LogAuditor auditor) throws ContinusecException {
    if ((prev == null) || prev.getTreeSize() < head.getTreeSize()) {
        Stack<byte[]> merkleTreeStack = new Stack<byte[]>();
        if ((prev != null) && (prev.getTreeSize() > 0)) {
            LogInclusionProof p = this.getInclusionProofByIndex(prev.getTreeSize() + 1, prev.getTreeSize());
            byte[] firstHash = null;
            for (byte[] b : p.getAuditPath()) {
                if (firstHash == null) {
                    firstHash = b;
                } else {
                    firstHash = Util.nodeMerkleTreeHash(b, firstHash);
                }
            }
            if (!(Arrays.equals(firstHash, prev.getRootHash()))) {
                throw new VerificationFailedException();
            }
            for (int i = p.getAuditPath().length - 1; i >= 0; i--) {
                merkleTreeStack.push(p.getAuditPath()[i]);
            }
        }

        int idx = (prev == null) ? 0 : prev.getTreeSize();
        try {
            for (VerifiableEntry e : this.getEntries(idx, head.getTreeSize(), factory)) {
                // do whatever content audit is desired on e
                auditor.auditLogEntry(idx, e);

                // update the merkle tree hash stack:
                merkleTreeStack.add(e.getLeafHash());
                for (int z = idx; (z & 1) == 1; z >>= 1) {
                    byte[] right = merkleTreeStack.pop();
                    byte[] left = merkleTreeStack.pop();
                    merkleTreeStack.push(Util.nodeMerkleTreeHash(left, right));
                }
                idx++;
            }
        } catch (RuntimeException e2) {
            // the getEntries() iterator throws a RuntimeException that wraps the real ContinusecException, so unwrap it here
            Throwable cause = e2.getCause();
            if (cause instanceof ContinusecException) {
                throw (ContinusecException) cause;
            } else {
                throw e2;
            }
        }

        if (idx != head.getTreeSize()) {
            throw new NotAllEntriesReturnedException();
        }

        byte[] headHash = merkleTreeStack.pop();
        while (!merkleTreeStack.empty()) {
            headHash = Util.nodeMerkleTreeHash(merkleTreeStack.pop(), headHash);
        }

        if (!(Arrays.equals(headHash, head.getRootHash()))) {
            throw new VerificationFailedException();
        }
    }
}

From source file:nl.nn.adapterframework.batch.RecordTransformer.java

public Object handleRecord(IPipeLineSession session, List parsedRecord, ParameterResolutionContext prc)
        throws Exception {
    StringBuffer output = new StringBuffer();
    Stack conditions = new Stack();

    for (Iterator outputFieldIt = outputFields.iterator(); outputFieldIt.hasNext();) {
        IOutputField outputField = (IOutputField) outputFieldIt.next();

        // if output fields are to be separated with a delimiter
        if (outputSeparator != null && output.length() > 0) {
            output.append(outputSeparator);
        }

        // if not in a condition
        if (conditions.isEmpty()) {
            IOutputField condition = outputField.appendValue(outputField, output, parsedRecord);
            if (condition != null) {
                conditions.push(condition);
            }
        }
        // in condition
        else {
            IOutputField condition = (IOutputField) conditions.pop();
            IOutputField newCondition = condition.appendValue(outputField, output, parsedRecord);
            if (newCondition != null) {
                conditions.push(condition);
                if (newCondition != condition) {
                    conditions.push(newCondition);
                }
            }
        }
    }
    if (output.length() > 0) {
        return output.toString();
    }
    return null;
}

From source file:org.apache.flink.cep.nfa.sharedbuffer.SharedBuffer.java

/**
 * Returns all elements from the previous relation starting at the given entry.
 *
 * @param nodeId  id of the starting entry
 * @param version Version of the previous relation which shall be extracted
 * @return Collection of previous relations starting with the given value
 * @throws Exception Thrown if the system cannot access the state.
 */
public List<Map<String, List<EventId>>> extractPatterns(final NodeId nodeId, final DeweyNumber version)
        throws Exception {

    List<Map<String, List<EventId>>> result = new ArrayList<>();

    // stack to remember the current extraction states
    Stack<ExtractionState> extractionStates = new Stack<>();

    // get the starting shared buffer entry for the previous relation
    Lockable<SharedBufferNode> entryLock = entries.get(nodeId);

    if (entryLock != null) {
        SharedBufferNode entry = entryLock.getElement();
        extractionStates.add(new ExtractionState(Tuple2.of(nodeId, entry), version, new Stack<>()));

        // use a depth first search to reconstruct the previous relations
        while (!extractionStates.isEmpty()) {
            final ExtractionState extractionState = extractionStates.pop();
            // current path of the depth first search
            final Stack<Tuple2<NodeId, SharedBufferNode>> currentPath = extractionState.getPath();
            final Tuple2<NodeId, SharedBufferNode> currentEntry = extractionState.getEntry();

            // termination criterion
            if (currentEntry == null) {
                final Map<String, List<EventId>> completePath = new LinkedHashMap<>();

                while (!currentPath.isEmpty()) {
                    final NodeId currentPathEntry = currentPath.pop().f0;

                    String page = currentPathEntry.getPageName();
                    List<EventId> values = completePath.computeIfAbsent(page, k -> new ArrayList<>());
                    values.add(currentPathEntry.getEventId());
                }
                result.add(completePath);
            } else {

                // append state to the path
                currentPath.push(currentEntry);

                boolean firstMatch = true;
                for (SharedBufferEdge edge : currentEntry.f1.getEdges()) {
                    // we can only proceed if the current version is compatible to the version
                    // of this previous relation
                    final DeweyNumber currentVersion = extractionState.getVersion();
                    if (currentVersion.isCompatibleWith(edge.getDeweyNumber())) {
                        final NodeId target = edge.getTarget();
                        Stack<Tuple2<NodeId, SharedBufferNode>> newPath;

                        if (firstMatch) {
                            // for the first match we don't have to copy the current path
                            newPath = currentPath;
                            firstMatch = false;
                        } else {
                            newPath = new Stack<>();
                            newPath.addAll(currentPath);
                        }

                        extractionStates.push(new ExtractionState(
                                target != null ? Tuple2.of(target, entries.get(target).getElement()) : null,
                                edge.getDeweyNumber(), newPath));
                    }
                }
            }

        }
    }
    return result;
}

From source file:net.dv8tion.jda.core.entities.impl.MessageImpl.java

@Override
public synchronized String getStrippedContent() {
    if (strippedContent == null) {
        String tmp = getContent();
        //all the formatting keys to keep track of
        String[] keys = new String[] { "*", "_", "`", "~~" };

        //find all tokens (formatting strings described above)
        TreeSet<FormatToken> tokens = new TreeSet<>((t1, t2) -> Integer.compare(t1.start, t2.start));
        for (String key : keys) {
            Matcher matcher = Pattern.compile(Pattern.quote(key)).matcher(tmp);
            while (matcher.find()) {
                tokens.add(new FormatToken(key, matcher.start()));
            }
        }

        //iterate over all tokens, find all matching pairs, and add them to the list toRemove
        Stack<FormatToken> stack = new Stack<>();
        List<FormatToken> toRemove = new ArrayList<>();
        boolean inBlock = false;
        for (FormatToken token : tokens) {
            if (stack.empty() || !stack.peek().format.equals(token.format)
                    || stack.peek().start + token.format.length() == token.start) {
                //we are at opening tag
                if (!inBlock) {
                    //we are outside of block -> handle normally
                    if (token.format.equals("`")) {
                        //block start... invalidate all previous tags
                        stack.clear();
                        inBlock = true;
                    }
                    stack.push(token);
                } else if (token.format.equals("`")) {
                    //we are inside of a block -> handle only block tag
                    stack.push(token);
                }
            } else if (!stack.empty()) {
                //we found a matching close-tag
                toRemove.add(stack.pop());
                toRemove.add(token);
                if (token.format.equals("`") && stack.empty()) {
                    //close tag closed the block
                    inBlock = false;
                }
            }
        }

        //sort tags to remove by their start-index and iteratively build the remaining string
        Collections.sort(toRemove, (t1, t2) -> Integer.compare(t1.start, t2.start));
        StringBuilder out = new StringBuilder();
        int currIndex = 0;
        for (FormatToken formatToken : toRemove) {
            if (currIndex < formatToken.start) {
                out.append(tmp.substring(currIndex, formatToken.start));
            }
            currIndex = formatToken.start + formatToken.format.length();
        }
        if (currIndex < tmp.length()) {
            out.append(tmp.substring(currIndex));
        }
        //return the stripped text, escaping all remaining formatting characters (those that did not have a matching open/close tag or were left/right of a block)
        strippedContent = out.toString().replace("*", "\\*").replace("_", "\\_").replace("~", "\\~");
    }
    return strippedContent;
}

From source file:com.madrobot.di.wizard.json.JSONDeserializer.java

/**
 * Deserialize a specific element, recursively.
 *
 * @param obj
 *            Object whose fields need to be set
 * @param jsonObject
 *            JSON Parser to read data from
 * @param stack
 *            Stack of {@link ClassInfo} - entity type under consideration
 * @throws JSONException
 *             If an exception occurs during parsing
 */
private void deserialize(Object obj, JSONObject jsonObject, Stack<Class<?>> stack) throws JSONException {

    Iterator<?> iterator = jsonObject.keys();
    Class<?> userClass = stack.peek();

    while (iterator.hasNext()) {
        Object jsonKey = iterator.next();

        if (jsonKey instanceof String) {
            String key = (String) jsonKey;
            Object jsonElement = jsonObject.get(key);

            try {

                Field field = getField(userClass, key);
                String fieldName = field.getName();
                Class<?> classType = field.getType();

                if (jsonElement instanceof JSONObject) {
                    if (!Converter.isPseudoPrimitive(classType)) {

                        String setMethodName = getSetMethodName(fieldName, classType);
                        Method setMethod = userClass.getDeclaredMethod(setMethodName, classType);

                        JSONObject fieldObject = (JSONObject) jsonElement;

                        stack.push(classType);
                        Object itemObj = classType.newInstance();
                        deserialize(itemObj, fieldObject, stack);

                        setMethod.invoke(obj, itemObj);
                    } else {
                        Log.e(TAG, "Expecting composite type for " + fieldName);
                    }
                } else if (jsonElement instanceof JSONArray) {
                    if (Converter.isCollectionType(classType)) {
                        if (field.isAnnotationPresent(ItemType.class)) {
                            ItemType itemType = field.getAnnotation(ItemType.class);
                            Class<?> itemValueType = itemType.value();
                            int size = itemType.size();

                            JSONArray fieldArrayObject = (JSONArray) jsonElement;

                            if (size == JSONDeserializer.DEFAULT_ITEM_COLLECTION_SIZE
                                    || size > fieldArrayObject.length()) {
                                size = fieldArrayObject.length();
                            }

                            for (int index = 0; index < size; index++) {
                                Object value = fieldArrayObject.get(index);
                                if (value instanceof JSONObject) {
                                    stack.push(itemValueType);
                                    Object itemObj = itemValueType.newInstance();
                                    deserialize(itemObj, (JSONObject) value, stack);

                                    String addMethodName = getAddMethodName(fieldName);
                                    Method addMethod = userClass.getDeclaredMethod(addMethodName,
                                            itemValueType);
                                    addMethod.invoke(obj, itemObj);
                                }
                            }
                        }
                    } else {
                        Log.e(TAG, "Expecting collection type for " + fieldName);
                    }
                } else if (Converter.isPseudoPrimitive(classType)) {

                    Object value = Converter.convertTo(jsonObject, key, classType, field);

                    String setMethodName = getSetMethodName(fieldName, classType);
                    Method setMethod = userClass.getDeclaredMethod(setMethodName, classType);
                    setMethod.invoke(obj, value);
                } else {
                    Log.e(TAG, "Unknown datatype");
                }

            } catch (NoSuchFieldException e) {
                Log.e(TAG, e.getMessage());
            } catch (NoSuchMethodException e) {
                Log.e(TAG, e.getMessage());
            } catch (IllegalAccessException e) {
                Log.e(TAG, e.getMessage());
            } catch (InvocationTargetException e) {
                Log.e(TAG, e.getMessage());
            } catch (InstantiationException e) {
                Log.e(TAG, e.getMessage());
            }
        }
    }
}