Example usage for java.util.Stack.empty()

List of usage examples for java.util.Stack.empty()

Introduction

This page lists example usages of java.util.Stack.empty(), collected from open-source projects.

Prototype

public boolean empty() 

Document

Tests if this stack is empty.
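
A minimal, self-contained sketch of the method in use (class and variable names here are illustrative):

import java.util.Stack;

public class StackEmptyDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        System.out.println(stack.empty()); // true: nothing pushed yet

        stack.push("a");
        stack.push("b");
        System.out.println(stack.empty()); // false

        // empty() is the usual guard before pop() or peek(), which throw
        // EmptyStackException when called on an empty stack
        while (!stack.empty()) {
            System.out.println(stack.pop()); // prints "b" then "a" (LIFO)
        }
    }
}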

Usage

From source file: org.dbpedia.spotlight.mediawiki.ModularParser.java

/**
 * There is not much difference between links and images, so they are parsed
 * in a single step
 */
private void parseImagesAndInternalLinks(SpanManager sm, List<Span> linkSpans, List<Link> links) {

    sm.manageList(linkSpans);

    int pos = -1;
    Stack<Integer> linkOpenTags = new Stack<Integer>();
    while ((pos = sm.indexOf("[[", pos + 1)) != -1) {
        linkOpenTags.push(pos);
    }

    Span lastLinkSpan = new Span(sm.length() + 1, sm.length() + 1);
    Link.type linkType = Link.type.INTERNAL;

    while (!linkOpenTags.empty()) {
        int linkStartTag = linkOpenTags.pop();
        int linkEndTag = sm.indexOf("]]", linkStartTag);
        if (linkEndTag == -1) {
            continue;
        }

        int linkOptionTag = sm.indexOf("|", linkStartTag, linkEndTag);

        int linkTextStart;
        String linkTarget;

        if (linkOptionTag != -1) {
            linkTextStart = linkOptionTag + 1;
            linkTarget = sm.substring(new Span(linkStartTag + 2, linkOptionTag).trim(sm));
        } else {
            linkTextStart = linkStartTag + 2;
            linkTarget = sm.substring(new Span(linkStartTag + 2, linkEndTag).trim(sm));
        }

        // is it a regular link?
        if (linkTarget.indexOf(lineSeparator) != -1) {
            continue;
        }
        linkTarget = encodeWikistyle(linkTarget);

        // so it is a link or an image
        List<String> parameters;

        String namespace = getLinkNameSpace(linkTarget);
        if (namespace != null) {
            if (imageIdentifers.indexOf(namespace) != -1) {
                if (linkOptionTag != -1) {
                    int temp;
                    while ((temp = sm.indexOf("|", linkTextStart, linkEndTag)) != -1) {
                        linkTextStart = temp + 1;
                    }

                    parameters = tokenize(sm, linkOptionTag + 1, linkEndTag, "|");

                    // maybe there is an external link at the end of the
                    // image description...
                    if (sm.charAt(linkEndTag + 2) == ']' && sm.indexOf("[", linkTextStart, linkEndTag) != -1) {
                        linkEndTag++;
                    }
                } else {
                    parameters = null;
                }
                linkType = Link.type.IMAGE;
            } else {
                //Link has namespace but is not image
                linkType = Link.type.UNKNOWN;
                parameters = null;
            }
        } else {
            if (linkType == Link.type.INTERNAL && lastLinkSpan.hits(new Span(linkStartTag, linkEndTag + 2))) {
                continue;
            }
            parameters = null;
            linkType = Link.type.INTERNAL;
        }

        Span posSpan = new Span(linkTextStart, linkEndTag).trim(sm);

        //Ignoring the blank links from the paragraphs
        if (linkTarget.length() > 0)
            linkSpans.add(posSpan);

        Link l = new Link(null, posSpan, linkTarget, linkType, parameters);
        //Ignoring the blank links from the paragraphs
        if (linkTarget.length() > 0) {
            links.add(l);

            if (calculateSrcSpans) {
                l.setSrcSpan(new SrcSpan(sm.getSrcPos(linkStartTag), sm.getSrcPos(linkEndTag + 2)));
            }

        }

        sm.delete(posSpan.getEnd(), linkEndTag + 2);
        sm.delete(linkStartTag, posSpan.getStart());

        // removing line separators in link text
        int lsinlink;
        while ((lsinlink = sm.indexOf(lineSeparator, posSpan)) != -1) {
            sm.replace(lsinlink, lsinlink + lineSeparator.length(), " ");
        }

        lastLinkSpan = posSpan;
    }
}
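
The stack here yields innermost-first matching: every "[[" position is pushed in document order, so popping handles the most recently opened link first. A minimal sketch of that ordering on hypothetical input (the template parser below uses the same trick for "{{"):

import java.util.Stack;

public class InnermostFirstDemo {
    public static void main(String[] args) {
        String text = "[[outer [[inner]] ]]";

        // collect every opening marker in document order
        Stack<Integer> openTags = new Stack<Integer>();
        int pos = -1;
        while ((pos = text.indexOf("[[", pos + 1)) != -1) {
            openTags.push(pos);
        }

        // pops come out innermost-first; the real parser then finds the
        // nearest "]]" and deletes the matched region from the buffer, so
        // a closer is never paired twice
        while (!openTags.empty()) {
            System.out.println("handle opener at index " + openTags.pop());
        }
        // prints 8 (inner) then 0 (outer)
    }
}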

From source file: org.dbpedia.spotlight.mediawiki.ModularParser.java

private void parseTemplates(SpanManager sm, List<Span> resolvedTemplateSpans,
        List<ResolvedTemplate> resolvedTemplates, ParsedPage pp) {

    sm.manageList(resolvedTemplateSpans);

    int pos = -2;
    Stack<Integer> templateOpenTags = new Stack<Integer>();
    while ((pos = sm.indexOf("{{", pos + 2)) != -1) {
        if (sm.length() > pos + 3 && sm.charAt(pos + 2) == '{' && sm.charAt(pos + 3) != '{') {
            pos++;
        }
        templateOpenTags.push(pos);
    }

    boolean isLastTemplate = true;
    while (!templateOpenTags.empty()) {
        int templateOpenTag = templateOpenTags.pop();
        int templateCloseTag = sm.indexOf("}}", templateOpenTag);
        if (templateCloseTag == -1) {
            continue;
        }

        int templateOptionTag = sm.indexOf("|", templateOpenTag, templateCloseTag);
        int templateNameEnd;
        List<String> templateOptions;

        if (templateOptionTag != -1) {
            templateNameEnd = templateOptionTag;
            templateOptions = tokenize(sm, templateOptionTag + 1, templateCloseTag, "|");
        } else {
            templateNameEnd = templateCloseTag;
            templateOptions = new ArrayList<String>();
        }

        Span ts = new Span(templateOpenTag, templateCloseTag + 2);
        if (isLastTemplate) {
            lastTemplate = sm.substring(new Span(templateOpenTag + 2, templateCloseTag));
            isLastTemplate = false;
        }

        Template t = new Template(ts,
                encodeWikistyle(sm.substring(templateOpenTag + 2, templateNameEnd).trim()), templateOptions);

        if (calculateSrcSpans) {
            t.setSrcSpan(new SrcSpan(sm.getSrcPos(templateOpenTag), sm.getSrcPos(templateCloseTag + 2)));
        }

        t.setPos(ts);

        ResolvedTemplate rt = templateParser.parseTemplate(t, pp);

        resolvedTemplateSpans.add(ts);
        resolvedTemplates.add(rt);

        sm.replace(ts, rt.getPreParseReplacement());
    }

    if (resolvedTemplateSpans.isEmpty()) {
        sm.removeManagedList(resolvedTemplateSpans);
    }
}

From source file: org.dbpedia.spotlight.mediawiki.ModularParser.java

private void parseTags(SpanManager sm, List<Span> spans) {
    sm.manageList(spans);

    Span s = new Span(0, 0);
    Stack<Span> tagSpans = new Stack<Span>();

    while ((s = getTag(sm, s.getEnd())) != null) {
        //spans.add(s);
        //Logic to remove ref tags such as <ref> </ref> from the Article text
        if (sm.substring(s.getStart(), s.getEnd()).contains("/>")) {
            spans.add(s);
            continue;
        }

        if (sm.substring(s.getStart(), s.getEnd()).contains("/")) {
            if (!tagSpans.empty())
                spans.add(new Span(tagSpans.pop().getStart(), s.getEnd()));
        } else
            tagSpans.push(s);
    }

    if (spans.size() == 0) {
        sm.removeManagedList(spans);
    }

    for (Span span : spans)
        sm.delete(span);

}
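
The empty() check above guards against a closing tag that has no matching opener. The same pairing logic in miniature, on made-up input:

import java.util.Stack;

public class TagPairDemo {
    public static void main(String[] args) {
        String[] tags = { "<ref>", "<b>", "</b>", "</ref>", "</i>" };

        Stack<String> open = new Stack<String>();
        for (String tag : tags) {
            if (tag.startsWith("</")) {
                if (!open.empty()) {
                    // pair the closer with the most recent opener
                    System.out.println(open.pop() + " ... " + tag);
                } else {
                    System.out.println("unmatched closer: " + tag);
                }
            } else {
                open.push(tag);
            }
        }
    }
}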

From source file: org.dhatim.yaml.handler.YamlEventStreamHandler.java

public void handle(EventHandler eventHandler, Iterable<Event> yamlEventStream) throws SAXException {

    Stack<String> elementNameStack = new Stack<String>();
    Stack<Type> typeStack = new Stack<Type>();

    boolean isNextElementName = true;
    boolean outputStructAsElement = false;
    for (Event e : yamlEventStream) {

        if (logger.isTraceEnabled()) {
            logger.trace("Event: " + e);
        }

        if (e.is(ID.DocumentStart)) {
            elementNameStack.push(documentName);

            outputStructAsElement = true;
        } else if (e.is(ID.Scalar)) {
            ScalarEvent es = (ScalarEvent) e;

            if (isNextElementName && !lastTypeIsArray(typeStack)) {
                String name = nameFormatter.format(es.getValue());

                if (logger.isTraceEnabled()) {
                    logger.trace("Element name: " + name);
                }

                elementNameStack.push(name);

                eventHandler.addNameEvent(es, name);

                isNextElementName = false;
            } else {
                String elementName = typeStack.peek() == Type.SEQUENCE ? arrayElementName
                        : elementNameStack.pop();

                eventHandler.addValueEvent(es, elementName, es.getValue());

                isNextElementName = true;
            }
        } else if (e.is(ID.MappingStart) || e.is(ID.SequenceStart)) {
            CollectionStartEvent cse = (CollectionStartEvent) e;

            if (outputStructAsElement) {
                String elementName = lastTypeIsArray(typeStack) ? arrayElementName : elementNameStack.peek();
                eventHandler.startStructureEvent(cse, elementName);
            }

            typeStack.push(e.is(ID.SequenceStart) ? Type.SEQUENCE : Type.MAPPING);

            outputStructAsElement = true;
            isNextElementName = true;
        } else if (e.is(ID.MappingEnd) || e.is(ID.SequenceEnd)) {
            typeStack.pop();

            boolean typeStackPeekIsArray = lastTypeIsArray(typeStack);

            if (!elementNameStack.empty() && !typeStackPeekIsArray) {
                eventHandler.endStructureEvent(e, elementNameStack.pop());
            }

            if (typeStackPeekIsArray) {
                eventHandler.endStructureEvent(e, arrayElementName);
            }

        } else if (e.is(ID.Alias)) {
            String elementName = lastTypeIsArray(typeStack) ? arrayElementName : elementNameStack.pop();

            eventHandler.addAliasEvent((AliasEvent) e, elementName);

            isNextElementName = true;
        }

    }
}
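
The handler keeps element names on a stack so that a structure-end event can emit the matching name, with empty() guarding the pop once the document-level name has been consumed. A toy version of that bookkeeping (the event strings are invented for illustration):

import java.util.Stack;

public class NestingDemo {
    public static void main(String[] args) {
        String[] events = { "start:doc", "start:person", "end", "end", "end" };

        Stack<String> names = new Stack<String>();
        for (String e : events) {
            if (e.startsWith("start:")) {
                names.push(e.substring("start:".length()));
            } else if (!names.empty()) {
                System.out.println("close <" + names.pop() + ">");
            } else {
                System.out.println("end event with no open element");
            }
        }
    }
}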

From source file: it.cnr.icar.eric.server.security.authorization.RegistryAttributeFinderModule.java

/**
 * Handles attributes as defined in ebRIM for any RegistryObject.
 * Used by subject, resource, and action attribute handling methods.
 */
EvaluationResult handleRegistryObjectAttribute(Object obj, Stack<?> attributeStack, URI type,
        EvaluationCtx context) {
    EvaluationResult evaluationResult = null;
    try {
        String attr = (String) attributeStack.pop();
        ServerRequestContext requestContext = AuthorizationServiceImpl.getRequestContext(context);
        log.trace("handleRegistryObjectAttribute: obj=" + obj.toString() + " attrubute = " + attr);

        if (requestContext != null && obj != null) {
            @SuppressWarnings("unused")
            RegistryRequestType registryRequest = requestContext.getCurrentRegistryRequest();

            //Now invoke a get method to get the value for attribute being sought
            Class<? extends Object> clazz = obj.getClass();
            @SuppressWarnings("unused")
            String clazzName = clazz.getName();
            PropertyDescriptor propDesc = new PropertyDescriptor(attr, clazz, getReadMethodName(attr), null);
            Method method = propDesc.getReadMethod();
            Object attrValObj = method.invoke(obj, (java.lang.Object[]) null);

            if (attrValObj instanceof Collection) {
                HashSet<AttributeValue> attrValueObjectIds = new HashSet<AttributeValue>();
                Iterator<?> iter = ((Collection<?>) attrValObj).iterator();
                while (iter.hasNext()) {
                    //??Dangerous assumption that Collection is a Collection of IdentifiableTypes
                    String attrValueObjectId = ((IdentifiableType) iter.next()).getId();
                    attrValueObjectIds.add(makeAttribute(attrValueObjectId, type));
                }
                evaluationResult = makeBag(attrValueObjectIds, type);
            } else {
                //See if more pointer chasing needs to be done or (!attributeStack.empty()) 
                if (!attributeStack.empty()) {
                    String id = (String) attrValObj;
                    RegistryObjectType ro = AuthorizationServiceImpl.getInstance()
                            .getRegistryObject(requestContext, id, false);
                    if (ro == null) {
                        throw new ObjectNotFoundException(id, "RegistryObject");
                    }
                    evaluationResult = handleRegistryObjectAttribute(ro, attributeStack, type, context);
                } else {
                    AttributeValue attrVal = makeAttribute(attrValObj, type);
                    evaluationResult = makeBag(attrVal, type);
                }
            }
        }
    } catch (InvocationTargetException e) {
        e.printStackTrace();
    } catch (IntrospectionException e) {
        e.printStackTrace();
    } catch (IllegalAccessException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    } catch (ParsingException e) {
        e.printStackTrace();
    } catch (URISyntaxException e) {
        e.printStackTrace();
    } catch (RegistryException e) {
        e.printStackTrace();
    }

    return evaluationResult;
}
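
The method pops one attribute name per call and recurses while the stack is non-empty, chasing object references until the path is exhausted. A simplified sketch of that pattern using maps in place of registry objects (all names hypothetical):

import java.util.HashMap;
import java.util.Map;
import java.util.Stack;

public class AttributePathDemo {
    public static void main(String[] args) {
        Map<String, Object> owner = new HashMap<String, Object>();
        owner.put("id", "urn:owner:1");
        Map<String, Object> object = new HashMap<String, Object>();
        object.put("owner", owner);

        // push the dotted path in reverse so pops come out left-to-right
        Stack<String> path = new Stack<String>();
        String[] parts = "owner.id".split("\\.");
        for (int i = parts.length - 1; i >= 0; i--) {
            path.push(parts[i]);
        }

        System.out.println(resolve(object, path)); // urn:owner:1
    }

    @SuppressWarnings("unchecked")
    static Object resolve(Object obj, Stack<String> path) {
        Object value = ((Map<String, Object>) obj).get(path.pop());
        // empty() decides whether more pointer chasing needs to be done
        return path.empty() ? value : resolve(value, path);
    }
}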

From source file: org.castor.cpa.persistence.sql.engine.SQLEngine.java

public SQLEngine(final ClassDescriptor clsDesc, final PersistenceFactory factory) throws MappingException {

    _clsDesc = clsDesc;
    _factory = factory;

    // construct field and id info
    Vector<SQLColumnInfo> idsInfo = new Vector<SQLColumnInfo>();
    Vector<SQLFieldInfo> fieldsInfo = new Vector<SQLFieldInfo>();

    /*
     * Implementation Note:
     * Extends and Depends have some special mutually exclusive
     * properties, which the implementor should be aware of.
     *
     * A depended-on class may depend on another depended-on class.
     * A class should either extend or depend on another class.
     * A class should not depend on an extending class,
     *  because that is the same as depending on the base class.
     * A class may be depended on by zero or more classes.
     * A class may be extended by zero or more classes.
     * A class may extend only zero or one class.
     * A class may depend on only zero or one class.
     * A class may depend on an extended class.
     * A class may extend a dependent class.
     * A class may extend a depended-on class.
     * No loop or circle should exist.
     */
    // then, we put depended class ids in the back
    ClassDescriptor base = clsDesc;

    // walk until the base class which this class extends
    base = clsDesc;
    Stack<ClassDescriptor> stack = new Stack<ClassDescriptor>();
    stack.push(base);
    while (base.getExtends() != null) {
        // if (base.getDepends() != null) {
        //     throw new MappingException(
        //             "Class should not both depends on and extended other classes");
        // }
        base = base.getExtends();
        stack.push(base);
        // do we need to add loop detection?
    }

    // now base is either the base of extended class, or
    // clsDesc
    // we always put the original id info in front
    // [oleg] except for SQL name, it may differ.
    FieldDescriptor[] baseIdDescriptors = ((ClassDescriptorImpl) base).getIdentities();
    FieldDescriptor[] idDescriptors = ((ClassDescriptorImpl) clsDesc).getIdentities();

    for (int i = 0; i < baseIdDescriptors.length; i++) {
        if (baseIdDescriptors[i].hasNature(FieldDescriptorJDONature.class.getName())) {
            String name = baseIdDescriptors[i].getFieldName();
            String[] sqlName = new FieldDescriptorJDONature(baseIdDescriptors[i]).getSQLName();
            int[] sqlType = new FieldDescriptorJDONature(baseIdDescriptors[i]).getSQLType();
            FieldHandlerImpl fh = (FieldHandlerImpl) baseIdDescriptors[i].getHandler();

            // The extending class may have other SQL names for identity fields
            for (int j = 0; j < idDescriptors.length; j++) {
                if (name.equals(idDescriptors[j].getFieldName())
                        && (idDescriptors[j].hasNature(JDO_FIELD_NATURE))) {
                    sqlName = new FieldDescriptorJDONature(idDescriptors[j]).getSQLName();
                    break;
                }
            }
            idsInfo.add(new SQLColumnInfo(sqlName[0], sqlType[0], fh.getConvertTo(), fh.getConvertFrom()));
        } else {
            throw new MappingException("Expected JDOFieldDescriptor");
        }
    }

    // then do the fields
    while (!stack.empty()) {
        base = stack.pop();
        FieldDescriptor[] fieldDescriptors = base.getFields();
        for (int i = 0; i < fieldDescriptors.length; i++) {
            // fieldDescriptors[i] is persistent in db if it is not transient
            // and it is a JDOFieldDescriptor or has a ClassDescriptor
            if (!fieldDescriptors[i].isTransient()) {
                if ((fieldDescriptors[i].hasNature(FieldDescriptorJDONature.class.getName()))
                        || (fieldDescriptors[i].getClassDescriptor() != null)) {

                    SQLFieldInfo inf = new SQLFieldInfo(clsDesc, fieldDescriptors[i],
                            new ClassDescriptorJDONature(base).getTableName(), !stack.empty());
                    fieldsInfo.add(inf);
                    if (inf.isJoined()) {
                        String alias = inf.getTableName() + "_f" + i;
                        inf.setTableAlias(alias);
                    } else {
                        inf.setTableAlias(inf.getTableName());
                    }
                }
            }
        }
    }

    InfoFactory infoFactory = new InfoFactory();
    _tableInfo = infoFactory.createTableInfo(clsDesc);

    _ids = new SQLColumnInfo[idsInfo.size()];
    idsInfo.copyInto(_ids);

    _fields = new SQLFieldInfo[fieldsInfo.size()];
    fieldsInfo.copyInto(_fields);

    _queryStatement = new SQLStatementQuery(this, factory);
    _loadStatement = new SQLStatementLoad(this, factory);
    _createStatement = new SQLStatementInsert(this, factory);
    _removeStatement = new SQLStatementDelete(this);
    _storeStatement = new SQLStatementUpdate(this);
}
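
The constructor pushes the class and its ancestors, then pops so the base class is processed first; while draining, !stack.empty() also tells whether the descriptor just popped still has subclasses below it (and thus needs a join). The same traversal on an ordinary Class hierarchy, as a sketch:

import java.util.Stack;

public class ExtendsChainDemo {
    public static void main(String[] args) {
        Stack<Class<?>> stack = new Stack<Class<?>>();
        stack.push(java.util.Stack.class);
        while (stack.peek().getSuperclass() != null) {
            stack.push(stack.peek().getSuperclass());
        }

        // pops base-first: Object, AbstractCollection, AbstractList,
        // Vector, Stack
        while (!stack.empty()) {
            Class<?> c = stack.pop();
            boolean hasSubclassBelow = !stack.empty();
            System.out.println(c.getSimpleName() + " joined=" + hasSubclassBelow);
        }
    }
}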

From source file: com.continusec.client.VerifiableLog.java

/**
 * Utility method for auditors that wish to audit the full content of a log, as well as the log operation.
 * This method retrieves all entries from the log in batches and verifies that the root hash in head accurately
 * represents the contents of all of the log entries. If prev is not null, it additionally proves that the root hash
 * in head is consistent with the root hash in prev.
 * @param prev a previous LogTreeHead representing the set of entries that have been previously audited. Pass null to indicate that nothing has previously been audited.
 * @param head the LogTreeHead up to which we wish to audit the log. Upon successful completion the caller should persist this for a future iteration.
 * @param auditor the caller should implement a LogAuditor, which is called sequentially for each log entry as it is encountered.
 * @param factory the factory to use for instantiating log entries. Typically this is one of {@link RawDataEntryFactory#getInstance()}, {@link JsonEntryFactory#getInstance()}, {@link RedactedJsonEntryFactory#getInstance()}.
 * @throws ContinusecException upon error
 */
public void verifyEntries(LogTreeHead prev, LogTreeHead head, VerifiableEntryFactory factory,
        LogAuditor auditor) throws ContinusecException {
    if ((prev == null) || prev.getTreeSize() < head.getTreeSize()) {
        Stack<byte[]> merkleTreeStack = new Stack<byte[]>();
        if ((prev != null) && (prev.getTreeSize() > 0)) {
            LogInclusionProof p = this.getInclusionProofByIndex(prev.getTreeSize() + 1, prev.getTreeSize());
            byte[] firstHash = null;
            for (byte[] b : p.getAuditPath()) {
                if (firstHash == null) {
                    firstHash = b;
                } else {
                    firstHash = Util.nodeMerkleTreeHash(b, firstHash);
                }
            }
            if (!(Arrays.equals(firstHash, prev.getRootHash()))) {
                throw new VerificationFailedException();
            }
            for (int i = p.getAuditPath().length - 1; i >= 0; i--) {
                merkleTreeStack.push(p.getAuditPath()[i]);
            }
        }

        int idx = (prev == null) ? 0 : prev.getTreeSize();
        try {
            for (VerifiableEntry e : this.getEntries(idx, head.getTreeSize(), factory)) {
                // do whatever content audit is desired on e
                auditor.auditLogEntry(idx, e);

                // update the merkle tree hash stack:
                merkleTreeStack.add(e.getLeafHash());
                for (int z = idx; (z & 1) == 1; z >>= 1) {
                    byte[] right = merkleTreeStack.pop();
                    byte[] left = merkleTreeStack.pop();
                    merkleTreeStack.push(Util.nodeMerkleTreeHash(left, right));
                }
                idx++;
            }
        } catch (RuntimeException e2) {
            // since get entries iterator throws a runtime exception that wraps the real continusec exception...
            Throwable cause = e2.getCause();
            if (cause instanceof ContinusecException) {
                throw (ContinusecException) cause;
            } else {
                throw e2;
            }
        }

        if (idx != head.getTreeSize()) {
            throw new NotAllEntriesReturnedException();
        }

        byte[] headHash = merkleTreeStack.pop();
        while (!merkleTreeStack.empty()) {
            headHash = Util.nodeMerkleTreeHash(merkleTreeStack.pop(), headHash);
        }

        if (!(Arrays.equals(headHash, head.getRootHash()))) {
            throw new VerificationFailedException();
        }
    }
}
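
The merkleTreeStack loop is an incremental Merkle root computation: leaf hashes are pushed, and whenever idx ends in a run of 1-bits, two complete subtrees of equal height sit on top of the stack and are merged. A standalone sketch of that loop, assuming RFC 6962-style hashing for illustration (the library's actual hashing lives in Util):

import java.security.MessageDigest;
import java.util.Stack;

public class MerkleStackDemo {
    public static void main(String[] args) throws Exception {
        byte[][] leaves = { "a".getBytes(), "b".getBytes(), "c".getBytes() };

        Stack<byte[]> stack = new Stack<byte[]>();
        int idx = 0;
        for (byte[] leaf : leaves) {
            stack.push(hash((byte) 0x00, leaf, null)); // leaf hash
            // each trailing 1-bit of idx means two equal-height subtrees
            // are on top of the stack and can be merged
            for (int z = idx; (z & 1) == 1; z >>= 1) {
                byte[] right = stack.pop();
                byte[] left = stack.pop();
                stack.push(hash((byte) 0x01, left, right));
            }
            idx++;
        }

        // fold any remaining subtrees, right to left, into the root
        byte[] root = stack.pop();
        while (!stack.empty()) {
            root = hash((byte) 0x01, stack.pop(), root);
        }
        System.out.println("root: " + toHex(root));
    }

    static byte[] hash(byte prefix, byte[] a, byte[] b) throws Exception {
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        md.update(prefix);
        md.update(a);
        if (b != null) {
            md.update(b);
        }
        return md.digest();
    }

    static String toHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder();
        for (byte b : bytes) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }
}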

From source file: org.apache.hadoop.hbase.filter.ParseFilter.java

/**
 * Parses the filterString and constructs a filter using it
 * <p>/*from  ww  w.j  av a  2  s  .  co m*/
 * @param filterStringAsByteArray filter string given by the user
 * @return filter object we constructed
 */
public Filter parseFilterString(byte[] filterStringAsByteArray) throws CharacterCodingException {
    // stack for the operators and parenthesis
    Stack<ByteBuffer> operatorStack = new Stack<ByteBuffer>();
    // stack for the filter objects
    Stack<Filter> filterStack = new Stack<Filter>();

    Filter filter = null;
    for (int i = 0; i < filterStringAsByteArray.length; i++) {
        if (filterStringAsByteArray[i] == ParseConstants.LPAREN) {
            // LPAREN found
            operatorStack.push(ParseConstants.LPAREN_BUFFER);
        } else if (filterStringAsByteArray[i] == ParseConstants.WHITESPACE
                || filterStringAsByteArray[i] == ParseConstants.TAB) {
            // WHITESPACE or TAB found
            continue;
        } else if (checkForOr(filterStringAsByteArray, i)) {
            // OR found
            i += ParseConstants.OR_ARRAY.length - 1;
            reduce(operatorStack, filterStack, ParseConstants.OR_BUFFER);
            operatorStack.push(ParseConstants.OR_BUFFER);
        } else if (checkForAnd(filterStringAsByteArray, i)) {
            // AND found
            i += ParseConstants.AND_ARRAY.length - 1;
            reduce(operatorStack, filterStack, ParseConstants.AND_BUFFER);
            operatorStack.push(ParseConstants.AND_BUFFER);
        } else if (checkForSkip(filterStringAsByteArray, i)) {
            // SKIP found
            i += ParseConstants.SKIP_ARRAY.length - 1;
            reduce(operatorStack, filterStack, ParseConstants.SKIP_BUFFER);
            operatorStack.push(ParseConstants.SKIP_BUFFER);
        } else if (checkForWhile(filterStringAsByteArray, i)) {
            // WHILE found
            i += ParseConstants.WHILE_ARRAY.length - 1;
            reduce(operatorStack, filterStack, ParseConstants.WHILE_BUFFER);
            operatorStack.push(ParseConstants.WHILE_BUFFER);
        } else if (filterStringAsByteArray[i] == ParseConstants.RPAREN) {
            // RPAREN found
            if (operatorStack.empty()) {
                throw new IllegalArgumentException("Mismatched parenthesis");
            }
            ByteBuffer argumentOnTopOfStack = operatorStack.peek();
            while (!(argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER))) {
                filterStack.push(popArguments(operatorStack, filterStack));
                if (operatorStack.empty()) {
                    throw new IllegalArgumentException("Mismatched parenthesis");
                }
                argumentOnTopOfStack = operatorStack.pop();
            }
        } else {
            // SimpleFilterExpression found
            byte[] filterSimpleExpression = extractFilterSimpleExpression(filterStringAsByteArray, i);
            i += (filterSimpleExpression.length - 1);
            filter = parseSimpleFilterExpression(filterSimpleExpression);
            filterStack.push(filter);
        }
    }

    // Finished parsing filterString
    while (!operatorStack.empty()) {
        filterStack.push(popArguments(operatorStack, filterStack));
    }
    filter = filterStack.pop();
    if (!filterStack.empty()) {
        throw new IllegalArgumentException("Incorrect Filter String");
    }
    return filter;
}
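
Both empty() checks above exist to catch malformed input: a ')' with no '(' left on the operator stack, and leftover operators or filters once the string is exhausted. The parenthesis half of that, reduced to a sketch:

import java.util.Stack;

public class ParenCheckDemo {
    public static void main(String[] args) {
        System.out.println(balanced("(a AND (b OR c))")); // true
        System.out.println(balanced("a AND b)"));         // false: stray ')'
        System.out.println(balanced("((a"));              // false: unclosed '('
    }

    static boolean balanced(String s) {
        Stack<Character> opens = new Stack<Character>();
        for (char c : s.toCharArray()) {
            if (c == '(') {
                opens.push(c);
            } else if (c == ')') {
                if (opens.empty()) {
                    return false; // mismatched parenthesis
                }
                opens.pop();
            }
        }
        return opens.empty(); // anything left is an unclosed '('
    }
}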

From source file: org.apache.tajo.engine.planner.LogicalPlanner.java

public LogicalNode postHook(PlanContext context, Stack<Expr> stack, Expr expr, LogicalNode current)
        throws PlanningException {

    // Some generated logical nodes (e.g., implicit aggregation) without exprs will pass NULL as the expr parameter.
    // We should skip them.
    if (expr != null) {
        // A relation list including a single ScanNode will return a ScanNode instance that already passed postHook.
        // So, it skips the already-visited ScanNode instance.
        if (expr.getType() == OpType.RelationList && current.getType() == NodeType.SCAN) {
            return current;
        }
    }

    QueryBlock queryBlock = context.queryBlock;
    queryBlock.updateLatestNode(current);

    // if this node is the topmost
    if (stack.size() == 0) {
        queryBlock.setRoot(current);
    }

    if (!stack.empty()) {
        queryBlock.updateCurrentNode(stack.peek());
    }
    return current;
}
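
Here an empty stack means the current node has no ancestors, i.e. it is the topmost node; otherwise peek() exposes the parent. The same idea on a toy post-order tree walk (types invented for illustration):

import java.util.Stack;

public class AncestorStackDemo {
    static class Node {
        final String name;
        final Node[] children;
        Node(String name, Node... children) {
            this.name = name;
            this.children = children;
        }
    }

    public static void main(String[] args) {
        Node root = new Node("root", new Node("scan"));
        visit(root, new Stack<Node>());
    }

    static void visit(Node node, Stack<Node> ancestors) {
        ancestors.push(node);
        for (Node child : node.children) {
            visit(child, ancestors);
        }
        ancestors.pop(); // the hook runs with the current node off the stack

        if (ancestors.empty()) {
            System.out.println(node.name + " is the topmost node");
        } else {
            System.out.println(node.name + "'s parent is " + ancestors.peek().name);
        }
    }
}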

From source file: com.amazonaws.services.kinesis.scaling.StreamScaler.java

private ScalingOperationReport scaleStream(String streamName, int originalShardCount, int targetShards,
        int operationsMade, int shardsCompleted, long startTime, Stack<ShardHashInfo> shardStack,
        Integer minCount, Integer maxCount) throws Exception {
    final double targetPct = 1d / targetShards;
    boolean checkMinMax = minCount != null || maxCount != null;
    String lastShardLower = null;
    String lastShardHigher = null;
    ScaleDirection scaleDirection = originalShardCount >= targetShards ? ScaleDirection.DOWN
            : ScaleDirection.UP;

    // seed the current shard count from the working stack
    int currentCount = shardStack.size();

    // we'll run iteratively until the shard stack is emptied or we reach
    // one of the caps
    ScalingCompletionStatus endStatus = ScalingCompletionStatus.Ok;
    do {
        if (checkMinMax) {
            // stop scaling if we've reached the min or max count
            boolean stopOnCap = false;
            String message = null;
            if (minCount != null && currentCount == minCount && targetShards <= minCount) {
                stopOnCap = true;
                if (operationsMade == 0) {
                    endStatus = ScalingCompletionStatus.AlreadyAtMinimum;
                } else {
                    endStatus = ScalingCompletionStatus.Ok;
                }
                message = String.format("%s: Minimum Shard Count of %s Reached", streamName, minCount);
            }
            if (maxCount != null && currentCount == maxCount && targetShards >= maxCount) {
                if (operationsMade == 0) {
                    endStatus = ScalingCompletionStatus.AlreadyAtMaximum;
                } else {
                    endStatus = ScalingCompletionStatus.Ok;
                }
                message = String.format("%s: Maximum Shard Count of %s Reached", streamName, maxCount);
                stopOnCap = true;
            }
            if (stopOnCap) {
                LOG.info(message);
                return reportFor(endStatus, streamName, operationsMade, scaleDirection);
            }
        }

        // report progress every shard completed
        if (shardsCompleted > 0) {
            reportProgress(streamName, shardsCompleted, currentCount, shardStack.size(), startTime);
        }

        // once the stack is emptied, return a report of the hash space
        // allocation
        if (shardStack.empty()) {
            return reportFor(endStatus, streamName, operationsMade, scaleDirection);
        }

        ShardHashInfo lowerShard = shardStack.pop();
        if (lowerShard != null) {
            lastShardLower = lowerShard.getShardId();
        } else {
            throw new Exception(String.format("%s: Null ShardHashInfo retrieved after processing %s",
                    streamName, lastShardLower));
        }

        // first check is if the bottom shard is smaller or larger than our
        // target width
        if (StreamScalingUtils.softCompare(lowerShard.getPctWidth(), targetPct) < 0) {
            if (shardStack.empty()) {
                // our current shard is smaller than the target size, but
                // there's nothing else to do
                return reportFor(endStatus, streamName, operationsMade, scaleDirection);
            } else {
                // get the next higher shard
                ShardHashInfo higherShard = shardStack.pop();

                if (higherShard != null) {
                    lastShardHigher = higherShard.getShardId();
                }

                if (StreamScalingUtils.softCompare(lowerShard.getPctWidth() + higherShard.getPctWidth(),
                        targetPct) > 0) {
                    // The two lowest shards together are larger than the
                    // target size, so split the upper at the target offset
                    // and
                    // merge the lower of the two new shards to the lowest
                    // shard
                    AdjacentShards splitUpper = higherShard.doSplit(kinesisClient,
                            targetPct - lowerShard.getPctWidth(),
                            shardStack.isEmpty() ? higherShard.getShardId()
                                    : shardStack.lastElement().getShardId());
                    operationsMade++;

                    // place the upper of the two new shards onto the stack
                    shardStack.push(splitUpper.getHigherShard());

                    // merge lower of the new shards with the lowest shard
                    LOG.info(String.format("Merging Shard %s with %s", lowerShard.getShardId(),
                            splitUpper.getLowerShard().getShardId()));
                    ShardHashInfo lowerMerged = new AdjacentShards(streamName, lowerShard,
                            splitUpper.getLowerShard()).doMerge(kinesisClient,
                                    shardStack.isEmpty() ? splitUpper.getHigherShard().getShardId()
                                            : shardStack.lastElement().getShardId());
                    LOG.info(String.format("Created Shard %s (%s)", lowerMerged.getShardId(),
                            pctFormat.format(lowerMerged.getPctWidth())));
                    shardsCompleted++;

                    // count of shards is unchanged in this case as we've
                    // just rebalanced, so current count is not updated
                } else {
                    // The lower and upper shards together are smaller than
                    // the target size, so merge the two shards together
                    ShardHashInfo lowerMerged = new AdjacentShards(streamName, lowerShard, higherShard)
                            .doMerge(kinesisClient, shardStack.isEmpty() ? higherShard.getShardId()
                                    : shardStack.lastElement().getShardId());
                    shardsCompleted++;
                    currentCount--;

                    // put the new shard back on the stack - it may still be
                    // too small relative to the target
                    shardStack.push(lowerMerged);
                }
            }
        } else if (StreamScalingUtils.softCompare(lowerShard.getPctWidth(), targetPct) == 0) {
            // at the correct size - move on
        } else {
            // lowest shard is larger than the target size so split at the
            // target offset
            AdjacentShards splitLower = lowerShard.doSplit(kinesisClient, targetPct,
                    shardStack.isEmpty() ? lowerShard.getShardId() : shardStack.lastElement().getShardId());
            operationsMade++;

            LOG.info(
                    String.format("Split Shard %s at %s Creating Final Shard %s and Intermediate Shard %s (%s)",
                            lowerShard.getShardId(), pctFormat.format(targetPct),
                            splitLower.getLowerShard().getShardId(), splitLower.getHigherShard(),
                            pctFormat.format(splitLower.getHigherShard().getPctWidth())));

            // push the higher of the two splits back onto the stack
            shardStack.push(splitLower.getHigherShard());
            shardsCompleted++;
            currentCount++;
        }
    } while (!shardStack.empty());

    return reportFor(endStatus, streamName, operationsMade, scaleDirection);
}
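
The scaler's outer loop is a drain-and-refill pattern: pop a work item, and push a resized remainder back when it is still too large, so the stack drives the iteration until empty() finally holds. The bare pattern, with made-up integer widths standing in for shard hash ranges:

import java.util.Stack;

public class DrainLoopDemo {
    public static void main(String[] args) {
        int target = 25;
        Stack<Integer> widths = new Stack<Integer>();
        widths.push(60);
        widths.push(40);

        while (!widths.empty()) {
            int w = widths.pop();
            if (w > target) {
                System.out.println("split " + w + " into " + target + " + " + (w - target));
                widths.push(w - target); // remainder goes back on the stack
            } else {
                System.out.println("keep " + w);
            }
        }
    }
}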