Example usage for java.util Stack pop

List of usage examples for java.util Stack pop

Introduction

On this page you can find example usage for java.util.Stack.pop().

Prototype

public synchronized E pop() 

Document

Removes the object at the top of this stack and returns that object as the value of this function. An EmptyStackException is thrown if the stack is empty.
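
A minimal runnable sketch of this contract (the class name is illustrative):

import java.util.EmptyStackException;
import java.util.Stack;

public class StackPopDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<>();
        stack.push("first");
        stack.push("second");

        // pop() removes and returns the top element (LIFO order)
        System.out.println(stack.pop()); // prints "second"
        System.out.println(stack.pop()); // prints "first"

        // popping an empty stack throws java.util.EmptyStackException
        try {
            stack.pop();
        } catch (EmptyStackException e) {
            System.out.println("stack was empty");
        }
    }
}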

Usage

From source file:com.flexive.core.storage.genericSQL.GenericTreeStorageSpreaded.java

protected long _reorganizeSpace(Connection con, SequencerEngine seq, FxTreeMode sourceMode, FxTreeMode destMode,
        long nodeId, boolean includeNodeId, BigInteger overrideSpacing, BigInteger overrideLeft,
        FxTreeNodeInfo insertParent, int insertPosition, BigInteger insertSpace, BigInteger insertBoundaries[],
        int depthDelta, Long destinationNode, boolean createMode, boolean createKeepIds,
        boolean disableSpaceOptimization) throws FxTreeException {
    long firstCreatedNodeId = -1;
    FxTreeNodeInfoSpreaded nodeInfo;
    try {
        nodeInfo = (FxTreeNodeInfoSpreaded) getTreeNodeInfo(con, sourceMode, nodeId);
    } catch (Exception e) {
        return -1;
    }

    if (!nodeInfo.isSpaceOptimizable() && !disableSpaceOptimization) {
        // The root node cannot be optimized any further ... so all we can do is fail :-/
        // This should never really happen
        if (nodeId == ROOT_NODE) {
            return -1;
        }
        //System.out.println("### UP we go, depthDelta=" + depthDelta);
        return _reorganizeSpace(con, seq, sourceMode, destMode, nodeInfo.getParentId(), includeNodeId,
                overrideSpacing, overrideLeft, insertParent, insertPosition, insertSpace, insertBoundaries,
                depthDelta, destinationNode, createMode, createKeepIds, false);
    }

    BigInteger spacing = nodeInfo.getDefaultSpacing();
    if (overrideSpacing != null && (overrideSpacing.compareTo(spacing) < 0 || overrideLeft != null)) {
        // override spacing unless it is greater OR overrideLeft is specified (in that case we
        // have to use the spacing for valid tree ranges)  
        spacing = overrideSpacing;
    } else {
        if (spacing.compareTo(GO_UP) < 0 && !createMode && !disableSpaceOptimization) {
            return _reorganizeSpace(con, seq, sourceMode, destMode, nodeInfo.getParentId(), includeNodeId,
                    overrideSpacing, overrideLeft, insertParent, insertPosition, insertSpace, insertBoundaries,
                    depthDelta, destinationNode, createMode, createKeepIds, false);
        }
    }

    if (insertBoundaries != null && insertPosition == -1) {
        insertPosition = 0; // insertPosition cannot be negative
    }

    Statement stmt = null;
    PreparedStatement ps = null;
    ResultSet rs;
    BigInteger left = overrideLeft == null ? nodeInfo.getLeft() : overrideLeft;
    BigInteger right = null;
    String includeNode = includeNodeId ? "=" : "";
    long counter = 0;
    long newId = -1;
    try {
        final long start = System.currentTimeMillis();
        String createProps = createMode ? ",PARENT,REF,NAME,TEMPLATE" : "";
        String sql = " SELECT ID," + StorageManager.getIfFunction( // compute total child count only when the node has children
                "CHILDCOUNT = 0", "0",
                "(SELECT COUNT(*) FROM " + getTable(sourceMode) + " WHERE LFT > NODE.LFT AND RGT < NODE.RGT)") +
        // 3           4             5   6
                ", CHILDCOUNT, LFT AS LFTORD,RGT,DEPTH" + createProps
                + " FROM (SELECT ID,CHILDCOUNT,LFT,RGT,DEPTH" + createProps + " FROM " + getTable(sourceMode)
                + " WHERE " + "LFT>" + includeNode + nodeInfo.getLeft() + " AND LFT<" + includeNode
                + nodeInfo.getRight() + ") NODE " + "ORDER BY LFTORD ASC";
        stmt = con.createStatement();
        rs = stmt.executeQuery(sql);
        if (createMode) {
            //                                                                 1  2      3     4     5   6        7   8
            ps = con.prepareStatement(
                    "INSERT INTO " + getTable(destMode) + " (ID,PARENT,DEPTH,DIRTY,REF,TEMPLATE,LFT,RGT," +
                    //9           10    11
                            "CHILDCOUNT,NAME,MODIFIED_AT) " + "VALUES (?,?,?,?,?,?,?,?,?,?,?)");
        } else {
            ps = con.prepareStatement("UPDATE " + getTable(sourceMode) + " SET LFT=?,RGT=?,DEPTH=? WHERE ID=?");
        }
        long id;
        int total_childs;
        int direct_childs;
        BigInteger nextLeft;
        int lastDepth = nodeInfo.getDepth() + (includeNodeId ? 0 : 1);
        int depth;
        BigInteger _rgt;
        BigInteger _lft;
        Long ref = null;
        String data = null;
        String name = "";

        Stack<Long> currentParent = null;
        if (createMode) {
            currentParent = new Stack<Long>();
            currentParent.push(destinationNode);
        }

        //System.out.println("Spacing:"+SPACING);
        while (rs.next()) {
            //System.out.println("------------------");
            id = rs.getLong(1);
            total_childs = rs.getInt(2);
            direct_childs = rs.getInt(3);
            _lft = getNodeBounds(rs, 4);
            _rgt = getNodeBounds(rs, 5);
            depth = rs.getInt(6);
            if (createMode) {
                // Reading these properties is slow, only do it when needed
                ref = rs.getLong(8);
                if (rs.wasNull())
                    ref = null;
                name = rs.getString(9);
                data = rs.getString(10);
                if (rs.wasNull())
                    data = null;
            }
            left = left.add(spacing).add(BigInteger.ONE);

            // Handle depth differences
            if (lastDepth - depth > 0) {
                BigInteger depthDifference = spacing.add(BigInteger.ONE);
                left = left.add(depthDifference.multiply(BigInteger.valueOf(lastDepth - depth)));
            }
            if (createMode) {
                if (lastDepth < depth) {
                    currentParent.push(newId);
                } else if (lastDepth > depth) {
                    for (int p = 0; p < (lastDepth - depth); p++)
                        currentParent.pop();
                }
            }

            right = left.add(spacing).add(BigInteger.ONE);

            // add child space if needed
            if (total_childs > 0) {
                BigInteger childSpace = spacing.multiply(BigInteger.valueOf(total_childs * 2));
                childSpace = childSpace.add(BigInteger.valueOf((total_childs * 2) - 1));
                right = right.add(childSpace);
                nextLeft = left;
            } else {
                nextLeft = right;
            }

            if (insertBoundaries != null) {
                // insert gap at requested position
                // If we're past the gap, keep adding the insert space to left/right because the added
                // space is never "injected" into the loop, i.e. without adding it the left/right boundaries of
                // nodes after the gap would be too far to the left.
                if (_lft.compareTo(insertBoundaries[0]) > 0) {
                    left = left.add(insertSpace);
                }
                if (_rgt.compareTo(insertBoundaries[0]) > 0) {
                    right = right.add(insertSpace);
                }
            }

            // sanity checks
            if (left.compareTo(right) >= 0) {
                throw new FxTreeException(LOG, "ex.tree.reorganize.failed", counter, left, right,
                        "left greater than right");
            }
            if (insertParent != null && right.compareTo((BigInteger) insertParent.getRight()) > 0) {
                throw new FxTreeException(LOG, "ex.tree.reorganize.failed", counter, left, right,
                        "wrote past parent node bounds");
            }

            // Update the node
            if (createMode) {
                newId = createKeepIds ? id : seq.getId(destMode.getSequencer());
                if (firstCreatedNodeId == -1)
                    firstCreatedNodeId = newId;

                // Create the main entry
                ps.setLong(1, newId);
                ps.setLong(2, currentParent.peek());
                ps.setLong(3, depth + depthDelta);
                ps.setBoolean(4, destMode != FxTreeMode.Live); //only flag non-live trees as dirty
                if (ref == null) {
                    ps.setNull(5, java.sql.Types.NUMERIC);
                } else {
                    ps.setLong(5, ref);
                }
                if (data == null) {
                    ps.setNull(6, java.sql.Types.VARCHAR);
                } else {
                    ps.setString(6, data);
                }
                //                    System.out.println("=> id:"+newId+" left:"+left+" right:"+right);
                setNodeBounds(ps, 7, left);
                setNodeBounds(ps, 8, right);
                ps.setInt(9, direct_childs);
                ps.setString(10, name);
                ps.setLong(11, System.currentTimeMillis());
                ps.addBatch();
            } else {
                setNodeBounds(ps, 1, left);
                setNodeBounds(ps, 2, right);
                ps.setInt(3, depth + depthDelta);
                ps.setLong(4, id);
                ps.addBatch();
                //                    ps.executeBatch();
                //                    ps.clearBatch();
            }

            // Prepare variables for the next node
            left = nextLeft;
            lastDepth = depth;
            counter++;

            // Execute batch every 10000 items to avoid out of memory
            if (counter % 10000 == 0) {
                ps.executeBatch();
                ps.clearBatch();
            }
        }
        rs.close();
        stmt.close();
        stmt = null;
        ps.executeBatch();

        if (LOG.isDebugEnabled()) {
            final long time = System.currentTimeMillis() - start;

            LOG.debug("Tree reorganization of " + counter + " items completed in " + time + " ms (spaceLen="
                    + spacing + ")");
        }
        return firstCreatedNodeId;
    } catch (FxApplicationException e) {
        throw e instanceof FxTreeException ? (FxTreeException) e : new FxTreeException(e);
    } catch (SQLException e) {
        String next = "";
        if (e.getNextException() != null)
            next = " next:" + e.getNextException().getMessage();
        if (StorageManager.isDuplicateKeyViolation(e))
            throw new FxTreeException(LOG, e, "ex.tree.reorganize.duplicateKey");
        throw new FxTreeException(LOG, e, "ex.tree.reorganize.failed", counter, left, right,
                e.getMessage() + next);
    } catch (Exception e) {
        throw new FxTreeException(e);
    } finally {
        try {
            if (stmt != null)
                stmt.close();
        } catch (Throwable t) {
            // ignore
        }
        try {
            if (ps != null)
                ps.close();
        } catch (Throwable t) {
            // ignore
        }
    }
}

From source file:com.linuxbox.enkive.message.MessageImpl.java

/**
 * Constructs a com.linuxbox.enkive.message object from a raw
 * email message InputStream.
 *
 * @param in
 *            an InputStream of the message to be parsed
 * @throws IOException
 * @throws MimeIOException
 * @throws MimeException
 * @throws CannotTransferMessageContentException
 * @throws BadMessageException
 */
public void ConstructMessage(InputStream in)
        throws IOException, CannotTransferMessageContentException, BadMessageException {

    Stack<MultiPartHeader> headerStack = new Stack<MultiPartHeader>();
    MultiPartHeader mp;
    StringBuilder headers = new StringBuilder();
    boolean messageHeadersParsed = false;
    boolean isMultiPart = false;

    // TODO Get line ending from message
    final String lineEnding = "\r\n";

    final MessageStreamParser stream = new MessageStreamParser(config);
    stream.setRecursionMode(RecursionMode.M_NO_RECURSE);

    stream.parse(in);

    try {
        for (EntityState state = stream.getState(); state != EntityState.T_END_OF_STREAM; state = stream
                .next()) {
            switch (state) {

            // At the start of a header section we want to reset the local
            // header variable since we only want to store the headers
            // for the section currently being parsed
            case T_START_HEADER:
                headers = new StringBuilder();
                break;

            // Append each header field to the local header variable
            case T_FIELD:
                headers.append(stream.getField());
                headers.append(lineEnding);
                break;

            // If we haven't set the message headers set them and
            // clear the variable so they don't get stored in a
            // ContentHeader object
            case T_END_HEADER:
                if (!messageHeadersParsed) {
                    setOriginalHeaders(headers.toString());
                    messageHeadersParsed = true;
                    headers = new StringBuilder();
                }
                break;

            // If we have a multipart message, create a new object,
            // grab the information we need and push it on the stack
            case T_START_MULTIPART:
                isMultiPart = true;
                mp = new MultiPartHeaderImpl();
                mp.setBoundary(stream.getBodyDescriptor().getBoundary());
                mp.setOriginalHeaders(headers.toString());
                mp.setLineEnding(lineEnding);
                headerStack.push(mp);
                break;

            // If there's a preamble, get the multipartheader off
            // the top of the stack, set it, and push back on the stack
            case T_PREAMBLE:
                BufferedReader reader = new BufferedReader(stream.getReader());

                String tempString;
                String preamble = "";
                while ((tempString = reader.readLine()) != null) {
                    preamble += tempString + lineEnding;
                }
                mp = headerStack.pop();
                mp.setPreamble(preamble);
                headerStack.push(mp);
                break;

            // If there's an epilogue, get the multipartheader off
            // the top of the stack, set it, and push back on the stack
            case T_EPILOGUE:
                BufferedReader epilogueReader = new BufferedReader(stream.getReader());

                String tempEpilogueString;
                String epilogue = "";
                while ((tempEpilogueString = epilogueReader.readLine()) != null) {
                    epilogue += tempEpilogueString + lineEnding;
                }
                mp = headerStack.pop();
                mp.setEpilogue(epilogue);
                headerStack.push(mp);
                break;

            // Create a new singlepartheader, set the headers,
            // set the content_data
            case T_BODY:
                SinglePartHeader single = new SinglePartHeaderImpl();
                final String transferEncoding = stream.getBodyDescriptor().getTransferEncoding();
                EncodedContentDataImpl cd = new EncodedContentDataImpl(transferEncoding);
                cd.setBinaryContent(stream.getInputStream());
                single.setContentTransferEncoding(transferEncoding);

                single.setOriginalHeaders(headers.toString());
                single.parseHeaders(headers.toString(), config);
                single.setEncodedContentData(cd);
                single.setLineEnding(lineEnding);
                // If we're working with a multipart message,
                // pop, add the singlepartheader, and push.
                // Otherwise just set the singlepartheader
                if (isMultiPart) {
                    mp = headerStack.pop();
                    mp.addPartHeader(single);
                    headerStack.push(mp);
                } else
                    this.setContentHeader(single);
                break;

            // If we've reached the end of a multipart, it could
            // be a nested multipart. In that case we'll need to
            // Add the nested multipart to the multipart a level above.
            // If not nested, we've reached the end of the content headers
            // so set it.
            case T_END_MULTIPART:
                mp = headerStack.pop();
                if (headerStack.isEmpty())
                    this.setContentHeader(mp);
                else {
                    MultiPartHeader mp2 = headerStack.pop();
                    mp2.addPartHeader(mp);
                    headerStack.push(mp2);
                }
                break;
            default:
                // ignore other tags
                break;
            } // switch
        } // for
    } catch (MimeException e) {
        throw new BadMessageException(e);
    } catch (MimeIOException e) {
        throw new BadMessageException(e);
    }
    if (LOGGER.isTraceEnabled())
        LOGGER.trace("Message " + this.messageId + " successfully parsed.");
}

From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java

/**
 * Recursively builds a substream-per-stream ordered tree graph using the
 * join information supplied for outer joins and from the query graph (where clause).
 * <p>
 * Required streams are considered first and their lookup is placed first in the list
 * to gain performance.
 * @param streamNum is the root stream number that supplies the incoming event to build the tree for
 * @param queryGraph contains where-clause stream relationship info
 * @param outerInnerGraph contains the outer join stream relationship info
 * @param completedStreams is a temporary holder for streams already considered
 * @param substreamsPerStream is the ordered, tree-like structure to be filled
 * @param requiredPerStream indicates which streams are required and which are optional
 * @param streamCallStack the query plan call stack of streams available via cursor
 * @param dependencyGraph dependencies between historical streams
 * @throws ExprValidationException if the query planning failed
 */
protected static void recursiveBuild(int streamNum, Stack<Integer> streamCallStack, QueryGraph queryGraph,
        OuterInnerDirectionalGraph outerInnerGraph, InnerJoinGraph innerJoinGraph,
        Set<Integer> completedStreams, LinkedHashMap<Integer, int[]> substreamsPerStream,
        boolean[] requiredPerStream, DependencyGraph dependencyGraph) throws ExprValidationException {
    // add this stream to the set of completed streams
    completedStreams.add(streamNum);

    // check if the dependencies have been satisfied
    if (dependencyGraph.hasDependency(streamNum)) {
        Set<Integer> dependencies = dependencyGraph.getDependenciesForStream(streamNum);
        for (Integer dependentStream : dependencies) {
            if (!streamCallStack.contains(dependentStream)) {
                throw new ExprValidationException(
                        "Historical stream " + streamNum + " parameter dependency originating in stream "
                                + dependentStream + " cannot or may not be satisfied by the join");
            }
        }
    }

    // Determine the streams we can navigate to from this stream
    Set<Integer> navigableStreams = queryGraph.getNavigableStreams(streamNum);

    // add unqualified navigable streams (since on-expressions in outer joins are optional)
    Set<Integer> unqualifiedNavigable = outerInnerGraph.getUnqualifiedNavigableStreams().get(streamNum);
    if (unqualifiedNavigable != null) {
        navigableStreams.addAll(unqualifiedNavigable);
    }

    // remove those already done
    navigableStreams.removeAll(completedStreams);

    // Which streams are inner streams to this stream (optional), which ones are outer to the stream (required)
    Set<Integer> requiredStreams = getOuterStreams(streamNum, navigableStreams, outerInnerGraph);

    // Add inner joins, if any, unless already completed for this stream
    innerJoinGraph.addRequiredStreams(streamNum, requiredStreams, completedStreams);

    Set<Integer> optionalStreams = getInnerStreams(streamNum, navigableStreams, outerInnerGraph, innerJoinGraph,
            completedStreams);

    // Remove from the required streams the optional streams which places 'full' joined streams
    // into the optional stream category
    requiredStreams.removeAll(optionalStreams);

    // if we are a leaf node, we are done
    if (navigableStreams.isEmpty()) {
        substreamsPerStream.put(streamNum, new int[0]);
        return;
    }

    // First the outer (required) streams to this stream, then the inner (optional) streams
    int[] substreams = new int[requiredStreams.size() + optionalStreams.size()];
    substreamsPerStream.put(streamNum, substreams);
    int count = 0;
    for (int stream : requiredStreams) {
        substreams[count++] = stream;
        requiredPerStream[stream] = true;
    }
    for (int stream : optionalStreams) {
        substreams[count++] = stream;
    }

    // next we look at all the required streams and add their dependent streams
    for (int stream : requiredStreams) {
        completedStreams.add(stream);
    }

    for (int stream : requiredStreams) {
        streamCallStack.push(stream);
        recursiveBuild(stream, streamCallStack, queryGraph, outerInnerGraph, innerJoinGraph, completedStreams,
                substreamsPerStream, requiredPerStream, dependencyGraph);
        streamCallStack.pop();
    }
    // look at all the optional streams and add their dependent streams
    for (int stream : optionalStreams) {
        streamCallStack.push(stream);
        recursiveBuild(stream, streamCallStack, queryGraph, outerInnerGraph, innerJoinGraph, completedStreams,
                substreamsPerStream, requiredPerStream, dependencyGraph);
        streamCallStack.pop();
    }
}
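
The push-before-recurse / pop-after pattern above keeps streamCallStack equal to the current recursion path, which is what lets the dependency check use contains(). A minimal sketch of that idiom; the class name and adjacency map are hypothetical, not part of the Esper code:

import java.util.List;
import java.util.Map;
import java.util.Stack;

public class PathTracker {
    // Hypothetical adjacency list: node -> children
    static final Map<String, List<String>> CHILDREN = Map.of(
            "a", List.of("b", "c"),
            "b", List.of("c"),
            "c", List.of());

    // The stack always holds the path from the root to the current node,
    // so contains() answers "is X an ancestor of the node being visited?"
    static void visit(String node, Stack<String> path) {
        System.out.println("visiting " + node + ", path=" + path);
        for (String child : CHILDREN.get(node)) {
            path.push(child);  // extend the path before descending
            visit(child, path);
            path.pop();        // restore the path on the way back up
        }
    }

    public static void main(String[] args) {
        Stack<String> path = new Stack<>();
        path.push("a");
        visit("a", path);
    }
}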

From source file:com.amazonaws.services.kinesis.scaling.StreamScaler.java

private ScalingOperationReport scaleStream(String streamName, int originalShardCount, int targetShards,
        int operationsMade, int shardsCompleted, long startTime, Stack<ShardHashInfo> shardStack,
        Integer minCount, Integer maxCount) throws Exception {
    final double targetPct = 1d / targetShards;
    boolean checkMinMax = minCount != null || maxCount != null;
    String lastShardLower = null;
    String lastShardHigher = null;
    ScaleDirection scaleDirection = originalShardCount >= targetShards ? ScaleDirection.DOWN
            : ScaleDirection.UP;

    // seed the current shard count from the working stack
    int currentCount = shardStack.size();

    // we'll run iteratively until the shard stack is emptied or we reach
    // one of the caps
    ScalingCompletionStatus endStatus = ScalingCompletionStatus.Ok;
    do {
        if (checkMinMax) {
            // stop scaling if we've reached the min or max count
            boolean stopOnCap = false;
            String message = null;
            if (minCount != null && currentCount == minCount && targetShards <= minCount) {
                stopOnCap = true;
                if (operationsMade == 0) {
                    endStatus = ScalingCompletionStatus.AlreadyAtMinimum;
                } else {
                    endStatus = ScalingCompletionStatus.Ok;
                }
                message = String.format("%s: Minimum Shard Count of %s Reached", streamName, minCount);
            }
            if (maxCount != null && currentCount == maxCount && targetShards >= maxCount) {
                if (operationsMade == 0) {
                    endStatus = ScalingCompletionStatus.AlreadyAtMaximum;
                } else {
                    endStatus = ScalingCompletionStatus.Ok;
                }
                message = String.format("%s: Maximum Shard Count of %s Reached", streamName, maxCount);
                stopOnCap = true;
            }
            if (stopOnCap) {
                LOG.info(message);
                return reportFor(endStatus, streamName, operationsMade, scaleDirection);
            }
        }

        // report progress every shard completed
        if (shardsCompleted > 0) {
            reportProgress(streamName, shardsCompleted, currentCount, shardStack.size(), startTime);
        }

        // once the stack is emptied, return a report of the hash space
        // allocation
        if (shardStack.empty()) {
            return reportFor(endStatus, streamName, operationsMade, scaleDirection);
        }

        ShardHashInfo lowerShard = shardStack.pop();
        if (lowerShard != null) {
            lastShardLower = lowerShard.getShardId();
        } else {
            throw new Exception(String.format("%s: Null ShardHashInfo retrieved after processing %s",
                    streamName, lastShardLower));
        }

        // first check is if the bottom shard is smaller or larger than our
        // target width
        if (StreamScalingUtils.softCompare(lowerShard.getPctWidth(), targetPct) < 0) {
            if (shardStack.empty()) {
                // our current shard is smaller than the target size, but
                // there's nothing else to do
                return reportFor(endStatus, streamName, operationsMade, scaleDirection);
            } else {
                // get the next higher shard
                ShardHashInfo higherShard = shardStack.pop();

                if (higherShard != null) {
                    lastShardHigher = higherShard.getShardId();
                }

                if (StreamScalingUtils.softCompare(lowerShard.getPctWidth() + higherShard.getPctWidth(),
                        targetPct) > 0) {
                    // The two lowest shards together are larger than the
                    // target size, so split the upper at the target offset
                    // and
                    // merge the lower of the two new shards to the lowest
                    // shard
                    AdjacentShards splitUpper = higherShard.doSplit(kinesisClient,
                            targetPct - lowerShard.getPctWidth(),
                            shardStack.isEmpty() ? higherShard.getShardId()
                                    : shardStack.lastElement().getShardId());
                    operationsMade++;

                    // place the upper of the two new shards onto the stack
                    shardStack.push(splitUpper.getHigherShard());

                    // merge lower of the new shards with the lowest shard
                    LOG.info(String.format("Merging Shard %s with %s", lowerShard.getShardId(),
                            splitUpper.getLowerShard().getShardId()));
                    ShardHashInfo lowerMerged = new AdjacentShards(streamName, lowerShard,
                            splitUpper.getLowerShard()).doMerge(kinesisClient,
                                    shardStack.isEmpty() ? splitUpper.getHigherShard().getShardId()
                                            : shardStack.lastElement().getShardId());
                    LOG.info(String.format("Created Shard %s (%s)", lowerMerged.getShardId(),
                            pctFormat.format(lowerMerged.getPctWidth())));
                    shardsCompleted++;

                    // count of shards is unchanged in this case as we've
                    // just rebalanced, so current count is not updated
                } else {
                    // The lower and upper shards together are smaller than
                    // the target size, so merge the two shards together
                    ShardHashInfo lowerMerged = new AdjacentShards(streamName, lowerShard, higherShard)
                            .doMerge(kinesisClient, shardStack.isEmpty() ? higherShard.getShardId()
                                    : shardStack.lastElement().getShardId());
                    shardsCompleted++;
                    currentCount--;

                    // put the new shard back on the stack - it may still be
                    // too small relative to the target
                    shardStack.push(lowerMerged);
                }
            }
        } else if (StreamScalingUtils.softCompare(lowerShard.getPctWidth(), targetPct) == 0) {
            // at the correct size - move on
        } else {
            // lowest shard is larger than the target size so split at the
            // target offset
            AdjacentShards splitLower = lowerShard.doSplit(kinesisClient, targetPct,
                    shardStack.isEmpty() ? lowerShard.getShardId() : shardStack.lastElement().getShardId());
            operationsMade++;

            LOG.info(
                    String.format("Split Shard %s at %s Creating Final Shard %s and Intermediate Shard %s (%s)",
                            lowerShard.getShardId(), pctFormat.format(targetPct),
                            splitLower.getLowerShard().getShardId(), splitLower.getHigherShard().getShardId(),
                            pctFormat.format(splitLower.getHigherShard().getPctWidth())));

            // push the higher of the two splits back onto the stack
            shardStack.push(splitLower.getHigherShard());
            shardsCompleted++;
            currentCount++;
        }
    } while (!shardStack.empty());

    return reportFor(endStatus, streamName, operationsMade, scaleDirection);
}

From source file:org.apache.tajo.engine.planner.LogicalPlanner.java

@Override
public LogicalNode visitSort(PlanContext context, Stack<Expr> stack, Sort sort) throws PlanningException {
    QueryBlock block = context.queryBlock;

    int sortKeyNum = sort.getSortSpecs().length;
    Sort.SortSpec[] sortSpecs = sort.getSortSpecs();
    String[] referNames = new String[sortKeyNum];

    ExprNormalizedResult[] normalizedExprList = new ExprNormalizedResult[sortKeyNum];
    for (int i = 0; i < sortKeyNum; i++) {
        normalizedExprList[i] = normalizer.normalize(context, sortSpecs[i].getKey());
    }
    for (int i = 0; i < sortKeyNum; i++) {
        referNames[i] = block.namedExprsMgr.addExpr(normalizedExprList[i].baseExpr);
        block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].aggExprs);
        block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].scalarExprs);
    }

    ////////////////////////////////////////////////////////
    // Visit and Build Child Plan
    ////////////////////////////////////////////////////////
    stack.push(sort);
    LogicalNode child = visit(context, stack, sort.getChild());
    if (block.isAggregationRequired()) {
        child = insertGroupbyNode(context, child, stack);
    }
    stack.pop();
    ////////////////////////////////////////////////////////

    SortNode sortNode = block.getNodeFromExpr(sort);
    sortNode.setChild(child);
    sortNode.setInSchema(child.getOutSchema());
    sortNode.setOutSchema(child.getOutSchema());

    // Building sort keys
    Column column;
    List<SortSpec> annotatedSortSpecs = Lists.newArrayList();
    for (int i = 0; i < sortKeyNum; i++) {
        String refName = referNames[i];
        if (block.isConstReference(refName)) {
            continue;
        } else if (block.namedExprsMgr.isEvaluated(refName)) {
            column = block.namedExprsMgr.getTarget(refName).getNamedColumn();
        } else {
            throw new IllegalStateException("Unexpected State: " + TUtil.arrayToString(sortSpecs));
        }
        annotatedSortSpecs.add(new SortSpec(column, sortSpecs[i].isAscending(), sortSpecs[i].isNullFirst()));
    }

    if (annotatedSortSpecs.size() == 0) {
        return child;
    } else {
        sortNode.setSortSpecs(annotatedSortSpecs.toArray(new SortSpec[annotatedSortSpecs.size()]));
        return sortNode;
    }
}

From source file:gov.nih.nci.cagrid.sdk4query.processor.PublicDataCQL2ParameterizedHQL.java

/**
 * Processes CQL associations into HQL
 *
 * @param association
 *       The CQL association
 * @param hql
 *       The HQL fragment which will be edited
 * @param parameters
 *       The positional HQL query parameters
 * @param associationStack
 *       The stack of associations traversed so far
 * @param sourceQueryObject
 *       The query object to which this association belongs
 * @param sourceAlias
 *       The HQL alias of the source object
 * @throws QueryProcessingException
 */
private void processAssociation(Association association, StringBuilder hql, List<java.lang.Object> parameters,
        Stack<Association> associationStack, Object sourceQueryObject, String sourceAlias)
        throws QueryProcessingException {
    LOG.debug("Processing association " + sourceQueryObject.getName() + " to " + association.getName());

    // get the association's role name
    String roleName = roleNameResolver.getRoleName(sourceQueryObject.getName(), association);
    if (roleName == null) {
        // still null?? no association to the object!
        // TODO: should probably be malformed query exception
        throw new QueryProcessingException("Association from type " + sourceQueryObject.getName() + " to type "
                + association.getName() + " does not exist.  Use only direct associations");
    }
    LOG.debug("Role name determined to be " + roleName);

    // determine the alias for this association
    String alias = getAssociationAlias(sourceQueryObject.getName(), association.getName(), roleName);
    LOG.debug("Association alias determined to be " + alias);

    // add this association to the stack
    associationStack.push(association);

    // flag indicates the query is only verifying the association is populated
    boolean simpleNullCheck = true;
    if (association.getAssociation() != null) {
        simpleNullCheck = false;
        // add clause to select things from this association
        hql.append(sourceAlias).append('.').append(roleName);
        hql.append(".id in (select ").append(alias).append(".id from ");
        hql.append(association.getName()).append(" as ").append(alias).append(" where ");
        processAssociation(association.getAssociation(), hql, parameters, associationStack, association, alias);
        hql.append(") ");
    }
    if (association.getAttribute() != null) {
        simpleNullCheck = false;
        processAttribute(association.getAttribute(), hql, parameters, association,
                sourceAlias + "." + roleName);
    }
    if (association.getGroup() != null) {
        simpleNullCheck = false;
        hql.append(sourceAlias).append('.').append(roleName);
        hql.append(".id in (select ").append(alias).append(".id from ");
        hql.append(association.getName()).append(" as ").append(alias).append(" where ");
        processGroup(association.getGroup(), hql, parameters, associationStack, association, alias);
        hql.append(") ");
    }

    if (simpleNullCheck) {
        // query is checking for the association to exist and be non-null
        hql.append(sourceAlias).append('.').append(roleName).append(".id is not null ");
    }

    // pop this association off the stack
    associationStack.pop();
    LOG.debug(associationStack.size() + " associations remain on the stack");
}

From source file:com.udojava.evalex.Expression.java

/**
 * Implementation of the <i>Shunting Yard</i> algorithm to transform an
 * infix expression to a RPN expression.
 *
 * @param expression The input expression in infix notation.
 * @return An RPN representation of the expression, with each token as a list
 * member.
 */
private List<String> shuntingYard(String expression) {
    List<String> outputQueue = new ArrayList<>();
    Stack<String> stack = new Stack<>();

    Tokenizer tokenizer = new Tokenizer(expression);

    String lastFunction = null;
    String previousToken = null;
    while (tokenizer.hasNext()) {
        String token = tokenizer.next();
        if (isNumber(token)) {
            if (token.startsWith("x")) {

                BigInteger bd = new BigInteger(token.substring(1), 16);
                outputQueue.add(bd.toString(10));
            } else if (token.startsWith("b")) {
                BigInteger bd = new BigInteger(token.substring(1), 2);
                outputQueue.add(bd.toString(10));
            } else if (token.startsWith("o")) {
                BigInteger bd = new BigInteger(token.substring(1), 8);
                outputQueue.add(bd.toString(10));
            } else {
                outputQueue.add(token);
            }
        } else if (mainVars.containsKey(token)) {
            outputQueue.add(token);
        } else if (functions.containsKey(token.toUpperCase(Locale.ROOT))) {
            stack.push(token);
            lastFunction = token;
        } else if ((Character.isLetter(token.charAt(0)) || token.charAt(0) == '_')
                && !operators.containsKey(token)) {
            mainVars.put(token, new MyComplex(0, 0)); // create variable
            outputQueue.add(token);
            //stack.push(token);
        } else if (",".equals(token)) {
            if (operators.containsKey(previousToken)) {
                throw new ExpressionException("Missing parameter(s) for operator " + previousToken
                        + " at character position " + (tokenizer.getPos() - 1 - previousToken.length()));
            }
            while (!stack.isEmpty() && !"(".equals(stack.peek())) {
                outputQueue.add(stack.pop());
            }
            if (stack.isEmpty()) {
                throw new ExpressionException("Parse error for function '" + lastFunction + "'");
            }
        } else if (operators.containsKey(token)) {
            if (",".equals(previousToken) || "(".equals(previousToken)) {
                throw new ExpressionException("Missing parameter(s) for operator " + token
                        + " at character position " + (tokenizer.getPos() - token.length()));
            }
            Operator o1 = operators.get(token);
            String token2 = stack.isEmpty() ? null : stack.peek();
            while (token2 != null && operators.containsKey(token2)
                    && ((o1.isLeftAssoc() && o1.getPrecedence() <= operators.get(token2).getPrecedence())
                            || (o1.getPrecedence() < operators.get(token2).getPrecedence()))) {
                outputQueue.add(stack.pop());
                token2 = stack.isEmpty() ? null : stack.peek();
            }
            stack.push(token);
        } else if ("(".equals(token)) {
            if (previousToken != null) {
                if (isNumber(previousToken)) {
                    throw new ExpressionException(
                            "Missing operator at character position " + tokenizer.getPos());
                }
                // if the ( is preceded by a valid function, then it
                // denotes the start of a parameter list
                if (functions.containsKey(previousToken.toUpperCase(Locale.ROOT))) {
                    outputQueue.add(token);
                }
            }
            stack.push(token);
        } else if (")".equals(token)) {
            if (operators.containsKey(previousToken)) {
                throw new ExpressionException("Missing parameter(s) for operator " + previousToken
                        + " at character position " + (tokenizer.getPos() - 1 - previousToken.length()));
            }
            while (!stack.isEmpty() && !"(".equals(stack.peek())) {
                outputQueue.add(stack.pop());
            }
            if (stack.isEmpty()) {
                throw new ExpressionException("Mismatched parentheses");
            }
            stack.pop();
            if (!stack.isEmpty() && functions.containsKey(stack.peek().toUpperCase(Locale.ROOT))) {
                outputQueue.add(stack.pop());
            }
        }
        previousToken = token;
    }
    while (!stack.isEmpty()) {
        String element = stack.pop();
        if ("(".equals(element) || ")".equals(element)) {
            throw new ExpressionException("Mismatched parentheses");
        }

        if (!operators.containsKey(element)) {
            throw new ExpressionException("Unknown operator or function: " + element);
        }
        outputQueue.add(element);
    }
    return outputQueue;
}
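
For orientation, a minimal standalone sketch of the pop-driven precedence handling used above, assuming just two left-associative operators and whitespace-separated tokens; the class name and precedence table are illustrative, not part of the library:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Stack;

public class MiniShuntingYard {
    // Illustrative precedence table with two left-associative operators
    private static final Map<String, Integer> PRECEDENCE = Map.of("+", 1, "*", 2);

    // Converts whitespace-separated infix to RPN: before pushing an operator,
    // pop any stacked operator of equal or higher precedence to the output
    static List<String> toRpn(String infix) {
        List<String> output = new ArrayList<>();
        Stack<String> ops = new Stack<>();
        for (String token : infix.trim().split("\\s+")) {
            if (PRECEDENCE.containsKey(token)) {
                while (!ops.isEmpty() && PRECEDENCE.get(ops.peek()) >= PRECEDENCE.get(token)) {
                    output.add(ops.pop());
                }
                ops.push(token);
            } else {
                output.add(token); // operand goes straight to the output queue
            }
        }
        while (!ops.isEmpty()) {
            output.add(ops.pop()); // drain the remaining operators
        }
        return output;
    }

    public static void main(String[] args) {
        System.out.println(toRpn("3 + 4 * 2")); // [3, 4, 2, *, +]
    }
}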

From source file:com.sqewd.open.dal.core.persistence.db.EntityHelper.java

@SuppressWarnings({ "unchecked", "rawtypes" })
public static void setColumnValue(final ResultSet rs, final StructAttributeReflect attr,
        final AbstractEntity entity, final AbstractJoinGraph gr, final Stack<KeyValuePair<Class<?>>> path)
        throws Exception {

    KeyValuePair<String> alias = gr.getAliasFor(path, attr.Column, 0);
    String tabprefix = alias.getKey();

    if (EnumPrimitives.isPrimitiveType(attr.Field.getType())) {
        EnumPrimitives prim = EnumPrimitives.type(attr.Field.getType());
        switch (prim) {
        case ECharacter:
            String sv = rs.getString(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), sv.charAt(0));
            }
            break;
        case EShort:
            short shv = rs.getShort(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), shv);
            }
            break;
        case EInteger:
            int iv = rs.getInt(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), iv);
            }
            break;
        case ELong:
            long lv = rs.getLong(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), lv);
            }
            break;
        case EFloat:
            float fv = rs.getFloat(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), fv);
            }
            break;
        case EDouble:
            double dv = rs.getDouble(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), dv);
            }
            break;
        default:
            throw new Exception("Unsupported Data type [" + prim.name() + "]");
        }
    } else if (attr.Convertor != null) {
        String value = rs.getString(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            attr.Convertor.load(entity, attr.Column, value);
        }
    } else if (attr.Field.getType().equals(String.class)) {
        String value = rs.getString(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), value);
        }
    } else if (attr.Field.getType().equals(Date.class)) {
        long value = rs.getLong(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            Date dt = new Date(value);
            PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), dt);
        }
    } else if (attr.Field.getType().isEnum()) {
        String value = rs.getString(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            Class ecls = attr.Field.getType();
            Object evalue = Enum.valueOf(ecls, value);
            PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), evalue);
        }
    } else if (attr.Reference != null) {
        Class<?> rt = Class.forName(attr.Reference.Class);
        Object obj = rt.newInstance();
        if (!(obj instanceof AbstractEntity))
            throw new Exception("Unsupported Entity type [" + rt.getCanonicalName() + "]");
        AbstractEntity rentity = (AbstractEntity) obj;
        if (path.size() > 0) {
            path.peek().setKey(attr.Column);
        }

        KeyValuePair<Class<?>> cls = new KeyValuePair<Class<?>>();
        cls.setValue(rentity.getClass());
        path.push(cls);
        setEntity(rentity, rs, gr, path);
        PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), rentity);
        path.pop();
    }
}

From source file:org.apache.hadoop.hbase.filter.ParseFilter.java

/**
 * Parses the filterString and constructs a filter using it
 * <p>
 * @param filterStringAsByteArray filter string given by the user
 * @return filter object we constructed
 */
public Filter parseFilterString(byte[] filterStringAsByteArray) throws CharacterCodingException {
    // stack for the operators and parenthesis
    Stack<ByteBuffer> operatorStack = new Stack<ByteBuffer>();
    // stack for the filter objects
    Stack<Filter> filterStack = new Stack<Filter>();

    Filter filter = null;
    for (int i = 0; i < filterStringAsByteArray.length; i++) {
        if (filterStringAsByteArray[i] == ParseConstants.LPAREN) {
            // LPAREN found
            operatorStack.push(ParseConstants.LPAREN_BUFFER);
        } else if (filterStringAsByteArray[i] == ParseConstants.WHITESPACE
                || filterStringAsByteArray[i] == ParseConstants.TAB) {
            // WHITESPACE or TAB found
            continue;
        } else if (checkForOr(filterStringAsByteArray, i)) {
            // OR found
            i += ParseConstants.OR_ARRAY.length - 1;
            reduce(operatorStack, filterStack, ParseConstants.OR_BUFFER);
            operatorStack.push(ParseConstants.OR_BUFFER);
        } else if (checkForAnd(filterStringAsByteArray, i)) {
            // AND found
            i += ParseConstants.AND_ARRAY.length - 1;
            reduce(operatorStack, filterStack, ParseConstants.AND_BUFFER);
            operatorStack.push(ParseConstants.AND_BUFFER);
        } else if (checkForSkip(filterStringAsByteArray, i)) {
            // SKIP found
            i += ParseConstants.SKIP_ARRAY.length - 1;
            reduce(operatorStack, filterStack, ParseConstants.SKIP_BUFFER);
            operatorStack.push(ParseConstants.SKIP_BUFFER);
        } else if (checkForWhile(filterStringAsByteArray, i)) {
            // WHILE found
            i += ParseConstants.WHILE_ARRAY.length - 1;
            reduce(operatorStack, filterStack, ParseConstants.WHILE_BUFFER);
            operatorStack.push(ParseConstants.WHILE_BUFFER);
        } else if (filterStringAsByteArray[i] == ParseConstants.RPAREN) {
            // RPAREN found
            if (operatorStack.empty()) {
                throw new IllegalArgumentException("Mismatched parenthesis");
            }
            ByteBuffer argumentOnTopOfStack = operatorStack.peek();
            while (!(argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER))) {
                filterStack.push(popArguments(operatorStack, filterStack));
                if (operatorStack.empty()) {
                    throw new IllegalArgumentException("Mismatched parenthesis");
                }
                argumentOnTopOfStack = operatorStack.pop();
            }
        } else {
            // SimpleFilterExpression found
            byte[] filterSimpleExpression = extractFilterSimpleExpression(filterStringAsByteArray, i);
            i += (filterSimpleExpression.length - 1);
            filter = parseSimpleFilterExpression(filterSimpleExpression);
            filterStack.push(filter);
        }
    }

    // Finished parsing filterString
    while (!operatorStack.empty()) {
        filterStack.push(popArguments(operatorStack, filterStack));
    }
    filter = filterStack.pop();
    if (!filterStack.empty()) {
        throw new IllegalArgumentException("Incorrect Filter String");
    }
    return filter;
}
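
A short usage sketch of the parser above. It assumes the String overload of parseFilterString (which delegates to the byte[] version shown here) and standard HBase filter-language names such as PrefixFilter and QualifierFilter; treat both as assumptions about your HBase version:

import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.ParseFilter;

public class ParseFilterDemo {
    public static void main(String[] args) throws Exception {
        // AND/OR precedence and parentheses are resolved via the two stacks above
        Filter filter = new ParseFilter()
                .parseFilterString("PrefixFilter ('row') AND QualifierFilter (>=, 'binary:xyz')");
        System.out.println(filter);
    }
}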

From source file:fi.ni.IFC_ClassModel.java

/**
 * Parses an IFC line statement.
 * 
 * @param line
 *            the line
 */
private void parse_IFC_LineStatement(String line) {
    IFC_X3_VO ifcvo = new IFC_X3_VO();
    int state = 0;
    StringBuffer sb = new StringBuffer();
    int cl_count = 0;
    LinkedList<Object> current = ifcvo.getList();
    Stack<LinkedList<Object>> list_stack = new Stack<LinkedList<Object>>();
    for (int i = 0; i < line.length(); i++) {
        char ch = line.charAt(i);
        switch (state) {
        case 0:
            if (ch == '=') {
                ifcvo.setLine_num(toLong(sb.toString()));
                sb.setLength(0);
                state++;
                continue;
            } else if (Character.isDigit(ch))
                sb.append(ch);
            break;
        case 1: // (
            if (ch == '(') {
                ifcvo.setName(sb.toString());
                sb.setLength(0);
                state++;
                continue;
            } else if (ch == ';') {
                ifcvo.setName(sb.toString());
                sb.setLength(0);
                state = Integer.MAX_VALUE;
            } else if (!Character.isWhitespace(ch))
                sb.append(ch);
            break;
        case 2: // (... line started and doing (...
            if (ch == '\'') {
                state++;
            }
            if (ch == '(') {
                list_stack.push(current);
                LinkedList<Object> tmp = new LinkedList<Object>();
                if (sb.toString().trim().length() > 0)
                    current.add(sb.toString().trim());
                sb.setLength(0);
                current.add(tmp); // add the nested list to the current list
                current = tmp;
                cl_count++;
                // sb.append(ch);
            } else if (ch == ')') {
                if (cl_count == 0) {
                    if (sb.toString().trim().length() > 0)
                        current.add(sb.toString().trim());
                    sb.setLength(0);
                    state = Integer.MAX_VALUE; // line is done
                    continue;
                } else {
                    if (sb.toString().trim().length() > 0)
                        current.add(sb.toString().trim());
                    sb.setLength(0);
                    cl_count--;
                    current = list_stack.pop();
                }
            } else if (ch == ',') {
                if (sb.toString().trim().length() > 0)
                    current.add(sb.toString().trim());
                current.add(Character.valueOf(ch));

                sb.setLength(0);
            } else {
                sb.append(ch);

            }
            break;
        case 3: // (...
            if (ch == '\'') {
                state--;
            } else {
                sb.append(ch);

            }
            break;
        default:
            // Do nothing
        }
    }
    linemap.put(ifcvo.line_num, ifcvo);
}
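
The list_stack handling above is the classic way to build nested lists from parenthesized input: push the current list when '(' opens a sublist, pop it back when ')' closes. A compact standalone sketch of the same technique; the class name and input are illustrative, and quoting/escaping is ignored:

import java.util.LinkedList;
import java.util.Stack;

public class NestedListParser {
    // Parses a comma-separated, parenthesized string such as "(a,(b,c),d)"
    // into nested LinkedLists
    static LinkedList<Object> parse(String s) {
        LinkedList<Object> root = new LinkedList<>();
        LinkedList<Object> current = root;
        Stack<LinkedList<Object>> stack = new Stack<>();
        StringBuilder sb = new StringBuilder();
        for (char ch : s.toCharArray()) {
            if (ch == '(') {
                LinkedList<Object> child = new LinkedList<>();
                current.add(child);
                stack.push(current);   // remember where to return
                current = child;
            } else if (ch == ')') {
                if (sb.length() > 0) { current.add(sb.toString()); sb.setLength(0); }
                current = stack.pop(); // pop back to the enclosing list
            } else if (ch == ',') {
                if (sb.length() > 0) { current.add(sb.toString()); sb.setLength(0); }
            } else {
                sb.append(ch);
            }
        }
        return root;
    }

    public static void main(String[] args) {
        System.out.println(parse("(a,(b,c),d)")); // [[a, [b, c], d]]
    }
}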