Example usage for java.util.ArrayDeque: the ArrayDeque() constructor

Introduction

This page collects usage examples for the no-argument constructor of java.util.ArrayDeque.

Prototype

public ArrayDeque() 

Document

Constructs an empty array deque with an initial capacity sufficient to hold 16 elements.
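
A minimal sketch (class and variable names are illustrative) showing the two ways the examples below use this constructor: as a LIFO stack through the Deque interface and as a FIFO queue through the Queue interface.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Queue;

public class ArrayDequeDemo {
    public static void main(String[] args) {
        // As a LIFO stack: push/pop operate on the head.
        Deque<String> stack = new ArrayDeque<>();
        stack.push("a");
        stack.push("b");
        System.out.println(stack.pop()); // prints "b"

        // As a FIFO queue: add appends to the tail, poll removes from the head.
        Queue<Integer> queue = new ArrayDeque<>();
        queue.add(1);
        queue.add(2);
        System.out.println(queue.poll()); // prints "1"
    }
}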

Usage

From source file: org.apache.wicket.MarkupContainer.java

/**
 * Returns a sequential {@code Stream} with all children of this markup container as its
 * source. This stream does traverse the component tree.
 *
 * @return a sequential {@code Stream} over all children of this markup container
 * @since 8.0
 */
@SuppressWarnings("unchecked")
public Stream<Component> streamChildren() {
    class ChildrenIterator<C> implements Iterator<C> {
        private Iterator<C> currentIterator;

        private Deque<Iterator<C>> iteratorStack = new ArrayDeque<>();

        private ChildrenIterator(Iterator<C> iterator) {
            currentIterator = iterator;
        }

        @Override
        public boolean hasNext() {
            // Unwind until an iterator with remaining elements is found; a plain
            // 'if' would stop one level too early when several nested iterators
            // are exhausted at once.
            while (!currentIterator.hasNext() && !iteratorStack.isEmpty()) {
                currentIterator = iteratorStack.pop();
            }
            return currentIterator.hasNext();
        }

        @Override
        public C next() {
            C child = currentIterator.next();
            if (child instanceof Iterable) {
                // Descend into the child: suspend the current iterator on the
                // stack and continue with the child's own iterator.
                iteratorStack.push(currentIterator);
                currentIterator = ((Iterable<C>) child).iterator();
            }
            return child;
        }
    }
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(new ChildrenIterator<>(iterator()), 0),
            false);
}
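
The core technique here, a depth-first walk that suspends the current iterator on an ArrayDeque whenever it descends into a child Iterable, can be distilled into a standalone sketch. Everything below (FlattenDemo, flatten) is a hypothetical illustration, not Wicket API.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;

public class FlattenDemo {
    // Depth-first flattening of arbitrarily nested Iterables,
    // using an ArrayDeque as the stack of suspended iterators.
    static void flatten(Iterable<?> root) {
        Deque<Iterator<?>> stack = new ArrayDeque<>();
        Iterator<?> current = root.iterator();
        while (true) {
            while (!current.hasNext() && !stack.isEmpty()) {
                current = stack.pop(); // resume the parent iterator
            }
            if (!current.hasNext()) {
                return;
            }
            Object next = current.next();
            if (next instanceof Iterable) {
                stack.push(current);                        // suspend the parent
                current = ((Iterable<?>) next).iterator();  // descend
            } else {
                System.out.println(next);
            }
        }
    }

    public static void main(String[] args) {
        flatten(List.of(1, List.of(2, List.of(3)), 4)); // prints 1 2 3 4
    }
}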

From source file: org.codice.ddf.spatial.ogc.csw.catalog.endpoint.CswEndpointTest.java

private List<QueryResponse> getQueryResponseBatch(int batchSize, int total) {
    Queue<Result> results = new ArrayDeque<>();
    for (int i = 1; i <= total; i++) {
        MetacardImpl metacard = new MetacardImpl();
        metacard.setId(i + "");
        results.add(new ResultImpl(metacard));
    }

    List<QueryResponse> queryResponses = new ArrayList<>();
    while (!results.isEmpty()) {
        List<Result> batchList = new ArrayList<>();
        for (int i = 0; i < batchSize; i++) {
            Result result = results.poll();
            if (result == null) {
                break;
            }
            batchList.add(result);
        }
        queryResponses.add(new QueryResponseImpl(null, batchList, total));
    }

    // Add one empty response list to the end
    queryResponses.add(new QueryResponseImpl(null, Collections.emptyList(), 0));
    return queryResponses;
}
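
Stripped of the catalog types, the idiom above is draining a queue into fixed-size batches. A self-contained sketch with illustrative names; note that ArrayDeque rejects null elements, so a null from poll() unambiguously means the queue is empty.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.Queue;

public class BatchDemo {
    // Drain a queue into fixed-size batches; ArrayDeque's poll()
    // returns null (rather than throwing) once the queue is empty.
    static <T> List<List<T>> drainInBatches(Queue<T> queue, int batchSize) {
        List<List<T>> batches = new ArrayList<>();
        while (!queue.isEmpty()) {
            List<T> batch = new ArrayList<>();
            for (int i = 0; i < batchSize; i++) {
                T item = queue.poll();
                if (item == null) {
                    break; // queue exhausted mid-batch
                }
                batch.add(item);
            }
            batches.add(batch);
        }
        return batches;
    }

    public static void main(String[] args) {
        Queue<Integer> q = new ArrayDeque<>(List.of(1, 2, 3, 4, 5));
        System.out.println(drainInBatches(q, 2)); // [[1, 2], [3, 4], [5]]
    }
}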

From source file: org.wso2.ballerinalang.compiler.parser.BLangPackageBuilder.java

void createMatchNode(DiagnosticPos pos) {
    if (this.matchStmtStack == null) {
        this.matchStmtStack = new ArrayDeque<>();
    }

    BLangMatch matchStmt = (BLangMatch) TreeBuilder.createMatchStatement();
    matchStmt.pos = pos;

    this.matchStmtStack.addFirst(matchStmt);
}
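
A reduced sketch of the same pattern, with a stand-in String payload instead of Ballerina's BLangMatch: the deque is created lazily on first use, and addFirst/removeFirst give it stack semantics for tracking nested statements.

import java.util.ArrayDeque;
import java.util.Deque;

public class NestedStatementTracker {
    private Deque<String> stmtStack; // created lazily, as in the builder above

    void enter(String stmt) {
        if (stmtStack == null) {
            stmtStack = new ArrayDeque<>();
        }
        stmtStack.addFirst(stmt); // equivalent to push(stmt)
    }

    String exit() {
        return stmtStack.removeFirst(); // equivalent to pop()
    }

    public static void main(String[] args) {
        NestedStatementTracker t = new NestedStatementTracker();
        t.enter("outer-match");
        t.enter("inner-match");
        System.out.println(t.exit()); // inner-match
        System.out.println(t.exit()); // outer-match
    }
}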

From source file: org.apache.asterix.app.translator.QueryTranslator.java

private static ARecordType createEnforcedType(ARecordType initialType, List<Index> indexes)
        throws AlgebricksException {
    ARecordType enforcedType = initialType;
    for (Index index : indexes) {
        if (!index.isSecondaryIndex() || !index.isEnforcingKeyFileds()) {
            continue;
        }
        if (index.hasMetaFields()) {
            throw new AlgebricksException("Indexing an open field is only supported on the record part");
        }
        for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
            Deque<Pair<ARecordType, String>> nestedTypeStack = new ArrayDeque<>();
            List<String> splits = index.getKeyFieldNames().get(i);
            ARecordType nestedFieldType = enforcedType;
            boolean openRecords = false;
            String bridgeName = nestedFieldType.getTypeName();
            int j;
            // Build the stack for the enforced type
            for (j = 1; j < splits.size(); j++) {
                nestedTypeStack.push(new Pair<ARecordType, String>(nestedFieldType, splits.get(j - 1)));
                bridgeName = nestedFieldType.getTypeName();
                nestedFieldType = (ARecordType) enforcedType.getSubFieldType(splits.subList(0, j));
                if (nestedFieldType == null) {
                    openRecords = true;
                    break;
                }
            }
            if (openRecords) {
                // create the smallest record
                enforcedType = new ARecordType(splits.get(splits.size() - 2),
                        new String[] { splits.get(splits.size() - 1) },
                        new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) },
                        true);
                // create the open part of the nested field
                for (int k = splits.size() - 3; k > (j - 2); k--) {
                    enforcedType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
                            new IAType[] { AUnionType.createUnknownableType(enforcedType) }, true);
                }
                // Bridge the gap
                Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
                ARecordType parent = gapPair.first;

                IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
                        new IAType[] { AUnionType.createUnknownableType(enforcedType) });
                enforcedType = new ARecordType(bridgeName,
                        ArrayUtils.addAll(parent.getFieldNames(), enforcedType.getTypeName()), parentFieldTypes,
                        true);
            } else {
                // Schema is closed all the way to the field;
                // enforced fields are either null or strongly typed.
                LinkedHashMap<String, IAType> recordNameTypesMap = createRecordNameTypeMap(nestedFieldType);
                // If an enforced field already exists, check that its type is correct.
                IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
                if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION
                        && ((AUnionType) enforcedFieldType).isUnknownableType()) {
                    enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
                }
                if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(),
                        index.getKeyFieldTypes().get(i).getTypeTag())) {
                    throw new AlgebricksException("Cannot enforce field " + index.getKeyFieldNames().get(i)
                            + " to have type " + index.getKeyFieldTypes().get(i));
                }
                if (enforcedFieldType == null) {
                    recordNameTypesMap.put(splits.get(splits.size() - 1),
                            AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
                }
                enforcedType = new ARecordType(nestedFieldType.getTypeName(),
                        recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]),
                        recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]),
                        nestedFieldType.isOpen());
            }

            // Create the enforced type for the nested fields in the schema, from the ground up
            while (!nestedTypeStack.isEmpty()) {
                Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
                ARecordType nestedRecType = nestedTypePair.first;
                IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
                nestedRecTypeFieldTypes[nestedRecType.getFieldIndex(nestedTypePair.second)] = enforcedType;
                enforcedType = new ARecordType(nestedRecType.getTypeName() + "_enforced",
                        nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
            }
        }
    }
    return enforcedType;
}
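
Stripped of the Asterix type machinery, the stack discipline above is: push one (parent, field-name) frame per path segment while descending, then pop frames to re-wrap the leaf type in one enclosing record per level. A minimal sketch with hypothetical names:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

public class PathRebuildDemo {
    public static void main(String[] args) {
        List<String> path = List.of("address", "geo", "lat");
        Deque<String> frames = new ArrayDeque<>();

        // Descend: push one frame per enclosing field, innermost on top.
        for (int j = 0; j < path.size() - 1; j++) {
            frames.push(path.get(j));
        }

        // Unwind: wrap the leaf in one nested "record" per enclosing level.
        String type = "{" + path.get(path.size() - 1) + ": double}";
        while (!frames.isEmpty()) {
            type = "{" + frames.pop() + ": " + type + "}";
        }
        System.out.println(type); // {address: {geo: {lat: double}}}
    }
}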

From source file: org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.java

public void processPositionAlias(ASTNode ast) throws SemanticException {
    boolean isBothByPos = HiveConf.getBoolVar(conf, ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS);
    boolean isGbyByPos = isBothByPos || HiveConf.getBoolVar(conf, ConfVars.HIVE_GROUPBY_POSITION_ALIAS);
    boolean isObyByPos = isBothByPos || HiveConf.getBoolVar(conf, ConfVars.HIVE_ORDERBY_POSITION_ALIAS);

    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(ast);

    while (!stack.isEmpty()) {
        ASTNode next = stack.pop();

        if (next.getChildCount() == 0) {
            continue;
        }

        ASTNode selectNode = null;
        ASTNode groupbyNode = null;
        ASTNode orderbyNode = null;

        // get node type
        int child_count = next.getChildCount();
        for (int child_pos = 0; child_pos < child_count; ++child_pos) {
            ASTNode node = (ASTNode) next.getChild(child_pos);
            int type = node.getToken().getType();
            if (type == HiveParser.TOK_SELECT) {
                selectNode = node;
            } else if (type == HiveParser.TOK_GROUPBY) {
                groupbyNode = node;
            } else if (type == HiveParser.TOK_ORDERBY) {
                orderbyNode = node;
            }
        }

        if (selectNode != null) {
            int selectExpCnt = selectNode.getChildCount();

            // replace each of the position alias in GROUPBY with the actual column name
            if (groupbyNode != null) {
                for (int child_pos = 0; child_pos < groupbyNode.getChildCount(); ++child_pos) {
                    ASTNode node = (ASTNode) groupbyNode.getChild(child_pos);
                    if (node.getToken().getType() == HiveParser.Number) {
                        if (isGbyByPos) {
                            int pos = Integer.parseInt(node.getText());
                            if (pos > 0 && pos <= selectExpCnt) {
                                groupbyNode.setChild(child_pos, selectNode.getChild(pos - 1).getChild(0));
                            } else {
                                throw new SemanticException(ErrorMsg.INVALID_POSITION_ALIAS_IN_GROUPBY
                                        .getMsg("Position alias: " + pos + " does not exist\n"
                                                + "The Select List is indexed from 1 to " + selectExpCnt));
                            }
                        } else {
                            warn("Using constant number  " + node.getText()
                                    + " in group by. If you try to use position alias when hive.groupby.position.alias is false, the position alias will be ignored.");
                        }
                    }
                }
            }

            // orderby position will be processed in genPlan
        }

        for (int i = next.getChildren().size() - 1; i >= 0; i--) {
            stack.push((ASTNode) next.getChildren().get(i));
        }
    }
}
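
The traversal skeleton above is a standard iterative pre-order walk with an explicit ArrayDeque stack. A generic sketch (the Node type is hypothetical, not Hive's ASTNode): children are pushed right-to-left so they are popped, and therefore visited, left-to-right.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

public class PreOrderDemo {
    record Node(String label, List<Node> children) {}

    static void preOrder(Node root) {
        Deque<Node> stack = new ArrayDeque<>();
        stack.push(root);
        while (!stack.isEmpty()) {
            Node next = stack.pop();
            System.out.println(next.label());
            // Push children right-to-left so the leftmost child is popped first.
            for (int i = next.children().size() - 1; i >= 0; i--) {
                stack.push(next.children().get(i));
            }
        }
    }

    public static void main(String[] args) {
        Node b = new Node("b", List.of());
        Node c = new Node("c", List.of());
        preOrder(new Node("a", List.of(b, c))); // prints a, b, c
    }
}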