Example usage for java.util Deque addLast

List of usage examples for java.util Deque addLast

Introduction

On this page you can find usage examples for java.util Deque addLast.

Prototype

void addLast(E e);

Document

Inserts the specified element at the end of this deque if it is possible to do so immediately without violating capacity restrictions, throwing an IllegalStateException if no space is currently available.
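
To make that contract concrete, here is a minimal, self-contained sketch (not taken from any of the projects below): an unbounded ArrayDeque accepts every element, while a capacity-constrained deque such as java.util.concurrent.LinkedBlockingDeque throws IllegalStateException once it is full.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.concurrent.LinkedBlockingDeque;

public class AddLastDemo {
    public static void main(String[] args) {
        // Unbounded deque: addLast always succeeds.
        Deque<String> unbounded = new ArrayDeque<>();
        unbounded.addLast("a");
        unbounded.addLast("b");
        System.out.println(unbounded); // [a, b]

        // Bounded deque: addLast throws once capacity is exhausted.
        Deque<String> bounded = new LinkedBlockingDeque<>(1);
        bounded.addLast("a");
        try {
            bounded.addLast("b");
        } catch (IllegalStateException e) {
            System.out.println("rejected: " + e.getMessage()); // "Deque full"
        }
    }
}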

Usage

From source file:net.sf.jasperreports.engine.json.expression.member.evaluation.ObjectKeyExpressionEvaluator.java

private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) {
    if (log.isDebugEnabled()) {
        log.debug("going " + MemberExpression.DIRECTION.ANYWHERE_DOWN + " by "
                + (expression.isWildcard() ? "wildcard" : "key: [" + expression.getObjectKey() + "]") + " on "
                + jrJsonNode.getDataNode());
    }

    List<JRJsonNode> result = new ArrayList<>();
    Deque<JRJsonNode> stack = new ArrayDeque<>();
    JsonNode initialDataNode = jrJsonNode.getDataNode();

    if (log.isDebugEnabled()) {
        log.debug("initial stack population with: " + initialDataNode);
    }

    // populate the stack initially
    if (initialDataNode.isArray()) {
        for (JsonNode deeper : initialDataNode) {
            stack.addLast(jrJsonNode.createChild(deeper));
        }
    } else {
        stack.push(jrJsonNode);
    }

    while (!stack.isEmpty()) {
        JRJsonNode stackNode = stack.pop();
        JsonNode stackDataNode = stackNode.getDataNode();

        addChildrenToStack(stackNode, stack);

        if (log.isDebugEnabled()) {
            log.debug("processing stack element: " + stackDataNode);
        }

        // process the current stack item
        if (stackDataNode.isObject()) {
            if (log.isDebugEnabled()) {
                log.debug("stack element is object; wildcard: " + expression.isWildcard());
            }

            // if wildcard => only filter the parent; we already added the object keys to the stack
            if (expression.isWildcard()) {
                if (applyFilter(stackNode)) {
                    result.add(stackNode);
                }
            }
            // else go down and filter
            else {
                JRJsonNode deeperNode = goDeeperIntoObjectNode(stackNode, false);
                if (deeperNode != null) {
                    result.add(deeperNode);
                }
            }
        } else if (stackDataNode.isValueNode() || stackDataNode.isArray()) {
            if (log.isDebugEnabled()) {
                log.debug("stack element is " + (stackDataNode.isValueNode() ? "value node" : "array")
                        + "; wildcard: " + expression.isWildcard());
            }

            if (expression.isWildcard()) {
                if (applyFilter(stackNode)) {
                    result.add(stackNode);
                }
            }
        }
    }

    return result;
}
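
The method above seeds a deque, then repeatedly pops a node, queues its children with addLast, and examines it. The same iterative walk can be sketched independently of the JasperReports types; the Node class below is a hypothetical stand-in:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

public class TreeWalk {
    static class Node {
        final String name;
        final List<Node> children = new ArrayList<>();
        Node(String name) { this.name = name; }
    }

    /** Collects root and every descendant, iteratively, without recursion. */
    static List<Node> allNodes(Node root) {
        List<Node> result = new ArrayList<>();
        Deque<Node> stack = new ArrayDeque<>();
        stack.addLast(root);
        while (!stack.isEmpty()) {
            Node current = stack.pop();      // removes from the head
            for (Node child : current.children) {
                stack.addLast(child);        // appends at the tail
            }
            result.add(current);
        }
        return result;
    }
}

Because elements leave at the head (pop) and enter at the tail (addLast), siblings are visited in insertion order; replacing addLast with push would make the walk depth-first.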

From source file:gov.nih.nci.cacis.common.util.ExtractSchematron.java

private void processElement(Deque<XSElementDeclaration> eltStack, XSElementDeclaration element) {
    eltStack.addLast(element);
    processElement(eltStack);
    eltStack.removeLast();
}
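
The addLast/removeLast pair brackets the recursive call, so the deque always mirrors the path from the schema root down to the element currently being processed. A minimal sketch of the same bracketing idiom, using a hypothetical String-based path:

import java.util.ArrayDeque;
import java.util.Deque;

public class PathTracker {
    private final Deque<String> path = new ArrayDeque<>();

    /** Keeps `path` equal to the chain of ancestors while node is visited. */
    void visit(String node) {
        path.addLast(node);
        try {
            System.out.println("at " + String.join("/", path));
            // ... recurse into children of node here ...
        } finally {
            path.removeLast(); // undo on the way back up
        }
    }
}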

From source file:org.polymap.core.data.pipeline.DefaultPipelineIncubator.java

protected boolean findTransformation(ProcessorDescription from, ProcessorDescription to, LayerUseCase usecase,
        Deque<ProcessorDescription> chain) {
    log.debug(StringUtils.repeat("    ", chain.size()) + "findTransformation: " + from + " => " + to + " -- "
            + usecase);

    // recursion break
    if (chain.size() > 16) {
        return false;
    }

    // recursion start
    if (from.getSignature().isCompatible(to.getSignature())) {
        chain.addLast(to);
        log.debug(StringUtils.repeat("    ", chain.size()) + "Transformation found: " + chain);
        return true;
    }

    // recursion step
    else {
        for (ProcessorDescription desc : allTransformers(usecase)) {
            if (from.getSignature().isCompatible(desc.getSignature()) && !chain.contains(desc)) {
                chain.addLast(desc);
                if (findTransformation(desc, to, usecase, chain)) {
                    //log.debug( "      transformation found: " + desc );
                    return true;
                }
                chain.removeLast();
            }
        }
        return false;
    }
}
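
Here the deque implements classic backtracking: addLast tentatively extends the candidate chain before recursing, and removeLast undoes the extension when the recursive call fails. The !chain.contains(desc) guard prevents revisiting a transformer already on the chain, and the size cap of 16 bounds the recursion depth; on success the matching processor stays on the chain, which is the method's actual result.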

From source file:org.openregistry.core.domain.jpa.JpaPersonImpl.java

@Override
public Map<String, Deque<Identifier>> getIdentifiersByType() {
    final Map<String, Deque<Identifier>> identifiersByType = new HashMap<String, Deque<Identifier>>();

    for (final Identifier identifier : this.identifiers) {
        final String identifierType = identifier.getType().getName();
        Deque<Identifier> listIdentifiers = identifiersByType.get(identifierType);

        if (listIdentifiers == null) {
            listIdentifiers = new ArrayDeque<Identifier>();
            identifiersByType.put(identifierType, listIdentifiers);
        }

        if (identifier.isPrimary()) {
            listIdentifiers.addFirst(identifier);
        } else {
            listIdentifiers.addLast(identifier);
        }
    }

    return identifiersByType;
}
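
On Java 8+ the null-check-and-put sequence can be collapsed with Map.computeIfAbsent. Below is a hedged sketch of the same grouping logic, with a hypothetical record (Java 16+) standing in for the project's Identifier type:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class IdentifierGrouping {
    record Id(String type, String value, boolean primary) {} // hypothetical stand-in

    static Map<String, Deque<Id>> byType(List<Id> identifiers) {
        Map<String, Deque<Id>> result = new HashMap<>();
        for (Id id : identifiers) {
            Deque<Id> deque = result.computeIfAbsent(id.type(), k -> new ArrayDeque<>());
            if (id.primary()) {
                deque.addFirst(id); // primary identifiers jump to the front
            } else {
                deque.addLast(id);  // the rest keep insertion order at the back
            }
        }
        return result;
    }
}

As in the original, if several identifiers of one type are primary, the one processed last ends up at the very front.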

From source file:org.apache.oozie.workflow.lite.LiteWorkflowValidator.java

/**
 * Basic recursive validation of the workflow:
 * - it is acyclic, no loops
 * - names of the actions follow a specific pattern
 * - all nodes have valid transitions
 * - it only has supported action nodes
 * - there is no node that points to itself
 * - counts fork/join nodes
 *
 * @param app The WorkflowApp
 * @param node Current node we're checking
 * @param path The list of nodes that we've visited so far in this call chain
 * @param checkedNodes The set of nodes that we've already checked. For example, if it's a decision node, we
 * don't have to re-walk the entire path, because that indicates it has already been checked on a separate path
 * @param forkJoinCount Number of fork and join nodes
 * @throws WorkflowException If any of the constraints described above is violated
 */
private void performBasicValidation(LiteWorkflowApp app, NodeDef node, Deque<String> path,
        Set<NodeDef> checkedNodes, ForkJoinCount forkJoinCount) throws WorkflowException {
    String nodeName = node.getName();

    checkActionName(node);
    if (node instanceof ActionNodeDef) {
        checkActionNode(node);
    } else if (node instanceof ForkNodeDef) {
        forkJoinCount.forks++;
    } else if (node instanceof JoinNodeDef) {
        forkJoinCount.joins++;
    }
    checkCycle(path, nodeName);

    path.addLast(nodeName);

    List<String> transitions = node.getTransitions();
    // Get all transitions and walk the workflow recursively
    if (!transitions.isEmpty()) {
        for (final String t : transitions) {
            NodeDef transitionNode = app.getNode(t);
            if (transitionNode == null) {
                throw new WorkflowException(ErrorCode.E0708, node.getName(), t);
            }

            if (!checkedNodes.contains(transitionNode)) {
                performBasicValidation(app, transitionNode, path, checkedNodes, forkJoinCount);
                checkedNodes.add(transitionNode);
            }
        }
    }

    path.remove(nodeName);
}
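
The Deque&lt;String&gt; serves as the live DFS path: addLast appends the node's name before its transitions are walked, and path.remove(nodeName) takes it off afterwards. Removal is by value rather than removeLast, which is equivalent as long as a name can never occur twice on the path; checkCycle presumably rejects exactly that case before the name is appended.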

From source file:com.blm.orc.OrcRawRecordMerger.java

@Override
public ObjectInspector getObjectInspector() {
    // Read the configuration parameters
    String columnNameProperty = conf.get(serdeConstants.LIST_COLUMNS);
    // NOTE: if "columns.types" is missing, all columns will be of String type
    String columnTypeProperty = conf.get(serdeConstants.LIST_COLUMN_TYPES);

    // Parse the configuration parameters
    ArrayList<String> columnNames = new ArrayList<String>();
    Deque<Integer> virtualColumns = new ArrayDeque<Integer>();
    if (columnNameProperty != null && columnNameProperty.length() > 0) {
        String[] colNames = columnNameProperty.split(",");
        for (int i = 0; i < colNames.length; i++) {
            if (VirtualColumn.VIRTUAL_COLUMN_NAMES.contains(colNames[i])) {
                virtualColumns.addLast(i);
            } else {
                columnNames.add(colNames[i]);
            }
        }
    }
    if (columnTypeProperty == null) {
        // Default type: all string
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < columnNames.size(); i++) {
            if (i > 0) {
                sb.append(":");
            }
            sb.append("string");
        }
        columnTypeProperty = sb.toString();
    }

    ArrayList<TypeInfo> fieldTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    while (virtualColumns.size() > 0) {
        fieldTypes.remove(virtualColumns.removeLast());
    }
    StructTypeInfo rowType = new StructTypeInfo();
    rowType.setAllStructFieldNames(columnNames);
    rowType.setAllStructFieldTypeInfos(fieldTypes);
    return OrcRecordUpdater.createEventSchema(OrcStruct.createObjectInspector(rowType));
}
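
The addLast/removeLast pairing is load-bearing here: the virtual-column indices were collected in ascending order, so draining the deque from the tail removes fields from the highest index down, and earlier removals never shift indices still waiting in the deque. A minimal illustration with hypothetical data:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

public class DescendingRemoval {
    public static void main(String[] args) {
        List<String> fields = new ArrayList<>(List.of("a", "b", "c", "d"));
        Deque<Integer> toRemove = new ArrayDeque<>();
        toRemove.addLast(1);   // indices collected in ascending order...
        toRemove.addLast(3);
        while (!toRemove.isEmpty()) {
            fields.remove(toRemove.removeLast().intValue()); // ...removed descending
        }
        System.out.println(fields); // [a, c]
    }
}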

From source file:org.apache.metron.stellar.common.StellarCompiler.java

private void exitLambda(boolean hasArgs) {
    final FrameContext.Context context = getArgContext();
    Token<?> t = expression.tokenDeque.pop();
    final Deque<Token<?>> instanceDeque = new ArrayDeque<>();
    for (; !expression.tokenDeque.isEmpty() && t != EXPRESSION_REFERENCE; t = expression.tokenDeque.pop()) {
        instanceDeque.addLast(t);
    }
    final List<String> variables = hasArgs ? (List<String>) instanceDeque.removeLast().getValue()
            : new ArrayList<>();
    expression.tokenDeque.push(new Token<>((tokenDeque, state) -> {
        LambdaExpression expr = new LambdaExpression(variables, instanceDeque, state);
        tokenDeque.push(new Token<>(expr, Object.class, context));
    }, DeferredFunction.class, context));
}
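
Because pop takes tokens from the head of expression.tokenDeque while addLast appends to the tail of instanceDeque, the tokens land in instanceDeque in pop order, most recently pushed first. The removeLast that follows therefore retrieves the earliest-pushed token of the collected run, which, when hasArgs is true, is the token holding the lambda's argument-name list.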

From source file:com.jaspersoft.jasperserver.war.cascade.token.FilterCore.java

@Override
public LinkedHashSet<String> resolveCascadingOrder(Map<String, Set<String>> masterDependencies) {
    Deque<String> orderedNames = new LinkedList<String>();
    Queue<String> workingQueue = new LinkedList<String>(masterDependencies.keySet());
    int maxIterations = (masterDependencies.size() * (masterDependencies.size() + 1)) / 2 + 1;
    while (workingQueue.size() > 0 && maxIterations-- > 0) {
        String currentName = workingQueue.remove();

        Set<String> masterDependency = masterDependencies.get(currentName);
        if (masterDependency == null || masterDependency.isEmpty()) {
            orderedNames.addFirst(currentName);
        } else {
            if (orderedNames.containsAll(masterDependency)) {
                orderedNames.addLast(currentName);
            } else {
                workingQueue.add(currentName);
            }
        }
    }
    if (maxIterations > 0) {
        return new LinkedHashSet<String>(orderedNames);
    } else {
        throw new JSException("Order cannot be resolved because of circular or non-existing dependencies.");
    }
}
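
This is a repeated-pass topological ordering: a name with no master dependencies goes to the front with addFirst, a name whose masters are all already placed is appended with addLast, and anything else is re-queued for a later pass. The bound of n(n+1)/2 + 1 covers the worst case where each pass places only one name (n + (n-1) + ... + 1 dequeues), so a resolvable input always terminates with maxIterations still positive; only a circular or missing dependency can exhaust it.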

From source file:ocr.sapphire.image.EdgeBasedImagePreprocessor.java

private Deque<Point> findConnectedComponent(int[] edgeData, int x, int y) {
    Deque<Point> points = new LinkedList<Point>();
    Deque<Point> queue = new LinkedList<Point>();

    edgeData[x + y * width] = WHITE;
    Point initialPoint = new Point(x, y);
    points.add(initialPoint);
    queue.push(initialPoint);

    while (!queue.isEmpty()) {
        Point point = queue.removeFirst();
        for (int k = 0; k < 8; k++) {
            int x2 = (int) (point.x + DX[k]);
            int y2 = (int) (point.y + DY[k]);
            if (x2 < 0 || y2 < 0 || x2 >= width || y2 >= height) {
                continue;
            }
            if (edgeData[x2 + y2 * width] == BLACK) {
                edgeData[x2 + y2 * width] = WHITE;
                Point point2 = new Point(x2, y2);
                points.add(point2);
                queue.addLast(point2);
            }
        }
    }
    return points;
}
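
Despite the initial queue.push, the loop removes from the head with removeFirst and appends with addLast, so the connected component is explored breadth-first. Marking a pixel WHITE at the moment it is enqueued, rather than when it is dequeued, guarantees that no pixel can enter the queue twice.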

From source file:com.ggvaidya.scinames.model.Project.java

public void serializeToDocument(Document doc) {
    // Add top-level element.
    Element project = doc.createElement("project");
    project.setAttribute("name", getName());
    doc.appendChild(project);

    // Set up some properties.
    properties.put(PROP_NAME_EXTRACTORS,
            getNameExtractors().stream().map(lne -> NameExtractorFactory.serializeExtractorsToString(lne))
                    .distinct().sorted().collect(Collectors.joining("; ")));

    // Write out properties.
    Element propertiesElement = doc.createElement("properties");
    for (String key : properties.keySet()) {
        Element p = doc.createElement("property");
        p.setAttribute("name", key);
        p.setTextContent(properties.get(key));
        propertiesElement.appendChild(p);
    }
    project.appendChild(propertiesElement);

    // Add filters.
    Element filtersElement = doc.createElement("filters");
    Deque<ChangeFilter> changeFilters = new LinkedList<>();
    {
        // We need to read the filters inside-out, so they'll be recreated
        // the right way around.
        ChangeFilter cf = getChangeFilter();
        while (cf != null) {
            changeFilters.addLast(cf);
            cf = cf.getPrevChangeFilter();
        }
    }
    changeFilters.forEach(cf -> {
        // Skip any nulls.
        if (cf.getShortName().equals("null"))
            return;

        filtersElement.appendChild(cf.serializeToElement(doc));
    });

    project.appendChild(filtersElement);

    // List all timepoints.
    Element timepointsElement = doc.createElement("datasets");
    for (Dataset tp : getDatasets()) {
        Element t = tp.serializeToElement(doc);
        timepointsElement.appendChild(t);
    }
    project.appendChild(timepointsElement);
}
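
The block with the local deque linearizes a backward-linked chain: each ChangeFilter only exposes its predecessor via getPrevChangeFilter, so the while loop walks the chain and addLast materializes it in walk order before the forEach serializes each filter (skipping the "null" placeholders).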