Example usage for java.util Deque push

Introduction

On this page you can find example usages of java.util Deque push.

Prototype

void push(E e);

Document

Pushes an element onto the stack represented by this deque (in other words, at the head of this deque) if it is possible to do so immediately without violating capacity restrictions, throwing an IllegalStateException if no space is currently available.
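
The common java.util implementations (ArrayDeque, LinkedList) are unbounded, so their push never fails for capacity reasons; the IllegalStateException only shows up on a capacity-restricted deque such as java.util.concurrent.LinkedBlockingDeque. A minimal sketch of both behaviors:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.concurrent.LinkedBlockingDeque;

public class DequePushDemo {
    public static void main(String[] args) {
        // Unbounded deque: push always succeeds and adds at the head.
        Deque<String> stack = new ArrayDeque<String>();
        stack.push("first");
        stack.push("second");
        System.out.println(stack.pop()); // prints "second" (LIFO)

        // Capacity-restricted deque: push throws when no space is available.
        Deque<String> bounded = new LinkedBlockingDeque<String>(1);
        bounded.push("only");
        try {
            bounded.push("overflow");
        } catch (IllegalStateException e) {
            System.out.println("deque full: " + e.getMessage());
        }
    }
}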

Usage

From source file:org.apache.hadoop.hive.ql.parse.ASTNode.java

/**
 * For every node in this subtree, make sure its start/stop tokens
 * are set.  Walk depth first, visit bottom up.  Only updates nodes
 * with at least one token index < 0.
 *
 * In contrast to the method in the parent class, this method is
 * iterative.
 */
@Override
public void setUnknownTokenBoundaries() {
    Deque<ASTNode> stack1 = new ArrayDeque<ASTNode>();
    Deque<ASTNode> stack2 = new ArrayDeque<ASTNode>();
    stack1.push(this);

    while (!stack1.isEmpty()) {
        ASTNode next = stack1.pop();
        stack2.push(next);

        if (next.children != null) {
            for (int i = next.children.size() - 1; i >= 0; i--) {
                stack1.push((ASTNode) next.children.get(i));
            }
        }
    }

    while (!stack2.isEmpty()) {
        ASTNode next = stack2.pop();

        if (next.children == null) {
            if (next.startIndex < 0 || next.stopIndex < 0) {
                next.startIndex = next.stopIndex = next.token.getTokenIndex();
            }
        } else if (next.startIndex >= 0 && next.stopIndex >= 0) {
            continue;
        } else if (next.children.size() > 0) {
            ASTNode firstChild = (ASTNode) next.children.get(0);
            ASTNode lastChild = (ASTNode) next.children.get(next.children.size() - 1);
            next.startIndex = firstChild.getTokenStartIndex();
            next.stopIndex = lastChild.getTokenStopIndex();
        }
    }
}
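
The method above is the classic two-stack, non-recursive post-order traversal: every node popped from the first stack is pushed onto the second, so when the second stack is drained, children always come off before their parents. A minimal standalone sketch of the same idiom, with a hypothetical Node type:

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

public class PostOrder {

    /** Hypothetical tree node. */
    static class Node {
        final String name;
        final List<Node> children;

        Node(String name, Node... children) {
            this.name = name;
            this.children = Arrays.asList(children);
        }
    }

    static void visitPostOrder(Node root) {
        Deque<Node> stack1 = new ArrayDeque<Node>();
        Deque<Node> stack2 = new ArrayDeque<Node>();
        stack1.push(root);

        while (!stack1.isEmpty()) {
            Node next = stack1.pop();
            stack2.push(next); // parents sink below the children pushed after them
            for (Node child : next.children) {
                stack1.push(child);
            }
        }

        while (!stack2.isEmpty()) {
            System.out.println(stack2.pop().name); // children always before parents
        }
    }

    public static void main(String[] args) {
        Node root = new Node("root",
                new Node("a"),
                new Node("b", new Node("c")));
        visitPostOrder(root); // prints a, c, b, root
    }
}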

From source file:org.nuxeo.ecm.platform.routing.core.impl.GraphRouteImpl.java

/**
 * Finds which transitions are re-looping (feedback arc set).
 */
protected void computeLoopTransitions(String startNodeId) throws DocumentRouteException {
    if (startNodeId == null) {
        // incomplete graph
        return;
    }
    /*
     * Depth-first search. In the todo stack, each element records a list of the siblings left to visit at that
     * depth. After visiting the last sibling, we go back to the parent and at this point mark it as visited in
     * post-traversal order.
     */
    List<String> postOrder = new LinkedList<String>();
    Deque<Deque<String>> stack = new LinkedList<Deque<String>>();
    Deque<String> first = new LinkedList<String>();
    first.add(startNodeId);
    stack.push(first);
    Set<String> done = new HashSet<String>();
    for (;;) {
        // find next sibling
        String nodeId = stack.peek().peek();
        if (nodeId == null) {
            // last sibling done
            // go back up one level and mark post-traversal order
            stack.pop(); // pop empty children
            if (stack.isEmpty()) {
                // we are done
                break;
            }
            nodeId = stack.peek().pop(); // pop parent
            postOrder.add(nodeId); // mark post-traversal order
        } else if (done.add(nodeId)) {
            // traverse the next sibling
            Deque<String> children = new LinkedList<String>();
            for (Transition t : getNode(nodeId).getOutputTransitions()) {
                children.add(t.target);
            }
            // add children to stack and recurse
            stack.push(children);
        } else {
            // already traversed
            stack.peek().pop(); // skip it
        }
    }

    // reverse the post-order to find the topological ordering
    Collections.reverse(postOrder);
    Map<String, Integer> ordering = new HashMap<String, Integer>();
    int i = 1;
    for (String nodeId : postOrder) {
        ordering.put(nodeId, Integer.valueOf(i++));
    }

    // walk the graph and all transitions again
    // and mark as looping the transitions pointing to a node
    // with a smaller order than the source
    done.clear();
    Deque<String> todo = new LinkedList<String>();
    todo.add(startNodeId);
    while (!todo.isEmpty()) {
        String nodeId = todo.pop();
        if (done.add(nodeId)) {
            int source = ordering.get(nodeId).intValue();
            for (Transition t : getNode(nodeId).getOutputTransitions()) {
                todo.push(t.target);
                // compare orders to detect feedback arcs
                int target = ordering.get(t.target).intValue();
                if (target <= source) {
                    t.loop = true;
                }
            }
        }
    }
}
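
Behind the workflow classes this is standard feedback-arc detection: collect a DFS post-order, reverse it to get a topological-like numbering, then flag every edge whose target's number is not strictly greater than its source's. A compact sketch of the same computation on a plain adjacency map (recursive DFS for brevity, where the method above is deliberately iterative; all names here are hypothetical):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class LoopEdges {

    static void dfs(String node, Map<String, List<String>> graph, Set<String> done, List<String> post) {
        if (!done.add(node)) {
            return; // already traversed
        }
        for (String next : graph.getOrDefault(node, Collections.<String>emptyList())) {
            dfs(next, graph, done, post);
        }
        post.add(node); // post-order: recorded after all successors
    }

    /** Returns the edges that point backwards (or to themselves) in reverse post-order. */
    static List<String> loopEdges(Map<String, List<String>> graph, String start) {
        List<String> post = new ArrayList<String>();
        dfs(start, graph, new HashSet<String>(), post);
        Collections.reverse(post); // reverse post-order ~ topological order, ignoring back edges

        Map<String, Integer> ordering = new HashMap<String, Integer>();
        for (int i = 0; i < post.size(); i++) {
            ordering.put(post.get(i), i);
        }

        List<String> loops = new ArrayList<String>();
        for (Map.Entry<String, List<String>> e : graph.entrySet()) {
            Integer source = ordering.get(e.getKey());
            for (String target : e.getValue()) {
                Integer t = ordering.get(target);
                if (source != null && t != null && t <= source) {
                    loops.add(e.getKey() + "->" + target); // back edge: closes a cycle
                }
            }
        }
        return loops;
    }

    public static void main(String[] args) {
        Map<String, List<String>> graph = new HashMap<String, List<String>>();
        graph.put("a", Arrays.asList("b"));
        graph.put("b", Arrays.asList("c"));
        graph.put("c", Arrays.asList("a")); // c -> a re-loops
        System.out.println(loopEdges(graph, "a")); // [c->a]
    }
}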

From source file:com.spotify.helios.agent.QueueingHistoryWriter.java

private void putBack(TaskStatusEvent event) {
    final JobId key = event.getStatus().getJob().getId();
    final Deque<TaskStatusEvent> queue = getDeque(key);
    synchronized (queue) {
        if (queue.size() >= MAX_QUEUE_SIZE) {
            // already full, just toss the event
            return;
        }
        queue.push(event);
        count.incrementAndGet();
    }
}
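
The pattern in putBack — check the size under the deque's own monitor and drop the element when full, since plain Deque.push has no capacity limit of its own — is easy to factor out. A minimal sketch of a bounded, silently-dropping stack (hypothetical class):

import java.util.ArrayDeque;
import java.util.Deque;

/** A bounded stack that silently drops pushes once it reaches capacity. */
class BoundedStack<E> {
    private final Deque<E> deque = new ArrayDeque<E>();
    private final int maxSize;

    BoundedStack(int maxSize) {
        this.maxSize = maxSize;
    }

    /** Returns false (and drops the element) if the stack is full. */
    synchronized boolean pushIfRoom(E element) {
        if (deque.size() >= maxSize) {
            return false; // already full, just toss the element
        }
        deque.push(element);
        return true;
    }

    synchronized E pop() {
        return deque.pop();
    }
}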

From source file:hudson.plugins.nested_view.NestedView.java

/**
 * Returns the health of this nested view.
 * <p/>
 * <p>Notice that, if a job is contained in several sub-views of the current
 * view, then it is taken into account only once to get accurate stats.</p>
 * <p>This algorithm has been derecursified, hence the stack stuff.</p>
 */
public HealthReportContainer getHealth() {
    // we use a set to avoid taking into account several times the same job
    // when computing the health
    Set<TopLevelItem> items = new LinkedHashSet<TopLevelItem>(100);

    // retrieve all jobs to analyze (using DFS)
    Deque<View> viewsStack = new ArrayDeque<View>(20);
    viewsStack.push(this);
    do {
        View currentView = viewsStack.pop();
        if (currentView instanceof NestedView) {
            for (View v : ((NestedView) currentView).views) {
                viewsStack.push(v);
            }
        } else {
            items.addAll(currentView.getItems());
        }
    } while (!viewsStack.isEmpty());

    HealthReportContainer hrc = new HealthReportContainer();
    for (TopLevelItem item : items) {
        if (item instanceof Job) {
            hrc.sum += ((Job) item).getBuildHealth().getScore();
            hrc.count++;
        }
    }

    hrc.report = hrc.count > 0 ? new HealthReport(hrc.sum / hrc.count, Messages._ViewHealth(hrc.count))
            : new HealthReport(100, Messages._NoJobs());

    return hrc;
}
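
Stripped of the Jenkins types, the traversal is an explicit-stack DFS in which composite nodes push their children and leaves are collected into a LinkedHashSet, so a leaf reachable through several branches is counted exactly once. A hypothetical sketch:

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class CollectLeaves {

    /** Hypothetical node: a non-null children list marks a group, otherwise a leaf. */
    static class Node {
        String value;        // used on leaves
        List<Node> children; // used on groups

        static Node leaf(String value) {
            Node n = new Node();
            n.value = value;
            return n;
        }

        static Node group(Node... children) {
            Node n = new Node();
            n.children = Arrays.asList(children);
            return n;
        }
    }

    static Set<String> collectValues(Node root) {
        // a set, so a leaf reachable through several groups is counted once
        Set<String> values = new LinkedHashSet<String>();
        Deque<Node> stack = new ArrayDeque<Node>();
        stack.push(root);
        do {
            Node current = stack.pop();
            if (current.children != null) {
                for (Node child : current.children) {
                    stack.push(child);
                }
            } else {
                values.add(current.value);
            }
        } while (!stack.isEmpty());
        return values;
    }

    public static void main(String[] args) {
        Node shared = Node.leaf("shared");
        Node root = Node.group(Node.group(shared), Node.group(shared));
        System.out.println(collectValues(root)); // [shared] -- counted once
    }
}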

From source file:org.eclipse.cbi.maven.plugins.macsigner.SignMojo.java

/**
 * Creates a zip file.
 * @param dir                   The directory of the files to be zipped.
 * @param zip                   An output stream to write the zip file to.
 * @throws IOException
 */
private void createZip(File dir, ZipArchiveOutputStream zip) throws IOException {
    Deque<File> dir_stack = new LinkedList<File>();
    dir_stack.push(dir);

    // base path is the parent of the "Application.app" folder
    // it will be used to make "Application.app" the top-level folder in the zip
    String base_path = getParentDirAbsolutePath(dir);

    // verify that "dir" actually is the ".app" folder
    if (!dir.getName().endsWith(".app"))
        throw new IOException("Please verify the configuration. Directory does not end with '.app': " + dir);

    while (!dir_stack.isEmpty()) {

        File file = dir_stack.pop();
        File[] files = file.listFiles();

        for (File f : files) {
            String name = f.getAbsolutePath().substring(base_path.length());
            getLog().debug("Found: " + name);

            if (f.isFile() && isInContentsFolder(name)) {
                getLog().debug("Adding to zip file for signing: " + f);

                ZipArchiveEntry entry = new ZipArchiveEntry(name);
                zip.putArchiveEntry(entry);

                if (f.canExecute()) {
                    // workaround to track the relative file names
                    // of those that need to be set as executable on unzip
                    executableFiles.add(name);
                }
                InputStream is = new FileInputStream(f);
                copyInputStreamToOutputStream(is, zip);

                is.close();
                zip.closeArchiveEntry();
            } else if (f.isDirectory() && isInContentsFolder(name)) { //add directory entry
                dir_stack.push(f);
            } else {
                getLog().debug(f + " was not included in the zip file to be signed.");
            }
        }
    }
}
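
Minus the signing and filtering logic, this traversal (and the one in the Downloader example further down) is the same skeleton: seed a deque with the root directory, pop a directory, handle plain files, push subdirectories back on. A minimal sketch that just prints paths relative to the root's parent (it assumes root is a readable directory, not a filesystem root):

import java.io.File;
import java.util.ArrayDeque;
import java.util.Deque;

public class ListTree {

    /** Prints every path under root, relative to root's parent directory. */
    static void listTree(File root) {
        String basePath = root.getAbsoluteFile().getParent() + File.separator;
        Deque<File> dirStack = new ArrayDeque<File>();
        dirStack.push(root);

        while (!dirStack.isEmpty()) {
            File dir = dirStack.pop();
            File[] files = dir.listFiles();
            if (files == null) {
                continue; // not a directory, or an I/O error
            }
            for (File f : files) {
                System.out.println(f.getAbsolutePath().substring(basePath.length()));
                if (f.isDirectory()) {
                    dirStack.push(f); // descend into it on a later iteration
                }
            }
        }
    }

    public static void main(String[] args) {
        listTree(new File(args[0]));
    }
}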

From source file:com.grepcurl.random.ObjectGenerator.java

public <T> T generate(Class<T> klass, SetterOverrides setterOverrides, String[] constructorArgTypes,
        Object... constructorArgs) {
    Validate.notNull(klass);
    Validate.notNull(constructorArgs);
    if (verbose) {
        log(String.format("generating object of type: %s, with args: %s, of types: %s, with overrides: %s",
                klass, Arrays.toString(constructorArgs), Arrays.toString(constructorArgTypes),
                setterOverrides));
    }
    try {
        Deque<Object> objectStack = new ArrayDeque<>();
        Class[] constructorTypes = _toClasses(constructorArgTypes, constructorArgs);
        T t = klass.getConstructor(constructorTypes).newInstance(constructorArgs);
        objectStack.push(t);
        Method[] methods = klass.getMethods();
        for (Method method : methods) {
            _processMethod(method, setterOverrides, t, objectStack);
        }
        objectStack.pop();
        return t;
    } catch (Exception e) {
        throw new FailedRandomObjectGenerationException(e);
    }
}
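
The objectStack here is a context stack: the object currently being populated sits on top so that nested generation code (inside _processMethod) can consult its ancestors. The push/pop pairing is the essential part; a minimal sketch of the idiom with hypothetical names, using try/finally so the pop also happens on exceptions:

import java.util.ArrayDeque;
import java.util.Deque;

public class ContextStack {
    private final Deque<Object> objectStack = new ArrayDeque<Object>();

    void process(Object current) {
        objectStack.push(current); // current becomes the innermost context
        try {
            // ... populate fields; nested calls may inspect the stack:
            // objectStack.peek() is the object under construction,
            // deeper entries are its ancestors
        } finally {
            objectStack.pop(); // always restore the previous context
        }
    }
}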

From source file:org.apache.nifi.processors.standard.util.SFTPUtils.java

public static void changeWorkingDirectory(final ChannelSftp sftp, final String dirPath,
        final boolean createDirs, final Processor proc) throws IOException {
    final Deque<String> stack = new LinkedList<>();
    File dir = new File(dirPath);
    String currentWorkingDirectory = null;
    boolean dirExists = false;
    final String forwardPaths = dir.getPath().replaceAll(Matcher.quoteReplacement("\\"),
            Matcher.quoteReplacement("/"));
    try {
        currentWorkingDirectory = sftp.pwd();
        logger.debug(proc + " attempting to change directory from " + currentWorkingDirectory + " to "
                + dir.getPath());
        //always use forward paths for long string attempt
        sftp.cd(forwardPaths);
        dirExists = true;
        logger.debug(proc + " changed working directory to '" + forwardPaths + "' from '"
                + currentWorkingDirectory + "'");
    } catch (final SftpException sftpe) {
        logger.debug(proc + " could not change directory to '" + forwardPaths + "' from '"
                + currentWorkingDirectory + "' so trying the hard way.");
    }
    if (dirExists) {
        return;
    }
    if (!createDirs) {
        throw new IOException("Unable to change to requested working directory \'" + forwardPaths
                + "\' but not configured to create dirs.");
    }

    do {
        stack.push(dir.getName());
    } while ((dir = dir.getParentFile()) != null);

    String dirName = null;
    while ((dirName = stack.peek()) != null) {
        stack.pop();
        //find out if exists, if not make it if configured to do so or throw exception
        dirName = ("".equals(dirName.trim())) ? "/" : dirName;
        try {
            sftp.cd(dirName);
        } catch (final SftpException sftpe) {
            logger.debug(proc + " creating new directory and changing to it " + dirName);
            try {
                sftp.mkdir(dirName);
                sftp.cd(dirName);
            } catch (final SftpException e) {
                throw new IOException(proc + " could not make/change directory to [" + dirName + "] ["
                        + e.getLocalizedMessage() + "]", e);
            }
        }
    }
}
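
Note how the do/while loop climbs getParentFile() and therefore sees path components leaf-first; pushing each component onto the deque means they later pop root-first, so push is used here purely to reverse the order. The same step in isolation, on a hypothetical Unix-style path:

import java.io.File;
import java.util.ArrayDeque;
import java.util.Deque;

public class PathComponents {
    public static void main(String[] args) {
        File dir = new File("/a/b/c");
        Deque<String> stack = new ArrayDeque<String>();
        do {
            stack.push(dir.getName()); // visited leaf-first: c, b, a, ""
        } while ((dir = dir.getParentFile()) != null);

        // popping yields them root-first: "", a, b, c
        while (!stack.isEmpty()) {
            System.out.println(stack.pop());
        }
    }
}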

From source file:de.nrw.hbz.regal.sync.ingest.Downloader.java

/**
 * @param directory
 *            the directory to be zipped
 * @param zipfile
 *            the output file
 */
@SuppressWarnings("resource")
protected void zip(File directory, File zipfile) {

    try (ZipOutputStream zout = new ZipOutputStream(new FileOutputStream(zipfile))) {
        URI base = directory.toURI();
        Deque<File> queue = new LinkedList<File>();
        queue.push(directory);

        while (!queue.isEmpty()) {
            directory = queue.pop();
            for (File kid : directory.listFiles()) {
                String name = base.relativize(kid.toURI()).getPath();
                if (kid.isDirectory()) {
                    queue.push(kid);
                    name = name.endsWith("/") ? name : name + "/";
                    zout.putNextEntry(new ZipEntry(name));
                } else {
                    zout.putNextEntry(new ZipEntry(name));
                    copy(kid, zout);
                    zout.closeEntry();
                }
            }
        }
    } catch (IOException e) {
        throw new ZipDownloaderException(e);
    }
}

From source file:org.apache.solr.handler.component.PivotFacetHelper.java

public SimpleOrderedMap<List<NamedList<Object>>> process(ResponseBuilder rb, SolrParams params, String[] pivots)
        throws IOException {
    if (!rb.doFacets || pivots == null)
        return null;

    int minMatch = params.getInt(FacetParams.FACET_PIVOT_MINCOUNT, 1);
    boolean distinct = params.getBool(FacetParams.FACET_PIVOT_DISTINCT, false); // distinct pivot?
    boolean showDistinctCounts = params.getBool(FacetParams.FACET_PIVOT_DISTINCT, false);
    if (showDistinctCounts) {
        // force values in facet query to default values when facet.pivot.distinct = true
        // facet.mincount = 1 ---- distinct count makes no sense if we filter out valid terms
        // facet.limit = -1   ---- distinct count makes no sense if we limit terms
        ModifiableSolrParams v = new ModifiableSolrParams(rb.req.getParams());
        v.set("facet.mincount", 1);
        v.set("facet.limit", -1);
        params = v;
        rb.req.setParams(params);
    }

    SimpleOrderedMap<List<NamedList<Object>>> pivotResponse = new SimpleOrderedMap<List<NamedList<Object>>>();
    for (String pivot : pivots) {
        String[] fields = pivot.split(","); // only support two levels for now
        int depth = fields.length;

        if (fields.length < 2) {
            throw new SolrException(ErrorCode.BAD_REQUEST, "Pivot Facet needs at least two fields: " + pivot);
        }

        DocSet docs = rb.getResults().docSet;
        String field = fields[0];
        Deque<String> fnames = new LinkedList<String>();
        for (int i = fields.length - 1; i > 1; i--) {
            fnames.push(fields[i]);
        }

        SimpleFacets sf = getFacetImplementation(rb.req, rb.getResults().docSet, params);
        NamedList<Integer> superFacets = sf.getTermCounts(field);

        if (fields.length > 1) {
            String subField = fields[1];
            pivotResponse.add(pivot,
                    doPivots(superFacets, field, subField, fnames, rb, docs, minMatch, distinct, depth, depth));
        } else {
            pivotResponse.add(pivot,
                    doPivots(superFacets, field, null, fnames, rb, docs, minMatch, distinct, depth, depth));
        }

    }
    return pivotResponse;
}

From source file:com.grepcurl.random.ObjectGenerator.java

public <T> T generate(Class<T> klass) {
    Validate.notNull(klass);
    if (verbose) {
        log(String.format("generating object of type: %s", klass));
    }
    try {
        Deque<Object> objectStack = new ArrayDeque<>();
        T t;
        if (klass.isEnum()) {
            int randomOrdinal = randomInt(0, klass.getEnumConstants().length - 1);
            t = klass.getEnumConstants()[randomOrdinal];
        } else {
            t = klass.getConstructor().newInstance();
        }
        objectStack.push(t);
        Method[] methods = klass.getMethods();
        for (Method method : methods) {
            _processMethod(method, null, t, objectStack);
        }
        objectStack.pop();
        return t;
    } catch (Exception e) {
        throw new FailedRandomObjectGenerationException(e);
    }
}