Example usage for java.util Deque isEmpty

Introduction

On this page you can find example usage for java.util.Deque.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this collection contains no elements.
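
Before the real-world examples below, here is a minimal, self-contained sketch of the most common pattern: using a Deque as a stack and draining it with a while (!deque.isEmpty()) loop. The class name and values are illustrative only.

import java.util.ArrayDeque;
import java.util.Deque;

public class DequeIsEmptyDemo {
    public static void main(String[] args) {
        Deque<String> stack = new ArrayDeque<>();
        stack.push("first");
        stack.push("second");

        // isEmpty() is the usual loop guard when draining a deque:
        // pop() on an empty ArrayDeque throws NoSuchElementException.
        while (!stack.isEmpty()) {
            System.out.println(stack.pop()); // prints "second", then "first"
        }

        System.out.println(stack.isEmpty()); // true
    }
}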

Usage

From source file: org.apache.hadoop.hive.ql.parse.ASTNode.java

private StringBuilder dump(StringBuilder sb) {
    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(this);
    int tabLength = 0;

    while (!stack.isEmpty()) {
        ASTNode next = stack.peek();

        if (!next.visited) {
            sb.append(StringUtils.repeat(" ", tabLength * 3));
            sb.append(next.toString());
            sb.append("\n");

            if (next.children != null) {
                for (int i = next.children.size() - 1; i >= 0; i--) {
                    stack.push((ASTNode) next.children.get(i));
                }
            }

            tabLength++;
            next.visited = true;
        } else {
            tabLength--;
            next.visited = false;
            stack.pop();
        }
    }

    return sb;
}
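
This example walks a Hive AST iteratively: the Deque serves as an explicit stack in place of recursion, the visited flag distinguishes the first visit (print the node, push its children) from the unwind (drop an indentation level and pop), and isEmpty() ends the walk once every node has been seen twice.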

From source file: uniol.apt.analysis.synthesize.separation.KBoundedSeparation.java

private void generateAllRegions(int k) {
    assert k >= 1;

    Set<Bag<State>> known = new HashSet<>();
    Deque<Bag<State>> todo = new LinkedList<>();
    addExcitationAndSwitchingRegions(known);
    todo.addAll(known);

    while (!todo.isEmpty()) {
        InterrupterRegistry.throwIfInterruptRequestedForCurrentThread();
        Bag<State> r = todo.removeFirst();

        debug();
        debugFormat("Examining %s", r);

        Pair<Event, Integer> event = findEventWithNonConstantGradient(r);
        if (event == null) {
            debug("It is a region!");
            regions.add(convertToRegion(r));
            continue;
        }

        // Expand the multiset (i.e. add entries) so that it becomes "more region-like".
        // To do this, we either go towards a region with gradient(event) <= g or
        // towards one with gradient(event) > g. The two cases follow.

        Bag<State> r1 = expandBelowOrEqual(r, event.getFirst(), event.getSecond());
        debugFormat("for gradient(%s) <= %d, new result is %s", event.getFirst(), event.getSecond(), r1);
        if (shouldExplore(r1, k) && known.add(r1))
            todo.add(r1);
        else
            debug("...which should not be explored");

        Bag<State> r2 = expandAboveOrEqual(r, event.getFirst(), event.getSecond() + 1);
        debugFormat("for gradient(%s) >= %d, new result is %s", event.getFirst(), event.getSecond() + 1, r2);
        if (shouldExplore(r2, k) && known.add(r2))
            todo.add(r2);
        else
            debug("...which should not be explored");
    }

    debugFormat("Found the following regions: %s", regions);
}
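
Here the deque is a work list rather than a stack: candidate multisets are taken from the front until isEmpty() reports that no unexplored expansions remain, while the known set keeps already-seen candidates from being enqueued twice.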

From source file: net.sf.jasperreports.engine.json.expression.member.evaluation.ArrayConstructionExpressionEvaluator.java

private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) {
    List<JRJsonNode> result = new ArrayList<>();
    Deque<JRJsonNode> stack = new ArrayDeque<>();
    JsonNode initialDataNode = jrJsonNode.getDataNode();

    if (log.isDebugEnabled()) {
        log.debug("initial stack population with: " + initialDataNode);
    }

    // populate the stack initially
    stack.push(jrJsonNode);

    while (!stack.isEmpty()) {
        JRJsonNode stackNode = stack.pop();
        JsonNode stackDataNode = stackNode.getDataNode();

        addChildrenToStack(stackNode, stack);

        // process the current stack item
        if (stackDataNode.isArray()) {
            if (log.isDebugEnabled()) {
                log.debug("processing stack element: " + stackDataNode);
            }

            ArrayNode newNode = getEvaluationContext().getObjectMapper().createArrayNode();

            for (Integer idx : expression.getIndexes()) {
                if (idx >= 0 && idx < stackDataNode.size()) {
                    JRJsonNode nodeAtIndex = stackNode.createChild(stackDataNode.get(idx));

                    if (applyFilter(nodeAtIndex)) {
                        newNode.add(nodeAtIndex.getDataNode());
                    }
                }
            }

            if (newNode.size() > 0) {
                result.add(stackNode.createChild(newNode));
            }
        }
    }

    return result;
}
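
The deque again acts as a depth-first stack, this time over a JSON tree: each iteration pops a node, pushes its children, and collects the requested array elements, with isEmpty() as the loop's termination test.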

From source file: net.sf.jasperreports.engine.json.expression.member.evaluation.ArraySliceExpressionEvaluator.java

private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) {
    List<JRJsonNode> result = new ArrayList<>();
    Deque<JRJsonNode> stack = new ArrayDeque<>();

    if (log.isDebugEnabled()) {
        log.debug("initial stack population with: " + jrJsonNode.getDataNode());
    }

    // populate the stack initially
    stack.push(jrJsonNode);

    while (!stack.isEmpty()) {
        JRJsonNode stackNode = stack.pop();
        JsonNode stackDataNode = stackNode.getDataNode();

        addChildrenToStack(stackNode, stack);

        // process the current stack item
        if (stackDataNode.isArray()) {
            if (log.isDebugEnabled()) {
                log.debug("processing stack element: " + stackDataNode);
            }

            ArrayNode newNode = getEvaluationContext().getObjectMapper().createArrayNode();

            Integer start = getSliceStart(stackDataNode.size());
            if (start >= stackDataNode.size()) {
                continue;
            }

            Integer end = getSliceEnd(stackDataNode.size());
            if (end < 0) {
                continue;
            }

            for (int i = start; i < end; i++) {
                JRJsonNode nodeAtIndex = stackNode.createChild(stackDataNode.get(i));

                if (applyFilter(nodeAtIndex)) {
                    newNode.add(nodeAtIndex.getDataNode());
                }
            }

            if (newNode.size() > 0) {
                result.add(stackNode.createChild(newNode));
            }
        }
    }

    return result;
}
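
This is the same traversal as in the previous example, except that array elements are selected by a computed slice range (start/end) instead of an explicit list of indexes.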

From source file: eu.interedition.collatex.tools.CollationServer.java

public void service(Request request, Response response) throws Exception {
    final Deque<String> path = path(request);
    if (path.isEmpty() || !"collate".equals(path.pop())) {
        response.sendError(404);
        return;
    }

    final SimpleCollation collation = JsonProcessor.read(request.getInputStream());
    if (maxCollationSize > 0) {
        for (SimpleWitness witness : collation.getWitnesses()) {
            final int witnessLength = witness.getTokens().stream().filter(t -> t instanceof SimpleToken)
                    .map(t -> (SimpleToken) t).mapToInt(t -> t.getContent().length()).sum();
            if (witnessLength > maxCollationSize) {
                response.sendError(413, "Request Entity Too Large");
                return;
            }
        }
    }

    response.suspend(60, TimeUnit.SECONDS, new EmptyCompletionHandler<>());
    collationThreads.submit(() -> {
        try {
            final VariantGraph graph = new VariantGraph();
            collation.collate(graph);

            // CORS support
            response.setHeader("Access-Control-Allow-Origin",
                    Optional.ofNullable(request.getHeader("Origin")).orElse("*"));
            response.setHeader("Access-Control-Allow-Methods",
                    Optional.ofNullable(request.getHeader("Access-Control-Request-Method"))
                            .orElse("GET, POST, HEAD, OPTIONS"));
            response.setHeader("Access-Control-Allow-Headers",
                    Optional.ofNullable(request.getHeader("Access-Control-Request-Headers"))
                            .orElse("Content-Type, Accept, X-Requested-With"));
            response.setHeader("Access-Control-Max-Age", "86400");
            response.setHeader("Access-Control-Allow-Credentials", "true");

            final String clientAccepts = Optional.ofNullable(request.getHeader(Header.Accept)).orElse("");

            if (clientAccepts.contains("text/plain")) {
                response.setContentType("text/plain");
                response.setCharacterEncoding("utf-8");
                try (final Writer out = response.getWriter()) {
                    new SimpleVariantGraphSerializer(graph).toDot(out);
                }
                response.resume();

            } else if (clientAccepts.contains("application/tei+xml")) {
                XMLStreamWriter xml = null;
                try {
                    response.setContentType("application/tei+xml");
                    try (OutputStream responseStream = response.getOutputStream()) {
                        xml = XMLOutputFactory.newInstance().createXMLStreamWriter(responseStream);
                        xml.writeStartDocument();
                        new SimpleVariantGraphSerializer(graph).toTEI(xml);
                        xml.writeEndDocument();
                    } finally {
                        if (xml != null) {
                            xml.close();
                        }
                    }
                    response.resume();
                } catch (XMLStreamException e) {
                    e.printStackTrace();
                }
            } else if (clientAccepts.contains("application/graphml+xml")) {
                XMLStreamWriter xml = null;
                try {
                    response.setContentType("application/graphml+xml");
                    try (OutputStream responseStream = response.getOutputStream()) {
                        xml = XMLOutputFactory.newInstance().createXMLStreamWriter(responseStream);
                        xml.writeStartDocument();
                        new SimpleVariantGraphSerializer(graph).toGraphML(xml);
                        xml.writeEndDocument();
                    } finally {
                        if (xml != null) {
                            xml.close();
                        }
                    }
                    response.resume();
                } catch (XMLStreamException e) {
                    e.printStackTrace();
                }
            } else if (clientAccepts.contains("image/svg+xml")) {
                if (dotPath == null) {
                    response.sendError(204);
                    response.resume();
                } else {
                    final StringWriter dot = new StringWriter();
                    new SimpleVariantGraphSerializer(graph).toDot(dot);

                    final Process dotProc = new ProcessBuilder(dotPath, "-Grankdir=LR", "-Gid=VariantGraph",
                            "-Tsvg").start();
                    final StringWriter errors = new StringWriter();
                    CompletableFuture.allOf(CompletableFuture.runAsync(() -> {
                        final char[] buf = new char[8192];
                        try (final Reader errorStream = new InputStreamReader(dotProc.getErrorStream())) {
                            int len;
                            while ((len = errorStream.read(buf)) >= 0) {
                                errors.write(buf, 0, len);
                            }
                        } catch (IOException e) {
                            throw new CompletionException(e);
                        }
                    }, processThreads), CompletableFuture.runAsync(() -> {
                        try (final Writer dotProcStream = new OutputStreamWriter(dotProc.getOutputStream(),
                                "UTF-8")) {
                            dotProcStream.write(dot.toString());
                        } catch (IOException e) {
                            throw new CompletionException(e);
                        }
                    }, processThreads), CompletableFuture.runAsync(() -> {
                        response.setContentType("image/svg+xml");
                        final byte[] buf = new byte[8192];
                        try (final InputStream in = dotProc.getInputStream();
                                final OutputStream out = response.getOutputStream()) {
                            int len;
                            while ((len = in.read(buf)) >= 0) {
                                out.write(buf, 0, len);
                            }
                        } catch (IOException e) {
                            throw new CompletionException(e);
                        }
                    }, processThreads), CompletableFuture.runAsync(() -> {
                        try {
                            if (!dotProc.waitFor(60, TimeUnit.SECONDS)) {
                                throw new CompletionException(new RuntimeException(
                                        "dot processing took longer than 60 seconds, process was timed out."));
                            }
                            if (dotProc.exitValue() != 0) {
                                throw new CompletionException(new IllegalStateException(errors.toString()));
                            }
                        } catch (InterruptedException e) {
                            throw new CompletionException(e);
                        }
                    }, processThreads)).exceptionally(t -> {
                        t.printStackTrace();
                        return null;
                    }).thenRunAsync(response::resume, processThreads);
                }
            } else {
                response.setContentType("application/json");
                try (final OutputStream responseStream = response.getOutputStream()) {
                    JsonProcessor.write(graph, responseStream);
                }
                response.resume();
            }
        } catch (IOException e) {
            // FIXME: ignored
        }
    });
}
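
In this example isEmpty() guards request routing: an empty path deque, or one whose first segment is not "collate", is rejected with a 404 before any collation work begins.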

From source file: org.apache.metron.common.stellar.StellarCompiler.java

private Token<?> popDeque(Deque<Token<?>> tokenDeque) {
    if (tokenDeque.isEmpty()) {
        throw new ParseException("Unable to pop an empty stack");
    }
    return tokenDeque.pop();
}
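
A defensive wrapper: checking isEmpty() before popping lets the caller throw a domain-specific ParseException instead of the NoSuchElementException that pop() would otherwise raise on an empty deque.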

From source file: org.apache.hadoop.hive.ql.parse.ASTNode.java

/**
 * For every node in this subtree, make sure its start/stop tokens
 * are set.  Walk depth first, visit bottom up.  Only updates nodes
 * with at least one token index < 0.
 *
 * In contrast to the method in the parent class, this method is
 * iterative.
 */
@Override
public void setUnknownTokenBoundaries() {
    Deque<ASTNode> stack1 = new ArrayDeque<ASTNode>();
    Deque<ASTNode> stack2 = new ArrayDeque<ASTNode>();
    stack1.push(this);

    while (!stack1.isEmpty()) {
        ASTNode next = stack1.pop();
        stack2.push(next);

        if (next.children != null) {
            for (int i = next.children.size() - 1; i >= 0; i--) {
                stack1.push((ASTNode) next.children.get(i));
            }
        }
    }

    while (!stack2.isEmpty()) {
        ASTNode next = stack2.pop();

        if (next.children == null) {
            if (next.startIndex < 0 || next.stopIndex < 0) {
                next.startIndex = next.stopIndex = next.token.getTokenIndex();
            }
        } else if (next.startIndex >= 0 && next.stopIndex >= 0) {
            continue;
        } else if (next.children.size() > 0) {
            ASTNode firstChild = (ASTNode) next.children.get(0);
            ASTNode lastChild = (ASTNode) next.children.get(next.children.size() - 1);
            next.startIndex = firstChild.getTokenStartIndex();
            next.stopIndex = lastChild.getTokenStopIndex();
        }
    }
}
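
The two-deque idiom above (drain one stack depth-first while pushing every node onto a second, then drain the second) generalizes to any iterative post-order traversal. Below is a minimal sketch of the same pattern with a hypothetical Node type; it is illustrative only and not taken from the Hive sources.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import java.util.function.Consumer;

// Hypothetical node type, for illustration only.
class Node {
    final String label;
    final List<Node> children;

    Node(String label, List<Node> children) {
        this.label = label;
        this.children = children;
    }
}

class PostOrderWalk {
    // Visit children before parents without recursion, mirroring the
    // two-deque structure of setUnknownTokenBoundaries() above.
    static void postOrder(Node root, Consumer<Node> visit) {
        Deque<Node> stack1 = new ArrayDeque<>();
        Deque<Node> stack2 = new ArrayDeque<>();
        stack1.push(root);

        // Pass 1: depth-first over stack1; every popped node goes onto
        // stack2, so each parent sits below its children on stack2.
        while (!stack1.isEmpty()) {
            Node next = stack1.pop();
            stack2.push(next);
            for (Node child : next.children) {
                stack1.push(child);
            }
        }

        // Pass 2: popping stack2 yields each node only after all its children.
        while (!stack2.isEmpty()) {
            visit.accept(stack2.pop());
        }
    }

    public static void main(String[] args) {
        Node root = new Node("root",
                List.of(new Node("leaf1", List.of()), new Node("leaf2", List.of())));
        postOrder(root, n -> System.out.println(n.label)); // leaf1, leaf2, root
    }
}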

From source file: ocr.sapphire.image.EdgeBasedImagePreprocessor.java

private Deque<Point> findConnectedComponent(int[] edgeData, int x, int y) {
    Deque<Point> points = new LinkedList<Point>();
    Deque<Point> queue = new LinkedList<Point>();

    edgeData[x + y * width] = WHITE;
    Point initialPoint = new Point(x, y);
    points.add(initialPoint);
    queue.push(initialPoint);

    while (!queue.isEmpty()) {
        Point point = queue.removeFirst();
        for (int k = 0; k < 8; k++) {
            int x2 = (int) (point.x + DX[k]);
            int y2 = (int) (point.y + DY[k]);
            if (x2 < 0 || y2 < 0 || x2 >= width || y2 >= height) {
                continue;
            }
            if (edgeData[x2 + y2 * width] == BLACK) {
                edgeData[x2 + y2 * width] = WHITE;
                Point point2 = new Point(x2, y2);
                points.add(point2);
                queue.addLast(point2);
            }
        }
    }
    return points;
}
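
Here the deque is used as a FIFO queue for a breadth-first flood fill: neighbours of each popped pixel (8-connected, via DX/DY) are appended with addLast(), and isEmpty() signals that the whole connected component has been collected.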

From source file: de.l3s.archivepig.enrich.Response.java

@Override
public void enrich(Tuple data, Tuple enrichment, Object... params) throws Exception {
    long size = get(data, "_record.size");
    long offset = get(data, "_record.offset");
    String filename = get(data, "_record.filename");
    String cdxFile = get(data, "_record.cdxFile");

    if (size < 0 || offset < 0)
        return;

    FileSystem fs = FileSystem.get(UDFContext.getUDFContext().getJobConf());

    Deque<String> cdxSegments = new ArrayDeque<String>(Lists.reverse(list(cdxFile.split("\\/"))));
    cdxSegments.pop(); // remove filename
    String pathExtension = "";
    Path path = new Path(ArchiveLoader.dataPath(), pathExtension + filename);
    while (!fs.exists(path)) {
        if (cdxSegments.isEmpty()) {
            enrichment.append(new HashMap<String, String>());
            enrichment.append(new HashMap<String, String>());
            enrichment.append(null);
            return;
        }
        String cdxSegment = cdxSegments.pop();
        if (cdxSegment.endsWith(".har"))
            cdxSegment = cdxSegment.substring(0, cdxSegment.length() - 4);
        pathExtension = cdxSegment + "/" + pathExtension;
        path = new Path(ArchiveLoader.dataPath(), pathExtension + filename);
    }
    FSDataInputStream fsin = fs.open(path);
    fsin.seek(offset);
    InputStream in = fsin;

    ByteArrayOutputStream recordOutput = new ByteArrayOutputStream();
    try {
        try (BoundedInputStream boundedIn = new BoundedInputStream(in, size);
                ArchiveReader reader = ArchiveReaderFactory.get(filename, boundedIn, false);) {
            ArchiveRecord record;
            record = reader.get();

            ArchiveRecordHeader header = record.getHeader();
            enrichment.append(header.getHeaderFields());

            record.dump(recordOutput);
        } catch (Exception e) {
            return;
        } finally {
            in.close();
            recordOutput.close();
        }
    } catch (Exception e) {
        return;
    }

    try (InputStream httpResponse = new ByteArrayInputStream(recordOutput.toByteArray())) {
        // All commented-out lines below are variants for HttpCore 4.3; version 4.2.5 is currently in use.
        //        SessionInputBufferImpl sessionInputBuffer = new SessionInputBufferImpl(new HttpTransportMetricsImpl(), 2048);
        //        sessionInputBuffer.bind(httpResponse);
        //        DefaultHttpResponseParserFactory responseParserFactory = new DefaultHttpResponseParserFactory();
        //        HttpMessageParser<HttpResponse> responseParser = responseParserFactory.create(sessionInputBuffer, MessageConstraints.DEFAULT);
        //        HttpResponse response = responseParser.parse();
        //        Header[] httpHeaders = response.getAllHeaders();

        HttpResponseParser parser = new HttpResponseParser();
        HttpResponse response = parser.parse(httpResponse);
        HttpHeaders httpHeaders = response.getHeaders();

        Map<String, String> httpHeadersMap = new HashMap<String, String>();
        for (HttpHeader httpHeader : httpHeaders) {
            httpHeadersMap.put(httpHeader.getName(), httpHeader.getValue());
        }
        enrichment.append(httpHeadersMap);

        //        byte[] payload = new byte[sessionInputBuffer.length()];
        //        sessionInputBuffer.read(payload);

        byte[] payload = IOUtils.toByteArray(response);

        enrichment.append(payload);

        //        HttpEntity entity = new ByteArrayEntity(payload);
        //        output.append(entity == null ? null : EntityUtils.toString(entity));
    } catch (Exception ignored) {
    }
}
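
In this example isEmpty() marks the exhaustion of candidate path segments: when no CDX segment is left to prepend and the archive file still cannot be found, the method appends empty enrichment values and returns.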

From source file: org.apache.hadoop.hbase.replication.regionserver.HFileReplicator.java

public Void replicate() throws IOException {
    // Copy all the hfiles to the local file system
    Map<String, Path> tableStagingDirsMap = copyHFilesToStagingDir();

    int maxRetries = conf.getInt(HConstants.BULKLOAD_MAX_RETRIES_NUMBER, 10);

    for (Entry<String, Path> tableStagingDir : tableStagingDirsMap.entrySet()) {
        String tableNameString = tableStagingDir.getKey();
        Path stagingDir = tableStagingDir.getValue();

        LoadIncrementalHFiles loadHFiles = null;
        try {
            loadHFiles = new LoadIncrementalHFiles(conf);
        } catch (Exception e) {
            LOG.error("Failed to initialize LoadIncrementalHFiles for replicating bulk loaded" + " data.", e);
            throw new IOException(e);
        }
        Configuration newConf = HBaseConfiguration.create(conf);
        newConf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no");
        loadHFiles.setConf(newConf);

        TableName tableName = TableName.valueOf(tableNameString);
        Table table = this.connection.getTable(tableName);

        // Prepare collection of queue of hfiles to be loaded(replicated)
        Deque<LoadQueueItem> queue = new LinkedList<LoadQueueItem>();
        loadHFiles.prepareHFileQueue(stagingDir, table, queue, false);

        if (queue.isEmpty()) {
            LOG.warn("Replication process did not find any files to replicate in directory "
                    + stagingDir.toUri());
            return null;
        }

        try (RegionLocator locator = connection.getRegionLocator(tableName)) {

            fsDelegationToken.acquireDelegationToken(sinkFs);

            // Set the staging directory which will be used by LoadIncrementalHFiles for loading the
            // data
            loadHFiles.setBulkToken(stagingDir.toString());

            doBulkLoad(loadHFiles, table, queue, locator, maxRetries);
        } finally {
            cleanup(stagingDir.toString(), table);
        }
    }
    return null;
}
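
Here isEmpty() is a sanity check after preparing the load queue: if no HFiles were queued for the staging directory, the method logs a warning and returns instead of attempting a bulk load.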