Example usage for java.util Deque isEmpty

List of usage examples for java.util Deque isEmpty

Introduction

On this page you can find example usages of java.util.Deque.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this collection contains no elements.
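
Typically isEmpty() serves as the loop condition while draining or traversing a deque. A minimal, self-contained sketch (not taken from any of the projects below):

import java.util.ArrayDeque;
import java.util.Deque;

public class DrainExample {
    public static void main(String[] args) {
        Deque<String> deque = new ArrayDeque<>();
        deque.push("first");
        deque.push("second");

        // Process elements until the deque reports that it is empty.
        while (!deque.isEmpty()) {
            System.out.println(deque.pop());
        }
    }
}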

Usage

From source file:com.reprezen.swaggerparser.test.ExamplesTest.java

@Parameters
public static Collection<URL> findExamples() throws IOException {
    Collection<URL> examples = Lists.newArrayList();
    Deque<URL> dirs = Queues.newArrayDeque();
    String auth = System.getenv("GITHUB_AUTH") != null ? System.getenv("GITHUB_AUTH") + "@" : "";
    String request = String.format("https://%sapi.github.com/repos/%s/contents/%s?ref=%s", auth, SPEC_REPO,
            EXAMPLES_ROOT, EXAMPLES_BRANCH);
    dirs.add(new URL(request));
    while (!dirs.isEmpty()) {
        URL url = dirs.remove();
        String json = IOUtils.toString(url, Charsets.UTF_8);
        JsonNode tree = mapper.readTree(json);
        for (JsonNode result : iterable(tree.elements())) {
            String type = result.get("type").asText();
            String path = result.get("path").asText();
            String resultUrl = result.get("url").asText();
            if (type.equals("dir")) {
                dirs.add(new URL(resultUrl));
            } else if (type.equals("file") && (path.endsWith(".yaml") || path.endsWith(".json"))) {
                String downloadUrl = result.get("download_url").asText();
                examples.add(new URL(downloadUrl));
            }
        }
    }
    return examples;
}
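
The example above uses the Deque as a FIFO work queue: newly discovered directories are enqueued with add(), dequeued with remove(), and isEmpty() terminates the breadth-first walk over the GitHub contents API. The same pattern works for any tree-shaped structure; the following sketch applies it to a local directory tree (class and method names are invented for illustration):

import java.io.File;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

public class FindFiles {

    // Breadth-first search for files with a given extension.
    public static List<File> find(File root, String extension) {
        List<File> matches = new ArrayList<>();
        Deque<File> dirs = new ArrayDeque<>();
        dirs.add(root);
        while (!dirs.isEmpty()) {            // loop until the work queue is drained
            File dir = dirs.remove();        // take from the head (FIFO order)
            File[] children = dir.listFiles();
            if (children == null) {
                continue;
            }
            for (File child : children) {
                if (child.isDirectory()) {
                    dirs.add(child);         // enqueue subdirectories at the tail
                } else if (child.getName().endsWith(extension)) {
                    matches.add(child);
                }
            }
        }
        return matches;
    }
}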

From source file:info.servertools.core.util.FileUtils.java

public static void zipDirectory(File directory, File zipfile, @Nullable Collection<String> fileBlacklist,
        @Nullable Collection<String> folderBlacklist) throws IOException {
    URI baseDir = directory.toURI();
    Deque<File> queue = new LinkedList<>();
    queue.push(directory);
    OutputStream out = new FileOutputStream(zipfile);
    Closeable res = out;
    try {
        ZipOutputStream zout = new ZipOutputStream(out);
        res = zout;
        while (!queue.isEmpty()) {
            directory = queue.removeFirst();
            File[] dirFiles = directory.listFiles();
            if (dirFiles != null && dirFiles.length != 0) {
                for (File child : dirFiles) {
                    if (child != null) {
                        String name = baseDir.relativize(child.toURI()).getPath();
                        if (child.isDirectory()
                                && (folderBlacklist == null || !folderBlacklist.contains(child.getName()))) {
                            queue.push(child);
                            name = name.endsWith("/") ? name : name + "/";
                            zout.putNextEntry(new ZipEntry(name));
                        } else {
                            if (fileBlacklist != null && !fileBlacklist.contains(child.getName())) {
                                zout.putNextEntry(new ZipEntry(name));
                                copy(child, zout);
                                zout.closeEntry();
                            }
                        }
                    }
                }
            }
        }
    } finally {
        res.close();
    }
}
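
In zipDirectory the Deque is used as a LIFO stack instead: push() and removeFirst() both work at the head, so subdirectories are visited depth-first, and isEmpty() ends the traversal once every directory has been handled. A stripped-down sketch of that stack-based walk without the zip bookkeeping (names are hypothetical):

import java.io.File;
import java.util.ArrayDeque;
import java.util.Deque;

public class WalkTree {

    // Depth-first traversal of a directory tree, using the Deque as a stack.
    public static void walk(File root) {
        Deque<File> stack = new ArrayDeque<>();
        stack.push(root);
        while (!stack.isEmpty()) {
            File current = stack.pop();      // most recently pushed directory first
            System.out.println(current.getPath());
            File[] children = current.listFiles();
            if (children == null) {
                continue;
            }
            for (File child : children) {
                if (child.isDirectory()) {
                    stack.push(child);
                }
            }
        }
    }
}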

From source file:org.nickelproject.util.testUtil.ClasspathUtil.java

private static Iterable<String> getAllSubClasses(final Class<?>... pTags) {
    final Deque<String> vClassNames = Lists.newLinkedList();
    final Set<String> vResults = Sets.newHashSet();
    for (final Class<?> vClass : pTags) {
        vClassNames.add(vClass.getCanonicalName());
    }
    while (!vClassNames.isEmpty()) {
        final String vCurrentClass = vClassNames.pollFirst();
        for (final String vChild : kChildren.get(vCurrentClass)) {
            vClassNames.addLast(vChild);
            vResults.add(vChild);
        }
    }
    return vResults;
}

From source file:org.interreg.docexplore.util.ZipUtils.java

public static void zip(File directory, File[] files, File zipfile, float[] progress, float progressOffset,
        float progressAmount, int level) throws Exception {
    URI base = directory.toURI();
    Deque<File> queue = new LinkedList<File>();
    OutputStream out = new FileOutputStream(zipfile, false);
    Closeable res = null;
    try {
        int nEntries = count(files, queue, 0);
        while (!queue.isEmpty()) {
            File dir = queue.pop();
            nEntries = count(dir.listFiles(), queue, nEntries);
        }

        ZipArchiveOutputStream zout = (ZipArchiveOutputStream) new ArchiveStreamFactory()
                .createArchiveOutputStream(ArchiveStreamFactory.ZIP, out);
        zout.setLevel(level);
        res = zout;

        int cnt = zip(files, queue, base, 0, nEntries, progress, progressOffset, progressAmount, zout);
        while (!queue.isEmpty()) {
            File dir = queue.pop();
            cnt = zip(dir.listFiles(), queue, base, cnt, nEntries, progress, progressOffset, progressAmount,
                    zout);
        }
    } finally {
        res.close();
    }
}

From source file:com.cinchapi.concourse.lang.Parser.java

/**
 * Convert a valid and well-formed list of {@link Symbol} objects into a
 * Queue in postfix notation.
 * <p>
 * NOTE: This method will group non-conjunctive symbols into
 * {@link Expression} objects.
 * </p>
 * 
 * @param symbols
 * @return the symbols in postfix notation
 */
public static Queue<PostfixNotationSymbol> toPostfixNotation(List<Symbol> symbols) {
    Deque<Symbol> stack = new ArrayDeque<Symbol>();
    Queue<PostfixNotationSymbol> queue = new LinkedList<PostfixNotationSymbol>();
    symbols = groupExpressions(symbols);
    for (Symbol symbol : symbols) {
        if (symbol instanceof ConjunctionSymbol) {
            while (!stack.isEmpty()) {
                Symbol top = stack.peek();
                if (symbol == ConjunctionSymbol.OR
                        && (top == ConjunctionSymbol.OR || top == ConjunctionSymbol.AND)) {
                    queue.add((PostfixNotationSymbol) stack.pop());
                } else {
                    break;
                }
            }
            stack.push(symbol);
        } else if (symbol == ParenthesisSymbol.LEFT) {
            stack.push(symbol);
        } else if (symbol == ParenthesisSymbol.RIGHT) {
            boolean foundLeftParen = false;
            while (!stack.isEmpty()) {
                Symbol top = stack.peek();
                if (top == ParenthesisSymbol.LEFT) {
                    foundLeftParen = true;
                    break;
                } else {
                    queue.add((PostfixNotationSymbol) stack.pop());
                }
            }
            if (!foundLeftParen) {
                throw new SyntaxException(
                        MessageFormat.format("Syntax error in {0}: Mismatched parenthesis", symbols));
            } else {
                stack.pop();
            }
        } else {
            queue.add((PostfixNotationSymbol) symbol);
        }
    }
    while (!stack.isEmpty()) {
        Symbol top = stack.peek();
        if (top instanceof ParenthesisSymbol) {
            throw new SyntaxException(
                    MessageFormat.format("Syntax error in {0}: Mismatched parenthesis", symbols));
        } else {
            queue.add((PostfixNotationSymbol) stack.pop());
        }
    }
    return queue;
}
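
toPostfixNotation is the classic shunting-yard algorithm: operators wait on a Deque used as a stack, and isEmpty() drives both the unwinding at a right parenthesis and the final flush. The generic, stripped-down version below shows the same structure for arithmetic tokens; it is only an illustration and does not use Concourse's Symbol types:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

public class ShuntingYard {

    private static boolean isOperator(String token) {
        return token.length() == 1 && "+-*/".contains(token);
    }

    private static int precedence(String op) {
        return (op.equals("*") || op.equals("/")) ? 2 : 1;
    }

    // Convert an infix token list to postfix (reverse Polish) notation.
    // Assumes the input is well formed, i.e. parentheses are balanced.
    public static List<String> toPostfix(List<String> tokens) {
        Deque<String> stack = new ArrayDeque<>();
        List<String> output = new ArrayList<>();
        for (String token : tokens) {
            if (isOperator(token)) {
                while (!stack.isEmpty() && isOperator(stack.peek())
                        && precedence(stack.peek()) >= precedence(token)) {
                    output.add(stack.pop());
                }
                stack.push(token);
            } else if (token.equals("(")) {
                stack.push(token);
            } else if (token.equals(")")) {
                while (!stack.isEmpty() && !stack.peek().equals("(")) {
                    output.add(stack.pop());
                }
                stack.pop();                 // discard the matching "("
            } else {
                output.add(token);           // operand goes straight to the output
            }
        }
        while (!stack.isEmpty()) {           // flush the remaining operators
            output.add(stack.pop());
        }
        return output;
    }
}

For example, toPostfix(Arrays.asList("1", "+", "2", "*", "3")) yields [1, 2, 3, *, +].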

From source file:org.opensingular.form.wicket.util.WicketFormUtils.java

public static String generateTitlePath(Component parentContainer, SInstance parentContext,
        Component childComponent, SInstance childInstance) {

    List<Component> components = Lists.newArrayList(childComponent);
    WicketUtils.appendListOfParents(components, childComponent, parentContainer);

    Deque<String> titles = new LinkedList<>();
    SInstance lastInstance = null;
    String lastTitle = null;
    for (Component comp : components) {

        SInstance instance = WicketFormUtils.instanciaIfAware(comp.getDefaultModel()).orElse(null);

        String title = findTitle(comp);
        if (title != null && !Objects.equal(title, lastTitle)) {
            lastTitle = title;
            addTitle(titles, title, instance, lastInstance);
        }
        lastInstance = instance;
    }

    if (!titles.isEmpty()) {
        return titles.stream().collect(Collectors.joining(" > "));
    }
    return null;
}
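
Here the Deque simply accumulates breadcrumb titles, and isEmpty() guards the final join so that an empty path yields null rather than an empty string. A miniature of the same idea (class and method names are invented):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.stream.Collectors;

public class Breadcrumb {

    // Build a "parent > child" style title path; return null when there is nothing to show.
    public static String join(Deque<String> titles) {
        if (!titles.isEmpty()) {
            return titles.stream().collect(Collectors.joining(" > "));
        }
        return null;
    }

    public static void main(String[] args) {
        Deque<String> titles = new ArrayDeque<>();
        titles.addLast("Form");
        titles.addLast("Address");
        titles.addLast("Street");
        System.out.println(join(titles));              // Form > Address > Street
        System.out.println(join(new ArrayDeque<>()));  // null
    }
}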

From source file:org.apache.hadoop.hive.ql.parse.GenTezUtils.java

public static void removeUnionOperators(Configuration conf, GenTezProcContext context, BaseWork work)
        throws SemanticException {

    List<Operator<?>> roots = new ArrayList<Operator<?>>();
    roots.addAll(work.getAllRootOperators());
    if (work.getDummyOps() != null) {
        roots.addAll(work.getDummyOps());
    }
    roots.addAll(context.eventOperatorSet);

    // need to clone the plan.
    List<Operator<?>> newRoots = Utilities.cloneOperatorTree(conf, roots);

    // we're cloning the operator plan but we're retaining the original work. That means
    // that root operators have to be replaced with the cloned ops. The replacement map
    // tells you what that mapping is.
    BiMap<Operator<?>, Operator<?>> replacementMap = HashBiMap.create();

    // there's some special handling for dummyOps required. Mapjoins won't be properly
    // initialized if their dummy parents aren't initialized. Since we cloned the plan
    // we need to replace the dummy operators in the work with the cloned ones.
    List<HashTableDummyOperator> dummyOps = new LinkedList<HashTableDummyOperator>();

    Iterator<Operator<?>> it = newRoots.iterator();
    for (Operator<?> orig : roots) {
        Set<FileSinkOperator> fsOpSet = OperatorUtils.findOperators(orig, FileSinkOperator.class);
        for (FileSinkOperator fsOp : fsOpSet) {
            context.fileSinkSet.remove(fsOp);
        }

        Operator<?> newRoot = it.next();

        replacementMap.put(orig, newRoot);

        if (newRoot instanceof HashTableDummyOperator) {
            // dummy ops need to be updated to the cloned ones.
            dummyOps.add((HashTableDummyOperator) newRoot);
            it.remove();
        } else if (newRoot instanceof AppMasterEventOperator) {
            // event operators point to table scan operators. When cloning these we
            // need to restore the original scan.
            if (newRoot.getConf() instanceof DynamicPruningEventDesc) {
                TableScanOperator ts = ((DynamicPruningEventDesc) orig.getConf()).getTableScan();
                if (ts == null) {
                    throw new AssertionError("No table scan associated with dynamic event pruning. " + orig);
                }
                ((DynamicPruningEventDesc) newRoot.getConf()).setTableScan(ts);
            }
            it.remove();
        } else {
            if (newRoot instanceof TableScanOperator) {
                if (context.tsToEventMap.containsKey(orig)) {
                    // we need to update event operators with the cloned table scan
                    for (AppMasterEventOperator event : context.tsToEventMap.get(orig)) {
                        ((DynamicPruningEventDesc) event.getConf()).setTableScan((TableScanOperator) newRoot);
                    }
                }
            }
            context.rootToWorkMap.remove(orig);
            context.rootToWorkMap.put(newRoot, work);
        }
    }

    // now we remove all the unions. we throw away any branch that's not reachable from
    // the current set of roots. The reason is that those branches will be handled in
    // different tasks.
    Deque<Operator<?>> operators = new LinkedList<Operator<?>>();
    operators.addAll(newRoots);

    Set<Operator<?>> seen = new HashSet<Operator<?>>();

    while (!operators.isEmpty()) {
        Operator<?> current = operators.pop();
        seen.add(current);

        if (current instanceof FileSinkOperator) {
            FileSinkOperator fileSink = (FileSinkOperator) current;

            // remember it for additional processing later
            context.fileSinkSet.add(fileSink);

            FileSinkDesc desc = fileSink.getConf();
            Path path = desc.getDirName();
            List<FileSinkDesc> linked;

            if (!context.linkedFileSinks.containsKey(path)) {
                linked = new ArrayList<FileSinkDesc>();
                context.linkedFileSinks.put(path, linked);
            }
            linked = context.linkedFileSinks.get(path);
            linked.add(desc);

            desc.setDirName(new Path(path, "" + linked.size()));
            desc.setLinkedFileSink(true);
            desc.setParentDir(path);
            desc.setLinkedFileSinkDesc(linked);
        }

        if (current instanceof AppMasterEventOperator) {
            // remember for additional processing later
            context.eventOperatorSet.add((AppMasterEventOperator) current);

            // mark the original as abandoned. Don't need it anymore.
            context.abandonedEventOperatorSet
                    .add((AppMasterEventOperator) replacementMap.inverse().get(current));
        }

        if (current instanceof UnionOperator) {
            Operator<?> parent = null;
            int count = 0;

            for (Operator<?> op : current.getParentOperators()) {
                if (seen.contains(op)) {
                    ++count;
                    parent = op;
                }
            }

            // we should have been able to reach the union from only one side.
            assert count <= 1;

            if (parent == null) {
                // root operator is union (can happen in reducers)
                replacementMap.put(current, current.getChildOperators().get(0));
            } else {
                parent.removeChildAndAdoptItsChildren(current);
            }
        }

        if (current instanceof FileSinkOperator || current instanceof ReduceSinkOperator) {
            current.setChildOperators(null);
        } else {
            operators.addAll(current.getChildOperators());
        }
    }
    work.setDummyOps(dummyOps);
    work.replaceRoots(replacementMap);
}

From source file:com.cinchapi.concourse.lang.Parser.java

/**
 * Convert a valid and well-formed list of {@link Symbol} objects into
 * an {@link AST}.
 * <p>
 * NOTE: This method will group non-conjunctive symbols into
 * {@link Expression} objects.
 * </p>
 * 
 * @param symbols
 * @return the symbols in an AST
 */
public static AST toAbstractSyntaxTree(List<Symbol> symbols) {
    Deque<Symbol> operatorStack = new ArrayDeque<Symbol>();
    Deque<AST> operandStack = new ArrayDeque<AST>();
    symbols = groupExpressions(symbols);
    main: for (Symbol symbol : symbols) {
        if (symbol == ParenthesisSymbol.LEFT) {
            operatorStack.push(symbol);
        } else if (symbol == ParenthesisSymbol.RIGHT) {
            while (!operatorStack.isEmpty()) {
                Symbol popped = operatorStack.pop();
                if (popped == ParenthesisSymbol.LEFT) {
                    continue main;
                } else {
                    addASTNode(operandStack, popped);
                }
            }
            throw new SyntaxException(
                    MessageFormat.format("Syntax error in {0}: Mismatched parenthesis", symbols));
        } else if (symbol instanceof Expression) {
            operandStack.add(ExpressionTree.create((Expression) symbol));
        } else {
            operatorStack.push(symbol);
        }
    }
    while (!operatorStack.isEmpty()) {
        addASTNode(operandStack, operatorStack.pop());
    }
    return operandStack.pop();
}
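
toAbstractSyntaxTree keeps two deques, an operator stack and an operand stack, with isEmpty() controlling both the parenthesis unwinding and the final reduction. A small generic analogue is the classic two-stack evaluator, sketched here for fully parenthesized arithmetic (hypothetical code, unrelated to Concourse's types):

import java.util.ArrayDeque;
import java.util.Deque;

public class TwoStackEval {

    // Dijkstra's two-stack evaluation of a space-separated, fully parenthesized
    // expression, e.g. "( 1 + ( 2 * 3 ) )" -> 7.
    public static int evaluate(String expression) {
        Deque<String> operators = new ArrayDeque<>();
        Deque<Integer> operands = new ArrayDeque<>();
        for (String token : expression.split("\\s+")) {
            if (token.equals("(")) {
                continue;                    // opening parentheses are ignored
            } else if (token.equals("+") || token.equals("*")) {
                operators.push(token);
            } else if (token.equals(")")) {
                reduce(operators, operands); // a closing parenthesis applies one operator
            } else {
                operands.push(Integer.parseInt(token));
            }
        }
        while (!operators.isEmpty()) {       // apply whatever operators are left
            reduce(operators, operands);
        }
        return operands.pop();
    }

    private static void reduce(Deque<String> operators, Deque<Integer> operands) {
        int right = operands.pop();
        int left = operands.pop();
        operands.push(operators.pop().equals("+") ? left + right : left * right);
    }
}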

From source file:com.hazelcast.stabilizer.Utils.java

public static byte[] zip(List<File> roots) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Deque<File> queue = new LinkedList<File>();
    ZipOutputStream zout = new ZipOutputStream(out);

    Set<String> names = new HashSet<String>();

    try {
        for (File root : roots) {
            URI base = root.isDirectory() ? root.toURI() : root.getParentFile().toURI();
            queue.push(root);
            while (!queue.isEmpty()) {
                File file = queue.pop();
                if (file.getName().equals(".DS_Store")) {
                    continue;
                }

                //                    log.finest("Zipping: " + file.getAbsolutePath());

                if (file.isDirectory()) {
                    String name = base.relativize(file.toURI()).getPath();
                    name = name.endsWith("/") ? name : name + "/";

                    if (names.add(name)) {
                        zout.putNextEntry(new ZipEntry(name));
                    }

                    for (File kid : file.listFiles()) {
                        queue.push(kid);
                    }
                } else {
                    String name = base.relativize(file.toURI()).getPath();
                    zout.putNextEntry(new ZipEntry(name));
                    copy(file, zout);
                    zout.closeEntry();
                }
            }
        }
    } finally {
        zout.close();
    }

    return out.toByteArray();
}

From source file:com.textocat.textokit.commons.util.CorpusUtils.java

/**
 * Partition corpus files specified by filters.
 *
 * @param corpusDir          corpus base directory
 * @param corpusFileFilter   filter for corpus files
 * @param corpusSubDirFilter filter for corpus subdirectories. If null subdirectories will
 *                           be ignored.
 * @param partitionsNumber
 * @return list of file sets (partitions)
 */
public static List<Set<File>> partitionCorpusByFileSize(File corpusDir, IOFileFilter corpusFileFilter,
        IOFileFilter corpusSubDirFilter, int partitionsNumber) {
    log.info("Partitioning corpus {} with file filter {} and subdir filter {}...",
            new Object[] { corpusDir.getAbsolutePath(), corpusFileFilter, corpusSubDirFilter });
    // TODO implement an algorithm that is more robust to different file sizes
    // e.g. it should handle the case when there is no more files to include into the last partition
    if (partitionsNumber <= 0) {
        throw new IllegalArgumentException(String.format("Illegal number of partitions: %s", partitionsNumber));
    }
    if (!corpusDir.isDirectory()) {
        throw new IllegalArgumentException(String.format("%s is not existing directory", corpusDir));
    }
    final Deque<File> corpusFilesDeq;
    {
        List<File> corpusFiles = Lists
                .newArrayList(FileUtils.listFiles(corpusDir, corpusFileFilter, corpusSubDirFilter));
        // sort by decreasing size to smooth differences between parts
        Collections.sort(corpusFiles, SizeFileComparator.SIZE_REVERSE);
        corpusFilesDeq = Lists.newLinkedList(corpusFiles);
    }
    //
    int totalSize = 0;
    for (File cf : corpusFilesDeq) {
        totalSize += cf.length();
    }
    log.info("Corpus total size (bytes): {}", totalSize);
    List<FileBucket> buckets = Lists.newArrayListWithExpectedSize(partitionsNumber);
    // create empty parts
    for (int i = 0; i < partitionsNumber; i++) {
        buckets.add(new FileBucket());
    }
    while (!corpusFilesDeq.isEmpty()) {
        File cf = corpusFilesDeq.pop();
        buckets.get(0).add(cf);
        // resort: make the least bucket first
        Collections.sort(buckets);
    }
    // resort: make the largest bucket first
    Collections.sort(buckets, Collections.reverseOrder());
    // log
    log.info("Corpus {} has been partitioned by file sizes. Result partitions:\n{}", corpusDir,
            Joiner.on('\n').join(buckets));
    // transform
    List<Set<File>> result = Lists.newArrayList();
    for (FileBucket b : buckets) {
        result.add(b.getFiles());
    }
    // sanity checks
    if (result.size() != partitionsNumber || result.get(result.size() - 1).isEmpty()) {
        throw new IllegalStateException(
                "Illegal corpus partitioning result. Check previous log messages for details.");
    }
    return result;
}
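
The partitioning loop is a greedy bin-packing heuristic: files are pre-sorted by decreasing size into a Deque, each pop() goes into the bucket with the smallest running total, and isEmpty() ends the distribution. A self-contained sketch of the same heuristic over plain sizes (the types and names here are assumptions, not the project's FileBucket API):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Deque;
import java.util.List;

public class GreedyPartition {

    // Split sizes into the requested number of buckets with roughly equal totals.
    public static List<List<Long>> partition(List<Long> sizes, int parts) {
        List<List<Long>> buckets = new ArrayList<>();
        List<Long> totals = new ArrayList<>();
        for (int i = 0; i < parts; i++) {
            buckets.add(new ArrayList<>());
            totals.add(0L);
        }
        // Handling the largest items first smooths out differences between buckets.
        List<Long> sorted = new ArrayList<>(sizes);
        sorted.sort(Comparator.reverseOrder());
        Deque<Long> queue = new ArrayDeque<>(sorted);
        while (!queue.isEmpty()) {
            long size = queue.pop();
            int smallest = 0;                // index of the bucket with the least content
            for (int i = 1; i < parts; i++) {
                if (totals.get(i) < totals.get(smallest)) {
                    smallest = i;
                }
            }
            buckets.get(smallest).add(size);
            totals.set(smallest, totals.get(smallest) + size);
        }
        return buckets;
    }
}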