Example usage for java.util Deque pop

Introduction

On this page you can find usage examples for java.util.Deque#pop, collected from open-source projects.

Prototype

E pop();

Document

Pops an element from the stack represented by this deque.

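Before the project examples, here is a minimal self-contained sketch of the pop() contract (the class name is illustrative):

import java.util.ArrayDeque;
import java.util.Deque;

public class DequePopDemo {
    public static void main(String[] args) {
        Deque<String> stack = new ArrayDeque<>();
        stack.push("first");
        stack.push("second");

        // pop() removes and returns the head of the deque (LIFO order);
        // it is equivalent to removeFirst().
        System.out.println(stack.pop()); // prints "second"
        System.out.println(stack.pop()); // prints "first"

        // Unlike poll(), pop() throws NoSuchElementException on an empty deque.
    }
}
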
Usage

From source file:io.apiman.plugins.keycloak_oauth_policy.ClaimLookup.java

private static void getProperties(Class<?> klazz, String path, Deque<Field> fieldChain) {
    for (Field f : klazz.getDeclaredFields()) {
        f.setAccessible(true);
        JsonProperty jsonProperty = f.getAnnotation(JsonProperty.class);
        if (jsonProperty != null) {
            fieldChain.push(f);
            // If the inspected type has nested @JsonProperty annotations, we need to inspect it
            if (hasJsonPropertyAnnotation(f)) {
                getProperties(f.getType(), f.getName() + ".", fieldChain); // Add "." when traversing into new object.
            } else { // Otherwise, just assume it's simple as the best we can do is #toString
                List<Field> fieldList = new ArrayList<>(fieldChain);
                Collections.reverse(fieldList);
                STANDARD_CLAIMS_FIELD_MAP.put(path + jsonProperty.value(), fieldList);
                fieldChain.pop(); // Pop, as we have now reached end of this chain.
            }
        }
    }
}
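
The example above pushes each annotated field before descending and pops once the end of a chain is reached, so the deque mirrors the path currently being built. Below is a minimal sketch of that push/pop chain-tracking pattern, using a plain nested Map instead of reflection (all names are illustrative):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.Map;

public class FieldChainDemo {
    // Print a dotted path for every leaf value, tracking the chain of keys
    // on a Deque used as a stack.
    @SuppressWarnings("unchecked")
    static void walk(Map<String, Object> node, Deque<String> chain) {
        for (Map.Entry<String, Object> e : node.entrySet()) {
            chain.push(e.getKey());
            if (e.getValue() instanceof Map) {
                walk((Map<String, Object>) e.getValue(), chain);
            } else {
                // The deque iterates head-first (most recent key first),
                // so the dotted path is assembled in reverse.
                StringBuilder path = new StringBuilder();
                for (String part : chain) {
                    path.insert(0, path.length() == 0 ? part : part + ".");
                }
                System.out.println(path + " = " + e.getValue());
            }
            chain.pop(); // done with this key; unwind the chain
        }
    }

    public static void main(String[] args) {
        Map<String, Object> address = new LinkedHashMap<>();
        address.put("city", "Berlin");
        Map<String, Object> person = new LinkedHashMap<>();
        person.put("name", "alice");
        person.put("address", address);
        walk(person, new ArrayDeque<>());
        // name = alice
        // address.city = Berlin
    }
}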

From source file:com.hazelcast.stabilizer.Utils.java

public static byte[] zip(List<File> roots) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Deque<File> queue = new LinkedList<File>();
    ZipOutputStream zout = new ZipOutputStream(out);

    Set<String> names = new HashSet<String>();

    try {
        for (File root : roots) {
            URI base = root.isDirectory() ? root.toURI() : root.getParentFile().toURI();
            queue.push(root);
            while (!queue.isEmpty()) {
                File file = queue.pop();
                if (file.getName().equals(".DS_Store")) {
                    continue;
                }

                // log.finest("Zipping: " + file.getAbsolutePath());

                if (file.isDirectory()) {
                    String name = base.relativize(file.toURI()).getPath();
                    name = name.endsWith("/") ? name : name + "/";

                    if (names.add(name)) {
                        zout.putNextEntry(new ZipEntry(name));
                    }

                    for (File kid : file.listFiles()) {
                        queue.push(kid);
                    }
                } else {
                    String name = base.relativize(file.toURI()).getPath();
                    zout.putNextEntry(new ZipEntry(name));
                    copy(file, zout);
                    zout.closeEntry();
                }
            }
        }
    } finally {
        zout.close();
    }

    return out.toByteArray();
}
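
Pairing push() with pop() makes this loop a depth-first walk of the directory tree without recursion; switching push(kid) to addLast(kid) would turn the same loop breadth-first. A hypothetical call site (paths are illustrative, and the Utils class shown above is assumed to be available):

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;

public class ZipUsageDemo {
    public static void main(String[] args) throws Exception {
        byte[] archive = Utils.zip(Arrays.asList(new File("/tmp/report-dir")));
        Files.write(Paths.get("/tmp/report.zip"), archive);
    }
}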

From source file:org.seedstack.spring.internal.SpringTransactionStatusLink.java

TransactionStatus pop() {
    Deque<TransactionStatus> transactionStatuses = perThreadObjectContainer.get();
    TransactionStatus transactionStatus = transactionStatuses.pop();
    if (transactionStatuses.isEmpty()) {
        perThreadObjectContainer.remove();
    }
    return transactionStatus;
}
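
A minimal sketch of the same per-thread stack idea (names are illustrative). Removing the ThreadLocal entry as soon as its deque drains avoids leaking the entry when threads are pooled and reused:

import java.util.ArrayDeque;
import java.util.Deque;

public class PerThreadStack<T> {
    private final ThreadLocal<Deque<T>> container = ThreadLocal.withInitial(ArrayDeque::new);

    public void push(T value) {
        container.get().push(value);
    }

    public T pop() {
        Deque<T> stack = container.get();
        T value = stack.pop(); // throws NoSuchElementException on unbalanced calls
        if (stack.isEmpty()) {
            container.remove(); // drop the ThreadLocal entry once drained
        }
        return value;
    }
}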

From source file:com.textocat.textokit.commons.util.CorpusUtils.java

/**
 * Partition corpus files specified by filters.
 *
 * @param corpusDir          corpus base directory
 * @param corpusFileFilter   filter for corpus files
 * @param corpusSubDirFilter filter for corpus subdirectories. If null subdirectories will
 *                           be ignored.
 * @param partitionsNumber   the number of partitions to create
 * @return list of file sets (partitions)
 */
public static List<Set<File>> partitionCorpusByFileSize(File corpusDir, IOFileFilter corpusFileFilter,
        IOFileFilter corpusSubDirFilter, int partitionsNumber) {
    log.info("Partitioning corpus {} with file filter {} and subdir filter {}...",
            new Object[] { corpusDir.getAbsolutePath(), corpusFileFilter, corpusSubDirFilter });
    // TODO implement an algorithm that is more robust to different file sizes
    // e.g. it should handle the case when there is no more files to include into the last partition
    if (partitionsNumber <= 0) {
        throw new IllegalArgumentException(String.format("Illegal number of partitions: %s", partitionsNumber));
    }
    if (!corpusDir.isDirectory()) {
        throw new IllegalArgumentException(String.format("%s is not existing directory", corpusDir));
    }
    final Deque<File> corpusFilesDeq;
    {
        List<File> corpusFiles = Lists
                .newArrayList(FileUtils.listFiles(corpusDir, corpusFileFilter, corpusSubDirFilter));
        // sort by decreasing size to smooth differences between parts
        Collections.sort(corpusFiles, SizeFileComparator.SIZE_REVERSE);
        corpusFilesDeq = Lists.newLinkedList(corpusFiles);
    }
    //
    int totalSize = 0;
    for (File cf : corpusFilesDeq) {
        totalSize += cf.length();
    }
    log.info("Corpus total size (bytes): {}", totalSize);
    List<FileBucket> buckets = Lists.newArrayListWithExpectedSize(partitionsNumber);
    // create empty parts
    for (int i = 0; i < partitionsNumber; i++) {
        buckets.add(new FileBucket());
    }
    while (!corpusFilesDeq.isEmpty()) {
        File cf = corpusFilesDeq.pop();
        buckets.get(0).add(cf);
        // resort: make the least bucket first
        Collections.sort(buckets);
    }
    // resort: make the largest bucket first
    Collections.sort(buckets, Collections.reverseOrder());
    // log
    log.info("Corpus {} has been partitioned by file sizes. Result partitions:\n{}", corpusDir,
            Joiner.on('\n').join(buckets));
    // transform
    List<Set<File>> result = Lists.newArrayList();
    for (FileBucket b : buckets) {
        result.add(b.getFiles());
    }
    // sanity checks
    if (result.size() != partitionsNumber || result.get(result.size() - 1).isEmpty()) {
        throw new IllegalStateException(
                "Illegal corpus partitioning result. Check previous log messages for details.");
    }
    return result;
}
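
The while loop above is a greedy balancing scheme: files are popped in decreasing size order and each one goes to the currently lightest bucket. A self-contained sketch of the same idea with plain numbers (all values are illustrative):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Deque;
import java.util.List;

public class GreedyPartitionDemo {
    public static void main(String[] args) {
        // Sizes sorted descending, so the largest items are placed first.
        Deque<Long> sizes = new ArrayDeque<>(Arrays.asList(9L, 7L, 6L, 4L, 2L, 1L));
        List<List<Long>> buckets = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            buckets.add(new ArrayList<>());
        }
        while (!sizes.isEmpty()) {
            long next = sizes.pop();
            // Hand the next-largest item to the currently lightest bucket.
            buckets.stream()
                    .min(Comparator.comparingLong(b -> b.stream().mapToLong(Long::longValue).sum()))
                    .get()
                    .add(next);
        }
        System.out.println(buckets); // e.g. [[9, 1], [7, 2], [6, 4]]
    }
}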

From source file:bb.mcmc.analysis.ESSConvergeStat.java

@Override
protected double calculateEachProgress(Double stat, Deque<Double> record) {

    if (!Double.isNaN(stat)) {
        if (record.size() > 0) {
            record.pop();
        }
        record.add(stat);
    }
    stat = record.peekFirst();
    double progress = stat / essThreshold;
    return progress;
}
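
Here pop() discards the oldest value before the new statistic is appended, so the deque acts as a bounded window (of size one here; the Geweke variant below keeps the last three the same way). A sketch of that pattern with an illustrative capacity:

import java.util.ArrayDeque;
import java.util.Deque;

public class SlidingWindowDemo {
    public static void main(String[] args) {
        final int capacity = 3;
        Deque<Double> window = new ArrayDeque<>();
        for (double x : new double[] { 1, 2, 3, 4, 5 }) {
            if (window.size() >= capacity) {
                window.pop(); // drop the oldest element at the head
            }
            window.add(x); // append the newest at the tail
        }
        System.out.println(window); // [3.0, 4.0, 5.0]
    }
}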

From source file:bb.mcmc.analysis.GewekeConvergeStat.java

@Override
protected double calculateEachProgress(Double stat, Deque<Double> record) {

    if (!Double.isNaN(stat)) {
        if (record.size() > 2) {
            record.pop();
        }
        record.add(stat);
    }
    double avgStat = 0;
    for (double d : record) {
        avgStat += d;
    }
    avgStat /= record.size();

    //      final double progress = Math.exp( rafteryThreshold - avgStat );

    //      return progress;

    final double progress = (1 - nd.cumulativeProbability(Math.abs(avgStat))) / gewekeProgressThreshold;
    //         final double tempP = (1-nd.cumulativeProbability(Math.abs(gewekeStat)-gewekeThreshold))/0.5;
    //         R Code
    //         data<- seq(1.96,4,by=0.01)
    //         plot(data, 1-(pnorm(abs(data))-pnorm(1.96))/0.025, type="l", col=2)
    //         plot(data, (1-pnorm(data-1.96))/0.5, type="l", col=2)

    return progress;
}

From source file:de.l3s.archivepig.enrich.Response.java

@Override
public void enrich(Tuple data, Tuple enrichment, Object... params) throws Exception {
    long size = get(data, "_record.size");
    long offset = get(data, "_record.offset");
    String filename = get(data, "_record.filename");
    String cdxFile = get(data, "_record.cdxFile");

    if (size < 0 || offset < 0)
        return;

    FileSystem fs = FileSystem.get(UDFContext.getUDFContext().getJobConf());

    Deque<String> cdxSegments = new ArrayDeque<String>(Lists.reverse(list(cdxFile.split("\\/"))));
    cdxSegments.pop(); // remove filename
    String pathExtension = "";
    Path path = new Path(ArchiveLoader.dataPath(), pathExtension + filename);
    while (!fs.exists(path)) {
        if (cdxSegments.isEmpty()) {
            enrichment.append(new HashMap<String, String>());
            enrichment.append(new HashMap<String, String>());
            enrichment.append(null);
            return;
        }
        String cdxSegment = cdxSegments.pop();
        if (cdxSegment.endsWith(".har"))
            cdxSegment = cdxSegment.substring(0, cdxSegment.length() - 4);
        pathExtension = cdxSegment + "/" + pathExtension;
        path = new Path(ArchiveLoader.dataPath(), pathExtension + filename);
    }
    FSDataInputStream fsin = fs.open(path);
    fsin.seek(offset);
    InputStream in = fsin;

    ByteArrayOutputStream recordOutput = new ByteArrayOutputStream();
    try {
        try (BoundedInputStream boundedIn = new BoundedInputStream(in, size);
                ArchiveReader reader = ArchiveReaderFactory.get(filename, boundedIn, false);) {
            ArchiveRecord record;
            record = reader.get();

            ArchiveRecordHeader header = record.getHeader();
            enrichment.append(header.getHeaderFields());

            record.dump(recordOutput);
        } catch (Exception e) {
            return;
        } finally {
            in.close();
            recordOutput.close();
        }
    } catch (Exception e) {
        return;
    }

    try (InputStream httpResponse = new ByteArrayInputStream(recordOutput.toByteArray())) {
        // ALL COMMENTS ARE NEW VERSION VARIANTS FOR HTTP-CORE 4.3, currently in use 4.2.5
        //        SessionInputBufferImpl sessionInputBuffer = new SessionInputBufferImpl(new HttpTransportMetricsImpl(), 2048);
        //        sessionInputBuffer.bind(httpResponse);
        //        DefaultHttpResponseParserFactory responseParserFactory = new DefaultHttpResponseParserFactory();
        //        HttpMessageParser<HttpResponse> responseParser = responseParserFactory.create(sessionInputBuffer, MessageConstraints.DEFAULT);
        //        HttpResponse response = responseParser.parse();
        //        Header[] httpHeaders = response.getAllHeaders();

        HttpResponseParser parser = new HttpResponseParser();
        HttpResponse response = parser.parse(httpResponse);
        HttpHeaders httpHeaders = response.getHeaders();

        Map<String, String> httpHeadersMap = new HashMap<String, String>();
        for (HttpHeader httpHeader : httpHeaders) {
            httpHeadersMap.put(httpHeader.getName(), httpHeader.getValue());
        }
        enrichment.append(httpHeadersMap);

        //        byte[] payload = new byte[sessionInputBuffer.length()];
        //        sessionInputBuffer.read(payload);

        byte[] payload = IOUtils.toByteArray(response);

        enrichment.append(payload);

        //        HttpEntity entity = new ByteArrayEntity(payload);
        //        output.append(entity == null ? null : EntityUtils.toString(entity));
    } catch (Exception ignored) {
    }
}

From source file:com.icantrap.collections.dawg.DawgBuilder.java

/**
 * The number of nodes - currently - in the structure that will become the Dawg.
 * @return the number of nodes
 */
public int nodeCount() {
    int nodeCount = 0;
    Deque<Node> stack = new LinkedList<Node>();
    stack.push(root);

    while (!stack.isEmpty()) {
        Node ptr = stack.pop();
        ++nodeCount;

        for (Node nextChild : ptr.nextChildren)
            stack.push(nextChild);
        if (null != ptr.child)
            stack.push(ptr.child);
    }

    return nodeCount;
}
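
A compact variant of the same explicit-stack traversal over a toy tree (the node type is illustrative). The Deque interface is the recommended replacement for the legacy Stack class in this role:

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.List;

public class NodeCountDemo {
    static class TreeNode {
        final List<TreeNode> children;

        TreeNode(TreeNode... kids) {
            children = Arrays.asList(kids);
        }
    }

    public static void main(String[] args) {
        TreeNode root = new TreeNode(new TreeNode(), new TreeNode(new TreeNode()));
        Deque<TreeNode> stack = new ArrayDeque<>();
        stack.push(root);
        int count = 0;
        while (!stack.isEmpty()) {
            TreeNode n = stack.pop(); // depth-first: take the most recently pushed node
            count++;
            n.children.forEach(stack::push);
        }
        System.out.println(count); // 4
    }
}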

From source file:com.darkstar.beanCartography.utils.finder.Finder.java

/**
 * Process the bean context stack.
 *
 * @param stack stack of objects left to search
 * @param visited set of objects already searched
 */
protected void visit(Deque<BeanContext> stack, Set<BeanContext> visited) {
    BeanContext target = stack.pop();
    if (target == null)
        return;

    if (visited.contains(target))
        return;
    visited.add(target);

    // process this object and check the filters.  if passed filter then run interceptors...
    filtersInterceptors.entrySet().stream().filter(entry -> entry.getKey().accept(target.getSource()))
            .forEach(entry -> entry.getValue().intercept(target.getSource()));

    // process this object's contained objects (i.e. see what we need to add to the stack)...
    if (NameUtils.isImmutable(target.getSource().getClass()))
        return;
    Object fieldValue = null;
    try {
        while (target.hasNextFieldValue()) {
            fieldValue = target.nextFieldValue();

            // skip nulls...
            if (fieldValue == null)
                continue;

            // add pojo or container or whatever this is...
            if (!visited.contains(fieldValue) && !stack.contains(fieldValue))
                stack.add(new BeanContext(fieldValue));

            // arrays...
            if (fieldValue.getClass().isArray()) {
                if (!processArrays)
                    continue;
                final Object arrayFieldValue = fieldValue;
                IntStream.range(0, Array.getLength(arrayFieldValue)).forEach(i -> {
                    Object element = Array.get(arrayFieldValue, i);
                    if (element != null && !visited.contains(element) && !stack.contains(element))
                        stack.add(new BeanContext(element));
                });

                // collections...
            } else if (fieldValue instanceof Collection<?>) {
                if (!processCollections)
                    continue;
                ((Collection<?>) fieldValue).stream().filter(
                        element -> element != null && !visited.contains(element) && !stack.contains(element))
                        .forEach(element -> stack.add(new BeanContext(element)));

                // maps...
            } else if (fieldValue instanceof Map<?, ?>) {
                if (!processMaps)
                    continue;
                ((Map<?, ?>) fieldValue).entrySet().stream().forEach(entry -> {
                    if (entry.getKey() != null && !visited.contains(entry.getKey())
                            && !stack.contains(entry.getKey()))
                        stack.add(new BeanContext(entry.getKey()));
                    if (entry.getValue() != null && !visited.contains(entry.getValue())
                            && !stack.contains(entry.getValue()))
                        stack.add(new BeanContext(entry.getValue()));
                });
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
    }
}
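
A minimal sketch of the same worklist-plus-visited-set pattern (the bean type and names are illustrative). The visited set is what keeps a cyclic object graph from looping forever:

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class WorklistDemo {
    static class Bean {
        final String name;
        final List<Bean> refs = new ArrayList<>();

        Bean(String name) {
            this.name = name;
        }
    }

    public static void main(String[] args) {
        Bean a = new Bean("a");
        Bean b = new Bean("b");
        a.refs.add(b);
        b.refs.add(a); // deliberate cycle

        Deque<Bean> stack = new ArrayDeque<>();
        Set<Bean> visited = new HashSet<>();
        stack.push(a);
        while (!stack.isEmpty()) {
            Bean current = stack.pop();
            if (!visited.add(current)) {
                continue; // already searched; this check breaks the cycle
            }
            System.out.println("visiting " + current.name);
            current.refs.forEach(stack::push);
        }
    }
}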

From source file:com.core.controller.AlgoritmoController.java

public static String busquedaProfundidad(Grafo g, String inicio, String fin) {
    Deque<String> pila = new ArrayDeque<>();
    Deque<String> padresPila = new ArrayDeque<>();
    List<String> explorados = new ArrayList<>();
    List<String> padresExplorados = new ArrayList<>();
    String nodoActual, nodoPadre;
    String result = "Algoritmo de Busqueda Primero en Profundidad";
    result += "\nCantidad de nodos: " + g.getNodos().size();
    result += "\nCantidad de aristas: " + g.getAristas().size();
    pila.push(inicio);
    padresPila.push("#");
    while (true) {
        result += "\nPila: " + Arrays.toString(pila.toArray());
        if (pila.isEmpty()) {
            result += "\nNo se encontro el nodo destino";
            break;
        }
        nodoActual = pila.pop();
        nodoPadre = padresPila.pop();
        explorados.add(nodoActual);
        padresExplorados.add(nodoPadre);
        if (nodoActual.equals(fin)) {
            result += "\nNodo destino alcanzado"; //Mostrar camino
            String nodo = nodoActual;
            String secuenciaResultado = "";
            while (!nodo.equals("#")) {
                secuenciaResultado = nodo + " " + secuenciaResultado;
                nodo = padresExplorados.get(explorados.indexOf(nodo));
            }
            result += "\nCamino solucion: " + secuenciaResultado;
            break;
        }
        List<String> vecinos = g.nodosVecinos(nodoActual);
        for (int i = vecinos.size() - 1; i >= 0; i--) {
            String a = vecinos.get(i);
            if (!explorados.contains(a)) {
                if (pila.contains(a)) {
                    pila.remove(a);
                    padresPila.remove(nodoActual);
                }
                pila.push(a);
                padresPila.push(nodoActual);
            }
        }
    }
    return result;
}
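
Note the parallel padresPila deque, which is pushed and popped in lockstep with pila so that every node popped from the stack arrives together with the parent it was reached from; the explorados/padresExplorados lists then allow the solution path to be rebuilt by walking parents back to the "#" sentinel.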