Example usage for java.util.Deque.size()

Introduction

On this page you can find example usages of java.util.Deque.size().

Prototype

int size();

Document

Returns the number of elements in this deque.
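
Before the real-world usages below, here is a minimal, self-contained sketch (not taken from any of the projects listed) showing how size() tracks additions and removals at either end of the deque:

import java.util.ArrayDeque;
import java.util.Deque;

public class DequeSizeDemo {
    public static void main(String[] args) {
        Deque<String> deque = new ArrayDeque<>();
        System.out.println(deque.size()); // 0: the deque starts empty

        deque.addFirst("a");
        deque.addLast("b");
        System.out.println(deque.size()); // 2: one element added at each end

        deque.removeLast();
        System.out.println(deque.size()); // 1: one element remains
    }
}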

Usage

From source file:com.epam.cme.storefront.history.impl.DefaultBrowseHistory.java

@Override
public void addBrowseHistoryEntry(final BrowseHistoryEntry browseHistoryEntry) {
    // Get the actual history entry list stored in the session
    final Deque<BrowseHistoryEntry> browseHistoryEntries = getBrowseHistoryEntries();

    if (browseHistoryEntries != null) {
        // Lock on the entries to ensure that we modify it atomically
        synchronized (browseHistoryEntries) {
            // Add the entry
            browseHistoryEntries.addFirst(browseHistoryEntry);

            // Remove any entries that are over capacity
            while (browseHistoryEntries.size() > getCapacity()) {
                browseHistoryEntries.removeLast();
            }
        }
    }
}

From source file:edu.byu.nlp.al.EmpiricalAnnotationInstanceManager.java

@VisibleForTesting
EmpiricalAnnotationInstanceManager(Iterable<FlatInstance<D, L>> instances,
        EmpiricalAnnotations<D, L> annotations, AnnotationRecorder<D, L> annotationRecorder,
        int maxNumAnnotations, int maxNumMeasurements, boolean prioritizeLabelProportions,
        RandomGenerator rnd) {
    super(annotationRecorder);

    List<FlatInstance<D, L>> sortedAnnotations = Lists.newArrayList();
    for (FlatInstance<D, L> inst : instances) {
        // add each annotation associated with this item to the queue 
        sortedAnnotations.addAll(annotations.getAnnotationsFor(inst.getSource(), inst.getData()).values());
    }
    // sort the annotation queue based on annotation order
    Datasets.sortAnnotationsInPlace(sortedAnnotations);

    // interleave measurements and annotations
    Deque<FlatInstance<D, L>> measurementDeque = Deques.randomizedDeque(annotations.getMeasurements(), rnd);
    prioritizeMeasurements(measurementDeque, prioritizeLabelProportions);
    Deque<FlatInstance<D, L>> annotationDeque = new ArrayDeque<FlatInstance<D, L>>(sortedAnnotations);
    queue = Lists.newLinkedList(); // better queueing behavior

    // add measurements 
    int numMeasurements = 0;
    while (measurementDeque.size() > 0 && numMeasurements < maxNumMeasurements) {
        numMeasurements += 1;
        queue.add(measurementDeque.pop());
    }

    // add annotations 
    int numAnnotations = 0;
    while (annotationDeque.size() > 0 && numAnnotations < maxNumAnnotations) {
        numAnnotations += 1;
        queue.add(annotationDeque.pop());
    }
}

From source file:io.cloudslang.lang.compiler.CompileParallelLoopFlowTest.java

@Test
public void testPreCompileParallelLoopFlowNavigate() throws Exception {
    Deque<Step> steps = getStepsAfterPrecompileFlow("/loops/parallel_loop/parallel_loop_navigate.sl");
    assertEquals(2, steps.size());

    Step parallelStep = steps.getFirst();

    verifyParallelLoopStatement(parallelStep);

    List<Output> publishValues = getPublishOutputs(parallelStep);
    assertEquals(0, publishValues.size());

    List<Map<String, String>> expectedNavigationStrings = new ArrayList<>();
    Map<String, String> successMap = new HashMap<>();
    successMap.put(ScoreLangConstants.SUCCESS_RESULT, "print_list");
    Map<String, String> failureMap = new HashMap<>();
    failureMap.put(ScoreLangConstants.FAILURE_RESULT, "FAILURE");
    expectedNavigationStrings.add(successMap);
    expectedNavigationStrings.add(failureMap);
    verifyNavigationStrings(expectedNavigationStrings, parallelStep);

    assertTrue(parallelStep.isParallelLoop());
}

From source file:edu.byu.nlp.al.EmpiricalAnnotationLayersInstanceManager.java

@VisibleForTesting
EmpiricalAnnotationLayersInstanceManager(Iterable<FlatInstance<D, L>> instances,
        EmpiricalAnnotations<D, L> annotations, AnnotationRecorder<D, L> annotationRecorder,
        int maxNumAnnotations, int maxNumMeasurements, boolean prioritizeLabelProportions,
        RandomGenerator rnd) {
    super(annotationRecorder);

    // make a mutable collection of all annotations for each instance
    List<FlatInstance<D, L>> sortedAnnotations = Lists.newArrayList();
    Map<String, Deque<FlatInstance<D, L>>> perInstanceAnnotationLists = Maps.newIdentityHashMap();
    for (FlatInstance<D, L> inst : instances) {
        // find all annotations associated with this item
        Collection<FlatInstance<D, L>> anns = annotations.getAnnotationsFor(inst.getSource(), inst.getData())
                .values();
        perInstanceAnnotationLists.put(inst.getSource(), Deques.randomizedDeque(anns, rnd));
    }

    // grab one annotation for each instance until they are gone
    // (annotate the whole corpus 1-deep before starting on 2-deep, and so on)
    while (perInstanceAnnotationLists.size() > 0) {
        Set<String> toRemove = Sets.newHashSet();

        for (String src : Iterables2.shuffled(perInstanceAnnotationLists.keySet(), rnd)) {
            Deque<FlatInstance<D, L>> anns = perInstanceAnnotationLists.get(src);
            if (anns.size() > 0) {
                // add 1 to the queue for this instance
                sortedAnnotations.add(anns.pop());
            }
            if (anns.size() == 0) {
                toRemove.add(src);
            }
        }

        for (String src : toRemove) {
            perInstanceAnnotationLists.remove(src);
        }
    }

    // interleave measurements and annotations in the final queue
    Deque<FlatInstance<D, L>> measurementDeque = Deques.randomizedDeque(annotations.getMeasurements(), rnd);
    prioritizeMeasurements(measurementDeque, prioritizeLabelProportions);
    Deque<FlatInstance<D, L>> annotationDeque = new ArrayDeque<FlatInstance<D, L>>(sortedAnnotations);
    queue = Lists.newLinkedList(); // better queueing behavior

    // add measurements 
    int numMeasurements = 0;
    while (measurementDeque.size() > 0 && numMeasurements < maxNumMeasurements) {
        numMeasurements += 1;
        queue.add(measurementDeque.pop());
    }

    // add annotations 
    int numAnnotations = 0;
    while (annotationDeque.size() > 0 && numAnnotations < maxNumAnnotations) {
        numAnnotations += 1;
        queue.add(annotationDeque.pop());
    }

}

From source file:io.cloudslang.lang.compiler.CompileParallelLoopFlowTest.java

@Test
public void testPreCompileParallelLoopFlowPublishNavigate() throws Exception {
    Deque<Step> steps = getStepsAfterPrecompileFlow("/loops/parallel_loop/parallel_loop_publish_navigate.sl");
    assertEquals(2, steps.size());

    Step parallelStep = steps.getFirst();

    verifyParallelLoopStatement(parallelStep);

    List<Output> publishValues = getPublishOutputs(parallelStep);
    assertEquals(2, publishValues.size());
    assertEquals("${ map(lambda x:str(x['name']), branches_context) }", publishValues.get(0).getValue().get());

    List<Map<String, String>> expectedNavigationStrings = new ArrayList<>();
    Map<String, String> successMap = new HashMap<>();
    successMap.put(ScoreLangConstants.SUCCESS_RESULT, "print_list");
    Map<String, String> failureMap = new HashMap<>();
    failureMap.put(ScoreLangConstants.FAILURE_RESULT, "FAILURE");
    expectedNavigationStrings.add(successMap);
    expectedNavigationStrings.add(failureMap);
    verifyNavigationStrings(expectedNavigationStrings, parallelStep);

    assertTrue(parallelStep.isParallelLoop());
}

From source file:ict.ocrabase.main.java.client.bulkload.LoadHFiles.java

/**
 * Perform a bulk load of the given output directory into the given
 * pre-existing tables. Each table's HFiles are read from a
 * subdirectory of the output directory named after that table.
 * @param outDir the directory that was provided as the output path
 * of a job using HFileOutputFormat
 * @param tableNames the tables to load into
 * @throws TableNotFoundException if a table does not yet exist
 */
public void doBulkLoad(Path outDir, String[] tableNames) throws TableNotFoundException, IOException {
    Configuration config = getConf();
    int num = 0;
    int tableNum = tableNames.length;
    for (String tableName : tableNames) {
        HTable table = new HTable(config, tableName);
        Path hfofDir = new Path(outDir, tableName);
        HConnection conn = table.getConnection();

        if (!conn.isTableAvailable(table.getTableName())) {
            throw new TableNotFoundException(
                    "Table " + Bytes.toStringBinary(table.getTableName()) + "is not currently available.");
        }

        Deque<LoadQueueItem> queue = null;
        try {
            queue = discoverLoadQueue(hfofDir);
            int total = queue.size();
            while (!queue.isEmpty()) {
                LoadQueueItem item = queue.remove();
                tryLoad(item, conn, table.getTableName(), queue, config);
                progress = (num + 1 - (float) queue.size() / total) / tableNum;
            }
        } finally {
            if (queue != null && !queue.isEmpty()) {
                StringBuilder err = new StringBuilder();
                err.append("-------------------------------------------------\n");
                err.append("Bulk load aborted with some files not yet loaded:\n");
                err.append("-------------------------------------------------\n");
                for (LoadQueueItem q : queue) {
                    err.append("  ").append(q.hfilePath).append('\n');
                }
                LOG.error(err);
                throw new IOException();
            }
        }
        num++;
    }
    progress = 1;
}

From source file:automenta.climatenet.ImportKML.java

public String[] getPath(Deque<String> p) {
    return p.toArray(new String[p.size()]);
}

From source file:org.ow2.authzforce.pap.dao.flatfile.FlatFileDAORefPolicyProviderModule.java

@Override
public StaticTopLevelPolicyElementEvaluator get(final TopLevelPolicyElementType policyType, final String id,
        final VersionPatterns versionPatterns, final Deque<String> ancestorPolicyRefChain)
        throws IndeterminateEvaluationException {
    if (policyType == TopLevelPolicyElementType.POLICY) {
        throw UNSUPPORTED_POLICY_REFERENCE_EXCEPTION;
    }

    final Deque<String> newPolicyRefChain = Utils.appendAndCheckPolicyRefChain(ancestorPolicyRefChain,
            Collections.singletonList(id), maxPolicyRefDepth);

    // Request for PolicySetEvaluator (from PolicySetIdReference)
    final Entry<PolicyVersion, PolicyEvaluatorSupplier> policyEntry = policyCache.get(id, versionPatterns);
    if (policyEntry == null) {
        return null;
    }

    final int refChainLenBefore = newPolicyRefChain.size();
    final StaticTopLevelPolicyElementEvaluator policyEvaluator;
    try {
        policyEvaluator = policyEntry.getValue().get(this, newPolicyRefChain);
    } catch (final IndeterminateEvaluationException e) {
        // throw back a high-level exception message for easier
        // troubleshooting (no file path)
        final PolicyVersion version = policyEntry.getKey();
        throw new IndeterminateEvaluationException(
                "Matched PolicySet '" + id + "' (version " + version
                        + ") is invalid or its content is unavailable",
                StatusHelper.STATUS_PROCESSING_ERROR, e);
    }

    final List<String> resultPolicyLongestRefChain = policyEvaluator.getExtraPolicyMetadata()
            .getLongestPolicyRefChain();
    /*
     * If there is a longest ref chain in result policy, but
     * newPolicyRefChain was not updated with it (length unchanged, i.e.
     * same as before the get(...)), it means the policy was already parsed
     * before this retrieval (longest ref chain already computed).
     * Therefore, we need to take into account the longest policy ref chain
     * already computed in the result policy with the current policy ref
     * chain up to this result policy, i.e. newPolicyRefChain; and check the
     * total chain length.
     */
    if (resultPolicyLongestRefChain != null && !resultPolicyLongestRefChain.isEmpty()
            && newPolicyRefChain.size() == refChainLenBefore) {
        // newPolicyRefChain was not updated, so we assumed the result
        // policy was already parsed, and longest ref chain already computed
        // To get the new longest ref chain, we need to combine the two
        Utils.appendAndCheckPolicyRefChain(newPolicyRefChain, resultPolicyLongestRefChain, maxPolicyRefDepth);
    }

    return policyEvaluator;
}

From source file:com.google.gwt.emultest.java.util.ArrayDequeTest.java

private void checkDequeSizeAndContent(Deque<?> deque, Object... expected) {
    assertEquals(expected.length, deque.size());
    int i = 0;
    for (Object e : deque) {
        assertEquals(expected[i++], e);
    }
    assertEquals(expected.length, i);
}

From source file:info.magnolia.ui.form.field.definition.FieldDefinitionKeyGenerator.java

@Override
protected void keysFor(List<String> list, FieldDefinition field, AnnotatedElement el) {
    Object parent = getParentViaCast(field);
    String fieldName = field.getName().replace(':', '-');
    if (parent != null && isChooseDialog(parent.getClass())) {
        // handle choose dialog
        final AppDescriptor app = (AppDescriptor) getRoot(field);
        addKey(list, app.getName(), "chooseDialog", "fields", fieldName, fieldOrGetterName(el));
    } else {
        final Deque<String> parentNames = new LinkedList<String>();
        while (parent != null && !(parent instanceof TabDefinition)) {
            String parentName = getParentName(parent);
            if (parentName != null) {
                parentNames.addFirst(parentName);
            }
            parent = getParentViaCast(parent);
        }

        final String property = fieldOrGetterName(el);
        final String parentKeyPart = StringUtils.join(parentNames, '.').replace(':', '-');
        if (parent instanceof TabDefinition) {
            TabDefinition tab = (TabDefinition) parent;
            final String tabName = tab.getName();
            final FormDefinition formDef = getParentViaCast(tab);
            final String dialogID = getParentId(formDef);

            // in case of a field in field
            if (parentNames.size() > 0) {
                // <dialogId>.<tabName>.<parentFieldNames_separated_by_dots>.<fieldName>.<property>
                // <dialogId>.<tabName>.<parentFieldNames_separated_by_dots>.<fieldName> (in case of property==label)
                addKey(list, dialogID, tabName, parentKeyPart, fieldName, property);
            }
            // <dialogId>.<tabName>.<fieldName>.<property>
            // <dialogId>.<tabName>.<fieldName> (in case of property==label)
            addKey(list, dialogID, tabName, fieldName, property);
            // <tabName>.<fieldName> (in case of property==label)
            addKey(list, tabName, fieldName, property);
            // <dialogId>.<fieldName>.<property>
            // <dialogId>.<fieldName> (in case property==label)
            addKey(list, dialogID, fieldName, property);

            String[] parts = StringUtils.split(dialogID, ".");
            if (parts.length > 1) {
                String dialogIDNoModuleName = parts[parts.length - 1];
                addKey(list, dialogIDNoModuleName, fieldName, property);
                addKey(list, dialogIDNoModuleName, tabName, fieldName, property);
            }

        } else {
            // In case we didn't encounter parent tab definition - we simply generated a key based on dot-separated parent names
            addKey(list, parentKeyPart, fieldName, property);
        }
    }
}