Example usage for java.util.ArrayDeque ArrayDeque()

List of usage examples for the java.util.ArrayDeque no-argument constructor, ArrayDeque().

Introduction

This page collects real-world usage examples of the java.util.ArrayDeque no-argument constructor, drawn from open source projects.

Prototype

public ArrayDeque() 

Document

Constructs an empty array deque with an initial capacity sufficient to hold 16 elements.
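
A minimal, self-contained sketch of the no-argument constructor in action (the class name ArrayDequeExample is illustrative, not taken from the examples below):

import java.util.ArrayDeque;
import java.util.Deque;

public class ArrayDequeExample {
    public static void main(String[] args) {
        // Constructs an empty deque; the backing array can hold 16 elements
        // before it needs to grow.
        Deque<String> deque = new ArrayDeque<>();
        deque.addLast("first");   // enqueue at the tail (FIFO use)
        deque.push("zeroth");     // push onto the head (LIFO use)
        System.out.println(deque.pollFirst()); // zeroth
        System.out.println(deque.pollFirst()); // first
    }
}

Note that ArrayDeque rejects null elements; examples below that need to enqueue a possibly missing value (such as the null Decision in the PMML converter) wrap it in a Pair instead of enqueuing null directly.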

Usage

From source file: org.apache.gobblin.ingestion.google.webmaster.GoogleWebmasterDataFetcherImpl.java

/**
 * Due to a limitation of the API, we can get at most 5000 rows at a time.
 * Another limitation is that results are sorted by click count, descending;
 * rows with the same click count are ordered arbitrarily. (Read more at
 * https://developers.google.com/webmaster-tools/v3/searchanalytics.) So we
 * fetch all pages by partitions: if a partition comes back with 5000 rows,
 * we split it into more granular partitions.
 */
@Override
public Collection<ProducerJob> getAllPages(String startDate, String endDate, String country, int rowLimit)
        throws IOException {
    log.info("Requested row limit: " + rowLimit);
    if (!_jobs.isEmpty()) {
        log.info("Service got hot started.");
        return _jobs;
    }
    ApiDimensionFilter countryFilter = GoogleWebmasterFilter.countryEqFilter(country);

    List<GoogleWebmasterFilter.Dimension> requestedDimensions = new ArrayList<>();
    requestedDimensions.add(GoogleWebmasterFilter.Dimension.PAGE);
    int expectedSize = -1;
    if (rowLimit >= GoogleWebmasterClient.API_ROW_LIMIT) {
        //expected size only makes sense when the data set size is larger than GoogleWebmasterClient.API_ROW_LIMIT
        expectedSize = getPagesSize(startDate, endDate, country, requestedDimensions,
                Arrays.asList(countryFilter));
        log.info(String.format("Expected number of pages is %d for market-%s from %s to %s", expectedSize,
                GoogleWebmasterFilter.countryFilterToString(countryFilter), startDate, endDate));
    }

    Queue<Pair<String, FilterOperator>> jobs = new ArrayDeque<>();
    jobs.add(Pair.of(_siteProperty, FilterOperator.CONTAINS));

    Collection<String> allPages = getPages(startDate, endDate, requestedDimensions, countryFilter, jobs,
            Math.min(rowLimit, GoogleWebmasterClient.API_ROW_LIMIT));
    int actualSize = allPages.size();
    log.info(String.format("A total of %d pages fetched for property %s at country-%s from %s to %s",
            actualSize, _siteProperty, country, startDate, endDate));

    if (expectedSize != -1 && actualSize != expectedSize) {
        log.warn(String.format("Expected page size is %d, but only able to get %d", expectedSize, actualSize));
    }

    ArrayDeque<ProducerJob> producerJobs = new ArrayDeque<>(actualSize);
    for (String page : allPages) {
        producerJobs.add(new SimpleProducerJob(page, startDate, endDate));
    }
    return producerJobs;
}
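
The method above uses the ArrayDeque as a FIFO work queue whose items can spawn finer-grained items. Reduced to a hedged sketch, with needsSplit and split as hypothetical stand-ins for the real partitioning logic:

import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;

public class PartitionSketch {
    public static void main(String[] args) {
        Queue<String> jobs = new ArrayDeque<>();
        jobs.add("root");                  // seed with the whole property
        while (!jobs.isEmpty()) {
            String job = jobs.remove();
            if (needsSplit(job)) {         // e.g. the API returned exactly 5000 rows
                jobs.addAll(split(job));   // retry with more granular partitions
            } else {
                System.out.println("fetched " + job);
            }
        }
    }

    // Hypothetical helpers standing in for the real partitioning logic.
    private static boolean needsSplit(String job) { return job.equals("root"); }
    private static List<String> split(String job) { return List.of(job + "/a", job + "/b"); }
}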

From source file: com.cloudera.oryx.rdf.common.pmml.DecisionForestPMML.java

private static Segment buildTreeModel(DecisionForest forest,
        Map<Integer, BiMap<String, Integer>> columnToCategoryNameToIDMapping,
        MiningFunctionType miningFunctionType, MiningSchema miningSchema, int treeID, DecisionTree tree,
        InboundSettings settings) {

    List<String> columnNames = settings.getColumnNames();
    int targetColumn = settings.getTargetColumn();

    Node root = new Node();
    root.setId("r");

    // Queue<Node> modelNodes = Queues.newArrayDeque();
    Queue<Node> modelNodes = new ArrayDeque<Node>();
    modelNodes.add(root);

    Queue<Pair<TreeNode, Decision>> treeNodes = new ArrayDeque<Pair<TreeNode, Decision>>();
    treeNodes.add(new Pair<TreeNode, Decision>(tree.getRoot(), null));

    while (!treeNodes.isEmpty()) {

        Pair<TreeNode, Decision> treeNodePredicate = treeNodes.remove();
        Node modelNode = modelNodes.remove();

        // This is the decision that got us here from the parent, if any; not the predicate at this node
        Predicate predicate = buildPredicate(treeNodePredicate.getSecond(), columnNames,
                columnToCategoryNameToIDMapping);
        modelNode.setPredicate(predicate);

        TreeNode treeNode = treeNodePredicate.getFirst();
        if (treeNode.isTerminal()) {

            TerminalNode terminalNode = (TerminalNode) treeNode;
            modelNode.setRecordCount((double) terminalNode.getCount());

            Prediction prediction = terminalNode.getPrediction();

            if (prediction.getFeatureType() == FeatureType.CATEGORICAL) {

                Map<Integer, String> categoryIDToName = columnToCategoryNameToIDMapping.get(targetColumn)
                        .inverse();
                CategoricalPrediction categoricalPrediction = (CategoricalPrediction) prediction;
                int[] categoryCounts = categoricalPrediction.getCategoryCounts();
                float[] categoryProbabilities = categoricalPrediction.getCategoryProbabilities();
                for (int categoryID = 0; categoryID < categoryProbabilities.length; categoryID++) {
                    int categoryCount = categoryCounts[categoryID];
                    float probability = categoryProbabilities[categoryID];
                    if (categoryCount > 0 && probability > 0.0f) {
                        String categoryName = categoryIDToName.get(categoryID);
                        ScoreDistribution distribution = new ScoreDistribution(categoryName, categoryCount);
                        distribution.setProbability((double) probability);
                        modelNode.getScoreDistributions().add(distribution);
                    }
                }

            } else {

                NumericPrediction numericPrediction = (NumericPrediction) prediction;
                modelNode.setScore(Float.toString(numericPrediction.getPrediction()));
            }

        } else {

            DecisionNode decisionNode = (DecisionNode) treeNode;
            Decision decision = decisionNode.getDecision();

            Node positiveModelNode = new Node();
            positiveModelNode.setId(modelNode.getId() + '+');
            modelNode.getNodes().add(positiveModelNode);
            Node negativeModelNode = new Node();
            negativeModelNode.setId(modelNode.getId() + '-');
            modelNode.getNodes().add(negativeModelNode);
            modelNode.setDefaultChild(
                    decision.getDefaultDecision() ? positiveModelNode.getId() : negativeModelNode.getId());
            modelNodes.add(positiveModelNode);
            modelNodes.add(negativeModelNode);
            treeNodes.add(new Pair<TreeNode, Decision>(decisionNode.getRight(), decision));
            treeNodes.add(new Pair<TreeNode, Decision>(decisionNode.getLeft(), null));

        }

    }

    TreeModel treeModel = new TreeModel(miningSchema, root, miningFunctionType);
    treeModel.setSplitCharacteristic(TreeModel.SplitCharacteristic.BINARY_SPLIT);
    treeModel.setMissingValueStrategy(MissingValueStrategyType.DEFAULT_CHILD);

    Segment segment = new Segment();
    segment.setId(Integer.toString(treeID));
    segment.setPredicate(new True());
    segment.setModel(treeModel);
    segment.setWeight(forest.getWeights()[treeID]);

    return segment;
}
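
The conversion above advances two ArrayDeque-backed queues in lockstep, so each dequeued tree node is paired with the PMML node being built for it. A hedged sketch of the same twin-queue idiom on a hypothetical binary tree type:

import java.util.ArrayDeque;
import java.util.Queue;

public class TwinQueueSketch {
    static final class Src { String name; Src left, right; }
    static final class Dst { String name; Dst left, right; }

    // Mirrors a Src tree into a Dst tree breadth-first; the two queues stay
    // in lockstep because children are enqueued pairwise.
    static Dst mirror(Src root) {
        Dst out = new Dst();
        Queue<Src> srcQueue = new ArrayDeque<>();
        Queue<Dst> dstQueue = new ArrayDeque<>();
        srcQueue.add(root);
        dstQueue.add(out);
        while (!srcQueue.isEmpty()) {
            Src s = srcQueue.remove();
            Dst d = dstQueue.remove();
            d.name = s.name;
            if (s.left != null) {
                d.left = new Dst();
                srcQueue.add(s.left);
                dstQueue.add(d.left);
            }
            if (s.right != null) {
                d.right = new Dst();
                srcQueue.add(s.right);
                dstQueue.add(d.right);
            }
        }
        return out;
    }

    public static void main(String[] args) {
        Src root = new Src();
        root.name = "root";
        root.left = new Src();
        root.left.name = "L";
        System.out.println(mirror(root).left.name); // L
    }
}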

From source file: org.talend.dataprep.transformation.actions.text.ExtractStringTokens.java

@Override
public void compile(ActionContext context) {
    super.compile(context);
    if (context.getActionStatus() == ActionContext.ActionStatus.OK) {

        final String regex = context.getParameters().get(PARAMETER_REGEX);

        // Validate the regex, and put it in context once for all lines:
        // Check 1: not null or empty
        if (StringUtils.isEmpty(regex)) {
            LOGGER.debug("Empty pattern, action canceled");
            context.setActionStatus(ActionContext.ActionStatus.CANCELED);
            return;
        }
        // Check 2: valid regex
        try {
            context.get(PATTERN, p -> Pattern.compile(regex));
        } catch (PatternSyntaxException e) {
            LOGGER.debug("Invalid pattern {} --> {}, action canceled", regex, e.getMessage(), e);
            context.setActionStatus(ActionContext.ActionStatus.CANCELED);
        }
        // Create result column
        final Map<String, String> parameters = context.getParameters();
        final String columnId = context.getColumnId();

        // create the new columns
        int limit = parameters.get(MODE_PARAMETER).equals(MULTIPLE_COLUMNS_MODE)
                ? Integer.parseInt(parameters.get(LIMIT))
                : 1;

        final RowMetadata rowMetadata = context.getRowMetadata();
        final ColumnMetadata column = rowMetadata.getById(columnId);
        final List<String> newColumns = new ArrayList<>();
        final Deque<String> lastColumnId = new ArrayDeque<>();
        lastColumnId.push(columnId);
        for (int i = 0; i < limit; i++) {
            final int newColumnIndex = i + 1;
            newColumns.add(context.column(column.getName() + APPENDIX + i, r -> {
                final ColumnMetadata c = ColumnMetadata.Builder //
                        .column() //
                        .type(Type.STRING) //
                        .computedId(StringUtils.EMPTY) //
                        .name(column.getName() + APPENDIX + newColumnIndex) //
                        .build();
                lastColumnId.push(rowMetadata.insertAfter(lastColumnId.pop(), c));
                return c;
            }));
        }

    }
}
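
Here the ArrayDeque serves as a one-element stack: each iteration pops the id of the most recently inserted column and pushes the id of the new one, so the new columns are chained in order after the source column. The same push/pop idiom in isolation (a hedged sketch; insertAfter is a hypothetical stand-in for RowMetadata.insertAfter, which returns the id of the newly inserted column):

import java.util.ArrayDeque;
import java.util.Deque;

public class ColumnChainSketch {
    public static void main(String[] args) {
        Deque<String> lastColumnId = new ArrayDeque<>();
        lastColumnId.push("0000");               // the source column
        for (int i = 0; i < 3; i++) {
            // Insert the new column after the one on top of the stack,
            // then remember the new column as the next insertion point.
            String newId = insertAfter(lastColumnId.pop(), "000" + (i + 1));
            lastColumnId.push(newId);
        }
        System.out.println(lastColumnId.peek()); // 0003
    }

    // Hypothetical stand-in for the real metadata insertion.
    private static String insertAfter(String afterId, String newId) {
        System.out.println("insert " + newId + " after " + afterId);
        return newId;
    }
}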

From source file: org.talend.dataquality.semantic.recognizer.DefaultCategoryRecognizer.java

/**
 * For the discovery, if a category c matches the data, all the ancestor
 * categories of c have to match too. This method increments the counts of
 * the ancestor categories of c.
 *
 * @param categories the category result
 * @param id the category ID of the matched category c
 */
private void incrementAncestorsCategories(Set<String> categories, String id) {
    Deque<Pair<String, Integer>> catToSee = new ArrayDeque<>();
    Set<String> catAlreadySeen = new HashSet<>();
    catToSee.add(Pair.of(id, 0));
    Pair<String, Integer> currentCategory;
    while (!catToSee.isEmpty()) {
        currentCategory = catToSee.pop();
        DQCategory dqCategory = crm.getCategoryMetadataById(currentCategory.getLeft());
        if (dqCategory != null && !CollectionUtils.isEmpty(dqCategory.getParents())) {
            int parentLevel = currentCategory.getRight() + 1;
            for (DQCategory parent : dqCategory.getParents()) {
                if (!catAlreadySeen.contains(parent.getId())) {
                    catAlreadySeen.add(parent.getId());
                    catToSee.add(Pair.of(parent.getId(), parentLevel));
                    DQCategory meta = crm.getCategoryMetadataById(parent.getId());
                    if (meta != null) {
                        incrementCategory(meta.getName(), meta.getLabel(), parentLevel);
                        categories.add(meta.getName());
                    }
                }
            }
        }
    }
}
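
The traversal above is a breadth-first walk over the ancestor graph, with a visited set so that shared ancestors are counted only once. Stripped to a hedged sketch over a hypothetical parents map (the level bookkeeping is omitted):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class AncestorWalkSketch {
    public static void main(String[] args) {
        // Hypothetical parent relation: child id -> parent ids.
        Map<String, List<String>> parents = Map.of(
                "c", List.of("b1", "b2"),
                "b1", List.of("a"),
                "b2", List.of("a"));

        Deque<String> toSee = new ArrayDeque<>();
        Set<String> seen = new HashSet<>();
        toSee.add("c");
        while (!toSee.isEmpty()) {
            String current = toSee.pop();
            for (String parent : parents.getOrDefault(current, List.of())) {
                if (seen.add(parent)) {        // visit each ancestor only once
                    toSee.add(parent);
                    System.out.println("increment " + parent);
                }
            }
        }
    }
}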

From source file: androidx.navigation.NavDeepLinkBuilder.java

private void fillInIntent() {
    NavDestination node = null;
    ArrayDeque<NavDestination> possibleDestinations = new ArrayDeque<>();
    possibleDestinations.add(mGraph);
    while (!possibleDestinations.isEmpty() && node == null) {
        NavDestination destination = possibleDestinations.poll();
        if (destination.getId() == mDestId) {
            node = destination;
        } else if (destination instanceof NavGraph) {
            for (NavDestination child : (NavGraph) destination) {
                possibleDestinations.add(child);
            }
        }
    }
    if (node == null) {
        final String dest = NavDestination.getDisplayName(mContext, mDestId);
        throw new IllegalArgumentException(
                "navigation destination " + dest + " is unknown to this NavController");
    }
    mIntent.putExtra(NavController.KEY_DEEP_LINK_IDS, node.buildDeepLinkIds());
}

From source file: gobblin.ingestion.google.webmaster.GoogleWebmasterDataFetcherImpl.java

/**
 * Due to a limitation of the API, we can get at most 5000 rows at a time.
 * Another limitation is that results are sorted by click count, descending;
 * rows with the same click count are ordered arbitrarily. (Read more at
 * https://developers.google.com/webmaster-tools/v3/searchanalytics.) So we
 * fetch all pages by partitions: if a partition comes back with 5000 rows,
 * we split it into more granular partitions.
 */
@Override
public Collection<ProducerJob> getAllPages(String startDate, String endDate, String country, int rowLimit)
        throws IOException {
    if (!_jobs.isEmpty()) {
        log.info("Service got hot started.");
        return _jobs;
    }

    ApiDimensionFilter countryFilter = GoogleWebmasterFilter.countryEqFilter(country);

    List<GoogleWebmasterFilter.Dimension> requestedDimensions = new ArrayList<>();
    requestedDimensions.add(GoogleWebmasterFilter.Dimension.PAGE);

    Collection<String> allPages = _client.getPages(_siteProperty, startDate, endDate, country, rowLimit,
            requestedDimensions, Arrays.asList(countryFilter), 0);
    int actualSize = allPages.size();

    if (rowLimit < GoogleWebmasterClient.API_ROW_LIMIT || actualSize < GoogleWebmasterClient.API_ROW_LIMIT) {
        log.info(String.format("A total of %d pages fetched for property %s at country-%s from %s to %s",
                actualSize, _siteProperty, country, startDate, endDate));
    } else {
        int expectedSize = getPagesSize(startDate, endDate, country, requestedDimensions,
                Arrays.asList(countryFilter));
        log.info(String.format("Total number of pages is %d for market-%s from %s to %s", expectedSize,
                GoogleWebmasterFilter.countryFilterToString(countryFilter), startDate, endDate));
        Queue<Pair<String, FilterOperator>> jobs = new ArrayDeque<>();
        expandJobs(jobs, _siteProperty);

        allPages = getPages(startDate, endDate, requestedDimensions, countryFilter, jobs);
        allPages.add(_siteProperty);
        actualSize = allPages.size();
        if (actualSize != expectedSize) {
            log.warn(String.format("Expected page size for country-%s is %d, but only able to get %d", country,
                    expectedSize, actualSize));
        }
        log.info(String.format("A total of %d pages fetched for property %s at country-%s from %s to %s",
                actualSize, _siteProperty, country, startDate, endDate));
    }

    ArrayDeque<ProducerJob> jobs = new ArrayDeque<>(actualSize);
    for (String page : allPages) {
        jobs.add(new SimpleProducerJob(page, startDate, endDate));
    }
    return jobs;
}

From source file: de.tudarmstadt.ukp.dkpro.core.io.xml.XmlReaderXPath.java

@Override
public void initialize(UimaContext arg0) throws ResourceInitializationException {
    super.initialize(arg0);

    fileIterator = getFileSetIterator();
    XPath xpath = XPathFactory.newInstance().newXPath();
    nodes = new ArrayDeque<Node>();

    if (StringUtils.isWhitespace(rootXPath)) {
        throw new IllegalArgumentException("Illegal root XPath expression. Please provide a valid one.");
    }
    try {
        compiledRootXPath = xpath.compile(rootXPath);
    } catch (XPathExpressionException e) {
        throw new IllegalArgumentException("Illegal root XPath expression. Please provide a valid one.");
    }

    if (docIdTag != null) {
        if (StringUtils.isWhitespace(docIdTag)) {
            throw new IllegalArgumentException("Illegal ID XPath expression. Please provide a valid one.");
        }
        try {
            compiledIdXPath = xpath.compile(docIdTag);
        } catch (XPathExpressionException e) {
            throw new IllegalArgumentException("Illegal ID XPath expression. Please provide a valid one.");
        }
    }

    // Substitution
    if (substituteTags != null && substituteTags.length > 0) {
        if (substituteTags.length % 2 != 0) {
            throw new IllegalArgumentException("Parameter substitute tags must "
                    + "be given in an array of even number of elements, in 'before, after' order");
        }

        useSubstitution = true;
        substitution = new HashMap<String, String>(substituteTags.length);
        for (int i = 0; i < substituteTags.length; i += 2) {
            substitution.put(substituteTags[i], substituteTags[i + 1]);
        }
    }

    processNextFile();
}

From source file: logicProteinHypernetwork.analysis.reactions.ComplexMultigraph.java

private void subtract(Set<Interaction> impossible, int vertex) throws NotConnectedException {
    Set<Integer> visited = new HashSet<Integer>();
    Deque<Integer> todo = new ArrayDeque<Integer>();

    todo.add(vertex);
    while (!todo.isEmpty()) {
        int u = todo.remove();
        visited.add(u);

        for (int e : getIncidentEdges(u)) {
            Interaction i = edgesToInteractions.get(e);
            if (impossible.contains(i)) {
                removeEdge(e);
                edgesToInteractions.remove(e);
                interactionToEdges.remove(i, e);
                impossible.remove(i);
            }
        }

        for (int v : getNeighbors(u)) {
            todo.add(v);
        }
    }

    if (visited.size() < getVertexCount()) {
        throw new NotConnectedException();
    }
}

From source file: org.commonjava.maven.ext.io.rest.DefaultTranslator.java

/**
 * Translate the versions.
 * <pre>{@code
 * [ {
 *     "groupId": "com.google.guava",
 *     "artifactId": "guava",
 *     "version": "13.0.1"
 * } ]
 * }</pre>
 * This equates to a List of ProjectVersionRef.
 *
 * <pre>{@code
 * {
 *     "productNames": [],
 *     "productVersionIds": [],
 *     "repositoryGroup": "",
 *     "gavs": [
 *     {
 *         "groupId": "com.google.guava",
 *         "artifactId": "guava",
 *         "version": "13.0.1"
 *     } ]
 * }
 * }</pre>
 * There may be a lot of them, possibly causing timeouts or other issues.
 * This is mitigated by splitting them into smaller chunks when an error occurs and retrying.
 */
public Map<ProjectVersionRef, String> translateVersions(List<ProjectVersionRef> projects) {
    init(rgm);

    final Map<ProjectVersionRef, String> result = new HashMap<>();
    final Queue<Task> queue = new ArrayDeque<>();
    if (initialRestMaxSize != 0) {
        // Presplit
        final List<List<ProjectVersionRef>> partition = ListUtils.partition(projects, initialRestMaxSize);
        for (List<ProjectVersionRef> p : partition) {
            queue.add(new Task(rgm, p, endpointUrl + REPORTS_LOOKUP_GAVS));
        }
        logger.debug("For initial sizing of {} have split the queue into {} ", initialRestMaxSize,
                queue.size());
    } else {
        queue.add(new Task(rgm, projects, endpointUrl + REPORTS_LOOKUP_GAVS));
    }

    while (!queue.isEmpty()) {
        Task task = queue.remove();
        task.executeTranslate();
        if (task.isSuccess()) {
            result.putAll(task.getResult());
        } else {
            if (task.canSplit() && task.getStatus() == 504) {
                List<Task> tasks = task.split();

                logger.warn(
                        "Failed to translate versions for task @{} due to {}, splitting and retrying. Chunk size was: {} and new chunk size {} in {} segments.",
                        task.hashCode(), task.getStatus(), task.getChunkSize(), tasks.get(0).getChunkSize(),
                        tasks.size());
                queue.addAll(tasks);
            } else {
                if (task.getStatus() < 0) {
                    logger.debug("Caught exception calling server with message {}", task.getErrorMessage());
                } else {
                    logger.debug("Did not get status {} but received {}", SC_OK, task.getStatus());
                }

                if (task.getStatus() > 0) {
                    throw new RestException("Received response status " + task.getStatus() + " with message: "
                            + task.getErrorMessage());
                } else {
                    throw new RestException("Received response status " + task.getStatus() + " with message "
                            + task.getErrorMessage());
                }
            }
        }
    }
    return result;
}

From source file: com.couchbase.client.core.endpoint.view.ViewHandlerTest.java

@Before
@SuppressWarnings("unchecked")
public void setup() {
    responseBuffer = new Disruptor<ResponseEvent>(new EventFactory<ResponseEvent>() {
        @Override
        public ResponseEvent newInstance() {
            return new ResponseEvent();
        }
    }, 1024, Executors.newCachedThreadPool());

    firedEvents = Collections.synchronizedList(new ArrayList<CouchbaseMessage>());
    latch = new CountDownLatch(1);
    responseBuffer.handleEventsWith(new EventHandler<ResponseEvent>() {
        @Override
        public void onEvent(ResponseEvent event, long sequence, boolean endOfBatch) throws Exception {
            firedEvents.add(event.getMessage());
            latch.countDown();
        }
    });
    responseRingBuffer = responseBuffer.start();

    CoreEnvironment environment = mock(CoreEnvironment.class);
    when(environment.scheduler()).thenReturn(Schedulers.computation());
    when(environment.maxRequestLifetime()).thenReturn(10000L); // 10 seconds
    when(environment.autoreleaseAfter()).thenReturn(2000L);
    endpoint = mock(AbstractEndpoint.class);
    when(endpoint.environment()).thenReturn(environment);
    when(environment.userAgent()).thenReturn("Couchbase Client Mock");

    queue = new ArrayDeque<ViewRequest>();
    handler = new ViewHandler(endpoint, responseRingBuffer, queue, false);
    channel = new EmbeddedChannel(handler);
}