Example usage for java.util Queue add

List of usage examples for java.util Queue add

Introduction

On this page you can find example usage for java.util.Queue.add.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
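
To make this contract concrete, here is a minimal, self-contained sketch (not drawn from the sources below) using ArrayBlockingQueue as a capacity-restricted implementation: add throws IllegalStateException when the queue is full, while offer returns false instead.

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class QueueAddDemo {
    public static void main(String[] args) {
        // a capacity-restricted queue with room for two elements
        Queue<String> queue = new ArrayBlockingQueue<>(2);

        System.out.println(queue.add("a")); // true
        System.out.println(queue.add("b")); // true

        try {
            queue.add("c"); // no space currently available
        } catch (IllegalStateException e) {
            System.out.println("add failed: " + e.getMessage()); // add failed: Queue full
        }

        // offer is the non-throwing alternative for bounded queues
        System.out.println(queue.offer("c")); // false
    }
}

Unbounded implementations such as LinkedList and ArrayDeque, which appear throughout the examples below, can never run out of capacity, so for them add behaves like offer and always returns true.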

Usage

From source file:com.baifendian.swordfish.common.utils.graph.Graph.java

/**
 * Checks whether the graph is connected, ignoring edge direction:
 * 1) an empty graph is considered connected;
 * 2) otherwise, a BFS from an arbitrary vertex must reach every vertex.
 *
 * @return true if the graph is connected, false otherwise
 */
public synchronized boolean isConnected() {
    Queue<VK> q = new LinkedList<>();
    Set<VK> hasVisited = new HashSet<>();

    // iterate over the vertices
    Iterator<Map.Entry<VK, VD>> iter = vertices.entrySet().iterator();

    // an empty graph is considered connected
    if (!iter.hasNext()) {
        return true;
    }

    // start the BFS from an arbitrary vertex
    Map.Entry<VK, VD> entry = iter.next();

    VK startKey = entry.getKey();

    q.add(startKey);
    hasVisited.add(startKey);

    while (!q.isEmpty()) {
        VK key = q.poll();

        for (VK postKey : getPostNode(key)) {
            if (!hasVisited.contains(postKey)) {
                q.add(postKey);
                hasVisited.add(postKey);
            }
        }

        for (VK preKey : getPreNode(key)) {
            if (!hasVisited.contains(preKey)) {
                q.add(preKey);
                hasVisited.add(preKey);
            }
        }
    }

    return hasVisited.size() == getVertexNumber();
}
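
The add/poll pairing above is the standard BFS idiom. For comparison, here is a minimal, self-contained sketch of the same connectivity check over a plain adjacency map (a hypothetical structure, not the Graph class from this source), assuming the usual java.util imports and that each undirected edge appears in both adjacency lists; it uses the boolean result of Set.add to fold the visited check into a single call.

public boolean isConnected(Map<String, List<String>> adjacency) {
    // an empty graph is considered connected
    if (adjacency.isEmpty()) {
        return true;
    }

    Queue<String> queue = new LinkedList<>();
    Set<String> visited = new HashSet<>();

    // start the BFS from an arbitrary vertex
    String start = adjacency.keySet().iterator().next();
    queue.add(start);
    visited.add(start);

    while (!queue.isEmpty()) {
        String current = queue.poll();
        for (String neighbor : adjacency.getOrDefault(current, Collections.emptyList())) {
            if (visited.add(neighbor)) { // true only on the first visit
                queue.add(neighbor);
            }
        }
    }

    return visited.size() == adjacency.size();
}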

From source file:org.jboss.errai.ioc.rebind.ioc.graph.impl.DependencyGraphBuilderImpl.java

private void processResolutionQueue(final Queue<AbstractInjectable> resolutionQueue,
        final Multimap<ResolutionPriority, ConcreteInjectable> resolvedByPriority) {
    do {
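        // note: poll() runs before the loop condition is checked, so callers must pass a non-empty queue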
        final AbstractInjectable cur = resolutionQueue.poll();
        for (final BaseInjectable link : cur.linked) {
            if (link instanceof AbstractInjectable) {
                resolutionQueue.add((AbstractInjectable) link);
            } else if (link instanceof ConcreteInjectable) {
                resolvedByPriority.put(getMatchingPriority(link), (ConcreteInjectable) link);
            }
        }
    } while (resolutionQueue.size() > 0);
}

From source file:edu.uci.ics.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.java

public void init() throws HyracksDataException {
    Map<ActivityId, IOperatorNodePushable> startOperatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();
    Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue = new LinkedList<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
    List<IConnectorDescriptor> outputConnectors = null;

    /**
     * Set up the source operators
     */
    for (Entry<ActivityId, IActivity> entry : startActivities.entrySet()) {
        IOperatorNodePushable opPushable = entry.getValue().createPushRuntime(ctx, recordDescProvider,
                partition, nPartitions);
        startOperatorNodePushables.put(entry.getKey(), opPushable);
        operatprNodePushablesBFSOrder.add(opPushable);
        operatorNodePushables.put(entry.getKey(), opPushable);
        inputArity += opPushable.getInputArity();
        outputConnectors = parent.getActivityOutputMap().get(entry.getKey());
        if (outputConnectors != null) {
            for (IConnectorDescriptor conn : outputConnectors) {
                childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
            }
        }
    }

    /**
     * Use BFS (breadth-first search) to construct the runtime execution
     * DAG.
     */
    while (childQueue.size() > 0) {
        /**
         * construct the source to destination information
         */
        Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> channel = childQueue.poll();
        ActivityId sourceId = channel.getLeft().getLeft().getActivityId();
        int outputChannel = channel.getLeft().getRight();
        ActivityId destId = channel.getRight().getLeft().getActivityId();
        int inputChannel = channel.getRight().getRight();
        IOperatorNodePushable sourceOp = operatorNodePushables.get(sourceId);
        IOperatorNodePushable destOp = operatorNodePushables.get(destId);
        if (destOp == null) {
            destOp = channel.getRight().getLeft().createPushRuntime(ctx, recordDescProvider, partition,
                    nPartitions);
            operatprNodePushablesBFSOrder.add(destOp);
            operatorNodePushables.put(destId, destOp);
        }

        /**
         * construct the dataflow connection from a producer to a consumer
         */
        sourceOp.setOutputFrameWriter(outputChannel, destOp.getInputFrameWriter(inputChannel),
                recordDescProvider.getInputRecordDescriptor(destId, inputChannel));

        /**
         * traverse to the child of the current activity
         */
        outputConnectors = parent.getActivityOutputMap().get(destId);

        /**
         * extend the executing activities further downstream
         * (done after the poll so the last activity's connectors are not dropped
         * and the start activities' connectors are not re-enqueued)
         */
        if (outputConnectors != null && outputConnectors.size() > 0) {
            for (IConnectorDescriptor conn : outputConnectors) {
                if (conn != null) {
                    childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
                }
            }
        }
    }
}

From source file:org.apache.hama.bsp.ResourceManager.java

private void useOffer(SchedulerDriver schedulerDriver, Offer offer) {
    log.debug("Received offer From: " + offer.getHostname());

    String host = offer.getHostname();

    ResourceOffer ro = new ResourceOffer(offer);
    int maxSlots = ro.getMaxSlots();
    if (maxSlots == 0) {
        schedulerDriver.declineOffer(offer.getId());
        return;
    }

    java.util.Queue<TaskInProgress> tasks = new LinkedList<TaskInProgress>();

    while (tasks.size() < maxSlots) {
        TaskInProgress tip = null;
        if (tasksToRunByGroom.get(host) != null) {
            tip = tasksToRunByGroom.get(host).poll();
        }

        if (tip == null) {
            tip = tasksToRunByGroom.get(anyGroomServer).poll();
            if (tip == null) {
                if (tasks.isEmpty()) {
                    schedulerDriver.declineOffer(offer.getId());
                }

                break;
            }
        }
        if (executingTasks.contains(tip)) {
            continue;
        }

        executingTasks.add(tip);
        tasksToRun.remove(tip);

        tasks.add(tip);

        log.debug("Found offer for: " + tip.getTaskId());
    }

    if (!tasks.isEmpty()) {
        launchTasks(schedulerDriver, tasks, ro);
    }
}

From source file:org.apache.streams.riak.binary.RiakBinaryPersistReader.java

@Override
public synchronized StreamsResultSet readAll() {

    Queue<StreamsDatum> readAllQueue = constructQueue();

    Namespace ns = new Namespace(configuration.getDefaultBucketType(), configuration.getDefaultBucket());

    ListKeys lk = new ListKeys.Builder(ns).build();

    ListKeys.Response listKeysResponse = null;
    try {
        listKeysResponse = client.client().execute(lk);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }

    MultiFetch multiFetch = new MultiFetch.Builder().addLocations(listKeysResponse).build();
    MultiFetch.Response multiFetchResponse = null;
    try {
        multiFetchResponse = client.client().execute(multiFetch);
    } catch (ExecutionException e) {
        e.printStackTrace();
        return null;
    } catch (InterruptedException e) {
        e.printStackTrace();
        return null;
    }

    for (RiakFuture<FetchValue.Response, Location> f : multiFetchResponse) {
        try {
            FetchValue.Response response = f.get();
            readAllQueue
                    .add(new StreamsDatum(response.getValue(String.class), f.getQueryInfo().getKeyAsString()));
        } catch (ExecutionException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    return new StreamsResultSet(readAllQueue);
}

From source file:org.apache.falcon.resource.AbstractInstanceManager.java

private LineageGraphResult triage(EntityType entityType, Entity entity, String instanceTime, Cluster cluster)
        throws FalconException {

    Date instanceDate = SchemaHelper.parseDateUTC(instanceTime);
    LineageGraphResult result = new LineageGraphResult();
    Set<String> vertices = new HashSet<>();
    Set<LineageGraphResult.Edge> edges = new HashSet<>();
    Map<String, String> instanceStatusMap = new HashMap<>();

    // queue containing all instances which need to be triaged
    Queue<SchedulableEntityInstance> remainingInstances = new LinkedList<>();
    SchedulableEntityInstance currentInstance = new SchedulableEntityInstance(entity.getName(),
            cluster.getName(), instanceDate, entityType);
    remainingInstances.add(currentInstance);

    while (!remainingInstances.isEmpty()) {
        currentInstance = remainingInstances.remove();
        if (currentInstance.getEntityType() == EntityType.FEED) {
            Feed feed = ConfigurationStore.get().get(EntityType.FEED, currentInstance.getEntityName());
            FeedInstanceStatus.AvailabilityStatus status = getFeedInstanceStatus(feed,
                    currentInstance.getInstanceTime(), cluster);

            // add vertex to the graph
            vertices.add(currentInstance.toString());
            instanceStatusMap.put(currentInstance.toString(), "[" + status.name() + "]");
            if (status == FeedInstanceStatus.AvailabilityStatus.AVAILABLE) {
                continue;
            }

            // find producer process instance and add it to the queue
            SchedulableEntityInstance producerInstance = FeedHelper.getProducerInstance(feed,
                    currentInstance.getInstanceTime(), cluster);
            if (producerInstance != null) {
                remainingInstances.add(producerInstance);

                //add edge from producerProcessInstance to the feedInstance
                LineageGraphResult.Edge edge = new LineageGraphResult.Edge(producerInstance.toString(),
                        currentInstance.toString(), "produces");
                edges.add(edge);
            }
        } else { // entity type is PROCESS
            Process process = ConfigurationStore.get().get(EntityType.PROCESS, currentInstance.getEntityName());
            InstancesResult.WorkflowStatus status = getProcessInstanceStatus(process,
                    currentInstance.getInstanceTime());

            // add current process instance as a vertex
            vertices.add(currentInstance.toString());
            if (status == null) {
                instanceStatusMap.put(currentInstance.toString(), "[ Not Available ]");
            } else {
                instanceStatusMap.put(currentInstance.toString(), "[" + status.name() + "]");
                if (status == InstancesResult.WorkflowStatus.SUCCEEDED) {
                    continue;
                }
            }

            // find list of input feed instances - only mandatory ones and not optional ones
            Set<SchedulableEntityInstance> inputFeedInstances = ProcessHelper.getInputFeedInstances(process,
                    currentInstance.getInstanceTime(), cluster, false);
            for (SchedulableEntityInstance inputFeedInstance : inputFeedInstances) {
                remainingInstances.add(inputFeedInstance);

                //Add edge from inputFeedInstance to consumer processInstance
                LineageGraphResult.Edge edge = new LineageGraphResult.Edge(inputFeedInstance.toString(),
                        currentInstance.toString(), "consumed by");
                edges.add(edge);
            }
        }
    }

    // append status to each vertex
    Set<String> relabeledVertices = new HashSet<>();
    for (String instance : vertices) {
        String status = instanceStatusMap.get(instance);
        relabeledVertices.add(instance + status);
    }

    // append status to each edge
    for (LineageGraphResult.Edge edge : edges) {
        String oldTo = edge.getTo();
        String oldFrom = edge.getFrom();

        String newFrom = oldFrom + instanceStatusMap.get(oldFrom);
        String newTo = oldTo + instanceStatusMap.get(oldTo);

        edge.setFrom(newFrom);
        edge.setTo(newTo);
    }

    result.setEdges(edges.toArray(new LineageGraphResult.Edge[0]));
    result.setVertices(relabeledVertices.toArray(new String[0]));
    return result;
}
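
A small API note on the loop above: remainingInstances.remove() throws NoSuchElementException on an empty queue, whereas poll() would return null; the isEmpty() guard makes either safe here, mirroring how add() pairs with offer() on the insertion side.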

From source file:gobblin.ingestion.google.webmaster.GoogleWebmasterDataFetcherImpl.java

/**
 * Due to a limitation of the API, we can get a maximum of 5000 rows at a time. Another limitation is that results are sorted by click count descending; if two rows have the same click count, they are sorted in an arbitrary way. (Read more at https://developers.google.com/webmaster-tools/v3/searchanalytics.) So we try to get all pages by partitions: if a partition has 5000 rows returned, we partition it into more granular levels.
 */
@Override
public Collection<ProducerJob> getAllPages(String startDate, String endDate, String country, int rowLimit)
        throws IOException {
    if (!_jobs.isEmpty()) {
        log.info("Service got hot started.");
        return _jobs;
    }

    ApiDimensionFilter countryFilter = GoogleWebmasterFilter.countryEqFilter(country);

    List<GoogleWebmasterFilter.Dimension> requestedDimensions = new ArrayList<>();
    requestedDimensions.add(GoogleWebmasterFilter.Dimension.PAGE);

    Collection<String> allPages = _client.getPages(_siteProperty, startDate, endDate, country, rowLimit,
            requestedDimensions, Arrays.asList(countryFilter), 0);
    int actualSize = allPages.size();

    if (rowLimit < GoogleWebmasterClient.API_ROW_LIMIT || actualSize < GoogleWebmasterClient.API_ROW_LIMIT) {
        log.info(String.format("A total of %d pages fetched for property %s at country-%s from %s to %s",
                actualSize, _siteProperty, country, startDate, endDate));
    } else {
        int expectedSize = getPagesSize(startDate, endDate, country, requestedDimensions,
                Arrays.asList(countryFilter));
        log.info(String.format("Total number of pages is %d for market-%s from %s to %s", expectedSize,
                GoogleWebmasterFilter.countryFilterToString(countryFilter), startDate, endDate));
        Queue<Pair<String, FilterOperator>> jobs = new ArrayDeque<>();
        expandJobs(jobs, _siteProperty);

        allPages = getPages(startDate, endDate, requestedDimensions, countryFilter, jobs);
        allPages.add(_siteProperty);
        actualSize = allPages.size();
        if (actualSize != expectedSize) {
            log.warn(String.format("Expected page size for country-%s is %d, but only able to get %d", country,
                    expectedSize, actualSize));
        }
        log.info(String.format("A total of %d pages fetched for property %s at country-%s from %s to %s",
                actualSize, _siteProperty, country, startDate, endDate));
    }

    ArrayDeque<ProducerJob> jobs = new ArrayDeque<>(actualSize);
    for (String page : allPages) {
        jobs.add(new SimpleProducerJob(page, startDate, endDate));
    }
    return jobs;
}
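
Note the two implementations in play here: both ArrayDeque and LinkedList are unbounded, so add never throws IllegalStateException for lack of capacity; ArrayDeque, however, rejects null elements with a NullPointerException, which LinkedList would accept.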

From source file:org.aliuge.crawler.extractor.selector.IFConditions.java

/**
 * Evaluates the condition expression against the extracted content.
 *
 * @param selectContent the extracted content to test the conditions against
 * @return true if the condition expression holds, false otherwise
 */
public boolean test(Map<String, Object> selectContent) throws ExtractException {
    TreeMap<Integer, String> conIndex = Maps.newTreeMap();
    Queue<SimpleExpression> expressionQueue = Queues.newArrayDeque();
    Queue<String> logicQueue = Queues.newArrayDeque();
    // e.g. a=b and c=d or c=e or x=y
    int index = 0;
    for (String co : cond) {
        index = 0;
        while ((index = conditions.indexOf(co, index + 1)) > -1) {
            int i = index;
            conIndex.put(i, co);
        }
    }
    index = 0;
    for (Entry<Integer, String> entry : conIndex.entrySet()) {
        String subExp = conditions.substring(index, entry.getKey());
        for (String op : operations) {
            int i = subExp.indexOf(op);
            if (i > -1) {
                String[] ss = subExp.split(op);
                if (null == selectContent.get(ss[0].trim())) {
                    throw new ExtractException("No extracted content found for condition ["
                            + this.conditions + "] key: " + ss[0]);
                }
                expressionQueue
                        .add(new SimpleExpression(StringUtils.trim((String) selectContent.get(ss[0].trim())),
                                StringUtils.trim(ss[1]), op));
                logicQueue.add(StringUtils.trim(entry.getValue()));
            }
        }
        index = entry.getKey() + entry.getValue().length();
    }
    // handle the remainder after the last logical operator
    String subExp = conditions.substring(index);
    for (String op : operations) {
        int i = subExp.indexOf(op);
        if (i > -1) {
            String[] ss = subExp.split(op);
            if (null == selectContent.get(ss[0].trim())) {
                throw new ExtractException("No extracted content found for condition [" + this.conditions
                        + "] key: " + ss[0]);
            }
            expressionQueue.add(new SimpleExpression(StringUtils.trim((String) selectContent.get(ss[0].trim())),
                    StringUtils.trim(ss[1]), op));
        }
    }
    boolean b;
    try {
        b = expressionQueue.poll().test();
        while (!expressionQueue.isEmpty()) {
            b = cacl(b, logicQueue.poll(), expressionQueue.poll());
        }
        return b;
    } catch (Exception e) {
        e.printStackTrace();
    }

    return false;
}
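
Queues.newArrayDeque() is Guava's factory method for java.util.ArrayDeque, so both expressionQueue and logicQueue are unbounded and their add calls cannot fail for capacity; the queues are then drained in the same FIFO order in which the sub-expressions were parsed.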

From source file:it.geosolutions.geobatch.unredd.script.ingestion.IngestionAction.java

/**
 * Main loop on input files.
 * Single-file processing is delegated to execute(File inputZipFile).
 */
public Queue<FileSystemEvent> execute(Queue<FileSystemEvent> events) throws ActionException {

    final Queue<FileSystemEvent> ret = new LinkedList<FileSystemEvent>();
    LOGGER.warn("Ingestion flow running");

    while (!events.isEmpty()) {
        final FileSystemEvent ev = events.remove();

        try {
            if (ev != null) {
                if (LOGGER.isTraceEnabled()) {
                    LOGGER.trace("Processing incoming event: " + ev.getSource());
                }

                File inputZipFile = ev.getSource(); // this is the input zip file
                File out = execute(inputZipFile);
                ret.add(new FileSystemEvent(out, FileSystemEventType.FILE_ADDED));

            } else {
                LOGGER.error("NULL event: skipping...");
                continue;
            }

        } catch (ActionException ex) { // ActionEx have already been processed
            LOGGER.error(ex.getMessage(), ex);
            throw ex;

        } catch (Exception ex) {
            final String message = "GeostoreAction.execute(): Unable to produce the output: "
                    + ex.getLocalizedMessage();
            LOGGER.error(message, ex);
            throw new ActionException(this, message);
        }
    }

    return ret;
}

From source file:org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.java

private void init() throws HyracksDataException {
    Map<ActivityId, IOperatorNodePushable> startOperatorNodePushables = new HashMap<ActivityId, IOperatorNodePushable>();
    Queue<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> childQueue = new LinkedList<Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
    List<IConnectorDescriptor> outputConnectors = null;

    /**
     * Set up the source operators
     */
    for (Entry<ActivityId, IActivity> entry : startActivities.entrySet()) {
        IOperatorNodePushable opPushable = entry.getValue().createPushRuntime(ctx, recordDescProvider,
                partition, nPartitions);
        startOperatorNodePushables.put(entry.getKey(), opPushable);
        operatorNodePushablesBFSOrder.add(opPushable);
        operatorNodePushables.put(entry.getKey(), opPushable);
        inputArity += opPushable.getInputArity();
        outputConnectors = parent.getActivityOutputMap().get(entry.getKey());
        if (outputConnectors != null) {
            for (IConnectorDescriptor conn : outputConnectors) {
                childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
            }
        }
    }

    /**
     * Use BFS (breadth-first search) to construct the runtime execution
     * DAG.
     */
    while (childQueue.size() > 0) {
        /**
         * construct the source to destination information
         */
        Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> channel = childQueue.poll();
        ActivityId sourceId = channel.getLeft().getLeft().getActivityId();
        int outputChannel = channel.getLeft().getRight();
        ActivityId destId = channel.getRight().getLeft().getActivityId();
        int inputChannel = channel.getRight().getRight();
        IOperatorNodePushable sourceOp = operatorNodePushables.get(sourceId);
        IOperatorNodePushable destOp = operatorNodePushables.get(destId);
        if (destOp == null) {
            destOp = channel.getRight().getLeft().createPushRuntime(ctx, recordDescProvider, partition,
                    nPartitions);
            operatorNodePushablesBFSOrder.add(destOp);
            operatorNodePushables.put(destId, destOp);
        }

        /**
         * construct the dataflow connection from a producer to a consumer
         */
        sourceOp.setOutputFrameWriter(outputChannel, destOp.getInputFrameWriter(inputChannel),
                recordDescProvider.getInputRecordDescriptor(destId, inputChannel));

        /**
         * traverse to the child of the current activity
         */
        outputConnectors = parent.getActivityOutputMap().get(destId);

        /**
         * extend the executing activities further downstream
         */
        if (outputConnectors != null && outputConnectors.size() > 0) {
            for (IConnectorDescriptor conn : outputConnectors) {
                if (conn != null) {
                    childQueue.add(parent.getConnectorActivityMap().get(conn.getConnectorId()));
                }
            }
        }
    }

    // Sets the startedInitialization flags to be false.
    startedInitialization = new boolean[operatorNodePushablesBFSOrder.size()];
    Arrays.fill(startedInitialization, false);
}