Example usage for java.util Queue poll

List of usage examples for java.util Queue poll

Introduction

On this page you can find example usage for java.util.Queue.poll().

Prototype

E poll();

Document

Retrieves and removes the head of this queue, or returns null if this queue is empty.
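
A minimal, self-contained sketch (not taken from the usage examples below; the class name is illustrative) showing this contract: poll() removes and returns the head of the queue, and returns null rather than throwing once the queue is empty.

import java.util.ArrayDeque;
import java.util.Queue;

public class QueuePollExample {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<>();
        queue.add("first");
        queue.add("second");

        // Retrieves and removes the head of this queue.
        System.out.println(queue.poll()); // "first"
        System.out.println(queue.poll()); // "second"

        // Unlike remove(), poll() does not throw NoSuchElementException on an
        // empty queue; it returns null instead.
        System.out.println(queue.poll()); // null
    }
}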

Usage

From source file:com.googlecode.concurrentlinkedhashmap.MultiThreadedTest.java

@Test(dataProvider = "builder")
public void weightedConcurrency(Builder<Integer, List<Integer>> builder) {
    final ConcurrentLinkedHashMap<Integer, List<Integer>> map = builder.weigher(Weighers.<Integer>list())
            .maximumWeightedCapacity(threads).concurrencyLevel(threads).build();
    final Queue<List<Integer>> values = new ConcurrentLinkedQueue<List<Integer>>();
    for (int i = 1; i <= threads; i++) {
        Integer[] array = new Integer[i];
        Arrays.fill(array, Integer.MIN_VALUE);
        values.add(Arrays.asList(array));
    }
    executeWithTimeOut(map, new Callable<Long>() {
        @Override
        public Long call() throws Exception {
            return timeTasks(threads, new Runnable() {
                @Override
                public void run() {
                    List<Integer> value = values.poll();
                    for (int i = 0; i < iterations; i++) {
                        map.put(i % 10, value);
                    }
                }
            });
        }
    });
}

From source file:info.raack.appliancelabeler.machinelearning.appliancedetection.algorithms.NaiveStateTransitionDetectionAlgorithm.java

public AlgorithmPredictions algorithmCalculateApplianceEnergyUsePredictions(EnergyMonitor energyMonitor,
        Queue<EnergyTimestep> originTimesteps, ItemReader<SecondData> dataReader) {
    AlgorithmPredictions algorithmPredictions = new AlgorithmPredictions();

    Map<UserAppliance, List<EnergyTimestep>> applianceTimesteps = new HashMap<UserAppliance, List<EnergyTimestep>>();

    List<UserAppliance> apps = database.getUserAppliancesForAlgorithmForEnergyMonitor(energyMonitor, getId());

    Map<UserAppliance, Double> currentTimestepEnergyConsumption = new HashMap<UserAppliance, Double>();

    for (UserAppliance appliance : apps) {
        currentTimestepEnergyConsumption.put(appliance, 0d);
        applianceTimesteps.put(appliance, new ArrayList<EnergyTimestep>());
    }

    EnergyTimestep currentTimestep = originTimesteps.poll();

    // ASSUMPTION - measurements are in chronological order
    if (apps.size() > 0) {

        long currentTimestepEndTime = currentTimestep.getEndTime().getTime();

        // for each second in the measurement list
        int number = 0;

        try {
            for (SecondData secondData = dataReader.read(); secondData != null; secondData = dataReader
                    .read()) {
                long dateLong = secondData.getCalLong();

                while (dateLong > currentTimestepEndTime) {
                    //logger.debug("End of timestep " + currentTimestep.getEndTime() + "; getting next timestamp");

                    // get new timestep
                    currentTimestep = originTimesteps.poll();

                    // need to check to see if the current timestep is not null - we won't process up to the very last second, as some will run over the last full 5 minute block
                    if (currentTimestep == null) {
                        // done!
                        break;
                    } else {
                        currentTimestepEndTime = currentTimestep.getEndTime().getTime();
                    }
                }

                //logger.debug("Current second: " + new Date(dateLong) + " (" + dateLong + ")");
                // power during this second, watts, multiplied by the amount of time, 1 second, is one 1 watt second (one joule)
                double consumedEnergy = secondData.getPower();

                int relativeAmounts[] = new int[apps.size()];

                int total = 0;
                for (int i = 0; i < apps.size(); i++) {
                    // randomly assign energy usage to each appliance - don't even attempt to do any actual prediction
                    relativeAmounts[i] = (int) (Math.random() * 100f);
                    total += relativeAmounts[i];
                }

                for (int i = 0; i < apps.size(); i++) {
                    // compute percentage
                    double previousConsumption = currentTimestepEnergyConsumption.get(apps.get(i));
                    double newConsumption = consumedEnergy * ((double) relativeAmounts[i] / (double) total);

                    currentTimestepEnergyConsumption.put(apps.get(i), previousConsumption + newConsumption);
                }

                //logger.debug("Current millis: " + date.getTime() + "; ending millis: " + currentTimestep.getEndTime().getTime());

                if (dateLong == currentTimestepEndTime) {
                    //logger.debug("End of timestep; closing energy measurement");
                    // save current energy consumption in this timestep and reset counter
                    for (UserAppliance appliance : apps) {
                        if (currentTimestepEnergyConsumption.get(appliance) > 0) {
                            EnergyTimestep step = currentTimestep.copyWithoutEnergyOrAppliance();

                            step.setEnergyConsumed(currentTimestepEnergyConsumption.get(appliance));
                            step.setUserAppliance(appliance);
                            applianceTimesteps.get(appliance).add(step);
                        }

                        currentTimestepEnergyConsumption.put(appliance, 0d);
                    }

                    // get new timestep
                    currentTimestep = originTimesteps.poll();

                    // need to check to see if the current timestep is not null - we won't process up to the very last second, as some will run over the last full 5 minute block
                    if (currentTimestep == null) {
                        // done!
                        break;
                    } else {
                        currentTimestepEndTime = currentTimestep.getEndTime().getTime();
                    }
                }
            }
        } catch (Exception e) {
            throw new RuntimeException("Could not train naive algorithm", e);
        }
    }

    algorithmPredictions.setEnergyTimesteps(applianceTimesteps);
    algorithmPredictions.setStateTransitions(new ArrayList<ApplianceStateTransition>());

    return algorithmPredictions;
}

From source file:org.gradle.model.internal.method.WeaklyTypeReferencingMethod.java

private Method findMethod(Class<?> clazz, Class<?>[] paramTypes) {
    Set<Class<?>> seenInterfaces = null;
    Queue<Class<?>> queue = null;
    Class<?> type = clazz;
    while (type != null) {
        for (Method method : type.getDeclaredMethods()) {
            if (method.getName().equals(name) && Arrays.equals(paramTypes, method.getParameterTypes())) {
                return method;
            }
        }

        if (queue == null) {
            queue = new ArrayDeque<Class<?>>();
            seenInterfaces = Sets.newHashSet();
        }

        Class<?> superclass = type.getSuperclass();
        if (superclass != null) {
            queue.add(superclass);
        }
        for (Class<?> iface : type.getInterfaces()) {
            if (seenInterfaces.add(iface)) {
                queue.add(iface);
            }
        }

        type = queue.poll();
    }

    throw new org.gradle.internal.reflect.NoSuchMethodException(
            String.format("Could not find method %s(%s) on %s.", name, Joiner.on(", ").join(paramTypes),
                    this.target.getRawClass().getSimpleName()));
}

From source file:password.pwm.util.report.ReportService.java

private void updateCacheFromLdap() throws ChaiUnavailableException, ChaiOperationException,
        PwmOperationalException, PwmUnrecoverableException {
    LOGGER.debug(PwmConstants.REPORTING_SESSION_LABEL,
            "beginning process to updating user cache records from ldap");
    if (status != STATUS.OPEN) {
        return;
    }
    cancelFlag = false;
    reportStatus = new ReportStatusInfo(settings.getSettingsHash());
    reportStatus.setInProgress(true);
    reportStatus.setStartDate(new Date());
    try {
        final Queue<UserIdentity> allUsers = new LinkedList<>(getListOfUsers());
        reportStatus.setTotal(allUsers.size());
        while (status == STATUS.OPEN && !allUsers.isEmpty() && !cancelFlag) {
            final long startUpdateTime = System.currentTimeMillis();
            final UserIdentity userIdentity = allUsers.poll();
            try {
                if (updateCache(userIdentity)) {
                    reportStatus.setUpdated(reportStatus.getUpdated() + 1);
                }
            } catch (Exception e) {
                String errorMsg = "error while updating report cache for " + userIdentity.toString()
                        + ", cause: ";
                errorMsg += e instanceof PwmException ? ((PwmException) e).getErrorInformation().toDebugStr()
                        : e.getMessage();
                final ErrorInformation errorInformation;
                errorInformation = new ErrorInformation(PwmError.ERROR_REPORTING_ERROR, errorMsg);
                LOGGER.error(PwmConstants.REPORTING_SESSION_LABEL, errorInformation.toDebugStr());
                reportStatus.setLastError(errorInformation);
                reportStatus.setErrors(reportStatus.getErrors() + 1);
            }
            reportStatus.setCount(reportStatus.getCount() + 1);
            reportStatus.getEventRateMeter().markEvents(1);
            final long totalUpdateTime = System.currentTimeMillis() - startUpdateTime;
            if (settings.isAutoCalcRest()) {
                avgTracker.addSample(totalUpdateTime);
                Helper.pause(avgTracker.avgAsLong());
            } else {
                Helper.pause(settings.getRestTime().getTotalMilliseconds());
            }
        }
        if (cancelFlag) {
            reportStatus.setLastError(
                    new ErrorInformation(PwmError.ERROR_SERVICE_NOT_AVAILABLE, "report cancelled by operator"));
        }
    } finally {
        reportStatus.setFinishDate(new Date());
        reportStatus.setInProgress(false);
    }
    LOGGER.debug(PwmConstants.REPORTING_SESSION_LABEL,
            "update user cache process completed: " + JsonUtil.serialize(reportStatus));
}

From source file:org.apache.camel.component.mail.MailConsumer.java

public int processBatch(Queue<Object> exchanges) throws Exception {
    int total = exchanges.size();

    // limit if needed
    if (maxMessagesPerPoll > 0 && total > maxMessagesPerPoll) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Limiting to maximum messages to poll " + maxMessagesPerPoll + " as there was " + total
                    + " messages in this poll.");
        }
        total = maxMessagesPerPoll;
    }

    for (int index = 0; index < total && isBatchAllowed(); index++) {
        // only loop if we are started (allowed to run)
        Exchange exchange = ObjectHelper.cast(Exchange.class, exchanges.poll());
        // add current index and total as properties
        exchange.setProperty(Exchange.BATCH_INDEX, index);
        exchange.setProperty(Exchange.BATCH_SIZE, total);
        exchange.setProperty(Exchange.BATCH_COMPLETE, index == total - 1);

        // update pending number of exchanges
        pendingExchanges = total - index - 1;

        // must use the original message in case we need to workaround a charset issue when extracting mail content
        final Message mail = exchange.getIn(MailMessage.class).getOriginalMessage();

        // add on completion to handle after work when the exchange is done
        exchange.addOnCompletion(new Synchronization() {
            public void onComplete(Exchange exchange) {
                processCommit(mail, exchange);
            }

            public void onFailure(Exchange exchange) {
                processRollback(mail, exchange);
            }

            @Override
            public String toString() {
                return "MailConsumerOnCompletion";
            }
        });

        // process the exchange
        processExchange(exchange);
    }

    return total;
}

From source file:azkaban.jobtype.AzkabanPigListener.java

@Override
public void initialPlanNotification(String scriptId, MROperPlan plan) {
    logger.info("**********initialPlanNotification!**********");

    // First pass: generate dagNodeNameMap.
    Map<OperatorKey, MapReduceOper> planKeys = plan.getKeys();
    for (Map.Entry<OperatorKey, MapReduceOper> entry : planKeys.entrySet()) {
        String nodeName = entry.getKey().toString();
        String[] aliases = toArray(ScriptState.get().getAlias(entry.getValue()).trim());
        String[] features = toArray(ScriptState.get().getPigFeature(entry.getValue()).trim());

        PigJobDagNode node = new PigJobDagNode(nodeName, aliases, features);
        this.dagNodeNameMap.put(node.getName(), node);

        // This shows how we can get the basic info about all nameless jobs 
        // before any execute. We can traverse the plan to build a DAG of this 
        // info.
        logger.info("initialPlanNotification: aliases: " + StringUtils.join(aliases, ",") + ", name: "
                + node.getName() + ", features: " + StringUtils.join(features, ","));
    }

    // Second pass: connect the edges
    for (Map.Entry<OperatorKey, MapReduceOper> entry : planKeys.entrySet()) {
        PigJobDagNode node = this.dagNodeNameMap.get(entry.getKey().toString());
        List<String> successorNodeList = new ArrayList<String>();
        List<MapReduceOper> successors = plan.getSuccessors(entry.getValue());
        if (successors != null) {
            for (MapReduceOper successor : successors) {
                PigJobDagNode successorNode = this.dagNodeNameMap.get(successor.getOperatorKey().toString());
                successorNodeList.add(successorNode.getName());
                successorNode.addParent(node);
            }
        }
        node.setSuccessors(successorNodeList);
    }

    // Third pass: find roots.
    Queue<PigJobDagNode> parentQueue = new LinkedList<PigJobDagNode>();
    Queue<PigJobDagNode> childQueue = new LinkedList<PigJobDagNode>();
    for (Map.Entry<String, PigJobDagNode> entry : this.dagNodeNameMap.entrySet()) {
        PigJobDagNode node = entry.getValue();
        if (node.getParents().isEmpty()) {
            node.setLevel(0);
            parentQueue.add(node);
        }
    }

    // Final pass: BFS to set levels.
    int level = 0;
    Set<PigJobDagNode> visited = new HashSet<PigJobDagNode>();
    while (parentQueue.peek() != null) {
        PigJobDagNode node = null;
        while ((node = parentQueue.poll()) != null) {
            if (visited.contains(node)) {
                continue;
            }
            node.setLevel(level);
            for (String jobName : node.getSuccessors()) {
                PigJobDagNode successorNode = this.dagNodeNameMap.get(jobName);
                childQueue.add(successorNode);
            }
        }

        Queue<PigJobDagNode> tmp = childQueue;
        childQueue = parentQueue;
        parentQueue = tmp;
        ++level;
    }

    updateJsonFile();
}

From source file:org.apache.hadoop.hdfs.notifier.server.TestServerHistory.java

@Test
public void testQueueNotificationAdvanced() throws Exception {
    // Starting without a ramp-up phase
    DummyServerCore core = new DummyServerCore();
    ServerHistory history = new ServerHistory(core, false);
    long historyLength = 10000;
    history.setHistoryLength(historyLength);
    Queue<NamespaceNotification> historyNotifications;
    long txCount = 1001;

    new Thread(history).start();

    for (long txId = 0; txId < txCount; txId++) {
        history.storeNotification(
                new NamespaceNotification("/a/" + txId, EventType.FILE_ADDED.getByteValue(), txId));
    }

    // Part 1 - Get all notifications
    historyNotifications = new LinkedList<NamespaceNotification>();
    history.addNotificationsToQueue(new NamespaceEvent("/a", EventType.FILE_ADDED.getByteValue()), 0,
            historyNotifications);
    Assert.assertEquals(1000, historyNotifications.size());
    for (long txId = 1; txId < txCount; txId++) {
        NamespaceNotification n = historyNotifications.poll();
        Assert.assertEquals(txId, n.txId);
        Assert.assertEquals("/a/" + txId, n.path);
        Assert.assertEquals(EventType.FILE_ADDED.getByteValue(), n.type);
    }

    // Part 2 - Get half of the notifications
    historyNotifications = new LinkedList<NamespaceNotification>();
    history.addNotificationsToQueue(new NamespaceEvent("/a", EventType.FILE_ADDED.getByteValue()), 500,
            historyNotifications);
    Assert.assertEquals(500, historyNotifications.size());
    for (long txId = 501; txId < txCount; txId++) {
        NamespaceNotification n = historyNotifications.poll();
        Assert.assertEquals(txId, n.txId);
        Assert.assertEquals("/a/" + txId, n.path);
        Assert.assertEquals(EventType.FILE_ADDED.getByteValue(), n.type);
    }

    core.shutdown();
}

From source file:com.thoughtworks.go.server.service.dd.reporting.ReportingDependencyFanInNode.java

private Pair<StageIdentifier, List<ReportingFaninScmMaterial>> getRevisionNthFor(int n,
        ReportingFanInGraphContext context) {
    List<ReportingFaninScmMaterial> scmMaterials = new ArrayList<>();
    PipelineTimeline pipelineTimeline = context.pipelineTimeline;
    Queue<PipelineTimelineEntry.Revision> revisionQueue = new ConcurrentLinkedQueue<>();
    DependencyMaterialConfig dependencyMaterial = (DependencyMaterialConfig) materialConfig;
    PipelineTimelineEntry entry = pipelineTimeline.instanceFor(dependencyMaterial.getPipelineName(),
            totalInstanceCount - n);

    StageIdentifier dependentStageIdentifier = dependentStageIdentifier(context, entry,
            CaseInsensitiveString.str(dependencyMaterial.getStageName()));
    if (!StageIdentifier.NULL.equals(dependentStageIdentifier)) {
        addToRevisionQueue(entry, revisionQueue, scmMaterials, context);
    } else {
        return null;
    }
    while (!revisionQueue.isEmpty()) {
        PipelineTimelineEntry.Revision revision = revisionQueue.poll();
        DependencyMaterialRevision dmr = DependencyMaterialRevision.create(revision.revision, null);
        PipelineTimelineEntry pte = pipelineTimeline
                .getEntryFor(new CaseInsensitiveString(dmr.getPipelineName()), dmr.getPipelineCounter());
        addToRevisionQueue(pte, revisionQueue, scmMaterials, context);
    }

    return new Pair<>(dependentStageIdentifier, scmMaterials);
}

From source file:de.csw.lucene.ConceptFilter.java

/**
 * advances to the next token in the stream.
 * Takes into account that terms from the ontology might be constructed
 * out of several consecutive tokens.
 * @return false at EOS
 */
@Override
public boolean incrementToken() throws IOException {

    boolean hasMoreToken = innerNextToken();
    if (!hasMoreToken) {
        return false;
    }

    Queue<AttributeSource.State> lookAhead = new LinkedList<AttributeSource.State>();
    List<String> terms = new ArrayList<String>();
    terms.add(String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));

    while (index.isPrefix(terms) && hasMoreToken) {
        lookAhead.add(captureState());
        hasMoreToken = innerNextToken();
        terms.add(String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));
    }

    // if we have a match ...
    if (index.hasExactMatches(StringUtils.join(terms.toArray(), OntologyIndex.PREFIX_SEPARATOR))) {

        // ..then we consume all elements in the look ahead, if present
        if (!lookAhead.isEmpty()) {
            int maxEndOffset = offsetAttribute.endOffset();
            restoreState(lookAhead.poll());
            terms.remove(0); // already present in current token
            for (String term : terms) {
                charTermAttribute.append(OntologyIndex.PREFIX_SEPARATOR);
                charTermAttribute.append(term);
            }

            offsetAttribute.setOffset(offsetAttribute.startOffset(), maxEndOffset);
        }
        typeAttribute.setType(CONCEPT_TYPE);
        if (log.isTraceEnabled()) {
            log.trace("Concept token recognized: "
                    + String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));
        }

    } else {

        // .. else we push back in the queue the tokens already read
        if (!lookAhead.isEmpty()) {
            lookAhead.add(captureState());
            restoreState(lookAhead.poll());
            for (AttributeSource.State laterToken : lookAhead) {
                queue.add(laterToken);
            }
        }
    }

    return hasMoreToken;
}

From source file:org.trend.hgraph.util.test.GenerateTestData.java

private void doGenerateTestData() throws IOException {
    HTable vertexTable = null;
    HTable edgeTable = null;
    Put put = null;
    long vIdx = 0;
    byte[] parentVertexKey = null;
    StopWatch timer = new StopWatch();
    timer.start();
    try {
        vertexTable = new HTable(this.getConf(), this.vertexTable);
        vertexTable.setAutoFlush(false);
        edgeTable = new HTable(this.getConf(), this.edgeTable);
        edgeTable.setAutoFlush(false);

        Queue<byte[]> parentVertexKeysQueue = new ArrayDeque<byte[]>();
        int tmpEdgeCountPerVertex = 0;
        int edgeAcctCount = 0;
        Properties.Pair<Integer, Integer> pair = null;
        for (int rowCount = 0; rowCount < this.vertexCount; rowCount++) {
            put = generateVertexPut();
            vertexTable.put(put);
            parentVertexKeysQueue.offer(put.getRow());

            if (rowCount > 0) {
                vIdx = rowCount % tmpEdgeCountPerVertex;
                if (vIdx == 0) {
                    parentVertexKey = parentVertexKeysQueue.poll();

                    edgeAcctCount++;
                    if (this.isDistributionMode && !this.isFirstVertices[pair.key]
                            && edgeAcctCount == tmpEdgeCountPerVertex) {
                        this.addFirstVertex(Bytes.toString(parentVertexKey));
                        this.isFirstVertices[pair.key] = true;
                    }
                    pair = this.determineEdgeCountPerVertex(rowCount);
                    tmpEdgeCountPerVertex = pair.value;
                    edgeAcctCount = 0;
                } else if (vIdx > 0) {
                    edgeAcctCount++;
                    parentVertexKey = parentVertexKeysQueue.peek();
                } else {
                    throw new RuntimeException("vIdex:" + vIdx + " shall always not small than 0");
                }

                put = generateEdgePut(rowCount, parentVertexKey, put.getRow());
                edgeTable.put(put);
            } else {
                pair = this.determineEdgeCountPerVertex(rowCount);
                tmpEdgeCountPerVertex = pair.value;
                if (!this.isDistributionMode)
                    this.addFirstVertex(Bytes.toString(put.getRow()));
            }
        }
        vertexTable.flushCommits();
        edgeTable.flushCommits();
    } catch (IOException e) {
        LOG.error("doGenerateTestData failed", e);
        throw e;
    } finally {
        if (null != vertexTable)
            vertexTable.close();
        if (null != edgeTable)
            edgeTable.close();
        timer.stop();
        LOG.info("Time elapsed:" + timer.toString() + ", " + timer.getTime() + " for pushing "
                + this.vertexCount + " vertices test data to HBase");
        LOG.info("first vertices id:" + this.firstVertices);
    }
}