Example usage for java.util.concurrent BlockingQueue isEmpty

List of usage examples for java.util.concurrent BlockingQueue isEmpty

Introduction

On this page you can find example usages of java.util.concurrent.BlockingQueue.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this collection contains no elements.
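
Before the real-world examples, here is a minimal, self-contained sketch (not taken from any of the projects listed under Usage) showing what isEmpty() reports as elements are added and removed. In concurrent code the result is only a snapshot: the queue may gain or lose elements immediately after the call returns.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class IsEmptyDemo {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> queue = new ArrayBlockingQueue<>(10);
        System.out.println(queue.isEmpty()); // true: nothing has been offered yet

        queue.put("task-1");
        System.out.println(queue.isEmpty()); // false: one element is queued

        queue.take();
        System.out.println(queue.isEmpty()); // true again once the element is consumed
    }
}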

Usage

From source file:org.apache.hive.ptest.execution.HostExecutor.java

/**
 * Executes parallel tests until the parallel work queue is empty, then
 * executes the isolated tests on the host. During each phase, if an
 * AbortDroneException is thrown the drone is removed, possibly
 * leaving this host with zero functioning drones. If all drones
 * are removed the host will be replaced before the next run.
 */
private void executeTests(final BlockingQueue<TestBatch> parallelWorkQueue,
        final BlockingQueue<TestBatch> isolatedWorkQueue, final Set<TestBatch> failedTestResults)
        throws Exception {
    if (mShutdown) {
        mLogger.warn("Shutting down host " + mHost.getName());
        return;
    }
    mLogger.info("Starting parallel execution on " + mHost.getName());
    List<ListenableFuture<Void>> droneResults = Lists.newArrayList();
    for (final Drone drone : ImmutableList.copyOf(mDrones)) {
        droneResults.add(mExecutor.submit(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                TestBatch batch = null;
                Stopwatch sw = Stopwatch.createUnstarted();
                try {
                    do {
                        batch = parallelWorkQueue.poll(mNumPollSeconds, TimeUnit.SECONDS);
                        if (mShutdown) {
                            mLogger.warn("Shutting down host " + mHost.getName());
                            return null;
                        }
                        if (batch != null) {
                            numParallelBatchesProcessed++;
                            sw.reset().start();
                            try {
                                if (!executeTestBatch(drone, batch, failedTestResults)) {
                                    failedTestResults.add(batch);
                                }
                            } finally {
                                sw.stop();
                                mLogger.info(
                                        "Finished processing parallel batch [{}] on host {}. ElapsedTime(ms)={}",
                                        new Object[] { batch.getName(), getHost().toShortString(),
                                                sw.elapsed(TimeUnit.MILLISECONDS) });
                            }
                        }
                    } while (!mShutdown && !parallelWorkQueue.isEmpty());
                } catch (AbortDroneException ex) {
                    mDrones.remove(drone); // return value not checked due to concurrent access
                    mLogger.error("Aborting drone during parallel execution", ex);
                    if (batch != null) {
                        Preconditions.checkState(parallelWorkQueue.add(batch),
                                "Could not add batch to parallel queue " + batch);
                    }
                }
                return null;
            }
        }));
    }
    if (mShutdown) {
        mLogger.warn("Shutting down host " + mHost.getName());
        return;
    }
    Futures.allAsList(droneResults).get();
    mLogger.info("Starting isolated execution on " + mHost.getName());
    for (Drone drone : ImmutableList.copyOf(mDrones)) {
        TestBatch batch = null;
        Stopwatch sw = Stopwatch.createUnstarted();
        try {
            do {

                batch = isolatedWorkQueue.poll(mNumPollSeconds, TimeUnit.SECONDS);
                if (batch != null) {
                    numIsolatedBatchesProcessed++;
                    sw.reset().start();
                    try {
                        if (!executeTestBatch(drone, batch, failedTestResults)) {
                            failedTestResults.add(batch);
                        }
                    } finally {
                        sw.stop();
                        mLogger.info("Finished processing isolated batch [{}] on host {}. ElapsedTime(ms)={}",
                                new Object[] { batch.getName(), getHost().toShortString(),
                                        sw.elapsed(TimeUnit.MILLISECONDS) });
                    }
                }
            } while (!mShutdown && !isolatedWorkQueue.isEmpty());
        } catch (AbortDroneException ex) {
            mDrones.remove(drone); // return value not checked due to concurrent access
            mLogger.error("Aborting drone during isolated execution", ex);
            if (batch != null) {
                Preconditions.checkState(isolatedWorkQueue.add(batch),
                        "Could not add batch to isolated queue " + batch);
            }
        }
    }
}
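
The worker loops above combine a timed poll() with an isEmpty() check so each drone keeps pulling batches while work remains but never blocks forever once the queue drains. The following is a stripped-down, self-contained sketch of that pattern; the queue contents, shutdown flag and the processing step are illustrative, not part of HostExecutor.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

public class PollUntilEmpty {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> workQueue = new ArrayBlockingQueue<>(4);
        workQueue.add("batch-1");
        workQueue.add("batch-2");
        AtomicBoolean shutdown = new AtomicBoolean(false);

        String batch;
        do {
            // Wait a bounded time instead of blocking forever, so the shutdown
            // flag can be rechecked even while the queue stays empty.
            batch = workQueue.poll(1, TimeUnit.SECONDS);
            if (shutdown.get()) {
                return;
            }
            if (batch != null) {
                System.out.println("processing " + batch); // stand-in for executeTestBatch(...)
            }
        } while (!shutdown.get() && !workQueue.isEmpty());
    }
}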

From source file:org.batoo.jpa.benchmark.BenchmarkTest.java

private void waitUntilFinish(ThreadPoolExecutor executor) {
    final BlockingQueue<Runnable> workQueue = executor.getQueue();
    try {
        final long started = System.currentTimeMillis();

        int lastToGo = workQueue.size();

        final int total = workQueue.size();
        int performed = 0;

        int maxStatusMessageLength = 0;
        while (!workQueue.isEmpty()) {
            final float doneNow = lastToGo - workQueue.size();
            performed += doneNow;

            final float elapsed = (System.currentTimeMillis() - started) / 1000;

            lastToGo = workQueue.size();

            if (performed > 0) {
                final float throughput = performed / elapsed;
                final float eta = ((elapsed * total) / performed) - elapsed;

                final float percentDone = (100 * (float) lastToGo) / total;
                final int gaugeDone = (int) ((100 - percentDone) / 5);
                final String gauge = "[" + StringUtils.repeat("", gaugeDone)
                        + StringUtils.repeat("-", 20 - gaugeDone) + "]";

                final String sampling = this.profilingQueue.size() > 0
                        ? MessageFormat.format(" | Samples {0}", this.profilingQueue.size())
                        : "";

                if ((maxStatusMessageLength != 0) || (eta > 5)) {
                    String statusMessage = MessageFormat.format(
                            "\r{4} %{5,number,00.00} | ETA {2} | LAST TPS {0} ops / sec | AVG TPS {1,number,#.0} | LEFT {3}{6}", //
                            doneNow, throughput, this.etaToString((int) eta), workQueue.size(), gauge,
                            percentDone, sampling);

                    maxStatusMessageLength = Math.max(statusMessage.length(), maxStatusMessageLength);
                    statusMessage = StringUtils.leftPad(statusMessage,
                            maxStatusMessageLength - statusMessage.length());
                    System.out.print(statusMessage);
                }
            }

            if (elapsed > BenchmarkTest.MAX_TEST_TIME) {
                throw new IllegalStateException("Max allowed test time exceeded");
            }

            Thread.sleep(1000);
        }

        if (maxStatusMessageLength > 0) {
            System.out.print("\r" + StringUtils.repeat(" ", maxStatusMessageLength) + "\r");
        }

        executor.shutdown();

        if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
            BenchmarkTest.LOG.warn("Forcefully shutting down the thread pool");

            executor.shutdownNow();
        }

        BenchmarkTest.LOG.warn("Iterations completed");
    } catch (final InterruptedException e) {
        throw new RuntimeException(e);
    }
}
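
The method above watches executor.getQueue() until isEmpty() reports that the backlog has drained, then shuts the pool down. Below is a simplified, self-contained sketch of the same drain-and-shutdown sequence; the task bodies and timings are illustrative. Note that an empty work queue only means no tasks are waiting; the last submitted tasks may still be running, which is why awaitTermination() is still required.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class DrainWait {
    public static void main(String[] args) throws InterruptedException {
        ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(2);
        for (int i = 0; i < 20; i++) {
            executor.submit(() -> {
                try {
                    Thread.sleep(100); // stand-in for a benchmark task
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
        }

        BlockingQueue<Runnable> workQueue = executor.getQueue();
        while (!workQueue.isEmpty()) {
            System.out.println(workQueue.size() + " tasks still queued");
            Thread.sleep(200); // poll periodically rather than busy-waiting
        }

        // An empty queue only means no tasks are waiting; the last ones may
        // still be running, so shut down and wait for termination as well.
        executor.shutdown();
        if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
            executor.shutdownNow();
        }
    }
}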

From source file:org.opencastproject.videosegmenter.impl.VideoSegmenterServiceImpl.java

/**
 * Returns the segments for the movie accessible through the frame grabbing control.
 *
 * @param video
 *          the mpeg-7 video representation
 * @param dsh
 *          the data source handler
 * @return the list of segments
 * @throws IOException
 *           if accessing a frame fails
 * @throws VideoSegmenterException
 *           if segmentation of the video fails
 */
protected List<Segment> segment(Video video, FrameGrabber dsh) throws IOException, VideoSegmenterException {
    List<Segment> segments = new ArrayList<Segment>();

    int t = 1;
    int lastStableImageTime = 0;
    long startOfSegment = 0;
    int currentSceneStabilityCount = 1;
    boolean sceneChangeImminent = true;
    boolean luckyPunchRecovery = false;
    int segmentCount = 1;
    BufferedImage previousImage = null;
    BufferedImage lastStableImage = null;
    BlockingQueue<Buffer> bufferQueue = new ArrayBlockingQueue<Buffer>(stabilityThreshold + 1);
    long durationInSeconds = video.getMediaTime().getMediaDuration().getDurationInMilliseconds() / 1000;
    Segment contentSegment = video.getTemporalDecomposition().createSegment("segment-" + segmentCount);
    ImageComparator icomp = new ImageComparator(changesThreshold);

    // icomp.setStatistics(true);
    // String imagesPath = PathSupport.concat(new String[] {
    // System.getProperty("java.io.tmpdir"),
    // "videosegments",
    // video.getMediaLocator().getMediaURI().toString().replaceAll("\\W", "-")
    // });
    // icomp.saveImagesTo(new File(imagesPath));

    Buffer buf = dsh.getBuffer();
    while (t < durationInSeconds && buf != null && !buf.isEOM()) {
        BufferedImage bufferedImage = ImageUtils.createImage(buf);
        if (bufferedImage == null)
            throw new VideoSegmenterException("Unable to extract image at time " + t);

        logger.trace("Analyzing video at {} s", t);

        // Compare the new image with our previous sample
        boolean differsFromPreviousImage = icomp.isDifferent(previousImage, bufferedImage, t);

        // We found an image that is different compared to the previous one. Let's see if this image remains stable
        // for some time (STABILITY_THRESHOLD) so we can declare a new scene
        if (differsFromPreviousImage) {
            logger.debug("Found differing image at {} seconds", t);

            // If this is the result of a lucky punch (looking ahead STABILITY_THRESHOLD seconds), then we should
            // really start over and make sure we get the correct beginning of the new scene
            if (!sceneChangeImminent && t - lastStableImageTime > 1) {
                luckyPunchRecovery = true;
                previousImage = lastStableImage;
                bufferQueue.add(buf);
                t = lastStableImageTime;
            } else {
                lastStableImageTime = t - 1;
                lastStableImage = previousImage;
                previousImage = bufferedImage;
                currentSceneStabilityCount = 1;
                t++;
            }
            sceneChangeImminent = true;
        }

        // We are looking ahead and everything seems to be fine.
        else if (!sceneChangeImminent) {
            fillLookAheadBuffer(bufferQueue, buf, dsh);
            lastStableImageTime = t;
            t += stabilityThreshold;
            previousImage = bufferedImage;
            lastStableImage = bufferedImage;
        }

        // Seems to be the same image. If we have just recently detected a new scene, let's see if we are able to
        // confirm that this scene is stable (>= STABILITY_THRESHOLD)
        else if (currentSceneStabilityCount < stabilityThreshold) {
            currentSceneStabilityCount++;
            previousImage = bufferedImage;
            t++;
        }

        // Did we find a new scene?
        else if (currentSceneStabilityCount == stabilityThreshold) {
            lastStableImageTime = t;

            long endOfSegment = t - stabilityThreshold - 1;
            long durationms = (endOfSegment - startOfSegment) * 1000L;

            // Create a new segment if this wasn't the first one
            if (endOfSegment > stabilityThreshold) {
                contentSegment.setMediaTime(new MediaRelTimeImpl(startOfSegment * 1000L, durationms));
                contentSegment = video.getTemporalDecomposition().createSegment("segment-" + ++segmentCount);
                segments.add(contentSegment);
                startOfSegment = endOfSegment;
            }

            // After finding a new segment, likelihood of a stable image is good, let's take a look ahead. Since
            // a processor can't seek, we need to store the buffers in between, in case we need to come back.
            fillLookAheadBuffer(bufferQueue, buf, dsh);
            t += stabilityThreshold;
            previousImage = bufferedImage;
            lastStableImage = bufferedImage;
            currentSceneStabilityCount++;
            sceneChangeImminent = false;
            logger.info("Found new scene at {} s", startOfSegment);
        }

        // Did we find a new scene by looking ahead?
        else if (sceneChangeImminent) {
            // We found a scene change by looking ahead. Now we want to get to the exact position
            lastStableImageTime = t;
            previousImage = bufferedImage;
            lastStableImage = bufferedImage;
            currentSceneStabilityCount++;
            t++;
        }

        // Nothing special, business as usual
        else {
            // If things look stable, then let's look ahead as much as possible without losing information (which is
            // equal to looking ahead STABILITY_THRESHOLD seconds).
            lastStableImageTime = t;
            fillLookAheadBuffer(bufferQueue, buf, dsh);
            t += stabilityThreshold;
            lastStableImage = bufferedImage;
            previousImage = bufferedImage;
        }

        if (luckyPunchRecovery) {
            buf = bufferQueue.poll();
            luckyPunchRecovery = !bufferQueue.isEmpty();
        } else
            buf = dsh.getBuffer();
    }

    // Finish off the last segment
    long startOfSegmentms = startOfSegment * 1000L;
    long durationms = ((long) durationInSeconds - startOfSegment) * 1000;
    contentSegment.setMediaTime(new MediaRelTimeImpl(startOfSegmentms, durationms));
    segments.add(contentSegment);

    // Print summary
    if (icomp.hasStatistics()) {
        NumberFormat nf = NumberFormat.getNumberInstance();
        nf.setMaximumFractionDigits(2);
        logger.info("Image comparison finished with an average change of {}% in {} comparisons",
                nf.format(icomp.getAvgChange()), icomp.getComparisons());
    }

    // Cleanup
    if (icomp.getSavedImagesDirectory() != null) {
        FileUtils.deleteQuietly(icomp.getSavedImagesDirectory());
    }

    return segments;
}
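
The luckyPunchRecovery branch above drains bufferQueue with poll() and uses isEmpty() to decide when to switch back to the live frame source. The following is a self-contained toy sketch of that look-ahead-and-replay idea; strings stand in for frame buffers and all names are illustrative, not part of the segmenter.

import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class LookAheadReplay {
    public static void main(String[] args) {
        // The frame source cannot seek, so frames read while looking ahead are
        // remembered in a bounded queue in case the algorithm has to rewind.
        Iterator<String> source = List.of("f1", "f2", "f3", "f4", "f5").iterator();
        BlockingQueue<String> bufferQueue = new ArrayBlockingQueue<>(3);

        // Look ahead: consume a few frames from the source and keep them.
        while (bufferQueue.remainingCapacity() > 0 && source.hasNext()) {
            bufferQueue.add(source.next());
        }

        // Rewind/replay: drain the look-ahead queue first, then return to the live source.
        boolean replaying = !bufferQueue.isEmpty();
        while (replaying || source.hasNext()) {
            String frame;
            if (replaying) {
                frame = bufferQueue.poll();
                replaying = !bufferQueue.isEmpty(); // same check as luckyPunchRecovery above
            } else {
                frame = source.next();
            }
            System.out.println("analyzing " + frame);
        }
    }
}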

From source file:org.xwiki.mail.internal.DefaultMailSender.java

@Override
public void send(MimeMessage message, Session session) throws MessagingException {
    DefaultMailResultListener listener = new DefaultMailResultListener();
    sendAsynchronously(message, session, listener);
    waitTillSent(Long.MAX_VALUE);
    BlockingQueue<Exception> errorQueue = listener.getExceptionQueue();
    if (!errorQueue.isEmpty()) {
        throw new MessagingException(String.format("Failed to send mail message [%s]", message),
                errorQueue.peek());
    }
}
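
Here isEmpty() is used after the send has completed to decide whether any listener recorded a failure. A minimal sketch of that error-collection idea follows; the exception and message text are illustrative, and the queue stands in for the listener's exception queue.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class ErrorQueueCheck {
    public static void main(String[] args) throws Exception {
        // Asynchronous workers append any failure they hit; the caller inspects
        // the queue after waiting for the work to finish.
        BlockingQueue<Exception> errorQueue = new LinkedBlockingQueue<>();

        // Simulate one failed delivery recorded by a (hypothetical) listener.
        errorQueue.add(new IllegalStateException("SMTP connection refused"));

        if (!errorQueue.isEmpty()) {
            // Report the first recorded failure as the cause, as the sender above does.
            throw new Exception("Failed to send mail message", errorQueue.peek());
        }
    }
}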

From source file:ubic.gemma.core.loader.association.NCBIGene2GOAssociationLoader.java

private void load(BlockingQueue<Gene2GOAssociation> queue) {

    NCBIGene2GOAssociationLoader.log.debug("Entering 'load' ");

    long millis = System.currentTimeMillis();
    int cpt = 0;
    double secspt = 0.0;

    Collection<Gene2GOAssociation> itemsToPersist = new ArrayList<>();
    try {
        while (!(producerDone.get() && queue.isEmpty())) {
            Gene2GOAssociation associations = queue.poll();

            if (associations == null) {
                continue;
            }

            itemsToPersist.add(associations);
            if (++count % NCBIGene2GOAssociationLoader.BATCH_SIZE == 0) {
                persisterHelper.persist(itemsToPersist);
                itemsToPersist.clear();
            }

            // just some timing information.
            if (count % 10000 == 0) {
                cpt++;
                double secsperthousand = (System.currentTimeMillis() - millis) / 1000.0;
                secspt += secsperthousand;
                double meanspt = secspt / cpt;

                String progString = "Processed and loaded " + count + " (" + secsperthousand
                        + " seconds elapsed, average per thousand=" + String.format("%.2f", meanspt) + ")";
                NCBIGene2GOAssociationLoader.log.info(progString);
                millis = System.currentTimeMillis();
            }

        }
    } catch (Exception e) {
        consumerDone.set(true);
        NCBIGene2GOAssociationLoader.log.fatal(e, e);
        throw new RuntimeException(e);
    }

    // finish up.
    persisterHelper.persist(itemsToPersist);

    NCBIGene2GOAssociationLoader.log.info("Finished, loaded total of " + count + " GO associations");
    consumerDone.set(true);

}
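
This loader, like the Gemma loaders below, uses the same consumer-loop condition: keep polling until the producer has signalled completion and the queue is empty. The following is a self-contained sketch of that pattern; the integer payload and counts are illustrative.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;

public class ProducerConsumerDrain {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<Integer> queue = new LinkedBlockingQueue<>();
        AtomicBoolean producerDone = new AtomicBoolean(false);

        Thread producer = new Thread(() -> {
            for (int i = 0; i < 100; i++) {
                queue.add(i);
            }
            producerDone.set(true); // set only after the last item is enqueued
        });
        producer.start();

        int consumed = 0;
        // Keep consuming until the producer has finished AND the queue is drained.
        // Checking isEmpty() alone could stop the consumer while items are still arriving.
        while (!(producerDone.get() && queue.isEmpty())) {
            Integer item = queue.poll(); // non-blocking, mirrors the loader above
            if (item == null) {
                continue; // queue momentarily empty, but the producer may not be done yet
            }
            consumed++;
        }
        System.out.println("consumed " + consumed + " items");

        producer.join();
    }
}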

From source file:ubic.gemma.core.loader.expression.arrayDesign.ArrayDesignProbeMapperServiceImpl.java

private void doLoad(final BlockingQueue<BACS> queue, AtomicBoolean generatorDone, AtomicBoolean loaderDone,
        boolean persist) {
    int loadedAssociationCount = 0;
    while (!(generatorDone.get() && queue.isEmpty())) {

        try {
            BACS bacs = queue.poll();
            if (bacs == null) {
                continue;
            }

            GeneProduct geneProduct = bacs.ba.getGeneProduct();

            if (geneProduct.getId() == null) {
                GeneProduct existing = geneProductService.find(geneProduct);

                if (existing == null) {

                    existing = this.checkForAlias(geneProduct);
                    if (existing == null) {
                        /*
                         * We have to be careful not to cruft up the gene table now that I so carefully cleaned it.
                         * But this is a problem if we aren't adding some other association to the gene at least.
                         * But generally the mRNAs that GP has that NCBI doesn't are "alternative" or "additional".
                         */
                        if (ArrayDesignProbeMapperServiceImpl.log.isDebugEnabled())
                            ArrayDesignProbeMapperServiceImpl.log
                                    .debug("New gene product from GoldenPath is not in Gemma: " + geneProduct
                                            + " skipping association to " + bacs.ba.getBioSequence()
                                            + " [skipping policy in place]");
                        continue;
                    }
                }
                bacs.ba.setGeneProduct(existing);
            }

            if (persist) {
                persisterHelper.persist(bacs.ba);

                if (++loadedAssociationCount % 1000 == 0) {
                    ArrayDesignProbeMapperServiceImpl.log.info("Persisted " + loadedAssociationCount
                            + " blat associations. " + "Current queue has " + queue.size() + " items.");
                }
            } else {
                this.printResult(bacs.cs, bacs.ba);
            }

        } catch (Exception e) {
            ArrayDesignProbeMapperServiceImpl.log.error(e, e);
            loaderDone.set(true);
            throw new RuntimeException(e);
        }
    }
    ArrayDesignProbeMapperServiceImpl.log
            .info("Load thread done: loaded " + loadedAssociationCount + " blat associations. ");
    loaderDone.set(true);
}

From source file:ubic.gemma.core.loader.genome.gene.ncbi.NcbiGeneConverter.java

public void convert(final BlockingQueue<NcbiGeneData> geneInfoQueue, final BlockingQueue<Gene> geneQueue) {
    // start up thread to convert a member of geneInfoQueue to a gene/geneproduct/databaseentry
    // then push the gene onto the geneQueue for loading

    Thread convertThread = new Thread(new Runnable() {
        @Override
        @SuppressWarnings("synthetic-access")
        public void run() {
            while (!(sourceDone.get() && geneInfoQueue.isEmpty())) {
                try {
                    NcbiGeneData data = geneInfoQueue.poll();
                    if (data == null) {
                        continue;
                    }
                    Gene converted = NcbiGeneConverter.this.convert(data);

                    if (converted.getProducts().isEmpty()) {
                        log.info("Gene with no products skipped: " + converted);
                        continue;
                    }

                    geneQueue.put(converted);

                } catch (InterruptedException e) {
                    NcbiGeneConverter.log.warn("Interrupted");
                    break;
                } catch (Exception e) {
                    NcbiGeneConverter.log.error(e, e);
                    break;
                }
            }
            producerDone.set(true);
        }
    }, "Converter");

    convertThread.start();
}

From source file:ubic.gemma.core.loader.genome.gene.ncbi.NcbiGeneLoader.java

void doLoad(final BlockingQueue<Gene> geneQueue) {
    StopWatch timer = new StopWatch();
    timer.start();
    while (!(converterDone.get() && geneQueue.isEmpty())) {
        Gene gene = null;
        try {
            // the converted genes.
            gene = geneQueue.poll();
            if (gene == null) {
                continue;
            }

            persisterHelper.persistOrUpdate(gene);

            if (++loadedGeneCount % 1000 == 0 || timer.getTime() > 30 * 1000) {
                NcbiGeneLoader.log.info("Processed " + loadedGeneCount + " genes. Queue has " + geneQueue.size()
                        + " items; last gene: " + gene);
                timer.reset();
                timer.start();
            }

        } catch (Exception e) {
            NcbiGeneLoader.log.error("Error while loading gene: " + gene + ": " + e.getMessage(), e);
            loaderDone.set(true);
            throw new RuntimeException(e);
        }
    }
    NcbiGeneLoader.log.info("Loaded " + loadedGeneCount + " genes. ");
    loaderDone.set(true);
}

From source file:ubic.gemma.core.loader.protein.StringProteinInteractionLoader.java

/**
 * Polls the queue to see if there are any Gene2GeneProteinAssociation objects to load into the database. If so,
 * it first checks that both genes are in the Gemma database, as these identifiers came from BioMart; the
 * association is loaded only if both genes are found.
 *
 * @param gene2GeneProteinAssociationQueue queue of Gene2GeneProteinAssociation to load
 */
private void doLoad(final BlockingQueue<Gene2GeneProteinAssociation> gene2GeneProteinAssociationQueue) {
    StringProteinInteractionLoader.log.info("starting processing ");
    while (!(converterDone.get() && gene2GeneProteinAssociationQueue.isEmpty())) {

        try {
            Gene2GeneProteinAssociation gene2GeneProteinAssociation = gene2GeneProteinAssociationQueue.poll();
            if (gene2GeneProteinAssociation == null) {
                continue;
            }
            // check they are genes gemma knows about
            Gene geneOne = geneService.findByNCBIId(gene2GeneProteinAssociation.getFirstGene().getNcbiGeneId());
            Gene geneTwo = geneService
                    .findByNCBIId(gene2GeneProteinAssociation.getSecondGene().getNcbiGeneId());

            if (geneOne == null) {
                StringProteinInteractionLoader.log.warn("Gene with NCBI id="
                        + gene2GeneProteinAssociation.getFirstGene().getNcbiGeneId() + " not in Gemma");
                continue;
            }
            if (geneTwo == null) {
                StringProteinInteractionLoader.log.warn("Gene with NCBI id="
                        + gene2GeneProteinAssociation.getSecondGene().getNcbiGeneId() + " not in Gemma");
                continue;
            }

            FieldUtils.writeField(gene2GeneProteinAssociation, "firstGene", geneOne, true);
            FieldUtils.writeField(gene2GeneProteinAssociation, "secondGene", geneTwo, true);

            persisterHelper.persist(gene2GeneProteinAssociation);

            if (++loadedGeneCount % 1000 == 0) {
                StringProteinInteractionLoader.log
                        .info("Proceesed " + loadedGeneCount + " protein protein interactions. "
                                + "Current queue has " + gene2GeneProteinAssociationQueue.size() + " items.");
            }

        } catch (Exception e) {
            StringProteinInteractionLoader.log.error(e, e);
            loaderDone.set(true);
            throw new RuntimeException(e);
        }
    }
    StringProteinInteractionLoader.log.info("Loaded " + loadedGeneCount + " protein protein interactions. ");
    loaderDone.set(true);
}

From source file:ubic.gemma.loader.association.NCBIGene2GOAssociationLoader.java

/**
 * @param queue
 */
protected void load(BlockingQueue<Gene2GOAssociation> queue) {

    log.debug("Entering 'load' ");

    long millis = System.currentTimeMillis();
    int cpt = 0;
    double secspt = 0.0;

    Collection<Gene2GOAssociation> itemsToPersist = new ArrayList<Gene2GOAssociation>();
    try {
        while (!(producerDone.get() && queue.isEmpty())) {
            Gene2GOAssociation associations = queue.poll();

            if (associations == null) {
                continue;
            }

            itemsToPersist.add(associations);
            if (++count % BATCH_SIZE == 0) {
                persisterHelper.persist(itemsToPersist);
                itemsToPersist.clear();
            }

            // just some timing information.
            if (count % 1000 == 0) {
                cpt++;
                double secsperthousand = (System.currentTimeMillis() - millis) / 1000.0;
                secspt += secsperthousand;
                double meanspt = secspt / cpt;

                String progString = "Processed and loaded " + count + " (" + secsperthousand
                        + " seconds elapsed, average per thousand=" + String.format("%.2f", meanspt) + ")";
                log.info(progString);
                millis = System.currentTimeMillis();
            }

        }
    } catch (Exception e) {
        consumerDone.set(true);
        log.fatal(e, e);
        throw new RuntimeException(e);
    }

    // finish up.
    persisterHelper.persist(itemsToPersist);

    log.info("Finished, loaded total of " + count + " GO associations");
    consumerDone.set(true);

}