Example usage for java.util.concurrent BlockingQueue poll

Introduction

On this page you can find usage examples for java.util.concurrent BlockingQueue poll.

Prototype

E poll(long timeout, TimeUnit unit) throws InterruptedException;

Document

Retrieves and removes the head of this queue, waiting up to the specified wait time if necessary for an element to become available. Returns the head of this queue, or null if the specified waiting time elapses before an element becomes available.
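
Before the real-world examples below, here is a minimal, self-contained sketch of the timed-poll pattern; the queue type, element value, and timeout are illustrative only. Unlike take(), which blocks indefinitely, poll(timeout, unit) returns null once the wait time elapses, so the caller must handle that case explicitly.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

public class PollExample {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> queue = new ArrayBlockingQueue<>(10);

        // Producer: hands one element to the queue after a short delay.
        new Thread(() -> {
            try {
                Thread.sleep(100);
                queue.put("hello");
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }).start();

        // Consumer: wait up to one second for an element; poll returns
        // null on timeout rather than blocking indefinitely.
        String result = queue.poll(1, TimeUnit.SECONDS);
        if (result == null) {
            System.out.println("timed out waiting for an element");
        } else {
            System.out.println("received: " + result);
        }
    }
}

This queue-and-poll pattern is exactly what most of the test examples below use: an asynchronous callback adds its result to a BlockingQueue, and the test thread polls with a timeout instead of sleeping for a fixed duration.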

Usage

From source file:io.orchestrate.client.itest.KvTest.java

@Theory
public void pojoKvPutAsync(@ForAll(sampleSize = 10) final String key) throws InterruptedException {
    assumeThat(key, not(isEmptyString()));
    final BlockingQueue<KvMetadata> queue = DataStructures.getLTQInstance(KvMetadata.class);

    User user = new User("test1", "Some description");
    final OrchestrateRequest<KvMetadata> addUserRequest = client.kv(collection(), key).put(user)
            .on(new ResponseAdapter<KvMetadata>() {
                @Override
                public void onFailure(final Throwable error) {
                    // handle error condition
                }

                @Override
                public void onSuccess(final KvMetadata userKvMeta) {
                    queue.add(userKvMeta);
                }
            });
    @SuppressWarnings("unchecked")
    final KvMetadata kvMetadata = queue.poll(5000, TimeUnit.MILLISECONDS);

    assertNotNull(kvMetadata);
}

From source file:io.orchestrate.client.itest.KvTest.java

@Theory
public void getKeyWithInvalidApiKey(@ForAll(sampleSize = 2) final String key) throws InterruptedException {
    assumeThat(key, not(isEmptyString()));

    String badKey = "12345678-1234-1234-1234-1234567890123";
    Client badClient = OrchestrateClient.builder(badKey).build();

    final BlockingQueue<Throwable> failureQueue = DataStructures.getLTQInstance(Throwable.class);

    try {
        final KvObject<String> object = badClient.kv(collection(), key).get(String.class)
                .on(new ResponseListener<KvObject<String>>() {
                    @Override
                    public void onFailure(Throwable error) {
                        failureQueue.add(error);
                    }

                    @Override
                    public void onSuccess(KvObject<String> object) {
                    }
                }).get();
        fail("Should have thrown InvalidApiKeyException on 'get()'.");
    } catch (InvalidApiKeyException ex) {
        // expected: the client was built with an invalid API key
    }
    @SuppressWarnings("unchecked")
    final Throwable failure = failureQueue.poll(5000, TimeUnit.MILLISECONDS);
    assertTrue(failure instanceof InvalidApiKeyException);
}

From source file:io.orchestrate.client.itest.KvTest.java

@Theory
public void pojoKvGetAsync(@ForAll(sampleSize = 10) final String key) throws InterruptedException {
    assumeThat(key, not(isEmptyString()));
    User user = new User(key, "description for " + key);

    final KvMetadata userKvMeta = client.kv(collection(), key).put(user).get();

    final BlockingQueue<KvObject> queue = DataStructures.getLTQInstance(KvObject.class);

    final OrchestrateRequest<KvObject<User>> getUserRequest = client.kv(collection(), key).get(User.class)
            .on(new ResponseAdapter<KvObject<User>>() {
                @Override
                public void onFailure(final Throwable error) {
                    // handle error condition
                }

                @Override
                public void onSuccess(final KvObject<User> userKv) {
                    queue.add(userKv);
                    User user = userKv.getValue();
                    String userKey = userKv.getKey();
                    String userRef = userKv.getRef();

                    System.out.println(String.format("Read user key:%s, name:%s, ref:%s", userKey,
                            user.getName(), userRef));
                }
            });
    @SuppressWarnings("unchecked")
    final KvObject userKv = queue.poll(5000, TimeUnit.MILLISECONDS);

    assertNotNull(userKv);
}

From source file:com.wordnik.swaggersocket.server.SwaggerSocketProtocolInterceptor.java

private final void attachWriter(final AtmosphereResource r) {
    final AtmosphereRequest request = r.getRequest();

    AtmosphereResponse res = r.getResponse();
    AsyncIOWriter writer = res.getAsyncIOWriter();

    BlockingQueue<AtmosphereResource> queue = (BlockingQueue<AtmosphereResource>) getContextValue(request,
            SUSPENDED_RESPONSE);
    if (queue == null) {
        queue = new LinkedBlockingQueue<AtmosphereResource>();
        request.getSession().setAttribute(SUSPENDED_RESPONSE, queue);
    }

    if (AtmosphereInterceptorWriter.class.isAssignableFrom(writer.getClass())) {
        // WebSocket already had one.
        if (r.transport() != AtmosphereResource.TRANSPORT.WEBSOCKET) {
            writer = new AtmosphereInterceptorWriter() {

                @Override
                protected void writeReady(AtmosphereResponse response, byte[] data) throws IOException {

                    // We are buffering response.
                    if (data == null)
                        return;

                    BlockingQueue<AtmosphereResource> queue = (BlockingQueue<AtmosphereResource>) getContextValue(
                            request, SUSPENDED_RESPONSE);
                    if (queue != null) {
                        AtmosphereResource resource;
                        try {
                            // TODO: Should this be configurable
                            // We stay suspended for 60 seconds
                            resource = queue.poll(60, TimeUnit.SECONDS);
                        } catch (InterruptedException e) {
                            logger.trace("", e);
                            return;
                        }

                        if (resource == null) {
                            logger.debug("No resource was suspended, resuming the second connection.");
                        } else {

                            logger.trace("Resuming {}", resource.uuid());

                            try {
                                OutputStream o = resource.getResponse().getResponse().getOutputStream();
                                o.write(data);
                                o.flush();

                                resource.resume();
                            } catch (IOException ex) {
                                logger.warn("", ex);
                            }
                        }
                    } else {
                        logger.error("Queue was null");
                    }
                }

                /**
                 * Add an {@link AsyncIOInterceptor} that will be invoked in the order it was added.
                 *
                 * @param filter {@link AsyncIOInterceptor}
                 * @return this
                 */
                public AtmosphereInterceptorWriter interceptor(AsyncIOInterceptor filter) {
                    if (!filters.contains(filter)) {
                        filters.addLast(filter);
                    }
                    return this;
                }
            };
            res.asyncIOWriter(writer);
        }
        // REVISIT: need a better way to add a custom filter at the first entry rather than the last,
        // e.g. interceptor(AsyncIOInterceptor interceptor, int position)
        LinkedList<AsyncIOInterceptor> filters = AtmosphereInterceptorWriter.class.cast(writer).filters();
        if (!filters.contains(interceptor)) {
            filters.addFirst(interceptor);
        }
    }
}

From source file:io.orchestrate.client.itest.KvTest.java

@Theory
@org.junit.Ignore
public void getKeyWithListener(@ForAll(sampleSize = 10) final String key) throws InterruptedException {
    assumeThat(key, not(isEmptyString()));

    insertItem(key, "{}");

    final BlockingQueue<KvObject> queue = DataStructures.getLTQInstance(KvObject.class);
    final KvObject<String> object = client.kv(collection(), key).get(String.class)
            .on(new ResponseAdapter<KvObject<String>>() {
                @Override
                public void onFailure(final Throwable error) {
                    fail(error.getMessage());
                }

                @Override
                public void onSuccess(final KvObject<String> object) {
                    queue.add(object);
                }
            }).on(new ResponseAdapter<KvObject<String>>() {
                @Override
                public void onFailure(final Throwable error) {
                    fail(error.getMessage());
                }

                @Override
                public void onSuccess(final KvObject<String> object) {
                    queue.add(object);
                }
            }).get();

    @SuppressWarnings("unchecked")
    final KvObject result1 = queue.poll(5000, TimeUnit.MILLISECONDS);
    final KvObject result2 = queue.poll(5000, TimeUnit.MILLISECONDS);

    assertNotNull(result1);
    assertNotNull(result2);
    assertEquals(result1, result2);
}

From source file:com.jbrisbin.vpc.jobsched.SubmitClosure.java

@Override
public Object call(Object[] args) {
    log.debug("args: " + args);
    String exch = args[0].toString();
    String route = args[1].toString();
    final Object body = args[2];
    Map headers = null;

    final BlockingQueue<Object> resultsQueue = new LinkedBlockingQueue<Object>();
    Queue replyQueue = rabbitAdmin.declareQueue();
    SimpleMessageListenerContainer listener = new SimpleMessageListenerContainer();
    listener.setQueues(replyQueue);
    if (args.length > 3) {
        for (int i = 3; i < args.length; i++) {
            if (args[i] instanceof MessageListener) {
                MessageListener callback = (MessageListener) args[i];
                listener.setMessageListener(callback);
            } else if (args[i] instanceof Map) {
                headers = (Map) args[i];
            }
        }
    } else {
        listener.setMessageListener(new MessageListener() {
            public void onMessage(Message message) {
                byte[] body = message.getBody();
                try {
                    resultsQueue.add(mapper.readValue(body, 0, body.length, Map.class));
                } catch (IOException e) {
                    log.error(e.getMessage(), e);
                }
            }
        });
    }

    final Map msgHdrs = headers;
    rabbitTemplate.send(exch, route, new MessageCreator() {
        public Message createMessage() {
            MessageProperties props = new RabbitMessageProperties();
            props.setContentType("application/json");
            if (null != msgHdrs) {
                props.getHeaders().putAll(msgHdrs);
            }
            String uuid = UUID.randomUUID().toString();
            props.setCorrelationId(uuid.getBytes());
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            try {
                mapper.writeValue(out, body);
            } catch (IOException e) {
                log.error(e.getMessage(), e);
            }
            Message msg = new Message(out.toByteArray(), props);
            return msg;
        }
    });

    Object results = null;
    try {
        results = resultsQueue.poll(5, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        log.error(e.getMessage(), e);
    }
    listener.stop();

    return results;
}

From source file:at.salzburgresearch.vgi.vgianalyticsframework.activityanalysis.pipeline.impl.VgiPipelineImpl.java

/**
 * Starts the pipeline: creates producers, lets them read the VGI operations, receives the data, and sends the operations to a set of consumers.
 */
@Override
public void start() {
    timerStart = new Date();

    BlockingQueue<IVgiFeature> queue = new ArrayBlockingQueue<IVgiFeature>(queueSize);

    /** Create thread(s) which will read the PBF files */
    Thread[] producerThread = new Thread[numThreads];
    for (int i = 0; i < numThreads; i++) {

        producerName = (settings.isReadQuadtree()) ? "vgiOperationPbfReaderQuadtree" : "vgiOperationPbfReader";

        producer = ctx.getBean(producerName, IVgiAnalysisPipelineProducer.class);
        producer.setQueue(queue);
        producerThread[i] = new Thread(producer);

        producer.setProducerCount(numThreads);
        producer.setProducerNumber(i);
        producer.setPbfDataFolder((pbfDataFolder != null) ? pbfDataFolder : settings.getPbfDataFolder());

        producer.setFilterNodeId(filterNodeId);
        producer.setFilterWayId(filterWayId);
        producer.setFilterRelationId(filterRelationId);
        producer.setFilterGeometryType(filterGeometryType);
        producer.setConstrainedFilter(constrainedFilter);
        producer.setCoordinateOnly(coordinateOnly);

        producer.setFilterFileId(filterFileId);

        producerThread[i].start();
    }

    List<IVgiFeature> currentBatch = new ArrayList<IVgiFeature>();

    try {
        doBeforeFirstBatch();

        /** Read queue as long as it is not empty */
        boolean resumePipeline = true;
        while (resumePipeline) {
            resumePipeline = false;
            for (int i = 0; i < numThreads; i++) {
                if (producerThread[i].isAlive())
                    resumePipeline = true;
            }
            if (!queue.isEmpty())
                resumePipeline = true;
            if (!resumePipeline)
                break;

            IVgiFeature currentFeature = queue.poll(60, TimeUnit.MILLISECONDS);

            if (currentFeature == null)
                continue;

            /** Detach batch if minimum batch size is reached */
            if (currentBatch.size() >= batchSize) {
                detachBatch(currentBatch);
                currentBatch.clear();
            }

            currentBatch.add(currentFeature);
        }

        if (currentBatch.size() > 0) {
            detachBatch(currentBatch);
        }

        doAfterLastBatch();

    } catch (InterruptedException e) {
        log.error("error joining producer thread", e);
    }

    if (settings.getActionAnalyzerList() != null) {
        writeMetaData(settings.getResultFolder());
    }
}

From source file:com.indeed.lsmtree.recordcache.PersistentRecordCache.java

/**
 * Performs lookup for multiple keys and returns a streaming iterator to results.
 * Each element in the iterator is one of
 *  (1) an exception associated with a single lookup
 *  (2) a key-value tuple
 *
 * @param keys      lookup keys
 * @param progress  (optional) an AtomicInteger for tracking progress
 * @param skipped   (optional) an AtomicInteger for tracking missing keys
 * @return          iterator of lookup results
 */
public Iterator<Either<Exception, P2<K, V>>> getStreaming(final @Nonnull Iterator<K> keys,
        final @Nullable AtomicInteger progress, final @Nullable AtomicInteger skipped) {
    log.info("starting store lookups");
    LongArrayList addressList = new LongArrayList();
    int notFound = 0;
    while (keys.hasNext()) {
        final K key = keys.next();
        final Long address;
        try {
            address = index.get(key);
        } catch (IOException e) {
            log.error("error", e);
            return Iterators.singletonIterator(Left.<Exception, P2<K, V>>of(new IndexReadException(e)));
        }
        if (address != null) {
            addressList.add(address);
        } else {
            notFound++;
        }
    }
    if (progress != null)
        progress.addAndGet(notFound);
    if (skipped != null)
        skipped.addAndGet(notFound);
    log.info("store lookups complete, sorting addresses");

    final long[] addresses = addressList.elements();
    Arrays.sort(addresses, 0, addressList.size());

    log.info("initializing store lookup iterator");
    final BlockingQueue<Runnable> taskQueue = new ArrayBlockingQueue<Runnable>(100);
    final Iterator<List<Long>> iterable = Iterators.partition(addressList.iterator(), 1000);
    final ExecutorService primerThreads = new ThreadPoolExecutor(10, 10, 0L, TimeUnit.MILLISECONDS, taskQueue,
            new NamedThreadFactory("store priming thread", true, log), new RejectedExecutionHandler() {
                @Override
                public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
                    try {
                        taskQueue.put(r);
                    } catch (InterruptedException e) {
                        log.error("error", e);
                        throw new RuntimeException(e);
                    }
                }
            });
    final BlockingQueue<List<Either<Exception, P2<K, V>>>> completionQueue = new ArrayBlockingQueue<List<Either<Exception, P2<K, V>>>>(
            10);
    final AtomicLong runningTasks = new AtomicLong(0);
    final AtomicBoolean taskSubmitterRunning = new AtomicBoolean(true);

    new Thread(new Runnable() {
        @Override
        public void run() {
            while (iterable.hasNext()) {
                runningTasks.incrementAndGet();
                final List<Long> addressesSublist = iterable.next();
                primerThreads.submit(new FutureTask<List<Either<Exception, P2<K, V>>>>(
                        new RecordLookupTask(addressesSublist)) {
                    @Override
                    protected void done() {
                        try {
                            final List<Either<Exception, P2<K, V>>> results = get();
                            if (progress != null) {
                                progress.addAndGet(results.size());
                            }
                            completionQueue.put(results);
                        } catch (InterruptedException e) {
                            log.error("error", e);
                            throw new RuntimeException(e);
                        } catch (ExecutionException e) {
                            log.error("error", e);
                            throw new RuntimeException(e);
                        }
                    }
                });
            }
            taskSubmitterRunning.set(false);
        }
    }, "RecordLookupTaskSubmitterThread").start();

    return new Iterator<Either<Exception, P2<K, V>>>() {

        Iterator<Either<Exception, P2<K, V>>> currentIterator;

        @Override
        public boolean hasNext() {
            if (currentIterator != null && currentIterator.hasNext())
                return true;
            while (taskSubmitterRunning.get() || runningTasks.get() > 0) {
                try {
                    final List<Either<Exception, P2<K, V>>> list = completionQueue.poll(1, TimeUnit.SECONDS);
                    if (list != null) {
                        log.debug("remaining: " + runningTasks.decrementAndGet());
                        currentIterator = list.iterator();
                        if (currentIterator.hasNext())
                            return true;
                    }
                } catch (InterruptedException e) {
                    log.error("error", e);
                    throw new RuntimeException(e);
                }
            }
            primerThreads.shutdown();
            return false;
        }

        @Override
        public Either<Exception, P2<K, V>> next() {
            return currentIterator.next();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };
}

From source file:org.apache.solr.handler.dataimport.XPathEntityProcessor.java

private Iterator<Map<String, Object>> getRowIterator(final Reader data, final String s) {
    // nothing atomic about it; I just needed a strong reference holder
    final AtomicReference<Exception> exp = new AtomicReference<>();
    final BlockingQueue<Map<String, Object>> blockingQueue = new ArrayBlockingQueue<>(blockingQueueSize);
    final AtomicBoolean isEnd = new AtomicBoolean(false);
    final AtomicBoolean throwExp = new AtomicBoolean(true);
    publisherThread = new Thread() {
        @Override
        public void run() {
            try {
                xpathReader.streamRecords(data, (record, xpath) -> {
                    if (isEnd.get()) {
                        throwExp.set(false);
                        // End the streaming; otherwise the parsing will go on forever
                        // even though the consumer has gone away
                        throw new RuntimeException("BREAK");
                    }
                    Map<String, Object> row;
                    try {
                        row = readRow(record, xpath);
                    } catch (Exception e) {
                        isEnd.set(true);
                        return;
                    }
                    offer(row);
                });
            } catch (Exception e) {
                if (throwExp.get())
                    exp.set(e);
            } finally {
                closeIt(data);
                if (!isEnd.get()) {
                    offer(END_MARKER);
                }
            }
        }

        private void offer(Map<String, Object> row) {
            try {
                while (!blockingQueue.offer(row, blockingQueueTimeOut, blockingQueueTimeOutUnits)) {
                    if (isEnd.get())
                        return;
                    LOG.debug("Timeout elapsed writing records.  Perhaps buffer size should be increased.");
                }
            } catch (InterruptedException e) {
                return;
            } finally {
                synchronized (this) {
                    notifyAll();
                }
            }
        }
    };

    publisherThread.start();

    return new Iterator<Map<String, Object>>() {
        private Map<String, Object> lastRow;
        int count = 0;

        @Override
        public boolean hasNext() {
            return !isEnd.get();
        }

        @Override
        public Map<String, Object> next() {
            Map<String, Object> row;

            do {
                try {
                    row = blockingQueue.poll(blockingQueueTimeOut, blockingQueueTimeOutUnits);
                    if (row == null) {
                        LOG.debug("Timeout elapsed reading records.");
                    }
                } catch (InterruptedException e) {
                    LOG.debug("Caught InterruptedException while waiting for row.  Aborting.");
                    isEnd.set(true);
                    return null;
                }
            } while (row == null);

            if (row == END_MARKER) {
                isEnd.set(true);
                if (exp.get() != null) {
                    String msg = "Parsing failed for xml, url:" + s + " rows processed in this xml:" + count;
                    if (lastRow != null)
                        msg += " last row in this xml:" + lastRow;
                    if (ABORT.equals(onError)) {
                        wrapAndThrow(SEVERE, exp.get(), msg);
                    } else if (SKIP.equals(onError)) {
                        wrapAndThrow(DataImportHandlerException.SKIP, exp.get());
                    } else {
                        LOG.warn(msg, exp.get());
                    }
                }
                return null;
            }
            count++;
            return lastRow = row;
        }

        @Override
        public void remove() {
            /*no op*/
        }
    };

}

From source file:org.apache.solr.handler.dataimport.processor.XPathEntityProcessor.java

private Iterator<Map<String, Object>> getRowIterator(final Reader data, final String s) {
    // nothing atomic about it; I just needed a strong reference holder
    final AtomicReference<Exception> exp = new AtomicReference<Exception>();
    final BlockingQueue<Map<String, Object>> blockingQueue = new ArrayBlockingQueue<Map<String, Object>>(
            blockingQueueSize);
    final AtomicBoolean isEnd = new AtomicBoolean(false);
    final AtomicBoolean throwExp = new AtomicBoolean(true);
    publisherThread = new Thread() {
        @Override
        public void run() {
            try {
                xpathReader.streamRecords(data, new XPathRecordReader.Handler() {
                    @Override
                    @SuppressWarnings("unchecked")
                    public void handle(Map<String, Object> record, String xpath) {
                        if (isEnd.get()) {
                            throwExp.set(false);
                            // End the streaming; otherwise the parsing will go on forever
                            // even though the consumer has gone away
                            throw new RuntimeException("BREAK");
                        }
                        Map<String, Object> row;
                        try {
                            row = readRow(record, xpath);
                        } catch (final Exception e) {
                            isEnd.set(true);
                            return;
                        }
                        offer(row);
                    }
                });
            } catch (final Exception e) {
                if (throwExp.get())
                    exp.set(e);
            } finally {
                closeIt(data);
                if (!isEnd.get()) {
                    offer(END_MARKER);
                }
            }
        }

        private void offer(Map<String, Object> row) {
            try {
                while (!blockingQueue.offer(row, blockingQueueTimeOut, blockingQueueTimeOutUnits)) {
                    if (isEnd.get())
                        return;
                    LOG.debug("Timeout elapsed writing records.  Perhaps buffer size should be increased.");
                }
            } catch (final InterruptedException e) {
                return;
            } finally {
                synchronized (this) {
                    notifyAll();
                }
            }
        }
    };

    publisherThread.start();

    return new Iterator<Map<String, Object>>() {
        private Map<String, Object> lastRow;
        int count = 0;

        @Override
        public boolean hasNext() {
            return !isEnd.get();
        }

        @Override
        public Map<String, Object> next() {
            Map<String, Object> row;

            do {
                try {
                    row = blockingQueue.poll(blockingQueueTimeOut, blockingQueueTimeOutUnits);
                    if (row == null) {
                        LOG.debug("Timeout elapsed reading records.");
                    }
                } catch (final InterruptedException e) {
                    LOG.debug("Caught InterruptedException while waiting for row.  Aborting.");
                    isEnd.set(true);
                    return null;
                }
            } while (row == null);

            if (row == END_MARKER) {
                isEnd.set(true);
                if (exp.get() != null) {
                    String msg = "Parsing failed for xml, url:" + s + " rows processed in this xml:" + count;
                    if (lastRow != null)
                        msg += " last row in this xml:" + lastRow;
                    if (ABORT.equals(onError)) {
                        wrapAndThrow(SEVERE, exp.get(), msg);
                    } else if (SKIP.equals(onError)) {
                        wrapAndThrow(DataImportHandlerException.SKIP, exp.get());
                    } else {
                        LOG.warn(msg, exp.get());
                    }
                }
                return null;
            }
            count++;
            return lastRow = row;
        }

        @Override
        public void remove() {
            /*no op*/
        }
    };

}