List of usage examples for java.util.concurrent.BlockingQueue.poll
E poll();
Retrieves and removes the head of the queue, returning null immediately if the queue is empty. The timed overload, E poll(long timeout, TimeUnit unit), waits up to the given time for an element to become available before returning null.
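Before the project examples, a minimal standalone sketch (not taken from any of the projects below; class and variable names are illustrative) showing how poll() and poll(timeout, unit) behave on an empty versus non-empty queue:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class PollSketch {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> queue = new LinkedBlockingQueue<>();
        queue.offer("first");

        // poll() removes and returns the head right away, or null if the queue is empty.
        String head = queue.poll();   // "first"
        String empty = queue.poll();  // null, the queue is now empty

        // poll(timeout, unit) waits up to the timeout for an element, then returns null.
        String timedOut = queue.poll(100, TimeUnit.MILLISECONDS); // null after ~100 ms

        // take(), by contrast, would block indefinitely until an element arrives.
        System.out.println(head + ", " + empty + ", " + timedOut);
    }
}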
From source file:fi.jumi.core.ipc.dirs.DirectoryObserverTest.java
private static List<Path> takeAtLeast(int count, BlockingQueue<Path> src) throws InterruptedException {
    List<Path> taken = new ArrayList<>();
    for (int i = 0; i < count; i++) {
        taken.add(src.take());
    }
    Path p;
    while ((p = src.poll()) != null) {
        taken.add(p);
    }
    return taken;
}
From source file:net.tomp2p.simgrid.SimGridTomP2P.java
public static SendingMessage getPendingMessag(Number160 senderID) throws InterruptedException {
    BlockingQueue<SendingMessage> queue = pendingMessages.get(senderID);
    if (queue == null) {
        queue = new LinkedBlockingQueue<SendingMessage>();
        pendingMessages.put(senderID, queue);
    }
    return queue.poll();
}
From source file:net.sf.jacclog.service.importer.internal.queue.LogFileQueueImporterObserver.java
@Override
public void added(final BlockingQueue<LogFile> queue, final LogFile file) {
    LOG.debug("Added file '" + file.getFile().getPath() + "' to queue.");
    executor.execute(new Runnable() {
        @Override
        public void run() {
            final LogFile file = queue.poll();
            service.importLogEntries(file);
        }
    });
}
From source file:com.nearinfinity.blur.thrift.AsyncClientPool.java
private TAsyncClient newClient(Class<?> c, Connection connection) throws InterruptedException {
    BlockingQueue<TAsyncClient> blockingQueue = getQueue(connection);
    TAsyncClient client = blockingQueue.poll();
    if (client != null) {
        return client;
    }
    AtomicInteger counter;
    synchronized (_numberOfConnections) {
        counter = _numberOfConnections.get(connection.getHost());
        if (counter == null) {
            counter = new AtomicInteger();
            _numberOfConnections.put(connection.getHost(), counter);
        }
    }
    synchronized (counter) {
        int numOfConnections = counter.get();
        while (numOfConnections >= _maxConnectionsPerHost) {
            client = blockingQueue.poll(_pollTime, TimeUnit.MILLISECONDS);
            if (client != null) {
                return client;
            }
            LOG.debug("Waiting for client number of connection [" + numOfConnections
                    + "], max connection per host [" + _maxConnectionsPerHost + "]");
            numOfConnections = counter.get();
        }
        LOG.info("Creating a new client for [" + connection + "]");
        String name = c.getName();
        Constructor<?> constructor = _constructorCache.get(name);
        if (constructor == null) {
            String clientClassName = name.replace("$AsyncIface", "$AsyncClient");
            try {
                Class<?> clazz = Class.forName(clientClassName);
                constructor = clazz.getConstructor(new Class[] { TProtocolFactory.class,
                        TAsyncClientManager.class, TNonblockingTransport.class });
                _constructorCache.put(name, constructor);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        try {
            TNonblockingSocket transport = newTransport(connection);
            client = (TAsyncClient) constructor
                    .newInstance(new Object[] { _protocolFactory, _clientManager, transport });
            client.setTimeout(_timeout);
            counter.incrementAndGet();
            return client;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
From source file:org.apache.camel.impl.DefaultServicePool.java
public synchronized Service acquire(Key key) {
    BlockingQueue<Service> services = pool.get(key);
    if (services == null || services.isEmpty()) {
        if (log.isTraceEnabled()) {
            log.trace("No free services in pool to acquire for key: " + key);
        }
        return null;
    }
    Service answer = services.poll();
    if (log.isTraceEnabled()) {
        log.trace("Acquire: " + key + " service: " + answer);
    }
    return answer;
}
From source file:ubic.gemma.core.apps.ExpressionExperimentDataFileGeneratorCli.java
@Override
protected Exception doWork(String[] args) {
    Exception exp = this.processCommandLine(args);
    if (exp != null) {
        return exp;
    }
    BlockingQueue<BioAssaySet> queue = new ArrayBlockingQueue<>(expressionExperiments.size());
    // Add the Experiments to the queue for processing
    for (BioAssaySet ee : expressionExperiments) {
        if (ee instanceof ExpressionExperiment) {
            try {
                queue.put(ee);
            } catch (InterruptedException ie) {
                AbstractCLI.log.info(ie);
            }
        } else {
            throw new UnsupportedOperationException("Can't handle non-EE BioAssaySets yet");
        }
    }

    // Inner class for processing the experiments
    class Worker extends Thread {
        private SecurityContext context;
        private BlockingQueue<BioAssaySet> q;

        private Worker(BlockingQueue<BioAssaySet> q, SecurityContext context) {
            this.context = context;
            this.q = q;
        }

        @Override
        public void run() {
            SecurityContextHolder.setContext(this.context);
            while (true) {
                BioAssaySet ee = q.poll();
                if (ee == null) {
                    break;
                }
                AbstractCLI.log.info("Processing Experiment: " + ee.getName());
                ExpressionExperimentDataFileGeneratorCli.this.processExperiment((ExpressionExperiment) ee);
            }
        }
    }

    final SecurityContext context = SecurityContextHolder.getContext();
    Collection<Thread> threads = new ArrayList<>();
    for (int i = 1; i <= this.numThreads; i++) {
        Worker worker = new Worker(queue, context);
        threads.add(worker);
        AbstractCLI.log.info("Starting thread " + i);
        worker.start();
    }
    this.waitForThreadPoolCompletion(threads);
    this.summarizeProcessing();
    return null;
}
From source file:ubic.gemma.apps.ExpressionExperimentDataFileGeneratorCli.java
@Override
protected Exception doWork(String[] args) {
    Exception exp = processCommandLine(DESCRIPTION, args);
    if (exp != null) {
        return exp;
    }
    BlockingQueue<BioAssaySet> queue = new ArrayBlockingQueue<BioAssaySet>(expressionExperiments.size());
    // Add the Experiments to the queue for processing
    for (BioAssaySet ee : expressionExperiments) {
        if (ee instanceof ExpressionExperiment) {
            try {
                queue.put(ee);
            } catch (InterruptedException ie) {
                log.info(ie);
            }
        } else {
            throw new UnsupportedOperationException("Can't handle non-EE BioAssaySets yet");
        }
    }

    // Inner class for processing the experiments
    class Worker extends Thread {
        BlockingQueue<BioAssaySet> q;
        SecurityContext context;

        Worker(BlockingQueue<BioAssaySet> q, SecurityContext context) {
            this.context = context;
            this.q = q;
        }

        @Override
        public void run() {
            SecurityContextHolder.setContext(this.context);
            while (true) {
                BioAssaySet ee = q.poll();
                if (ee == null) {
                    break;
                }
                log.info("Processing Experiment: " + ee.getName());
                processExperiment((ExpressionExperiment) ee);
            }
        }
    }

    final SecurityContext context = SecurityContextHolder.getContext();
    Collection<Thread> threads = new ArrayList<Thread>();
    for (int i = 1; i <= this.numThreads; i++) {
        Worker worker = new Worker(queue, context);
        threads.add(worker);
        log.info("Starting thread " + i);
        worker.start();
    }
    waitForThreadPoolCompletion(threads);
    summarizeProcessing();
    return null;
}
From source file:org.apache.reef.io.data.loading.impl.AbstractEvaluatorToPartitionStrategy.java
/**
 * Allocates the first available split into the evaluator.
 *
 * @param evaluatorId
 *          the evaluator id
 * @param value
 *          the queue of splits
 * @return a numberedSplit or null if it cannot find one
 */
protected NumberedSplit<InputSplit> allocateSplit(final String evaluatorId,
        final BlockingQueue<NumberedSplit<InputSplit>> value) {
    if (value == null) {
        LOG.log(Level.FINE, "Queue of splits can't be empty. Returning null");
        return null;
    }
    while (true) {
        final NumberedSplit<InputSplit> split = value.poll();
        if (split == null) {
            return null;
        }
        if (value == unallocatedSplits || unallocatedSplits.remove(split)) {
            LOG.log(Level.FINE, "Found split-" + split.getIndex() + " in the queue");
            final NumberedSplit<InputSplit> old = evaluatorToSplits.putIfAbsent(evaluatorId, split);
            if (old != null) {
                throw new RuntimeException(
                        "Trying to assign different splits to the same evaluator is not supported");
            } else {
                LOG.log(Level.FINE, "Returning " + split.getIndex());
                return split;
            }
        }
    }
}
From source file:ubic.gemma.core.loader.genome.gene.ncbi.NcbiGeneLoader.java
void doLoad(final BlockingQueue<Gene> geneQueue) {
    StopWatch timer = new StopWatch();
    timer.start();
    while (!(converterDone.get() && geneQueue.isEmpty())) {
        Gene gene = null;
        try {
            // the converted genes.
            gene = geneQueue.poll();
            if (gene == null) {
                continue;
            }
            persisterHelper.persistOrUpdate(gene);
            if (++loadedGeneCount % 1000 == 0 || timer.getTime() > 30 * 1000) {
                NcbiGeneLoader.log.info("Processed " + loadedGeneCount + " genes. Queue has " + geneQueue.size()
                        + " items; last gene: " + gene);
                timer.reset();
                timer.start();
            }
        } catch (Exception e) {
            NcbiGeneLoader.log.error("Error while loading gene: " + gene + ": " + e.getMessage(), e);
            loaderDone.set(true);
            throw new RuntimeException(e);
        }
    }
    NcbiGeneLoader.log.info("Loaded " + loadedGeneCount + " genes. ");
    loaderDone.set(true);
}
From source file:com.clickha.nifi.processors.FetchFileTransferV2.java
/**
 * Close connections that are idle or optionally close all connections.
 * Connections are considered "idle" if they have not been used in 10 seconds.
 *
 * @param closeNonIdleConnections if <code>true</code> will close all connections; if <code>false</code> will close only idle connections
 */
private void closeConnections(final boolean closeNonIdleConnections) {
    for (final Map.Entry<Tuple<String, Integer>, BlockingQueue<FileTransferIdleWrapper>> entry : fileTransferMap
            .entrySet()) {
        final BlockingQueue<FileTransferIdleWrapper> wrapperQueue = entry.getValue();
        final List<FileTransferIdleWrapper> putBack = new ArrayList<>();
        FileTransferIdleWrapper wrapper;
        while ((wrapper = wrapperQueue.poll()) != null) {
            final long lastUsed = wrapper.getLastUsed();
            final long nanosSinceLastUse = System.nanoTime() - lastUsed;
            if (!closeNonIdleConnections
                    && TimeUnit.NANOSECONDS.toMillis(nanosSinceLastUse) < IDLE_CONNECTION_MILLIS) {
                putBack.add(wrapper);
            } else {
                try {
                    wrapper.getFileTransfer().close();
                } catch (final IOException ioe) {
                    getLogger().warn("Failed to close Idle Connection due to {}", new Object[] { ioe }, ioe);
                }
            }
        }
        for (final FileTransferIdleWrapper toPutBack : putBack) {
            wrapperQueue.offer(toPutBack);
        }
    }
}