List of usage examples for java.util.concurrent.BlockingQueue.put()
void put(E e) throws InterruptedException;

Inserts the specified element into this queue, waiting if necessary for space to become available. The call throws InterruptedException if the thread is interrupted while waiting.
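Before the project examples below, a minimal, self-contained sketch (not taken from any of the projects on this page; all names are illustrative) shows the blocking behavior: with a bounded ArrayBlockingQueue, put() waits until a consumer frees space.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class PutExample {
    public static void main(String[] args) throws InterruptedException {
        // A bounded queue with capacity 2: the third put() blocks until a consumer takes an element.
        BlockingQueue<String> queue = new ArrayBlockingQueue<>(2);

        Thread consumer = new Thread(() -> {
            try {
                while (true) {
                    // take() blocks until an element is available.
                    System.out.println("consumed: " + queue.take());
                }
            } catch (InterruptedException e) {
                // Restore the interrupt status and exit.
                Thread.currentThread().interrupt();
            }
        });
        consumer.setDaemon(true);
        consumer.start();

        for (int i = 0; i < 5; i++) {
            queue.put("item-" + i); // blocks whenever the queue is full
            System.out.println("produced: item-" + i);
        }
    }
}

Running it, the producer stalls briefly whenever the queue already holds two elements and resumes as the consumer drains them.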
From source file:org.apache.pig.impl.streaming.ExecutableManager.java
private void sendOutput(BlockingQueue<Result> binaryOutputQueue, Result res) {
    try {
        binaryOutputQueue.put(res);
    } catch (InterruptedException e) {
        LOG.error("Error while sending binary output to POStream", e);
    }
    synchronized (poStream) {
        // notify waiting consumer
        // the if is to satisfy "findbugs"
        if (res != null) {
            poStream.notifyAll();
        }
    }
}
From source file:com.netflix.curator.framework.imps.TestFrameworkEdges.java
@Test
public void testMissedResponseOnBackgroundESCreate() throws Exception {
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1));
    client.start();
    try {
        CreateBuilderImpl createBuilder = (CreateBuilderImpl) client.create();
        createBuilder.failNextCreateForTesting = true;

        final BlockingQueue<String> queue = Queues.newArrayBlockingQueue(1);
        BackgroundCallback callback = new BackgroundCallback() {
            @Override
            public void processResult(CuratorFramework client, CuratorEvent event) throws Exception {
                queue.put(event.getPath());
            }
        };
        createBuilder.withProtection().withMode(CreateMode.EPHEMERAL_SEQUENTIAL).inBackground(callback)
                .forPath("/");

        String ourPath = queue.poll(10, TimeUnit.SECONDS);
        Assert.assertTrue(ourPath.startsWith(ZKPaths.makePath("/", CreateBuilderImpl.PROTECTED_PREFIX)));
        Assert.assertFalse(createBuilder.failNextCreateForTesting);
    } finally {
        IOUtils.closeQuietly(client);
    }
}
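The test above illustrates a common pattern: an asynchronous callback delivers its result by calling put(), and the test thread waits with poll(timeout) instead of sleeping or busy-waiting. A generic sketch of the same idea (using CompletableFuture as a stand-in for the asynchronous API; nothing here is Curator-specific and all names are illustrative):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class AsyncCallbackBridge {
    public static void main(String[] args) throws InterruptedException {
        BlockingQueue<String> results = new LinkedBlockingQueue<>();

        // Some asynchronous API delivers its result on another thread;
        // the callback simply put()s it into the queue.
        CompletableFuture.supplyAsync(() -> "/base/child-0000000001")
                .thenAccept(path -> {
                    try {
                        results.put(path);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                });

        // The calling thread waits with a timeout instead of busy-waiting.
        String path = results.poll(10, TimeUnit.SECONDS);
        System.out.println(path != null ? "got " + path : "timed out");
    }
}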
From source file:com.all.backend.web.services.LocalPushService.java
public void pushMessage(long mail, AllMessage<?> message) {
    BlockingQueue<AllMessage<?>> queue;
    synchronized (messages) {
        queue = messages.get(mail);
        if (queue == null) {
            queue = new LinkedBlockingQueue<AllMessage<?>>();
            messages.put(mail, queue);
        }
        try {
            // log.info("pushing message to " + mail + " content:\n" + message);
            queue.put(message);
        } catch (InterruptedException e) {
            log.error(e, e);
        }
    }
}
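This example lazily creates one unbounded LinkedBlockingQueue per recipient inside a synchronized block; because an unbounded queue never blocks on put(), the call returns immediately even while the monitor is held. On Java 8+ the same per-key pattern can be sketched without an explicit lock via ConcurrentHashMap.computeIfAbsent (a sketch with illustrative names, not the project's actual code):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;

public class PerKeyQueues {
    private final ConcurrentMap<Long, BlockingQueue<String>> messages = new ConcurrentHashMap<>();

    public void pushMessage(long mail, String message) throws InterruptedException {
        // computeIfAbsent creates the per-recipient queue exactly once, without an explicit lock.
        BlockingQueue<String> queue = messages.computeIfAbsent(mail, k -> new LinkedBlockingQueue<>());
        // An unbounded LinkedBlockingQueue never blocks on put(), so this returns immediately.
        queue.put(message);
    }

    public static void main(String[] args) throws InterruptedException {
        PerKeyQueues pq = new PerKeyQueues();
        pq.pushMessage(42L, "hello");
        System.out.println(pq.messages.get(42L).take());
    }
}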
From source file:com.netflix.curator.framework.imps.TestFramework.java
@Test
public void testNamespaceWithWatcher() throws Exception {
    CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory.builder();
    CuratorFramework client = builder.connectString(server.getConnectString()).namespace("aisa")
            .retryPolicy(new RetryOneTime(1)).build();
    client.start();
    try {
        final BlockingQueue<String> queue = new LinkedBlockingQueue<String>();
        Watcher watcher = new Watcher() {
            @Override
            public void process(WatchedEvent event) {
                try {
                    queue.put(event.getPath());
                } catch (InterruptedException e) {
                    throw new Error(e);
                }
            }
        };
        client.create().forPath("/base");
        client.getChildren().usingWatcher(watcher).forPath("/base");
        client.create().forPath("/base/child");

        String path = queue.take();
        Assert.assertEquals(path, "/base");
    } finally {
        client.close();
    }
}
From source file:com.netflix.curator.framework.imps.TestFramework.java
@Test
public void testNamespaceInBackground() throws Exception {
    CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory.builder();
    CuratorFramework client = builder.connectString(server.getConnectString()).namespace("aisa")
            .retryPolicy(new RetryOneTime(1)).build();
    client.start();
    try {
        final BlockingQueue<String> queue = new LinkedBlockingQueue<String>();
        CuratorListener listener = new CuratorListener() {
            @Override
            public void eventReceived(CuratorFramework client, CuratorEvent event) throws Exception {
                if (event.getType() == CuratorEventType.EXISTS) {
                    queue.put(event.getPath());
                }
            }
        };
        client.getCuratorListenable().addListener(listener);

        client.create().forPath("/base");
        client.checkExists().inBackground().forPath("/base");

        String path = queue.poll(10, TimeUnit.SECONDS);
        Assert.assertEquals(path, "/base");

        client.getCuratorListenable().removeListener(listener);

        BackgroundCallback callback = new BackgroundCallback() {
            @Override
            public void processResult(CuratorFramework client, CuratorEvent event) throws Exception {
                queue.put(event.getPath());
            }
        };
        client.getChildren().inBackground(callback).forPath("/base");

        path = queue.poll(10, TimeUnit.SECONDS);
        Assert.assertEquals(path, "/base");
    } finally {
        client.close();
    }
}
From source file:annis.gui.resultfetch.ResultFetchJob.java
@Override
public void run() {
    WebResource subgraphRes = Helper.getAnnisWebResource().path("query/search/subgraph");

    // holds the ids of the matches.
    MatchGroup result;

    try {
        if (Thread.interrupted()) {
            return;
        }

        // set the progress bar, to give the user some information about the loading process
        ui.accessSynchronously(new Runnable() {
            @Override
            public void run() {
                resultPanel.showMatchSearchInProgress(query);
            }
        });

        // get the matches
        result = futureMatches.get();

        // get the subgraph for each match, when the result is not empty
        if (result.getMatches().isEmpty()) {
            // check if thread was interrupted
            if (Thread.interrupted()) {
                return;
            }

            // nothing found, so inform the user about this.
            ui.access(new Runnable() {
                @Override
                public void run() {
                    resultPanel.showNoResult();
                }
            });
        } else {
            if (Thread.interrupted()) {
                return;
            }

            // since annis found something, inform the user that subgraphs are created
            ui.access(new Runnable() {
                @Override
                public void run() {
                    resultPanel.showSubgraphSearchInProgress(query, 0.0f);
                }
            });

            // prepare fetching subgraphs
            final int totalResultSize = result.getMatches().size();
            final BlockingQueue<SaltProject> queue = new ArrayBlockingQueue<>(totalResultSize);
            int current = 0;

            for (Match m : result.getMatches()) {
                if (Thread.interrupted()) {
                    return;
                }

                List<Match> subList = new LinkedList<>();
                subList.add(m);
                final SaltProject p = executeQuery(subgraphRes, new MatchGroup(subList),
                        query.getLeftContext(), query.getRightContext(), query.getSegmentation(),
                        SubgraphFilter.all);

                queue.put(p);
                log.debug("added match {} to queue", current + 1);

                if (current == 0) {
                    PollControl.changePollingTime(ui, PollControl.DEFAULT_TIME);
                    ui.access(new Runnable() {
                        @Override
                        public void run() {
                            resultPanel.setQueryResultQueue(queue, query, totalResultSize);
                        }
                    });
                }

                if (Thread.interrupted()) {
                    return;
                }
                current++;
            }
        } // end if no results
    } catch (InterruptedException ex) {
        // just return
    } catch (final ExecutionException root) {
        ui.accessSynchronously(new Runnable() {
            @Override
            public void run() {
                if (resultPanel != null && resultPanel.getPaging() != null) {
                    PagingComponent paging = resultPanel.getPaging();
                    Throwable cause = root.getCause();
                    if (cause instanceof UniformInterfaceException) {
                        UniformInterfaceException ex = (UniformInterfaceException) cause;
                        if (ex.getResponse().getStatus() == 400) {
                            List<AqlParseError> errors = ex.getResponse()
                                    .getEntity(new GenericType<List<AqlParseError>>() {
                                    });
                            String errMsg = Joiner.on(" | ").join(errors);
                            paging.setInfo("parsing error: " + errMsg);
                        } else if (ex.getResponse().getStatus() == 504) {
                            paging.setInfo("Timeout: query exeuction took too long");
                        } else if (ex.getResponse().getStatus() == 403) {
                            paging.setInfo("Not authorized to query this corpus.");
                        } else {
                            paging.setInfo("unknown error: " + ex);
                        }
                    } else {
                        log.error("Unexcepted ExecutionException cause", root);
                    }
                    resultPanel.showFinishedSubgraphSearch();
                }
            }
        });
    } // end catch
}
From source file:gov.nasa.ensemble.resources.TestProjectProperties.java
@Test
public void writing() throws CoreException, IOException, InterruptedException {
    final String key = "key", val1 = "val1", val2 = "val2";
    final IFile propFile = ProjectProperties.propFile(file, key);
    final ProjectProperties props = projProps(file);
    assertTrue(props.get(key).isNone());
    assertFalse(propFile.exists());

    final BlockingQueue<Integer> queue = new LinkedBlockingQueue<Integer>();
    ResourcesPlugin.getWorkspace().addResourceChangeListener(new IResourceChangeListener() {
        @Override
        public void resourceChanged(IResourceChangeEvent event) {
            if (event.getDelta() != null) {
                try {
                    event.getDelta().accept(new IResourceDeltaVisitor() {
                        @Override
                        public boolean visit(IResourceDelta delta) {
                            if (propFile.equals(delta.getResource()))
                                try {
                                    queue.put(delta.getKind());
                                } catch (InterruptedException e) {
                                    LogUtil.error(e);
                                }
                            return true;
                        }
                    });
                } catch (CoreException e) {
                    throw new RuntimeException(e);
                }
            }
        }
    });

    final long timeout = 1000;

    // create
    props.set(key, val1);
    assertEquals(ADDED, (int) queue.poll(timeout, MILLISECONDS));
    assertTrue("Property file does not exist after setting property in memory", propFile.exists());
    assertEquals(val1, getStringContents(propFile));

    // update
    props.set(key, val2);
    assertEquals(CHANGED, (int) queue.poll(timeout, MILLISECONDS));
    assertEquals("Property file does not exist after updating property in memory", val2,
            getStringContents(propFile));

    // delete
    props.unset(key);
    assertEquals(REMOVED, (int) queue.poll(timeout, MILLISECONDS));
    assertFalse("Property file still exists after unsetting property in memory", propFile.exists());
}
From source file:com.splout.db.qnode.QNodeHandlerContext.java
/**
 * This method can be called to initialize a pool of connections to a dnode. This method may be called from
 * multiple threads so it should be safe to call it concurrently.
 */
public void initializeThriftClientCacheFor(String dnode) throws TTransportException, InterruptedException {
    // this lock is on the whole cache but we would actually be interested in a per-DNode lock...
    // there's only one lock for simplicity.
    thriftClientCacheLock.lock();
    try {
        // initialize queue for this DNode
        BlockingQueue<DNodeService.Client> dnodeQueue = thriftClientCache.get(dnode);
        if (dnodeQueue == null) {
            // this assures that the per-DNode queue is only created once and then reused.
            dnodeQueue = new LinkedBlockingDeque<DNodeService.Client>(thriftClientPoolSize);
        }
        if (dnodeQueue.isEmpty()) {
            try {
                for (int i = dnodeQueue.size(); i < thriftClientPoolSize; i++) {
                    dnodeQueue.put(DNodeClient.get(dnode));
                }
                // we only put the queue if all connections have been populated
                thriftClientCache.put(dnode, dnodeQueue);
            } catch (TTransportException e) {
                log.error("Error while trying to populate queue for " + dnode
                        + ", will discard created connections.", e);
                while (!dnodeQueue.isEmpty()) {
                    dnodeQueue.poll().getOutputProtocol().getTransport().close();
                }
                throw e;
            }
        } else {
            // it should be safe to call this method from different places concurrently
            // so we contemplate the case where another Thread already populated the queue
            // and only populate it if it's really empty.
            log.warn(Thread.currentThread().getName() + " : queue for [" + dnode
                    + "] is not empty - it was populated before.");
        }
    } finally {
        thriftClientCacheLock.unlock();
    }
}
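Here a bounded BlockingQueue doubles as a connection pool: it is pre-filled with put() up to a fixed capacity, and clients later borrow and return connections. A minimal sketch of that pooling idea, with a toy Connection class standing in for the Thrift client (names and sizes are illustrative, not the project's API):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;

// A toy connection type standing in for a real client (e.g. a Thrift client).
class Connection {
    private final int id;
    Connection(int id) { this.id = id; }
    @Override public String toString() { return "connection-" + id; }
}

public class SimplePool {
    private final BlockingQueue<Connection> pool;

    SimplePool(int size) throws InterruptedException {
        pool = new LinkedBlockingDeque<>(size);
        // Pre-populate the pool; put() would block if we exceeded the capacity.
        for (int i = 0; i < size; i++) {
            pool.put(new Connection(i));
        }
    }

    Connection borrow(long timeout, TimeUnit unit) throws InterruptedException {
        return pool.poll(timeout, unit); // null if the pool stays empty for the whole timeout
    }

    void giveBack(Connection c) throws InterruptedException {
        pool.put(c);
    }

    public static void main(String[] args) throws InterruptedException {
        SimplePool p = new SimplePool(2);
        Connection c = p.borrow(1, TimeUnit.SECONDS);
        System.out.println("borrowed " + c);
        p.giveBack(c);
    }
}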
From source file:com.nridge.connector.common.con_com.crawl.CrawlQueue.java
/**
 * Places the queue item marker into the queue identified by queue name.
 *
 * @param aQueueName Queue name (e.g. extract, transform, publish)
 * @param aQueueItemMarker Queue item marker (e.g. NSD-CrawlFinish)
 */
public void putMarkerIntoQueue(String aQueueName, String aQueueItemMarker) {
    Logger appLogger = mAppMgr.getLogger(this, "putMarkerIntoQueue");

    appLogger.trace(mAppMgr.LOGMSG_TRACE_ENTER);

    BlockingQueue blockingQueue = (BlockingQueue) mAppMgr.getProperty(aQueueName);
    if (blockingQueue == null)
        appLogger.error(String.format("Queue name '%s' from AppMgr is null.", aQueueName));
    else if (!isQueueItemMarker(aQueueItemMarker))
        appLogger.error(
                String.format("Queue marker '%s' is not valid - cannot put in queue.", aQueueItemMarker));
    else {
        try {
            blockingQueue.put(aQueueItemMarker);
        } catch (InterruptedException e) {
            // Restore the interrupted status so parent can handle (if it wants to).
            Thread.currentThread().interrupt();
        }
        appLogger.debug(
                String.format("Queue '%s' had a marker of '%s' placed into it.", aQueueName, aQueueItemMarker));
    }

    appLogger.trace(mAppMgr.LOGMSG_TRACE_DEPART);
}
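Note the interrupt handling: put() throws InterruptedException and clears the thread's interrupt flag, so a method that swallows the exception should restore the flag with Thread.currentThread().interrupt() so callers can still react to the interruption. A small sketch of that idiom (illustrative names only):

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

public class InterruptFriendlyProducer {
    private final BlockingQueue<String> queue = new LinkedBlockingQueue<>();

    // A void method that cannot (or does not want to) declare InterruptedException.
    public void enqueue(String item) {
        try {
            queue.put(item);
        } catch (InterruptedException e) {
            // put() cleared the thread's interrupt flag when it threw;
            // re-set it so callers further up the stack can notice the interruption.
            Thread.currentThread().interrupt();
        }
    }

    public static void main(String[] args) {
        InterruptFriendlyProducer p = new InterruptFriendlyProducer();
        Thread.currentThread().interrupt();   // simulate an interrupt arriving before the call
        p.enqueue("marker");
        System.out.println("interrupt flag preserved: " + Thread.currentThread().isInterrupted());
    }
}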
From source file:com.indeed.lsmtree.recordcache.PersistentRecordCache.java
/**
 * Performs lookup for multiple keys and returns a streaming iterator to results.
 * Each element in the iterator is one of
 * (1) an exception associated with a single lookup
 * (2) a key value tuple
 *
 * @param keys lookup keys
 * @param progress (optional) an AtomicInteger for tracking progress
 * @param skipped (optional) an AtomicInteger for tracking missing keys
 * @return iterator of lookup results
 */
public Iterator<Either<Exception, P2<K, V>>> getStreaming(final @Nonnull Iterator<K> keys,
        final @Nullable AtomicInteger progress, final @Nullable AtomicInteger skipped) {
    log.info("starting store lookups");
    LongArrayList addressList = new LongArrayList();
    int notFound = 0;
    while (keys.hasNext()) {
        final K key = keys.next();
        final Long address;
        try {
            address = index.get(key);
        } catch (IOException e) {
            log.error("error", e);
            return Iterators.singletonIterator(Left.<Exception, P2<K, V>>of(new IndexReadException(e)));
        }
        if (address != null) {
            addressList.add(address);
        } else {
            notFound++;
        }
    }
    if (progress != null)
        progress.addAndGet(notFound);
    if (skipped != null)
        skipped.addAndGet(notFound);
    log.info("store lookups complete, sorting addresses");

    final long[] addresses = addressList.elements();
    Arrays.sort(addresses, 0, addressList.size());

    log.info("initializing store lookup iterator");

    final BlockingQueue<Runnable> taskQueue = new ArrayBlockingQueue<Runnable>(100);
    final Iterator<List<Long>> iterable = Iterators.partition(addressList.iterator(), 1000);
    final ExecutorService primerThreads = new ThreadPoolExecutor(10, 10, 0L, TimeUnit.MILLISECONDS, taskQueue,
            new NamedThreadFactory("store priming thread", true, log), new RejectedExecutionHandler() {
                @Override
                public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
                    try {
                        taskQueue.put(r);
                    } catch (InterruptedException e) {
                        log.error("error", e);
                        throw new RuntimeException(e);
                    }
                }
            });
    final BlockingQueue<List<Either<Exception, P2<K, V>>>> completionQueue =
            new ArrayBlockingQueue<List<Either<Exception, P2<K, V>>>>(10);
    final AtomicLong runningTasks = new AtomicLong(0);
    final AtomicBoolean taskSubmitterRunning = new AtomicBoolean(true);

    new Thread(new Runnable() {
        @Override
        public void run() {
            while (iterable.hasNext()) {
                runningTasks.incrementAndGet();
                final List<Long> addressesSublist = iterable.next();
                primerThreads.submit(new FutureTask<List<Either<Exception, P2<K, V>>>>(
                        new RecordLookupTask(addressesSublist)) {
                    @Override
                    protected void done() {
                        try {
                            final List<Either<Exception, P2<K, V>>> results = get();
                            if (progress != null) {
                                progress.addAndGet(results.size());
                            }
                            completionQueue.put(results);
                        } catch (InterruptedException e) {
                            log.error("error", e);
                            throw new RuntimeException(e);
                        } catch (ExecutionException e) {
                            log.error("error", e);
                            throw new RuntimeException(e);
                        }
                    }
                });
            }
            taskSubmitterRunning.set(false);
        }
    }, "RecordLookupTaskSubmitterThread").start();

    return new Iterator<Either<Exception, P2<K, V>>>() {
        Iterator<Either<Exception, P2<K, V>>> currentIterator;

        @Override
        public boolean hasNext() {
            if (currentIterator != null && currentIterator.hasNext())
                return true;
            while (taskSubmitterRunning.get() || runningTasks.get() > 0) {
                try {
                    final List<Either<Exception, P2<K, V>>> list = completionQueue.poll(1, TimeUnit.SECONDS);
                    if (list != null) {
                        log.debug("remaining: " + runningTasks.decrementAndGet());
                        currentIterator = list.iterator();
                        if (currentIterator.hasNext())
                            return true;
                    }
                } catch (InterruptedException e) {
                    log.error("error", e);
                    throw new RuntimeException(e);
                }
            }
            primerThreads.shutdown();
            return false;
        }

        @Override
        public Either<Exception, P2<K, V>> next() {
            return currentIterator.next();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }
    };
}
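The RejectedExecutionHandler above is the interesting use of put(): when the executor's bounded work queue is full, the handler calls taskQueue.put(r) from the submitting thread, which blocks until space frees up instead of throwing RejectedExecutionException, effectively throttling the submitter. A reduced sketch of that technique (pool sizes and task bodies are illustrative; note the approach has caveats, e.g. a task re-queued after shutdown may never run):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BlockingSubmitExample {
    public static void main(String[] args) throws InterruptedException {
        // Small bounded queue so submissions outrun the workers quickly.
        final BlockingQueue<Runnable> workQueue = new ArrayBlockingQueue<>(2);

        ThreadPoolExecutor pool = new ThreadPoolExecutor(2, 2, 0L, TimeUnit.MILLISECONDS, workQueue,
                (r, executor) -> {
                    try {
                        // Instead of throwing RejectedExecutionException, block the
                        // submitting thread until the work queue has room again.
                        workQueue.put(r);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        throw new RuntimeException(e);
                    }
                });

        for (int i = 0; i < 20; i++) {
            final int task = i;
            pool.submit(() -> {
                try {
                    Thread.sleep(50); // simulate slow work
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
                System.out.println("done " + task);
            });
        }
        pool.shutdown();
        pool.awaitTermination(10, TimeUnit.SECONDS);
    }
}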