List of usage examples for java.util.concurrent.CompletionService.submit
Future<V> submit(Callable<V> task);
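CompletionService.submit hands the task to the wrapped Executor and returns its Future; when the task completes, that same Future is placed on an internal queue, so take() and poll() deliver results in completion order rather than submission order. A minimal, self-contained sketch of this workflow (the pool size, task count, and task bodies are illustrative, not drawn from the examples below):

import java.util.concurrent.*;

public class CompletionServiceDemo {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        CompletionService<Integer> cs = new ExecutorCompletionService<>(executor);

        // submit(Callable) returns a Future and also enqueues it internally upon completion
        for (int i = 0; i < 10; i++) {
            final int n = i;
            cs.submit(() -> n * n);
        }

        // take() yields futures in completion order, not submission order
        for (int i = 0; i < 10; i++) {
            System.out.println(cs.take().get());
        }
        executor.shutdown();
    }
}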
From source file: org.paxle.se.search.impl.SearchProviderManager.java

private void search(ISearchRequest request, ISearchResultCollector results)
        throws InterruptedException, ExecutionException, SearchException {
    if (request == null)
        throw new NullPointerException("The search-request object must not be null");

    final CompletionService<ISearchResult> execCompletionService =
            new ExecutorCompletionService<ISearchResult>(this.execService);

    // determining all search-providers that should be used for the query
    HashSet<String> allowedProviderPIDs = new HashSet<String>(request.getProviderIDs());

    // loop through all providers and pass the request to each one
    List<String> usedProviderPIDs = new ArrayList<String>();
    for (Entry<String, ServiceReference> providerEntry : this.providersRefs.entrySet()) {
        final String providerPID = providerEntry.getKey();
        final ServiceReference providerRef = providerEntry.getValue();

        if (allowedProviderPIDs.size() > 0 && !allowedProviderPIDs.contains(providerPID)) {
            this.logger.debug(String.format("SEProvider '%s' is skipped for search request '%d'.",
                    providerPID, Integer.valueOf(request.getRequestID())));
            continue;
        }

        usedProviderPIDs.add(providerPID);
        execCompletionService.submit(new SearchProviderCallable(this.ctx, providerRef, request));
    }

    if (allowedProviderPIDs.size() == 0) {
        // store the providers we have used to process the search-request
        request.setProviderIDs(usedProviderPIDs);
    }

    // loop through all providers and collect the results
    long searchTimeout = request.getTimeout();
    for (int i = 0; i < usedProviderPIDs.size(); ++i) {
        final long start = System.currentTimeMillis();

        // waiting for the next search result
        final Future<ISearchResult> future = execCompletionService.poll(searchTimeout, TimeUnit.MILLISECONDS);
        if (future != null) {
            final ISearchResult r = future.get();
            if (r != null) {
                final String providerPID = r.getProviderID();
                final int size = r.getSize();
                this.logger.debug(String.format(
                        "SEProvider '%s' returned '%d' results for search-request '%d'.",
                        providerPID, Integer.valueOf(size), Integer.valueOf(request.getRequestID())));
                results.collect(r);
            }
        }

        final long diff = System.currentTimeMillis() - start;
        if ((searchTimeout -= diff) <= 0)
            break;
    }
}
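The Paxle search method above treats one overall timeout as a shared budget: each poll() waits at most the remaining budget, and the elapsed time is subtracted afterwards, so a single slow provider cannot stall the whole request. A distilled sketch of that pattern under illustrative names (collectWithinBudget is not part of the Paxle source):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;

static <T> List<T> collectWithinBudget(CompletionService<T> cs, int taskCount, long budgetMillis)
        throws InterruptedException, ExecutionException {
    List<T> collected = new ArrayList<>();
    long remaining = budgetMillis;
    for (int i = 0; i < taskCount && remaining > 0; i++) {
        long start = System.currentTimeMillis();
        // poll with a timeout so one slow task cannot consume the whole budget
        Future<T> f = cs.poll(remaining, TimeUnit.MILLISECONDS);
        if (f == null) break; // budget exhausted before the next task finished
        collected.add(f.get());
        remaining -= System.currentTimeMillis() - start;
    }
    return collected;
}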
From source file: org.paxle.filter.robots.impl.RobotsTxtManager.java

/**
 * Checks a list of {@link URI URIs} against the robots.txt files of the servers hosting them.
 *
 * @param urlList a list of {@link URI URIs} to check
 * @return all {@link URI URIs} that are blocked by their servers
 */
public List<URI> isDisallowed(Collection<URI> urlList) {
    if (urlList == null)
        throw new NullPointerException("The URI-list is null.");

    // group the URL list based on hostname:port
    HashMap<URI, List<URI>> uriBlocks = this.groupURI(urlList);
    ArrayList<URI> disallowedURI = new ArrayList<URI>();

    /*
     * Asynchronous execution and parallel check of all blocks
     */
    final CompletionService<Collection<URI>> execCompletionService =
            new ExecutorCompletionService<Collection<URI>>(this.execService);

    // loop through the blocks and start a worker for each block
    for (Entry<URI, List<URI>> uriBlock : uriBlocks.entrySet()) {
        URI baseUri = uriBlock.getKey();
        List<URI> uriList = uriBlock.getValue();
        execCompletionService.submit(new RobotsTxtManagerCallable(baseUri, uriList));
    }

    // wait for the worker-threads to finish execution
    for (int i = 0; i < uriBlocks.size(); ++i) {
        try {
            Collection<URI> disallowedInGroup = execCompletionService.take().get();
            if (disallowedInGroup != null) {
                disallowedURI.addAll(disallowedInGroup);
            }
        } catch (InterruptedException e) {
            this.logger.info("Interruption detected while waiting for robots.txt-check result.");
            // XXX should we break here?
        } catch (ExecutionException e) {
            this.logger.error(String.format("Unexpected '%s' while performing robots.txt check.",
                    e.getClass().getName()), e);
        }
    }

    return disallowedURI;
}
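In contrast to the timeout-based collection above, this method drains results with the blocking take() and catches failures per worker, so one failed robots.txt check does not abort the others. A distilled sketch of that error-tolerant draining pattern, with illustrative names (drainIgnoringFailures is not from the Paxle source):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;

static <T> List<T> drainIgnoringFailures(CompletionService<T> cs, int submittedCount)
        throws InterruptedException {
    List<T> collected = new ArrayList<>();
    for (int i = 0; i < submittedCount; i++) {
        try {
            T result = cs.take().get(); // blocks until the next worker finishes
            if (result != null) {
                collected.add(result);
            }
        } catch (ExecutionException e) {
            // log and keep draining: the remaining futures must still be consumed
        }
    }
    return collected;
}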
From source file: com.amazon.janusgraph.diskstorage.dynamodb.DynamoDBDelegate.java

public void parallelMutate(List<MutateWorker> workers) throws BackendException {
    CompletionService<Void> completion = new ExecutorCompletionService<>(clientThreadPool);
    List<Future<Void>> futures = Lists.newLinkedList();
    for (MutateWorker worker : workers) {
        futures.add(completion.submit(worker));
    }

    // block on the futures all getting or throwing instead of using a latch,
    // as the future status needs to be checked anyway
    boolean interrupted = false;
    try {
        for (int i = 0; i < workers.size(); i++) {
            try {
                completion.take().get(); // Void
            } catch (InterruptedException e) {
                interrupted = true;
                // fail out because janusgraph does not poll this thread for interrupted anywhere
                throw new BackendRuntimeException("was interrupted during parallelMutate");
            } catch (ExecutionException e) {
                throw unwrapExecutionException(e, MUTATE_ITEM);
            }
        }
    } finally {
        for (Future<Void> future : futures) {
            if (!future.isDone()) {
                future.cancel(interrupted /* mayInterruptIfRunning */);
            }
        }
        if (interrupted) {
            // set interrupted on this thread
            Thread.currentThread().interrupt();
        }
    }
}
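parallelMutate also shows a cleanup idiom worth isolating: the Futures returned by submit are retained so that, when draining fails or is interrupted, any work still pending can be cancelled in the finally block. A distilled sketch under illustrative names (runAllOrCancel is not part of the DynamoDB source):

import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.*;

static <T> void runAllOrCancel(CompletionService<T> cs, List<? extends Callable<T>> tasks)
        throws InterruptedException, ExecutionException {
    List<Future<T>> futures = new LinkedList<>();
    for (Callable<T> task : tasks) {
        futures.add(cs.submit(task)); // keep the Future so the task can be cancelled later
    }
    try {
        for (int i = 0; i < tasks.size(); i++) {
            cs.take().get(); // rethrows the first task failure as ExecutionException
        }
    } finally {
        // on failure or interruption, cancel whatever has not completed yet
        for (Future<T> f : futures) {
            if (!f.isDone()) {
                f.cancel(true /* mayInterruptIfRunning */);
            }
        }
    }
}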
From source file: it.geosolutions.tools.io.file.CopyTree.java

/**
 * @param cs
 *            CompletionService to submit the copy task to
 * @param source
 *            file to copy
 * @param sourceDir
 *            where source is mounted
 * @param destinationDir
 *            mount point where to copy source
 * @param listener
 *            progress listener notified as the copy proceeds
 * @return a Future representing the pending copy, yielding the rebased destination file
 * @throws RejectedExecutionException
 *             if this task cannot be accepted for execution
 * @throws IllegalArgumentException
 *             if executor is null or terminated
 */
public static Future<File> asyncCopyTree(final CompletionService<File> cs, final File source,
        final File sourceDir, final File destinationDir, final Progress<String> listener)
        throws RejectedExecutionException, IllegalArgumentException {
    final Callable<File> call = new Callable<File>() {
        public File call() throws Exception {
            try {
                // build the new path
                listener.onNewTask("rebase file path");
                listener.onStart();
                File destFile = Path.rebaseFile(sourceDir, destinationDir, source);
                listener.onUpdateProgress(10);

                // try to build the directory tree
                listener.onNewTask("building directory structure");
                listener.onStart();
                if (!destFile.getParentFile().mkdirs()) {
                    listener.onWarningOccurred(this.getClass().getSimpleName(),
                            Thread.currentThread().getName(),
                            "Unable to create the destination directory structure: probably it already exists");
                }
                listener.onUpdateProgress(30);

                // start copy
                listener.onNewTask("copying " + source + " to " + destFile);
                listener.onStart();
                FileUtils.copyFile(source, destFile);
                listener.onUpdateProgress(100);
                listener.onCompleted();

                // return the rebased and copied file
                return destFile;
            } catch (Exception e) {
                listener.onExceptionOccurred(e);
                listener.onCancel();
                throw e;
            }
        }
    };
    try {
        return cs.submit(call);
    } catch (NullPointerException e) {
        listener.onExceptionOccurred(e);
        listener.onCancel();
        throw e;
    } catch (RejectedExecutionException e) {
        listener.onExceptionOccurred(e);
        listener.onCancel();
        throw e;
    }
}
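Note the catch around cs.submit(call): CompletionService.submit delegates to the underlying executor's execute, so with a standard ThreadPoolExecutor it throws RejectedExecutionException once that executor has been shut down, which is the case the method above guards against. A small demonstration of that behavior (class name and message are made up for this sketch):

import java.util.concurrent.*;

public class RejectedSubmitDemo {
    public static void main(String[] args) {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        CompletionService<String> cs = new ExecutorCompletionService<>(pool);
        pool.shutdown();
        try {
            cs.submit(() -> "never runs"); // delegates to pool.execute(...), which now rejects
        } catch (RejectedExecutionException e) {
            System.err.println("task rejected: executor already shut down");
        }
    }
}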
From source file: com.rapid7.diskstorage.dynamodb.DynamoDBDelegate.java

public List<QueryResultWrapper> parallelQuery(List<QueryWorker> queryWorkers) throws BackendException {
    CompletionService<QueryResultWrapper> completionService = new ExecutorCompletionService<>(clientThreadPool);
    List<Future<QueryResultWrapper>> futures = Lists.newLinkedList();
    for (QueryWorker worker : queryWorkers) {
        futures.add(completionService.submit(worker));
    }

    boolean interrupted = false;
    List<QueryResultWrapper> results = Lists.newLinkedList();
    try {
        for (int i = 0; i < queryWorkers.size(); i++) {
            try {
                QueryResultWrapper result = completionService.take().get();
                results.add(result);
            } catch (InterruptedException e) {
                interrupted = true;
                // fail out because titan does not poll this thread for interrupted anywhere
                throw new BackendRuntimeException("was interrupted during parallelQuery");
            } catch (ExecutionException e) {
                throw unwrapExecutionException(e, QUERY);
            }
        }
    } finally {
        for (Future<QueryResultWrapper> future : futures) {
            if (!future.isDone()) {
                future.cancel(interrupted /* mayInterruptIfRunning */);
            }
        }
        if (interrupted) {
            // set interrupted on this thread and fail out
            Thread.currentThread().interrupt();
        }
    }
    return results;
}
From source file: com.amazon.janusgraph.diskstorage.dynamodb.DynamoDBDelegate.java

public List<QueryResultWrapper> parallelQuery(List<QueryWorker> queryWorkers) throws BackendException {
    CompletionService<QueryResultWrapper> completionService = new ExecutorCompletionService<>(clientThreadPool);
    List<Future<QueryResultWrapper>> futures = Lists.newLinkedList();
    for (QueryWorker worker : queryWorkers) {
        futures.add(completionService.submit(worker));
    }

    boolean interrupted = false;
    List<QueryResultWrapper> results = Lists.newLinkedList();
    try {
        for (int i = 0; i < queryWorkers.size(); i++) {
            try {
                QueryResultWrapper result = completionService.take().get();
                results.add(result);
            } catch (InterruptedException e) {
                interrupted = true;
                // fail out because janusgraph does not poll this thread for interrupted anywhere
                throw new BackendRuntimeException("was interrupted during parallelQuery");
            } catch (ExecutionException e) {
                throw unwrapExecutionException(e, QUERY);
            }
        }
    } finally {
        for (Future<QueryResultWrapper> future : futures) {
            if (!future.isDone()) {
                future.cancel(interrupted /* mayInterruptIfRunning */);
            }
        }
        if (interrupted) {
            // set interrupted on this thread and fail out
            Thread.currentThread().interrupt();
        }
    }
    return results;
}
From source file: com.rapid7.diskstorage.dynamodb.DynamoDBDelegate.java

public Map<StaticBuffer, GetItemResult> parallelGetItem(List<GetItemWorker> workers) throws BackendException {
    final CompletionService<GetItemResultWrapper> completionService =
            new ExecutorCompletionService<>(clientThreadPool);
    final List<Future<GetItemResultWrapper>> futures = Lists.newLinkedList();
    for (GetItemWorker worker : workers) {
        futures.add(completionService.submit(worker));
    }

    boolean interrupted = false;
    final Map<StaticBuffer, GetItemResult> results = Maps.newHashMap();
    try {
        for (int i = 0; i < workers.size(); i++) {
            try {
                GetItemResultWrapper result = completionService.take().get();
                results.put(result.getTitanKey(), result.getDynamoDBResult());
            } catch (InterruptedException e) {
                interrupted = true;
                throw new BackendRuntimeException("was interrupted during parallelGet");
            } catch (ExecutionException e) {
                throw unwrapExecutionException(e, GET_ITEM);
            }
        }
    } finally {
        for (Future<GetItemResultWrapper> future : futures) {
            if (!future.isDone()) {
                future.cancel(interrupted /* mayInterruptIfRunning */);
            }
        }
        if (interrupted) {
            // set interrupted on this thread and fail out
            Thread.currentThread().interrupt();
        }
    }
    return results;
}
From source file: com.amazon.janusgraph.diskstorage.dynamodb.DynamoDBDelegate.java

public Map<StaticBuffer, GetItemResult> parallelGetItem(List<GetItemWorker> workers) throws BackendException {
    final CompletionService<GetItemResultWrapper> completionService =
            new ExecutorCompletionService<>(clientThreadPool);
    final List<Future<GetItemResultWrapper>> futures = Lists.newLinkedList();
    for (GetItemWorker worker : workers) {
        futures.add(completionService.submit(worker));
    }

    boolean interrupted = false;
    final Map<StaticBuffer, GetItemResult> results = Maps.newHashMap();
    try {
        for (int i = 0; i < workers.size(); i++) {
            try {
                GetItemResultWrapper result = completionService.take().get();
                results.put(result.getJanusGraphKey(), result.getDynamoDBResult());
            } catch (InterruptedException e) {
                interrupted = true;
                throw new BackendRuntimeException("was interrupted during parallelGet");
            } catch (ExecutionException e) {
                throw unwrapExecutionException(e, GET_ITEM);
            }
        }
    } finally {
        for (Future<GetItemResultWrapper> future : futures) {
            if (!future.isDone()) {
                future.cancel(interrupted /* mayInterruptIfRunning */);
            }
        }
        if (interrupted) {
            // set interrupted on this thread and fail out
            Thread.currentThread().interrupt();
        }
    }
    return results;
}
From source file: net.sourceforge.seqware.pipeline.plugins.PluginRunnerIT.java

public void testLatestWorkflowsInternal(List<Integer> accessions) throws IOException {
    String output = ITUtility.runSeqWareJar(
            "-p net.sourceforge.seqware.pipeline.plugins.BundleManager -- --list-installed",
            ReturnValue.SUCCESS);
    Assert.assertTrue("output should include installed workflows", output.contains("INSTALLED WORKFLOWS"));
    Map<String, WorkflowInfo> latestWorkflows = new HashMap<String, WorkflowInfo>();
    String[] lines = output.split(System.getProperty("line.separator"));
    for (String line : lines) {
        String[] lineParts = line.split("\t");
        try {
            int workflow_accession = Integer.valueOf(lineParts[3]);
            String workflowName = lineParts[0];
            String path = lineParts[4];
            if (path.equals("null")) {
                continue;
            }
            WorkflowInfo wi = new WorkflowInfo(workflow_accession, path, workflowName, lineParts[1]);
            //TODO: check that the permanent workflow actually exists, if not warn and skip
            File fileAtPath = new File(path);
            if (!fileAtPath.exists()) {
                Log.warn("Skipping " + workflowName + ":" + workflow_accession
                        + " , bundle path does not exist at " + path);
                continue;
            }
            if (!latestWorkflows.containsKey(workflowName)) {
                latestWorkflows.put(workflowName, wi);
            } else {
                // contained
                int old = latestWorkflows.get(workflowName).sw_accession;
                if (workflow_accession > old) {
                    latestWorkflows.put(workflowName, wi);
                }
            }
        } catch (Exception e) {
            /*
             * do nothing and skip this line of the BundleManager output
             */
        }
    }
    // setup thread pool
    ExecutorService threadPool = Executors.newFixedThreadPool(latestWorkflows.size());
    CompletionService<String> pool = new ExecutorCompletionService<String>(threadPool);
    for (Entry<String, WorkflowInfo> e : latestWorkflows.entrySet()) {
        System.out.println("Testing " + e.getKey() + " " + e.getValue().sw_accession);
        // if we have an accession list, skip accessions that are not in it
        if (accessions.size() > 0) {
            Integer acc = e.getValue().sw_accession;
            if (!accessions.contains(acc)) {
                System.out.println("Skipping " + e.getKey() + " " + e.getValue().sw_accession
                        + " due to accession list");
                continue;
            }
        }
        StringBuilder params = new StringBuilder();
        params.append("--bundle ").append(e.getValue().path).append(" ");
        params.append("--version ").append(e.getValue().version).append(" ");
        params.append("--test ");
        File tempFile = File.createTempFile(e.getValue().name, ".out");
        pool.submit(new TestingThread(params.toString(), tempFile));
    }
    for (Entry<String, WorkflowInfo> e : latestWorkflows.entrySet()) {
        try {
            pool.take().get();
        } catch (InterruptedException ex) {
            Log.error(ex);
        } catch (ExecutionException ex) {
            Log.error(ex);
        }
    }
    threadPool.shutdown();
}
From source file: net.sourceforge.seqware.pipeline.plugins.PluginRunnerET.java

public void testLatestWorkflowsInternal(List<Integer> accessions) throws IOException {
    String output = ITUtility.runSeqWareJar(
            "-p net.sourceforge.seqware.pipeline.plugins.BundleManager -- --list-installed",
            ReturnValue.SUCCESS, null);
    Assert.assertTrue("output should include installed workflows", output.contains("INSTALLED WORKFLOWS"));
    Map<String, WorkflowInfo> latestWorkflows = new HashMap<>();
    String[] lines = output.split(System.getProperty("line.separator"));
    for (String line : lines) {
        String[] lineParts = line.split("\t");
        try {
            int workflow_accession = Integer.valueOf(lineParts[3]);
            String workflowName = lineParts[0];
            String path = lineParts[lineParts.length - 2];
            if (path.equals("null")) {
                continue;
            }
            WorkflowInfo wi = new WorkflowInfo(workflow_accession, path, workflowName, lineParts[1]);
            //TODO: check that the permanent workflow actually exists, if not warn and skip
            File fileAtPath = new File(path);
            if (!fileAtPath.exists()) {
                Log.warn("Skipping " + workflowName + ":" + workflow_accession
                        + " , bundle path does not exist at " + path);
                continue;
            }
            if (!latestWorkflows.containsKey(workflowName)) {
                latestWorkflows.put(workflowName, wi);
            } else {
                // contained
                int old = latestWorkflows.get(workflowName).sw_accession;
                if (workflow_accession > old) {
                    latestWorkflows.put(workflowName, wi);
                }
            }
        } catch (Exception e) {
            /*
             * do nothing and skip this line of the BundleManager output
             */
        }
    }
    // setup thread pool
    ExecutorService threadPool = Executors.newFixedThreadPool(latestWorkflows.size());
    CompletionService<String> pool = new ExecutorCompletionService<>(threadPool);
    for (Entry<String, WorkflowInfo> e : latestWorkflows.entrySet()) {
        System.out.println("Testing " + e.getKey() + " " + e.getValue().sw_accession);
        // if we have an accession list, skip accessions that are not in it
        if (accessions.size() > 0) {
            Integer acc = e.getValue().sw_accession;
            if (!accessions.contains(acc)) {
                System.out.println("Skipping " + e.getKey() + " " + e.getValue().sw_accession
                        + " due to accession list");
                continue;
            }
        }
        StringBuilder params = new StringBuilder();
        params.append("--bundle ").append(e.getValue().path).append(" ");
        params.append("--version ").append(e.getValue().version).append(" ");
        params.append("--test ");
        File tempFile = File.createTempFile(e.getValue().name, ".out");
        pool.submit(new TestingThread(params.toString(), tempFile));
    }
    for (Entry<String, WorkflowInfo> e : latestWorkflows.entrySet()) {
        try {
            pool.take().get();
        } catch (InterruptedException ex) {
            Log.error(ex);
        } catch (ExecutionException ex) {
            Log.error(ex);
        }
    }
    threadPool.shutdown();
}