Example usage for the java.util.concurrent ExecutorCompletionService(Executor) constructor

Introduction

This page collects usage examples for the java.util.concurrent ExecutorCompletionService(Executor) constructor.

Prototype

public ExecutorCompletionService(Executor executor) 

Document

Creates an ExecutorCompletionService using the supplied executor for base task execution and a LinkedBlockingQueue as a completion queue.
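
For orientation, the following is a minimal, self-contained sketch of the submit/take pattern this constructor supports. The class name, pool size, task count, and task bodies are illustrative assumptions, not drawn from the projects quoted below.

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class CompletionServiceSketch {
    public static void main(String[] args) throws Exception {
        // Pool size and task count are arbitrary values chosen for illustration.
        ExecutorService pool = Executors.newFixedThreadPool(4);
        ExecutorCompletionService<Integer> completionService = new ExecutorCompletionService<Integer>(pool);

        // Submit independent tasks; completed futures queue up in the internal LinkedBlockingQueue.
        for (int i = 0; i < 10; i++) {
            final int n = i;
            completionService.submit(new Callable<Integer>() {
                @Override
                public Integer call() {
                    return n * n; // placeholder work
                }
            });
        }

        // take() hands back futures in completion order, not submission order.
        for (int i = 0; i < 10; i++) {
            Future<Integer> done = completionService.take();
            System.out.println("Completed result: " + done.get());
        }

        pool.shutdown();
    }
}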

Usage

From source file:org.apache.hadoop.yarn.server.nodemanager.amrmproxy.BaseAMRMProxyTest.java
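
This Hadoop YARN test helper dispatches a function call for each test context on a shared thread pool and collects whatever responses arrive within a short per-response timeout; failed or timed-out calls are skipped.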

/**
 * This helper method will invoke the specified function in parallel for each
 * end point in the specified list using a thread pool and return the
 * responses received from the function. It implements the logic required for
 * dispatching requests in parallel and waiting for the responses. If any of
 * the function calls fails or times out, it is ignored and the method proceeds
 * with the rest, so the responses returned can be fewer than the number of
 * end points specified.
 *
 * @param testContexts the test contexts to dispatch requests for
 * @param func the function to invoke for each test context
 * @return the responses received from the successful invocations
 */
protected <T, R> List<R> runInParallel(List<T> testContexts, final Function<T, R> func) {
    ExecutorCompletionService<R> completionService = new ExecutorCompletionService<R>(this.getThreadPool());
    LOG.info("Sending requests to endpoints asynchronously. Number of test contexts=" + testContexts.size());
    for (int index = 0; index < testContexts.size(); index++) {
        final T testContext = testContexts.get(index);

        LOG.info("Adding request to threadpool for test context: " + testContext.toString());

        completionService.submit(new Callable<R>() {
            @Override
            public R call() throws Exception {
                LOG.info("Sending request. Test context:" + testContext.toString());

                R response = null;
                try {
                    response = func.invoke(testContext);
                    LOG.info("Successfully sent request for context: " + testContext.toString());
                } catch (Throwable ex) {
                    LOG.error("Failed to process request for context: " + testContext);
                    response = null;
                }

                return response;
            }
        });
    }

    ArrayList<R> responseList = new ArrayList<R>();
    LOG.info("Waiting for responses from endpoints. Number of contexts=" + testContexts.size());
    for (int i = 0; i < testContexts.size(); ++i) {
        try {
            final Future<R> future = completionService.take();
            final R response = future.get(3000, TimeUnit.MILLISECONDS);
            responseList.add(response);
        } catch (Throwable e) {
            LOG.error("Failed to process request " + e.getMessage());
        }
    }

    return responseList;
}

From source file:org.paxle.se.search.impl.SearchProviderManager.java
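
Paxle's SearchProviderManager submits one callable per eligible search provider and then polls the completion service for results, shrinking the remaining timeout budget after each response until it is exhausted.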

private void search(ISearchRequest request, ISearchResultCollector results)
        throws InterruptedException, ExecutionException, SearchException {
    if (request == null)
        throw new NullPointerException("The search-request object must not be null");

    final CompletionService<ISearchResult> execCompletionService = new ExecutorCompletionService<ISearchResult>(
            this.execService);

    // determining all search-providers that should be used for the query
    HashSet<String> allowedProviderPIDs = new HashSet<String>(request.getProviderIDs());

    // loop through all providers and pass the request to each one
    List<String> usedProviderPIDs = new ArrayList<String>();
    for (Entry<String, ServiceReference> providerEntry : this.providersRefs.entrySet()) {
        final String providerPID = providerEntry.getKey();
        final ServiceReference providerRef = providerEntry.getValue();

        if (allowedProviderPIDs.size() > 0 && !allowedProviderPIDs.contains(providerPID)) {
            this.logger.debug(String.format("SEProvider '%s' is skipped for search request '%d'.", providerPID,
                    Integer.valueOf(request.getRequestID())));
            continue;
        }

        usedProviderPIDs.add(providerPID);
        execCompletionService.submit(new SearchProviderCallable(this.ctx, providerRef, request));
    }

    if (allowedProviderPIDs.size() == 0) {
        // store the providers we have used to process the search-request
        request.setProviderIDs(usedProviderPIDs);
    }

    // loop through all providers and collect the results
    long searchTimeout = request.getTimeout();
    for (int i = 0; i < usedProviderPIDs.size(); ++i) {
        final long start = System.currentTimeMillis();

        // waiting for the next search result
        final Future<ISearchResult> future = execCompletionService.poll(searchTimeout, TimeUnit.MILLISECONDS);
        if (future != null) {
            final ISearchResult r = future.get();

            if (r != null) {
                final String providerPID = r.getProviderID();
                final int size = r.getSize();
                this.logger
                        .debug(String.format("SEProvider '%s' returned '%d' results for search-request '%d'.",
                                providerPID, Integer.valueOf(size), Integer.valueOf(request.getRequestID())));

                results.collect(r);
            }
        }

        final long diff = System.currentTimeMillis() - start;
        if ((searchTimeout -= diff) <= 0)
            break;
    }
}

From source file:org.apache.flume.channel.kafka.TestKafkaChannel.java
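
This Flume test writes a batch of events into a KafkaChannel and pulls them back out using two completion services backed by cached thread pools, optionally interleaving the put and take phases, then verifies the pulled events.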

private void writeAndVerify(final boolean testRollbacks, final KafkaChannel channel, final boolean interleave)
        throws Exception {

    final List<List<Event>> events = createBaseList();

    ExecutorCompletionService<Void> submitterSvc = new ExecutorCompletionService<Void>(
            Executors.newCachedThreadPool());

    putEvents(channel, events, submitterSvc);

    if (interleave) {
        wait(submitterSvc, 5);
    }

    ExecutorCompletionService<Void> submitterSvc2 = new ExecutorCompletionService<Void>(
            Executors.newCachedThreadPool());

    final List<Event> eventsPulled = pullEvents(channel, submitterSvc2, 50, testRollbacks, true);

    if (!interleave) {
        wait(submitterSvc, 5);
    }
    wait(submitterSvc2, 5);

    verify(eventsPulled);
}

From source file:org.apache.drill.sql.client.ref.DrillRefImpl.java
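
Drill's reference implementation builds a completion service over a single-threaded executor, submits the reference-interpreter plan to run asynchronously, and wraps the resulting future in a JsonEnumerator that reads from the output queue.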

public Enumerator<E> enumerator() {
    // TODO: use a completion service from the container
    final ExecutorCompletionService<Collection<RunOutcome>> service = new ExecutorCompletionService<Collection<RunOutcome>>(
            new ThreadPoolExecutor(1, 1, 1, TimeUnit.SECONDS, new LinkedBlockingDeque<Runnable>(10)));

    // Run the plan using an executor. It runs in a different thread, writing
    // results to our queue.
    //
    // TODO: use the result of task, and check for exceptions
    final Future<Collection<RunOutcome>> task = runRefInterpreterPlan(service);

    return new JsonEnumerator(task, queue, fields);

}

From source file:org.pentaho.platform.plugin.services.metadata.PentahoMetadataDomainRepositoryConcurrencyTest.java
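
This Pentaho concurrency test runs a set of actors on a fixed-size pool, drains their results through a completion service, and fails the test with an aggregated message if any actor reports an error.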

private void runTest(final List<? extends Callable<String>> actors) throws Exception {
    List<String> errors = new ArrayList<String>();
    ExecutorService executorService = Executors.newFixedThreadPool(actors.size());
    try {
        CompletionService<String> completionService = new ExecutorCompletionService<String>(executorService);
        for (Callable<String> reader : actors) {
            completionService.submit(reader);
        }

        for (int i = 0; i < actors.size(); i++) {
            Future<String> take = completionService.take();
            String result;
            try {
                result = take.get();
            } catch (ExecutionException e) {
                result = "Execution exception: " + e.getMessage();
            }
            if (result != null) {
                errors.add(result);
            }
        }
    } finally {
        executorService.shutdown();
    }

    if (!errors.isEmpty()) {
        StringBuilder builder = new StringBuilder();
        builder.append("The following errors occurred: \n");
        for (String error : errors) {
            builder.append(error).append('\n');
        }
        fail(builder.toString());
    }
}

From source file:org.apache.hadoop.hbase.util.ModifyRegionUtils.java
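
HBase's ModifyRegionUtils runs a region-editing task for every region on the supplied executor and blocks until all submissions complete, rethrowing any failure as an IOException.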

/**
 * Execute the task on the specified set of regions.
 *
 * @param exec Thread Pool Executor
 * @param regions {@link HRegionInfo} that describes the regions to edit
 * @param task {@link RegionEditTask} custom code to edit the region
 * @throws IOException if editing any region fails or the wait is interrupted
 */
public static void editRegions(final ThreadPoolExecutor exec, final Collection<HRegionInfo> regions,
        final RegionEditTask task) throws IOException {
    final ExecutorCompletionService<Void> completionService = new ExecutorCompletionService<Void>(exec);
    for (final HRegionInfo hri : regions) {
        completionService.submit(new Callable<Void>() {
            @Override
            public Void call() throws IOException {
                task.editRegion(hri);
                return null;
            }
        });
    }

    try {
        for (HRegionInfo hri : regions) {
            completionService.take().get();
        }
    } catch (InterruptedException e) {
        throw new InterruptedIOException(e.getMessage());
    } catch (ExecutionException e) {
        IOException ex = new IOException();
        ex.initCause(e.getCause());
        throw ex;
    }
}

From source file:org.jclouds.aws.s3.xml.S3ParserTest.java
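
This jclouds test submits LOOP_COUNT parsing tasks to a shared completion service and asserts that each parsed ListBucketResponse comes back non-null.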

@Test
void testParseListContainerResultParallelResponseTime() throws InterruptedException, ExecutionException {
    CompletionService<ListBucketResponse> completer = new ExecutorCompletionService<ListBucketResponse>(exec);
    for (int i = 0; i < LOOP_COUNT; i++)
        completer.submit(new Callable<ListBucketResponse>() {
            public ListBucketResponse call() throws IOException, SAXException, HttpException {
                return runParseListContainerResult();
            }
        });
    for (int i = 0; i < LOOP_COUNT; i++)
        assert completer.take().get() != null;
}

From source file:org.codice.ddf.admin.common.PrioritizedBatchExecutor.java
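
DDF's PrioritizedBatchExecutor builds one completion service per priority batch, submits every task in the batch to the shared thread pool, and returns the services in priority order.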

private List<CompletionService<T>> getPrioritizedCompletionServices() {
    List<CompletionService<T>> prioritizedCompletionServices = new ArrayList<>();

    for (List<Callable<T>> taskBatch : tasks) {
        CompletionService<T> completionService = new ExecutorCompletionService<>(threadPool);

        for (Callable<T> task : taskBatch) {
            completionService.submit(task);
        }

        prioritizedCompletionServices.add(completionService);
    }

    return prioritizedCompletionServices;
}

From source file:org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer.java
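
The YARN ContainerLocalizer wraps the constructor call in a small factory method, presumably so tests or subclasses can supply their own CompletionService.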

CompletionService<Path> createCompletionService(ExecutorService exec) {
    return new ExecutorCompletionService<Path>(exec);
}