Example usage for the java.util.concurrent ExecutorCompletionService constructor

Introduction

This page collects usage examples for the java.util.concurrent ExecutorCompletionService(Executor) constructor.

Prototype

public ExecutorCompletionService(Executor executor) 

Document

Creates an ExecutorCompletionService using the supplied executor for base task execution and a LinkedBlockingQueue as a completion queue.
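
As a quick illustration of the pattern the examples below share, here is a minimal, self-contained sketch (hypothetical class name and task bodies, assuming a small fixed thread pool): tasks go in through submit(), and take() hands back futures in completion order rather than submission order.

import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class CompletionServiceSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        // Wraps the executor; finished tasks are queued on an internal LinkedBlockingQueue.
        CompletionService<Integer> completionService = new ExecutorCompletionService<>(executor);

        int tasks = 8;
        for (int i = 0; i < tasks; i++) {
            final int n = i; // copy the loop variable so the lambda captures an effectively final value
            completionService.submit(() -> n * n); // Callable<Integer>
        }

        // take() blocks until some task finishes; results arrive in completion order.
        for (int i = 0; i < tasks; i++) {
            Future<Integer> done = completionService.take();
            System.out.println("completed: " + done.get());
        }
        executor.shutdown();
    }
}

Because this constructor's completion queue is an unbounded LinkedBlockingQueue, take() never drops a completed task; the examples below all follow this submit-then-take shape, differing mainly in how they handle cancellation and failures.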

Usage

From source file:com.rapid7.diskstorage.dynamodb.DynamoDBDelegate.java

public Map<StaticBuffer, GetItemResult> parallelGetItem(List<GetItemWorker> workers) throws BackendException {
    final CompletionService<GetItemResultWrapper> completionService = new ExecutorCompletionService<>(
            clientThreadPool);

    final List<Future<GetItemResultWrapper>> futures = Lists.newLinkedList();
    for (GetItemWorker worker : workers) {
        futures.add(completionService.submit(worker));
    }

    boolean interrupted = false;
    final Map<StaticBuffer, GetItemResult> results = Maps.newHashMap();
    try {
        for (int i = 0; i < workers.size(); i++) {
            try {
                GetItemResultWrapper result = completionService.take().get();
                results.put(result.getTitanKey(), result.getDynamoDBResult());
            } catch (InterruptedException e) {
                interrupted = true;
                throw new BackendRuntimeException("was interrupted during parallelGet");
            } catch (ExecutionException e) {
                throw unwrapExecutionException(e, GET_ITEM);
            }
        }
    } finally {
        for (Future<GetItemResultWrapper> future : futures) {
            if (!future.isDone()) {
                future.cancel(interrupted /* mayInterruptIfRunning */);
            }
        }

        if (interrupted) {
            // set interrupted on this thread and fail out
            Thread.currentThread().interrupt();
        }
    }
    return results;
}

From source file:io.druid.query.lookup.LookupReferencesManager.java

private void startLookups(final List<LookupBean> lookupBeanList) {
    final ImmutableMap.Builder<String, LookupExtractorFactoryContainer> builder = ImmutableMap.builder();
    final ExecutorService executorService = Execs.multiThreaded(lookupConfig.getNumLookupLoadingThreads(),
            "LookupReferencesManager-Startup-%s");
    final CompletionService<Map.Entry<String, LookupExtractorFactoryContainer>> completionService = new ExecutorCompletionService<>(
            executorService);
    final List<LookupBean> remainingLookups = new ArrayList<>(lookupBeanList);
    try {
        LOG.info("Starting lookup loading process");
        for (int i = 0; i < lookupConfig.getLookupStartRetries() && !remainingLookups.isEmpty(); i++) {
            LOG.info("Round of attempts #%d, [%d] lookups", i + 1, remainingLookups.size());
            final Map<String, LookupExtractorFactoryContainer> successfulLookups = startLookups(
                    remainingLookups, completionService);
            builder.putAll(successfulLookups);
            remainingLookups.removeIf(l -> successfulLookups.containsKey(l.getName()));
        }
        if (!remainingLookups.isEmpty()) {
            LOG.warn("Failed to start the following lookups after [%d] attempts: [%s]",
                    lookupConfig.getLookupStartRetries(), remainingLookups);
        }
        stateRef.set(new LookupUpdateState(builder.build(), ImmutableList.of(), ImmutableList.of()));
    } catch (InterruptedException | RuntimeException e) {
        LOG.error(e, "Failed to finish lookup load process.");
    } finally {
        executorService.shutdownNow();
    }
}

From source file:net.sourceforge.seqware.pipeline.plugins.PluginRunnerET.java

public void testLatestWorkflowsInternal(List<Integer> accessions) throws IOException {
    String output = ITUtility.runSeqWareJar(
            "-p net.sourceforge.seqware.pipeline.plugins.BundleManager -- --list-installed",
            ReturnValue.SUCCESS, null);
    Assert.assertTrue("output should include installed workflows", output.contains("INSTALLED WORKFLOWS"));
    Map<String, WorkflowInfo> latestWorkflows = new HashMap<>();
    String[] lines = output.split(System.getProperty("line.separator"));
    for (String line : lines) {
        String[] lineParts = line.split("\t");
        try {
            int workflow_accession = Integer.valueOf(lineParts[3]);
            String workflowName = lineParts[0];
            String path = lineParts[lineParts.length - 2];
            if (path.equals("null")) {
                continue;
            }
            WorkflowInfo wi = new WorkflowInfo(workflow_accession, path, workflowName, lineParts[1]);

            //TODO: check that the permanent workflow actually exists, if not warn and skip
            File fileAtPath = new File(path);
            if (!fileAtPath.exists()) {
                Log.warn("Skipping " + workflowName + ":" + workflow_accession
                        + " , bundle path does not exist at " + path);
                continue;
            }

            if (!latestWorkflows.containsKey(workflowName)) {
                latestWorkflows.put(workflowName, wi);
            } else {
                // contained
                int old = latestWorkflows.get(workflowName).sw_accession;
                if (workflow_accession > old) {
                    latestWorkflows.put(workflowName, wi);
                }
            }
        } catch (Exception e) {
            // do nothing and skip this line of the BundleManager output
        }
    }
    // setup thread pool
    ExecutorService threadPool = Executors.newFixedThreadPool(latestWorkflows.size());
    CompletionService<String> pool = new ExecutorCompletionService<>(threadPool);
    for (Entry<String, WorkflowInfo> e : latestWorkflows.entrySet()) {
        System.out.println("Testing " + e.getKey() + " " + e.getValue().sw_accession);

        // if we have an accession list, skip accessions that are not in it
        if (accessions.size() > 0) {
            Integer acc = e.getValue().sw_accession;
            if (!accessions.contains(acc)) {
                System.out.println(
                        "Skipping " + e.getKey() + " " + e.getValue().sw_accession + " due to accession list");
                continue;
            }
        }

        StringBuilder params = new StringBuilder();
        params.append("--bundle ").append(e.getValue().path).append(" ");
        params.append("--version ").append(e.getValue().version).append(" ");
        params.append("--test ");
        File tempFile = File.createTempFile(e.getValue().name, ".out");
        pool.submit(new TestingThread(params.toString(), tempFile));
    }
    for (Entry<String, WorkflowInfo> e : latestWorkflows.entrySet()) {
        try {
            pool.take().get();
        } catch (InterruptedException ex) {
            Log.error(ex);
        } catch (ExecutionException ex) {
            Log.error(ex);
        }
    }
    threadPool.shutdown();
}

From source file:org.apache.hama.bsp.TestBSPTaskFaults.java

public void testPingOnTaskCleanupFailure() {

    LOG.info("Testing ping failure case - 3");

    conf.setInt(TEST_POINT, 3);
    CompletionService<Integer> completionService = new ExecutorCompletionService<Integer>(
            this.testBSPTaskService);
    TestBSPProcessRunner runner = new TestBSPProcessRunner(3, workerServer.getListenerAddress().getPort());

    Future<Integer> future = completionService.submit(runner);

    try {
        future.get(20000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e1) {
        LOG.error("Interrupted Exception.", e1);
    } catch (ExecutionException e1) {
        LOG.error("ExecutionException Exception.", e1);
    } catch (TimeoutException e) {
        LOG.error("TimeoutException Exception.", e);
    }

    checkIfPingTestPassed();
    groom.setPingCount(0);
    this.testBSPTaskService.shutdownNow();
    runner.destroyProcess();
}

From source file:com.amazon.janusgraph.diskstorage.dynamodb.DynamoDBDelegate.java

public Map<StaticBuffer, GetItemResult> parallelGetItem(List<GetItemWorker> workers) throws BackendException {
    final CompletionService<GetItemResultWrapper> completionService = new ExecutorCompletionService<>(
            clientThreadPool);

    final List<Future<GetItemResultWrapper>> futures = Lists.newLinkedList();
    for (GetItemWorker worker : workers) {
        futures.add(completionService.submit(worker));
    }

    boolean interrupted = false;
    final Map<StaticBuffer, GetItemResult> results = Maps.newHashMap();
    try {
        for (int i = 0; i < workers.size(); i++) {
            try {
                GetItemResultWrapper result = completionService.take().get();
                results.put(result.getJanusGraphKey(), result.getDynamoDBResult());
            } catch (InterruptedException e) {
                interrupted = true;
                throw new BackendRuntimeException("was interrupted during parallelGet");
            } catch (ExecutionException e) {
                throw unwrapExecutionException(e, GET_ITEM);
            }
        }
    } finally {
        for (Future<GetItemResultWrapper> future : futures) {
            if (!future.isDone()) {
                future.cancel(interrupted /* mayInterruptIfRunning */);
            }
        }

        if (interrupted) {
            // set interrupted on this thread and fail out
            Thread.currentThread().interrupt();
        }
    }
    return results;
}

From source file:org.apache.hama.bsp.TestBSPTaskFaults.java

public void testBSPTaskSelfDestroy() {
    LOG.info("Testing self kill on lost contact.");

    CompletionService<Integer> completionService = new ExecutorCompletionService<Integer>(
            this.testBSPTaskService);
    TestBSPProcessRunner runner = new TestBSPProcessRunner(0, workerServer.getListenerAddress().getPort());

    Future<Integer> future = completionService.submit(runner);

    try {
        while (groom.pingCount == 0) {
            Thread.sleep(100);
        }
    } catch (Exception e) {
        LOG.error("Interrupted the timer for 1 sec.", e);
    }

    workerServer.stop();
    umbilical = null;
    workerServer = null;
    Integer exitValue = -1;
    try {
        exitValue = future.get(20000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e1) {
        LOG.error("Interrupted Exception.", e1);
    } catch (ExecutionException e1) {
        LOG.error("ExecutionException Exception.", e1);
    } catch (TimeoutException e) {
        LOG.error("TimeoutException Exception.", e);
    }

    assertEquals(69, exitValue.intValue());
    runner.destroyProcess();
}

From source file:org.geowebcache.sqlite.MbtilesBlobStore.java

/**
 * Helper method that deletes the provided files.
 */
private boolean deleteFiles(List<File> files) throws StorageException {
    if (files.isEmpty()) {
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("No files to delete.");
        }
        return false;
    }
    // asking the connection manager to remove the database files
    CompletionService<Boolean> completionService = new ExecutorCompletionService<>(executorService);
    int tasks = 0;
    for (File file : files) {
        completionService.submit(() -> connectionManager.delete(file), true);
        tasks++;
    }
    // let's wait for the tasks to finish
    for (int i = 0; i < tasks; i++) {
        try {
            completionService.take().get();
        } catch (Exception exception) {
            throw Utils.exception(exception, "Something bad happened when deleting files.");
        }
    }
    if (LOGGER.isDebugEnabled()) {
        LOGGER.debug("Files deleted.");
    }
    return true;
}

From source file:org.apache.hadoop.hbase.regionserver.HStore.java

/**
 * Creates an unsorted list of StoreFile loaded in parallel
 * from the given directory.
 * @throws IOException
 */
private List<StoreFile> loadStoreFiles() throws IOException {
    Collection<StoreFileInfo> files = fs.getStoreFiles(getColumnFamilyName());
    if (files == null || files.size() == 0) {
        return new ArrayList<StoreFile>();
    }

    // initialize the thread pool for opening store files in parallel..
    ThreadPoolExecutor storeFileOpenerThreadPool = this.region
            .getStoreFileOpenAndCloseThreadPool("StoreFileOpenerThread-" + this.getColumnFamilyName());
    CompletionService<StoreFile> completionService = new ExecutorCompletionService<StoreFile>(
            storeFileOpenerThreadPool);

    int totalValidStoreFile = 0;
    for (final StoreFileInfo storeFileInfo : files) {
        // open each store file in parallel
        completionService.submit(new Callable<StoreFile>() {
            @Override
            public StoreFile call() throws IOException {
                StoreFile storeFile = createStoreFileAndReader(storeFileInfo);
                return storeFile;
            }
        });
        totalValidStoreFile++;
    }

    ArrayList<StoreFile> results = new ArrayList<StoreFile>(files.size());
    IOException ioe = null;
    try {
        for (int i = 0; i < totalValidStoreFile; i++) {
            try {
                Future<StoreFile> future = completionService.take();
                StoreFile storeFile = future.get();
                long length = storeFile.getReader().length();
                this.storeSize += length;
                this.totalUncompressedBytes += storeFile.getReader().getTotalUncompressedBytes();
                if (LOG.isDebugEnabled()) {
                    LOG.debug("loaded " + storeFile.toStringDetailed());
                }
                results.add(storeFile);
            } catch (InterruptedException e) {
                if (ioe == null)
                    ioe = new InterruptedIOException(e.getMessage());
            } catch (ExecutionException e) {
                if (ioe == null)
                    ioe = new IOException(e.getCause());
            }
        }
    } finally {
        storeFileOpenerThreadPool.shutdownNow();
    }
    if (ioe != null) {
        // close StoreFile readers
        for (StoreFile file : results) {
            try {
                if (file != null)
                    file.closeReader(true);
            } catch (IOException e) {
                LOG.warn(e.getMessage());
            }
        }
        throw ioe;
    }

    return results;
}

From source file:com.redhat.red.offliner.Main.java

/**
 * Sets up components needed for the download process, including the {@link ExecutorCompletionService},
 * {@link java.util.concurrent.Executor}, {@link org.apache.http.client.HttpClient}, and {@link ArtifactListReader}
 * instances. If baseUrls were provided on the command line, it will initialize the "global" baseUrls list to that.
 * Otherwise it will use {@link Options#DEFAULT_REPO_URL} and {@link Options#CENTRAL_REPO_URL} as the default
 * baseUrls. If specified, configures the HTTP proxy and username/password for authentication.
 * @throws MalformedURLException In case an invalid {@link URL} is given as a baseUrl.
 */
protected void init() throws MalformedURLException {
    int threads = opts.getThreads();
    executorService = Executors.newFixedThreadPool(threads, (final Runnable r) -> {
        final Thread t = new Thread(r);
        t.setDaemon(true);

        return t;
    });

    executor = new ExecutorCompletionService<>(executorService);

    errors = new ConcurrentHashMap<String, Throwable>();

    final PoolingHttpClientConnectionManager ccm = new PoolingHttpClientConnectionManager();
    ccm.setMaxTotal(opts.getConnections());

    final HttpClientBuilder builder = HttpClients.custom().setConnectionManager(ccm);

    final String proxy = opts.getProxy();
    String proxyHost = proxy;
    int proxyPort = 8080;
    if (proxy != null) {
        final int portSep = proxy.lastIndexOf(':');

        if (portSep > -1) {
            proxyHost = proxy.substring(0, portSep);
            proxyPort = Integer.parseInt(proxy.substring(portSep + 1));
        }
        final HttpRoutePlanner planner = new DefaultProxyRoutePlanner(new HttpHost(proxyHost, proxyPort));

        builder.setRoutePlanner(planner);
    }

    client = builder.build();

    final CredentialsProvider creds = new BasicCredentialsProvider();

    cookieStore = new BasicCookieStore();

    baseUrls = opts.getBaseUrls();
    if (baseUrls == null) {
        baseUrls = new ArrayList<>();
    }

    List<String> repoUrls = (baseUrls.isEmpty() ? DEFAULT_URLS : baseUrls);

    System.out.println("Planning download from:\n  " + StringUtils.join(repoUrls, "\n  "));

    for (String repoUrl : repoUrls) {
        if (repoUrl != null) {
            final String user = opts.getUser();
            if (user != null) {
                final URL u = new URL(repoUrl);
                final AuthScope as = new AuthScope(u.getHost(), UrlUtils.getPort(u));

                creds.setCredentials(as, new UsernamePasswordCredentials(user, opts.getPassword()));
            }
        }

        if (proxy != null) {
            final String proxyUser = opts.getProxyUser();
            if (proxyUser != null) {
                creds.setCredentials(new AuthScope(proxyHost, proxyPort),
                        new UsernamePasswordCredentials(proxyUser, opts.getProxyPassword()));
            }
        }
    }

    artifactListReaders = new ArrayList<>(3);
    artifactListReaders.add(new FoloReportArtifactListReader());
    artifactListReaders.add(new PlaintextArtifactListReader());
    artifactListReaders.add(new PomArtifactListReader(opts.getSettingsXml(), opts.getTypeMapping(), creds));
}

From source file:nl.privacybarometer.privacyvandaag.service.FetcherService.java

private int refreshFeeds(final long keepDateBorderTime) {
    ContentResolver cr = getContentResolver();
    final Cursor cursor = cr.query(FeedColumns.CONTENT_URI, FeedColumns.PROJECTION_ID, null, null, null);
    int nbFeed = (cursor != null) ? cursor.getCount() : 0;

    ExecutorService executor = Executors.newFixedThreadPool(THREAD_NUMBER, new ThreadFactory() {
        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r);
            t.setPriority(Thread.MIN_PRIORITY);
            return t;
        }
    });

    CompletionService<Integer> completionService = new ExecutorCompletionService<>(executor);
    while (cursor != null && cursor.moveToNext()) {
        final String feedId = cursor.getString(0);
        completionService.submit(new Callable<Integer>() {
            @Override
            public Integer call() {
                int result = 0;
                try {
                    result = refreshFeed(feedId, keepDateBorderTime);
                } catch (Exception e) {
                    Log.e(TAG, "Error refreshing feed " + e.getMessage());
                }
                return result;
            }
        });
    }
    if (cursor != null)
        cursor.close();

    int globalResult = 0;
    for (int i = 0; i < nbFeed; i++) {
        try {
            Future<Integer> f = completionService.take(); // ModPrivacyVandaag: the count of new articles after a feed is refreshed
            globalResult += f.get();
        } catch (Exception e) {
            Log.e(TAG, "Error counting new articles " + e.getMessage());
        }
    }

    executor.shutdownNow(); // To purge all threads

    return globalResult; // ModPrivacyVandaag: As far as I can see: this contains the number of new articles from a refresh of the feeds.
}