Example usage for java.util.concurrent ExecutorService awaitTermination

List of usage examples for java.util.concurrent ExecutorService awaitTermination

Introduction

On this page you can find example usage for java.util.concurrent ExecutorService awaitTermination.

Prototype

boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException;

Documentation

Blocks until all tasks have completed execution after a shutdown request, or the timeout occurs, or the current thread is interrupted, whichever happens first.
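
Before the usage examples, here is a minimal sketch of the usual pattern, with an illustrative pool size, timeout, and placeholder task (none of these come from the examples below): call shutdown() so the pool stops accepting new work, wait a bounded time with awaitTermination, and fall back to shutdownNow() if it returns false because the timeout elapsed.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AwaitTerminationSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);      // pool size is illustrative
        pool.submit(() -> System.out.println("working..."));         // placeholder task

        pool.shutdown();                                             // stop accepting new tasks; submitted tasks keep running
        if (!pool.awaitTermination(30, TimeUnit.SECONDS)) {          // wait up to 30 seconds for completion
            pool.shutdownNow();                                      // timeout elapsed; interrupt whatever is still running
        }
    }
}

Note that awaitTermination only waits; it never initiates shutdown itself, which is why every example below calls shutdown() (or shutdownNow()) before waiting.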

Usage

From source file: org.pentaho.test.platform.plugin.services.cache.CacheExpirationRegistryTest.java

@Test
public void testRegisterThreaded() throws InterruptedException {
    ExecutorService service = Executors.newFixedThreadPool(THREADS);
    long start = System.currentTimeMillis();

    IntStream.range(0, THREADS).forEach(t -> service.submit(() -> {
        try {
            IntStream.range(0, 100000).forEach((i) -> {
                registry.register(new TestCacheItem("key" + i));
                registry.getCachedItems();
                registry.unRegister(new TestCacheItem("key" + i));
                registry.getCachedItems();
            });
        } catch (Exception e) {
            logger.error("Error in thread " + t + " [" + e.getClass().getName() + "]: " + e.getMessage() + ".");
            fail();
        }

        logger.info("Thread " + t + " finished.");
    }));

    service.shutdown();
    service.awaitTermination(300, TimeUnit.SECONDS); // kill the test if it takes more than 5 minutes to run

    long end = System.currentTimeMillis();
    logger.info("Execution time in milliseconds: " + (end - start));
    logger.info("Size: " + registry.getCachedItems().size());

    assertEquals(0, registry.getCachedItems().size());
}

From source file: com.emc.ecs.sync.CasMigrationTest.java

protected String summarize(FPPool pool, List<String> clipIds) throws Exception {
    List<String> summaries = Collections.synchronizedList(new ArrayList<String>());

    ExecutorService service = Executors.newFixedThreadPool(CAS_THREADS);

    System.out.print("Summarizing clips");

    for (String clipId : clipIds) {
        service.submit(new ClipReader(pool, clipId, summaries));
    }

    service.shutdown();
    service.awaitTermination(CAS_SETUP_WAIT_MINUTES, TimeUnit.MINUTES);
    service.shutdownNow();

    System.out.println();

    Collections.sort(summaries);
    StringBuilder out = new StringBuilder();
    for (String summary : summaries) {
        out.append(summary);
    }
    return out.toString();
}

From source file: com.emc.vipr.sync.CasMigrationTest.java

protected String summarize(FPPool pool, List<String> clipIds) throws Exception {
    List<String> summaries = Collections.synchronizedList(new ArrayList<String>());

    ExecutorService service = Executors.newFixedThreadPool(CAS_SETUP_THREADS);

    System.out.print("Summarizing clips");

    for (String clipId : clipIds) {
        service.submit(new ClipReader(pool, clipId, summaries));
    }

    service.shutdown();
    service.awaitTermination(CAS_SETUP_WAIT_MINUTES, TimeUnit.MINUTES);
    service.shutdownNow();

    System.out.println();

    Collections.sort(summaries);
    StringBuilder out = new StringBuilder();
    for (String summary : summaries) {
        out.append(summary);
    }
    return out.toString();
}

From source file: org.wso2.carbon.bps.perf.rest.RestClientTest.java

public void execute() throws Exception {

    serverURL = config.getProperty("serverURL");
    ActivitiRestClient pretestClient = new ActivitiRestClient(serverURL, 1);
    JSONObject processDefs = pretestClient.getProcessDefinitions();
    try {
        JSONArray defs = processDefs.getJSONArray("data");
        for (int defNumber = 0; defNumber < defs.length(); defNumber++) {
            JSONObject def = defs.getJSONObject(defNumber);
            String pid = def.getString("id");
            String pkey = def.getString("key");
            processKeytoId.put(pkey, pid);
        }
    } catch (JSONException e) {
        log.error("Failed to get process definitions from the server: " + serverURL
                + ". Process definitions response: " + processDefs.toString());
    }

    instanceCount = Integer.parseInt(config.getProperty("instances"));

    List<Integer> threadNumbers = new ArrayList<>();
    String threadsProp = config.getProperty("threads");
    String[] threadParts = threadsProp.split(",");
    for (String threadPart : threadParts) {
        int threadCount = Integer.parseInt(threadPart.trim());
        threadNumbers.add(threadCount);
    }

    sleepTime = Integer.parseInt(config.getProperty("sleep"));
    outPath = config.getProperty("results");
    File outFolder = new File(outPath);
    if (!outFolder.exists()) {
        log.info("Results folder " + outFolder.getAbsolutePath() + " does not exist. Creating a new folder...");
        outFolder.mkdirs();
    }
    File testReportFile = new File(outFolder, "summary.csv");
    StringBuffer summaryBuffer = new StringBuffer();
    summaryBuffer.append("Server URL," + serverURL + "\n");
    summaryBuffer.append("Number of process instances," + instanceCount + "\n");
    summaryBuffer.append("Number of threads," + Arrays.toString(threadNumbers.toArray()) + "\n\n\n");
    log.info("Test configuration - \n" + summaryBuffer.toString());
    summaryBuffer.append("Process ID,Threads,Total time,TPS,Average execution time\n\n");
    FileUtils.write(testReportFile, summaryBuffer.toString());

    List<ProcessConfig> processConfigs = new ArrayList<>();
    String processRef = "process";
    Set<String> processPropsNames = config.stringPropertyNames();
    for (String processPropName : processPropsNames) {
        if (processPropName.startsWith(processRef)) {
            String processProp = config.getProperty(processPropName);
            ProcessConfig processConfig = new ProcessConfig(processProp, processKeytoId);
            processConfigs.add(processConfig);
            log.info("Test configuration created for the process " + processConfig.toString());
        }
    }

    boolean testFailures = false;
    long allTestsStartTime = System.currentTimeMillis();
    int numTotalTests = processConfigs.size() * threadNumbers.size();
    int numCompletedTests = 0;

    List<String> completedProcessNames = new ArrayList<>();
    log.info("Starting performance tests...");
    for (ProcessConfig processConfig : processConfigs) {
        log.info("Starting tests for process " + processConfig.getId());

        for (int numThreads : threadNumbers) {
            log.info("Starting test for process " + processConfig.getId() + " with " + numThreads
                    + " threads...");
            ActivitiRestClient client = new ActivitiRestClient(serverURL, numThreads);

            List<RestProcessExecutor> processExecutors = new ArrayList<>(instanceCount);
            ExecutorService executorService = Executors.newFixedThreadPool(numThreads);

            long stime = System.currentTimeMillis();
            for (int i = 0; i < instanceCount; i++) {
                RestProcessExecutor processExecutor = new RestProcessExecutor(null, processConfig.getId(),
                        processConfig.getStartupVariables(), client, i);
                processExecutors.add(processExecutor);
                executorService.execute(processExecutor);
            }

            executorService.shutdown();
            try {
                executorService.awaitTermination(1, TimeUnit.HOURS);
            } catch (InterruptedException e) {
                String msg = "Error occurred while waiting for executors to terminate.";
                log.error(msg, e);
            }
            long etime = System.currentTimeMillis();

            StringBuffer buf = new StringBuffer();
            double totalDuration = 0;
            buf.append("Instance durations for process: " + processConfig.getId() + "\n");
            for (RestProcessExecutor processExecutor : processExecutors) {
                testFailures = processExecutor.isFailed();
                if (testFailures) {
                    break;
                }

                buf.append(processExecutor.getExternalDuration() + "\n");
                totalDuration += processExecutor.getExternalDuration();
            }

            if (!testFailures) {
                double externalTPS = (double) instanceCount * 1000 / (double) (etime - stime);
                externalTPS = UMath.round(externalTPS, 3);

                double avgExeTime = totalDuration / instanceCount;
                avgExeTime = UMath.round(avgExeTime, 3);

                log.info("Test for process " + processConfig.getId() + " with " + numTreads
                        + " threads completed with duration: " + (etime - stime) + " ms | TPS: " + externalTPS
                        + " | Average execution time: " + avgExeTime);
                String processRecord = processConfig.getId() + "," + numThreads + "," + (etime - stime) + ","
                        + externalTPS + "," + avgExeTime + "\n";
                FileWriter fileWriter = new FileWriter(testReportFile, true);
                fileWriter.write(processRecord);
                fileWriter.close();

                buf.append("\n\nTPS," + externalTPS + "\n\n");
                buf.append("\n\nAverage execution time," + avgExeTime + " ms\n\n");

                File processReportFile = new File(outFolder, processConfig.getId() + ".csv");
                FileUtils.write(processReportFile, buf.toString());
                client.close();

                numCompletedTests++;
                double testingTime = System.currentTimeMillis() - allTestsStartTime;
                double testingTimeMinutes = UMath.round(testingTime / (1000 * 60), 2);
                double testingTimeHours = UMath.round(testingTime / (1000 * 60 * 60), 2);

                double remainingTime = (testingTime / numCompletedTests) * (numTotalTests - numCompletedTests);
                double remainingTimeMinutes = UMath.round(remainingTime / (1000 * 60), 2);
                double remainingTimeHours = UMath.round(remainingTime / (1000 * 60 * 60), 2);
                log.info("Completed test for process " + processConfig.getId() + " with " + numTreads
                        + " threads.");
                log.info(numCompletedTests + " out of " + numTotalTests + " completed in " + testingTimeMinutes
                        + " minutes (" + testingTimeHours + " hours). Estimated remaining time: "
                        + remainingTimeMinutes + " minutes (" + remainingTimeHours + " hours)");

                //                    client.undeploy();
                //                    client.deploy();

                completedProcessNames.add("Process: " + processConfig.getId() + " | Threads: " + numTreads);
                log.info("Waiting " + sleepTime + " ms before the next test");
                Thread.sleep(sleepTime);
            } else {
                log.error("Test for process " + processConfig.getId() + " with " + numTreads
                        + " failed. See client and server logs for more information.");
                break; // terminate tests for this process with other threads
            }
        }

        if (!testFailures) {
            log.info("Completed tests for process " + processConfig.getId());
        } else {
            log.error("At least one test for the process " + processConfig.getId()
                    + " has failed. Test suite will be terminated.");
            StringBuffer retryMessage = new StringBuffer();
            retryMessage.append("Below tests were completed successfully:\n");
            for (String completedProcessName : completedProcessNames) {
                retryMessage.append(completedProcessName + "\n");
            }
            log.info(retryMessage.toString());
            break; // terminate tests for other processes
        }
    }
}

From source file: edu.cmu.lti.oaqa.bioasq.concept.retrieval.GoPubMedSeparateConceptRetrievalExecutor.java

@Override
public void process(JCas jcas) throws AnalysisEngineProcessException {
    AbstractQuery aquery = TypeUtil.getAbstractQueries(jcas).stream().findFirst().get();
    Collection<QueryConcept> qconcepts = TypeUtil.getQueryConcepts(aquery);
    List<ConceptSearchResult> concepts = Collections.synchronizedList(new ArrayList<>());
    ExecutorService es = Executors.newCachedThreadPool();
    for (QueryConcept qconcept : qconcepts) {
        String queryString = bopQueryStringConstructor.formatQueryConcept(qconcept)
                .replaceAll("[^A-Za-z0-9_\\-\"]+", " ");
        LOG.info("Query string: {}", queryString);
        for (BioASQUtil.Ontology ontology : BioASQUtil.Ontology.values()) {
            es.execute(() -> {
                try {
                    concepts.addAll(
                            BioASQUtil.searchOntology(service, jcas, queryString, pages, hits, ontology));
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });
        }
    }
    es.shutdown();
    try {
        if (!es.awaitTermination(timeout, TimeUnit.MINUTES)) {
            LOG.warn("Timeout occurred for one or more concept retrieval services.");
        }
    } catch (InterruptedException e) {
        throw new AnalysisEngineProcessException(e);
    }
    Map<String, List<ConceptSearchResult>> onto2concepts = concepts.stream()
            .collect(groupingBy(ConceptSearchResult::getSearchId));
    for (Map.Entry<String, List<ConceptSearchResult>> entry : onto2concepts.entrySet()) {
        List<ConceptSearchResult> results = entry.getValue();
        LOG.info("Retrieved {} concepts from {}", results.size(), entry.getKey());
        if (LOG.isDebugEnabled()) {
            results.stream().limit(10).forEach(c -> LOG.debug(" - {}", TypeUtil.toString(c)));
        }
    }
    TypeUtil.rankedSearchResultsByScore(concepts, limit).forEach(ConceptSearchResult::addToIndexes);
}

From source file: com.asakusafw.runtime.util.cache.HadoopFileCacheRepositoryTest.java

/**
 * Conflict cache creation.
 * @throws Exception if failed
 */
@Test
public void conflict() throws Exception {
    File source = folder.newFile();
    byte[] bytes = new byte[1024 * 1024];
    try (OutputStream output = new FileOutputStream(source)) {
        for (int i = 0, n = 50; i < n; i++) {
            output.write(bytes);
        }
    }

    Path path = path(source);
    File cacheRepo = folder.newFolder();
    Configuration configuration = new ConfigurationProvider().newInstance();
    LockProvider<Path> locks = new LocalFileLockProvider<>(folder.newFolder());
    RetryStrategy retrier = new ConstantRetryStrategy(30, 100, 200);
    FileCacheRepository cache = new HadoopFileCacheRepository(configuration, path(cacheRepo), locks, retrier);

    List<Future<Path>> futures = new ArrayList<>();
    int count = 10;
    CountDownLatch latch = new CountDownLatch(count);
    ExecutorService executor = Executors.newFixedThreadPool(count);
    try {
        for (int i = 0; i < count; i++) {
            String label = String.format("thread-%d", i);
            futures.add(executor.submit(() -> {
                LOG.info("Wait: resolve @" + label);
                latch.countDown();
                if (latch.await(5, TimeUnit.SECONDS) == false) {
                    throw new TimeoutException();
                }
                LOG.info("Start: resolve @" + label);
                Path result = cache.resolve(path);

                LOG.info("Finish: resolve @" + label);
                return result;
            }));
        }
        executor.shutdown();
        if (executor.awaitTermination(30, TimeUnit.SECONDS) == false) {
            throw new TimeoutException();
        }
    } finally {
        executor.shutdownNow();
    }
    for (Future<Path> future : futures) {
        future.get();
    }
}

From source file: org.apache.hadoop.hbase.tool.HFileContentValidator.java

/**
 * Check HFile contents are readable by HBase 2.
 *
 * @param conf used configuration
 * @return true if no corrupted HFiles were found, false otherwise
 * @throws IOException if a remote or network exception occurs
 */
private boolean validateHFileContent(Configuration conf) throws IOException {
    FileSystem fileSystem = FSUtils.getCurrentFileSystem(conf);

    ExecutorService threadPool = createThreadPool(conf);
    HFileCorruptionChecker checker;

    try {
        checker = new HFileCorruptionChecker(conf, threadPool, false);

        Path rootDir = FSUtils.getRootDir(conf);
        LOG.info("Validating HFile contents under {}", rootDir);

        Collection<Path> tableDirs = FSUtils.getTableDirs(fileSystem, rootDir);
        checker.checkTables(tableDirs);

        Path archiveRootDir = new Path(rootDir, HConstants.HFILE_ARCHIVE_DIRECTORY);
        LOG.info("Validating HFile contents under {}", archiveRootDir);

        List<Path> archiveTableDirs = FSUtils.getTableDirs(fileSystem, archiveRootDir);
        checker.checkTables(archiveTableDirs);
    } finally {
        threadPool.shutdown();

        try {
            threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    int checkedFiles = checker.getHFilesChecked();
    Collection<Path> corrupted = checker.getCorrupted();

    if (corrupted.isEmpty()) {
        LOG.info("Checked {} HFiles, none of them are corrupted.", checkedFiles);
        LOG.info("There are no incompatible HFiles.");

        return true;
    } else {
        LOG.info("Checked {} HFiles, {} are corrupted.", checkedFiles, corrupted.size());

        for (Path path : corrupted) {
            LOG.info("Corrupted file: {}", path);
        }

        LOG.info("Change data block encodings before upgrading. "
                + "Check https://s.apache.org/prefixtree for instructions.");

        return false;
    }
}

From source file: net.jotel.ws.client.WebSocketClientTest.java

@Test
public void reconnect() throws Exception {

    final List<Exception> exceptions = new ArrayList<Exception>();

    URI uri = new URI("ws://not-existing-domain-name:8080/websocket/ws/subscribe");
    final WebSocketClient c = new WebSocketClient();
    c.setWebSocketUri(uri);
    c.setReconnectEnabled(true);
    c.setReconnectInterval(100L);
    c.setReconnectAttempts(2);
    c.addListener(new WebSocketListener() {
        @Override
        public void onMessage(String message) {
        }

        @Override
        public void onMessage(byte[] message) {
        }

        @Override
        public void onError(Exception ex) {
            exceptions.add(ex);
        }

        @Override
        public void onClose(Integer statusCode, String message) {
            // TODO Auto-generated method stub

        }

        @Override
        public void onConnect() {
            // TODO Auto-generated method stub

        }
    });

    try {
        c.connect();
        fail("Expected WebSocketException");
    } catch (WebSocketException ex) {
        // expected
        assertEquals(3, exceptions.size());

        for (Exception e : exceptions) {
            Throwable rootCause = ExceptionUtils.getRootCause(e);
            if (rootCause == null) {
                rootCause = e;
            }

            assertTrue(rootCause instanceof UnknownHostException);
        }
    }

    exceptions.clear();
    c.setReconnectAttempts(0);

    try {
        c.connect();
        fail("Expected WebSocketException");
    } catch (WebSocketException ex) {
        // expected
        assertEquals(1, exceptions.size());

        for (Exception e : exceptions) {
            Throwable rootCause = ExceptionUtils.getRootCause(e);
            if (rootCause == null) {
                rootCause = e;
            }

            assertTrue(rootCause instanceof UnknownHostException);
        }
    }

    exceptions.clear();
    c.setReconnectAttempts(-1);

    ExecutorService executor = Executors.newSingleThreadExecutor();

    Future<?> future = executor.submit(new Runnable() {
        @Override
        public void run() {
            try {
                c.connect();
                fail("Expected WebSocketException");
            } catch (WebSocketException ex) {
                throw new UnhandledException(ex);
            }
        }
    });

    Thread.sleep(2000L);

    c.setReconnectEnabled(false);

    Thread.sleep(2000L);

    executor.shutdown();
    assertTrue(executor.awaitTermination(1, TimeUnit.SECONDS));

    try {
        future.get();
        fail("Expected WebSocketException");
    } catch (Exception ex) {
        // expected
        assertTrue(exceptions.size() > 1);

        for (Exception e : exceptions) {
            Throwable rootCause = ExceptionUtils.getRootCause(e);
            if (rootCause == null) {
                rootCause = e;
            }

            assertTrue(rootCause instanceof UnknownHostException);
        }
    }
}

From source file: org.jumpmind.metl.core.runtime.flow.StepRuntime.java

private void shutdownThreads(boolean waitForShutdown) {
    if (this.componentRuntimeExecutor instanceof ExecutorService) {
        try {
            ExecutorService service = (ExecutorService) this.componentRuntimeExecutor;
            service.shutdown();
            while (waitForShutdown && !service.isTerminated()) {
                service.awaitTermination(500, TimeUnit.MILLISECONDS);
            }
        } catch (Exception e) {
            recordError(1, e);
        }
    }
}

From source file: com.brienwheeler.lib.concurrent.ExecutorsTest.java

@Test
public void testNewSingleThreadExecutorShutdownNow() throws InterruptedException {
    NamedThreadFactory threadFactory = new NamedThreadFactory(THREAD_FACTORY_NAME);
    ExecutorService executor = Executors.newSingleThreadExecutor(threadFactory);

    executor.submit(new SleepRunnable(10L));
    Future<?> notExecutedRunnable = executor.submit(new NullRunnable());
    Future<?> notExecutedCallable = executor.submit(new NullCallable());
    Future<Integer> notExecutedRunnable2 = executor.submit(new NullRunnable(), 1);

    List<Runnable> notExecuted = executor.shutdownNow();
    Assert.assertTrue(executor.isShutdown());
    Assert.assertEquals(3, notExecuted.size());
    Assert.assertTrue(CollectionUtils.containsInstance(notExecuted, notExecutedRunnable));
    Assert.assertTrue(CollectionUtils.containsInstance(notExecuted, notExecutedCallable));
    Assert.assertTrue(CollectionUtils.containsInstance(notExecuted, notExecutedRunnable2));

    executor.awaitTermination(10, TimeUnit.MILLISECONDS);
    Assert.assertTrue(executor.isTerminated());
}