Example usage for java.util.concurrent ExecutorService awaitTermination

Introduction

On this page you can find usage examples for java.util.concurrent ExecutorService awaitTermination, drawn from open-source projects.

Prototype

boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException;

Document

Blocks until all tasks have completed execution after a shutdown request, or the timeout occurs, or the current thread is interrupted, whichever happens first. Returns true if the executor terminated and false if the timeout elapsed before termination.
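
Before the project examples below, here is a minimal, self-contained sketch of the common pattern (the class and task names are illustrative, not taken from any of the sources that follow): shutdown() stops new submissions, awaitTermination blocks until the pool drains or the timeout elapses, and shutdownNow() interrupts whatever is left.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AwaitTerminationSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        for (int i = 0; i < 4; i++) {
            final int taskId = i;
            pool.submit(() -> System.out.println("task " + taskId + " done"));
        }
        pool.shutdown(); // reject new tasks; already-submitted tasks keep running
        // Block up to 5 seconds; the boolean reports whether the pool fully terminated
        if (!pool.awaitTermination(5, TimeUnit.SECONDS)) {
            pool.shutdownNow(); // timeout elapsed: interrupt tasks still running
        }
    }
}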

Usage

From source file:com.espertech.esper.multithread.TestMTIsolation.java

private void tryIsolated(int numThreads, int numLoops) throws Exception {
    Configuration config = SupportConfigFactory.getConfiguration();
    config.getEngineDefaults().getViewResources().setShareViews(false);
    config.addEventType("SupportBean", SupportBean.class);
    EPServiceProvider engine = EPServiceProviderManager.getDefaultProvider(config);
    engine.initialize();

    // execute
    ExecutorService threadPool = Executors.newFixedThreadPool(numThreads);
    Future<?>[] future = new Future[numThreads];
    ReentrantReadWriteLock sharedStartLock = new ReentrantReadWriteLock();
    sharedStartLock.writeLock().lock();
    for (int i = 0; i < numThreads; i++) {
        future[i] = threadPool.submit(new IsolateUnisolateCallable(i, engine, numLoops));
    }
    Thread.sleep(100);
    sharedStartLock.writeLock().unlock();

    threadPool.shutdown();
    // Wait up to 10 seconds for all callables to finish before checking results
    threadPool.awaitTermination(10, TimeUnit.SECONDS);

    for (int i = 0; i < numThreads; i++) {
        assertTrue((Boolean) future[i].get());
    }
}

From source file:org.mitre.mpf.mst.TestSystemStress.java

@Test(timeout = 180 * MINUTES)
public void runFaceOcvDetectImageManyJobs() throws Exception {
    testCtr++;
    log.info("Beginning test #{} runFaceOcvDetectImageManyJobs()", testCtr);
    IOFileFilter fileFilter = FileFilterUtils.and(FileFilterUtils.fileFileFilter(),
            FileFilterUtils.suffixFileFilter(".jpg"));

    int numExtractors = 6; // number of extractors on Jenkins (* number of nodes, now 1)
    //        int numExtractors = 2;  // number of extractors on local VM * 1 node

    // for testing on local VM only
    //        Collection<File> files = FileUtils.listFiles(new File(getClass().getClassLoader().getResource("samples/face").getFile()),
    //            fileFilter, null);

    // for testing on Jenkins
    // 10,000 jpgs
    Collection<File> files = FileUtils.listFiles(new File("/mpfdata/datasets/mugshots_10000"), fileFilter,
            null);

    BlockingQueue<File> fQueue = new ArrayBlockingQueue<File>(files.size());
    for (File file : files) {
        fQueue.put(file);
    }
    ExecutorService executor = Executors.newFixedThreadPool(numExtractors);
    JobRunner[] jobRunners = new JobRunner[numExtractors];
    for (int i = 0; i < numExtractors; i++) {
        jobRunners[i] = new JobRunner(fQueue);
        executor.submit(jobRunners[i]);
    }
    executor.shutdown();
    executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS);

    Assert.assertEquals("Number of files to process doesn't match actual number of jobs run (one job/file):",
            files.size(), manyJobsNumFilesProcessed);
    log.info("Successfully ran {} jobs for {} files, one file per job.", manyJobsNumFilesProcessed,
            files.size());
    log.info("Finished test runFaceOcvDetectImageManyJobs()");
}

From source file:com.linkedin.pinot.integration.tests.RealtimeClusterIntegrationTest.java

@BeforeClass
public void setUp() throws Exception {
    // Start ZK and Kafka
    startZk();
    kafkaStarters = KafkaStarterUtils.startServers(getKafkaBrokerCount(), KafkaStarterUtils.DEFAULT_KAFKA_PORT,
            KafkaStarterUtils.DEFAULT_ZK_STR, KafkaStarterUtils.getDefaultKafkaConfiguration());

    // Create Kafka topic
    createKafkaTopic(KAFKA_TOPIC, KafkaStarterUtils.DEFAULT_ZK_STR);

    // Start the Pinot cluster
    startController();
    startBroker();
    startServer();

    // Unpack data
    final List<File> avroFiles = unpackAvroData(_tmpDir, SEGMENT_COUNT);

    File schemaFile = getSchemaFile();

    // Load data into H2
    ExecutorService executor = Executors.newCachedThreadPool();
    setupH2AndInsertAvro(avroFiles, executor);

    // Initialize query generator
    setupQueryGenerator(avroFiles, executor);

    // Push data into the Kafka topic
    pushAvroIntoKafka(avroFiles, executor, KAFKA_TOPIC);

    // Wait for data push, query generator initialization and H2 load to complete
    executor.shutdown();
    executor.awaitTermination(10, TimeUnit.MINUTES);

    // Create Pinot table
    setUpTable("mytable", "DaysSinceEpoch", "daysSinceEpoch", KafkaStarterUtils.DEFAULT_ZK_STR, KAFKA_TOPIC,
            schemaFile, avroFiles.get(0));

    // Wait until the Pinot event count matches with the number of events in the Avro files
    long timeInFiveMinutes = System.currentTimeMillis() + 5 * 60 * 1000L;
    Statement statement = _connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    statement.execute("select count(*) from mytable");
    ResultSet rs = statement.getResultSet();
    rs.first();
    int h2RecordCount = rs.getInt(1);
    rs.close();

    waitForRecordCountToStabilizeToExpectedCount(h2RecordCount, timeInFiveMinutes);
}

From source file:com.linkedin.pinot.integration.tests.UploadRefreshDeleteIntegrationTest.java

protected void generateAndUploadRandomSegment(String segmentName, int rowCount) throws Exception {
    ThreadLocalRandom random = ThreadLocalRandom.current();
    Schema schema = new Schema.Parser().parse(
            new File(TestUtils.getFileFromResourceUrl(getClass().getClassLoader().getResource("dummy.avsc"))));
    GenericRecord record = new GenericData.Record(schema);
    GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);
    DataFileWriter<GenericRecord> fileWriter = new DataFileWriter<GenericRecord>(datumWriter);
    File avroFile = new File(_tmpDir, segmentName + ".avro");
    fileWriter.create(schema, avroFile);

    for (int i = 0; i < rowCount; i++) {
        record.put(0, random.nextInt());
        fileWriter.append(record);
    }

    fileWriter.close();

    int segmentIndex = Integer.parseInt(segmentName.split("_")[1]);

    File segmentTarDir = new File(_tarsDir, segmentName);
    ensureDirectoryExistsAndIsEmpty(segmentTarDir);
    ExecutorService executor = MoreExecutors.sameThreadExecutor();
    buildSegmentsFromAvro(Collections.singletonList(avroFile), executor, segmentIndex,
            new File(_segmentsDir, segmentName), segmentTarDir, "mytable", false, null);
    executor.shutdown();
    executor.awaitTermination(1L, TimeUnit.MINUTES);

    for (String segmentFileName : segmentTarDir.list()) {
        File file = new File(segmentTarDir, segmentFileName);
        FileUploadUtils.sendFile("localhost", "8998", "segments", segmentFileName, new FileInputStream(file),
                file.length(), FileUploadUtils.SendFileMethod.POST);
    }

    avroFile.delete();
    FileUtils.deleteQuietly(segmentTarDir);
}

From source file:org.csc.phynixx.connection.MTPooledConnectionIT.java

private void startRunners(IActOnConnection actOnConnection, int numThreads) throws Exception {
    exceptions.clear();
    ExecutorService executorService = Executors.newFixedThreadPool(numThreads);

    for (int i = 0; i < numThreads; i++) {
        Callable<Object> task = new Caller(actOnConnection);
        executorService.submit(task);
    }

    executorService.shutdown();

    // allow 10 seconds per pooled connection
    boolean inTime = executorService.awaitTermination(10L * CONNECTION_POOL_SIZE, TimeUnit.SECONDS);
    if (!inTime) {
        if (!executorService.isShutdown()) {
            executorService.shutdownNow();
        }
        throw new IllegalStateException(
                "Execution was stopped after " + 10 * CONNECTION_POOL_SIZE + " seconds");
    }
    if (exceptions.size() > 0) {
        for (int i = 0; i < exceptions.size(); i++) {
            Exception ex = (Exception) exceptions.get(i);
            ex.printStackTrace();
        }
        throw new IllegalStateException("Error occurred", exceptions.get(0));
    }
}

From source file:org.apache.hadoop.hive.ql.exec.StatsNoJobTask.java

private void shutdownAndAwaitTermination(ExecutorService threadPool) {

    // Disable new tasks from being submitted
    threadPool.shutdown();
    try {

        // Wait a while for existing tasks to terminate
        if (!threadPool.awaitTermination(100, TimeUnit.SECONDS)) {
            // Cancel currently executing tasks
            threadPool.shutdownNow();

            // Wait a while for tasks to respond to being cancelled
            if (!threadPool.awaitTermination(100, TimeUnit.SECONDS)) {
                LOG.debug("Stats collection thread pool did not terminate");
            }
        }
    } catch (InterruptedException ie) {

        // Cancel again if current thread also interrupted
        threadPool.shutdownNow();

        // Preserve interrupt status
        Thread.currentThread().interrupt();
    }
}
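
This helper follows the two-phase shutdown pattern recommended by the ExecutorService Javadoc: request an orderly shutdown, wait, escalate to shutdownNow() if tasks are still running, wait once more, and restore the interrupt flag if the waiting thread is itself interrupted.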

From source file:at.wada811.android.library.demos.concurrent.ExecutorActivity.java

/**
 * Shuts down the given ExecutorService.
 *
 * <p>
 * Requests an orderly shutdown and waits up to one second for running tasks to finish;
 * if they have not completed by then, forces shutdown with shutdownNow().<br>
 * The shutdown is posted to the UI thread via a Handler with an 8-second delay.
 * </p>
 */
private void shutdown(final ExecutorService executorService) {
    new Handler().postDelayed(new Runnable() {
        @Override
        public void run() {
            try {
                LogUtils.d("shutdown");
                executorService.shutdown();
                if (!executorService.awaitTermination(1, TimeUnit.SECONDS)) {
                    LogUtils.d("shutdownNow");
                    executorService.shutdownNow();
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
                LogUtils.d("shutdownNow: " + e.getMessage());
                executorService.shutdownNow();
            }
        }
    }, 8000);
}

From source file:org.apache.zeppelin.sap.UniverseInterpreter.java

private UniverseCompleter createOrUpdateUniverseCompleter(InterpreterContext interpreterContext,
        final String buf, final int cursor) throws UniverseException {
    final UniverseCompleter completer;
    if (universeCompleter == null) {
        completer = new UniverseCompleter(3600);
    } else {
        completer = universeCompleter;
    }
    try {
        final String token = client.getToken(interpreterContext.getParagraphId());
        ExecutorService executorService = Executors.newFixedThreadPool(1);
        executorService.execute(new Runnable() {
            @Override
            public void run() {
                completer.createOrUpdate(client, token, buf, cursor);
            }
        });

        executorService.shutdown();

        executorService.awaitTermination(10, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        logger.warn("Completion timeout", e);
    } finally {
        try {
            client.closeSession(interpreterContext.getParagraphId());
        } catch (Exception e) {
            logger.error("Error close SAP session", e);
        }
    }
    return completer;
}
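
Note the variant here: awaitTermination is used as a timeout guard around a single background task rather than as part of teardown; if createOrUpdate overruns the 10-second budget, the method simply returns the completer in its current state.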

From source file:com.ebay.jetstream.event.processor.esper.raw.EsperTest.java

@Ignore
public void multithreadingTest() {
    Configuration configuration = new Configuration();
    configuration.configure(
            new File("src/test/java/com/ebay/jetstream/event/processor/esper/raw/EsperTestConfig.xml"));
    EPServiceProvider epService = EPServiceProviderManager.getProvider("EsperTest", configuration);
    EsperTestStatement esperStmt = new EsperTestStatement(epService.getEPAdministrator());

    EsperTestSubscriber subscriber = new EsperTestSubscriber();
    EsperTestListener listener = new EsperTestListener();
    esperStmt.setSubscriber(subscriber);
    esperStmt.addListener(listener);

    ExecutorService threadPool = Executors.newCachedThreadPool(new EsperTestThreadFactory());
    EsperTestRunnable[] runnables = new EsperTestRunnable[THREADS_NUM];
    try {
        for (int i = 0; i < THREADS_NUM; i++) {
            runnables[i] = new EsperTestRunnable(epService, i);
            threadPool.submit(runnables[i]);
        }
        threadPool.shutdown();
        threadPool.awaitTermination(200, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        fail("InterruptedException: " + e.getMessage());
    }
    assertTrue("ExecutorService failed to shut down properly", threadPool.isShutdown());

    log.info("[" + subscriber.getIds().first() + "," + subscriber.getIds().last() + "]");
    assertEquals(THREADS_NUM, subscriber.getCount());

    log.info("[" + listener.getIds().first() + "," + listener.getIds().last() + "]");
    assertEquals(THREADS_NUM, listener.getCount());
    assertEquals(THREADS_NUM, listener.getNewCount());
    assertEquals(0, listener.getOldCount());
}

From source file:jenkins.plugins.office365connector.HttpWorkerTest.java

@Test
public void testSendingMultipleWebhooks() throws IOException, InterruptedException {
    ExecutorService executorService = Executors.newCachedThreadPool();
    HttpWorker worker1 = new HttpWorker("http://localhost:8000/test1", "test1body", 30000,
            Mockito.mock(PrintStream.class));
    HttpWorker worker2 = new HttpWorker("http://localhost:8000/test2", "test2body", 30000,
            Mockito.mock(PrintStream.class));
    executorService.submit(worker1);
    executorService.submit(worker2);
    executorService.shutdown();
    executorService.awaitTermination(5, TimeUnit.SECONDS);
    Assert.assertTrue(MyHandler.getTest1Result());
    Assert.assertTrue(MyHandler.getTest2Result());
}