Example usage for java.util.concurrent ExecutorService shutdown

List of usage examples for java.util.concurrent ExecutorService shutdown

Introduction

On this page you can find usage examples for java.util.concurrent ExecutorService shutdown.

Prototype

void shutdown();

Document

Initiates an orderly shutdown in which previously submitted tasks are executed, but no new tasks will be accepted.
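
A minimal, self-contained sketch (not taken from any of the source files below) of the typical pattern: call shutdown() to stop accepting new tasks, then wait for the already submitted tasks with awaitTermination. The class and task bodies here are hypothetical.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        pool.submit(() -> System.out.println("task 1"));
        pool.submit(() -> System.out.println("task 2"));

        // Stop accepting new tasks; previously submitted tasks keep running.
        pool.shutdown();

        // Wait (bounded) for the submitted tasks to finish.
        if (!pool.awaitTermination(5, TimeUnit.SECONDS)) {
            pool.shutdownNow(); // cancel whatever is still running
        }
    }
}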

Usage

From source file:com.espertech.esper.multithread.TestMTStmtNamedWindowUpdate.java

private void trySend(int numThreads, int numEventsPerThread) throws Exception {
    Configuration config = SupportConfigFactory.getConfiguration();
    config.addEventType("SupportBean", SupportBean.class);
    engine = EPServiceProviderManager.getDefaultProvider(config);
    engine.initialize();

    // setup statements
    engine.getEPAdministrator().createEPL(
            "create window MyWindow.std:unique(theString, intPrimitive) as select * from SupportBean");
    engine.getEPAdministrator()
            .createEPL("insert into MyWindow select * from SupportBean(boolPrimitive = true)");
    engine.getEPAdministrator().createEPL("on SupportBean(boolPrimitive = false) sb "
            + "update MyWindow win set intBoxed = win.intBoxed + 1, doublePrimitive = win.doublePrimitive + sb.doublePrimitive"
            + " where sb.theString = win.theString and sb.intPrimitive = win.intPrimitive");

    // send primer events, initialize totals
    Map<MultiKeyUntyped, UpdateTotals> totals = new HashMap<MultiKeyUntyped, UpdateTotals>();
    for (int i = 0; i < NUM_STRINGS; i++) {
        for (int j = 0; j < NUM_INTS; j++) {
            SupportBean primer = new SupportBean(Integer.toString(i), j);
            primer.setBoolPrimitive(true);
            primer.setIntBoxed(0);
            primer.setDoublePrimitive(0);

            engine.getEPRuntime().sendEvent(primer);
            MultiKeyUntyped key = new MultiKeyUntyped(primer.getTheString(), primer.getIntPrimitive());
            totals.put(key, new UpdateTotals(0, 0));
        }
    }

    // execute
    long startTime = System.currentTimeMillis();
    ExecutorService threadPool = Executors.newFixedThreadPool(numThreads);
    Future<StmtNamedWindowUpdateCallable.UpdateResult> future[] = new Future[numThreads];
    for (int i = 0; i < numThreads; i++) {
        future[i] = threadPool
                .submit(new StmtNamedWindowUpdateCallable("Thread" + i, engine, numEventsPerThread));
    }

    threadPool.shutdown();
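    // Wait up to 10 seconds for the already submitted tasks to finish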
    threadPool.awaitTermination(10, TimeUnit.SECONDS);
    long endTime = System.currentTimeMillis();

    // total up result
    long deltaCumulative = 0;
    for (int i = 0; i < numThreads; i++) {
        StmtNamedWindowUpdateCallable.UpdateResult result = future[i].get();
        deltaCumulative += result.getDelta();
        for (StmtNamedWindowUpdateCallable.UpdateItem item : result.getUpdates()) {
            MultiKeyUntyped key = new MultiKeyUntyped(item.getTheString(), item.getIntval());
            UpdateTotals total = totals.get(key);
            if (total == null) {
                throw new RuntimeException("Totals not found for key " + key);
            }
            total.setNum(total.getNum() + 1);
            total.setSum(total.getSum() + item.getDoublePrimitive());
        }
    }

    // compare
    EventBean[] rows = engine.getEPRuntime().executeQuery("select * from MyWindow").getArray();
    assertEquals(rows.length, totals.size());
    long totalUpdates = 0;
    for (EventBean row : rows) {
        UpdateTotals total = totals.get(new MultiKeyUntyped(row.get("theString"), row.get("intPrimitive")));
        assertEquals(total.getNum(), row.get("intBoxed"));
        assertEquals(total.getSum(), row.get("doublePrimitive"));
        totalUpdates += total.getNum();
    }

    assertEquals(totalUpdates, numThreads * numEventsPerThread);
    //long deltaTime = endTime - startTime;
    //System.out.println("Totals updated: " + totalUpdates + "  Delta cumu: " + deltaCumulative + "  Delta pooled: " + deltaTime);
}

From source file:com.amour.imagecrawler.ImagesManager.java

/**
 * Runs the crawler operation using multiple worker threads.
 * @param propertiesManager The Properties-Manager
 * @throws IOException
 * @throws java.security.NoSuchAlgorithmException 
 */
public void run(Properties propertiesManager) throws IOException, NoSuchAlgorithmException, Exception {

    ExecutorService executor = Executors.newFixedThreadPool(
            Integer.parseInt(propertiesManager.getProperty(Crawler.NUMBER_OF_WORKER_THREADS_KEY)));
    for (String imageUrl : this.imagesList) {

        Runnable worker = new ImageRunable(imageUrl, propertiesManager);
        executor.execute(worker);
    }
    executor.shutdown();
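    // Busy-wait until all tasks have finished; awaitTermination would avoid spinning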
    while (!executor.isTerminated()) {
    }
}

From source file:edu.cmu.lti.oaqa.bioasq.concept.retrieval.GoPubMedConceptRetrievalExecutor.java

@Override
public void process(JCas jcas) throws AnalysisEngineProcessException {
    AbstractQuery aquery = TypeUtil.getAbstractQueries(jcas).stream().findFirst().get();
    String queryString = bopQueryStringConstructor.construct(aquery).replaceAll("[^A-Za-z0-9_\\-\"]+", " ");
    LOG.info("Query string: {}", queryString);
    List<ConceptSearchResult> concepts = Collections.synchronizedList(new ArrayList<>());
    ExecutorService es = Executors.newCachedThreadPool();
    for (BioASQUtil.Ontology ontology : BioASQUtil.Ontology.values()) {
        es.execute(() -> {
            try {
                concepts.addAll(BioASQUtil.searchOntology(service, jcas, queryString, pages, hits, ontology));
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
    }
    es.shutdown();
    try {
        if (!es.awaitTermination(timeout, TimeUnit.MINUTES)) {
            LOG.warn("Timeout occurs for one or some concept retrieval services.");
        }
    } catch (InterruptedException e) {
        throw new AnalysisEngineProcessException(e);
    }
    Map<String, List<ConceptSearchResult>> onto2concepts = concepts.stream()
            .collect(groupingBy(ConceptSearchResult::getSearchId));
    for (Map.Entry<String, List<ConceptSearchResult>> entry : onto2concepts.entrySet()) {
        List<ConceptSearchResult> results = entry.getValue();
        LOG.info("Retrieved {} concepts from {}", results.size(), entry.getKey());
        if (LOG.isDebugEnabled()) {
            results.stream().limit(3).forEach(c -> LOG.debug(" - {}", TypeUtil.toString(c)));
        }
    }
    TypeUtil.rankedSearchResultsByScore(concepts, limit).forEach(ConceptSearchResult::addToIndexes);
}

From source file:org.drftpd.protocol.speedtest.net.slave.SpeedTestHandler.java

private void close(ExecutorService executor, Set<Callable<Long>> callables) {
    for (Callable<Long> callable : callables) {
        ((SpeedTestCallable) callable).close();
    }
    executor.shutdown();
}

From source file:com.norconex.committer.AbstractFileQueueCommitterTest.java

@Test
public void testMultipleCommitThread() throws Exception {

    final AtomicInteger counter = new AtomicInteger();

    final AbstractFileQueueCommitter committer = new AbstractFileQueueCommitter() {

        @Override
        protected void commitAddition(IAddOperation operation) throws IOException {
            counter.incrementAndGet();
            operation.delete();
        }

        @Override
        protected void commitDeletion(IDeleteOperation operation) throws IOException {
            counter.incrementAndGet();
            operation.delete();
        }

        @Override
        protected void commitComplete() {
        }
    };

    File queue = temp.newFolder();
    committer.setQueueDir(queue.getPath());
    // Use a bigger number to make sure the files are not 
    // committed while they are added.
    committer.setQueueSize(1000);

    // Queue 50 files for additions
    for (int i = 0; i < 50; i++) {
        Properties metadata = new Properties();
        committer.add(Integer.toString(i), IOUtils.toInputStream("hello world!"), metadata);
    }
    // Queue 50 files for deletions
    for (int i = 50; i < 100; i++) {
        Properties metadata = new Properties();
        committer.remove(Integer.toString(i), metadata);
    }

    ExecutorService pool = Executors.newFixedThreadPool(10);
    for (int i = 0; i < 10; i++) {
        pool.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    committer.commit();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    pool.shutdown();
    pool.awaitTermination(10, TimeUnit.SECONDS);

    // Each file should have been processed exactly once
    assertEquals(100, counter.intValue());

    // All files should have been processed
    Collection<File> files = FileUtils.listFiles(queue, null, true);
    assertTrue(files.isEmpty());
}

From source file:com.cloudera.oryx.app.speed.als.ALSSpeedModelManager.java

@Override
public Iterable<String> buildUpdates(JavaPairRDD<String, String> newData) {
    if (model == null || model.getFractionLoaded() < minModelLoadFraction) {
        return Collections.emptyList();
    }

    // Order by timestamp and parse as tuples
    JavaRDD<String> sortedValues = newData.values().sortBy(MLFunctions.TO_TIMESTAMP_FN, true,
            newData.partitions().size());
    JavaPairRDD<Tuple2<String, String>, Double> tuples = sortedValues.mapToPair(TO_TUPLE_FN);

    JavaPairRDD<Tuple2<String, String>, Double> aggregated;
    if (model.isImplicit()) {
        // See comments in ALSUpdate for explanation of how deletes are handled by this.
        aggregated = tuples.groupByKey().mapValues(MLFunctions.SUM_WITH_NAN);
    } else {
        // For non-implicit, last wins.
        aggregated = tuples.foldByKey(Double.NaN, Functions.<Double>last());
    }

    Collection<UserItemStrength> input = aggregated.filter(MLFunctions.<Tuple2<String, String>>notNaNValue())
            .map(TO_UIS_FN).collect();

    final Solver XTXsolver;
    final Solver YTYsolver;
    try {
        XTXsolver = model.getXTXSolver();
        YTYsolver = model.getYTYSolver();
    } catch (SingularMatrixSolverException smse) {
        return Collections.emptyList();
    }

    final Collection<String> result = new ArrayList<>();
    int numThreads = Runtime.getRuntime().availableProcessors();
    Collection<Callable<Void>> tasks = new ArrayList<>(numThreads);
    final Iterator<UserItemStrength> inputIterator = input.iterator();
    for (int i = 0; i < numThreads; i++) {
        tasks.add(new LoggingVoidCallable() {
            @Override
            public void doCall() {
                while (true) {
                    UserItemStrength uis;
                    synchronized (inputIterator) {
                        if (inputIterator.hasNext()) {
                            uis = inputIterator.next();
                        } else {
                            break;
                        }
                    }
                    String user = uis.getUser();
                    String item = uis.getItem();
                    double value = uis.getStrength();

                    // Xu is the current row u in the X user-feature matrix
                    float[] Xu = model.getUserVector(user);
                    // Yi is the current row i in the Y item-feature matrix
                    float[] Yi = model.getItemVector(item);

                    float[] newXu = ALSUtils.computeUpdatedXu(YTYsolver, value, Xu, Yi, model.isImplicit());
                    // Similarly for Y vs X
                    float[] newYi = ALSUtils.computeUpdatedXu(XTXsolver, value, Yi, Xu, model.isImplicit());

                    if (newXu != null) {
                        String update = toUpdateJSON("X", user, newXu, item);
                        synchronized (result) {
                            result.add(update);
                        }
                    }
                    if (newYi != null) {
                        String update = toUpdateJSON("Y", item, newYi, user);
                        synchronized (result) {
                            result.add(update);
                        }
                    }
                }
            }
        });
    }

    ExecutorService executor = Executors.newFixedThreadPool(numThreads);
    try {
        executor.invokeAll(tasks);
    } catch (InterruptedException ie) {
        throw new IllegalStateException(ie);
    } finally {
        executor.shutdown();
    }
    return result;
}

From source file:jenkins.plugins.elanceodesk.workplace.notifier.HttpWorkerTest.java

@Test
public void testSendingMultipleWebhooks() throws IOException, InterruptedException {
    ExecutorService executorService = Executors.newCachedThreadPool();
    HttpWorker worker1 = new HttpWorker("http://localhost:8000/test1", "test1body", 30000, 1,
            Mockito.mock(PrintStream.class));
    HttpWorker worker2 = new HttpWorker("http://localhost:8000/test2", "test2body", 30000, 1,
            Mockito.mock(PrintStream.class));
    executorService.submit(worker1);
    executorService.submit(worker2);
    executorService.shutdown();
    executorService.awaitTermination(5, TimeUnit.SECONDS);
    Assert.assertTrue(MyHandler.getTest1Result());
    Assert.assertTrue(MyHandler.getTest2Result());
}

From source file:com.sastix.cms.server.services.cache.UIDServiceTest.java

@Test
public void massiveUIDCreatorTest() throws InterruptedException {

    String region1 = "r1";
    String region2 = "r2";
    regionIdsMap.put(region1, new HashMap<>());
    regionIdsMap.put(region2, new HashMap<>());
    ExecutorService executor = Executors.newFixedThreadPool(NTHREDS);
    for (int i = 0; i < numberOfTasks; i++) {
        String region = region1;
        if (i % 2 == 0) {
            region = region2;
        }
        Runnable worker = new UIDRunnable(region);
        executor.execute(worker);
    }

    try {
        latch.await();
    } catch (InterruptedException E) {
        // handle
    }

    executor.shutdown();
    executor.awaitTermination(5, TimeUnit.SECONDS);
    assertEquals(numberOfTasks, ids.size());
    assertEquals(numberOfTasks / 2, regionIdsMap.get(region1).size());
    assertEquals(numberOfTasks / 2, regionIdsMap.get(region2).size());
    assertTrue(!duplicateFound);
    LOG.info("Finished all threads");
}

From source file:com.tenforce.lodms.extractors.CkanHarvester.java

public void harvest(List<String> datasetIds)
        throws RDFHandlerException, ExtractException, DatatypeConfigurationException {
    if (datasetIds.isEmpty()) {
        throw new ExtractException("no datasets specified");
    }
    if (enableProvenance)
        addCatalogProvenance();

    MapToRdfConverter converter = new MapToRdfConverter(predicatePrefix, ignoredKeys, handler);
    ExecutorService executorService = Executors.newFixedThreadPool(5);
    CountDownLatch barrier = new CountDownLatch(datasetIds.size());
    Catalog catalog = new Catalog(baseUri, subjectPrefix);

    try {
        for (String datasetId : datasetIds) {
            executorService.execute(new DataSetHarvester(catalog, converter, handler, apiUri, datasetId,
                    barrier, warnings, httpMethod));
        }
        executorService.shutdown();
        barrier.await();
    } catch (Exception e) {
        executorService.shutdownNow();
        throw new ExtractException(e.getMessage(), e);
    }

}

From source file:com.ebay.jetstream.event.processor.esper.raw.EsperTest.java

@Test
public void aggregationTest() {
    Configuration configuration = new Configuration();
    configuration.configure(
            new File("src/test/java/com/ebay/jetstream/event/processor/esper/raw/EsperTestConfig.xml"));
    EPServiceProvider epService = EPServiceProviderManager.getProvider("EsperTest", configuration);
    EsperTestAggregationStatement esperStmt = new EsperTestAggregationStatement(epService.getEPAdministrator());
    EsperTestAggregationListener listener = new EsperTestAggregationListener();
    esperStmt.addListener(listener);

    ExecutorService threadPool = Executors.newCachedThreadPool(new EsperTestThreadFactory());
    EsperTestAggregationRunnable runnables[] = new EsperTestAggregationRunnable[THREADS_NUM_AGGRTEST];
    try {
        for (int i = 0; i < THREADS_NUM_AGGRTEST; i++) {
            runnables[i] = new EsperTestAggregationRunnable(epService, i);
            threadPool.submit(runnables[i]);
        }
        threadPool.shutdown();
        threadPool.awaitTermination(200, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
        fail("InterruptedException: " + e.getMessage());
    }
    assertTrue("ExecutorService failed to shut down properly", threadPool.isShutdown());
    assertEquals(THREADS_NUM_AGGRTEST * 2, listener.getCount());
    assertEquals(THREADS_NUM_AGGRTEST, m_aggregationResults.size()); // only one result per original event
    for (int i = 0; i < THREADS_NUM_AGGRTEST; i++) {
        assertEquals(11.0 + 4. * i, m_aggregationResults.get(i), 1.e-06);
    }
    assertEquals(THREADS_NUM_AGGRTEST, m_aggregationAvgResults.size()); // only one result per original event
    for (int i = 0; i < THREADS_NUM_AGGRTEST; i++) {
        assertEquals((11.0 + 4. * i) / 4., m_aggregationAvgResults.get(i), 1.e-06);
    }
}