Example usage for java.util.concurrent ExecutorService shutdownNow

Introduction

On this page you can find example usage of java.util.concurrent ExecutorService.shutdownNow().

Prototype

List<Runnable> shutdownNow();

Document

Attempts to stop all actively executing tasks, halts the processing of waiting tasks, and returns a list of the tasks that were awaiting execution.
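Before the project examples below, here is a minimal, self-contained sketch of the common pattern they share (not taken from any of those projects; the class name and task bodies are illustrative only): queue more work than the pool can start, call shutdownNow() to interrupt running tasks and drain the queue, inspect the returned list of never-started tasks, and then wait with awaitTermination, since shutdownNow() itself does not block.

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownNowSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(2);

        // Queue more tasks than the two pool threads can start right away.
        for (int i = 0; i < 10; i++) {
            executor.submit(() -> {
                try {
                    Thread.sleep(5000); // simulate long-running work
                } catch (InterruptedException e) {
                    // shutdownNow() interrupts running tasks; restore the flag and return
                    Thread.currentThread().interrupt();
                }
            });
        }

        Thread.sleep(100); // give the first tasks a moment to start

        // Interrupt the running tasks and drain the queue; the return value
        // is the list of submitted tasks that never started.
        List<Runnable> neverStarted = executor.shutdownNow();
        System.out.println("Tasks still waiting when stopped: " + neverStarted.size());

        // shutdownNow() does not wait for running tasks to finish; awaitTermination does.
        if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
            System.err.println("The pool did not terminate in time.");
        }
    }
}

This mirrors the usual two-phase recipe: prefer shutdown() for an orderly stop, and fall back to shutdownNow() when running work must be cancelled.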

Usage

From source file:org.opennms.netmgt.events.commands.StressCommand.java

@Override
protected Object doExecute() {
    // Apply sane lower bounds to all of the configurable options
    eventsPerSecondPerThread = Math.max(1, eventsPerSecondPerThread);
    numberOfThreads = Math.max(1, numberOfThreads);
    numSeconds = Math.max(1, numSeconds);
    reportIntervalInSeconds = Math.max(1, reportIntervalInSeconds);
    batchSize = Math.max(1, batchSize);
    boolean useJexl = jexlExpressions != null && jexlExpressions.size() > 0;

    // Display the effective settings and rates
    double eventsPerSecond = eventsPerSecondPerThread * numberOfThreads;
    System.out.printf("Generating %d events per second accross %d threads for %d seconds\n",
            eventsPerSecondPerThread, numberOfThreads, numSeconds);
    System.out.printf("\t with UEI: %s\n", eventUei);
    System.out.printf("\t with batch size: %d\n", batchSize);
    System.out.printf("\t with synchronous calls: %s\n", isSynchronous);
    System.out.printf("Which will yield an effective\n");
    System.out.printf("\t %.2f events per second\n", eventsPerSecond);
    System.out.printf("\t %.2f total events\n", eventsPerSecond * numSeconds);
    if (useJexl) {
        System.out.printf("Using JEXL expressions:\n");
        for (String jexlExpression : jexlExpressions) {
            System.out.printf("\t%s\n", jexlExpression);
        }
    }

    // Setup the reporter
    ConsoleReporter reporter = ConsoleReporter.forRegistry(metrics).convertRatesTo(TimeUnit.SECONDS)
            .convertDurationsTo(TimeUnit.MILLISECONDS).build();

    // Setup the executor
    final ThreadFactory threadFactory = new ThreadFactoryBuilder().setNameFormat("Event Generator #%d").build();
    final ExecutorService executor = Executors.newFixedThreadPool(numberOfThreads, threadFactory);

    System.out.println("Starting.");
    try {
        reporter.start(reportIntervalInSeconds, TimeUnit.SECONDS);
        for (int i = 0; i < numberOfThreads; i++) {
            final EventGenerator eventGenerator = useJexl ? new JexlEventGenerator(jexlExpressions)
                    : new EventGenerator();
            executor.execute(eventGenerator);
        }
        System.out.println("Started.");

        // Wait until we timeout or get interrupted
        try {
            Thread.sleep(SECONDS.toMillis(numSeconds));
        } catch (InterruptedException e) {
            // interrupted while waiting; fall through and shut the executor down
        }

        // Stop!
        try {
            System.out.println("Stopping.");
            executor.shutdownNow();
            if (!executor.awaitTermination(2, TimeUnit.MINUTES)) {
                System.err.println("The threads did not stop in time.");
            } else {
                System.out.println("Stopped.");
            }
        } catch (InterruptedException e) {
            // interrupted while awaiting termination; the finally block still stops the reporter
        }
    } finally {
        // Make sure we always stop the reporter
        reporter.stop();
    }

    // And display one last report...
    reporter.report();
    return null;
}

From source file:org.keycloak.testsuite.admin.ComponentsTest.java

private void testConcurrency(BiConsumer<ExecutorService, Integer> taskCreator) throws InterruptedException {
    ExecutorService s = Executors.newFixedThreadPool(NUMBER_OF_THREADS, new BasicThreadFactory.Builder()
            .daemon(true).uncaughtExceptionHandler((t, e) -> log.error(e.getMessage(), e)).build());
    this.remainingDeleteSubmissions = new CountDownLatch(NUMBER_OF_TASKS);

    for (int i = 0; i < NUMBER_OF_TASKS; i++) {
        taskCreator.accept(s, i);
    }

    try {
        assertTrue("Did not create all components in time",
                this.remainingDeleteSubmissions.await(30, TimeUnit.SECONDS));
        s.shutdown();
        assertTrue("Did not finish before timeout", s.awaitTermination(30, TimeUnit.SECONDS));
    } finally {
        s.shutdownNow();
    }
}

From source file:org.apache.carbondata.hadoop.api.CarbonTableOutputFormat.java

@Override
public RecordWriter<NullWritable, ObjectArrayWritable> getRecordWriter(
        final TaskAttemptContext taskAttemptContext) throws IOException {
    final CarbonLoadModel loadModel = getLoadModel(taskAttemptContext.getConfiguration());
    String appName = taskAttemptContext.getConfiguration().get(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME);
    if (null != appName) {
        CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_WRITTEN_BY_APPNAME, appName);
    }
    // If the loadModel already has a taskNo (e.g. when used through the SDK), do not overwrite it
    short sdkWriterCores = loadModel.getSdkWriterCores();
    int itrSize = (sdkWriterCores > 0) ? sdkWriterCores : 1;
    final CarbonOutputIteratorWrapper[] iterators = new CarbonOutputIteratorWrapper[itrSize];
    for (int i = 0; i < itrSize; i++) {
        iterators[i] = new CarbonOutputIteratorWrapper();
    }
    if (null == loadModel.getTaskNo() || loadModel.getTaskNo().isEmpty()) {
        loadModel.setTaskNo(taskAttemptContext.getConfiguration().get("carbon.outputformat.taskno",
                String.valueOf(System.nanoTime())));
    }
    loadModel.setDataWritePath(taskAttemptContext.getConfiguration().get("carbon.outputformat.writepath"));
    final String[] tempStoreLocations = getTempStoreLocations(taskAttemptContext);
    DataTypeUtil.clearFormatter();
    final DataLoadExecutor dataLoadExecutor = new DataLoadExecutor();
    final ExecutorService executorService = Executors.newFixedThreadPool(1,
            new CarbonThreadFactory("CarbonRecordWriter:" + loadModel.getTableName(), true));
    // This must run in a new thread because the underlying iterator uses a blocking queue.
    Future future = executorService.submit(new Thread() {
        @Override
        public void run() {
            ThreadLocalSessionInfo.setConfigurationToCurrentThread(taskAttemptContext.getConfiguration());
            try {
                dataLoadExecutor.execute(loadModel, tempStoreLocations, iterators);
            } catch (Exception e) {
                executorService.shutdownNow();
                for (CarbonOutputIteratorWrapper iterator : iterators) {
                    iterator.closeWriter(true);
                }
                try {
                    dataLoadExecutor.close();
                } catch (Exception ex) {
                    // An exception already occurred before close(), so propagate that original exception.
                    throw new RuntimeException(e);
                }
                throw new RuntimeException(e);
            } finally {
                ThreadLocalSessionInfo.unsetAll();
            }
        }
    });

    if (sdkWriterCores > 0) {
        // CarbonMultiRecordWriter balances the rows to write across the iterators in round-robin fashion.
        return new CarbonMultiRecordWriter(iterators, dataLoadExecutor, loadModel, future, executorService);
    } else {
        return new CarbonRecordWriter(iterators[0], dataLoadExecutor, loadModel, future, executorService);
    }
}

From source file:org.apache.camel.component.http4.HttpConcurrentTest.java

private void doSendMessages(int files, int poolSize) throws Exception {
    ExecutorService executor = Executors.newFixedThreadPool(poolSize);
    // we access the responses Map below only inside the main thread,
    // so no need for a thread-safe Map implementation
    Map<Integer, Future<String>> responses = new HashMap<Integer, Future<String>>();
    for (int i = 0; i < files; i++) {
        final int index = i;
        Future<String> out = executor.submit(new Callable<String>() {
            public String call() throws Exception {
                return template.requestBody("http4://" + getHostName() + ":" + getPort(), null, String.class);
            }
        });
        responses.put(index, out);
    }

    assertEquals(files, responses.size());

    // get all responses
    Set<String> unique = new HashSet<String>();
    for (Future<String> future : responses.values()) {
        unique.add(future.get());
    }

    // should be 'files' unique responses
    assertEquals("Should be " + files + " unique responses", files, unique.size());
    executor.shutdownNow();
}

From source file:com.linkedin.pinot.perf.QueryRunner.java

/**
 * Use multiple threads to run queries at an increasing target QPS.
 *
 * Use a concurrent linked queue to buffer the queries to be sent. Use the main thread to insert queries into the
 * queue at the target QPS, and start {numThreads} worker threads to fetch queries from the queue and send them.
 * We start at the start QPS and keep adding the delta QPS during the test. The main thread is also
 * responsible for collecting the statistics and logging them periodically.
 *
 * @param conf perf benchmark driver config.
 * @param queryFile query file.
 * @param numThreads number of threads sending queries.
 * @param startQPS start QPS
 * @param deltaQPS delta QPS
 * @throws Exception
 */
@SuppressWarnings("InfiniteLoopStatement")
public static void targetQPSQueryRunner(PerfBenchmarkDriverConf conf, String queryFile, int numThreads,
        double startQPS, double deltaQPS) throws Exception {
    final long randomSeed = 123456789L;
    final Random random = new Random(randomSeed);
    final int timePerTargetQPSMillis = 60000;
    final int queueLengthThreshold = Math.max(20, (int) startQPS);

    final List<String> queries;
    try (FileInputStream input = new FileInputStream(new File(queryFile))) {
        queries = IOUtils.readLines(input);
    }
    final int numQueries = queries.size();

    final PerfBenchmarkDriver driver = new PerfBenchmarkDriver(conf);
    final AtomicInteger counter = new AtomicInteger(0);
    final AtomicLong totalResponseTime = new AtomicLong(0L);
    final ExecutorService executorService = Executors.newFixedThreadPool(numThreads);

    final ConcurrentLinkedQueue<String> queryQueue = new ConcurrentLinkedQueue<>();
    double currentQPS = startQPS;
    int intervalMillis = (int) (MILLIS_PER_SECOND / currentQPS);

    for (int i = 0; i < numThreads; i++) {
        executorService.submit(new Runnable() {
            @Override
            public void run() {
                while (true) {
                    String query = queryQueue.poll();
                    if (query == null) {
                        try {
                            Thread.sleep(1);
                            continue;
                        } catch (InterruptedException e) {
                            LOGGER.error("Interrupted.", e);
                            return;
                        }
                    }
                    long startTime = System.currentTimeMillis();
                    try {
                        driver.postQuery(query);
                        counter.getAndIncrement();
                        totalResponseTime.getAndAdd(System.currentTimeMillis() - startTime);
                    } catch (Exception e) {
                        LOGGER.error("Caught exception while running query: {}", query, e);
                        return;
                    }
                }
            }
        });
    }

    LOGGER.info("Start with QPS: {}, delta QPS: {}", startQPS, deltaQPS);
    while (true) {
        long startTime = System.currentTimeMillis();
        while (System.currentTimeMillis() - startTime <= timePerTargetQPSMillis) {
            if (queryQueue.size() > queueLengthThreshold) {
                executorService.shutdownNow();
                throw new RuntimeException("Cannot achieve target QPS of: " + currentQPS);
            }
            queryQueue.add(queries.get(random.nextInt(numQueries)));
            Thread.sleep(intervalMillis);
        }
        double timePassedSeconds = ((double) (System.currentTimeMillis() - startTime)) / MILLIS_PER_SECOND;
        int count = counter.getAndSet(0);
        double avgResponseTime = ((double) totalResponseTime.getAndSet(0)) / count;
        LOGGER.info("Target QPS: {}, Interval: {}ms, Actual QPS: {}, Avg Response Time: {}ms", currentQPS,
                intervalMillis, count / timePassedSeconds, avgResponseTime);

        // Find a new interval
        int newIntervalMillis;
        do {
            currentQPS += deltaQPS;
            newIntervalMillis = (int) (MILLIS_PER_SECOND / currentQPS);
        } while (newIntervalMillis == intervalMillis);
        intervalMillis = newIntervalMillis;
    }
}

From source file:com.emc.ecs.sync.storage.CasStorageTest.java

private void delete(FPPool pool, List<String> clipIds) throws Exception {
    ExecutorService service = Executors.newFixedThreadPool(CAS_THREADS);

    System.out.print("Deleting clips");

    for (String clipId : clipIds) {
        service.submit(new ClipDeleter(pool, clipId));
    }

    service.shutdown();
    service.awaitTermination(CAS_SETUP_WAIT_MINUTES, TimeUnit.MINUTES);
    service.shutdownNow();

    System.out.println();
}

From source file:org.springframework.integration.monitor.IntegrationMBeanExporter.java

private void doShutdownExecutorService(ExecutorService executorService) {
    if (this.shutdownForced) {
        executorService.shutdownNow();
    } else {
        executorService.shutdown();
    }
}

From source file:com.emc.ecs.sync.storage.CasStorageTest.java

private String summarize(FPPool pool, List<String> clipIds) throws Exception {
    List<String> summaries = Collections.synchronizedList(new ArrayList<String>());

    ExecutorService service = Executors.newFixedThreadPool(CAS_THREADS);

    System.out.print("Summarizing clips");

    for (String clipId : clipIds) {
        service.submit(new ClipReader(pool, clipId, summaries));
    }

    service.shutdown();
    service.awaitTermination(CAS_SETUP_WAIT_MINUTES, TimeUnit.MINUTES);
    service.shutdownNow();

    System.out.println();

    Collections.sort(summaries);
    StringBuilder out = new StringBuilder();
    for (String summary : summaries) {
        out.append(summary);
    }
    return out.toString();
}

From source file:org.jboss.test.cluster.defaultcfg.web.test.CleanShutdownTestCase.java

public void testShutdown() throws Exception {
    ExecutorService executor = Executors.newFixedThreadPool(MAX_THREADS);

    try {
        // Make sure a normal request will succeed
        Assert.assertEquals(200, new RequestTask(0).call().intValue());

        // Send a long request - in parallel
        Future<Integer> future = executor.submit(new RequestTask(REQUEST_DURATION));

        // Make sure long request has started
        Thread.sleep(1000);

        // Shutdown server
        this.server.invoke(this.name, SHUTDOWN_METHOD, null, null);

        // Get result of long request
        // This request should succeed since it initiated before server shutdown
        try {
            Assert.assertEquals(200, future.get().intValue());
        } catch (ExecutionException e) {
            e.printStackTrace(System.err);

            Assert.fail(e.getCause().getMessage());
        }

        // Subsequent request should return 404
        Assert.assertEquals(404, new RequestTask(0).call().intValue());
    } finally {
        executor.shutdownNow();
    }
}

From source file:com.emc.ecs.sync.CasMigrationTest.java

protected List<String> createTestClips(FPPool pool, int maxBlobSize, int thisMany, Writer summaryWriter)
        throws Exception {
    ExecutorService service = Executors.newFixedThreadPool(CAS_THREADS);

    System.out.print("Creating clips");

    List<String> clipIds = Collections.synchronizedList(new ArrayList<String>());
    List<String> summaries = Collections.synchronizedList(new ArrayList<String>());
    for (int clipIdx = 0; clipIdx < thisMany; clipIdx++) {
        service.submit(new ClipWriter(pool, clipIds, maxBlobSize, summaries));
    }

    service.shutdown();
    service.awaitTermination(CAS_SETUP_WAIT_MINUTES, TimeUnit.MINUTES);
    service.shutdownNow();

    Collections.sort(summaries);
    for (String summary : summaries) {
        summaryWriter.append(summary);
    }

    System.out.println();

    return clipIds;
}