Example usage for java.util.concurrent ThreadPoolExecutor awaitTermination

Introduction

This page collects example usages of java.util.concurrent.ThreadPoolExecutor#awaitTermination, drawn from open source projects.

Prototype

public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException 
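
awaitTermination blocks until all tasks have completed after a shutdown request, the timeout elapses, or the current thread is interrupted, whichever happens first; it returns true if the executor terminated and false if the timeout elapsed first. Before the project examples below, here is a minimal, self-contained sketch of the usual shutdown-then-await pattern (the class and task names are illustrative, not taken from any of the projects below):

import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class AwaitTerminationSketch {
    public static void main(String[] args) throws InterruptedException {
        ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(4);
        for (int i = 0; i < 8; i++) {
            final int taskId = i;
            pool.execute(() -> System.out.println(
                    "task " + taskId + " ran on " + Thread.currentThread().getName()));
        }
        pool.shutdown(); // stop accepting new tasks; already-queued tasks still run
        // Block up to 10 seconds for the queued tasks to drain.
        if (!pool.awaitTermination(10, TimeUnit.SECONDS)) {
            pool.shutdownNow(); // timed out: interrupt whatever is still running
        }
    }
}

Several of the examples below use a polling variant instead, looping while (!pool.isTerminated()) and calling awaitTermination with a short timeout on each pass; this behaves the same overall but gives the caller a chance to log progress between waits.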

Usage

From source file: org.nuxeo.ecm.automation.server.jaxrs.batch.BatchManagerFixture.java

@Test
public void testChunkConcurrency() throws Exception {

    // Initialize a batch
    BatchManager bm = Framework.getService(BatchManager.class);
    String batchId = bm.initBatch();

    // Add chunks concurrently
    int nbChunks = 100;
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(5, 5, 500L, TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>(nbChunks + 1));

    for (int i = 0; i < nbChunks; i++) {
        final int chunkIndex = i;
        tpe.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    bm.addStream(batchId, "0",
                            new ByteArrayInputStream(
                                    ("SomeChunkContent_" + chunkIndex + " ").getBytes(StandardCharsets.UTF_8)),
                            nbChunks, chunkIndex, "MyChunkedFile.txt", "text/plain", 0);
                } catch (IOException e) {
                    fail(e.getMessage());
                }
            }
        });
    }

    tpe.shutdown();
    boolean finish = tpe.awaitTermination(20, TimeUnit.SECONDS);
    assertTrue("timeout", finish);

    // Check chunked file
    Blob blob = bm.getBlob(batchId, "0");
    assertNotNull(blob);
    int nbOccurrences = 0;
    Pattern p = Pattern.compile("SomeChunkContent_");
    Matcher m = p.matcher(blob.getString());
    while (m.find()) {
        nbOccurrences++;
    }
    assertEquals(nbChunks, nbOccurrences);

    // Check storage size
    TransientStore ts = bm.getTransientStore();
    assertTrue(((AbstractTransientStore) ts).getStorageSize() > 17 * nbChunks);

    // Clean batch
    bm.clean(batchId);
    assertEquals(0, ts.getStorageSizeMB());
}

From source file: org.openmrs.module.openconceptlab.updater.Updater.java

private void processInput() throws IOException {
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.getDeserializationConfig().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"));
    JsonParser parser = objectMapper.getJsonFactory().createJsonParser(in);

    JsonToken token = parser.nextToken();
    if (token != JsonToken.START_OBJECT) {
        throw new IOException("JSON must start from an object");
    }
    token = parser.nextToken();

    token = advanceToListOf("concepts", "mappings", parser);

    if (token == JsonToken.END_OBJECT || token == null) {
        return;
    }

    String baseUrl = updateService.getSubscription().getUrl();
    if (baseUrl != null) {
        try {
            URI uri = new URI(baseUrl);
            baseUrl = uri.getScheme() + "://" + uri.getHost();
            if (uri.getPort() != -1) {
                baseUrl += ":" + uri.getPort();
            }
        } catch (Exception e) {
            throw new IllegalStateException(baseUrl + " is not valid", e);
        }
    }

    ThreadPoolExecutor runner = newRunner();
    List<OclConcept> oclConcepts = new ArrayList<OclConcept>();
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        OclConcept oclConcept = parser.readValueAs(OclConcept.class);
        oclConcept.setVersionUrl(prependBaseUrl(baseUrl, oclConcept.getVersionUrl()));
        oclConcept.setUrl(prependBaseUrl(baseUrl, oclConcept.getUrl()));

        oclConcepts.add(oclConcept);

        if (oclConcepts.size() >= BATCH_SIZE) {
            ImportRunner importRunner = new ImportRunner(importer, new CacheService(conceptService),
                    updateService, update);
            importRunner.setOclConcepts(oclConcepts);

            oclConcepts = new ArrayList<OclConcept>();

            runner.execute(importRunner);
        }
    }

    if (oclConcepts.size() != 0) {
        ImportRunner importRunner = new ImportRunner(importer, new CacheService(conceptService), updateService,
                update);
        importRunner.setOclConcepts(oclConcepts);

        runner.execute(importRunner);
    }

    runner.shutdown();
    try {
        runner.awaitTermination(32, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }

    token = advanceToListOf("mappings", null, parser);

    if (token == JsonToken.END_OBJECT) {
        return;
    }

    runner = newRunner();
    List<OclMapping> oclMappings = new ArrayList<OclMapping>();
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        OclMapping oclMapping = parser.readValueAs(OclMapping.class);
        oclMapping.setUrl(prependBaseUrl(baseUrl, oclMapping.getUrl()));
        oclMapping.setFromConceptUrl(prependBaseUrl(baseUrl, oclMapping.getFromConceptUrl()));
        oclMapping.setFromSourceUrl(prependBaseUrl(baseUrl, oclMapping.getFromSourceUrl()));
        oclMapping.setToConceptUrl(prependBaseUrl(baseUrl, oclMapping.getToConceptUrl()));

        oclMappings.add(oclMapping);

        if (oclMappings.size() >= BATCH_SIZE) {
            ImportRunner importRunner = new ImportRunner(importer, new CacheService(conceptService),
                    updateService, update);
            importRunner.setOclMappings(oclMappings);

            oclMappings = new ArrayList<OclMapping>();

            runner.execute(importRunner);
        }
    }

    if (oclMappings.size() != 0) {
        ImportRunner importRunner = new ImportRunner(importer, new CacheService(conceptService), updateService,
                update);
        importRunner.setOclMappings(oclMappings);

        runner.execute(importRunner);
    }

    runner.shutdown();
    try {
        runner.awaitTermination(32, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
}

From source file: org.openstreetmap.josm.data.cache.HostLimitQueueTest.java

/**
 * Check if single-threaded execution works properly
 * @throws Exception in case of error
 */
@Test
public void testSingleThreadPerHost() throws Exception {
    ThreadPoolExecutor tpe = getNewThreadPoolExecutor("test-%d", 3, 1);
    ICacheAccess<String, CacheEntry> cache = JCSCacheManager.getCache("test", 3, 0, "");
    AtomicInteger counter = new AtomicInteger(0);
    long start = System.currentTimeMillis();
    for (int i = 0; i < 10; i++) {
        tpe.execute(new Task(cache, new URL("http://localhost/" + i), counter));
    }
    tpe.shutdown();
    tpe.awaitTermination(15, TimeUnit.SECONDS); // the tasks should take ~10 seconds, so 15 is a safe upper bound
    long duration = System.currentTimeMillis() - start;
    // check that all tasks were executed
    assertEquals(10, counter.get());
    // although there are 3 threads, we can make only 1 parallel call to localhost
    // so it should take ~10 seconds to finish
    // if it's shorter, it means that host limit does not work
    assertTrue("Expected duration between 9 and 11 seconds not met. Actual duration: " + (duration / 1000),
            duration < 11 * 1000 && duration > 9 * 1000);
}

From source file: org.openstreetmap.josm.data.cache.HostLimitQueueTest.java

/**
 * Check if two-threaded execution works properly
 * @throws Exception in case of error
 */
@Test
public void testMultipleThreadPerHost() throws Exception {
    ThreadPoolExecutor tpe = getNewThreadPoolExecutor("test-%d", 3, 2);
    ICacheAccess<String, CacheEntry> cache = JCSCacheManager.getCache("test", 3, 0, "");
    AtomicInteger counter = new AtomicInteger(0);
    long start = System.currentTimeMillis();
    for (int i = 0; i < 10; i++) {
        tpe.execute(new Task(cache, new URL("http://hostlocal/" + i), counter));
    }
    tpe.shutdown();
    tpe.awaitTermination(15, TimeUnit.SECONDS);
    long duration = System.currentTimeMillis() - start;
    // check that all tasks were executed
    assertEquals(10, counter.get());
    // although there are 3 threads, we can make only 2 parallel calls to the same host
    // so it should take ~5 seconds to finish
    // if it's shorter, it means that host limit does not work
    assertTrue("Expected duration between 4 and 6 seconds not met. Actual duration: " + (duration / 1000),
            duration < 6 * 1000 && duration > 4 * 1000);
}

From source file: org.openstreetmap.josm.data.cache.HostLimitQueueTest.java

/**
 * Check two hosts
 * @throws Exception in case of error
 */
@Test
public void testTwoHosts() throws Exception {
    ThreadPoolExecutor tpe = getNewThreadPoolExecutor("test-%d", 3, 1);
    ICacheAccess<String, CacheEntry> cache = JCSCacheManager.getCache("test", 3, 0, "");
    AtomicInteger counter = new AtomicInteger(0);
    long start = System.currentTimeMillis();
    for (int i = 0; i < 10; i++) {
        String url = (i % 2 == 0) ? "http://localhost" : "http://hostlocal";
        tpe.execute(new Task(cache, new URL(url + "/" + i), counter)); // 2 hosts, distinct paths
    }
    tpe.shutdown();
    tpe.awaitTermination(15, TimeUnit.SECONDS);
    long duration = System.currentTimeMillis() - start;
    // check that all tasks were executed
    assertEquals(10, counter.get());
    // although there are 3 threads, we can make only 1 parallel call per host, and we have 2 hosts
    // so it should take ~5 seconds to finish
    // if it's shorter, it means that host limit does not work
    assertTrue("Expected duration between 4 and 6 seconds not met. Actual duration: " + (duration / 1000),
            duration < 6 * 1000 && duration > 4 * 1000);
}

From source file: org.trnltk.apps.experiments.AmbiguityMatrixApp.java

private void waitUntilThreadPoolToTerminate(ThreadPoolExecutor pool) throws InterruptedException {
    pool.shutdown();
    while (!pool.isTerminated()) {
        System.out.println("Waiting pool to be terminated!");
        pool.awaitTermination(1000, TimeUnit.MILLISECONDS);
    }
}

From source file: org.trnltk.apps.morphology.contextless.parser.CachingMorphologicParserApp.java

@App("Parse all sample corpus. Does not do an offline analysis to add most frequent words to cache in advance.")
public void parse8MWords() throws Exception {
    /*
     Total time :0:07:29.799
     Nr of tokens : 18362187
     Avg time : 0.024495938310616267 ms
    */
    final Set<File> files = SampleFiles.oneMillionSentencesTokenizedFiles();

    final LinkedList<String> words = new LinkedList<String>();
    final HashSet<String> uniqueWords = new HashSet<String>();

    for (File tokenizedFile : files) {
        final List<String> lines = Files.readLines(tokenizedFile, Charsets.UTF_8);
        for (String line : lines) {
            final ArrayList<String> strings = Lists
                    .newArrayList(Splitter.on(" ").trimResults().omitEmptyStrings().split(line));
            words.addAll(strings);
            uniqueWords.addAll(strings);
        }
    }

    System.out.println("Number of words : " + words.size());
    System.out.println("Number of unique words : " + uniqueWords.size());
    System.out.println("======================");

    final MorphologicParserCache l1Cache = new LRUMorphologicParserCache(NUMBER_OF_THREADS,
            INITIAL_L1_CACHE_SIZE, MAX_L1_CACHE_SIZE);

    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    final MorphologicParser[] parsers = new MorphologicParser[NUMBER_OF_THREADS];
    for (int i = 0; i < parsers.length; i++) {
        parsers[i] = new CachingMorphologicParser(new TwoLevelMorphologicParserCache(BULK_SIZE, l1Cache),
                contextlessMorphologicParser, true);
    }

    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    for (int i = 0; i < words.size(); i = i + BULK_SIZE) {
        final MorphologicParser parser = parsers[(i / BULK_SIZE) % NUMBER_OF_THREADS];
        int start = i;
        int end = i + BULK_SIZE < words.size() ? i + BULK_SIZE : words.size();
        final List<String> subWordList = words.subList(start, end);
        final int wordIndex = i;
        pool.execute(new BulkParseCommand(parser, subWordList, wordIndex, false));
    }

    pool.shutdown();
    while (!pool.isTerminated()) {
        System.out.println("Waiting pool to be terminated!");
        pool.awaitTermination(1000, TimeUnit.MILLISECONDS);
    }

    stopWatch.stop();

    System.out.println("Total time :" + stopWatch.toString());
    System.out.println("Nr of tokens : " + words.size());
    System.out.println("Avg time : " + (stopWatch.getTime() * 1.0d) / (words.size() * 1.0d) + " ms");
}

From source file: org.trnltk.apps.morphology.contextless.parser.CachingMorphologicParserApp.java

@App("Parse all sample corpus. Does an offline analysis to add most frequent words to cache in advance.")
public void parseWordsOfOneMillionSentences_withOfflineAnalysis() throws Exception {
    /*
    Total time :0:05:27.806
    Nr of tokens : 18362187
    Avg time : 0.01785223078274935 ms
    */
    LoggingSettings.turnOnLogger(LoggingSettings.Piece.FrequentWordAnalysis);

    final Set<File> files = SampleFiles.oneMillionSentencesTokenizedFiles();

    final List<String> words = new ArrayList<String>();
    final HashSet<String> uniqueWords = new HashSet<String>();

    for (File tokenizedFile : files) {
        final List<String> lines = Files.readLines(tokenizedFile, Charsets.UTF_8);
        for (String line : lines) {
            final ArrayList<String> strings = Lists
                    .newArrayList(Splitter.on(" ").trimResults().omitEmptyStrings().split(line));
            words.addAll(strings);
            uniqueWords.addAll(strings);
        }
    }

    System.out.println("Number of words : " + words.size());
    System.out.println("Number of unique words : " + uniqueWords.size());
    System.out.println("======================");

    final MorphologicParserCache staticCache = new MorphologicParserCache() {

        private ImmutableMap<String, List<MorphemeContainer>> cacheMap;
        private boolean built;

        @Override
        public List<MorphemeContainer> get(String input) {
            return this.cacheMap.get(input);
        }

        @Override
        public void put(String input, List<MorphemeContainer> morphemeContainers) {
            // do nothing
        }

        @Override
        public void putAll(Map<String, List<MorphemeContainer>> map) {
            // do nothing
        }

        @Override
        public void build(MorphologicParser parser) {
            final ImmutableMap.Builder<String, List<MorphemeContainer>> builder = new ImmutableMap.Builder<String, List<MorphemeContainer>>();
            final FrequentWordAnalysis.FrequentWordAnalysisResult result = new FrequentWordAnalysis().run(words,
                    0.75);

            final List<String> wordsToUseInCache = result.getWordsWithEnoughOccurrences();
            for (String word : wordsToUseInCache) {
                builder.put(word, contextlessMorphologicParser.parseStr(word));
            }
            this.cacheMap = builder.build();
            this.built = true;
        }

        @Override
        public boolean isNotBuilt() {
            return !this.built;
        }
    };

    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    final MorphologicParser[] parsers = new MorphologicParser[NUMBER_OF_THREADS];
    for (int i = 0; i < parsers.length; i++) {
        parsers[i] = new CachingMorphologicParser(staticCache, contextlessMorphologicParser, true);
    }

    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    for (int i = 0; i < words.size(); i = i + BULK_SIZE) {
        final MorphologicParser parser = parsers[(i / BULK_SIZE) % NUMBER_OF_THREADS];
        int start = i;
        int end = i + BULK_SIZE < words.size() ? i + BULK_SIZE : words.size();
        final List<String> subWordList = words.subList(start, end);
        final int wordIndex = i;
        pool.execute(new BulkParseCommand(parser, subWordList, wordIndex, false));
    }

    pool.shutdown();
    while (!pool.isTerminated()) {
        System.out.println("Waiting pool to be terminated!");
        pool.awaitTermination(1000, TimeUnit.MILLISECONDS);
    }

    stopWatch.stop();

    System.out.println("Total time :" + stopWatch.toString());
    System.out.println("Nr of tokens : " + words.size());
    System.out.println("Avg time : " + (stopWatch.getTime() * 1.0d) / (words.size() * 1.0d) + " ms");
}

From source file: org.trnltk.apps.morphology.contextless.parser.FolderContextlessMorphologicParsingApp.java

@App
public void parse8MWords_withOfflineAnalysis() throws Exception {
    final File folder = new File("D:\\devl\\data\\1MSentences");

    final List<File> files = new ArrayList<File>();

    for (File file : folder.listFiles()) {
        if (file.getName().endsWith("_tokenized.txt"))
            files.add(file);
    }

    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    for (File file : files) {
        final File targetFile = new File(file.getParent(),
                file.getName().substring(0, file.getName().length() - "_tokenized.txt".length())
                        + "_parsed.txt");
        final FileParseCommand fileParseCommand = new FileParseCommand(contextlessMorphologicParser, file,
                targetFile, false);
        pool.execute(fileParseCommand);
    }

    pool.shutdown();
    while (!pool.isTerminated()) {
        System.out.println("Waiting pool to be terminated!");
        pool.awaitTermination(1000, TimeUnit.MILLISECONDS);
    }

    stopWatch.stop();

    System.out.println("Total time :" + stopWatch.toString());
}

From source file: org.trnltk.apps.tokenizer.TextTokenizerCorpusApp.java

@App("Creates tokenized files")
public void tokenizeBig_files_onSource() throws IOException, InterruptedException {
    final StopWatch taskStopWatch = new StopWatch();
    taskStopWatch.start();

    final File parentFolder = new File("D:\\devl\\data\\aakindan");
    final File sourceFolder = new File(parentFolder, "src_split");
    final File targetFolder = new File(parentFolder, "src_split_tokenized");
    final File errorFolder = new File(parentFolder, "src_split_tokenization_error");
    final File[] files = sourceFolder.listFiles();
    Validate.notNull(files);

    final List<File> filesToTokenize = new ArrayList<File>();
    for (File file : files) {
        if (file.isDirectory())
            continue;

        filesToTokenize.add(file);
    }

    int lineCountOfAllFiles = 0;
    for (File file : filesToTokenize) {
        lineCountOfAllFiles += Utilities.lineCount(file);
    }

    System.out.println("Total lines in all files " + lineCountOfAllFiles);

    final StopWatch callbackStopWatch = new StopWatch();
    final TokenizationCommandCallback callback = new TokenizationCommandCallback(lineCountOfAllFiles,
            callbackStopWatch);

    int NUMBER_OF_THREADS = 8;
    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    callbackStopWatch.start();
    for (File sourceFile : filesToTokenize) {
        final String fileBaseName = sourceFile.getName().substring(0,
                sourceFile.getName().length() - ".txt.0000".length());
        final String index = FilenameUtils.getExtension(sourceFile.getName());
        final File targetFile = new File(targetFolder, fileBaseName + "_tokenized.txt." + index);
        final File errorFile = new File(errorFolder, fileBaseName + "_tokenization_error.txt." + index);

        pool.execute(
                new TokenizationCommand(callback, fastRelaxedTokenizer, sourceFile, targetFile, errorFile));
    }

    pool.shutdown();
    while (!pool.isTerminated()) {
        //            System.out.println("Waiting pool to be terminated!");
        pool.awaitTermination(3000, TimeUnit.MILLISECONDS);
    }

    callbackStopWatch.stop();
    taskStopWatch.stop();
    System.out.println("Total time :" + taskStopWatch.toString());
    System.out.println("Nr of tokens : " + callback.getNumberOfTokens());
    System.out.println(
            "Avg time : " + (taskStopWatch.getTime() * 1.0d) / (callback.getNumberOfTokens() * 1.0d) + " ms");
}