Example usage for java.util.concurrent ThreadPoolExecutor execute

List of usage examples for java.util.concurrent ThreadPoolExecutor execute

Introduction

On this page you can find usage examples for java.util.concurrent ThreadPoolExecutor execute, collected from real-world source files.

Prototype

public void execute(Runnable command) 

Document

Executes the given task sometime in the future.
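
Before the project examples below, here is a minimal, self-contained sketch of the typical pattern: construct a pool, submit Runnable tasks with execute, then shut the pool down and wait for queued tasks to finish. The pool sizes and timeout here are illustrative assumptions, not values required by the API.

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ExecuteExample {
    public static void main(String[] args) throws InterruptedException {
        // 2 core threads, up to 4 threads; idle non-core threads die after 60 seconds
        ThreadPoolExecutor pool = new ThreadPoolExecutor(2, 4, 60L, TimeUnit.SECONDS,
                new LinkedBlockingQueue<Runnable>());

        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            pool.execute(new Runnable() {
                public void run() {
                    System.out.println("Task " + taskId + " ran on " + Thread.currentThread().getName());
                }
            });
        }

        pool.shutdown(); // no new tasks accepted; already-queued tasks still run
        pool.awaitTermination(10, TimeUnit.SECONDS); // wait for queued tasks to finish
    }
}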

Usage

From source file:org.jumpmind.symmetric.service.impl.NodeCommunicationService.java

public boolean execute(final NodeCommunication nodeCommunication, RemoteNodeStatuses statuses,
        final INodeCommunicationExecutor executor) {
    Date now = new Date();
    Date lockTimeout = getLockTimeoutDate(nodeCommunication.getCommunicationType());
    final Set<String> executing = this.currentlyExecuting.get(nodeCommunication.getCommunicationType());
    boolean locked = !executing.contains(nodeCommunication.getNodeId())
            && sqlTemplate.update(getSql("aquireLockSql"), clusterService.getServerId(), now, now,
                    nodeCommunication.getNodeId(), nodeCommunication.getCommunicationType().name(),
                    lockTimeout) == 1;
    if (locked) {
        executing.add(nodeCommunication.getNodeId());
        nodeCommunication.setLastLockTime(now);
        nodeCommunication.setLockingServerId(clusterService.getServerId());
        final RemoteNodeStatus status = statuses.add(nodeCommunication.getNodeId());
        Runnable r = new Runnable() {
            public void run() {
                long ts = System.currentTimeMillis();
                boolean failed = false;
                try {
                    executor.execute(nodeCommunication, status);
                    failed = status.failed();
                } catch (Throwable ex) {
                    failed = true;
                    log.error(String.format("Failed to execute %s for node %s",
                            nodeCommunication.getCommunicationType().name(), nodeCommunication.getNodeId()),
                            ex);
                } finally {
                    unlock(nodeCommunication, status, failed, ts);
                    executing.remove(nodeCommunication.getNodeId());
                }
            }
        };
        if (parameterService.is(ParameterConstants.SYNCHRONIZE_ALL_JOBS)) {
            r.run();
        } else {
            ThreadPoolExecutor service = getExecutor(nodeCommunication.getCommunicationType());
            service.execute(r);
        }
    }
    return locked;
}

From source file:org.openmrs.module.openconceptlab.updater.Updater.java

private void processInput() throws IOException {
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.getDeserializationConfig().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"));
    JsonParser parser = objectMapper.getJsonFactory().createJsonParser(in);

    JsonToken token = parser.nextToken();
    if (token != JsonToken.START_OBJECT) {
        throw new IOException("JSON must start from an object");
    }
    token = parser.nextToken();

    token = advanceToListOf("concepts", "mappings", parser);

    if (token == JsonToken.END_OBJECT || token == null) {
        return;
    }

    String baseUrl = updateService.getSubscription().getUrl();
    if (baseUrl != null) {
        try {
            URI uri = new URI(baseUrl);
            baseUrl = uri.getScheme() + "://" + uri.getHost();
            if (uri.getPort() != -1) {
                baseUrl += ":" + uri.getPort();
            }
        } catch (Exception e) {
            throw new IllegalStateException(baseUrl + " is not valid", e);
        }
    }

    ThreadPoolExecutor runner = newRunner();
    List<OclConcept> oclConcepts = new ArrayList<OclConcept>();
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        OclConcept oclConcept = parser.readValueAs(OclConcept.class);
        oclConcept.setVersionUrl(prependBaseUrl(baseUrl, oclConcept.getVersionUrl()));
        oclConcept.setUrl(prependBaseUrl(baseUrl, oclConcept.getUrl()));

        oclConcepts.add(oclConcept);

        if (oclConcepts.size() >= BATCH_SIZE) {
            ImportRunner importRunner = new ImportRunner(importer, new CacheService(conceptService),
                    updateService, update);
            importRunner.setOclConcepts(oclConcepts);

            oclConcepts = new ArrayList<OclConcept>();

            runner.execute(importRunner);
        }
    }

    if (oclConcepts.size() != 0) {
        ImportRunner importRunner = new ImportRunner(importer, new CacheService(conceptService), updateService,
                update);
        importRunner.setOclConcepts(oclConcepts);

        runner.execute(importRunner);
    }

    runner.shutdown();
    try {
        runner.awaitTermination(32, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }

    token = advanceToListOf("mappings", null, parser);

    if (token == JsonToken.END_OBJECT) {
        return;
    }

    runner = newRunner();
    List<OclMapping> oclMappings = new ArrayList<OclMapping>();
    while (parser.nextToken() != JsonToken.END_ARRAY) {
        OclMapping oclMapping = parser.readValueAs(OclMapping.class);
        oclMapping.setUrl(prependBaseUrl(baseUrl, oclMapping.getUrl()));
        oclMapping.setFromConceptUrl(prependBaseUrl(baseUrl, oclMapping.getFromConceptUrl()));
        oclMapping.setFromSourceUrl(prependBaseUrl(baseUrl, oclMapping.getFromSourceUrl()));
        oclMapping.setToConceptUrl(prependBaseUrl(baseUrl, oclMapping.getToConceptUrl()));

        oclMappings.add(oclMapping);

        if (oclMappings.size() >= BATCH_SIZE) {
            ImportRunner importRunner = new ImportRunner(importer, new CacheService(conceptService),
                    updateService, update);
            importRunner.setOclMappings(oclMappings);

            oclMappings = new ArrayList<OclMapping>();

            runner.execute(importRunner);
        }
    }

    if (oclMappings.size() != 0) {
        ImportRunner importRunner = new ImportRunner(importer, new CacheService(conceptService), updateService,
                update);
        importRunner.setOclMappings(oclMappings);

        runner.execute(importRunner);
    }

    runner.shutdown();
    try {
        runner.awaitTermination(32, TimeUnit.DAYS);
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
}

From source file:org.openstreetmap.josm.data.cache.HostLimitQueueTest.java

/**
 * Check that single-threaded execution works properly
 * @throws Exception in case of error
 */
@Test
public void testSingleThreadPerHost() throws Exception {
    ThreadPoolExecutor tpe = getNewThreadPoolExecutor("test-%d", 3, 1);
    ICacheAccess<String, CacheEntry> cache = JCSCacheManager.getCache("test", 3, 0, "");
    AtomicInteger counter = new AtomicInteger(0);
    long start = System.currentTimeMillis();
    for (int i = 0; i < 10; i++) {
        tpe.execute(new Task(cache, new URL("http://localhost/" + i), counter));
    }
    tpe.shutdown();
    tpe.awaitTermination(15, TimeUnit.SECONDS); // at most it should take ~10 seconds, so after 15 it's already failed
    long duration = System.currentTimeMillis() - start;
    // check that all tasks were executed
    assertEquals(10, counter.get());
    // although there are 3 threads, we can make only 1 parallel call to localhost
    // so it should take ~10 seconds to finish
    // if it's shorter, it means that host limit does not work
    assertTrue("Expected duration between 9 and 11 seconds not met. Actual duration: " + (duration / 1000),
            duration < 11 * 1000 && duration > 9 * 1000);
}

From source file:org.openstreetmap.josm.data.cache.HostLimitQueueTest.java

/**
 * Check that execution with two threads per host works properly
 * @throws Exception in case of error
 */
@Test
public void testMultipleThreadPerHost() throws Exception {
    ThreadPoolExecutor tpe = getNewThreadPoolExecutor("test-%d", 3, 2);
    ICacheAccess<String, CacheEntry> cache = JCSCacheManager.getCache("test", 3, 0, "");
    AtomicInteger counter = new AtomicInteger(0);
    long start = System.currentTimeMillis();
    for (int i = 0; i < 10; i++) {
        tpe.execute(new Task(cache, new URL("http://hostlocal/" + i), counter));
    }
    tpe.shutdown();
    tpe.awaitTermination(15, TimeUnit.SECONDS);
    long duration = System.currentTimeMillis() - start;
    // check that all tasks were executed
    assertEquals(10, counter.get());
    // although there are 3 threads, we can make only 2 parallel calls to hostlocal
    // so it should take ~5 seconds to finish
    // if it's shorter, it means that host limit does not work
    assertTrue("Expected duration between 4 and 6 seconds not met. Actual duration: " + (duration / 1000),
            duration < 6 * 1000 && duration > 4 * 1000);
}

From source file:org.openstreetmap.josm.data.cache.HostLimitQueueTest.java

/**
 * Check two hosts
 * @throws Exception in case of error
 */
@Test
public void testTwoHosts() throws Exception {
    ThreadPoolExecutor tpe = getNewThreadPoolExecutor("test-%d", 3, 1);
    ICacheAccess<String, CacheEntry> cache = JCSCacheManager.getCache("test", 3, 0, "");
    AtomicInteger counter = new AtomicInteger(0);
    long start = System.currentTimeMillis();
    for (int i = 0; i < 10; i++) {
        String url = (i % 2 == 0) ? "http://localhost" : "http://hostlocal";
        tpe.execute(new Task(cache, new URL(url + i), counter));
    }
    tpe.shutdown();
    tpe.awaitTermination(15, TimeUnit.SECONDS);
    long duration = System.currentTimeMillis() - start;
    // check that all tasks were executed
    assertEquals(10, counter.get());
    // although there are 3 threads, we can make only 1 parallel call per host, and we have 2 hosts
    // so it should take ~5 seconds to finish
    // if it's shorter, it means that host limit does not work
    assertTrue("Expected duration between 4 and 6 seconds not met. Actual duration: " + (duration / 1000),
            duration < 6 * 1000 && duration > 4 * 1000);
}

From source file:org.polymap.core.runtime.PolymapThreadPoolExecutor.java

public PolymapThreadPoolExecutor(int minPoolSize, int maxPoolSize, int keepAliveSecs, BlockingQueue queue) {
    super(minPoolSize, maxPoolSize, keepAliveSecs, TimeUnit.SECONDS, queue);

    // thread factory
    setThreadFactory(this);

    // rejected? -> wait and try again
    setRejectedExecutionHandler(new RejectedExecutionHandler() {
        public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
            //log.warn( "Unable to queue task: " + r );
            // wait (a long time) and try again (until StackOverflow)
            synchronized (queueFull) {
                queueFull.set(true);
                try {
                    queueFull.wait(1000);
                } catch (InterruptedException e) {
                }
            }
            executor.execute(r);
        }
    });

}

From source file:org.polymap.core.runtime.UnboundPoolExecutor.java

public static ExecutorService newInstance() {
    final int procs = Runtime.getRuntime().availableProcessors();
    final int maxThreads = procs * MAX_THREADS_PER_PROC;

    // thread factory
    ThreadFactory threadFactory = new ThreadFactory() {
        volatile int threadNumber = 0;

        public Thread newThread(Runnable r) {
            String prefix = "polymap-";
            Thread t = new Thread(r, prefix + threadNumber++);
            t.setDaemon(false);
            t.setPriority(DEFAULT_THREAD_PRIORITY);
            return t;
        }
    };

    // thread pool
    ThreadPoolExecutor executor = new ThreadPoolExecutor(procs, maxThreads, 180L, TimeUnit.SECONDS,
            new SynchronousQueue<Runnable>(), threadFactory);

    // rejected? -> wait and try again
    executor.setRejectedExecutionHandler(new RejectedExecutionHandler() {
        Random rand = new Random();

        public void rejectedExecution(Runnable r, ThreadPoolExecutor _executor) {
            do {
                try {
                    Thread.sleep(rand.nextInt(1000) + 100);
                } catch (InterruptedException e) {
                }
            } while (_executor.getActiveCount() >= maxThreads);

            _executor.execute(r);
        }
    });

    //executor.allowCoreThreadTimeOut( true );        
    return executor;
}
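
Both handlers above deal with rejection by hand: the first waits on a monitor and resubmits, the second polls getActiveCount() before resubmitting. If blocking the submitting thread is acceptable, the JDK's built-in ThreadPoolExecutor.CallerRunsPolicy gives similar back-pressure without a custom retry loop. A minimal sketch, reusing the procs, maxThreads, and threadFactory names from the example above:

// Sketch only: when the SynchronousQueue has no idle thread to hand the task to,
// CallerRunsPolicy makes the submitting thread run the task itself, throttling submission.
ThreadPoolExecutor executor = new ThreadPoolExecutor(procs, maxThreads, 180L, TimeUnit.SECONDS,
        new SynchronousQueue<Runnable>(), threadFactory,
        new ThreadPoolExecutor.CallerRunsPolicy());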

From source file:org.trnltk.apps.experiments.AmbiguityMatrixApp.java

private ThreadPoolExecutor startThreads(List<String> distinctWordsWithEnoughOccurrences,
        List<Map<String, List<String>>> resultMaps) {
    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);
    for (int i = 0; i < distinctWordsWithEnoughOccurrences.size(); i = i + BULK_SIZE) {
        int start = i;
        int end = i + BULK_SIZE < distinctWordsWithEnoughOccurrences.size() ? i + BULK_SIZE
                : distinctWordsWithEnoughOccurrences.size();
        final List<String> subWordList = distinctWordsWithEnoughOccurrences.subList(start, end);
        final int wordIndex = i;
        pool.execute(new BulkParseCommand(contextlessMorphologicParser, subWordList, wordIndex, false,
                resultMaps.get(i / BULK_SIZE)));
    }
    return pool;
}

From source file:org.trnltk.apps.morphology.contextless.parser.CachingMorphologicParserApp.java

@App("Parse all sample corpus. Does not do an offline analysis to add most frequent words to cache in advance.")
public void parse8MWords() throws Exception {
    /*
     Total time :0:07:29.799
     Nr of tokens : 18362187
     Avg time : 0.024495938310616267 ms
    */
    final Set<File> files = SampleFiles.oneMillionSentencesTokenizedFiles();

    final LinkedList<String> words = new LinkedList<String>();
    final HashSet<String> uniqueWords = new HashSet<String>();

    for (File tokenizedFile : files) {
        final List<String> lines = Files.readLines(tokenizedFile, Charsets.UTF_8);
        for (String line : lines) {
            final ArrayList<String> strings = Lists
                    .newArrayList(Splitter.on(" ").trimResults().omitEmptyStrings().split(line));
            words.addAll(strings);
            uniqueWords.addAll(strings);
        }
    }

    System.out.println("Number of words : " + words.size());
    System.out.println("Number of unique words : " + uniqueWords.size());
    System.out.println("======================");

    final MorphologicParserCache l1Cache = new LRUMorphologicParserCache(NUMBER_OF_THREADS,
            INITIAL_L1_CACHE_SIZE, MAX_L1_CACHE_SIZE);

    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    final MorphologicParser[] parsers = new MorphologicParser[NUMBER_OF_THREADS];
    for (int i = 0; i < parsers.length; i++) {
        parsers[i] = new CachingMorphologicParser(new TwoLevelMorphologicParserCache(BULK_SIZE, l1Cache),
                contextlessMorphologicParser, true);
    }

    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    for (int i = 0; i < words.size(); i = i + BULK_SIZE) {
        final MorphologicParser parser = parsers[(i / BULK_SIZE) % NUMBER_OF_THREADS];
        int start = i;
        int end = i + BULK_SIZE < words.size() ? i + BULK_SIZE : words.size();
        final List<String> subWordList = words.subList(start, end);
        final int wordIndex = i;
        pool.execute(new BulkParseCommand(parser, subWordList, wordIndex, false));
    }

    pool.shutdown();
    while (!pool.isTerminated()) {
        System.out.println("Waiting pool to be terminated!");
        pool.awaitTermination(1000, TimeUnit.MILLISECONDS);
    }

    stopWatch.stop();

    System.out.println("Total time :" + stopWatch.toString());
    System.out.println("Nr of tokens : " + words.size());
    System.out.println("Avg time : " + (stopWatch.getTime() * 1.0d) / (words.size() * 1.0d) + " ms");
}

From source file:org.trnltk.apps.morphology.contextless.parser.CachingMorphologicParserApp.java

@App("Parse all sample corpus. Does an offline analysis to add most frequent words to cache in advance.")
public void parseWordsOfOneMillionSentences_withOfflineAnalysis() throws Exception {
    /*
    Total time :0:05:27.806
    Nr of tokens : 18362187
    Avg time : 0.01785223078274935 ms
    */
    LoggingSettings.turnOnLogger(LoggingSettings.Piece.FrequentWordAnalysis);

    final Set<File> files = SampleFiles.oneMillionSentencesTokenizedFiles();

    final List<String> words = new ArrayList<String>();
    final HashSet<String> uniqueWords = new HashSet<String>();

    for (File tokenizedFile : files) {
        final List<String> lines = Files.readLines(tokenizedFile, Charsets.UTF_8);
        for (String line : lines) {
            final ArrayList<String> strings = Lists
                    .newArrayList(Splitter.on(" ").trimResults().omitEmptyStrings().split(line));
            words.addAll(strings);
            uniqueWords.addAll(strings);
        }
    }

    System.out.println("Number of words : " + words.size());
    System.out.println("Number of unique words : " + uniqueWords.size());
    System.out.println("======================");

    final MorphologicParserCache staticCache = new MorphologicParserCache() {

        private ImmutableMap<String, List<MorphemeContainer>> cacheMap;
        private boolean built;

        @Override
        public List<MorphemeContainer> get(String input) {
            return this.cacheMap.get(input);
        }

        @Override
        public void put(String input, List<MorphemeContainer> morphemeContainers) {
            // do nothing
        }

        @Override
        public void putAll(Map<String, List<MorphemeContainer>> map) {
            // do nothing
        }

        @Override
        public void build(MorphologicParser parser) {
            final ImmutableMap.Builder<String, List<MorphemeContainer>> builder = new ImmutableMap.Builder<String, List<MorphemeContainer>>();
            final FrequentWordAnalysis.FrequentWordAnalysisResult result = new FrequentWordAnalysis().run(words,
                    0.75);

            final List<String> wordsToUseInCache = result.getWordsWithEnoughOccurrences();
            for (String word : wordsToUseInCache) {
                builder.put(word, contextlessMorphologicParser.parseStr(word));
            }
            this.cacheMap = builder.build();
            this.built = true;
        }

        @Override
        public boolean isNotBuilt() {
            return !this.built;
        }
    };

    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    final MorphologicParser[] parsers = new MorphologicParser[NUMBER_OF_THREADS];
    for (int i = 0; i < parsers.length; i++) {
        parsers[i] = new CachingMorphologicParser(staticCache, contextlessMorphologicParser, true);
    }

    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    for (int i = 0; i < words.size(); i = i + BULK_SIZE) {
        final MorphologicParser parser = parsers[(i / BULK_SIZE) % NUMBER_OF_THREADS];
        int start = i;
        int end = i + BULK_SIZE < words.size() ? i + BULK_SIZE : words.size();
        final List<String> subWordList = words.subList(start, end);
        final int wordIndex = i;
        pool.execute(new BulkParseCommand(parser, subWordList, wordIndex, false));
    }

    pool.shutdown();
    while (!pool.isTerminated()) {
        System.out.println("Waiting pool to be terminated!");
        pool.awaitTermination(1000, TimeUnit.MILLISECONDS);
    }

    stopWatch.stop();

    System.out.println("Total time :" + stopWatch.toString());
    System.out.println("Nr of tokens : " + words.size());
    System.out.println("Avg time : " + (stopWatch.getTime() * 1.0d) / (words.size() * 1.0d) + " ms");
}