Example usage for java.util.concurrent ExecutorService execute

List of usage examples for java.util.concurrent ExecutorService execute

Introduction

On this page you can find usage examples for java.util.concurrent ExecutorService execute.

Prototype

void execute(Runnable command);

Document

Executes the given command at some time in the future.
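
Before the project examples below, here is a minimal, self-contained sketch of how execute is typically used: submit Runnable tasks to a pool, then shut the pool down and wait for the queued work to finish. The pool size, task count, and one-minute timeout are illustrative choices for this sketch, not part of the ExecutorService contract.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ExecuteSketch {
    public static void main(String[] args) throws InterruptedException {
        // a fixed-size pool; four threads is an arbitrary choice for this sketch
        ExecutorService executor = Executors.newFixedThreadPool(4);

        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            // execute() queues the Runnable for asynchronous execution and returns immediately;
            // unlike submit(), it returns no Future for tracking the task
            executor.execute(() -> System.out.println(
                    "task " + taskId + " ran on " + Thread.currentThread().getName()));
        }

        executor.shutdown(); // stop accepting new tasks
        // wait for the queued tasks to finish; the one-minute timeout is illustrative
        if (!executor.awaitTermination(1, TimeUnit.MINUTES)) {
            executor.shutdownNow();
        }
    }
}

Most of the examples below follow the same sequence after the last execute call: shutdown() followed by awaitTermination (or a wait loop) before the results are used.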

Usage

From source file:org.voyanttools.trombone.input.index.LuceneIndexer.java

public String index(List<StoredDocumentSource> storedDocumentSources) throws IOException {

    // let's check if we need to create new sources because of tokenization parameters
    if (parameters.getParameterValue("tokenization", "").isEmpty() == false) {
        StoredDocumentSourceStorage sourceDocumentSourceStorage = storage.getStoredDocumentSourceStorage();
        String params = parameters.getParameterValue("tokenization");
        for (int i = 0, len = storedDocumentSources.size(); i < len; i++) {
            StoredDocumentSource storedDocumentSource = storedDocumentSources.get(i);
            String id = storedDocumentSource.getId();
            String newId = DigestUtils.md5Hex(id + params);
            InputStream inputStream = sourceDocumentSourceStorage.getStoredDocumentSourceInputStream(id);
            DocumentMetadata metadata = storedDocumentSource.getMetadata();
            metadata.setLastTokenPositionIndex(TokenType.lexical, 0); // crucial to ensure that the document is re-analyzed and its metadata re-written
            InputSource inputSource = new InputStreamInputSource(newId, metadata, inputStream);
            storedDocumentSources.set(i, sourceDocumentSourceStorage.getStoredDocumentSource(inputSource));
            inputStream.close();
        }
    }

    List<String> ids = new ArrayList<String>();
    for (StoredDocumentSource storedDocumentSource : storedDocumentSources) {
        ids.add(storedDocumentSource.getId());
    }
    String corpusId = storage.storeStrings(ids);

    // determine if we need to modify the Lucene index
    Collection<StoredDocumentSource> storedDocumentSourceForLucene = new ArrayList<StoredDocumentSource>();
    if (storage.getLuceneManager().directoryExists()) {
        LeafReader reader = SlowCompositeReaderWrapper.wrap(storage.getLuceneManager().getDirectoryReader());
        Terms terms = reader.terms("id");
        if (terms == null) {
            storedDocumentSourceForLucene.addAll(storedDocumentSources);
        } else {
            TermsEnum termsEnum = terms.iterator();
            for (StoredDocumentSource storedDocumentSource : storedDocumentSources) {
                String id = storedDocumentSource.getId();
                if (!termsEnum.seekExact(new BytesRef(id))) {
                    storedDocumentSourceForLucene.add(storedDocumentSource);
                }
            }
        }
    } else {
        storedDocumentSourceForLucene.addAll(storedDocumentSources);
    }

    if (storedDocumentSourceForLucene.isEmpty() == false) {

        // index documents (or at least add the corpus to documents that aren't already there); we need a new writer
        IndexWriter indexWriter = storage.getLuceneManager().getIndexWriter();
        DirectoryReader indexReader = DirectoryReader.open(indexWriter, true);
        IndexSearcher indexSearcher = new IndexSearcher(indexReader);
        boolean verbose = parameters.getParameterBooleanValue("verbose");
        int processors = Runtime.getRuntime().availableProcessors();
        ExecutorService executor;

        // index
        executor = Executors.newFixedThreadPool(processors);
        for (StoredDocumentSource storedDocumentSource : storedDocumentSourceForLucene) {
            Runnable worker = new StoredDocumentSourceIndexer(storage, indexWriter, indexSearcher,
                    storedDocumentSource, corpusId, verbose);
            executor.execute(worker);
        }
        executor.shutdown();
        try {
            if (!executor.awaitTermination(parameters.getParameterIntValue("luceneIndexingTimeout", 60 * 10),
                    TimeUnit.SECONDS)) { // default 10 minutes
                throw new InterruptedException("Lucene indexing has run out of time.");
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Lucene indexing has been interrupted.", e);
        } finally {

            try {
                indexWriter.commit();
            } catch (IOException e) {
                indexWriter.close(); // this may also throw an exception, but docs say to close on commit error
                throw e;
            }
        }

        // this should almost never be called
        if (parameters.containsKey("forceMerge")) {
            indexWriter.forceMerge(parameters.getParameterIntValue("forceMerge"));
        }

        indexReader = DirectoryReader.open(indexWriter, true);
        storage.getLuceneManager().setDirectoryReader(indexReader); // make sure it's available afterwards            

        // now determine which documents need to be analyzed
        Collection<StoredDocumentSource> storedDocumentSourceForAnalysis = new ArrayList<StoredDocumentSource>();
        for (StoredDocumentSource storedDocumentSource : storedDocumentSourceForLucene) {
            if (storedDocumentSource.getMetadata().getLastTokenPositionIndex(TokenType.lexical) == 0) { // don't re-analyze
                storedDocumentSourceForAnalysis.add(storedDocumentSource);
            }
        }

        if (storedDocumentSourceForAnalysis.isEmpty() == false) {
            indexSearcher = new IndexSearcher(indexReader);
            executor = Executors.newFixedThreadPool(processors);
            for (StoredDocumentSource storedDocumentSource : storedDocumentSourceForAnalysis) {
                if (storedDocumentSource.getMetadata().getLastTokenPositionIndex(TokenType.lexical) == 0) { // don't re-analyze
                    Runnable worker = new IndexedDocumentAnalyzer(storage, indexSearcher, storedDocumentSource,
                            corpusId, verbose);
                    executor.execute(worker);
                }
            }
            executor.shutdown();
            try {
                if (!executor.awaitTermination(
                        parameters.getParameterIntValue("luceneAnalysisTimeout", 60 * 10), TimeUnit.SECONDS)) { // default 10 minutes
                    throw new InterruptedException("Lucene analysis has run out of time.");
                }
            } catch (InterruptedException e) {
                throw new RuntimeException("Lucene document analysis run out of time", e);
            }
        }

    }

    return corpusId;

}

From source file:com.relativitas.maven.plugins.formatter.FormatterMojo.java

private void formatSourceDirectory(ExecutorService service, final ResultCollector rc,
        final Properties hashCache, final File sourceDir) throws MojoExecutionException {
    final String[] fileString = constructFileList(sourceDir);
    for (final String string : fileString) {
        service.execute(new Runnable() {
            public void run() {
                formatFile(string, rc, hashCache, sourceDir);
            }
        });
    }
}

From source file:org.springframework.amqp.rabbit.core.RabbitTemplatePublisherCallbacksIntegrationTests.java

@Test
public void testPublisherConfirmGetUnconfirmedConcurrency() throws Exception {
    ConnectionFactory mockConnectionFactory = mock(ConnectionFactory.class);
    Connection mockConnection = mock(Connection.class);
    Channel mockChannel = mock(Channel.class);
    when(mockChannel.isOpen()).thenReturn(true);
    final AtomicLong seq = new AtomicLong();
    doAnswer(invocation -> seq.incrementAndGet()).when(mockChannel).getNextPublishSeqNo();

    when(mockConnectionFactory.newConnection(any(ExecutorService.class), anyString()))
            .thenReturn(mockConnection);
    when(mockConnection.isOpen()).thenReturn(true);
    doReturn(mockChannel).when(mockConnection).createChannel();

    CachingConnectionFactory ccf = new CachingConnectionFactory(mockConnectionFactory);
    ccf.setPublisherConfirms(true);
    final RabbitTemplate template = new RabbitTemplate(ccf);

    final AtomicBoolean confirmed = new AtomicBoolean();
    template.setConfirmCallback((correlationData, ack, cause) -> confirmed.set(true));
    ExecutorService exec = Executors.newSingleThreadExecutor();
    final AtomicBoolean sentAll = new AtomicBoolean();
    exec.execute(() -> {
        for (int i = 0; i < 10000; i++) {
            template.convertAndSend(ROUTE, (Object) "message", new CorrelationData("abc"));
        }
        sentAll.set(true);
    });
    long t1 = System.currentTimeMillis();
    while (!sentAll.get() && System.currentTimeMillis() < t1 + 20000) {
        template.getUnconfirmed(-1);
    }
    assertTrue(sentAll.get());
    assertFalse(confirmed.get());
}

From source file:org.kurento.test.grid.GridHandler.java

public void startNodes() {
    try {
        countDownLatch = new CountDownLatch(nodes.size());
        ExecutorService exec = Executors.newFixedThreadPool(nodes.size());

        for (final GridNode n : nodes.values()) {
            Thread t = new Thread() {
                @Override
                public void run() {
                    startNode(n);
                }
            };
            exec.execute(t);
        }

        if (!countDownLatch.await(TIMEOUT_NODE, TimeUnit.SECONDS)) {
            Assert.fail("Timeout waiting nodes (" + TIMEOUT_NODE + " seconds)");
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}

From source file:it.wami.map.mongodeploy.OsmSaxHandler.java

/**
 * Queues a geometry-populating runnable for the given way and flushes the batch once WAYS_CHUNK ways have been read.
 * @param way the Way
 */
private void populateWayGeo(Way way) {
    Runnable r = new WayRunnable(db, way, waysQueue);

    waysRunnables.add(r);
    int current = (int) (readWays % WAYS_CHUNK);

    if (current == WAYS_CHUNK - 1) {
        int cores = Runtime.getRuntime().availableProcessors();
        ExecutorService executorService = Executors.newFixedThreadPool(cores);
        for (Runnable currentRunnable : waysRunnables) {
            executorService.execute(currentRunnable);
        }
        waysRunnables = Collections.synchronizedList(new ArrayList<Runnable>());
        executorService.shutdown();
        while (!executorService.isTerminated()) {
        }

        saveEntry(waysQueue, COLL_WAYS);
    }
}

From source file:it.wami.map.mongodeploy.OsmSaxHandler.java

private void populateRelation(Relation relation) {
    Runnable r = new RelationRunnable(db, relation, relationsQueue);

    relationRunnables.add(r);
    int current = (int) (readRelations % RELATIONS_CHUNK);

    if (current == RELATIONS_CHUNK - 1) {
        int cores = Runtime.getRuntime().availableProcessors();
        ExecutorService executorService = Executors.newFixedThreadPool(cores);
        for (Runnable currentRunnable : relationRunnables) {
            executorService.execute(currentRunnable);
        }
        relationRunnables = Collections.synchronizedList(new ArrayList<Runnable>());
        executorService.shutdown();
        while (!executorService.isTerminated()) {
        }

        saveEntry(relationsQueue, COLL_RELATIONS);
    }
}

From source file:org.goko.grbl.controller.GrblControllerService.java

/** (inheritDoc)
 * @see org.goko.core.common.service.IGokoService#start()
 */
@Override
public void start() throws GkException {
    grblActionFactory = new GrblActionFactory(this);
    configuration = new GrblConfiguration();
    grblState = new GrblState();
    grblState.addListener(this);
    // Initiate execution queue
    executionQueue = new ExecutionQueue<GrblGCodeExecutionToken>();
    ExecutorService executor = Executors.newSingleThreadExecutor();
    grblStreamingRunnable = new GrblStreamingRunnable(executionQueue, this);
    executor.execute(grblStreamingRunnable);
}

From source file:org.pentaho.support.cmd.CommandLineUtility.java

/**
 * Loads the supportutil.xml file, creates an instance of each selected
 * retriever and executes it.
 * 
 * @param args
 * @param server
 */
private static void executeService(String[] args, String server) {

    final Properties prop = loadSupportProperty();

    // if the installation is manual and the server is the BI server, read web.xml
    if (getInstallationType() == 3 && getServer() == 1) {
        if (prop.getProperty(CMDConstant.BI_TOM_PATH) == null) {
            System.out.println(CMDConstant.ERROR_12);
            System.exit(0);
        } else {

            WEB_XML = new StringBuilder();
            WEB_XML.append(prop.getProperty(CMDConstant.BI_TOM_PATH)).append(File.separator)
                    .append(CMDConstant.WEB_APP).append(File.separator).append(CMDConstant.PENTAHO)
                    .append(File.separator).append(CMDConstant.WEB_INF).append(File.separator)
                    .append(CMDConstant.WEB_XML);

            PENTAHO_SOLU_PATH = getSolutionPath(server, WEB_XML.toString());
            prop.put(CMDConstant.PENTAHO_SOLU_PATH, PENTAHO_SOLU_PATH);
            prop.put(CMDConstant.BI_PATH, PENTAHO_SOLU_PATH);
        }

    } else if (getInstallationType() == 3 && getServer() == 2) {
        // if the installation is manual and the server is the DI server, read web.xml
        if (prop.getProperty(CMDConstant.DI_TOM_PATH) == null) {
            System.out.println(CMDConstant.ERROR_22);
            System.exit(0);
        } else {

            WEB_XML = new StringBuilder();
            WEB_XML.append(prop.getProperty(CMDConstant.DI_TOM_PATH)).append(File.separator)
                    .append(CMDConstant.WEB_APP).append(File.separator).append(CMDConstant.PENTAHO_DI)
                    .append(File.separator).append(CMDConstant.WEB_INF).append(File.separator)
                    .append(CMDConstant.WEB_XML);

            PENTAHO_SOLU_PATH = getSolutionPath(server, WEB_XML.toString());
            prop.put(CMDConstant.PENTAHO_SOLU_PATH, PENTAHO_SOLU_PATH);
            prop.put(CMDConstant.BI_PATH, PENTAHO_SOLU_PATH);
        }
    }

    if (getServer() == 1) {

        if (prop.get(CMDConstant.BI_PATH) == null) {

            System.out.println(CMDConstant.ERROR_1);
            System.exit(0);
        }
        if (prop.get(CMDConstant.BI_TOM_PATH) == null) {

            System.out.println(CMDConstant.ERROR_12);
            System.exit(0);
        } else {
            setBIServerPath(prop);
        }

    } else if (getServer() == 2) {
        if (prop.get(CMDConstant.DI_PATH) == null) {
            System.out.println(CMDConstant.ERROR_2);
            System.exit(0);
        }
        if (prop.get(CMDConstant.DI_TOM_PATH) == null) {
            System.out.println(CMDConstant.ERROR_22);
            System.exit(0);
        } else {
            setDIServerPath(prop);
        }
    }

    ApplicationContext context = new ClassPathXmlApplicationContext(CMDConstant.SPRING_FILE_NAME);
    factory = (CofingRetrieverFactory) context.getBean(CMDConstant.COFINGRETRIEVERACTORY);
    ConfigRetreiver[] config = factory.getConfigRetrevier(args);

    ExecutorService service = Executors.newFixedThreadPool(10);
    // loop over all created instances and call the respective retriever
    for (final ConfigRetreiver configobj : config) {

        if (getServer() == 1) {

            configobj.setBISeverPath(prop);
            configobj.setServerName("biserver");
        } else if (getServer() == 2) {

            configobj.setDIServerPath(prop);
            configobj.setServerName("diserver");
        }

        if (getInstallationType() == 1) {

            // if installation is installer set Installer
            configobj.setInstallType("Installer");
        } else if (getInstallationType() == 2) {

            // if installation is Archive set Archive
            configobj.setInstallType("Archive");
        } else if (getInstallationType() == 3) {

            // if installation is Manual set Manual
            configobj.setInstallType("Manual");
        }

        // if the instance is a FileRetriever, set the required details
        if (configobj instanceof FileRetriever) {

            configobj.setBidiXml(serverXml);
            configobj.setBidiBatFile(serverBatFile);
            configobj.setBidiProrperties(serverProrperties);
            configobj.setTomcatXml(tomcatXml);
        }

        service.execute(new Runnable() {
            public void run() {
                configobj.readAndSaveConfiguration(prop);
            }
        });

    }

    try {
        service.shutdown();
        Thread.sleep(60000);

        // call for zip
        if (SupportZipUtil.zipFile(prop)) {

            File file = new File(prop.getProperty(CMDConstant.SUPP_INFO_DEST_PATH) + File.separator
                    + prop.getProperty(CMDConstant.SUPP_INF_DIR));
            if (file.exists()) {
                // delete the empty directory
                delete(file);
                System.exit(0);
            }
        }
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}

From source file:it.wami.map.mongodeploy.OsmSaxHandler.java

@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
    super.endElement(uri, localName, qName);

    if (NODE.equals(qName)) {
        save(entry, COLL_NODES);
    }
    if (WAY.equals(qName)) {
        if (!nodesQueue.isEmpty()) {
            System.out.println("remaining nodes: " + nodesQueue.size());
            saveEntry(nodesQueue, COLL_NODES);
        }
        if (options.isWayGeometry()) {
            populateWayGeo((Way) entry);
        } else {
            standardWay((Way) entry);
        }
    }
    if (RELATION.equals(qName)) {
        if (!waysRunnables.isEmpty()) {
            System.out.println("remaining ways: " + waysRunnables.size());
            int cores = Runtime.getRuntime().availableProcessors();
            ExecutorService executorService = Executors.newFixedThreadPool(cores);
            for (Runnable currentRunnable : waysRunnables) {
                executorService.execute(currentRunnable);
            }
            executorService.shutdown();
            while (!executorService.isTerminated()) {
            }
            waysRunnables.clear();
            saveEntry(waysQueue, COLL_WAYS);
        }
        if (!waysQueue.isEmpty()) {
            System.out.println("remaining ways: " + waysRunnables.size());
            saveEntry(waysQueue, COLL_WAYS);
        }
        if (options.isRelationGeometry()) {
            populateRelation((Relation) entry);
        } else {
            save(entry, COLL_RELATIONS);
        }
    }
}

From source file:it.wami.map.mongodeploy.OsmSaxHandler.java

@Override
public void endDocument() throws SAXException {
    super.endDocument();
    if (nodesQueue.size() > 0) {
        System.out.println("remaining nodes: " + nodesQueue.size());
        saveEntry(nodesQueue, COLL_NODES);
    }
    if (waysQueue.size() > 0) {
        System.out.println("remaining ways: " + waysQueue.size());
        saveEntry(waysQueue, COLL_WAYS);
    }
    if (!relationRunnables.isEmpty()) {
        int cores = Runtime.getRuntime().availableProcessors();
        ExecutorService executorService = Executors.newFixedThreadPool(cores);

        for (Runnable currentRunnable : relationRunnables) {
            executorService.execute(currentRunnable);
        }
        executorService.shutdown();
        while (!executorService.isTerminated()) {
        }
        relationRunnables.clear();
        System.out.println("remaining relations: " + relationsQueue.size());
        saveEntry(relationsQueue, COLL_RELATIONS);
    }
    if (!relationsQueue.isEmpty()) {
        System.out.println("remaining relations: " + relationsQueue.size());
        saveEntry(relationsQueue, COLL_RELATIONS);
    }
    if (tagsQueue.size() > 0) {
        System.out.println("remaining tags: " + tagsQueue.size());
        saveEntry(tagsQueue, COLL_TAGS);
    }
    end = System.currentTimeMillis();
    long time = end - start;
    System.out.println(
            "End of document; time - " + (time / (60 * 60 * 1000)) % 60 + "h, " + (time / (60 * 1000)) % 60
                    + "m, " + (time / 1000) % 60 + "s, " + time % 1000 + "ms (" + (end - start) + ")");
}