Example usage for java.util.concurrent ExecutorService execute

List of usage examples for java.util.concurrent ExecutorService execute

Introduction

On this page you can find example usage of java.util.concurrent ExecutorService execute.

Prototype

void execute(Runnable command);

Source Link

Document

Executes the given command at some time in the future.

Usage

From source file:org.apache.kylin.metadata.cachesync.Broadcaster.java

/**
 * Creates a broadcaster bound to the given Kylin config and starts a single daemon
 * dispatcher thread that drains {@code broadcastEvents} and fans each event out to
 * every rest server in the cluster via a small cache-wiping thread pool.
 */
private Broadcaster(final KylinConfig config) {
    this.config = config;

    final String[] nodes = config.getRestServers();
    if (nodes == null || nodes.length < 1) {
        logger.warn("There is no available rest server; check the 'kylin.server.cluster-servers' config");
    } else {
        // FIX: guard the debug log. The original dereferenced nodes.length immediately
        // after warning that nodes may be null, throwing a NullPointerException.
        logger.debug(nodes.length + " nodes in the cluster: " + Arrays.toString(nodes));
    }

    Executors.newSingleThreadExecutor(new DaemonThreadFactory()).execute(new Runnable() {
        @Override
        public void run() {
            // One RestClient per node, created lazily and cached for the lifetime of the loop.
            final Map<String, RestClient> restClientMap = Maps.newHashMap();
            // Bounded pool (1..10 threads) so a slow node cannot block announcements to the others.
            final ExecutorService wipingCachePool = new ThreadPoolExecutor(1, 10, 60L, TimeUnit.SECONDS,
                    new LinkedBlockingQueue<Runnable>(), new DaemonThreadFactory());

            while (true) {
                try {
                    // Blocks until an event is queued.
                    final BroadcastEvent broadcastEvent = broadcastEvents.takeFirst();
                    // Re-read the server list on every event so cluster changes are picked up.
                    String[] restServers = config.getRestServers();
                    logger.debug("Servers in the cluster: " + Arrays.toString(restServers));
                    for (final String node : restServers) {
                        if (restClientMap.containsKey(node) == false) {
                            restClientMap.put(node, new RestClient(node));
                        }
                    }

                    logger.debug("Announcing new broadcast event: " + broadcastEvent);
                    for (final String node : restServers) {
                        wipingCachePool.execute(new Runnable() {
                            @Override
                            public void run() {
                                try {
                                    restClientMap.get(node).wipeCache(broadcastEvent.getEntity(),
                                            broadcastEvent.getEvent(), broadcastEvent.getCacheKey());
                                } catch (IOException e) {
                                    logger.warn("Thread failed during wipe cache at " + broadcastEvent, e);
                                }
                            }
                        });
                    }
                } catch (Exception e) {
                    // Keep the dispatcher alive no matter what a single event does.
                    logger.error("error running wiping", e);
                }
            }
        }
    });
}

From source file:com.linkedin.pinot.integration.tests.BaseClusterIntegrationTest.java

/**
 * Asynchronously pushes the given Avro files into the named Kafka topic.
 * The actual push is delegated to the synchronous overload on a thread of the
 * supplied executor, so the caller returns immediately.
 */
public void pushAvroIntoKafka(final List<File> avroFiles, ExecutorService executor, final String kafkaTopic,
        final byte[] header) {
    executor.execute(
            () -> pushAvroIntoKafka(avroFiles, KafkaStarterUtils.DEFAULT_KAFKA_BROKER, kafkaTopic, header));
}

From source file:com.google.api.ads.adwords.awreporting.processors.onfile.ReportProcessorOnFile.java

/**
 * Parses the downloaded report files in parallel and persists the resulting beans.
 * One {@code RunnableProcessorOnFile} is submitted per file; a latch sized to the
 * file count is used to wait until every file has been processed (or skipped).
 *
 * @param mccAccountId    the MCC account the reports belong to
 * @param reportBeanClass the bean type each CSV row is mapped to
 * @param localFiles      the report files to process
 * @param dateRangeType   report date-range type forwarded to each worker
 * @param dateStart       start date forwarded to each worker
 * @param dateEnd         end date forwarded to each worker
 */
private <R extends Report> void processFiles(String mccAccountId, Class<R> reportBeanClass,
        Collection<File> localFiles, ReportDefinitionDateRangeType dateRangeType, String dateStart,
        String dateEnd) {

    final CountDownLatch latch = new CountDownLatch(localFiles.size());
    ExecutorService executorService = Executors.newFixedThreadPool(numberOfReportProcessors);

    // Processing Report Local Files
    LOGGER.info(" Processing reports...");

    Stopwatch stopwatch = Stopwatch.createStarted();

    for (File file : localFiles) {
        LOGGER.trace(".");
        try {

            // We need to create a csvToBean and mappingStrategy for each thread
            ModifiedCsvToBean<R> csvToBean = new ModifiedCsvToBean<R>();
            MappingStrategy<R> mappingStrategy = new AnnotationBasedMappingStrategy<R>(reportBeanClass);

            LOGGER.debug("Parsing file: " + file.getAbsolutePath());
            RunnableProcessorOnFile<R> runnableProcesor = new RunnableProcessorOnFile<R>(file, csvToBean,
                    mappingStrategy, dateRangeType, dateStart, dateEnd, mccAccountId, persister,
                    reportRowsSetSize);
            runnableProcesor.setLatch(latch);
            executorService.execute(runnableProcesor);

        } catch (Exception e) {
            LOGGER.error("Ignoring file (Error when processing): " + file.getAbsolutePath(), e);
            // FIX: the latch was sized for ALL files; if a file is skipped here its worker
            // never runs and never counts down, so latch.await() below would hang forever.
            latch.countDown();
        }
    }

    try {
        latch.await();
    } catch (InterruptedException e) {
        LOGGER.error(e.getMessage(), e);
        // Restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
    }
    executorService.shutdown();
    stopwatch.stop();
    LOGGER.info("*** Finished processing all reports in " + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000)
            + " seconds ***\n");
}

From source file:org.deegree.tools.coverage.gridifier.RasterTreeGridifier.java

/**
 * Gridifies one raster level: computes the cell grid for the level's native scale,
 * writes the grid meta-info file, then generates every cell in a worker pool while a
 * single writer thread streams the finished tiles into blob files.
 *
 * @param level the raster level to gridify
 * @throws IOException          if the meta-info file or blob output cannot be written
 * @throws InterruptedException if the calling thread is interrupted while waiting for the writer
 */
private void generateCells(RasterLevel level) throws IOException, InterruptedException {

    double metersPerPixel = level.getNativeScale();
    double cellWidth = tileWidth * metersPerPixel;
    double cellHeight = tileHeight * metersPerPixel;

    int columns = (int) Math.ceil(domainWidth / cellWidth);
    // NOTE(review): 'domainWidth' is also used for the row count below -- this looks like a
    // copy/paste slip ('domainHeight' expected). Preserved as-is; confirm against the class
    // fields before changing, since the envelope below is derived from rows/columns.
    int rows = (int) Math.ceil(domainWidth / cellHeight);

    bytesPerTile = tileWidth * tileHeight * 3;
    dataSize = (long) rows * (long) columns * tileWidth * tileHeight * 3L;
    int numberOfBlobs = (int) Math.ceil((double) dataSize / (double) maxBlobSize);

    // prepare output directory (one directory per resolution)
    currentOutputDir = new File(outputDir, "" + metersPerPixel);
    if (!currentOutputDir.exists()) {
        if (!currentOutputDir.mkdir()) {
            LOG.warn("Could not create directory {}.", currentOutputDir);
        }
    }

    LOG.info("\nGridifying level: " + level.getLevel() + "\n");
    LOG.info("- meters per pixel: " + metersPerPixel);
    LOG.info("- cell width (world units): " + cellWidth);
    LOG.info("- cell height (world units): " + cellHeight);
    LOG.info("- number of columns: " + columns);
    LOG.info("- number of rows: " + rows);
    LOG.info("- output directory: " + currentOutputDir);
    LOG.info("- total amount of data: " + dataSize);
    LOG.info("- number of blobs: " + numberOfBlobs);

    Envelope env = geomFactory.createEnvelope(minX, minY, minX + columns * cellWidth, minY + rows * cellHeight,
            null);
    RasterGeoReference renv = RasterGeoReference.create(originLocation, env, columns * tileWidth,
            rows * tileHeight);

    writeMetaInfoFile(new File(currentOutputDir, GridMetaInfoFile.METAINFO_FILE_NAME), renv, columns, rows);

    // start writer daemon thread; it terminates after receiving rows * columns cells
    BlobWriterThread writer = new BlobWriterThread(rows * columns);
    Thread writerThread = new Thread(writer);
    writerThread.start();

    // generate and store cell data in separate worker threads
    ExecutorService exec = Executors.newFixedThreadPool(numWorkerThreads);

    for (int row = 0; row < rows; row++) {
        for (int column = 0; column < columns; column++) {
            double cellMinX = minX + column * cellWidth;
            // rows are generated top-down, so invert the row index for the y origin
            double cellMinY = minY + (rows - row - 1) * cellHeight;
            double cellMaxX = cellMinX + cellWidth;
            double cellMaxY = cellMinY + cellHeight;
            int cellId = row * columns + column;
            Worker worker = new Worker(cellId, cellMinX, cellMinY, cellMaxX, cellMaxY, metersPerPixel, writer);
            exec.execute(worker);
        }
    }
    exec.shutdown();

    // FIX: block until the writer finishes instead of polling isAlive() with 1 s sleeps.
    writerThread.join();
}

From source file:com.thruzero.common.web.model.container.builder.xml.XmlPanelSetBuilder.java

/**
 * Builds all panels of this panel set concurrently (e.g., RSS feed panels benefit from
 * being fetched in parallel), waits up to {@code timeoutInSeconds} for completion, then
 * assembles the results in the originally defined panel order. Panels that failed or
 * timed out are replaced by error panels.
 *
 * @return the assembled panel set, with error panels substituted for failures/timeouts
 * @throws Exception propagated from panel-set construction
 */
protected PanelSet buildConcurrently() throws Exception {
    PanelSet result = new PanelSet(panelSetId);

    if (!panelNodes.isEmpty()) {
        // Build the panels in parallel (e.g., RSS Feed panels should be created in parallel).
        ExecutorService executorService = Executors.newFixedThreadPool(panelNodes.size());
        logHelper.logExecutorServiceCreated(panelSetId);

        // FIX: this map is written by the pool threads and read by this thread; a plain
        // HashMap is not safe for concurrent puts, so use a ConcurrentHashMap.
        final Map<String, AbstractPanel> panels = new java.util.concurrent.ConcurrentHashMap<String, AbstractPanel>();
        for (final InfoNodeElement panelNode : panelNodes) {
            final AbstractXmlPanelBuilder panelBuilder = panelBuilderTypeRegistry
                    .createBuilder(panelNode.getName(), panelNode);
            // NOTE(review): hashCode-derived keys could collide between panel nodes -- confirm
            // panelNode hash uniqueness is acceptable here; preserved from the original design.
            final String panelKey = Integer.toHexString(panelNode.hashCode());

            if (panelBuilder == null) {
                panels.put(panelKey, new ErrorHtmlPanel("error", "Panel ERROR",
                        "PanelBuilder not found for panel type " + panelNode.getName()));
            } else {
                executorService.execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            AbstractPanel panel = panelBuilder.build();
                            panels.put(panelKey, panel);
                        } catch (Exception e) {
                            panels.put(panelKey, panelBuilder.buildErrorPanel(panelBuilder.getPanelId(),
                                    "Panel ERROR",
                                    "PanelBuilder encountered an Exception: " + e.getClass().getSimpleName()));
                        }
                    }

                    @Override
                    public String toString() {
                        return panelBuilder.getPanelInfoForError();
                    }
                });
            }
        }

        // Wait for all panels to be built
        executorService.shutdown();
        logHelper.logExecutorServiceShutdown(panelSetId);
        try {
            executorService.awaitTermination(timeoutInSeconds, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            // Timeout/interruption is handled below via isTerminated(); restore the flag
            // so callers can still observe the interruption.
            logHelper.logExecutorServiceInterrupted(panelSetId);
            Thread.currentThread().interrupt();
        }

        if (executorService.isTerminated()) {
            logHelper.logExecutorServiceIsTerminated(panelSetId);
        } else {
            logHelper.logExecutorServiceIsNotTerminated(executorService, executorService.shutdownNow(),
                    panelSetId);
        }

        // add panels in the same order as defined
        for (InfoNodeElement panelNode : panelNodes) {
            String panelKey = Integer.toHexString(panelNode.hashCode());
            AbstractPanel panel = panels.get(panelKey);
            if (panel == null) {
                // if it wasn't added to the panels map, then there must have been a timeout error
                AbstractXmlPanelBuilder panelBuilder = panelBuilderTypeRegistry
                        .createBuilder(panelNode.getName(), panelNode);

                result.addPanel(panelBuilder.buildErrorPanel(panelKey, "Panel ERROR",
                        "PanelBuilder encountered a timeout error: " + panelNode.getName()));
            } else {
                result.addPanel(panel);
            }
        }
    }
    logHelper.logPanelSetCompleted(panelSetId);

    return result;
}

From source file:nl.salp.warcraft4j.dev.casc.dbc.DbcFilenameGenerator.java

// Drives brute-force DBC filename generation: one generator task enumerates candidate
// strings while (currently disabled) consumer tasks would drain them from a queue.
public void execute() {
    // Hard-coded sizing: 32 total slots, 1 generator, the rest reserved for consumers.
    int cores = 32;
    int generators = 1;
    int consumers = cores - generators;

    ExecutorService executorService = Executors.newFixedThreadPool(cores);
    Generator generator = new Generator(CHARS, maxLength);
    // FIX(comment-only): the site watermark had fused into this comment opener as
    // "/*/*from ... c o m*/", which closed the comment early, made the consumer block
    // below live code, and left the "*/" at the end as a stray token (compile error).
    // Restoring the bare "/*" re-disables the whole consumer block as intended.
    /*
    IntStream.range(0, consumers).forEach(i -> executorService.execute(() -> {
    LOGGER.debug("Started consumer {}", i);
    Consumer<String> consumer = this.consumer.get();
    String val = null;
    //long tryCount = 0;
    while (generator.isAvailable() || !queue.isEmpty()) {
        while (val == null && (generator.isAvailable() || !queue.isEmpty())) {
            //tryCount++;
            try {
                val = queue.poll(10, TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
            }
            //if (tryCount % 1000 == 0) {
            //    LOGGER.debug("Starvation for consumer {} with {} tries", i);
            //}
        }
        if (val != null) {
            //tryCount = 0;
            consumer.accept(val);
            val = null;
        }
    }
    LOGGER.debug("Shutting down consumer {}", i);
    }));
    */
    // Generator task: exhausts the candidate space. NOTE(review): 'value' is computed but
    // unused because the add(...) calls are commented out -- with consumers also disabled,
    // this loop currently produces no output; presumably left mid-refactor.
    IntStream.range(0, generators).forEach(i -> executorService.execute(() -> {
        LOGGER.debug("Started generator {}", i);
        while (generator.isAvailable()) {
            String value = generator.next();
            //add(format(FORMAT_DBC, value));
            //add(format(FORMAT_DB2, value));
        }
        LOGGER.debug("Shutting down generator {}", i);
    }));

    // NOTE(review): shutdown() is non-blocking, so "Done..." logs before the generator
    // tasks have necessarily finished.
    LOGGER.debug("Done...");
    executorService.shutdown();
}

From source file:com.qwazr.compiler.JavaCompiler.java

/**
 * Compiles every source under {@code javaSourceDirectory} once, then registers a
 * directory watcher (run on the supplied executor) that recompiles any directory
 * in which a change is observed.
 */
private JavaCompiler(ExecutorService executorService, File javaSourceDirectory, File javaClassesDirectory,
        String classPath, Collection<URL> urlList) throws IOException {
    this.classPath = classPath;
    this.javaSourceDirectory = javaSourceDirectory;

    // Length of the absolute source-root prefix (including a trailing separator),
    // used elsewhere to derive relative source paths.
    final String sourceRoot = javaSourceDirectory.getAbsolutePath();
    if (sourceRoot.endsWith("/")) {
        javaSourcePrefixSize = sourceRoot.length();
    } else {
        javaSourcePrefixSize = sourceRoot.length() + 1;
    }

    this.javaClassesDirectory = javaClassesDirectory;
    if (this.javaClassesDirectory != null && !this.javaClassesDirectory.exists())
        this.javaClassesDirectory.mkdir();

    compilerLock = new LockUtils.ReadWriteLock();

    // Initial full compile, then watch for changes and recompile incrementally.
    compileDirectory(javaSourceDirectory);
    directorWatcher = DirectoryWatcher.register(javaSourceDirectory.toPath(), path -> {
        try {
            compileDirectory(path.toFile());
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
    });
    executorService.execute(directorWatcher);
}

From source file:org.pentaho.support.di.server.DISupportUtilityServiceImpl.java

/**
 * loads spring configuration SupportUtil.xml file and creates instance of
 * selected retriever/*from  w ww.j  a  v  a  2 s  .c  om*/
 * 
 * @param args
 * @param prop
 * @return
 */
/**
 * loads spring configuration SupportUtil.xml file and creates instance of
 * selected retriever, runs each retriever on a thread pool, then zips the
 * gathered support information.
 * 
 * @param args the retriever selection arguments
 * @param prop configuration properties passed to each retriever and to the zip step
 * @return true if all retrievers ran and the support zip was created
 */
private boolean executeService(String[] args, final Properties prop) {
    Boolean result = false;
    String SPRING_CONFIG_CLASS = "cofingRetrieverFactory";

    try {

        ApplicationContext context = new ClassPathXmlApplicationContext(DIConstant.SPRING_FILE_NAME);
        final CofingRetrieverFactory factory = (CofingRetrieverFactory) context.getBean(SPRING_CONFIG_CLASS);
        ConfigRetreiver[] config = factory.getConfigRetrevier(args);

        ExecutorService service = Executors.newFixedThreadPool(10);

        // loop through created retriever instance and calls respective
        // retriever
        for (final ConfigRetreiver configobj : config) {

            configobj.setDIServerPath(prop);
            configobj.setServerName(selected.getServerName());
            configobj.setInstallType(selected.getInstallType());

            // if retriever instance is FileRetriever, sets required detail
            if (configobj instanceof FileRetriever) {

                configobj.setBidiXml(selected.getBidiXml());
                configobj.setBidiBatFile(selected.getBidiBatFile());
                configobj.setBidiProrperties(selected.getBidiProrperties());
                configobj.setTomcatXml(selected.getTomcatXml());
            }

            // if retriever instance is BrowserInfoRetriever, sets Browser
            // info detail
            if (configobj instanceof BrowserInfoRetriever) {
                configobj.setBrowserInfo(selected.getBrowserInfo());
            }

            service.execute(new Runnable() {
                public void run() {
                    configobj.readAndSaveConfiguration(prop);
                }
            });

        }

        service.shutdown();
        // FIX: wait for the workers to actually finish (up to the former 75 s budget)
        // instead of sleeping unconditionally for 75 s -- the sleep both wasted time on
        // fast runs and gave no guarantee the retrievers were done before zipping.
        service.awaitTermination(75, java.util.concurrent.TimeUnit.SECONDS);

        // call zip
        if (SupportZipUtil.zipFile(prop)) {

            File file = new File(prop.getProperty(DIConstant.SUPP_INFO_DEST_PATH) + File.separator
                    + prop.getProperty(DIConstant.SUPP_INF_DIR));
            if (file.exists()) {
                // call delete
                delete(file);
            }

            result = true;
        }

    } catch (InterruptedException e) {
        e.printStackTrace();
        // Restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
    } catch (Exception e) {
        e.printStackTrace();
    }

    return result;
}

From source file:ir.ac.iust.nlp.postagger.POSTaggerForm.java

// Handles the "Start Training" button: mirrors training output into the log text area,
// copies the training file next to the working directory, and launches the training
// job on a single-threaded pool.
private void btnStartTrainingActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnStartTrainingActionPerformed

    try {
        txtTrainLog.setText("");
        btnStartTraining.setEnabled(false);

        // Redirect training output into the log text area, character by character.
        // NOTE(review): write() is invoked from the worker thread while it updates Swing
        // components -- presumably tolerated here, but Swing updates should go through
        // SwingUtilities.invokeLater; confirm before relying on this.
        PrintStream out = new PrintStream(new OutputStream() {

            private StringBuffer buffer = new StringBuffer();

            @Override
            public void write(int b) throws IOException {
                this.buffer.append((char) b);
                txtTrainLog.setText(buffer.toString());
                txtTrainLog.setCaretPosition(txtTrainLog.getDocument().getLength() - 1);
            }
        });
        RunnableTrain.out = out;

        // Copy the training file into the working directory if it is not already there.
        File trainFrom = new File(txtTrainFile.getText());
        File trainTo = new File(System.getProperty("user.dir") + File.separator + trainFrom.getName());
        if (!trainFrom.equals(trainTo))
            FileUtils.copyFile(trainFrom, trainTo);

        File modelTo = new File(txtTrainModelPath.getText());
        File modelFrom = new File(System.getProperty("user.dir") + File.separator + modelTo.getName());
        modelFrom.mkdirs();

        // Run in a new thread
        Runnable job = new RunnableTrain(this, modelTo.getName(), trainFrom.getName(),
                Integer.parseInt(spMaxIters.getValue().toString()));
        ExecutorService threadPool = Executors.newFixedThreadPool(1);
        threadPool.execute(job);
        threadPool.shutdown();
    } catch (IOException | NumberFormatException ex) {
        // FIX: this catch was empty -- errors (bad iteration count, failed file copy) were
        // silently swallowed AND the button stayed disabled forever. Surface the error and
        // restore the UI so the user can retry.
        ex.printStackTrace();
        btnStartTraining.setEnabled(true);
    }
}

From source file:ir.ac.iust.nlp.postagger.POSTaggerForm.java

// Handles the "Start Tagging" button: resets the result views, stages the model
// directory into the working directory, redirects tagging output into the log text
// area, and launches the tagging job on a single-threaded pool.
private void btnStartTaggingActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnStartTaggingActionPerformed

    // Reset previous results and disable the UI while the job runs.
    wordModel.clear();
    predModel.clear();
    goldModel.clear();
    txtTagLog.setText("");
    btnStartTagging.setEnabled(false);
    tabTag.setSelectedIndex(0);
    tabTag.setEnabledAt(1, false);

    // Stage the model directory into the working directory if it is elsewhere.
    // NOTE(review): the mkdirs() result is ignored and nothing re-enables the button on
    // failure -- presumably RunnableTagging restores the UI when it finishes; confirm.
    File modelFrom = new File(txtModelPath.getText());
    File modelTo = new File(System.getProperty("user.dir") + File.separator + modelFrom.getName());
    if (!modelFrom.equals(modelTo)) {
        modelTo.mkdirs();
        copyDirectory(modelFrom, modelTo);
    }

    // Redirect tagging output into the log text area, character by character.
    // NOTE(review): write() runs on the worker thread but mutates Swing components;
    // Swing updates should normally go through SwingUtilities.invokeLater -- verify.
    PrintStream out = new PrintStream(new OutputStream() {

        private StringBuffer buffer = new StringBuffer();

        @Override
        public void write(int b) throws IOException {
            this.buffer.append((char) b);
            txtTagLog.setText(buffer.toString());
            txtTagLog.setCaretPosition(txtTagLog.getDocument().getLength() - 1);
        }
    });
    RunnableTagging.out = out;

    // Optional gold-standard file for evaluation; null disables scoring.
    String gold = null;
    if (chkGoldFile.isSelected() == true) {
        gold = txtGoldFile.getText();
    }

    // Run in a new thread
    Runnable job = new RunnableTagging(this, modelFrom.getName(), txtInputFile.getText(),
            txtOutputPath.getText() + "tagged_output.lbl", gold);
    ExecutorService threadPool = Executors.newFixedThreadPool(1);
    threadPool.execute(job);
    threadPool.shutdown();
}