Example usage for java.util.concurrent Executors newSingleThreadExecutor

Introduction

On this page you can find example usages of java.util.concurrent.Executors.newSingleThreadExecutor(ThreadFactory).

Prototype

public static ExecutorService newSingleThreadExecutor(ThreadFactory threadFactory) 

Document

Creates an Executor that uses a single worker thread operating off an unbounded queue, and uses the provided ThreadFactory to create a new thread when needed.
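Before the project examples below, here is a minimal, self-contained sketch of the typical pattern. The class name SingleThreadExecutorExample, the NamedDaemonThreadFactory helper, and the "example-worker" thread name are illustrative assumptions, not taken from any of the projects shown here: the ThreadFactory names the single worker thread and marks it as a daemon, and the executor is shut down once no more tasks will be submitted.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;

public class SingleThreadExecutorExample {

    // Illustrative ThreadFactory: gives the single worker a recognizable name
    // and marks it as a daemon so it cannot keep the JVM alive on its own.
    static class NamedDaemonThreadFactory implements ThreadFactory {
        private final String name;

        NamedDaemonThreadFactory(String name) {
            this.name = name;
        }

        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r, name);
            t.setDaemon(true);
            return t;
        }
    }

    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors
                .newSingleThreadExecutor(new NamedDaemonThreadFactory("example-worker"));
        try {
            // Tasks submitted to a single-thread executor run sequentially on the one worker thread.
            executor.execute(() -> System.out.println("running on " + Thread.currentThread().getName()));
            executor.execute(() -> System.out.println("also on " + Thread.currentThread().getName()));
        } finally {
            executor.shutdown();
            executor.awaitTermination(5, TimeUnit.SECONDS);
        }
    }
}

Most of the examples below follow this same shape, differing only in the ThreadFactory implementation used (ManagedThreadFactory, NamedThreadFactory, BasicThreadFactory.Builder, or an anonymous class).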

Usage

From source file:com.adaptris.core.interceptor.ThrottlingInterceptor.java

@Override
public void init() throws CoreException {
    if (isEmpty(cacheName)) {
        throw new CoreException("Cache Name not specified.");
    }

    ((TimeSliceAbstractCacheProvider) cacheProvider).setTimeSliceDurationMilliseconds(getMillisecondDuration());
    getCacheProvider().init();
    executor = Executors.newSingleThreadExecutor(new ManagedThreadFactory(getClass().getSimpleName()));
}

From source file:com.alibaba.openapi.client.rpc.AlibabaClientReactor.java

public synchronized void start(ClientPolicy policy)
        throws IOReactorException, NoSuchAlgorithmException, KeyManagementException {
    if (workerFuture != null) {
        throw new IllegalStateException("[AlibabaClientReactor]is already started");
    }
    LoggerHelper.getClientLogger().finer("[AlibabaClientReactor START]-- start OK");
    this.policy = policy;

    //Initialize HTTP processor
    HttpParams params = getHttpParams();
    //System.out.println("create IOEventDispatch...");
    createIOEventDispatch(policy, params);
    //System.out.println("ioReactor init...");
    ioReactor = new DefaultConnectingIOReactor(1, params);
    executorService = Executors.newSingleThreadExecutor(new NamedThreadFactory("AlibabaClientReactor", true));
    //System.out.println("AlibabaClientReactor submit...");
    workerFuture = executorService.submit(this);
}

From source file:com.adaptris.core.lifecycle.FilteredSharedComponentStart.java

private ExecutorService getExecutor(String name) {
    ExecutorService es = connectionStarters.get(name);
    if (es == null || es.isShutdown()) {
        es = Executors.newSingleThreadExecutor(new ManagedThreadFactory(getClass().getSimpleName()));
        connectionStarters.put(name, es);
    }
    return es;
}

From source file:com.reactivetechnologies.analytics.core.IncrementalClassifierBean.java

@PostConstruct
void init() {
    loadAndInitializeModel();

    log.info((isUpdateable() ? "UPDATEABLE " : "NON-UPDATEABLE ") + "** Weka Classifier loaded [" + clazzifier
            + "] **");
    if (log.isDebugEnabled()) {
        log.debug("weka.classifier.tokenize? " + filterDataset);
        log.debug("weka.classifier.tokenize.options: " + filterOpts);
        log.debug("weka.classifier.build.batchSize: " + instanceBatchSize);
        log.debug("weka.classifier.build.intervalSecs: " + delay);
        log.debug("weka.classifier.build.maxIdleSecs: " + maxIdle);
    }
    worker = Executors.newSingleThreadExecutor(new ThreadFactory() {

        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r, "RegressionBean.Worker.Thread");
            return t;
        }
    });
    worker.submit(new EventConsumer());

    timer = Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {

        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r, "RegressionBean.Timer.Thread");
            t.setDaemon(true);
            return t;
        }
    });
    ((ScheduledExecutorService) timer).scheduleWithFixedDelay(new EventTimer(), delay, delay, TimeUnit.SECONDS);
}

From source file:com.ebay.pulsar.metriccalculator.processor.MetricCassandraCollector.java

@ManagedOperation
public void connect() {
    if (!connected.get()) {
        pool = Executors.newSingleThreadExecutor(new NamedThreadFactory("CassandraCallBack"));
        keySpace = config.getKeySpace();
        contactPoints = new ArrayList<String>();
        contactPoints.addAll(config.getContactPoints());
        connectInternal();
    }
}

From source file:com.amazon.sqs.javamessaging.SQSSession.java

SQSSession(SQSConnection parentSQSConnection, AcknowledgeMode acknowledgeMode,
        Set<SQSMessageConsumer> messageConsumers, Set<SQSMessageProducer> messageProducers)
        throws JMSException {
    this.parentSQSConnection = parentSQSConnection;
    this.amazonSQSClient = parentSQSConnection.getWrappedAmazonSQSClient();
    this.acknowledgeMode = acknowledgeMode;
    this.acknowledger = this.acknowledgeMode.createAcknowledger(amazonSQSClient, this);
    this.sqsSessionRunnable = new SQSSessionCallbackScheduler(this, acknowledgeMode, acknowledger);
    this.executor = Executors.newSingleThreadExecutor(SESSION_THREAD_FACTORY);
    this.messageConsumers = messageConsumers;
    this.messageProducers = messageProducers;

    executor.execute(sqsSessionRunnable);
}

From source file:com.laudandjolynn.mytv.Main.java

/**
 * Initialize the database data.
 */
private static void initDbData(final MyTvData data) {
    final TvService tvService = new TvServiceImpl();
    makeCache(tvService);

    // run the crawl task
    ExecutorService executorService = Executors.newSingleThreadExecutor(
            new BasicThreadFactory.Builder().namingPattern("Mytv_Crawl_Task_%d").build());
    executorService.execute(new Runnable() {

        @Override
        public void run() {
            runCrawlTask(data, tvService);
        }
    });
    executorService.shutdown();
    // create the everyday crawl task
    logger.info("create everyday crawl task.");
    createEverydayCron(data, tvService);
}

From source file:gobblin.runtime.AbstractJobLauncher.java

public AbstractJobLauncher(Properties jobProps, List<? extends Tag<?>> metadataTags,
        @Nullable SharedResourcesBroker<GobblinScopeTypes> instanceBroker) throws Exception {
    Preconditions.checkArgument(jobProps.containsKey(ConfigurationKeys.JOB_NAME_KEY),
            "A job must have a job name specified by job.name");

    // Add clusterIdentifier tag so that it is added to any new TaskState created
    List<Tag<?>> clusterNameTags = Lists.newArrayList();
    clusterNameTags.addAll(Tag.fromMap(ClusterNameTags.getClusterNameTags()));
    GobblinMetrics.addCustomTagsToProperties(jobProps, clusterNameTags);

    // Make a copy for both the system and job configuration properties
    this.jobProps = new Properties();
    this.jobProps.putAll(jobProps);

    if (!tryLockJob(this.jobProps)) {
        throw new JobException(
                String.format("Previous instance of job %s is still running, skipping this scheduled run",
                        this.jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY)));
    }

    try {
        if (instanceBroker == null) {
            instanceBroker = createDefaultInstanceBroker(jobProps);
        }

        this.jobContext = new JobContext(this.jobProps, LOG, instanceBroker);
        this.eventBus.register(this.jobContext);

        this.cancellationExecutor = Executors.newSingleThreadExecutor(
                ExecutorsUtils.newThreadFactory(Optional.of(LOG), Optional.of("CancellationExecutor")));

        this.runtimeMetricContext = this.jobContext.getJobMetricsOptional()
                .transform(new Function<JobMetrics, MetricContext>() {
                    @Override
                    public MetricContext apply(JobMetrics input) {
                        return input.getMetricContext();
                    }
                });

        this.eventSubmitter = buildEventSubmitter(metadataTags);

        // Add all custom tags to the JobState so that tags are added to any new TaskState created
        GobblinMetrics.addCustomTagToState(this.jobContext.getJobState(), metadataTags);

        JobExecutionEventSubmitter jobExecutionEventSubmitter = new JobExecutionEventSubmitter(
                this.eventSubmitter);
        this.mandatoryJobListeners.add(new JobExecutionEventSubmitterListener(jobExecutionEventSubmitter));
    } catch (Exception e) {
        unlockJob();
        throw e;
    }
}

From source file:at.sti2.sparkwave.SparkwaveKernel.java

/**
 * Kick off bootstrap.
 */
private void bootstrap(ConfigurationModel sparkwaveConfig, List<File> patternFiles) {

    //Instantiate ExecutorService for SparkwaveProcessor
    //      ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat("Processor-%d").build();
    //      ExecutorService processorExecutor = Executors.newCachedThreadPool(tf);

    int patternFilesSize = patternFiles.size();
    for (int i = 0; i < patternFilesSize; i++) {

        File patternFile = patternFiles.get(i);

        //Build triple pattern representation
        logger.info(
                "Parsing pattern file (" + (i + 1) + " of " + patternFilesSize + "): " + patternFile + "...");
        SparkPatternParser patternParser = new SparkPatternParser();
        SparkParserResult parserResult = null;
        try {
            parserResult = patternParser.parse(patternFile);
        } catch (IOException e) {
            logger.error("Could not open pattern file " + patternFile + " " + e.getMessage());
        } catch (SparkParserException e) {
            logger.error("Could not parse the file " + patternFile + " " + e.getMessage());
        }
        // parserResult stays null if parsing failed above, so guard against an NPE
        Pattern pattern = (parserResult != null) ? parserResult.getPattern() : null;

        logger.info("Parsed pattern:\n" + pattern);

        if (pattern != null) {

            addProcessorThread(pattern);

            //            processorExecutor.
            //            processorExecutor.shutdownNow();

            //TODO generate pattern id (random? hash? int++?) for pattern, use name
            //TODO Associate: processor and thread, method to find corresponding sparkwave processor for name. HashMap<Name,<List<Processor,Thread>?
            //TODO 1) remove queue for pattern 2) shutdown processorThread

            //TODO Add GrizzlyServer and access SparkwaveKernel.
            //TODO getPatterns method
            //TODO Test to remove,add patterns!!

            /* pattern class
             * 
             * variables:
             * filename
             * 
             * method:
             * getContent()
             *    
            */

            /* this class
                       
               variables:
               map<pattern,thread>
               list of patterns
                       
               methods:
             *    addProcessorThread(Pattern)
             *    removeProcessorThread(Pattern)
             *    getLoadedPatterns()
             *    getLoadedPattern(name)
             * 
             */
        }
    }

    //      for(Thread t : threads){
    //         try {
    //            Thread.sleep(3000);
    //            logger.info("interrupting "+t);
    //            t.interrupt();
    //            t.join();
    //            logger.info("joined "+t);
    //         } catch (InterruptedException e) {
    //            // TODO Auto-generated catch block
    //            e.printStackTrace();
    //         }
    //      }

    ThreadFactory threadFactoyServerSocket = new ThreadFactoryBuilder().setNameFormat("ServerSocket-%d")
            .build();
    ExecutorService serverSocketExecutor = Executors.newSingleThreadExecutor(threadFactoyServerSocket);

    //One Server for all SparkwaveNetworks, acts as multiplexer
    serverSocketExecutor.execute(new ServerSocketThread(sparkwaveConfig, queues));
}

From source file:gov.va.isaac.mojos.profileSync.ProfilesMojoBase.java

protected String getUsername() throws MojoExecutionException {
    if (username == null) {
        username = System.getProperty(PROFILE_SYNC_USERNAME_PROPERTY);

        //still blank, try property
        if (StringUtils.isBlank(username)) {
            username = profileSyncUsername;
        }

        //still no username, prompt if allowed
        if (StringUtils.isBlank(username) && !Boolean.getBoolean(PROFILE_SYNC_NO_PROMPTS)) {
            Callable<Void> callable = new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    if (!disableHintGiven) {
                        System.out.println("To disable remote sync during build, add '-D" + PROFILE_SYNC_DISABLE
                                + "=true' to your maven command");
                        disableHintGiven = true;
                    }

                    try {
                        System.out.println("Enter the " + config_.getChangeSetUrlType().name()
                                + " username for the Profiles/Changset remote store ("
                                + config_.getChangeSetUrl() + "):");
                        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
                        username = br.readLine();
                    } catch (IOException e) {
                        throw new MojoExecutionException("Error reading username from console");
                    }
                    return null;
                }
            };

            try {
                Executors.newSingleThreadExecutor(new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        Thread t = new Thread(r, "User Prompt Thread");
                        t.setDaemon(true);
                        return t;
                    }
                }).submit(callable).get(2, TimeUnit.MINUTES);
            } catch (TimeoutException | InterruptedException e) {
                throw new MojoExecutionException("Username not provided within timeout");
            } catch (ExecutionException ee) {
                throw (ee.getCause() instanceof MojoExecutionException ? (MojoExecutionException) ee.getCause()
                        : new MojoExecutionException("Unexpected", ee.getCause()));
            }
        }
    }
    return username;
}