List of usage examples for java.util.concurrent.Executors.newSingleThreadScheduledExecutor()
public static ScheduledExecutorService newSingleThreadScheduledExecutor()
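Before the project examples, here is a minimal, self-contained sketch (not taken from any of the sources below; the heartbeat task and the one-second interval are illustrative only) showing the typical lifecycle: create the single-threaded scheduler, schedule a periodic task, then shut it down.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SingleThreadSchedulerSketch {
    public static void main(String[] args) throws InterruptedException {
        // Single worker thread; queued tasks execute sequentially, never concurrently.
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

        // Hypothetical periodic task: print a heartbeat every second after an initial 1s delay.
        scheduler.scheduleAtFixedRate(
                () -> System.out.println("heartbeat at " + System.currentTimeMillis()),
                1, 1, TimeUnit.SECONDS);

        // Let the task fire a few times, then stop scheduling further runs
        // and release the worker thread.
        Thread.sleep(5_000);
        scheduler.shutdown();
        scheduler.awaitTermination(5, TimeUnit.SECONDS);
    }
}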
From source file:org.apache.storm.metric.FileBasedEventLogger.java
private void setUpFlushTask() {
    ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
    Runnable task = new Runnable() {
        @Override
        public void run() {
            try {
                if (dirty) {
                    eventLogWriter.flush();
                    dirty = false;
                }
            } catch (IOException ex) {
                LOG.error("Error flushing " + eventLogPath, ex);
                throw new RuntimeException(ex);
            }
        }
    };
    scheduler.scheduleAtFixedRate(task, FLUSH_INTERVAL_MILLIS, FLUSH_INTERVAL_MILLIS, TimeUnit.MILLISECONDS);
}
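One caveat worth noting for examples like the one above: scheduleAtFixedRate suppresses all subsequent executions once a task run throws, so rethrowing the IOException as a RuntimeException cancels the flush schedule after the first failure (which may well be intentional here). If a periodic job should survive failures, the usual pattern is to keep exceptions from escaping the task body; the sketch below is illustrative only, with a hypothetical doFlush() standing in for the real I/O.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class ResilientPeriodicFlush {
    public static void main(String[] args) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        scheduler.scheduleAtFixedRate(() -> {
            try {
                doFlush(); // hypothetical I/O, standing in for something like eventLogWriter.flush()
            } catch (Exception ex) {
                // Log and swallow so the next scheduled run still happens.
                System.err.println("Flush failed, will retry on next tick: " + ex);
            }
        }, 1, 1, TimeUnit.SECONDS);
    }

    private static void doFlush() throws Exception {
        // Placeholder for the real flush; may throw on I/O errors.
    }
}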
From source file:com.hurence.logisland.connect.opc.da.OpcDaSourceConnectorTest.java
@Test
@Ignore
public void e2eTest() throws Exception {
    OpcDaSourceConnector connector = new OpcDaSourceConnector();
    Map<String, String> properties = new HashMap<>();
    properties.put(OpcDaSourceConnector.PROPERTY_AUTH_NTLM_DOMAIN, "OPC-9167C0D9342");
    properties.put(CommonDefinitions.PROPERTY_CONNECTION_SOCKET_TIMEOUT, "2000");
    properties.put(OpcDaSourceConnector.PROPERTY_AUTH_NTLM_PASSWORD, "opc");
    properties.put(OpcDaSourceConnector.PROPERTY_AUTH_NTLM_USER, "OPC");
    properties.put(CommonDefinitions.PROPERTY_SERVER_URI, "opc.da://192.168.99.100");
    properties.put(OpcDaSourceConnector.PROPERTY_SERVER_CLSID, "F8582CF2-88FB-11D0-B850-00C0F0104305");
    properties.put(CommonDefinitions.PROPERTY_TAGS_ID, "Random.Real8,Triangle Waves.Int4");
    properties.put(CommonDefinitions.PROPERTY_TAGS_STREAM_MODE, "SUBSCRIBE,POLL");
    properties.put(CommonDefinitions.PROPERTY_TAGS_SAMPLING_RATE, "PT3S,PT1S");
    properties.put(OpcDaSourceConnector.PROPERTY_SESSION_REFRESH_PERIOD, "1000");
    properties.put(OpcDaSourceConnector.PROPERTY_TAGS_DATA_TYPE_OVERRIDE, "0,8");
    connector.start(properties);
    OpcDaSourceTask task = new OpcDaSourceTask();
    task.start(connector.taskConfigs(1).get(0));
    ScheduledExecutorService es = Executors.newSingleThreadScheduledExecutor();
    Gson json = new Gson();
    es.scheduleAtFixedRate(() -> {
        try {
            task.poll().stream()
                    .map(a -> org.apache.commons.lang3.tuple.Pair.of(
                            new Date((Long) a.sourceOffset().get(OpcRecordFields.SAMPLED_TIMESTAMP)),
                            json.toJson(a)))
                    .forEach(System.out::println);
        } catch (InterruptedException e) {
            // do nothing
        }
    }, 0, 10, TimeUnit.MILLISECONDS);
    Thread.sleep(600000);
    task.stop();
    es.shutdown();
    connector.stop();
}
From source file:com.linkedin.kmf.KafkaMonitor.java
public KafkaMonitor(Map<String, Map> testProps) throws Exception {
    _tests = new HashMap<>();
    _services = new HashMap<>();

    for (Map.Entry<String, Map> entry : testProps.entrySet()) {
        String name = entry.getKey();
        Map props = entry.getValue();
        if (!props.containsKey(CLASS_NAME_CONFIG))
            throw new IllegalArgumentException(name + " is not configured with " + CLASS_NAME_CONFIG);
        String className = (String) props.get(CLASS_NAME_CONFIG);
        if (className.startsWith(App.class.getPackage().getName())
                || className.startsWith(Test.class.getPackage().getName())) {
            App test = (App) Class.forName(className).getConstructor(Map.class, String.class)
                    .newInstance(props, name);
            _tests.put(name, test);
        } else {
            Service service = (Service) Class.forName(className).getConstructor(Map.class, String.class)
                    .newInstance(props, name);
            _services.put(name, service);
        }
    }

    _executor = Executors.newSingleThreadScheduledExecutor();
}
From source file:org.apache.synapse.transport.utils.sslcert.cache.CacheManager.java
/**
 * A new cacheManager will be started on the given ManageableCache object.
 *
 * @param cache        a Manageable Cache which could be managed by this cache manager.
 * @param cacheMaxSize Maximum size of the cache. If the cache exceeds this size, LRU values
 *                     will be removed
 */
public CacheManager(ManageableCache cache, int cacheMaxSize, int duration) {
    scheduler = Executors.newSingleThreadScheduledExecutor();
    this.cache = cache;
    this.cacheMaxSize = cacheMaxSize;
    this.cacheManagingTask = new CacheManagingTask();
    this.duration = duration;
    start();
}
From source file:com.web.server.util.FarmWarFileTransfer.java
private FarmWarFileTransfer(String deployDir, String farmwarDir, String clusterGroup) {
    this.deployDir = deployDir;
    this.farmwarDir = farmwarDir;
    this.clusterGroup = clusterGroup;
    ScheduledExecutorService exec = Executors.newSingleThreadScheduledExecutor();
    FarmWarDirWatcher task = new FarmWarDirWatcher(farmwarDir, this);
    exec.scheduleAtFixedRate(task, 0, 1000, TimeUnit.MILLISECONDS);
}
From source file:com.fluxtion.learning.fx.utils.PriceOrderGenerator.java
public PriceOrderGenerator(BiasProcessor biasCheck) {
    this.biasCheck = biasCheck;
    scheduledExecutor = Executors.newSingleThreadScheduledExecutor();
}
From source file:org.jsecurity.session.mgt.ExecutorServiceSessionValidationScheduler.java
public void enableSessionValidation() {
    if (this.interval > 0L) {
        this.service = Executors.newSingleThreadScheduledExecutor();
        this.service.scheduleAtFixedRate(this, interval, interval, TimeUnit.MILLISECONDS);
        this.enabled = true;
    }
}
From source file:io.joynr.messaging.bounceproxy.ControlledBounceProxyModule.java
@Provides
@Singleton
ScheduledExecutorService getScheduledExecutorService() {
    return Executors.newSingleThreadScheduledExecutor();
}
From source file:org.mule.transport.polling.schedule.FixedFrequencyScheduler.java
/**
 * <p>
 * Creates the {@link FixedFrequencyScheduler#executor} that is going to be used to launch schedules
 * </p>
 */
@Override
public void initialise() throws InitialisationException {
    try {
        lifecycleManager.fireInitialisePhase(new LifecycleCallback<Scheduler>() {
            @Override
            public void onTransition(String phaseName, Scheduler object) throws MuleException {
                executor = Executors.newSingleThreadScheduledExecutor();
            }
        });
    } catch (MuleException e) {
        throw new InitialisationException(e, this);
    }
}
From source file:com.hp.autonomy.hod.client.api.HavenOnDemandServiceITCase.java
@Override
@Before
public void setUp() {
    super.setUp();
    havenOnDemandService = new HavenOnDemandServiceImpl(getConfig());
    scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();
    jobService = new JobServiceImpl<>(getConfig(), MapJobStatus.class);
}