List of usage examples for java.util.concurrent.TimeUnit.MINUTES
TimeUnit.MINUTES
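Before the project examples, a minimal self-contained sketch of the most common TimeUnit.MINUTES operations: converting minutes to other units and passing MINUTES as the unit argument of a timed call. The class name and the sample values are illustrative only, not taken from any of the projects below.

import java.util.concurrent.TimeUnit;

public class TimeUnitMinutesDemo {
    public static void main(String[] args) throws InterruptedException {
        // Convert a number of minutes to other units.
        long millis  = TimeUnit.MINUTES.toMillis(5);                    // 300000
        long seconds = TimeUnit.MINUTES.toSeconds(5);                   // 300
        // Convert the other way: 90 seconds expressed in minutes (truncates to 1).
        long minutes = TimeUnit.MINUTES.convert(90, TimeUnit.SECONDS);

        System.out.printf("millis=%d seconds=%d minutes=%d%n", millis, seconds, minutes);

        // TimeUnit.MINUTES.sleep(1); // uncomment to block the current thread for one minute
    }
}

TimeUnit.MINUTES also serves as the unit argument of timed APIs such as ScheduledExecutorService.scheduleAtFixedRate and CountDownLatch.await, as several of the examples below show.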
From source file:br.unb.cic.bionimbuz.controller.elasticitycontroller.ElasticityController.java
@Override
public void start() {
    this.threadExecutor.scheduleAtFixedRate(this, 0, 1, TimeUnit.MINUTES);
}
From source file:com.netflix.spinnaker.orca.clouddriver.tasks.image.ImageForceCacheRefreshTask.java
@Override
public long getTimeout() {
    return TimeUnit.MINUTES.toMillis(5);
}
From source file:com.simplymeasured.prognosticator.HiveWriterImpl.java
public HiveWriterImpl() {
    this.tableHandleCache = CacheBuilder.newBuilder()
            .expireAfterWrite(10, TimeUnit.MINUTES)
            .build();
}
From source file:org.terasoluna.gfw.functionaltest.domain.DBLogCleaner.java
private int cleanup(long savedPeriodMinutes) {
    // calculate cutoff date.
    Date cutoffDate = new Date(System.currentTimeMillis() - TimeUnit.MINUTES.toMillis(savedPeriodMinutes));
    LOGGER.info("Begin cleanup. cutoffDate is '{}'.", cutoffDate);

    // decide max event id of unnecessary log.
    MapSqlParameterSource queryParameters = new MapSqlParameterSource();
    queryParameters.addValue("cutoffDateMillis", cutoffDate.getTime());
    Long maxEventId = namedParameterJdbcTemplate.queryForObject(
            "SELECT MAX(event_id) FROM logging_event WHERE timestmp < :cutoffDateMillis", queryParameters,
            Long.class);

    // delete unnecessary log.
    int deletedCount = 0;
    if (maxEventId != null) {
        MapSqlParameterSource deleteParameters = new MapSqlParameterSource();
        deleteParameters.addValue("eventId", maxEventId);
        namedParameterJdbcTemplate.update("DELETE FROM logging_event_exception WHERE event_id <= :eventId",
                deleteParameters);
        namedParameterJdbcTemplate.update("DELETE FROM logging_event_property WHERE event_id <= :eventId",
                deleteParameters);
        deletedCount = namedParameterJdbcTemplate.update("DELETE FROM logging_event WHERE event_id <= :eventId",
                deleteParameters);
        LOGGER.info("Finished cleanup. Deleted log count is '{}'.", deletedCount);
    } else {
        LOGGER.info("Finished cleanup. Not exists target log.");
    }
    return deletedCount;
}
From source file:com.dcsquare.hivemq.plugin.stormpathplugin.StormpathPluginModule.java
@Override
public Provider<Iterable<? extends AbstractConfiguration>> getConfigurations() {
    return newConfigurationProvider(
            newReloadablePropertiesConfiguration("stormpath.properties", 5, TimeUnit.MINUTES));
}
From source file:jp.co.ctc_g.rack.api.config.RackApiContextConfig.java
/**
 * Configures the embedded servlet container (Tomcat).
 * @return the configured {@link TomcatEmbeddedServletContainerFactory}
 */
@Bean
public EmbeddedServletContainerFactory tomcat() {
    TomcatEmbeddedServletContainerFactory factory = new TomcatEmbeddedServletContainerFactory();
    factory.setPort(port);
    factory.setSessionTimeout(10, TimeUnit.MINUTES);
    factory.addInitializers(new ApiWebConfig());
    return factory;
}
From source file:com.baasbox.metrics.BaasBoxMetric.java
private static void setGauges() {
    // memory gauges
    registry.register(name(GAUGE_MEMORY_MAX_ALLOCABLE), new CachedGauge<Long>(10, TimeUnit.MINUTES) {
        @Override
        public Long loadValue() {
            Runtime rt = Runtime.getRuntime();
            long maxMemory = rt.maxMemory();
            return maxMemory;
        }
    });

    registry.register(name(GAUGE_MEMORY_CURRENT_ALLOCATE), new Gauge<Long>() {
        @Override
        public Long getValue() {
            Runtime rt = Runtime.getRuntime();
            long totalMemory = rt.totalMemory();
            return totalMemory;
        }
    });

    registry.register(name(GAUGE_MEMORY_USED), new Gauge<Long>() {
        @Override
        public Long getValue() {
            Runtime rt = Runtime.getRuntime();
            long freeMemory = rt.freeMemory();
            long totalMemory = rt.totalMemory();
            return totalMemory - freeMemory;
        }
    });

    registry.register(name(GAUGE_FILESYSTEM_DATAFILE_SPACE_LEFT),
            new CachedGauge<Long>(CACHE_TIMEOUT, TimeUnit.MINUTES) {
                @Override
                public Long loadValue() {
                    return new File(BBConfiguration.getDBDir()).getFreeSpace();
                }
            });

    registry.register(name(GAUGE_FILESYSTEM_BACKUPDIR_SPACE_LEFT),
            new CachedGauge<Long>(CACHE_TIMEOUT, TimeUnit.MINUTES) {
                @Override
                public Long loadValue() {
                    return new File(BBConfiguration.getDBBackupDir()).getFreeSpace();
                }
            });

    registry.register(name(GAUGE_DB_DATA_SIZE), new CachedGauge<Long>(CACHE_TIMEOUT, TimeUnit.MINUTES) {
        @Override
        public Long loadValue() {
            boolean opened = false;
            try {
                if (DbHelper.getConnection() == null || DbHelper.getConnection().isClosed()) {
                    DbHelper.open(BBConfiguration.getAPPCODE(), BBConfiguration.getBaasBoxAdminUsername(),
                            BBConfiguration.getBaasBoxAdminUsername());
                    opened = true;
                }
                return DbHelper.getConnection().getSize();
            } catch (InvalidAppCodeException e) {
                throw new RuntimeException(e);
            } finally {
                if (opened)
                    DbHelper.close(DbHelper.getConnection());
            }
        }
    });

    registry.register(name(GAUGE_DB_DATA_DIRECTORY_SIZE),
            new CachedGauge<Long>(CACHE_TIMEOUT, TimeUnit.MINUTES) {
                @Override
                public Long loadValue() {
                    return FileUtils.sizeOfDirectory(new File(BBConfiguration.getDBDir()));
                }
            });

    registry.register(name(GAUGE_DB_MAX_SIZE_THRESHOLD),
            new CachedGauge<BigInteger>(CACHE_TIMEOUT, TimeUnit.MINUTES) {
                @Override
                public BigInteger loadValue() {
                    return BBConfiguration.getDBSizeThreshold();
                }
            });
}
From source file:com.alibaba.rocketmq.storm.domain.BatchMessage.java
public boolean waitFinish() throws InterruptedException {
    return latch.await(WAIT_TIMEOUT, TimeUnit.MINUTES);
}
From source file:fr.xebia.xke.metrics.web.WineController.java
public WineController() {
    searchCache = CacheBuilder.newBuilder().maximumSize(200).expireAfterAccess(2, TimeUnit.MINUTES)
            .recordStats().build(CacheLoader.from(new Function<String, List<Wine>>() {
                @Override
                public List<Wine> apply(String _name) {
                    return wineService.findByName(_name);
                }
            }));

    // TODO Exercise 5 - register gauges for cache size, hits, misses, and ratio; choose appropriate names.
    //      Hint: ratio is hitCount / requestCount.
    // TODO Exercise 6 - register the newly created MetricSet with the registry; also remember to remove
    //      the previously created gauges.
    // TODO Exercise 9 - add two timers as fields to monitor the find service response time in cached and
    //      direct mode ('find.cached', 'find.direct').
}
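For the Exercise 5 TODO above, a minimal sketch of what the gauge registration could look like with Dropwizard Metrics and Guava's CacheStats. The helper method, the MetricRegistry field named metricRegistry, the metric names, and the Java 8 lambda syntax are all assumptions for illustration, not part of the original WineController.

// Hypothetical helper, not in the original source: registers Guava cache statistics as Dropwizard gauges.
// Assumes a com.codahale.metrics.MetricRegistry field named "metricRegistry"; metric names are illustrative.
private void registerCacheGauges() {
    metricRegistry.register("wine.cache.size", (Gauge<Long>) () -> searchCache.size());
    metricRegistry.register("wine.cache.hits", (Gauge<Long>) () -> searchCache.stats().hitCount());
    metricRegistry.register("wine.cache.misses", (Gauge<Long>) () -> searchCache.stats().missCount());
    // CacheStats.hitRate() is hitCount / requestCount, the ratio mentioned in the exercise hint.
    metricRegistry.register("wine.cache.hit.ratio", (Gauge<Double>) () -> searchCache.stats().hitRate());
}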
From source file:com.github.fhuss.storm.cassandra.client.ClusterFactory.java
/**
 * Creates a new Cluster based on the specified configuration.
 * @param stormConf the storm configuration.
 * @return a new {@link Cluster} instance.
 */
@Override
protected Cluster make(Map<String, Object> stormConf) {
    CassandraConf cassandraConf = new CassandraConf(stormConf);

    Cluster.Builder cluster = Cluster.builder().withoutJMXReporting().withoutMetrics()
            .addContactPoints(cassandraConf.getNodes()).withPort(cassandraConf.getPort())
            .withRetryPolicy(DowngradingConsistencyRetryPolicy.INSTANCE)
            .withReconnectionPolicy(new ExponentialReconnectionPolicy(100L, TimeUnit.MINUTES.toMillis(1)))
            .withLoadBalancingPolicy(new TokenAwarePolicy(new RoundRobinPolicy()));

    final String username = cassandraConf.getUsername();
    final String password = cassandraConf.getPassword();
    if (StringUtils.isNotEmpty(username) && StringUtils.isNotEmpty(password)) {
        cluster.withAuthProvider(new PlainTextAuthProvider(username, password));
    }

    QueryOptions options = new QueryOptions().setConsistencyLevel(cassandraConf.getConsistencyLevel());
    cluster.withQueryOptions(options);

    return cluster.build();
}