List of usage examples for java.util.concurrent TimeUnit DAYS
TimeUnit DAYS
To view the source code for java.util.concurrent TimeUnit DAYS, click the Source Link.
From source file:com.adobe.ags.curly.controller.BatchRunner.java
@Override public void run() { try {//from ww w . ja v a 2 s . co m ApplicationState.getInstance().runningProperty().set(true); executor = new ThreadPoolExecutor(concurrency, concurrency, 1, TimeUnit.DAYS, tasks); executor.allowCoreThreadTimeOut(true); result.start(); buildWorkerPool.run(); executor.shutdown(); executor.awaitTermination(1, TimeUnit.DAYS); result.stop(); } catch (InterruptedException ex) { Logger.getLogger(BatchRunner.class.getName()).log(Level.SEVERE, null, ex); if (!executor.isShutdown()) { executor.getQueue().clear(); } } result.stop(); }
From source file:org.yccheok.jstock.engine.AjaxGoogleSearchEngineMonitor.java
/** * Stop this monitor from running. After stopping, this monitor can no * longer be reused./*from w w w. j av a 2s . c o m*/ */ public void stop() { executor.shutdownNow(); try { executor.awaitTermination(100, TimeUnit.DAYS); } catch (InterruptedException ex) { log.error(null, ex); } }
From source file:com.smartitengineering.cms.spi.lock.impl.distributed.ZKLock.java
public boolean tryLock() { try {//from ww w . j a va2 s.c o m return tryLock(-1, TimeUnit.DAYS); } catch (Exception ex) { logger.warn(ex.getMessage(), ex); return false; } }
From source file:org.eclipse.orion.server.git.jobs.CloneJob.java
/**
 * Creates a clone job for the given repository and user.
 *
 * @param clone           the clone descriptor to execute
 * @param userRunningTask the user the task runs as
 * @param credentials     git credentials used for the remote
 * @param user            the requesting user
 * @param cloneLocation   target location of the clone
 * @param project         the project to associate with the clone
 * @param gitUserName     author name for git configuration
 * @param gitUserMail     author e-mail for git configuration
 * @param initProject     whether to initialize the project after cloning
 */
public CloneJob(Clone clone, String userRunningTask, CredentialsProvider credentials, String user,
        String cloneLocation, ProjectInfo project, String gitUserName, String gitUserMail,
        boolean initProject) {
    super(userRunningTask, true, (GitCredentialsProvider) credentials);
    this.clone = clone;
    this.user = user;
    this.cloneLocation = cloneLocation;
    this.project = project;
    this.gitUserName = gitUserName;
    this.gitUserMail = gitUserMail;
    this.initProject = initProject;
    setFinalMessage("Clone complete.");
    // Task metadata is kept around for one week before expiring.
    setTaskExpirationTime(TimeUnit.DAYS.toMillis(7));
}
From source file:com.linkedin.pinot.core.startree.TestOffheapStarTreeBuilder.java
/**
 * Builds an off-heap star tree over a synthetic schema and verifies that
 * skipped-materialization dimensions come back as "ALL" in the aggregate rows.
 *
 * @param numDimensions                     number of string dimensions d1..dN
 * @param numMetrics                        number of int metrics m1..mN
 * @param numSkipMaterializationDimensions  how many trailing dimensions skip materialization
 * @throws Exception on builder or I/O failure
 */
private void testSimpleCore(int numDimensions, int numMetrics, int numSkipMaterializationDimensions) throws Exception {
    int ROWS = (int) MathUtils.factorial(numDimensions);
    StarTreeBuilderConfig builderConfig = new StarTreeBuilderConfig();
    Schema schema = new Schema();
    builderConfig.dimensionsSplitOrder = new ArrayList<>();
    builderConfig.setSkipMaterializationForDimensions(new HashSet<String>());
    Set<String> skipMaterializationForDimensions = builderConfig.getSkipMaterializationForDimensions();
    // Leading dimensions participate in the split order; the trailing
    // numSkipMaterializationDimensions are marked to skip materialization.
    for (int i = 0; i < numDimensions; i++) {
        String dimName = "d" + (i + 1);
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, DataType.STRING, true);
        schema.addField(dimensionFieldSpec);
        if (i < (numDimensions - numSkipMaterializationDimensions)) {
            builderConfig.dimensionsSplitOrder.add(dimName);
        } else {
            builderConfig.getSkipMaterializationForDimensions().add(dimName);
        }
    }
    schema.setTimeFieldSpec(new TimeFieldSpec("daysSinceEpoch", DataType.INT, TimeUnit.DAYS));
    for (int i = 0; i < numMetrics; i++) {
        String metricName = "m" + (i + 1);
        MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, DataType.INT);
        schema.addField(metricFieldSpec);
    }
    builderConfig.maxLeafRecords = 10;
    builderConfig.schema = schema;
    builderConfig.outDir = new File("/tmp/startree");
    OffHeapStarTreeBuilder builder = new OffHeapStarTreeBuilder();
    builder.init(builderConfig);
    // Generate ROWS synthetic records; dimension value cardinality shrinks
    // with the dimension index (row % (numDimensions - i)).
    HashMap<String, Object> map = new HashMap<>();
    for (int row = 0; row < ROWS; row++) {
        for (int i = 0; i < numDimensions; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + row % (numDimensions - i));
        }
        // Time column is constant across all rows.
        map.put("daysSinceEpoch", 1);
        for (int i = 0; i < numMetrics; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            map.put(metName, 1);
        }
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        builder.append(genericRow);
    }
    builder.build();
    int totalDocs = builder.getTotalRawDocumentCount() + builder.getTotalAggregateDocumentCount();
    // Dump every document (raw + aggregate) for manual inspection.
    Iterator<GenericRow> iterator = builder.iterator(0, totalDocs);
    while (iterator.hasNext()) {
        GenericRow row = iterator.next();
        System.out.println(row);
    }
    // Aggregate documents start after the raw ones; skipped dimensions must
    // have been collapsed to the "ALL" sentinel value there.
    iterator = builder.iterator(builder.getTotalRawDocumentCount(), totalDocs);
    while (iterator.hasNext()) {
        GenericRow row = iterator.next();
        for (String skipDimension : skipMaterializationForDimensions) {
            String rowValue = (String) row.getValue(skipDimension);
            assert (rowValue.equals("ALL"));
        }
    }
    FileUtils.deleteDirectory(builderConfig.outDir);
}
From source file:metlos.executors.batch.BatchCpuThrottlingExecutorTest.java
@Test public void maxUsage_SingleThreaded() throws Exception { NamingThreadFactory factory = new NamingThreadFactory(); ThreadPoolExecutor e = new ThreadPoolExecutor(1, 1, 0, TimeUnit.DAYS, new LinkedBlockingQueue<Runnable>(), factory);//from w w w . ja va2 s . c om e.prestartAllCoreThreads(); List<Future<?>> payloadResults = new ArrayList<Future<?>>(); long startTime = System.nanoTime(); //create load for (int i = 0; i < NOF_JOBS; ++i) { Future<?> f = e.submit(new Payload()); payloadResults.add(f); } //wait for it all to finish for (Future<?> f : payloadResults) { f.get(); } long endTime = System.nanoTime(); long time = endTime - startTime; LOG.info("MAX Singlethreaded test took " + (time / 1000.0 / 1000.0) + "ms"); ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); long cpuTime = 0; for (Thread t : factory.createdThreads) { long threadCpuTime = threadBean.getThreadCpuTime(t.getId()); LOG.info(t.getName() + ": " + threadCpuTime + "ns"); cpuTime += threadCpuTime; } float actualUsage = (float) cpuTime / time; LOG.info("MAX Singlethreaded overall usage: " + actualUsage); }
From source file:org.eclipse.skalli.core.rest.admin.StatisticsQueryTest.java
/**
 * A non-numerical quantity ("hugo") must fall back to 1, while the trailing
 * unit character (case-insensitive) still selects the time unit; no suffix
 * defaults to days.
 */
@Test
public void testNonNumericalPeriod() throws Exception {
    for (String suffix : new String[] { "", "d", "D" }) {
        assertPeriodQuery("hugo" + suffix, 1, TimeUnit.DAYS);
    }
    for (String suffix : new String[] { "m", "M" }) {
        assertPeriodQuery("hugo" + suffix, 1, TimeUnit.MINUTES);
    }
    for (String suffix : new String[] { "h", "H" }) {
        assertPeriodQuery("hugo" + suffix, 1, TimeUnit.HOURS);
    }
}
From source file:org.wso2.security.tools.scanner.dependency.js.preprocessor.ResourceDownloader.java
/**
 * Calculates the number of whole days between the published date of a
 * particular weekly release and the scan date (current system date).
 *
 * @param releaseDate published date of a particular weekly release, in yyyy-MM-dd format.
 * @return number of days between the published date of the weekly release
 *         and the scan date (current system date)
 * @throws ParseException if the release date string cannot be parsed.
 */
long getDateDiffFromLastWeeklyRelease(String releaseDate) throws ParseException {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd", Locale.ENGLISH);
    Date firstDate = sdf.parse(releaseDate);
    // Truncate "now" to midnight by round-tripping through the date-only
    // format. The original parsed LocalDateTime.now().toString(), which only
    // worked because lenient SimpleDateFormat ignores the trailing "T..."
    // portion of the ISO timestamp.
    Date secondDate = sdf.parse(sdf.format(new Date()));
    long diffInMillis = Math.abs(secondDate.getTime() - firstDate.getTime());
    return TimeUnit.DAYS.convert(diffInMillis, TimeUnit.MILLISECONDS);
}
From source file:org.zenoss.zep.dao.impl.ConfigDaoImpl.java
/**
 * Computes how many days the configured number of partitions can cover.
 *
 * @param config partition table configuration (unit and duration per partition)
 * @return the total span of MAX_PARTITIONS partitions, in whole days
 */
private static int calculateMaximumDays(PartitionTableConfig config) {
    // Total span covered by all partitions, expressed in minutes.
    final long totalMinutes =
            MAX_PARTITIONS * config.getPartitionUnit().toMinutes(config.getPartitionDuration());
    return (int) TimeUnit.MINUTES.toDays(totalMinutes);
}
From source file:outfox.dict.contest.service.CrontabService.java
/**
 * Post-construct hook: starts the scheduled background job that refreshes
 * singer information.
 */
@PostConstruct
public void init() {
    // NOTE(review): the executor is a local variable and is never shut down;
    // consider storing it in a field and stopping it in a @PreDestroy hook.
    ScheduledExecutorService executor = Executors.newScheduledThreadPool(ContestConsts.CORE_POOL_SIZE);
    // First run immediately (initial delay 0), then repeat every
    // CRONTAB_TIME days at a fixed rate.
    executor.scheduleAtFixedRate(new SingerInfoUpdate(), 0, ContestConsts.CRONTAB_TIME, TimeUnit.DAYS);
    // Original comment here was mojibake; it appeared to reference the
    // disabled pre-loading call below — TODO confirm with the author.
    // loadingPreparedBarrage();
}