List of usage examples for java.util.concurrent TimeUnit DAYS
TimeUnit DAYS
From source file:com.linkedin.pinot.transport.perf.ScatterGatherPerfClient.java
private void setup() {
    MetricsRegistry registry = new MetricsRegistry();
    _timedExecutor = new ScheduledThreadPoolExecutor(1);
    _service = new ThreadPoolExecutor(10, 10, 10, TimeUnit.DAYS, new LinkedBlockingDeque<Runnable>());
    _eventLoopGroup = new NioEventLoopGroup(10);
    _timer = new HashedWheelTimer();

    NettyClientMetrics clientMetrics = new NettyClientMetrics(registry, "client_");
    PooledNettyClientResourceManager rm = new PooledNettyClientResourceManager(_eventLoopGroup, _timer,
            clientMetrics);
    _pool = new KeyedPoolImpl<ServerInstance, NettyClientConnection>(1, _maxActiveConnections, 300000, 10, rm,
            _timedExecutor, MoreExecutors.sameThreadExecutor(), registry);
    rm.setPool(_pool);
    _scatterGather = new ScatterGatherImpl(_pool, _service);
    for (AsyncReader r : _readerThreads) {
        r.start();
    }
}
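Here the 10-day keep-alive passed with TimeUnit.DAYS effectively tells the executor never to reap idle worker threads for the duration of the benchmark. A minimal, self-contained sketch of the same pattern (the class name below is illustrative and not part of the Pinot code):

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class LongLivedPoolExample {
    public static void main(String[] args) {
        // Fixed pool of 10 threads. The 10-day keep-alive applies to threads above the
        // core size (none here unless allowCoreThreadTimeOut(true) is set), so in
        // practice it just signals "never time idle threads out".
        ThreadPoolExecutor pool = new ThreadPoolExecutor(
                10, 10, 10, TimeUnit.DAYS, new LinkedBlockingQueue<Runnable>());
        pool.submit(() -> System.out.println("running on " + Thread.currentThread().getName()));
        pool.shutdown();
    }
}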
From source file:org.opendaylight.controller.cluster.raft.RaftActorTest.java
@Test
public void testRaftActorRecoveryWithPersistenceEnabled() throws Exception {
    TEST_LOG.info("testRaftActorRecoveryWithPersistenceEnabled starting");

    new JavaTestKit(getSystem()) {
        {
            String persistenceId = factory.generateActorId("follower-");
            DefaultConfigParamsImpl config = new DefaultConfigParamsImpl();

            // Set the heartbeat interval high to essentially disable election otherwise the test
            // may fail if the actor is switched to Leader and the commitIndex is set to the last
            // log entry.
            config.setHeartBeatInterval(new FiniteDuration(1, TimeUnit.DAYS));

            ImmutableMap<String, String> peerAddresses = ImmutableMap.<String, String>builder()
                    .put("member1", "address").build();
            ActorRef followerActor = factory
                    .createActor(MockRaftActor.props(persistenceId, peerAddresses, config), persistenceId);

            watch(followerActor);

            List<ReplicatedLogEntry> snapshotUnappliedEntries = new ArrayList<>();
            ReplicatedLogEntry entry1 = new MockRaftActorContext.MockReplicatedLogEntry(1, 4,
                    new MockRaftActorContext.MockPayload("E"));
            snapshotUnappliedEntries.add(entry1);

            int lastAppliedDuringSnapshotCapture = 3;
            int lastIndexDuringSnapshotCapture = 4;

            // 4 messages as part of snapshot, which are applied to state
            ByteString snapshotBytes = fromObject(Arrays.asList(new MockRaftActorContext.MockPayload("A"),
                    new MockRaftActorContext.MockPayload("B"), new MockRaftActorContext.MockPayload("C"),
                    new MockRaftActorContext.MockPayload("D")));

            Snapshot snapshot = Snapshot.create(snapshotBytes.toByteArray(), snapshotUnappliedEntries,
                    lastIndexDuringSnapshotCapture, 1, lastAppliedDuringSnapshotCapture, 1);
            InMemorySnapshotStore.addSnapshot(persistenceId, snapshot);

            // add more entries after snapshot is taken
            List<ReplicatedLogEntry> entries = new ArrayList<>();
            ReplicatedLogEntry entry2 = new MockRaftActorContext.MockReplicatedLogEntry(1, 5,
                    new MockRaftActorContext.MockPayload("F", 2));
            ReplicatedLogEntry entry3 = new MockRaftActorContext.MockReplicatedLogEntry(1, 6,
                    new MockRaftActorContext.MockPayload("G", 3));
            ReplicatedLogEntry entry4 = new MockRaftActorContext.MockReplicatedLogEntry(1, 7,
                    new MockRaftActorContext.MockPayload("H", 4));
            entries.add(entry2);
            entries.add(entry3);
            entries.add(entry4);

            int lastAppliedToState = 5;
            int lastIndex = 7;

            InMemoryJournal.addEntry(persistenceId, 5, entry2);
            // 2 entries are applied to state besides the 4 entries in snapshot
            InMemoryJournal.addEntry(persistenceId, 6, new ApplyJournalEntries(lastAppliedToState));
            InMemoryJournal.addEntry(persistenceId, 7, entry3);
            InMemoryJournal.addEntry(persistenceId, 8, entry4);

            // kill the actor
            followerActor.tell(PoisonPill.getInstance(), null);
            expectMsgClass(duration("5 seconds"), Terminated.class);

            unwatch(followerActor);

            // reinstate the actor
            TestActorRef<MockRaftActor> ref = factory
                    .createTestActor(MockRaftActor.props(persistenceId, peerAddresses, config));

            MockRaftActor mockRaftActor = ref.underlyingActor();
            mockRaftActor.waitForRecoveryComplete();

            RaftActorContext context = mockRaftActor.getRaftActorContext();
            assertEquals("Journal log size", snapshotUnappliedEntries.size() + entries.size(),
                    context.getReplicatedLog().size());
            assertEquals("Journal data size", 10, context.getReplicatedLog().dataSize());
            assertEquals("Last index", lastIndex, context.getReplicatedLog().lastIndex());
            assertEquals("Last applied", lastAppliedToState, context.getLastApplied());
            assertEquals("Commit index", lastAppliedToState, context.getCommitIndex());
            assertEquals("Recovered state size", 6, mockRaftActor.getState().size());

            mockRaftActor.waitForInitializeBehaviorComplete();

            assertEquals("getRaftState", RaftState.Follower, mockRaftActor.getRaftState());
        }
    };

    TEST_LOG.info("testRaftActorRecoveryWithPersistenceEnabled ending");
}
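In this test, a heartbeat interval of 1 day (via TimeUnit.DAYS) pushes Raft elections far beyond the lifetime of the test so they can never interfere with the assertions. The same "schedule it a day out so it never fires" trick works with any scheduler; a minimal sketch, independent of the Akka and OpenDaylight classes above (class name is illustrative):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class DisabledHeartbeatExample {
    public static void main(String[] args) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        // Schedule the "heartbeat" one day out: a test that runs in seconds
        // will finish long before it ever fires.
        scheduler.schedule(() -> System.out.println("heartbeat"), 1, TimeUnit.DAYS);
        // ... exercise the code under test here ...
        scheduler.shutdownNow(); // cancels the pending heartbeat
    }
}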
From source file:co.marcin.novaguilds.util.StringUtils.java
public static String secondsToString(long seconds, TimeUnit unit) {
    if (seconds <= 0) {
        seconds = 0;
    }

    int minute = 60;
    int hour = 60 * minute;
    int day = hour * 24;
    int week = day * 7;
    int month = day * 31;
    int year = 31536000;

    long years = seconds / year;
    seconds = seconds % year;
    long months = seconds / month;
    seconds = seconds % month;
    long weeks = seconds / week;
    seconds = seconds % week;
    long days = seconds / day;
    seconds = seconds % day;
    long hours = seconds / hour;
    seconds = seconds % hour;
    long minutes = seconds / minute;
    seconds = seconds % minute;

    String stringYears = "", stringMonths = "", stringWeeks = "", stringDays = "", stringHours = "",
            stringSeconds = "", stringMinutes = "";

    if (years > 0) {
        Message form = years > 1 ? Message.TIMEUNIT_YEAR_PLURAL : Message.TIMEUNIT_YEAR_SINGULAR;
        stringYears = years + " " + form.get() + " ";
    }

    if (months > 0) {
        Message form = months > 1 ? Message.TIMEUNIT_MONTH_PLURAL : Message.TIMEUNIT_MONTH_SINGULAR;
        stringMonths = months + " " + form.get() + " ";
    }

    if (weeks > 0) {
        Message form = weeks > 1 ? Message.TIMEUNIT_WEEK_PLURAL : Message.TIMEUNIT_WEEK_SINGULAR;
        stringWeeks = weeks + " " + form.get() + " ";
    }

    if (days > 0) {
        Message form = days > 1 ? Message.TIMEUNIT_DAY_PLURAL : Message.TIMEUNIT_DAY_SINGULAR;
        stringDays = days + " " + form.get() + " ";
    }

    if (hours > 0) {
        Message form = hours > 1 ? Message.TIMEUNIT_HOUR_PLURAL : Message.TIMEUNIT_HOUR_SINGULAR;
        stringHours = hours + " " + form.get() + " ";
    }

    if (minutes > 0) {
        Message form = minutes > 1 ? Message.TIMEUNIT_MINUTE_PLURAL : Message.TIMEUNIT_MINUTE_SINGULAR;
        stringMinutes = minutes + " " + form.get() + " ";
    }

    if (seconds > 0 || (seconds == 0 && minutes == 0 && hours == 0 && days == 0 && weeks == 0 && months == 0
            && years == 0)) {
        Message form = seconds == 1 ? Message.TIMEUNIT_SECOND_SINGULAR : Message.TIMEUNIT_SECOND_PLURAL;
        stringSeconds = seconds + " " + form.get() + " ";
    }

    if (unit == TimeUnit.DAYS && days > 0) {
        stringHours = "";
        stringMinutes = "";
        stringSeconds = "";
    } else if (unit == TimeUnit.HOURS && hours > 0) {
        stringMinutes = "";
        stringSeconds = "";
    } else if (unit == TimeUnit.MINUTES && minutes > 0) {
        stringSeconds = "";
    }

    String r = stringYears + stringMonths + stringWeeks + stringDays + stringHours + stringMinutes
            + stringSeconds;
    r = r.substring(0, r.length() - 1);
    return r;
}
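The method above does the unit arithmetic by hand and uses TimeUnit values (DAYS, HOURS, MINUTES) only to decide how much detail to keep in the output. For the days/hours/minutes/seconds part, TimeUnit's own conversion helpers can do the splitting directly; a simplified, self-contained sketch (class name is illustrative, and it ignores the plugin's weeks, months, and localized labels):

import java.util.concurrent.TimeUnit;

public class DurationBreakdownExample {
    /** Formats a duration given in seconds as "Xd Yh Zm Ws" using TimeUnit conversions. */
    static String format(long totalSeconds) {
        long days = TimeUnit.SECONDS.toDays(totalSeconds);
        long hours = TimeUnit.SECONDS.toHours(totalSeconds) - TimeUnit.DAYS.toHours(days);
        long minutes = TimeUnit.SECONDS.toMinutes(totalSeconds)
                - TimeUnit.DAYS.toMinutes(days) - TimeUnit.HOURS.toMinutes(hours);
        long seconds = totalSeconds - TimeUnit.MINUTES.toSeconds(TimeUnit.SECONDS.toMinutes(totalSeconds));
        return days + "d " + hours + "h " + minutes + "m " + seconds + "s";
    }

    public static void main(String[] args) {
        System.out.println(format(200000L)); // prints "2d 7h 33m 20s"
    }
}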
From source file:com.linkedin.pinot.query.plan.PlanMakerTest.java
private void setupSegmentList(int numberOfSegments) throws Exception {
    final String filePath = TestUtils
            .getFileFromResourceUrl(getClass().getClassLoader().getResource(SMALL_AVRO_DATA));
    _indexSegmentList.clear();
    if (INDEXES_DIR.exists()) {
        FileUtils.deleteQuietly(INDEXES_DIR);
    }
    INDEXES_DIR.mkdir();

    for (int i = 0; i < numberOfSegments; ++i) {
        final File segmentDir = new File(INDEXES_DIR, "segment_" + i);

        final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
                new File(filePath), segmentDir, "dim1", TimeUnit.DAYS, "test");

        final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
        driver.init(config);
        driver.build();

        LOGGER.debug("built at: {}", segmentDir.getAbsolutePath());
        _indexSegmentList
                .add(ColumnarSegmentLoader.load(new File(segmentDir, driver.getSegmentName()), ReadMode.mmap));
    }
}
From source file:org.sonatype.timeline.internal.TimelinePersistorTest.java
/**
 * Test for collectFiles method, which is actually the "heart" of purge method (it just iterates over
 * resulting file list and deletes those), and the readAllSinceDays method, used when indexer is being
 * rebuilt.
 */
public void testcollectFiles() throws Exception {
    final long now = System.currentTimeMillis();
    final long today = now - (now % 1000); // cut millis, as filename timestamps used for data are second resolution
    final long minus1d = today - TimeUnit.DAYS.toMillis(1L);
    final long minus2d = today - TimeUnit.DAYS.toMillis(2L);
    final long minus3d = today - TimeUnit.DAYS.toMillis(3L);
    final long minus4d = today - TimeUnit.DAYS.toMillis(4L);

    new File(persistDirectory, persistor.buildTimestampedFileName(today)).createNewFile(); // today's file
    new File(persistDirectory, persistor.buildTimestampedFileName(minus1d)).createNewFile(); // yesterday's
    new File(persistDirectory, persistor.buildTimestampedFileName(minus2d)).createNewFile(); // -2
    new File(persistDirectory, persistor.buildTimestampedFileName(minus3d)).createNewFile(); // -3
    new File(persistDirectory, persistor.buildTimestampedFileName(minus4d)).createNewFile(); // -4

    // we should have 5 files
    final List<File> files = Arrays.asList(persistDirectory.listFiles());
    assertThat(files, hasSize(5));

    // check collectFiles, the "heart" of purge, some edge cases
    {
        // "newest" is here (0 day old and newer)
        final List<File> collectedFiles = persistor.collectFiles(0, true);
        assertThat(collectedFiles, hasSize(1));
        assertThat(persistor.getTimestampedFileNameTimestamp(collectedFiles.get(0)), equalTo(today));
    }
    {
        // all except "newest" is here (0 day and older)
        final List<File> collectedFiles = persistor.collectFiles(0, false);
        assertThat(collectedFiles, hasSize(4));
        assertThat(persistor.getTimestampedFileNameTimestamp(collectedFiles.get(0)), equalTo(minus1d));
    }

    // purge related
    {
        // older than 1 days:
        final List<File> collectedFiles = persistor.collectFiles(1, false);
        assertThat(collectedFiles, hasSize(3));
        assertThat(persistor.getTimestampedFileNameTimestamp(collectedFiles.get(0)), equalTo(minus2d));
    }
    {
        // older than 2 days:
        final List<File> collectedFiles = persistor.collectFiles(2, false);
        assertThat(collectedFiles, hasSize(2));
        assertThat(persistor.getTimestampedFileNameTimestamp(collectedFiles.get(0)), equalTo(minus3d));
    }
    {
        // older than 3 days:
        final List<File> collectedFiles = persistor.collectFiles(3, false);
        assertThat(collectedFiles, hasSize(1));
        assertThat(persistor.getTimestampedFileNameTimestamp(collectedFiles.get(0)), equalTo(minus4d));
    }
    {
        // older than 4 days:
        final List<File> collectedFiles = persistor.collectFiles(4, false);
        assertThat(collectedFiles, hasSize(0));
    }
}
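The test above uses TimeUnit.DAYS.toMillis(n) to back-date file timestamps by whole days, which is the same arithmetic a retention/purge check needs. A minimal sketch of that cutoff calculation, assuming nothing beyond the JDK (class and variable names are illustrative):

import java.util.concurrent.TimeUnit;

public class PurgeCutoffExample {
    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        int retentionDays = 3;
        // Anything stamped before this instant is older than the retention window.
        long cutoff = now - TimeUnit.DAYS.toMillis(retentionDays);
        long fileTimestamp = now - TimeUnit.DAYS.toMillis(4); // pretend the file is 4 days old
        System.out.println("purge? " + (fileTimestamp < cutoff)); // true
    }
}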
From source file:org.carbondata.processing.globalsurrogategenerator.LevelGlobalSurrogateGeneratorThread.java
@Override
public Void call() throws Exception {
    long currentTimeMillis = System.currentTimeMillis();
    long currentTimeMillis1 = System.currentTimeMillis();
    Hierarchy[] extractHierarchies = CarbonSchemaParser.extractHierarchies(schema, dimension);
    Level cubeLevel = extractHierarchies[0].levels[0];
    boolean isPartitionColumn = partitionColumn.equals(cubeLevel.name);
    if (partitionColumn.equals(cubeLevel.name)) {
        isPartitionColumn = true;
    }
    RelationOrJoin relation = extractHierarchies[0].relation;
    String hierarchyTable = relation == null ? tableName : ((Table) extractHierarchies[0].relation).name;
    String levelFileName = hierarchyTable + '_' + cubeLevel.name;
    List<PartitionMemberVo> partitionMemberVoList = new ArrayList<PartitionMemberVo>(
            CarbonCommonConstants.CONSTANT_SIZE_TEN);
    ExecutorService ex = Executors.newFixedThreadPool(10);
    PartitionMemberVo memberVo = null;
    List<Future<Map<String, Integer>>> submitList = new ArrayList<Future<Map<String, Integer>>>(
            CarbonCommonConstants.CONSTANT_SIZE_TEN);
    for (int i = 0; i < partitionLocation.length; i++) {
        int partitionLength = partitionLocation[i].length;
        if (partitionLength == 0) {
            LOGGER.info("partition length is 0");
            continue;
        }
        String path = partitionLocation[i][partitionLength - 1] + '/' + levelFileName + ".level";
        FileType fileType = FileFactory.getFileType(path);
        if (!FileFactory.isFileExist(path, fileType)) {
            LOGGER.info("File does not exist at path :: " + path);
            continue;
        }
        CarbonFile carbonFile = FileFactory.getCarbonFile(path, fileType);
        memberVo = new PartitionMemberVo();
        memberVo.setPath(partitionLocation[i][partitionLength - 1]);
        partitionMemberVoList.add(memberVo);
        Future<Map<String, Integer>> submit = ex.submit(new ReaderThread(carbonFile));
        submitList.add(submit);
    }
    ex.shutdown();
    ex.awaitTermination(1, TimeUnit.DAYS);
    if (partitionMemberVoList.size() < 1) {
        return null;
    }
    int maxSeqenceKey = getMaxSequenceKeyAssigned(levelFileName + ".globallevel");
    int index = 0;
    for (Future<Map<String, Integer>> future : submitList) {
        partitionMemberVoList.get(index).setMembersMap(future.get());
        index++;
    }
    LOGGER.info("Time Taken to read surrogate for Level: " + levelFileName + " : "
            + (System.currentTimeMillis() - currentTimeMillis));
    currentTimeMillis = System.currentTimeMillis();
    ex = Executors.newFixedThreadPool(5);
    createGlobalSurrogateKey(currentTimeMillis, currentTimeMillis1, isPartitionColumn, levelFileName,
            partitionMemberVoList, ex, maxSeqenceKey);
    return null;
}
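Here TimeUnit.DAYS gives awaitTermination an effectively unbounded timeout: after shutdown(), the calling thread waits up to a day for all reader tasks to drain. A self-contained sketch of that shutdown pattern (class name is illustrative, not CarbonData code):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AwaitTerminationExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 8; i++) {
            final int task = i;
            pool.submit(() -> System.out.println("task " + task));
        }
        pool.shutdown();
        // A 1-day timeout is a common way to say "wait for everything to finish"
        // without blocking forever if something goes badly wrong.
        if (!pool.awaitTermination(1, TimeUnit.DAYS)) {
            pool.shutdownNow();
        }
    }
}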
From source file:com.linkedin.pinot.query.transform.TransformGroupByTest.java
/**
 * Test for group-by with transformed time column from millis to days.
 *
 * @throws Exception
 */
@Test
public void testTimeRollUp() throws Exception {
    String query = String.format(
            "select sum(%s) from xformSegTable group by timeConvert(%s, 'MILLISECONDS', 'DAYS')", METRIC_NAME,
            TIME_COLUMN_NAME);

    AggregationGroupByResult groupByResult = executeGroupByQuery(_indexSegment, query);
    Assert.assertNotNull(groupByResult);

    Iterator<GroupKeyGenerator.GroupKey> groupKeyIterator = groupByResult.getGroupKeyIterator();
    Assert.assertNotNull(groupKeyIterator);

    // Compute the expected answer for the query.
    Map<String, Double> expectedValuesMap = new HashMap<>();
    _recordReader.rewind();
    for (int row = 0; row < NUM_ROWS; row++) {
        GenericRow genericRow = _recordReader.next();
        long daysSinceEpoch = TimeUnit.DAYS.convert(((Long) genericRow.getValue(TIME_COLUMN_NAME)),
                TimeUnit.MILLISECONDS);

        Double value = (Double) genericRow.getValue(METRIC_NAME);
        String key = String.valueOf(daysSinceEpoch);

        Double prevValue = expectedValuesMap.get(key);
        if (prevValue == null) {
            expectedValuesMap.put(key, value);
        } else {
            expectedValuesMap.put(key, prevValue + value);
        }
    }

    compareGroupByResults(groupByResult, expectedValuesMap);
}
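The expected values are built by rolling epoch milliseconds up to whole days with TimeUnit.DAYS.convert. Stripped of the Pinot test harness, that conversion looks like this (class name is illustrative):

import java.util.concurrent.TimeUnit;

public class DaysSinceEpochExample {
    public static void main(String[] args) {
        long nowMillis = System.currentTimeMillis();
        // Both forms truncate toward zero and give the same result.
        long daysSinceEpoch = TimeUnit.DAYS.convert(nowMillis, TimeUnit.MILLISECONDS);
        long sameThing = TimeUnit.MILLISECONDS.toDays(nowMillis);
        System.out.println(daysSinceEpoch + " == " + sameThing);
    }
}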
From source file:org.dcache.util.histograms.HistogramModelTest.java
@Test
public void buildShouldFailWhenNoUnitGivenToCountingHistogram()
        throws NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException {
    givenCountingHistogram();
    givenFilelifetimeValuesFor(150);
    givenBinCountOf(51);
    givenBinLabelOf(TimeUnit.DAYS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("File Lifetime Count");
    whenConfigureIsCalled();
    assertThatBuildFailed();
}
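In this test TimeUnit.DAYS is only used as a label: name() returns the enum constant's string form. Because TimeUnit is a plain enum, the string round-trips cleanly back to the constant, which is handy for configuration values; a small sketch (class name is illustrative):

import java.util.concurrent.TimeUnit;

public class TimeUnitNameExample {
    public static void main(String[] args) {
        String label = TimeUnit.DAYS.name();       // "DAYS"
        TimeUnit parsed = TimeUnit.valueOf(label); // back to TimeUnit.DAYS
        System.out.println(label + " -> " + parsed);
    }
}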
From source file:com.atinternet.tracker.Tool.java
/**
 * Get days count between two millis times.
 *
 * @param latestTimeMillis long
 * @param oldestTimeMillis long
 * @return int
 */
static int getDaysBetweenTimes(long latestTimeMillis, long oldestTimeMillis) {
    return (int) TimeUnit.DAYS.convert((latestTimeMillis - oldestTimeMillis), TimeUnit.MILLISECONDS);
}
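One thing worth knowing about this one-liner: TimeUnit.DAYS.convert truncates, so any difference shorter than 24 hours counts as 0 days. A small usage sketch showing both the normal case and the truncation (class name is illustrative):

import java.util.concurrent.TimeUnit;

public class DaysBetweenExample {
    static int getDaysBetween(long latestMillis, long oldestMillis) {
        return (int) TimeUnit.DAYS.convert(latestMillis - oldestMillis, TimeUnit.MILLISECONDS);
    }

    public static void main(String[] args) {
        long now = System.currentTimeMillis();
        long threeDaysAgo = now - TimeUnit.DAYS.toMillis(3);
        long twentyThreeHoursAgo = now - TimeUnit.HOURS.toMillis(23);
        System.out.println(getDaysBetween(now, threeDaysAgo));        // 3
        System.out.println(getDaysBetween(now, twentyThreeHoursAgo)); // 0 -- conversion truncates
    }
}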
From source file:org.glite.security.voms.admin.core.VOMSService.java
protected static void startBackgroundTasks() {
    VOMSExecutorService es = VOMSExecutorService.instance();
    VOMSConfiguration conf = VOMSConfiguration.instance();

    List<Integer> aupReminders = conf.getAUPReminderIntervals();

    es.startBackgroundTask(
            new SignAUPReminderCheckTask(DAOFactory.instance(), EventManager.instance(),
                    SystemTimeProvider.INSTANCE, aupReminders, TimeUnit.DAYS),
            VOMSConfigurationConstants.MEMBERSHIP_CHECK_PERIOD, 300L);

    es.startBackgroundTask(
            new CancelSignAUPTasksForExpiredUsersTask(DAOFactory.instance(), EventManager.instance()),
            VOMSConfigurationConstants.MEMBERSHIP_CHECK_PERIOD, 300L);

    es.startBackgroundTask(new UpdateCATask(), VOMSConfigurationConstants.TRUST_ANCHORS_REFRESH_PERIOD);

    es.startBackgroundTask(new TaskStatusUpdater(), 30L);

    es.startBackgroundTask(new ExpiredRequestsPurgerTask(DAOFactory.instance(), EventManager.instance()),
            VOMSConfigurationConstants.VO_MEMBERSHIP_EXPIRED_REQ_PURGER_PERIOD, 300L);

    es.startBackgroundTask(new UserStatsTask(), VOMSConfigurationConstants.MONITORING_USER_STATS_UPDATE_PERIOD,
            UserStatsTask.DEFAULT_PERIOD_IN_SECONDS);

    es.scheduleAtFixedRate(new PermissionCacheStatsLogger(true), 1, 60, TimeUnit.SECONDS);
}
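Here TimeUnit.DAYS tells the SignAUPReminderCheckTask how to interpret its reminder intervals, while the scheduling itself is delegated to the VOMS executor service. For the general pattern of running a recurring daily task, a plain ScheduledExecutorService sketch (class name and task body are illustrative, not part of VOMS):

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class DailyTaskExample {
    public static void main(String[] args) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        // Run a "reminder check" once per day, starting one day from now.
        // In a real service the scheduler lives for the life of the process.
        scheduler.scheduleAtFixedRate(
                () -> System.out.println("daily reminder check"),
                1, 1, TimeUnit.DAYS);
    }
}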