Example usage for java.util.concurrent TimeUnit HOURS

List of usage examples for java.util.concurrent TimeUnit HOURS

Introduction

In this page you can find the example usage for java.util.concurrent TimeUnit HOURS.

Prototype

TimeUnit HOURS

To view the source code for java.util.concurrent TimeUnit HOURS.

Click Source Link

Document

Time unit representing sixty minutes.

Usage

From source file:com.linkedin.pinot.common.metadata.SegmentZKMetadataTest.java

private RealtimeSegmentZKMetadata getTestDoneRealtimeSegmentMetadata() {
    // Builds a fully populated realtime segment metadata fixture in DONE state
    // for use as a known-good baseline in the ZK metadata tests.
    RealtimeSegmentZKMetadata metadata = new RealtimeSegmentZKMetadata();
    metadata.setSegmentName("testTable_R_1000_2000_groupId0_part0");
    metadata.setTableName("testTable");
    metadata.setSegmentType(SegmentType.REALTIME);
    metadata.setIndexVersion("v1");
    metadata.setStartTime(1000);
    metadata.setEndTime(2000);
    metadata.setTimeUnit(TimeUnit.HOURS);
    metadata.setStatus(Status.DONE);
    metadata.setTotalRawDocs(10000);
    metadata.setCrc(1234);
    metadata.setCreationTime(3000);
    metadata.setSizeThresholdToFlushSegment(1234);
    return metadata;
}

From source file:io.druid.segment.realtime.plumber.RealtimePlumberSchoolTest.java

private void testPersist(final Object commitMetadata) throws Exception {
    // Verifies that persisting a sink eventually runs the supplied Committer,
    // passing the given commit metadata through unchanged.
    final AtomicBoolean committed = new AtomicBoolean(false);
    // Register a sink under key 0 covering the first hour of the epoch,
    // so a row with timestamp 0 falls inside its interval.
    plumber.getSinks().put(0L, new Sink(new Interval(0, TimeUnit.HOURS.toMillis(1)), schema, tuningConfig,
            new DateTime("2014-12-01T12:34:56.789").toString()));
    Assert.assertNull(plumber.startJob());

    // Minimal row stub: epoch timestamp 0 and no dimensions. replay() must
    // come after all expect() calls, before the row is handed to the plumber.
    final InputRow row = EasyMock.createNiceMock(InputRow.class);
    EasyMock.expect(row.getTimestampFromEpoch()).andReturn(0L);
    EasyMock.expect(row.getDimensions()).andReturn(new ArrayList<String>());
    EasyMock.replay(row);
    final Committer committer = new Committer() {
        @Override
        public Object getMetadata() {
            return commitMetadata;
        }

        @Override
        public void run() {
            // Invoked by the persist machinery once the commit is durable.
            committed.set(true);
        }
    };
    plumber.add(row, Suppliers.ofInstance(committer));
    plumber.persist(committer);

    // Persist is asynchronous; poll until the committer has fired.
    // NOTE(review): there is no timeout here, so a persist failure hangs the test.
    while (!committed.get()) {
        Thread.sleep(100);
    }
    plumber.getSinks().clear();
    plumber.finishJob();
}

From source file:org.hawkular.metrics.generator.DataGenerator.java

/**
 * Converts a duration given as a value plus a unit suffix into milliseconds.
 *
 * @param value duration magnitude
 * @param units one of "s" (seconds), "m" (minutes), "h" (hours) or "d" (days)
 * @return the duration in milliseconds
 * @throws IllegalArgumentException if {@code units} is not one of the suffixes above
 */
private long convertDurationToMillis(long value, String units) {
    // TimeUnit.X.toMillis(v) is the direct, idiomatic form of
    // TimeUnit.MILLISECONDS.convert(v, TimeUnit.X).
    switch (units) {
    case "s":
        return TimeUnit.SECONDS.toMillis(value);
    case "m":
        return TimeUnit.MINUTES.toMillis(value);
    case "h":
        return TimeUnit.HOURS.toMillis(value);
    case "d":
        return TimeUnit.DAYS.toMillis(value);
    default:
        throw new IllegalArgumentException(units + " is an invalid time unit");
    }
}

From source file:com.streamreduce.core.service.ConnectionServiceGithubEndToEndIT.java

private void pollForProjectHostingActivity(Connection connection) throws Exception {
    // Rewind the connection's last poll marker to 12 hours ago so the next
    // poll picks up recent activity, then trigger a poll and let it run.
    final long twelveHoursAgo = System.currentTimeMillis() - TimeUnit.HOURS.toMillis(12);
    connection.getMetadata().put("last_activity_poll", Long.toString(twelveHoursAgo));

    connectionService.fireOneTimeHighPriorityJobForConnection(connection);
    // Give the fired job time to complete before the caller inspects results.
    Thread.sleep(TimeUnit.SECONDS.toMillis(20));
}

From source file:com.uber.stream.kafka.chaperone.collector.reporter.DbAuditReporter.java

// Wires up the DB-backed audit reporter: JDBC connection pool, metrics,
// self-resetting gauges, the cron executor, and (if missing) the audit tables.
private DbAuditReporter(int queueSize, long timeBucketIntervalInSec, int reportFreqMsgCount,
        int reportFreqIntervalSec, boolean combineMetricsAmongHosts, String dbUser, String dbPass, String dbUrl,
        String dataTableName, String offsetTableName, int dbRetentionInHr, boolean enableRemoveOldRecord) {
    super(queueSize, timeBucketIntervalInSec, reportFreqMsgCount, reportFreqIntervalSec,
            combineMetricsAmongHosts);

    // MySQL connection pool used for all audit-table access.
    ds = new BasicDataSource();
    ds.setDriverClassName("com.mysql.jdbc.Driver");
    ds.setUsername(dbUser);
    ds.setPassword(dbPass);
    ds.setUrl(dbUrl);

    // Meters/timer tracking row churn and DB report latency, namespaced by reporter type.
    REMOVED_RECORDS_COUNTER = Metrics.getRegistry().meter(getType() + ".auditReporter.removedRecordsNumber");
    INSERTED_RECORDS_COUNTER = Metrics.getRegistry().meter(getType() + ".auditReporter.insertedRecordsNumber");
    UPDATED_RECORDS_COUNTER = Metrics.getRegistry().meter(getType() + ".auditReporter.updatedRecordsNumber");
    FAILED_TO_REMOVE_COUNTER = Metrics.getRegistry().meter(getType() + ".auditReporter.failedToRemoveNumber");
    DB_REPORT_LATENCY_TIMER = Metrics.getRegistry().timer(getType() + ".auditReporter.dbReportLatencyMs");

    // Gauge reporting the latest timestamp seen by the last insert.
    // Read-and-reset: each poll returns the value, then clears it to 0.
    Metrics.getRegistry().register(getType() + ".auditReporter.latestTSSeenLastInsert", new Gauge<Long>() {
        @Override
        public Long getValue() {
            long ret = latestTSSeenLastInsert;
            latestTSSeenLastInsert = 0;
            return ret;
        }
    });
    // Gauge for the earliest timestamp seen; resets to "now" so the next
    // window's minimum can only move the value downward.
    Metrics.getRegistry().register(getType() + ".auditReporter.earliestTSSeenLastInsert", new Gauge<Long>() {
        @Override
        public Long getValue() {
            long ret = earliestTSSeenLastInsert;
            earliestTSSeenLastInsert = System.currentTimeMillis();
            return ret;
        }
    });

    // Single-threaded scheduler for periodic maintenance work, with a named thread.
    cronExecutor = Executors.newSingleThreadScheduledExecutor(
            new ThreadFactoryBuilder().setNameFormat(getType() + "-cron-executor-%d").build());

    // Retention horizon converted once to milliseconds for later comparisons.
    auditDbRetentionMs = TimeUnit.HOURS.toMillis(dbRetentionInHr);
    this.dataTableName = dataTableName;
    this.offsetTableName = offsetTableName;
    this.enableRemoveOldRecord = enableRemoveOldRecord;

    logger.info("Try to create dataTable={} and offsetTable={}", dataTableName, offsetTableName);
    maybeCreateTable(CREATE_DATA_TABLE_SQL, dataTableName);
    maybeCreateTable(CREATE_OFFSET_TABLE_SQL, offsetTableName);
}

From source file:com.linkedin.pinot.core.data.readers.BackfillDateTimeRecordReaderTest.java

/**
 * Supplies {recordReader, timeFieldSpec, dateTimeFieldSpec, schema} tuples for the
 * backfill record reader tests, covering conversions between epoch time specs
 * (hours/millis) and various target date-time formats, plus override/add cases.
 */
@DataProvider(name = "backfillRecordReaderDataProvider")
public Object[][] getDataForTestBackfillRecordReader() throws Exception {
    List<Object[]> entries = new ArrayList<>();

    // Locals are deliberately reused across the scenario sections below;
    // each section fully reassigns everything it adds to `entries`.
    List<GenericRow> inputData;
    Schema inputSchema;
    RecordReader inputRecordReader;
    TimeFieldSpec timeFieldSpec;
    DateTimeFieldSpec dateTimeFieldSpec;
    Schema wrapperSchema;

    // timeSpec in hoursSinceEpoch, generate dateTimeFieldSpec in millisSinceEpoch
    timeFieldSpec = new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.HOURS, "Date"));
    inputData = createTestDataWithTimespec(timeFieldSpec);
    inputSchema = createPinotSchemaWithTimeSpec(timeFieldSpec);
    inputRecordReader = new GenericRowRecordReader(inputData, inputSchema);
    dateTimeFieldSpec = new DateTimeFieldSpec("timestampInEpoch", DataType.LONG, "1:MILLISECONDS:EPOCH",
            "1:HOURS");
    wrapperSchema = createPinotSchemaWrapperWithDateTimeSpec(inputSchema, dateTimeFieldSpec);
    entries.add(new Object[] { inputRecordReader, timeFieldSpec, dateTimeFieldSpec, wrapperSchema });

    // timeSpec in hoursSinceEpoch, generate dateTimeFieldSpec in sdf day
    timeFieldSpec = new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.HOURS, "Date"));
    inputData = createTestDataWithTimespec(timeFieldSpec);
    inputSchema = createPinotSchemaWithTimeSpec(timeFieldSpec);
    inputRecordReader = new GenericRowRecordReader(inputData, inputSchema);
    dateTimeFieldSpec = new DateTimeFieldSpec("timestampInEpoch", DataType.LONG,
            "1:DAYS:SIMPLE_DATE_FORMAT:yyyyMMdd", "1:HOURS");
    wrapperSchema = createPinotSchemaWrapperWithDateTimeSpec(inputSchema, dateTimeFieldSpec);
    entries.add(new Object[] { inputRecordReader, timeFieldSpec, dateTimeFieldSpec, wrapperSchema });

    // timeSpec in hoursSinceEpoch, generate dateTimeFieldSpec in hoursSinceEpoch
    timeFieldSpec = new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.HOURS, "Date"));
    inputData = createTestDataWithTimespec(timeFieldSpec);
    inputSchema = createPinotSchemaWithTimeSpec(timeFieldSpec);
    inputRecordReader = new GenericRowRecordReader(inputData, inputSchema);
    dateTimeFieldSpec = new DateTimeFieldSpec("timestampInEpoch", DataType.LONG, "1:HOURS:EPOCH", "1:HOURS");
    wrapperSchema = createPinotSchemaWrapperWithDateTimeSpec(inputSchema, dateTimeFieldSpec);
    entries.add(new Object[] { inputRecordReader, timeFieldSpec, dateTimeFieldSpec, wrapperSchema });

    // timeSpec in millisSinceEpoch, generate dateTimeFieldSpec in 5 minutesSinceEpoch
    timeFieldSpec = new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.MILLISECONDS, "Date"));
    inputData = createTestDataWithTimespec(timeFieldSpec);
    inputSchema = createPinotSchemaWithTimeSpec(timeFieldSpec);
    inputRecordReader = new GenericRowRecordReader(inputData, inputSchema);
    dateTimeFieldSpec = new DateTimeFieldSpec("timestampInEpoch", DataType.LONG, "5:MILLISECONDS:EPOCH",
            "1:HOURS");
    wrapperSchema = createPinotSchemaWrapperWithDateTimeSpec(inputSchema, dateTimeFieldSpec);
    entries.add(new Object[] { inputRecordReader, timeFieldSpec, dateTimeFieldSpec, wrapperSchema });

    // timeSpec in hoursSinceEpoch, dateTimeFieldSpec in millisSinceEpoch, override dateTimeFieldSpec in millisSinceEpoch
    timeFieldSpec = new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.HOURS, "Date"));
    dateTimeFieldSpec = new DateTimeFieldSpec("timestampInEpoch", DataType.LONG, "1:MILLISECONDS:EPOCH",
            "1:HOURS");
    inputData = createTestDataWithTimespec(timeFieldSpec, dateTimeFieldSpec);
    inputSchema = createPinotSchemaWithTimeSpec(timeFieldSpec, dateTimeFieldSpec);
    inputRecordReader = new GenericRowRecordReader(inputData, inputSchema);
    entries.add(new Object[] { inputRecordReader, timeFieldSpec, dateTimeFieldSpec, inputSchema });

    // timeSpec in hoursSinceEpoch, dateTimeFieldSpec in hoursSinceEpoch, override dateTimeFieldSpec in millisSinceEpoch
    timeFieldSpec = new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.HOURS, "Date"));
    dateTimeFieldSpec = new DateTimeFieldSpec("timestampInEpoch", DataType.LONG, "1:MILLISECONDS:EPOCH",
            "1:HOURS");
    inputData = createTestDataWithTimespec(timeFieldSpec, dateTimeFieldSpec);
    inputSchema = createPinotSchemaWithTimeSpec(timeFieldSpec, dateTimeFieldSpec);
    inputRecordReader = new GenericRowRecordReader(inputData, inputSchema);
    dateTimeFieldSpec = new DateTimeFieldSpec("timestampInEpoch", DataType.LONG, "1:MILLISECONDS:EPOCH",
            "1:HOURS");
    wrapperSchema = createPinotSchemaWithTimeSpec(timeFieldSpec, dateTimeFieldSpec);
    entries.add(new Object[] { inputRecordReader, timeFieldSpec, dateTimeFieldSpec, wrapperSchema });

    // timeSpec in hoursSinceEpoch, dateTimeFieldSpec in hoursSinceEpoch, add new dateTimeFieldSpec in millisSinceEpoch
    timeFieldSpec = new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.HOURS, "Date"));
    dateTimeFieldSpec = new DateTimeFieldSpec("hoursSinceEpoch", DataType.LONG, "1:HOURS:EPOCH", "1:HOURS");
    inputData = createTestDataWithTimespec(timeFieldSpec, dateTimeFieldSpec);
    inputSchema = createPinotSchemaWithTimeSpec(timeFieldSpec, dateTimeFieldSpec);
    inputRecordReader = new GenericRowRecordReader(inputData, inputSchema);
    DateTimeFieldSpec dateTimeFieldSpecNew = new DateTimeFieldSpec("timestampInEpoch", DataType.LONG,
            "1:MILLISECONDS:EPOCH", "1:HOURS");
    wrapperSchema = createPinotSchemaWithTimeSpec(timeFieldSpec, dateTimeFieldSpec);
    wrapperSchema = createPinotSchemaWrapperWithDateTimeSpec(wrapperSchema, dateTimeFieldSpecNew);
    entries.add(new Object[] { inputRecordReader, timeFieldSpec, dateTimeFieldSpecNew, wrapperSchema });

    return entries.toArray(new Object[entries.size()][]);
}

From source file:contestWebsite.AdminPanel.java

@Override
public void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    // Renders the admin panel for logged-in administrators; all other users
    // receive 403 Forbidden.
    VelocityEngine ve = new VelocityEngine();
    ve.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH, "html/pages, html/snippets");
    ve.init();
    VelocityContext context = new VelocityContext();
    Pair<Entity, UserCookie> infoAndCookie = init(context, req);

    UserCookie userCookie = infoAndCookie.y;
    boolean loggedIn = (boolean) context.get("loggedIn");

    String updated = req.getParameter("updated");
    if (updated != null && updated.equals("1") && !loggedIn) {
        resp.sendRedirect("/adminPanel?updated=1");
        // Fix: sendRedirect commits the response. Without this return the
        // method fell through and (since loggedIn is false here) also called
        // sendError on the already-committed response.
        return;
    }
    context.put("updated", req.getParameter("updated"));

    if (loggedIn && userCookie.isAdmin()) {
        Entity contestInfo = infoAndCookie.x;
        context.put("contestInfo", contestInfo);

        // Surface one-shot error flags from the query string as user-facing messages.
        String confPassError = req.getParameter("confPassError");
        context.put("confPassError",
                confPassError != null && confPassError.equals("1") ? "Those passwords didn't match, try again."
                        : null);
        String passError = req.getParameter("passError");
        context.put("passError",
                passError != null && passError.equals("1") ? "That password is incorrect, try again." : null);

        context.put("middleSubjects", Test.getTests(Level.MIDDLE));
        context.put("Level", Level.class);
        context.put("subjects", Subject.values());

        // Load each default email template, preferring a datastore override
        // when one exists, else falling back to the bundled HTML resource.
        String[] defaultEmails = { "forgotPass", "question", "registration" };
        for (String defaultEmail : defaultEmails) {
            String email;
            if (contestInfo.hasProperty(defaultEmail + "Email")) {
                email = ((Text) contestInfo.getProperty(defaultEmail + "Email")).getValue();
            } else {
                InputStream emailStream = getServletContext()
                        .getResourceAsStream("/html/email/" + defaultEmail + ".html");
                email = CharStreams.toString(new InputStreamReader(emailStream, Charsets.UTF_8));
                emailStream.close();
            }
            context.put(defaultEmail + "Email", email);
        }

        try {
            context.put("awardCriteria", Retrieve.awardCriteria(contestInfo));
            context.put("qualifyingCriteria", Retrieve.qualifyingCriteria(contestInfo));
            context.put("clientId", contestInfo.getProperty("OAuth2ClientId"));
        } catch (Exception e) {
            // Best-effort: the panel still renders without these optional values.
            System.err.println("Suppressing exception while loading admin panel");
            e.printStackTrace();
        }

        SimpleDateFormat dateFormat = new SimpleDateFormat("MM/dd/yyyy");
        dateFormat.setTimeZone(TimeZone.getTimeZone("GMT+6"));

        // Registration-edit window: closed when "now" lies outside
        // [editStartDate, editEndDate] or either date is missing/unparseable.
        try {
            Date endDate = dateFormat.parse((String) contestInfo.getProperty("editEndDate"));
            Date startDate = dateFormat.parse((String) contestInfo.getProperty("editStartDate"));
            if (new Date().after(endDate) || new Date().before(startDate)) {
                context.put("regEditClosed", true);
            }
        } catch (Exception e) {
            context.put("regEditClosed", true);
        }

        // Registration window: same rules as above, using startDate/endDate.
        try {
            Date endDate = dateFormat.parse((String) contestInfo.getProperty("endDate"));
            Date startDate = dateFormat.parse((String) contestInfo.getProperty("startDate"));
            if (new Date().after(endDate) || new Date().before(startDate)) {
                context.put("regClosed", true);
            }
        } catch (Exception e) {
            context.put("regClosed", true);
        }

        // Render how long ago the last tabulation task ran, expressed in the
        // largest whole units available ("2 days, 3 hours, ...").
        MemcacheService memCache = MemcacheServiceFactory.getMemcacheService();
        memCache.setErrorHandler(ErrorHandlers.getConsistentLogAndContinue(java.util.logging.Level.INFO));
        byte[] tabulationTaskStatusBytes = (byte[]) memCache.get("tabulationTaskStatus");
        if (tabulationTaskStatusBytes != null) {
            // Cached value format: "<status>_<epochMillis>".
            String[] tabulationTaskStatus = new String(tabulationTaskStatusBytes).split("_");
            context.put("tabulationTaskStatus", tabulationTaskStatus[0]);
            List<String> tabulationTaskStatusTime = new ArrayList<String>();
            long timeAgo = new Date().getTime() - new Date(Long.parseLong(tabulationTaskStatus[1])).getTime();
            // Plain list instead of double-brace initialization: the anonymous
            // subclass held a hidden reference to the enclosing servlet.
            List<Pair<TimeUnit, String>> timeUnits = new ArrayList<Pair<TimeUnit, String>>();
            timeUnits.add(new Pair<TimeUnit, String>(TimeUnit.DAYS, "day"));
            timeUnits.add(new Pair<TimeUnit, String>(TimeUnit.HOURS, "hour"));
            timeUnits.add(new Pair<TimeUnit, String>(TimeUnit.MINUTES, "minute"));
            timeUnits.add(new Pair<TimeUnit, String>(TimeUnit.SECONDS, "second"));
            for (Pair<TimeUnit, String> entry : timeUnits) {
                // Convert once (the original computed this twice per unit).
                long numUnit = entry.getX().convert(timeAgo, TimeUnit.MILLISECONDS);
                if (numUnit > 0) {
                    tabulationTaskStatusTime.add(numUnit + " " + entry.getY() + (numUnit == 1 ? "" : "s"));
                    timeAgo -= TimeUnit.MILLISECONDS.convert(numUnit, entry.getX());
                }
            }
            if (tabulationTaskStatusTime.size() >= 1) {
                context.put("tabulationTaskStatusTime", StringUtils.join(tabulationTaskStatusTime, ", "));
            } else {
                context.put("tabulationTaskStatusTime", timeAgo + " milliseconds");
            }
        }

        close(context, ve.getTemplate("adminPanel.html"), resp);
    } else {
        resp.sendError(HttpServletResponse.SC_FORBIDDEN,
                "Contest Administrator privileges required for that operation");
    }
}

From source file:com.codebullets.sagalib.timeout.InMemoryTimeoutManagerTest.java

private void requestAndTriggerTimeout() {
    // Convenience overload: delegates to the full version with two random
    // 10-character identifiers, a 10 hour timeout, and no payload.
    final String firstId = RandomStringUtils.randomAlphanumeric(10);
    final String secondId = RandomStringUtils.randomAlphanumeric(10);
    requestAndTriggerTimeout(firstId, secondId, 10, TimeUnit.HOURS, null);
}

From source file:com.netflix.genie.client.JobClientIntegrationTests.java

/**
 * Method to test submitting/killing a job.
 *
 * @throws Exception If there is any problem.
 */
@Test
public void submitAndKillJob() throws Exception {

    createClusterAndCommandForTest();

    final String jobId = UUID.randomUUID().toString();

    final List<ClusterCriteria> clusterCriteriaList = Lists
            .newArrayList(new ClusterCriteria(Sets.newHashSet("laptop")));

    final Set<String> commandCriteria = Sets.newHashSet("bash");

    // Job dependencies and setup file come from test resources on disk.
    final String depFile1 = this.resourceLoader.getResource("/dep1").getFile().getAbsolutePath();
    final Set<String> dependencies = Sets.newHashSet(depFile1);

    final String setUpFile = this.resourceLoader.getResource("/setupfile").getFile().getAbsolutePath();

    final JobRequest jobRequest = new JobRequest.Builder(JOB_NAME, JOB_USER, JOB_VERSION,
            "-c 'echo HELLO WORLD!!!'", clusterCriteriaList, commandCriteria).withId(jobId)
                    .withDisableLogArchival(true).withSetupFile(setUpFile).withDependencies(dependencies)
                    .withDescription(JOB_DESCRIPTION).build();

    // Two concurrent tasks: one submits the job (blocking), the other spins
    // until the job is visible and then kills it.
    final ExecutorService executors = Executors.newFixedThreadPool(2);
    final Future<String> jobFuture;
    try {
        jobFuture = executors.submit(() -> jobClient.submitJob(jobRequest));
        executors.submit(() -> {
            boolean result = true;
            while (result) {
                try {
                    // getJob throws until the submission is registered; keep
                    // retrying, then kill once the job exists.
                    jobClient.getJob(jobId);
                    jobClient.killJob(jobId);
                    Thread.sleep(1000);
                    result = false;
                } catch (Exception ignored) {
                    // Job not visible yet (or kill raced with registration); retry.
                    result = true;
                }
            }
        });
    } finally {
        // Wait (effectively unbounded) for both tasks to finish before asserting.
        executors.shutdown();
        executors.awaitTermination(Integer.MAX_VALUE, TimeUnit.HOURS);
    }
    final Job job = jobClient.getJob(jobId);
    Assert.assertEquals(jobId, jobFuture.get());
    Assert.assertEquals(JobStatus.KILLED, job.getStatus());
}

From source file:org.wso2.carbon.identity.account.suspension.notification.task.handler.AccountSuspensionNotificationHandler.java

// Schedules the periodic account-suspension notification task so that it first
// fires at the configured trigger time of day (default 20:00:00) and repeats
// at the configured interval.
private void startScheduler() {

    // Feature toggle: bail out entirely when suspension notifications are disabled.
    if (!Boolean.parseBoolean(
            configs.getModuleProperties().getProperty(NotificationConstants.SUSPENSION_NOTIFICATION_ENABLED))) {
        return;
    }

    Date notificationTriggerTime = null;
    String notificationTriggerTimeProperty = configs.getModuleProperties()
            .getProperty(NotificationConstants.SUSPENSION_NOTIFICATION_TRIGGER_TIME);

    DateFormat dateFormat = new SimpleDateFormat(NotificationConstants.TRIGGER_TIME_FORMAT);

    if (notificationTriggerTimeProperty != null) {
        try {
            notificationTriggerTime = dateFormat.parse(notificationTriggerTimeProperty);
        } catch (ParseException e) {
            // Fall through with null trigger time; the 20:00:00 default applies below.
            log.error("Invalid Date format for Notification trigger time", e);
        }
    }

    // Repeat interval in seconds. NOTE(review): the day-wraparound below only
    // works if SCHEDULER_DELAY is 24 (hours) — confirm the constant's value.
    long schedulerDelayInSeconds = TimeUnit.HOURS.toSeconds(NotificationConstants.SCHEDULER_DELAY);

    Calendar currentTime = Calendar.getInstance();
    Calendar triggerTime = Calendar.getInstance();
    // If notificationTriggerTimeProperty is not found or not in right format default to 20:00:00.
    // In Calender.HOUR_OF_DAY (i.e. in 24-hour clock) it is 20.
    if (notificationTriggerTime != null) {
        triggerTime.setTime(notificationTriggerTime);
    } else {
        triggerTime.set(Calendar.HOUR_OF_DAY, 20);
        triggerTime.set(Calendar.MINUTE, 0);
        triggerTime.set(Calendar.SECOND, 0);
    }

    // Convert times into seconds
    long currentSecond = (currentTime.get(Calendar.HOUR_OF_DAY) * 3600) + currentTime.get(Calendar.MINUTE) * 60
            + currentTime.get(Calendar.SECOND);
    long triggerSecond = (triggerTime.get(Calendar.HOUR_OF_DAY) * 3600) + triggerTime.get(Calendar.MINUTE) * 60
            + triggerTime.get(Calendar.SECOND);
    long delay = triggerSecond - currentSecond;
    // If the notification time has passed, schedule the next day
    if (delay < 0) {
        delay += schedulerDelayInSeconds;
    }

    // NOTE(review): the scheduler is never shut down here; presumably it lives
    // for the lifetime of the component — verify there is a shutdown hook.
    ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(
            NotificationTaskDataHolder.getInstance().getNotificationSendingThreadPoolSize());
    scheduler.scheduleAtFixedRate(new AccountValidatorThread(), delay, schedulerDelayInSeconds,
            TimeUnit.SECONDS);
}