Example usage for org.joda.time DateTime plusHours

List of usage examples for org.joda.time DateTime plusHours

Introduction

On this page you can find example usages of org.joda.time DateTime plusHours.

Prototype

public DateTime plusHours(int hours) 

Document

Returns a copy of this datetime plus the specified number of hours.
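
Because DateTime is immutable, plusHours leaves the receiver untouched and returns a new instance; several usages below reassign the result for this reason. A minimal runnable sketch of that contract (class name and values are illustrative):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class PlusHoursDemo {
    public static void main(String[] args) {
        DateTime start = new DateTime(2024, 1, 1, 10, 0, DateTimeZone.UTC);
        DateTime later = start.plusHours(5);

        // The receiver is unchanged; plusHours returned a new instance.
        System.out.println(start); // 2024-01-01T10:00:00.000Z
        System.out.println(later); // 2024-01-01T15:00:00.000Z
    }
}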

Usage

From source file:niche.newres.timedevents2owl.randomizer.TimedEvents2OWLRandomizer.java

public static DateTime plusRandomHours(DateTime dateTime, int minRange, int maxRange) {
    int randomHours = TimedEvents2OWLRandomizer.randInt(minRange, maxRange);

    return dateTime.plusHours(randomHours);
}
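
For illustration, a hypothetical call; it assumes randInt returns a uniformly distributed value in [minRange, maxRange]:

DateTime base = new DateTime(2024, 1, 1, 0, 0);
// Shifts base forward by 1 to 12 hours, e.g. 2024-01-01T07:00:00.000
DateTime shifted = TimedEvents2OWLRandomizer.plusRandomHours(base, 1, 12);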

From source file:orc.lib.orchard.forms.DateTimeRangesField.java

License:Open Source License

private static Interval<DateTime> fromTimeID(final String timeID) {
    final String[] parts = timeID.split("_");
    if (parts.length != 4) {
        return new Interval<DateTime>(new DateTime());
    }
    try {
        final DateTime start = new DateTime(Integer.parseInt(parts[0]), Integer.parseInt(parts[1]),
                Integer.parseInt(parts[2]), Integer.parseInt(parts[3]), 0, 0, 0);
        final DateTime end = start.plusHours(1);
        return new Interval<DateTime>(start, end);
    } catch (final NumberFormatException nfe) {
        return new Interval<DateTime>(new DateTime());
    }
}

From source file:org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.date.BeamSqlDatetimePlusExpression.java

License:Apache License

private DateTime addInterval(DateTime dateTime, SqlTypeName intervalType, int numberOfIntervals) {
    switch (intervalType) {
    case INTERVAL_SECOND:
        return dateTime.plusSeconds(numberOfIntervals);
    case INTERVAL_MINUTE:
        return dateTime.plusMinutes(numberOfIntervals);
    case INTERVAL_HOUR:
        return dateTime.plusHours(numberOfIntervals);
    case INTERVAL_DAY:
        return dateTime.plusDays(numberOfIntervals);
    case INTERVAL_MONTH:
        return dateTime.plusMonths(numberOfIntervals);
    case INTERVAL_YEAR:
        return dateTime.plusYears(numberOfIntervals);
    default:
        throw new IllegalArgumentException("Adding " + intervalType.getName() + " to date is not supported");
    }
}
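
A hypothetical invocation, assuming Calcite's SqlTypeName enum and an instance with this method in scope:

DateTime ts = new DateTime(2024, 1, 1, 0, 0);
DateTime plus3h = addInterval(ts, SqlTypeName.INTERVAL_HOUR, 3); // 2024-01-01T03:00:00.000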

From source file:org.apache.gobblin.compaction.verify.CompactionAuditCountVerifier.java

License:Apache License

/**
 * Verify a specific dataset by following the steps below:
 *    1) Retrieve a tier-to-count mapping
 *    2) Read the count from {@link CompactionAuditCountVerifier#gobblinTier}
 *    3) Read the counts from all other {@link CompactionAuditCountVerifier#referenceTiers}
 *    4) Compare the counts from steps 2) and 3); if any (gobblin/reference) ratio >= threshold, return true, else return false
 * @param dataset Dataset to be verified
 * @return Whether verification succeeded
 */
public Result verify(FileSystemDataset dataset) {
    if (auditCountClient == null) {
        log.debug("No audit count client specified, skipped");
        return new Result(true, "");
    }

    CompactionPathParser.CompactionParserResult result = new CompactionPathParser(this.state).parse(dataset);
    DateTime startTime = result.getTime();
    DateTime endTime = startTime.plusHours(1);
    String datasetName = result.getDatasetName();
    try {
        Map<String, Long> countsByTier = auditCountClient.fetch(datasetName, startTime.getMillis(),
                endTime.getMillis());
        for (String tier : referenceTiers) {
            Result rst = passed(datasetName, countsByTier, tier);
            if (rst.isSuccessful()) {
                return new Result(true, "");
            }
        }
    } catch (IOException e) {
        return new Result(false, ExceptionUtils.getFullStackTrace(e));
    }

    return new Result(false,
            String.format("%s data is not complete between %s and %s", datasetName, startTime, endTime));
}

From source file:org.apache.pig.pen.AugmentBaseDataVisitor.java

License:Apache License

Object GetLargerValue(Object v) {
    byte type = DataType.findType(v);

    if (type == DataType.BAG || type == DataType.TUPLE || type == DataType.MAP)
        return null;

    switch (type) {
    case DataType.CHARARRAY:
        return (String) v + "0";
    case DataType.BYTEARRAY:
        String str = ((DataByteArray) v).toString();
        str = str + "0";
        return new DataByteArray(str);
    case DataType.INTEGER:
        return Integer.valueOf((Integer) v + 1);
    case DataType.LONG:
        return Long.valueOf((Long) v + 1);
    case DataType.FLOAT:
        return Float.valueOf((Float) v + 1);
    case DataType.DOUBLE:
        return Double.valueOf((Double) v + 1);
    case DataType.BIGINTEGER:
        return ((BigInteger) v).add(BigInteger.ONE);
    case DataType.BIGDECIMAL:
        return ((BigDecimal) v).add(BigDecimal.ONE);
    case DataType.DATETIME:
        DateTime dt = (DateTime) v;
        if (dt.getMillisOfSecond() != 0) {
            return dt.plusMillis(1);
        } else if (dt.getSecondOfMinute() != 0) {
            return dt.plusSeconds(1);
        } else if (dt.getMinuteOfHour() != 0) {
            return dt.plusMinutes(1);
        } else if (dt.getHourOfDay() != 0) {
            return dt.plusHours(1);
        } else {
            return dt.plusDays(1);
        }
    default:
        return null;
    }
}
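
The DATETIME branch above increments the finest-grained non-zero time field, so plusHours(1) is reached only when the hour is the smallest non-zero component. Two illustrative (hypothetical) calls:

GetLargerValue(new DateTime(2020, 5, 1, 6, 0)); // hour is the smallest non-zero field -> plusHours(1) -> 2020-05-01T07:00
GetLargerValue(new DateTime(2020, 5, 1, 0, 0)); // all time fields zero -> plusDays(1) -> 2020-05-02T00:00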

From source file:org.apereo.portal.events.aggr.PortalEventPurgerImpl.java

License:Apache License

@AggrEventsTransactional
public EventProcessingResult doPurgeRawEvents() {
    if (!this.clusterLockService.isLockOwner(PURGE_RAW_EVENTS_LOCK_NAME)) {
        throw new IllegalStateException("The cluster lock " + PURGE_RAW_EVENTS_LOCK_NAME
                + " must be owned by the current thread and server");
    }

    final IEventAggregatorStatus eventPurgerStatus = eventAggregationManagementDao
            .getEventAggregatorStatus(IEventAggregatorStatus.ProcessingType.PURGING, true);

    //Update status with current server name
    final String serverName = this.portalInfoProvider.getUniqueServerName();
    eventPurgerStatus.setServerName(serverName);
    eventPurgerStatus.setLastStart(new DateTime());

    //Determine date of most recently aggregated data
    final IEventAggregatorStatus eventAggregatorStatus = eventAggregationManagementDao
            .getEventAggregatorStatus(IEventAggregatorStatus.ProcessingType.AGGREGATION, false);
    if (eventAggregatorStatus == null || eventAggregatorStatus.getLastEventDate() == null) {
        //Nothing has been aggregated, skip purging

        eventPurgerStatus.setLastEnd(new DateTime());
        eventAggregationManagementDao.updateEventAggregatorStatus(eventPurgerStatus);

        return new EventProcessingResult(0, null, null, true);
    }
    boolean complete = true;

    //Calculate purge end date from most recent aggregation minus the purge delay
    final DateTime lastAggregated = eventAggregatorStatus.getLastEventDate();
    DateTime purgeEnd = lastAggregated.minus(this.purgeDelay);

    //Determine the DateTime of the oldest event
    DateTime oldestEventDate = eventPurgerStatus.getLastEventDate();
    if (oldestEventDate == null) {
        oldestEventDate = this.portalEventDao.getOldestPortalEventTimestamp();
    }

    //Make sure purgeEnd is no more than 1 hour after the oldest event date to limit delete scope
    final DateTime purgeEndLimit = oldestEventDate.plusHours(1);
    if (purgeEndLimit.isBefore(purgeEnd)) {
        purgeEnd = purgeEndLimit;
        complete = false;
    }

    final Thread currentThread = Thread.currentThread();
    final String currentName = currentThread.getName();
    final int events;
    try {
        currentThread.setName(currentName + "-" + purgeEnd);

        //Purge events
        logger.debug("Starting purge of events before {}", purgeEnd);
        events = portalEventDao.deletePortalEventsBefore(purgeEnd);
    } finally {
        currentThread.setName(currentName);
    }

    //Update the status object and store it
    purgeEnd = purgeEnd.minusMillis(100); //decrement by 100ms since deletePortalEventsBefore uses lessThan and not lessThanEqualTo
    eventPurgerStatus.setLastEventDate(purgeEnd);
    eventPurgerStatus.setLastEnd(new DateTime());
    eventAggregationManagementDao.updateEventAggregatorStatus(eventPurgerStatus);

    return new EventProcessingResult(events, oldestEventDate, purgeEnd, complete);
}

From source file:org.bensteele.jirrigate.Irrigator.java

License:Open Source License

/**
 * Returns the time and date the next irrigation is due based on the watering_days and
 * watering_start_time. It does not take into account whether or not any of the {@link Controller}
 * are active.
 *
 * @return The time and date of the next irrigation for any controller under this irrigator's
 * control.
 */
protected DateTime nextIrrigationAt() {
    DateTime dt = new DateTime();
    for (int i = 0; i < 7; i++) {
        for (final int dayOfWeek : wateringDays) {
            if (dayOfWeek == (dt.getDayOfWeek())) {
                // On the first pass we may match the current day; in that case we must make sure we
                // don't report a time in the past, so validate that the current hour and minute are
                // not already past the scheduled watering time. On later passes any match is fine.
                if (i != 0 || (i == 0 && dt.toLocalTime().isBefore(wateringStartTime))) {

                    // Reset the hour to 0 and increment until we match the watering hour.
                    dt = dt.withHourOfDay(0);
                    while (dt.getHourOfDay() < wateringStartTime.getHourOfDay()) {
                        dt = dt.plusHours(1);
                    }

                    // Reset the minute to 0 and increment until we match the watering minute.
                    dt = dt.withMinuteOfHour(0);
                    while (dt.getMinuteOfHour() < wateringStartTime.getMinuteOfHour()) {
                        dt = dt.plusMinutes(1);
                    }
                    return dt;
                }
            }
        }
        dt = dt.plusDays(1);
    }
    return null;
}

From source file:org.egov.eventnotification.scheduler.NotificationSchedulerJob.java

License:Open Source License

/**
 * This method builds the MessageContent object and sends it to pushbox to deliver the notification.
 * @param notificationSchedule
 * @param messageBody
 * @param seandAll
 * @param userIdList
 */
private void buildAndSendNotifications(Schedule notificationSchedule, String messageBody, Boolean seandAll,
        List<Long> userIdList) {
    User user = userService.getCurrentUser();
    DateTime calendar = new DateTime(notificationSchedule.getStartDate());
    int hours = calendar.getHourOfDay();
    int minutes = calendar.getMinuteOfHour();
    // DateTime is immutable: plusHours/plusMinutes return new instances,
    // so the results must be reassigned rather than discarded.
    calendar = calendar.plusHours(hours).plusMinutes(minutes);

    DateTime calendarEnd = new DateTime(notificationSchedule.getStartDate());
    int hours1 = calendarEnd.getHourOfDay();
    int minutes1 = calendarEnd.getMinuteOfHour();
    calendarEnd = calendarEnd.plusHours(hours1 + 1).plusMinutes(minutes1);

    MessageContent messageContent = new MessageContent();
    MessageContentDetails messageDetails = new MessageContentDetails();

    messageContent.setCreatedDateTime(new Date().getTime());
    messageDetails.setEventDateTime(calendar.getMillis());
    messageContent.setExpiryDate(calendarEnd.getMillis());
    if (messageBody == null)
        messageContent.setMessageBody(notificationSchedule.getMessageTemplate());
    else
        messageContent.setMessageBody(messageBody);
    messageContent.setModuleName(notificationSchedule.getTemplateName());
    messageContent.setNotificationDateTime(new Date().getTime());
    messageContent.setNotificationType(notificationSchedule.getDraftType().getName());
    messageDetails.setSendAll(seandAll);
    if (userIdList != null)
        messageDetails.setUserIdList(userIdList);
    messageContent.setSenderId(user.getId());
    messageContent.setSenderName(user.getName());
    messageContent.setDetails(messageDetails);
    messageContent.setCityName(ApplicationThreadLocals.getCityName());
    messageContent.setUlbCode(ApplicationThreadLocals.getCityCode());

    pushNotificationService.sendNotifications(messageContent);
}

From source file:org.egov.pgr.service.EscalationService.java

License:Open Source License

protected DateTime getExpiryDate(final Complaint complaint) {

    DateTime expiryDate = complaint.getEscalationDate();
    final Designation designation = assignmentService
            .getPrimaryAssignmentForPositon(complaint.getAssignee().getId()).getDesignation();
    final Integer noOfhrs = getHrsToResolve(designation.getId(), complaint.getComplaintType().getId());
    expiryDate = expiryDate.plusHours(noOfhrs);
    return expiryDate;
}

From source file:org.elasticsearch.xpack.ml.transforms.PainlessDomainSplitIT.java

License:Open Source License

public void testHRDSplit() throws Exception {

    // Create job
    String job = "{\n" + "      \"description\":\"Domain splitting\",\n" + "      \"analysis_config\" : {\n"
            + "          \"bucket_span\":\"3600s\",\n"
            + "          \"detectors\" :[{\"function\":\"count\", \"by_field_name\" : \"domain_split\"}]\n"
            + "      },\n" + "      \"data_description\" : {\n" + "          \"field_delimiter\":\",\",\n"
            + "          \"time_field\":\"time\"\n" + "          \n" + "      }\n" + "  }";

    client().performRequest("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job",
            Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
    client().performRequest("POST", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/_open");

    // Create index to hold data
    Settings.Builder settings = Settings.builder().put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1)
            .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0);

    createIndex("painless", settings.build(),
            "\"test\": { \"properties\": { \"domain\": { \"type\": \"keyword\" },"
                    + "\"time\": { \"type\": \"date\" } } }");

    // Index some data
    DateTime baseTime = new DateTime().minusYears(1);
    TestConfiguration test = tests.get(randomInt(tests.size() - 1));

    // domainSplit() tests had a subdomain, testHighestRegisteredDomainCases() did not, so we need a special case for the subdomain
    String expectedSub = test.subDomainExpected == null ? ".*" : test.subDomainExpected.replace(".", "\\.");
    String expectedHRD = test.domainExpected.replace(".", "\\.");
    Pattern pattern = Pattern
            .compile("domain_split\":\\[\"(" + expectedSub + "),(" + expectedHRD + ")\"[,\\]]");

    for (int i = 0; i < 100; i++) {

        DateTime time = baseTime.plusHours(i);
        if (i == 64) {
            // Anomaly has 100 docs, but we don't care about the value
            for (int j = 0; j < 100; j++) {
                client().performRequest("PUT", "painless/test/" + time.toDateTimeISO() + "_" + j,
                        Collections.emptyMap(),
                        new StringEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \""
                                + time.toDateTimeISO() + "\"}", ContentType.APPLICATION_JSON));
            }
        } else {
            // Non-anomalous values will be what's seen when the anomaly is reported
            client().performRequest("PUT", "painless/test/" + time.toDateTimeISO(), Collections.emptyMap(),
                    new StringEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \""
                            + time.toDateTimeISO() + "\"}", ContentType.APPLICATION_JSON));
        }
    }

    client().performRequest("POST", "painless/_refresh");

    // Create and start datafeed
    String body = "{\n" + "         \"job_id\":\"hrd-split-job\",\n" + "         \"indexes\":[\"painless\"],\n"
            + "         \"types\":[\"test\"],\n" + "         \"script_fields\": {\n"
            + "            \"domain_split\": {\n"
            + "               \"script\": \"return domainSplit(doc['domain'].value, params);\"\n"
            + "            }\n" + "         }\n" + "      }";

    client().performRequest("PUT", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed",
            Collections.emptyMap(), new StringEntity(body, ContentType.APPLICATION_JSON));
    client().performRequest("POST", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed/_start");

    boolean passed = awaitBusy(() -> {
        try {
            client().performRequest("POST", "/_refresh");

            Response response = client().performRequest("GET",
                    MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/results/records");
            String responseBody = EntityUtils.toString(response.getEntity());

            if (responseBody.contains("\"count\":2")) {
                Matcher m = pattern.matcher(responseBody);

                String actualSubDomain = "";
                String actualDomain = "";
                if (m.find()) {
                    actualSubDomain = m.group(1).replace("\"", "");
                    actualDomain = m.group(2).replace("\"", "");
                }

                String expectedTotal = "[" + test.subDomainExpected + "," + test.domainExpected + "]";
                String actualTotal = "[" + actualSubDomain + "," + actualDomain + "]";

                // domainSplit() tests had a subdomain, testHighestRegisteredDomainCases() did not
                if (test.subDomainExpected != null) {
                    assertThat(
                            "Expected subdomain [" + test.subDomainExpected + "] but found [" + actualSubDomain
                                    + "]. Actual " + actualTotal + " vs Expected " + expectedTotal,
                            actualSubDomain, equalTo(test.subDomainExpected));
                }

                assertThat(
                        "Expected domain [" + test.domainExpected + "] but found [" + actualDomain
                                + "].  Actual " + actualTotal + " vs Expected " + expectedTotal,
                        actualDomain, equalTo(test.domainExpected));

                return true;
            } else {
                logger.error(responseBody);
                return false;
            }

        } catch (Exception e) {
            logger.error(e.getMessage());
            return false;
        }

    }, 5, TimeUnit.SECONDS);

    if (!passed) {
        fail("Anomaly records were not found within 5 seconds");
    }
}