Example usage for org.joda.time Period Period

List of usage examples for org.joda.time Period Period

Introduction

In this page you can find the example usage for org.joda.time Period Period.

Prototype

public Period() 

Source Link

Document

Creates a new empty period with the standard set of fields.

Usage

From source file:org.apache.drill.exec.vector.DateUtilities.java

License:Apache License

/**
 * Converts a year-interval value (a total month count) into a Joda {@code Period}
 * split into whole years plus leftover months.
 */
public static Period fromIntervalYear(int value) {
    final int wholeYears = value / yearsToMonths;
    final int remainingMonths = value % yearsToMonths;
    return new Period().plusYears(wholeYears).plusMonths(remainingMonths);
}

From source file:org.apache.drill.exec.vector.DateUtilities.java

License:Apache License

/**
 * Builds a day-based interval {@code Period} carrying the given day and
 * millisecond components.
 */
public static Period fromIntervalDay(int days, int millis) {
    final Period base = new Period();
    return base.plusDays(days).plusMillis(millis);
}

From source file:org.apache.drill.exec.vector.DateUtilities.java

License:Apache License

/**
 * Builds an interval {@code Period} from separate month, day, and millisecond
 * components.
 */
public static Period fromInterval(int months, int days, int millis) {
    final Period base = new Period();
    return base.plusMonths(months).plusDays(days).plusMillis(millis);
}

From source file:org.apache.druid.indexing.kafka.KafkaTuningConfig.java

License:Apache License

/**
 * Tuning configuration for Kafka indexing tasks, bound by Jackson.
 * Every parameter is nullable; null values fall back to defaults, most of
 * which are taken from {@code RealtimeTuningConfig}.
 */
@JsonCreator
public KafkaTuningConfig(@JsonProperty("maxRowsInMemory") @Nullable Integer maxRowsInMemory,
        @JsonProperty("maxBytesInMemory") @Nullable Long maxBytesInMemory,
        @JsonProperty("maxRowsPerSegment") @Nullable Integer maxRowsPerSegment,
        @JsonProperty("maxTotalRows") @Nullable Long maxTotalRows,
        @JsonProperty("intermediatePersistPeriod") @Nullable Period intermediatePersistPeriod,
        @JsonProperty("basePersistDirectory") @Nullable File basePersistDirectory,
        @JsonProperty("maxPendingPersists") @Nullable Integer maxPendingPersists,
        @JsonProperty("indexSpec") @Nullable IndexSpec indexSpec,
        // This parameter is left for compatibility when reading existing configs, to be removed in Druid 0.12.
        @JsonProperty("buildV9Directly") @Nullable Boolean buildV9Directly,
        @Deprecated @JsonProperty("reportParseExceptions") @Nullable Boolean reportParseExceptions,
        @JsonProperty("handoffConditionTimeout") @Nullable Long handoffConditionTimeout,
        @JsonProperty("resetOffsetAutomatically") @Nullable Boolean resetOffsetAutomatically,
        @JsonProperty("segmentWriteOutMediumFactory") @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory,
        @JsonProperty("intermediateHandoffPeriod") @Nullable Period intermediateHandoffPeriod,
        @JsonProperty("logParseExceptions") @Nullable Boolean logParseExceptions,
        @JsonProperty("maxParseExceptions") @Nullable Integer maxParseExceptions,
        @JsonProperty("maxSavedParseExceptions") @Nullable Integer maxSavedParseExceptions) {
    // Cannot be a static because default basePersistDirectory is unique per-instance
    final RealtimeTuningConfig defaults = RealtimeTuningConfig.makeDefaultTuningConfig(basePersistDirectory);

    this.maxRowsInMemory = maxRowsInMemory == null ? defaults.getMaxRowsInMemory() : maxRowsInMemory;
    this.maxRowsPerSegment = maxRowsPerSegment == null ? DEFAULT_MAX_ROWS_PER_SEGMENT : maxRowsPerSegment;
    // initializing this to 0, it will be lazily initialized to a value
    // @see server.src.main.java.org.apache.druid.segment.indexing.TuningConfigs#getMaxBytesInMemoryOrDefault(long)
    this.maxBytesInMemory = maxBytesInMemory == null ? 0 : maxBytesInMemory;
    this.maxTotalRows = maxTotalRows;
    this.intermediatePersistPeriod = intermediatePersistPeriod == null ? defaults.getIntermediatePersistPeriod()
            : intermediatePersistPeriod;
    this.basePersistDirectory = defaults.getBasePersistDirectory();
    // NOTE(review): the maxPendingPersists argument is ignored and the field is
    // hard-coded to 0 — confirm this is intentional.
    this.maxPendingPersists = 0;
    this.indexSpec = indexSpec == null ? defaults.getIndexSpec() : indexSpec;
    this.reportParseExceptions = reportParseExceptions == null ? defaults.isReportParseExceptions()
            : reportParseExceptions;
    this.handoffConditionTimeout = handoffConditionTimeout == null ? defaults.getHandoffConditionTimeout()
            : handoffConditionTimeout;
    this.resetOffsetAutomatically = resetOffsetAutomatically == null ? DEFAULT_RESET_OFFSET_AUTOMATICALLY
            : resetOffsetAutomatically;
    this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory;
    // Default of Integer.MAX_VALUE days effectively means "never hand off early".
    this.intermediateHandoffPeriod = intermediateHandoffPeriod == null
            ? new Period().withDays(Integer.MAX_VALUE)
            : intermediateHandoffPeriod;

    // Legacy strict mode: when parse exceptions are reported, no unparseable
    // rows are tolerated (maxParseExceptions forced to 0) and at most one
    // saved parse exception is kept.
    if (this.reportParseExceptions) {
        this.maxParseExceptions = 0;
        this.maxSavedParseExceptions = maxSavedParseExceptions == null ? 0
                : Math.min(1, maxSavedParseExceptions);
    } else {
        this.maxParseExceptions = maxParseExceptions == null ? TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS
                : maxParseExceptions;
        this.maxSavedParseExceptions = maxSavedParseExceptions == null
                ? TuningConfig.DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
                : maxSavedParseExceptions;
    }
    this.logParseExceptions = logParseExceptions == null ? TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS
            : logParseExceptions;
}

From source file:org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskTuningConfig.java

License:Apache License

/**
 * Base tuning configuration for seekable-stream (e.g. Kafka/Kinesis) indexing
 * tasks. Every parameter is nullable; null values fall back to defaults, most
 * of which are taken from {@code RealtimeTuningConfig}.
 */
public SeekableStreamIndexTaskTuningConfig(@Nullable Integer maxRowsInMemory, @Nullable Long maxBytesInMemory,
        @Nullable Integer maxRowsPerSegment, @Nullable Long maxTotalRows,
        @Nullable Period intermediatePersistPeriod, @Nullable File basePersistDirectory,
        @Nullable Integer maxPendingPersists, @Nullable IndexSpec indexSpec,
        @Nullable IndexSpec indexSpecForIntermediatePersists,
        // This parameter is left for compatibility when reading existing configs, to be removed in Druid 0.12.
        @Deprecated @JsonProperty("buildV9Directly") @Nullable Boolean buildV9Directly,
        @Deprecated @Nullable Boolean reportParseExceptions, @Nullable Long handoffConditionTimeout,
        @Nullable Boolean resetOffsetAutomatically, Boolean skipSequenceNumberAvailabilityCheck,
        @Nullable SegmentWriteOutMediumFactory segmentWriteOutMediumFactory,
        @Nullable Period intermediateHandoffPeriod, @Nullable Boolean logParseExceptions,
        @Nullable Integer maxParseExceptions, @Nullable Integer maxSavedParseExceptions) {
    // Cannot be a static because default basePersistDirectory is unique per-instance
    final RealtimeTuningConfig defaults = RealtimeTuningConfig.makeDefaultTuningConfig(basePersistDirectory);

    this.maxRowsInMemory = maxRowsInMemory == null ? defaults.getMaxRowsInMemory() : maxRowsInMemory;
    // maxRowsPerSegment/maxTotalRows are folded into a dynamic partitions spec.
    this.partitionsSpec = new DynamicPartitionsSpec(maxRowsPerSegment, maxTotalRows);
    // initializing this to 0, it will be lazily initialized to a value
    // @see server.src.main.java.org.apache.druid.segment.indexing.TuningConfigs#getMaxBytesInMemoryOrDefault(long)
    this.maxBytesInMemory = maxBytesInMemory == null ? 0 : maxBytesInMemory;
    this.intermediatePersistPeriod = intermediatePersistPeriod == null ? defaults.getIntermediatePersistPeriod()
            : intermediatePersistPeriod;
    this.basePersistDirectory = defaults.getBasePersistDirectory();
    this.maxPendingPersists = maxPendingPersists == null ? 0 : maxPendingPersists;
    this.indexSpec = indexSpec == null ? defaults.getIndexSpec() : indexSpec;
    // Intermediate persists reuse the main index spec unless one is given.
    this.indexSpecForIntermediatePersists = indexSpecForIntermediatePersists == null ? this.indexSpec
            : indexSpecForIntermediatePersists;
    this.reportParseExceptions = reportParseExceptions == null ? defaults.isReportParseExceptions()
            : reportParseExceptions;
    this.handoffConditionTimeout = handoffConditionTimeout == null ? defaults.getHandoffConditionTimeout()
            : handoffConditionTimeout;
    this.resetOffsetAutomatically = resetOffsetAutomatically == null ? DEFAULT_RESET_OFFSET_AUTOMATICALLY
            : resetOffsetAutomatically;
    this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory;
    // Default of Integer.MAX_VALUE days effectively means "never hand off early".
    this.intermediateHandoffPeriod = intermediateHandoffPeriod == null
            ? new Period().withDays(Integer.MAX_VALUE)
            : intermediateHandoffPeriod;
    this.skipSequenceNumberAvailabilityCheck = skipSequenceNumberAvailabilityCheck == null
            ? DEFAULT_SKIP_SEQUENCE_NUMBER_AVAILABILITY_CHECK
            : skipSequenceNumberAvailabilityCheck;

    // Legacy strict mode: when parse exceptions are reported, no unparseable
    // rows are tolerated (maxParseExceptions forced to 0) and at most one
    // saved parse exception is kept.
    if (this.reportParseExceptions) {
        this.maxParseExceptions = 0;
        this.maxSavedParseExceptions = maxSavedParseExceptions == null ? 0
                : Math.min(1, maxSavedParseExceptions);
    } else {
        this.maxParseExceptions = maxParseExceptions == null ? TuningConfig.DEFAULT_MAX_PARSE_EXCEPTIONS
                : maxParseExceptions;
        this.maxSavedParseExceptions = maxSavedParseExceptions == null
                ? TuningConfig.DEFAULT_MAX_SAVED_PARSE_EXCEPTIONS
                : maxSavedParseExceptions;
    }
    this.logParseExceptions = logParseExceptions == null ? TuningConfig.DEFAULT_LOG_PARSE_EXCEPTIONS
            : logParseExceptions;
}

From source file:rapture.kernel.DecisionApiImpl.java

License:Open Source License

/**
 * Collects one-month averages and counts for a workflow, and optionally for an
 * associated job and an args-hash variant, falling back to the workflow-level
 * average when a variant has too few samples to be meaningful.
 */
@Override
public WorkflowHistoricalMetrics getMonthlyMetrics(CallingContext context, String workflowURIIn,
        String jobURIIn, String argsHashValue, String stateIn) {
    final WorkOrderExecutionState state = WorkOrderExecutionState.valueOf(stateIn);

    if (StringUtils.isEmpty(workflowURIIn)) {
        throw RaptureExceptionFactory.create(HttpStatus.SC_BAD_REQUEST, "Workflow URI must be defined!");
    }

    final RaptureURI workflowURI = new RaptureURI(workflowURIIn, Scheme.WORKFLOW);
    final WorkflowHistoricalMetrics metrics = new WorkflowHistoricalMetrics();
    // All averages/counts below are taken over the same one-month window.
    final Period oneMonth = new Period().withMonths(1);

    try {
        final String workflowMetric = WorkflowMetricsFactory.createWorkflowMetricName(workflowURI, state);
        metrics.setWorkflowAverage(Kernel.getMetricsService().getMetricAverage(workflowMetric, oneMonth));
        final Long workflowAverageCount = Kernel.getMetricsService().getMetricCount(workflowMetric, oneMonth);

        metrics.setWorkflowMetricName(workflowMetric);

        if (!StringUtils.isEmpty(jobURIIn)) {
            final RaptureURI jobURI = new RaptureURI(jobURIIn, Scheme.JOB);
            final String jobMetric = WorkflowMetricsFactory.createJobMetricName(jobURI, state);
            final Long jobCount = Kernel.getMetricsService().getMetricCount(jobMetric, oneMonth);
            // Fewer than ~10 samples is a negligible stat; keep the workflow average instead.
            if (jobCount > 10 && workflowAverageCount > jobCount) {
                metrics.setJobAverage(Kernel.getMetricsService().getMetricAverage(jobMetric, oneMonth));
            }
            metrics.setJobMetricName(jobMetric);
        }

        if (!StringUtils.isEmpty(argsHashValue)) {
            final String argsMetric = WorkflowMetricsFactory.createWorkflowWithArgsMetric(workflowURI, state,
                    argsHashValue);
            final Long argsCount = Kernel.getMetricsService().getMetricCount(argsMetric, oneMonth);
            // Same negligible-sample guard as for the job metric above.
            if (argsCount > 10 && workflowAverageCount > argsCount) {
                metrics.setWorkflowWithArgsAverage(
                        Kernel.getMetricsService().getMetricAverage(argsMetric, oneMonth));
            }
            metrics.setArgsHashMetricName(argsMetric);
        }
    } catch (IOException e) {
        throw RaptureExceptionFactory.create("Error while getting average: " + e.getMessage(), e);
    }

    return metrics;
}

From source file:supply.CapacityHarvester.java

License:Apache License

/**
 * Estimates the remaining hours for {@code username} in the (currently
 * hard-coded) February 2014 sprint.
 *
 * <p>Weekly availability reported by {@code Availability} is accumulated into
 * {@code hours} and logged, but the value returned is simply the wall-clock
 * hours between now and the sprint end, clamped at zero.
 *
 * @param username user whose availability is looked up and logged
 * @return wall-clock hours until sprint end, never negative
 */
public static int getCalculatedRemainingHours(String username) {
    // Find start and end date for current sprint
    // --> Lookup sprint setup

    // TODO: sprint boundaries are hard-coded; they should come from sprint setup.
    DateTime sprintStartDate = new DateTime(2014, 02, 1, 0, 0, 0, 0);
    DateTime sprintEndDate = new DateTime(2014, 02, 28, 17, 0);
    logger.info("sprintEndDate WeekOfWeekyear=" + sprintEndDate.getWeekOfWeekyear());
    LocalDate today = new LocalDate();

    // business days left in current week
    logger.info("Current week=" + today.getWeekOfWeekyear());
    if (today.getDayOfWeek() > 5) {
        logger.info("Not a business day. 0 hours left of availability as this is weekend.");
    }
    SimpleDateFormat df = new SimpleDateFormat("dd.MM.yyyy");
    Period weekPeriod = new Period().withWeeks(1);
    Interval i = new Interval(sprintStartDate, weekPeriod);
    int hours = 0;
    while (i.getEnd().isBefore(sprintEndDate)) {
        logger.info("week: " + i.getStart().getWeekOfWeekyear() + " start: " + df.format(i.getStart().toDate())
                + " end: " + df.format(i.getEnd().minusMillis(1).toDate()));
        // NOTE(review): the interval is advanced BEFORE availability is sampled,
        // so the first sprint week is never counted — confirm this is intentional.
        i = new Interval(i.getStart().plus(weekPeriod), weekPeriod);
        int availabilityHours = Availability.getAvailability(i.getStart().toCalendar(Locale.US), username);
        logger.info("Reported availability hours for [" + username + "]: " + availabilityHours);
        hours += availabilityHours;
    }

    int hoursRemaining = Hours.hoursBetween(today.toDateTimeAtCurrentTime(), sprintEndDate).getHours();
    if (hoursRemaining < 0) {
        hoursRemaining = 0;
    }
    logger.info("HoursToSprintEnd=" + hoursRemaining);
    logger.info("DayOfWeek=" + today.getDayOfWeek());
    logger.info("WeekOfWeekyear=" + today.getWeekOfWeekyear());
    logger.info("Hours from DB=" + hours);

    // --> Find week numbers
    // --> Check that current date is between start/end date of sprint

    // Lookup how many hours this user has for the sprint
    // --> lookup in HBase
    // -->

    return hoursRemaining;
}

From source file:voldemort.store.readonly.mr.utils.HadoopUtils.java

License:Apache License

/**
 * Parses a duration string such as {@code "2M15d4h30m"} into a Joda
 * {@code Period}. Recognized suffixes: {@code M} = months, {@code d} = days,
 * {@code h} = hours, {@code m} = minutes (note the case distinction between
 * months and minutes). Repeated occurrences of a unit are summed.
 */
public static Period parsePeriod(String periodStr) {
    final Matcher monthsFormat = Pattern.compile("[0-9][0-9]*M").matcher(periodStr);
    final Matcher daysFormat = Pattern.compile("[0-9][0-9]*d").matcher(periodStr);
    final Matcher hoursFormat = Pattern.compile("[0-9][0-9]*h").matcher(periodStr);
    final Matcher minutesFormat = Pattern.compile("[0-9][0-9]*m").matcher(periodStr);

    Period period = new Period();
    while (monthsFormat.find()) {
        period = period.plusMonths(numericPrefix(monthsFormat.group()));
    }
    while (daysFormat.find()) {
        period = period.plusDays(numericPrefix(daysFormat.group()));
    }
    while (hoursFormat.find()) {
        period = period.plusHours(numericPrefix(hoursFormat.group()));
    }
    while (minutesFormat.find()) {
        period = period.plusMinutes(numericPrefix(minutesFormat.group()));
    }
    return period;
}

/** Parses the digits of a matched token such as "15d", dropping the unit suffix. */
private static int numericPrefix(String token) {
    return Integer.parseInt(token.substring(0, token.length() - 1));
}

From source file:windows.Recursos.java

/**
 * Este método se encarga de buscar todas las semanas en el año actual.
 *
 * @return numero de semanas
 */
public static int generateWeeks() {
    DateTime now = new DateTime();
    Period oneWeek = new Period().withWeeks(1);

    // Advance January 1st of the current year to its first Monday.
    DateTime firstMonday = new DateTime(now.getYear(), 1, 1, 0, 0, 0, 0);
    while (firstMonday.getDayOfWeek() != DateTimeConstants.MONDAY) {
        firstMonday = firstMonday.plusDays(1);
    }

    // Count Monday-anchored weeks that start before January 1st of next year.
    DateTime nextYearStart = new DateTime(now.getYear() + 1, 1, 1, 0, 0, 0, 0);
    int weekCount = 0;
    for (Interval week = new Interval(firstMonday, oneWeek); week.getStart().isBefore(nextYearStart);
            week = new Interval(week.getStart().plus(oneWeek), oneWeek)) {
        weekCount++;
    }
    return weekCount;
}

From source file:windows.Recursos.java

public static int getActualweek() {
    DateTime now = new DateTime();
    Period oneWeek = new Period().withWeeks(1);

    // Advance January 1st of the current year to its first Monday.
    DateTime firstMonday = new DateTime(now.getYear(), 1, 1, 0, 0, 0, 0);
    while (firstMonday.getDayOfWeek() != DateTimeConstants.MONDAY) {
        firstMonday = firstMonday.plusDays(1);
    }

    // Count Monday-anchored weeks that start before the current instant.
    DateTime upperBound = new DateTime(now);
    int weekIndex = 0;
    for (Interval week = new Interval(firstMonday, oneWeek); week.getStart().isBefore(upperBound);
            week = new Interval(week.getStart().plus(oneWeek), oneWeek)) {
        weekIndex++;
    }
    return weekIndex;
}