Example usage for org.joda.time DateTime plusSeconds

Introduction

This page lists usage examples for org.joda.time.DateTime.plusSeconds.

Prototype

public DateTime plusSeconds(int seconds) 

Document

Returns a copy of this datetime plus the specified number of seconds.
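
DateTime in Joda-Time is immutable, so plusSeconds never modifies the instance it is called on; it returns a new DateTime that must be captured. A minimal sketch, assuming a standard Joda-Time dependency (class and variable names are illustrative):

import org.joda.time.DateTime;

public class PlusSecondsDemo {
    public static void main(String[] args) {
        DateTime now = new DateTime();
        // plusSeconds returns a copy; 'now' itself is unchanged.
        DateTime later = now.plusSeconds(30);
        System.out.println(now);
        System.out.println(later);
    }
}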

Usage

From source file:eu.stork.peps.auth.engine.STORKSAMLEngine.java

License:EUPL

/**
 * Generate stork attribute query response.
 *
 * @param request the request
 * @param responseAttrQueryRes the response to the attribute query request
 * @param ipAddress the IP address
 * @param isHashing the hashing of values
 * 
 * @return the STORK attribute query response
 * 
 * @throws STORKSAMLEngineException the STORKSAML engine exception
 */
public STORKAttrQueryResponse generateSTORKAttrQueryResponse(final STORKAttrQueryRequest request,
        final STORKAttrQueryResponse responseAttrQueryRes, final String ipAddress, final String destinationUrl,
        final boolean isHashing) throws STORKSAMLEngineException {
    LOG.info("generateSTORKAttrQueryResponse");

    // Validate parameters
    validateParamAttrQueryResponse(request, responseAttrQueryRes);

    // Mandatory SAML
    LOG.debug("Generate StatusCode");
    final StatusCode statusCode = SAMLEngineUtils.generateStatusCode(StatusCode.SUCCESS_URI);

    LOG.debug("Generate Status");
    final Status status = SAMLEngineUtils.generateStatus(statusCode);

    LOG.debug("Generate StatusMessage");
    final StatusMessage statusMessage = (StatusMessage) SAMLEngineUtils
            .generateStatusMessage(StatusCode.SUCCESS_URI);

    status.setStatusMessage(statusMessage);

    LOG.debug("Generate Response");

    // RESPONSE
    final Response response = genAuthnRespBase(status, destinationUrl, request.getSamlId());

    DateTime notOnOrAfter = new DateTime();

    notOnOrAfter = notOnOrAfter.plusSeconds(super.getSamlCoreProperties().getTimeNotOnOrAfter());

    final Assertion assertion = this.generateAssertion(ipAddress, "", request.getSamlId(), request.getIssuer(),
            notOnOrAfter);

    final AttributeStatement attrStatement = this
            .generateAttributeStatement(responseAttrQueryRes.getPersonalAttributeList(), isHashing);

    assertion.getAttributeStatements().add(attrStatement);

    // Add assertions
    response.getAssertions().add(assertion);

    final STORKAttrQueryResponse attrQueryResponse = new STORKAttrQueryResponse();

    try {
        attrQueryResponse.setTokenSaml(super.signAndMarshall(response));
        attrQueryResponse.setSamlId(response.getID());
    } catch (SAMLEngineException e) {
        LOG.error("Sign and Marshall.", e);
        throw new STORKSAMLEngineException(e);
    }
    return attrQueryResponse;
}
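
This and the two STORK examples that follow all use plusSeconds the same way: the assertion's NotOnOrAfter condition is the current instant plus a validity period, in seconds, read from the SAML core properties. Reduced to just that step, the idea looks like the sketch below (the timeout value and names are illustrative, not taken from the STORK configuration):

import org.joda.time.DateTime;

public class AssertionValidity {
    public static void main(String[] args) {
        // Illustrative value; the engine reads this from getSamlCoreProperties().getTimeNotOnOrAfter().
        int notOnOrAfterSeconds = 300;
        DateTime notOnOrAfter = new DateTime().plusSeconds(notOnOrAfterSeconds);
        System.out.println("Assertion valid until: " + notOnOrAfter);
    }
}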

From source file:eu.stork.peps.auth.engine.STORKSAMLEngine.java

License:EUPL

/**
 * Generate stork attribute query response from multiple assertions 
 *
 * @param request the request
 * @param responseAttrQueryRes the response to the query request
 * @param responses the responses to include in the response (aggregation)
 * @param ipAddress the IP address
 * @param isHashing the hashing of values
 * 
 * @return the sTORK attribute query response
 * 
 * @throws STORKSAMLEngineException the STORKSAML engine exception
 */
public STORKAttrQueryResponse generateSTORKAttrQueryResponseWithAssertions(final STORKAttrQueryRequest request,
        final STORKAttrQueryResponse responseAttrQueryRes, final List<STORKAttrQueryResponse> responses,
        final String ipAddress, final String destinationUrl, final boolean isHashing)
        throws STORKSAMLEngineException {
    LOG.info("generateSTORKAttrQueryResponse");

    // Validate parameters
    validateParamAttrQueryResponseFromAssertions(request, responseAttrQueryRes);

    // Mandatory SAML
    LOG.debug("Generate StatusCode");
    final StatusCode statusCode = SAMLEngineUtils.generateStatusCode(StatusCode.SUCCESS_URI);

    LOG.debug("Generate Status");
    final Status status = SAMLEngineUtils.generateStatus(statusCode);

    LOG.debug("Generate StatusMessage");
    final StatusMessage statusMessage = (StatusMessage) SAMLEngineUtils
            .generateStatusMessage(StatusCode.SUCCESS_URI);

    status.setStatusMessage(statusMessage);

    LOG.debug("Generate Response");

    // RESPONSE
    final Response response = genAuthnRespBase(status, destinationUrl, request.getSamlId());

    DateTime notOnOrAfter = new DateTime();

    notOnOrAfter = notOnOrAfter.plusSeconds(super.getSamlCoreProperties().getTimeNotOnOrAfter());

    final Assertion assertion = this.generateAssertion(ipAddress, "", request.getSamlId(), request.getIssuer(),
            notOnOrAfter);

    final AttributeStatement attrStatement = this
            .generateAttributeStatement(responseAttrQueryRes.getPersonalAttributeList(), isHashing);

    assertion.getAttributeStatements().add(attrStatement);

    // Add the assertions from the former Query responses
    response.getAssertions().add(assertion);
    if (responses != null && responses.size() > 0) {
        for (int i = 0; i < responses.size(); i++) {
            Assertion tempAssertion = responses.get(i).getAssertion();
            tempAssertion.setParent(response);
            response.getAssertions().add(tempAssertion);
        }
    }

    final STORKAttrQueryResponse attrQueryResponse = new STORKAttrQueryResponse();

    try {
        attrQueryResponse.setTokenSaml(super.signAndMarshall(response));
        attrQueryResponse.setSamlId(response.getID());
    } catch (SAMLEngineException e) {
        LOG.error("Sign and Marshall.", e);
        throw new STORKSAMLEngineException(e);
    }
    return attrQueryResponse;
}

From source file:eu.stork.peps.auth.engine.STORKSAMLEngine.java

License:EUPL

/**
 * Generate stork attribute query response fail.
 *
 * @param request the request
 * @param response the response
 * @param ipAddress the IP address
 * @param isHashing the is hashing
 * 
 * @return the STORK attribute query response
 * 
 * @throws STORKSAMLEngineException the STORKSAML engine exception
 */
public STORKAttrQueryResponse generateSTORKAttrQueryResponseFail(final STORKAttrQueryRequest request,
        final STORKAttrQueryResponse response, final String ipAddress, final String destinationUrl,
        final boolean isHashing) throws STORKSAMLEngineException {
    LOG.info("generateSTORKAttrQueryResponseFail");

    validateParamAttrQueryResponseFail(request, response);

    // Mandatory
    final StatusCode statusCode = SAMLEngineUtils.generateStatusCode(response.getStatusCode());

    // Mandatory SAML
    LOG.debug("Generate StatusCode.");
    // The subordinate status code is optional when it is not covered by one of the following codes:
    // - urn:oasis:names:tc:SAML:2.0:status:AuthnFailed
    // - urn:oasis:names:tc:SAML:2.0:status:InvalidAttrNameOrValue
    // - urn:oasis:names:tc:SAML:2.0:status:InvalidNameIDPolicy
    // - urn:oasis:names:tc:SAML:2.0:status:RequestDenied
    // - http://www.stork.gov.eu/saml20/statusCodes/QAANotSupported

    if (StringUtils.isNotBlank(response.getSubStatusCode())) {
        final StatusCode newStatusCode = SAMLEngineUtils.generateStatusCode(response.getSubStatusCode());
        statusCode.setStatusCode(newStatusCode);
    }

    LOG.debug("Generate Status.");
    final Status status = SAMLEngineUtils.generateStatus(statusCode);

    if (StringUtils.isNotBlank(response.getMessage())) {
        final StatusMessage statusMessage = (StatusMessage) SAMLEngineUtils
                .generateStatusMessage(response.getMessage());

        status.setStatusMessage(statusMessage);
    }

    LOG.debug("Generate Response.");
    // RESPONSE
    final Response responseFail = genAuthnRespBase(status, destinationUrl, request.getSamlId());

    DateTime notOnOrAfter = new DateTime();

    notOnOrAfter = notOnOrAfter.plusSeconds(super.getSamlCoreProperties().getTimeNotOnOrAfter());

    final Assertion assertion = this.generateAssertion(ipAddress, "", request.getSamlId(), request.getIssuer(),
            notOnOrAfter);

    responseFail.getAssertions().add(assertion);

    LOG.debug("Sign and Marshall ResponseFail.");

    final STORKAttrQueryResponse storkResponse = new STORKAttrQueryResponse();

    try {
        storkResponse.setTokenSaml(super.signAndMarshall(responseFail));
        storkResponse.setSamlId(responseFail.getID());
    } catch (SAMLEngineException e) {
        LOG.error("SAMLEngineException.", e);
        throw new STORKSAMLEngineException(e);
    }
    return storkResponse;
}

From source file:gobblin.configuration.SourceState.java

License:Apache License

/**
 * Create a new properly populated {@link Extract} instance.
 *
 * <p>
 *   This method should always return a new unique {@link Extract} instance.
 * </p>
 *
 * @param type {@link gobblin.source.workunit.Extract.TableType}
 * @param namespace namespace of the table this extract belongs to
 * @param table name of the table this extract belongs to
 * @return a new unique {@link Extract} instance
 *
 * @deprecated Use {@link gobblin.source.extractor.extract.AbstractSource#createExtract(
 * gobblin.source.workunit.Extract.TableType, String, String)}
 */
@Deprecated
public synchronized Extract createExtract(Extract.TableType type, String namespace, String table) {
    Extract extract = new Extract(this, type, namespace, table);
    while (EXTRACT_SET.contains(extract)) {
        if (Strings.isNullOrEmpty(extract.getExtractId())) {
            extract.setExtractId(DTF.print(new DateTime()));
        } else {
            DateTime extractDateTime = DTF.parseDateTime(extract.getExtractId());
            extract.setExtractId(DTF.print(extractDateTime.plusSeconds(1)));
        }
    }
    EXTRACT_SET.add(extract);
    return extract;
}

From source file:gobblin.source.workunit.ExtractFactory.java

License:Apache License

/**
 * Returns a unique {@link Extract} instance.
 * Any two calls of this method from the same {@link ExtractFactory} instance guarantees to
 * return {@link Extract}s with different IDs.
 *
 * @param type {@link TableType}
 * @param namespace dot separated namespace path
 * @param table table name
 * @return a unique {@link Extract} instance
 */
public synchronized Extract getUniqueExtract(TableType type, String namespace, String table) {
    Extract newExtract = new Extract(type, namespace, table);
    while (this.createdInstances.contains(newExtract)) {
        if (Strings.isNullOrEmpty(newExtract.getExtractId())) {
            newExtract.setExtractId(this.dtf.print(new DateTime()));
        } else {
            DateTime extractDateTime = this.dtf.parseDateTime(newExtract.getExtractId());
            newExtract.setExtractId(this.dtf.print(extractDateTime.plusSeconds(1)));
        }
    }
    this.createdInstances.add(newExtract);
    return newExtract;
}
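
Both Gobblin examples above rely on the same trick: an extract ID is derived from the current time, and on a collision the ID is parsed back into a DateTime and bumped with plusSeconds(1) until it is unique. A stripped-down sketch of that idea, assuming a simple timestamp pattern and an in-memory set (both illustrative):

import java.util.HashSet;
import java.util.Set;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class UniqueTimestampIds {
    private static final DateTimeFormatter ID_FORMAT = DateTimeFormat.forPattern("yyyyMMddHHmmss");
    private static final Set<String> ISSUED = new HashSet<>();

    public static synchronized String nextUniqueId() {
        String id = ID_FORMAT.print(new DateTime());
        while (ISSUED.contains(id)) {
            // Collision: move the timestamp forward by one second and try again.
            id = ID_FORMAT.print(ID_FORMAT.parseDateTime(id).plusSeconds(1));
        }
        ISSUED.add(id);
        return id;
    }

    public static void main(String[] args) {
        System.out.println(nextUniqueId());
        System.out.println(nextUniqueId()); // forced one second later if called within the same second
    }
}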

From source file:google.registry.loadtest.LoadTestAction.java

License:Open Source License

@Override
public void run() {
    validateAndLogRequest();
    DateTime initialStartSecond = DateTime.now(UTC).plusSeconds(delaySeconds);
    ImmutableList.Builder<String> preTaskXmls = new ImmutableList.Builder<>();
    ImmutableList.Builder<String> contactNamesBuilder = new ImmutableList.Builder<>();
    ImmutableList.Builder<String> hostPrefixesBuilder = new ImmutableList.Builder<>();
    for (int i = 0; i < successfulDomainCreatesPerSecond; i++) {
        String contactName = getRandomLabel(MAX_CONTACT_LENGTH);
        String hostPrefix = getRandomLabel(ARBITRARY_VALID_HOST_LENGTH);
        contactNamesBuilder.add(contactName);
        hostPrefixesBuilder.add(hostPrefix);
        preTaskXmls.add(xmlContactCreateTmpl.replace("%contact%", contactName),
                xmlHostCreateTmpl.replace("%host%", hostPrefix));
    }
    enqueue(createTasks(preTaskXmls.build(), DateTime.now(UTC)));
    ImmutableList<String> contactNames = contactNamesBuilder.build();
    ImmutableList<String> hostPrefixes = hostPrefixesBuilder.build();

    ImmutableList.Builder<TaskOptions> tasks = new ImmutableList.Builder<>();
    for (int offsetSeconds = 0; offsetSeconds < runSeconds; offsetSeconds++) {
        DateTime startSecond = initialStartSecond.plusSeconds(offsetSeconds);
        // The first "failed" creates might actually succeed if the object doesn't already exist, but
        // that shouldn't affect the load numbers.
        tasks.addAll(
                createTasks(createNumCopies(xmlContactCreateFail, failedContactCreatesPerSecond), startSecond));
        tasks.addAll(createTasks(createNumCopies(xmlHostCreateFail, failedHostCreatesPerSecond), startSecond));
        tasks.addAll(
                createTasks(createNumCopies(xmlDomainCreateFail, failedDomainCreatesPerSecond), startSecond));
        // We can do infos on the known existing objects.
        tasks.addAll(createTasks(createNumCopies(xmlContactInfo, contactInfosPerSecond), startSecond));
        tasks.addAll(createTasks(createNumCopies(xmlHostInfo, hostInfosPerSecond), startSecond));
        tasks.addAll(createTasks(createNumCopies(xmlDomainInfo, domainInfosPerSecond), startSecond));
        // The domain check template uses "example.TLD" which won't exist, and one existing domain.
        tasks.addAll(createTasks(createNumCopies(xmlDomainCheck, domainChecksPerSecond), startSecond));
        // Do successful creates on random names
        tasks.addAll(
                createTasks(transform(createNumCopies(xmlContactCreateTmpl, successfulContactCreatesPerSecond),
                        randomNameReplacer("%contact%", MAX_CONTACT_LENGTH)), startSecond));
        tasks.addAll(createTasks(transform(createNumCopies(xmlHostCreateTmpl, successfulHostCreatesPerSecond),
                randomNameReplacer("%host%", ARBITRARY_VALID_HOST_LENGTH)), startSecond));
        tasks.addAll(createTasks(
                FluentIterable.from(createNumCopies(xmlDomainCreateTmpl, successfulDomainCreatesPerSecond))
                        .transform(randomNameReplacer("%domain%", MAX_DOMAIN_LABEL_LENGTH))
                        .transform(listNameReplacer("%contact%", contactNames))
                        .transform(listNameReplacer("%host%", hostPrefixes)).toList(),
                startSecond));
    }
    ImmutableList<TaskOptions> taskOptions = tasks.build();
    enqueue(taskOptions);
    logger.infofmt("Added %d total load test tasks", taskOptions.size());
}
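
In this load test plusSeconds does two jobs: it pushes the first start second delaySeconds into the future, and it fans the work out so each batch of tasks is scheduled one second after the previous batch. The scheduling skeleton, with the EPP task details removed (the delay and duration values are illustrative):

import static org.joda.time.DateTimeZone.UTC;

import org.joda.time.DateTime;

public class PerSecondScheduling {
    public static void main(String[] args) {
        int delaySeconds = 5;  // illustrative: how far in the future the run starts
        int runSeconds = 10;   // illustrative: how many one-second batches to schedule
        DateTime initialStartSecond = DateTime.now(UTC).plusSeconds(delaySeconds);
        for (int offsetSeconds = 0; offsetSeconds < runSeconds; offsetSeconds++) {
            DateTime startSecond = initialStartSecond.plusSeconds(offsetSeconds);
            System.out.println("batch " + offsetSeconds + " starts at " + startSecond);
        }
    }
}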

From source file:graph.inference.module.LaterThanWorker.java

License:Open Source License

private Interval parseDate(DAGNode date, DateTime now) {
    String dateStr = date.toString();
    if (dateStr.equals("Now") || dateStr.equals("Now-Generally"))
        return new Interval(now.getMillis(), now.getMillis() + 1);
    if (dateStr.equals("Today-Indexical"))
        return new Interval(now.dayOfYear().roundFloorCopy(), now.dayOfYear().roundCeilingCopy());
    if (dateStr.equals("Tomorrow-Indexical")) {
        return new Interval(now.plusDays(1).dayOfYear().roundFloorCopy(),
                now.plusDays(1).dayOfYear().roundCeilingCopy());
    }
    if (dateStr.equals("Yesterday-Indexical")) {
        return new Interval(now.minusDays(1).dayOfYear().roundFloorCopy(),
                now.minusDays(1).dayOfYear().roundCeilingCopy());
    }
    if (dateStr.equals("TheYear-Indexical")) {
        return new Interval(now.year().roundFloorCopy(), now.year().roundCeilingCopy());
    }

    // Parse the date from the DAGNode
    String parsePattern = null;
    for (int i = DATE_PARSE_INTERVALS.length - 1; i >= 0; i--) {
        StringBuilder newPattern = new StringBuilder("(" + DATE_PARSE_INTERVALS[i]);
        if (parsePattern != null)
            newPattern.append(" " + parsePattern);
        newPattern.append(")");
        parsePattern = newPattern.toString();

        DateTimeFormatter dtf = DateTimeFormat.forPattern(parsePattern);
        try {
            DateTime dateTime = dtf.parseDateTime(dateStr);
            if (dateTime != null) {
                switch (i) {
                case 0:
                    return new Interval(dateTime.getMillis(),
                            dateTime.plusSeconds(1).minusMillis(1).getMillis());
                case 1:
                    return new Interval(dateTime.getMillis(),
                            dateTime.plusMinutes(1).minusMillis(1).getMillis());
                case 2:
                    return new Interval(dateTime.getMillis(), dateTime.plusHours(1).minusMillis(1).getMillis());
                case 3:
                    return new Interval(dateTime.getMillis(), dateTime.plusDays(1).minusMillis(1).getMillis());
                case 4:
                    return new Interval(dateTime.getMillis(),
                            dateTime.plusMonths(1).minusMillis(1).getMillis());
                case 5:
                    return new Interval(dateTime.getMillis(), dateTime.plusYears(1).minusMillis(1).getMillis());
                }
            }
        } catch (Exception e) {
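        // The date string did not match this pattern; fall through and try the next one.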
        }
    }
    return null;
}
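
When a date string parses at second granularity (case 0 above), the interval's end point is computed as dateTime.plusSeconds(1).minusMillis(1), i.e. the last millisecond within that second. A standalone illustration of that end-point arithmetic (the example instant is arbitrary):

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class SecondGranularityInterval {
    public static void main(String[] args) {
        DateTime parsed = new DateTime(2020, 1, 1, 12, 0, 30, 0);
        // Interval from 12:00:30.000 up to 12:00:30.999 (the last millisecond of that second).
        Interval second = new Interval(parsed.getMillis(),
                parsed.plusSeconds(1).minusMillis(1).getMillis());
        System.out.println(second);
    }
}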

From source file:hoot.services.models.osm.Changeset.java

License:Open Source License

/**
 * Updates the expiration of this changeset in the database by modifying its closed at time
 *
 * This logic is pulled directly from the Rails port, and is meant to be executed
 * at the end of each upload process involving this changeset.  This effectively extends the
 * changeset's expiration once any data is written to it and leaves it with a shorter expiration
 * if it has been opened but had no data added to it.
 *
 * @throws Exception
 * @todo This method is very confusing.
 */
public void updateExpiration() throws Exception {
    final DateTime now = new DateTime();

    //SQLQuery query = new SQLQuery(conn, DbUtils.getConfiguration());

    Changesets changesetRecord = (Changesets) new SQLQuery(conn, DbUtils.getConfiguration(_mapId))
            .from(changesets).where(changesets.id.eq(getId())).singleResult(changesets);

    if (isOpen()) {
        final int maximumChangesetElements = Integer.parseInt(HootProperties.getInstance().getProperty(
                "maximumChangesetElements", HootProperties.getDefault("maximumChangesetElements")));
        Timestamp newClosedAt = null;
        assert (changesetRecord.getNumChanges() <= maximumChangesetElements);
        if (changesetRecord.getNumChanges() == maximumChangesetElements) {
            newClosedAt = new Timestamp(now.getMillis());
        } else if (changesetRecord.getNumChanges() > 0) {
            /*
             * from rails port:
             *
             * if (closed_at - created_at) > (MAX_TIME_OPEN - IDLE_TIMEOUT)
             *   self.closed_at = create_at + MAX_TIME_OPEN
             * else
             *   self.closed_at = Time.now.getutc + IDLE_TIMEOUT
             */

            final DateTime createdAt = new DateTime(changesetRecord.getCreatedAt().getTime());
            final DateTime closedAt = new DateTime(changesetRecord.getClosedAt().getTime());

            final int changesetIdleTimeout = Integer.parseInt(HootProperties.getInstance().getProperty(
                    "changesetIdleTimeoutMinutes", HootProperties.getDefault("changesetIdleTimeoutMinutes")));
            final int changesetMaxOpenTime = Integer.parseInt(HootProperties.getInstance().getProperty(
                    "changesetMaxOpenTimeHours", HootProperties.getDefault("changesetMaxOpenTimeHours")));
            //The testChangesetAutoClose option = true causes changesetIdleTimeoutMinutes and
            //changesetMaxOpenTimeHours to be interpreted in seconds rather than minutes and hours,
            //respectively.  This enables faster running of auto-close related unit tests.
            if (Boolean.parseBoolean(HootProperties.getInstance().getProperty("testChangesetAutoClose",
                    HootProperties.getDefault("testChangesetAutoClose")))) {
                final int changesetMaxOpenTimeSeconds = changesetMaxOpenTime;
                final int changesetIdleTimeoutSeconds = changesetIdleTimeout;
                if (Seconds.secondsBetween(createdAt, closedAt)
                        .getSeconds() > (changesetMaxOpenTimeSeconds - changesetIdleTimeoutSeconds)) {
                    newClosedAt = new Timestamp(createdAt.plusSeconds(changesetMaxOpenTimeSeconds).getMillis());
                } else {
                    newClosedAt = new Timestamp(now.plusSeconds(changesetIdleTimeoutSeconds).getMillis());
                }
            } else {
                final int changesetMaxOpenTimeMinutes = changesetMaxOpenTime * 60;
                final int changesetIdleTimeoutMinutes = changesetIdleTimeout;
                if (Minutes.minutesBetween(createdAt, closedAt)
                        .getMinutes() > (changesetMaxOpenTimeMinutes - changesetIdleTimeoutMinutes)) {
                    newClosedAt = new Timestamp(createdAt.plusMinutes(changesetMaxOpenTimeMinutes).getMillis());
                } else {
                    newClosedAt = new Timestamp(now.plusMinutes(changesetIdleTimeoutMinutes).getMillis());
                }
            }
        }

        if (newClosedAt != null) {
            if (new SQLUpdateClause(conn, DbUtils.getConfiguration(_mapId), changesets)
                    .where(changesets.id.eq(getId())).set(changesets.closedAt, newClosedAt).execute() != 1) {
                throw new Exception("Error updating expiration on changeset.");
            }
        }
    } else {
        //TODO: I have no idea why this code block is needed now.  It didn't use to be, but after
        //some refactoring to support the changes to marking items as reviewed in ReviewResource, it
        //now is needed.  I've been unable to track down what causes this to happen.
        if (!changesetRecord.getClosedAt().before(new Timestamp(now.getMillis()))) {
            if (new SQLUpdateClause(conn, DbUtils.getConfiguration(_mapId), changesets)
                    .where(changesets.id.eq(getId())).set(changesets.closedAt, new Timestamp(now.getMillis()))
                    .execute() != 1) {
                throw new Exception("Error updating expiration on changeset.");
            }
        }
    }
}
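
The Rails-port rule quoted in the comment boils down to one decision: if the changeset has already been open longer than the maximum open time minus the idle timeout, the new close time is pinned to createdAt plus the maximum open time; otherwise it slides to now plus the idle timeout. A sketch of just that decision, using the seconds-based branch (the values and method name are illustrative):

import org.joda.time.DateTime;
import org.joda.time.Seconds;

public class ChangesetExpiry {
    public static DateTime newClosedAt(DateTime createdAt, DateTime closedAt, DateTime now,
            int maxOpenSeconds, int idleTimeoutSeconds) {
        if (Seconds.secondsBetween(createdAt, closedAt).getSeconds()
                > (maxOpenSeconds - idleTimeoutSeconds)) {
            // Near the absolute limit: pin the expiration to createdAt + max open time.
            return createdAt.plusSeconds(maxOpenSeconds);
        }
        // Otherwise slide the expiration forward to now + idle timeout.
        return now.plusSeconds(idleTimeoutSeconds);
    }

    public static void main(String[] args) {
        DateTime created = new DateTime().minusSeconds(90);
        DateTime now = new DateTime();
        System.out.println(newClosedAt(created, now, now, 120, 60));
    }
}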

From source file:hoot.services.models.osm.Changeset.java

License:Open Source License

/**
 * Inserts a new empty changeset into the services database
 *
 * @param mapId corresponding map ID for the node
 * @param userId corresponding user ID for the node
 * @param dbConn JDBC connection
 * @return ID of the inserted changeset
 * @throws Exception
 */
public static long insertNew(final long mapId, final long userId, Connection dbConn) throws Exception {
    log.debug("Inserting new changeset...");

    final DateTime now = new DateTime();

    Timestamp closedAt = null;
    final int changesetIdleTimeout = Integer.parseInt(HootProperties.getInstance().getProperty(
            "changesetIdleTimeoutMinutes", HootProperties.getDefault("changesetIdleTimeoutMinutes")));
    //The testChangesetAutoClose option = true causes changesetIdleTimeoutMinutes to be interpreted
    //in seconds rather than minutes and enables faster running of auto-close related unit tests.
    if (Boolean.parseBoolean(HootProperties.getInstance().getProperty("testChangesetAutoClose",
            HootProperties.getDefault("testChangesetAutoClose")))) {
        closedAt = new Timestamp(now.plusSeconds(changesetIdleTimeout).getMillis());
    } else {
        closedAt = new Timestamp(now.plusMinutes(changesetIdleTimeout).getMillis());
    }

    return new SQLInsertClause(dbConn, DbUtils.getConfiguration("" + mapId), changesets)
            .columns(changesets.closedAt, changesets.createdAt, changesets.maxLat, changesets.maxLon,
                    changesets.minLat, changesets.minLon, changesets.userId)
            .values(closedAt, new Timestamp(now.getMillis()),
                    DbUtils.toDbCoordValue(GeoUtils.DEFAULT_COORD_VALUE),
                    DbUtils.toDbCoordValue(GeoUtils.DEFAULT_COORD_VALUE),
                    DbUtils.toDbCoordValue(GeoUtils.DEFAULT_COORD_VALUE),
                    DbUtils.toDbCoordValue(GeoUtils.DEFAULT_COORD_VALUE), userId)
            .executeWithKey(changesets.id);
}
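
The only plusSeconds call here is the test shortcut: when testChangesetAutoClose is enabled, the idle timeout is treated as seconds instead of minutes so that auto-close unit tests finish quickly. A minimal sketch of that switch (the property handling is omitted and the names are illustrative):

import java.sql.Timestamp;

import org.joda.time.DateTime;

public class InitialClosedAt {
    public static Timestamp initialClosedAt(int idleTimeout, boolean treatTimeoutAsSeconds) {
        DateTime now = new DateTime();
        DateTime closedAt = treatTimeoutAsSeconds
                ? now.plusSeconds(idleTimeout)   // fast path for auto-close unit tests
                : now.plusMinutes(idleTimeout);  // normal path: the timeout is in minutes
        return new Timestamp(closedAt.getMillis());
    }

    public static void main(String[] args) {
        System.out.println(initialClosedAt(60, false));
        System.out.println(initialClosedAt(60, true));
    }
}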

From source file:influent.server.dataaccess.DataAccessHelper.java

License:MIT License

public static DateTime getExclusiveEndDate(FL_DateRange date) {

    if (date == null) {
        return null;
    }

    DateTime d = new DateTime((long) date.getStartDate(), DateTimeZone.UTC);

    switch (date.getDurationPerBin().getInterval()) {
    case SECONDS:
        return d.plusSeconds(date.getNumBins().intValue());
    case HOURS:
        return d.plusHours(date.getNumBins().intValue());
    case DAYS:
        return d.plusDays(date.getNumBins().intValue());
    case WEEKS:
        return d.plusWeeks(date.getNumBins().intValue());
    case MONTHS:
        return d.plusMonths(date.getNumBins().intValue());
    case QUARTERS:
        return d.plusMonths(date.getNumBins().intValue() * 3);
    case YEARS:
        return d.plusYears(date.getNumBins().intValue());
    }

    return d;
}
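
The helper maps each bin interval onto the matching plus* method, so the exclusive end date is simply the start date plus numBins units; quarters have no dedicated method and are expressed as numBins * 3 months. A reduced sketch of the same dispatch for a few units (the enum and method are illustrative, not the Influent FL_* types):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class ExclusiveEndDate {
    enum BinInterval { SECONDS, DAYS, MONTHS, QUARTERS }

    public static DateTime exclusiveEnd(long startMillis, BinInterval interval, int numBins) {
        DateTime d = new DateTime(startMillis, DateTimeZone.UTC);
        switch (interval) {
        case SECONDS:
            return d.plusSeconds(numBins);
        case DAYS:
            return d.plusDays(numBins);
        case MONTHS:
            return d.plusMonths(numBins);
        case QUARTERS:
            return d.plusMonths(numBins * 3); // a quarter is three months
        }
        return d;
    }

    public static void main(String[] args) {
        System.out.println(exclusiveEnd(System.currentTimeMillis(), BinInterval.SECONDS, 30));
    }
}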