List of usage examples for org.joda.time DateTime plus
public DateTime plus(ReadablePeriod period)
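Before the project examples below, here is a minimal, self-contained sketch of the call itself. The dates, period value, and class name are illustrative and not taken from any of the listed source files.

import org.joda.time.DateTime;
import org.joda.time.Period;

public class DateTimePlusExample {
    public static void main(String[] args) {
        DateTime start = new DateTime(2015, 1, 1, 12, 0);  // 2015-01-01 12:00 in the default zone
        Period window = Period.hours(2).plusMinutes(30);   // a ReadablePeriod of 2 hours 30 minutes

        // plus(ReadablePeriod) returns a new DateTime; the receiver is immutable and unchanged
        DateTime end = start.plus(window);

        System.out.println(start); // 2015-01-01T12:00:00.000 plus the default zone offset
        System.out.println(end);   // 2015-01-01T14:30:00.000 plus the default zone offset
    }
}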
From source file:dk.dma.epd.ship.service.IntendedRouteHandler.java
License:Apache License
/**
 * Main thread run method. Broadcasts the intended route.
 */
public void run() {

    // Initialize first send
    // lastSend = new DateTime();
    // broadcastIntendedRoute();

    while (running) {
        if (routeManager != null) {

            // We have no active route, keep sleeping
            if (routeManager.getActiveRoute() == null) {
                Util.sleep(BROADCAST_TIME * 1000L);
            } else {

                // Here we handle the periodical broadcasts
                DateTime calculatedTimeOfLastSend = new DateTime();
                calculatedTimeOfLastSend = calculatedTimeOfLastSend.minus(BROADCAST_TIME * 1000L);

                // Do we need to rebroadcast based on the broadcast time setting
                if (calculatedTimeOfLastSend.isAfter(lastSend)) {
                    LOG.debug("Periodically rebroadcasting");
                    broadcastIntendedRoute();
                    lastSend = new DateTime();
                } else if (lastTransmitActiveWp != null) {

                    // We check for the adaptive route broadcast here.
                    // We need to compare lastTransmitActiveWp, which is the last stored
                    // ETA of the waypoint we sent, to the current one.
                    DateTime currentActiveWaypointETA = new DateTime(
                            routeManager.getActiveRoute().getActiveWaypointEta());

                    // LOG.debug("The ETA at last transmission was : " + lastTransmitActiveWp);
                    // LOG.debug("It is now : " + currentActiveWaypointETA);

                    // It can either be before or after
                    if (currentActiveWaypointETA.isAfter(lastTransmitActiveWp)
                            || currentActiveWaypointETA.isBefore(lastTransmitActiveWp)) {

                        long etaTimeChange;

                        // Is it after?
                        if (currentActiveWaypointETA.isAfter(lastTransmitActiveWp)) {
                            etaTimeChange = currentActiveWaypointETA.minus(lastTransmitActiveWp.getMillis())
                                    .getMillis();
                        // Must be before
                        } else {
                            etaTimeChange = currentActiveWaypointETA.plus(lastTransmitActiveWp.getMillis())
                                    .getMillis();
                        }

                        if (etaTimeChange > ADAPTIVE_TIME * 1000L) {
                            LOG.debug("Broadcast based on adaptive time!");
                            broadcastIntendedRoute();
                            lastSend = new DateTime();
                        }

                        // LOG.debug("ETA has changed with " + etaTimeChange + " milliseconds");
                    }
                }

                Util.sleep(1000L);
            }
        }
    }
}
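The adaptive check above measures how far the active waypoint ETA has drifted since the last transmission, but it does so by adding or subtracting the raw epoch millis of one instant to the other DateTime and reading getMillis() back; in the else branch this appears to add the two epoch values together, giving a sum far larger than any realistic ETA change. A more direct way to express the intended absolute gap is a Duration. The sketch below is illustrative only, not part of the EPD code base, and the helper name and threshold constant are assumptions.

import org.joda.time.DateTime;
import org.joda.time.Duration;

// Hypothetical helper, not part of IntendedRouteHandler: decides whether the active
// waypoint ETA has drifted far enough from the last transmitted ETA to rebroadcast.
final class EtaDriftCheck {
    private static final long ADAPTIVE_TIME_SECONDS = 60; // illustrative threshold

    static boolean shouldRebroadcast(DateTime lastTransmitActiveWp, DateTime currentActiveWaypointEta) {
        // Duration between the two instants, regardless of which one is earlier
        Duration drift = lastTransmitActiveWp.isBefore(currentActiveWaypointEta)
                ? new Duration(lastTransmitActiveWp, currentActiveWaypointEta)
                : new Duration(currentActiveWaypointEta, lastTransmitActiveWp);
        return drift.getMillis() > ADAPTIVE_TIME_SECONDS * 1000L;
    }
}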
From source file:edu.internet2.middleware.shibboleth.idp.profile.saml1.AbstractSAML1ProfileHandler.java
License:Open Source License
/**
 * Builds a SAML assertion condition set. The following fields are set; not before, not on or after, audience
 * restrictions, and proxy restrictions.
 *
 * @param requestContext current request context
 * @param issueInstant timestamp the assertion was created
 *
 * @return constructed conditions
 */
protected Conditions buildConditions(BaseSAML1ProfileRequestContext<?, ?, ?> requestContext,
        DateTime issueInstant) {
    AbstractSAML1ProfileConfiguration profileConfig = requestContext.getProfileConfiguration();

    Conditions conditions = conditionsBuilder.buildObject();
    conditions.setNotBefore(issueInstant);
    conditions.setNotOnOrAfter(issueInstant.plus(profileConfig.getAssertionLifetime()));

    Collection<String> audiences;

    AudienceRestrictionCondition audienceRestriction = audienceRestrictionConditionBuilder.buildObject();
    conditions.getAudienceRestrictionConditions().add(audienceRestriction);

    Audience audience = audienceBuilder.buildObject();
    audience.setUri(requestContext.getInboundMessageIssuer());
    audienceRestriction.getAudiences().add(audience);

    // add other audience restrictions
    audiences = profileConfig.getAssertionAudiences();
    if (audiences != null && audiences.size() > 0) {
        for (String audienceUri : audiences) {
            audience = audienceBuilder.buildObject();
            audience.setUri(audienceUri);
            audienceRestriction.getAudiences().add(audience);
        }
    }

    return conditions;
}
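The plus call in buildConditions anchors a fixed validity window at the issue instant: NotBefore is the issue instant itself and NotOnOrAfter is the issue instant plus the configured assertion lifetime. A standalone sketch of just that window follows; the five-minute lifetime is an assumed example value, not a Shibboleth default.

import org.joda.time.DateTime;
import org.joda.time.Period;

public class AssertionWindowSketch {
    public static void main(String[] args) {
        DateTime issueInstant = new DateTime();       // when the assertion is created
        Period assertionLifetime = Period.minutes(5); // assumed lifetime for illustration

        DateTime notBefore = issueInstant;                            // valid from issuance
        DateTime notOnOrAfter = issueInstant.plus(assertionLifetime); // valid until issuance + lifetime

        System.out.println("NotBefore:    " + notBefore);
        System.out.println("NotOnOrAfter: " + notOnOrAfter);
    }
}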
From source file:edu.internet2.middleware.shibboleth.idp.profile.saml2.AbstractSAML2ProfileHandler.java
License:Open Source License
/**
 * Builds a SAML assertion condition set. The following fields are set; not before, not on or after, audience
 * restrictions, and proxy restrictions.
 *
 * @param requestContext current request context
 * @param issueInstant timestamp the assertion was created
 *
 * @return constructed conditions
 */
protected Conditions buildConditions(BaseSAML2ProfileRequestContext<?, ?, ?> requestContext,
        DateTime issueInstant) {
    AbstractSAML2ProfileConfiguration profileConfig = requestContext.getProfileConfiguration();

    Conditions conditions = conditionsBuilder.buildObject();
    conditions.setNotBefore(issueInstant);
    conditions.setNotOnOrAfter(issueInstant.plus(profileConfig.getAssertionLifetime()));

    Collection<String> audiences;

    // add audience restrictions
    AudienceRestriction audienceRestriction = audienceRestrictionBuilder.buildObject();

    // TODO we should only do this for certain outgoing bindings, not globally
    Audience audience = audienceBuilder.buildObject();
    audience.setAudienceURI(requestContext.getInboundMessageIssuer());
    audienceRestriction.getAudiences().add(audience);

    audiences = profileConfig.getAssertionAudiences();
    if (audiences != null && audiences.size() > 0) {
        for (String audienceUri : audiences) {
            audience = audienceBuilder.buildObject();
            audience.setAudienceURI(audienceUri);
            audienceRestriction.getAudiences().add(audience);
        }
    }
    conditions.getAudienceRestrictions().add(audienceRestriction);

    // add proxy restrictions
    audiences = profileConfig.getProxyAudiences();
    if (audiences != null && audiences.size() > 0) {
        ProxyRestriction proxyRestriction = proxyRestrictionBuilder.buildObject();
        for (String audienceUri : audiences) {
            audience = audienceBuilder.buildObject();
            audience.setAudienceURI(audienceUri);
            proxyRestriction.getAudiences().add(audience);
        }
        proxyRestriction.setProxyCount(profileConfig.getProxyCount());
        conditions.getConditions().add(proxyRestriction);
    }

    return conditions;
}
From source file:edu.internet2.middleware.shibboleth.idp.profile.saml2.AbstractSAML2ProfileHandler.java
License:Open Source License
/**
 * Builds the SubjectConfirmation appropriate for this request.
 *
 * @param requestContext current request context
 * @param confirmationMethod confirmation method to use for the request
 * @param issueInstant issue instant of the response
 *
 * @return the constructed subject confirmation
 */
protected SubjectConfirmation buildSubjectConfirmation(BaseSAML2ProfileRequestContext<?, ?, ?> requestContext,
        String confirmationMethod, DateTime issueInstant) {
    SubjectConfirmationData confirmationData = subjectConfirmationDataBuilder.buildObject();
    HTTPInTransport inTransport = (HTTPInTransport) requestContext.getInboundMessageTransport();
    confirmationData.setAddress(inTransport.getPeerAddress());
    confirmationData.setInResponseTo(requestContext.getInboundSAMLMessageId());
    confirmationData.setNotOnOrAfter(
            issueInstant.plus(requestContext.getProfileConfiguration().getAssertionLifetime()));

    Endpoint relyingPartyEndpoint = requestContext.getPeerEntityEndpoint();
    if (relyingPartyEndpoint != null) {
        if (relyingPartyEndpoint.getResponseLocation() != null) {
            confirmationData.setRecipient(relyingPartyEndpoint.getResponseLocation());
        } else {
            confirmationData.setRecipient(relyingPartyEndpoint.getLocation());
        }
    }

    SubjectConfirmation subjectConfirmation = subjectConfirmationBuilder.buildObject();
    subjectConfirmation.setMethod(confirmationMethod);
    subjectConfirmation.setSubjectConfirmationData(confirmationData);

    return subjectConfirmation;
}
From source file:es.jpons.persistence.TemporalPersistenceManager.java
License:Open Source License
/**
 * Function to close a vtp from another.
 *
 * @param toClose The vtp to close
 * @param newVtp The other vtp to start
 * @return A copy of the object toClose closed to the left.
 * @throws TemporalException If the closure of the vtp can not be computed.
 */
public PossibilisticVTP closeR(PossibilisticVTP toClose, PossibilisticVTP newVtp) throws TemporalException {
    if (toClose.getSide() != null && toClose.getSide().compareTo(OpenInterval.UC) == 0) {
        DateTime startmp = new DateTime(toClose.getStartMP());
        // DateTime leftmp = startmp.minus(toClose.getStartLeft());
        DateTime rightmp = startmp.plus(toClose.getStartRight());

        DateTime newmp = new DateTime(newVtp.getStartMP());
        DateTime newleft = newmp.minus(newVtp.getStartLeft());
        // DateTime newright = newmp.plus(newVtp.getStartRight());

        if (rightmp.isBefore(newleft)) {
            log.trace("Closing ending point");
            Duration d = new Duration(startmp, newmp);
            Duration d1 = new Duration(d.getMillis() / 2);
            DateTime closeMp = new DateTime(startmp);
            closeMp = closeMp.plus(d1);
            Duration left = new Duration(startmp, closeMp);
            Duration right = new Duration(closeMp, newleft);
            toClose.setEndMP(closeMp.getMillis());
            toClose.setEndLeft(left.getMillis());
            toClose.setEndRight(right.getMillis());
            toClose.setSide(null);
        } else {
            log.error("The point cannot be closed");
            throw new TemporalException("The point cannot be closed");
        }
        // DateTime lefts = startmp.plus(new Instant(newVtp.getStartLeft()));
        // if(newVtp.getStartMP()> )
    } else {
        log.error("The point is not open");
        throw new TemporalException("The point is not open");
    }
    return toClose;
}
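The closure above places the new end point halfway between the two start instants: it measures the Duration between them, halves it, and adds the half back onto the first instant with plus(ReadableDuration). A standalone sketch of that midpoint step, with illustrative dates and names:

import org.joda.time.DateTime;
import org.joda.time.Duration;

public class MidpointSketch {
    public static void main(String[] args) {
        DateTime start = new DateTime(2015, 6, 1, 0, 0);
        DateTime next = new DateTime(2015, 6, 3, 0, 0);

        Duration full = new Duration(start, next);          // the whole gap between the instants
        Duration half = new Duration(full.getMillis() / 2); // half of it, as in closeR

        DateTime midpoint = start.plus(half);               // plus(ReadableDuration) overload
        System.out.println(midpoint);                       // 2015-06-02T00:00:00.000 in the default zone
    }
}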
From source file:es.jpons.temporal.types.PossibilisticVTP.java
License:Open Source License
@Override
public String toString() {
    String result = new String();
    if (this.getSide() != null && this.getSide().compareTo(OpenInterval.FB) == 0) {
        result += " [ FB ,";
    } else {
        DateTime mp = new DateTime(this.startMP);
        DateTime left = new DateTime(mp.minus(this.startLeft));
        DateTime right = new DateTime(mp.plus(this.startRight));
        result += " [ ( " + left + " , " + mp + " , " + right + " ) ,";
    }
    if (this.getSide() != null && this.getSide().compareTo(OpenInterval.UC) == 0) {
        result += " UC ";
    } else {
        DateTime mp = new DateTime(this.endMP);
        DateTime left = new DateTime(mp.minus(this.endLeft));
        DateTime right = new DateTime(mp.plus(this.endRight));
        result += " ( " + left + " , " + mp + " , " + right + " ) ]";
    }
    return result;
}
From source file:fi.hsl.parkandride.back.prediction.PredictionDao.java
License:EUPL
private static List<Prediction> normalizeToPredictionWindow(DateTime start, List<Prediction> predictions) {
    DateTime end = start.plus(PREDICTION_WINDOW).minus(PREDICTION_RESOLUTION);
    return predictions.stream()
            // remove too fine-grained predictions
            .collect(groupByRoundedTimeKeepingNewest()) // -> Map<DateTime, Prediction>
            .values().stream()
            // normalize resolution
            .map(roundTimestampsToPredictionResolution())
            // interpolate too coarse-grained predictions
            .sorted(Comparator.comparing(p -> p.timestamp))
            .map(Collections::singletonList) // 1. wrap values in immutable singleton lists
            .reduce(new ArrayList<>(), linearInterpolation()).stream() // 2. mutable ArrayList as accumulator
            // normalize range
            .filter(isWithin(start, end)) // after interpolation because of PredictionDaoTest.does_linear_interpolation_also_between_values_outside_the_prediction_window
            .collect(toList());
}
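Here plus and minus delimit the prediction window: the last covered timestamp is start + PREDICTION_WINDOW - PREDICTION_RESOLUTION. The sketch below shows the same arithmetic in isolation; the 24-hour window and 5-minute resolution are assumed stand-ins for the project's constants, not their actual values.

import org.joda.time.DateTime;
import org.joda.time.Period;

public class PredictionWindowSketch {
    public static void main(String[] args) {
        // Assumed stand-ins for the project's PREDICTION_WINDOW and PREDICTION_RESOLUTION constants
        Period predictionWindow = Period.hours(24);
        Period predictionResolution = Period.minutes(5);

        DateTime start = new DateTime(2015, 3, 1, 10, 0);
        // Last timestamp covered by the window: start + window - one resolution step
        DateTime end = start.plus(predictionWindow).minus(predictionResolution);

        System.out.println(end); // 2015-03-02T09:55:00.000 in the default zone
    }
}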
From source file:fi.hsl.parkandride.back.PredictionDao.java
License:EUPL
@TransactionalWrite
@Override
public void updatePredictions(PredictionBatch pb) {
    validationService.validate(pb);
    DateTime start = toPredictionResolution(pb.sourceTimestamp);
    DateTime end = start.plus(PREDICTION_WINDOW).minus(PREDICTION_RESOLUTION);

    SQLUpdateClause update = queryFactory.update(qPrediction)
            .where(qPrediction.facilityId.eq(pb.utilizationKey.facilityId),
                    qPrediction.capacityType.eq(pb.utilizationKey.capacityType),
                    qPrediction.usage.eq(pb.utilizationKey.usage))
            .set(qPrediction.start, start);

    pb.predictions.stream()
            .sorted(Comparator.comparing(p -> p.timestamp))
            .map(roundTimestampsToPredictionResolution())
            .collect(groupByTimeKeepingNewest()) // -> Map<DateTime, Prediction>
            .values().stream()
            .map(Collections::singletonList) // 1. wrap values in immutable singleton lists
            .reduce(new ArrayList<>(), linearInterpolation()).stream() // 2. mutable ArrayList as accumulator
            .filter(isWithin(start, end)) // after interpolation because of PredictionDaoTest.does_linear_interpolation_also_between_values_outside_the_prediction_window
            .forEach(p -> update.set(spacesAvailableAt(p.timestamp), p.spacesAvailable));

    long updatedRows = update.execute();
    if (updatedRows == 0) {
        insertBlankPredictionRow(pb);
        updatePredictions(pb);
    }
}
From source file:fi.hsl.parkandride.core.domain.prediction.AverageOfPreviousWeeksPredictor.java
License:EUPL
@Override
public List<Prediction> predict(PredictorState state, UtilizationHistory history, int maxCapacity) {
    Optional<Utilization> latest = history.getLatest();
    if (!latest.isPresent()) {
        return Collections.emptyList();
    }
    DateTime now = state.latestUtilization = latest.get().timestamp;

    List<List<Prediction>> groupedByWeek = Stream.of(Weeks.weeks(1), Weeks.weeks(2), Weeks.weeks(3))
            .map(offset -> {
                DateTime start = now.minus(offset);
                DateTime end = start.plus(PredictionRepository.PREDICTION_WINDOW);
                List<Utilization> utilizations = history.getRange(start, end);
                return utilizations.stream()
                        .map(u -> new Prediction(u.timestamp.plus(offset), u.spacesAvailable))
                        .collect(Collectors.toList());
            })
            .collect(Collectors.toList());

    List<List<Prediction>> groupedByTimeOfDay = ListUtil.transpose(groupedByWeek);
    return groupedByTimeOfDay.stream().map(this::reduce).collect(Collectors.toList());
}
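The key plus calls here shift history forward by whole weeks: u.timestamp.plus(offset) projects a utilization sample taken one, two, or three weeks ago onto the corresponding instant in the upcoming window (Weeks implements ReadablePeriod). A minimal sketch of that projection with illustrative values:

import org.joda.time.DateTime;
import org.joda.time.Weeks;

public class WeekShiftSketch {
    public static void main(String[] args) {
        Weeks offset = Weeks.weeks(1);                             // Weeks is a ReadablePeriod
        DateTime lastWeekSample = new DateTime(2015, 9, 7, 8, 30); // a sample from one week back

        // Project a measurement taken one week ago onto the same time of day this week
        DateTime projected = lastWeekSample.plus(offset);
        System.out.println(projected); // 2015-09-14T08:30:00.000 in the default zone
    }
}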
From source file:fi.hsl.parkandride.core.domain.prediction.RelativizedAverageOfPreviousWeeksPredictor.java
License:EUPL
@Override
public List<Prediction> predict(PredictorState state, UtilizationHistory history, int maxCapacity) {
    Optional<Utilization> latest = history.getLatest();
    if (!latest.isPresent()) {
        return Collections.emptyList();
    }
    DateTime now = state.latestUtilization = latest.get().timestamp;
    final UtilizationHistory inMemoryHistory = new UtilizationHistoryList(
            history.getRange(now.minusWeeks(3).minus(LOOKBACK_MINUTES), now));

    List<List<Prediction>> groupedByWeek = LOOKBACK_PERIODS.stream().map(offset -> {
        DateTime start = now.minus(offset);
        DateTime end = start.plus(PredictionRepository.PREDICTION_WINDOW);
        Optional<Utilization> utilizationAtReferenceTime = inMemoryHistory.getAt(start);
        if (!utilizationAtReferenceTime.isPresent()) {
            return null;
        }
        Integer spacesAvailableAtReferenceTime = utilizationAtReferenceTime.get().spacesAvailable;
        List<Utilization> utilizations = inMemoryHistory.getRange(start, end);
        return utilizations.stream()
                .map(u -> new Prediction(u.timestamp.plus(offset),
                        u.spacesAvailable - spacesAvailableAtReferenceTime))
                .collect(Collectors.toList());
    }).filter(Objects::nonNull).collect(Collectors.toList());

    List<List<Prediction>> groupedByTimeOfDay = ListUtil.transpose(groupedByWeek);
    return groupedByTimeOfDay.stream()
            .map(predictions -> reduce(predictions, latest.get().spacesAvailable,
                    getUtilizationMultiplier(now, inMemoryHistory), maxCapacity))
            .collect(Collectors.toList());
}