List of usage examples for `org.joda.time.DateTime#plusSeconds(int)`
public DateTime plusSeconds(int seconds)
From source file:org.fenixedu.academic.domain.accounting.Event.java
License:Open Source License
public final void recalculateState(final DateTime whenRegistered) { if (getEventState() == EventState.CANCELLED) { return;//from w w w.j a v a 2 s . c om } internalRecalculateState(whenRegistered.plusSeconds(1)); }
From source file:org.fenixedu.academic.domain.accounting.Event.java
License:Open Source License
public void exempt(Person responsible, EventExemptionJustificationType justificationType, String justification) {/*from w ww. j ava 2 s . c o m*/ DateTime when = new DateTime().minusSeconds(2); DebtInterestCalculator debtInterestCalculator = getDebtInterestCalculator(when); Money dueInterestAmount = new Money(debtInterestCalculator.getDueInterestAmount()); Money dueFineAmount = new Money(debtInterestCalculator.getDueFineAmount()); Money dueAmount = new Money(debtInterestCalculator.getDueAmount()); if (dueInterestAmount.isPositive()) { FixedAmountInterestExemption fixedAmountInterestExemption = new FixedAmountInterestExemption(this, responsible, dueInterestAmount, justificationType, new DateTime(), justification); fixedAmountInterestExemption.setWhenCreated(when); when = when.plusSeconds(1); } if (dueFineAmount.isPositive()) { FixedAmountFineExemption fixedAmountFineExemption = new FixedAmountFineExemption(this, responsible, dueFineAmount, justificationType, new DateTime(), justification); fixedAmountFineExemption.setWhenCreated(when); when = when.plusSeconds(1); } if (dueAmount.isPositive()) { EventExemption eventExemption = new EventExemption(this, responsible, dueAmount, justificationType, new DateTime(), justification); eventExemption.setWhenCreated(when); } }
From source file:org.geomesa.QuickStart.java
License:Apache License
static FeatureCollection createNewFeatures(SimpleFeatureType simpleFeatureType, int numNewFeatures) { DefaultFeatureCollection featureCollection = new DefaultFeatureCollection(); String id;/*from w w w. j a va 2 s . c o m*/ Object[] NO_VALUES = {}; String[] PEOPLE_NAMES = { "Addams", "Bierce", "Clemens" }; Long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L; Random random = new Random(5771); DateTime MIN_DATE = new DateTime(2014, 1, 1, 0, 0, 0, DateTimeZone.forID("UTC")); Double MIN_X = -78.0; Double MIN_Y = -39.0; Double DX = 2.0; Double DY = 2.0; for (int i = 0; i < numNewFeatures; i++) { // create the new (unique) identifier and empty feature shell id = "Observation." + Integer.toString(i); SimpleFeature simpleFeature = SimpleFeatureBuilder.build(simpleFeatureType, NO_VALUES, id); // be sure to tell GeoTools explicitly that you want to use the ID you provided simpleFeature.getUserData().put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE); // populate the new feature's attributes // string value simpleFeature.setAttribute("Who", PEOPLE_NAMES[i % PEOPLE_NAMES.length]); // long value simpleFeature.setAttribute("What", i); // location: construct a random point within a 2-degree-per-side square double x = MIN_X + random.nextDouble() * DX; double y = MIN_Y + random.nextDouble() * DY; Geometry geometry = WKTUtils$.MODULE$.read("POINT(" + x + " " + y + ")"); // date-time: construct a random instant within a year simpleFeature.setAttribute("Where", geometry); DateTime dateTime = MIN_DATE.plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)); simpleFeature.setAttribute("When", dateTime.toDate()); // another string value // "Why"; left empty, showing that not all attributes need values // accumulate this new feature in the collection featureCollection.add(simpleFeature); } return featureCollection; }
From source file:org.jaggeryjs.modules.sso.common.util.Util.java
License:Open Source License
/** * Validates the 'Not Before' and 'Not On Or After' conditions of the SAML Assertion * * @param resp SAML Response/* w w w . j a v a 2s .com*/ */ public static boolean validateAssertionValidityPeriod(Response resp, Properties prop) { Assertion assertion; assertion = retrieveAssertion(resp); if (assertion == null) { log.error("SAML Assertion not found in the Response"); return false; } DateTime validFrom = assertion.getConditions().getNotBefore(); DateTime validTill = assertion.getConditions().getNotOnOrAfter(); int timeStampSkewInSeconds = getTimeStampSkewInSeconds(prop); if (validFrom != null && validFrom.minusSeconds(timeStampSkewInSeconds).isAfterNow()) { log.error("Failed to meet SAML Assertion Condition 'Not Before'"); return false; } if (validTill != null && validTill.plusSeconds(timeStampSkewInSeconds).isBeforeNow()) { log.error("Failed to meet SAML Assertion Condition 'Not On Or After'"); return false; } if (validFrom != null && validTill != null && validFrom.isAfter(validTill)) { log.error( "SAML Assertion Condition 'Not Before' must be less than the " + "value of 'Not On Or After'"); return false; } return true; }
From source file:org.jasig.portal.portlet.dao.jpa.JpaPortletCookieDaoImpl.java
License:Apache License
/**
 * Purges expired cookie rows in four passes: (1) bulk-delete portlet cookies whose own
 * expiration is in the past, (2) individually remove portlet cookies whose parent portal
 * cookie has expired, (3) bulk-delete expired portal cookies, and (4) bulk-delete "empty"
 * portal cookies using a shifted cutoff derived from {@code maxAge}.
 *
 * @param maxAge maximum cookie age in seconds, used to derive the empty-cookie cutoff
 */
@Override
@PortalTransactional
public void purgeExpiredCookies(int maxAge) {
    final DateTime now = DateTime.now();
    logger.debug("begin portlet cookie expiration");
    final EntityManager entityManager = this.getEntityManager();
    // Pass 1: bulk-delete portlet cookies already expired at "now".
    final Query deletePortletCookieQuery = entityManager.createQuery(this.deletePortletCookieQueryString);
    deletePortletCookieQuery.setParameter(this.nowParameter.getName(), now);
    final int deletedPortletCookies = deletePortletCookieQuery.executeUpdate();
    logger.debug("finished purging {} portlet cookies with expiration before {}", deletedPortletCookies, now);
    // Pass 2: portlet cookies whose PARENT portal cookie expired are loaded and removed
    // one by one (entity removal rather than a bulk delete).
    final TypedQuery<PortletCookieImpl> expiredByParentCookiesQuery = this
            .createQuery(findExpiredByParentPortletCookiesQuery);
    expiredByParentCookiesQuery.setParameter(this.nowParameter.getName(), now);
    final List<PortletCookieImpl> indirectlyExpiredCookies = expiredByParentCookiesQuery.getResultList();
    for (final PortletCookieImpl portletCookieImpl : indirectlyExpiredCookies) {
        entityManager.remove(portletCookieImpl);
    }
    logger.debug("finished purging {} portlet cookies with parent expiration before {}",
            indirectlyExpiredCookies.size(), now);
    logger.debug("begin portal cookie expiration");
    // Pass 3: bulk-delete portal cookies already expired at "now".
    final Query deletePortalCookieQuery = entityManager.createQuery(this.deletePortalCookieQueryString);
    deletePortalCookieQuery.setParameter(this.nowParameter.getName(), now);
    final int deletedPortalCookies = deletePortalCookieQuery.executeUpdate();
    logger.debug("finished purging {} portal cookies with expiration before {}", deletedPortalCookies, now);
    // Pass 4: "empty" portal cookies are purged against a shifted cutoff.
    final Query deleteEmptyPortalCookieQuery = entityManager
            .createQuery(this.deleteEmptyPortalCookieQueryString);
    // Add the maxAge to now and then subtract the emptyCookieMaxAge.
    // For example (now + 1 year) - 1 day == the empty-cookie expiration date.
    final DateTime emptyExpiration = now.plusSeconds(maxAge).minusSeconds(emptyCookieMaxAge);
    deleteEmptyPortalCookieQuery.setParameter(this.nowParameter.getName(), emptyExpiration);
    final int deletedEmptyPortalCookies = deleteEmptyPortalCookieQuery.executeUpdate();
    logger.debug("finished purging {} empty portal cookies with expiration before {}",
            deletedEmptyPortalCookies, emptyExpiration);
}
From source file:org.jgrasstools.hortonmachine.modules.hydrogeomorphology.peakflow.OmsPeakflow.java
License:Open Source License
/**
 * Main OmsPeakflow execution: reads the rescaled superficial (and optionally
 * subsuperficial) distance rasters, derives the width functions, configures the
 * parameter/effects boxes, and computes the discharge series either in statistic
 * mode (IDF parameters pA/pN) or with real rainfall data. Results are written to
 * {@code outDischarge}, keyed by timestamp.
 *
 * @throws Exception if inputs are missing/inconsistent or a downstream computation fails
 */
@Execute
public void process() throws Exception {
    checkNull(inRescaledsup);
    // Grid geometry of the superficial raster drives the whole computation.
    HashMap<String, Double> regionMap = CoverageUtilities.getRegionParamsFromGridCoverage(inRescaledsup);
    cols = regionMap.get(CoverageUtilities.COLS).intValue();
    rows = regionMap.get(CoverageUtilities.ROWS).intValue();
    xRes = regionMap.get(CoverageUtilities.XRES);
    yRes = regionMap.get(CoverageUtilities.YRES);
    RenderedImage supRescaledRI = inRescaledsup.getRenderedImage();
    WritableRaster supRescaledWR = CoverageUtilities.renderedImage2WritableRaster(supRescaledRI, false);
    WritableRaster subRescaledWR = null;
    if (inRescaledsub != null) {
        RenderedImage subRescaledRI = inRescaledsub.getRenderedImage();
        subRescaledWR = CoverageUtilities.renderedImage2WritableRaster(subRescaledRI, false);
    }
    // Saturated-area handling: either a topindex map or a saturation map must be supplied.
    if (inTopindex != null) {
        processWithTopIndex(supRescaledWR, subRescaledWR);
    } else if (inSat != null) {
        processWithSaturation(inSat, supRescaledWR, subRescaledWR);
    } else {
        throw new ModelsIllegalargumentException(
                "At least one of the topindex or the saturation map have to be available to proceed.", this);
    }
    // Build the width functions (cumulated distance distributions) for sup and, if present, sub.
    GridCoverage2D widthfunctionSupCoverage = CoverageUtilities.buildCoverage("sup", supRescaledWR, regionMap,
            inRescaledsup.getCoordinateReferenceSystem());
    double[][] widthfunctionSupCb = doCb(widthfunctionSupCoverage);
    double[][] widthfunctionSubCb = null;
    if (inRescaledsub != null) {
        GridCoverage2D widthfunctionSubCoverage = CoverageUtilities.buildCoverage("sub", subRescaledWR, regionMap,
                inRescaledsup.getCoordinateReferenceSystem());
        widthfunctionSubCb = doCb(widthfunctionSubCoverage);
    }
    setSuperficialWidthFunction(widthfunctionSupCb);
    if (inRescaledsub != null) {
        setSubSuperficialAmplitude(widthfunctionSubCb);
    }
    // Decide the run mode from the supplied parameters:
    // statistic mode needs the IDF parameters pA/pN; real mode needs rainfall data.
    if (pA != -1 && pN != -1 && widthfunctionSupCb != null && pCelerity != -1 && pDiffusion != -1) {
        pm.message("OmsPeakflow launched in statistic mode...");
        isStatistics = true;
        isReal = false;
    } else if (widthfunctionSupCb != null && pCelerity != -1 && pDiffusion != -1 && inRainfall != null) {
        pm.message("OmsPeakflow launched with real rain...");
        isStatistics = false;
        isReal = true;
    } else {
        throw new ModelsIllegalargumentException(
                "Problems occurred in parsing the command arguments. Please check your arguments.", this);
    }
    // The internal timestep is always 1 second.
    double timestep = 1f;
    // /*
    //  * Calculate the tcorr as the one calculated for the superficial discharge if we have
    //  * only Superficial flow, an for the subsuperficial discharge otherwise
    //  */
    // double tcorr = 0f;
    // if (timeSubArray != null) {
    //     tcorr = timeSubArray[timeSubArray.length - 1] / channelCelerity;
    // } else {
    //     tcorr = timeSupArray[timeSupArray.length - 1] / channelCelerity;
    // }
    /*
     * prepare all the needed parameters by the core algorithms
     */
    /*
     * this needs to be integrated into the interface
     */
    parameterBox.setN_idf(pN);
    parameterBox.setA_idf(pA);
    parameterBox.setArea(areaSup);
    parameterBox.setTimestep(timestep);
    parameterBox.setDiffusionparameter(pDiffusion);
    parameterBox.setVc(pCelerity);
    parameterBox.setDelta(deltaSup);
    parameterBox.setXres(xRes);
    parameterBox.setYres(yRes);
    parameterBox.setNpixel(pixelTotalSup);
    parameterBox.setSize(widthfunctionSupCb.length);
    parameterBox.setTime(timeSupArray);
    parameterBox.setPxl(pixelSupArray);
    effectsBox.setAmpi(widthFunctionSuperficial);
    // Subsuperficial parameters only when a subsuperficial raster was processed.
    if (timeSubArray != null) {
        parameterBox.setSubsuperficial(true);
        parameterBox.setDelta_sub(deltaSub);
        parameterBox.setNpixel_sub(pixelTotalSub);
        parameterBox.setTime_sub(timeSubArray);
        parameterBox.setArea_sub(areaSub);
        parameterBox.setPxl_sub(pixelSubArray);
        parameterBox.setResid_time(residentTime);
        effectsBox.setAmpi_sub(widthFunctionSubSuperficial);
        effectsBox.setAmpi_help_sub(widthFunctionSubSuperficialHelper);
    }
    // if (isScs) {
    //     parameterBox.setVcvv(celerityRatio);
    //     parameterBox.setBasinstate(basinStatus);
    //     parameterBox.setPhi(phi);
    //     parameterBox.setScs(true);
    // }
    effectsBox.setRainDataExists(inRainfall != null ? true : false);
    outDischarge = new LinkedHashMap<DateTime, double[]>();
    if (isStatistics) {
        // Statistic mode: synthetic rainfall from the IDF curve; timestamps are offsets
        // from an arbitrary "now" anchor.
        DateTime dummyDate = new DateTime();
        IUHCalculator iuhC = null;
        // NOTE(review): pDiffusion < 10 selects the kinematic IUH — threshold semantics
        // inherited from the model; confirm against OmsPeakflow documentation.
        if (pDiffusion < 10) {
            pm.message("IUH Kinematic...");
            iuhC = new IUHKinematic(effectsBox, parameterBox, pm);
        } else {
            pm.message("IUH Diffusion...");
            iuhC = new IUHDiffusion(effectsBox, parameterBox, pm);
        }
        pm.message("Statistic Jeff...");
        StatisticJeff jeffC = new StatisticJeff(parameterBox, iuhC.getTpMax(), pm);
        pm.message("Q calculation...");
        QStatistic qtotal = new QStatistic(parameterBox, iuhC, jeffC, pm);
        double[][] calculateQ = qtotal.calculateQ();
        pm.message("Maximum rainfall duration: " + qtotal.getTpMax());
        pm.message("Maximum discharge value: " + qtotal.calculateQmax());
        // Emit every outputStepArg-th sample; column 0 is the time offset in seconds,
        // column 1 the discharge value.
        for (int i = 0; i < calculateQ.length; i++) {
            if (i % outputStepArg != 0)
                continue;
            DateTime tmpDate = dummyDate.plusSeconds((int) calculateQ[i][0]);
            double[] value = new double[1];
            value[0] = calculateQ[i][1];
            outDischarge.put(tmpDate, value);
        }
    } else if (isReal) {
        // Real-rain mode: rainfall record drives the computation; timestamps are offsets
        // from the first rainfall date.
        IUHCalculator iuhC = null;
        if (pDiffusion < 10) {
            pm.message("IUH Kinematic...");
            iuhC = new IUHKinematic(effectsBox, parameterBox, pm);
        } else {
            pm.message("IUH Diffusion...");
            iuhC = new IUHDiffusion(effectsBox, parameterBox, pm);
        }
        pm.message("Read rain data...");
        pm.message("Real Jeff...");
        RealJeff jeffC = new RealJeff(inRainfall);
        pm.message("Q calculation...");
        QReal qtotal = new QReal(parameterBox, iuhC, jeffC, pm);
        double[][] calculateQ = qtotal.calculateQ();
        // pm.message("Maximum rainfall duration: " + qtotal.getTpMax());
        // pm.message("Maximum discharge value: " + qtotal.calculateQmax());
        DateTime firstDate = jeffC.getFirstDate();
        for (int i = 0; i < calculateQ.length; i++) {
            if (i % outputStepArg != 0)
                continue;
            DateTime tmpDate = firstDate.plusSeconds((int) calculateQ[i][0]);
            double[] value = new double[1];
            value[0] = calculateQ[i][1];
            outDischarge.put(tmpDate, value);
        }
    } else {
        throw new ModelsIllegalargumentException("Statistic and real rain are implemented only.",
                this.getClass().getSimpleName());
    }
    /*
     * here two ways can be taken 1) standard peakflow theory 2) peakflow hybrid with SCS
     */
    // if (isStatistics || isReal) {
    //     if (!peakflowStandard()) {
    //         // throw some
    //     }
    // } else if (isScs) {
    //     if (!peakflowScs()) {
    //         // throw some
    //     }
    // }
}
From source file:org.jmxtrans.embedded.samples.graphite.BasicAppMetricsSimulator.java
License:Open Source License
public void generateLoad(GraphiteDataInjector graphiteDataInjector) throws Exception { int dataPointIntervalInSeconds = 30; int scaleFactor = 2; TimeSeries timeSeries = new TimeSeries("www.requestsCounter"); DateTime now = new DateTime(); DateTime end = now.plusDays(2);//from w ww . jav a 2s. c o m DateTime date = now.minusDays(1); int integratedValue = 0; while (date.isBefore(end)) { integratedValue += dataPointIntervalInSeconds * scaleFactor; timeSeries.add(new Second(date.toDate()), integratedValue); date = date.plusSeconds(dataPointIntervalInSeconds); } graphiteDataInjector.exportMetrics(timeSeries); }
From source file:org.jmxtrans.embedded.samples.graphite.CocktailAppMetricsSimulator.java
License:Open Source License
public void generateLoad(GraphiteDataInjector graphiteDataInjector) throws Exception { TimeSeries rawIntegratedTimeSeries = new TimeSeries("sales.integrated.raw"); TimeSeries rawTimeSeries = new TimeSeries("sales.raw"); DateTime now = new DateTime(); DateTime end = now.plusDays(3);//from ww w .j ava 2 s .c o m DateTime date = now.minusDays(15); DateTime twoDaysAfterBegin = date.plusDays(2); double serverFairness = 1.05; int integratedValue = 0; MathContext mathContext = new MathContext(1, RoundingMode.CEILING); int randomFactor = 0; while (date.isBefore(end)) { if (rawIntegratedTimeSeries.getItemCount() % 120 == 0) { randomFactor = 10 + random.nextInt(2); } int weekGrowthFactor = 6 - (now.getWeekOfWeekyear() - date.getWeekOfWeekyear()); int value = new BigDecimal(randomFactor) // random factor .multiply(new BigDecimal(10)) // go to cents of USD .multiply(new BigDecimal(weekGrowthFactor)) .multiply(new BigDecimal(hourlyDistribution[date.getHourOfDay()])) .multiply(new BigDecimal(weeklyDistribution[date.getDayOfWeek()])) .divide(new BigDecimal(20), mathContext).intValue(); // split hourly value in minutes integratedValue += value; for (int i1 = 0; i1 < 3; i1++) { Second period = new Second(date.toDate()); rawTimeSeries.add(period, value); rawIntegratedTimeSeries.add(period, integratedValue); date = date.plusSeconds(30); } } rawIntegratedTimeSeries = MovingAverage.createMovingAverage(rawIntegratedTimeSeries, rawIntegratedTimeSeries.getKey().toString(), 60 * 7, 0); rawTimeSeries = MovingAverage.createMovingAverage(rawTimeSeries, rawTimeSeries.getKey().toString(), 60 * 7, 0); // SALES - REVENUE TimeSeries salesRevenueInCentsCounter = new TimeSeries("sales.revenueInCentsCounter"); TimeSeries salesRevenueInCentsCounterSrv1 = new TimeSeries("srv1.sales.revenueInCentsCounter"); TimeSeries salesRevenueInCentsCounterSrv2 = new TimeSeries("srv2.sales.revenueInCentsCounter"); int resetValue2ToZeroOffset = 0; // reset value 2 after 3 days of metrics for (int i = 0; i < 
rawIntegratedTimeSeries.getItemCount(); i++) { TimeSeriesDataItem dataItem = rawIntegratedTimeSeries.getDataItem(i); int value = dataItem.getValue().intValue(); // value1 is 5% higher to value2 due to a 'weirdness' in the load balancing int value1 = Math.min((int) (value * serverFairness / 2), value); { // simulate srv2 restart DateTime currentDate = new DateTime(dataItem.getPeriod().getStart()); boolean shouldResetValue2 = resetValue2ToZeroOffset == 0 && currentDate.getDayOfYear() == twoDaysAfterBegin.getDayOfYear(); if (shouldResetValue2) { resetValue2ToZeroOffset = value - value1; System.out.println("reset value2 of " + resetValue2ToZeroOffset + " at " + currentDate); } } int value2 = value - value1 - resetValue2ToZeroOffset; salesRevenueInCentsCounter.add(dataItem.getPeriod(), value); salesRevenueInCentsCounterSrv1.add(dataItem.getPeriod(), value1); salesRevenueInCentsCounterSrv2.add(dataItem.getPeriod(), value2); } graphiteDataInjector.exportMetrics(salesRevenueInCentsCounter, salesRevenueInCentsCounterSrv1, salesRevenueInCentsCounterSrv2); // SALES - ITEMS TimeSeries salesItemsCounter = new TimeSeries("sales.itemsCounter"); TimeSeries salesItemsCounterSrv1 = new TimeSeries("srv1.sales.itemsCounter"); TimeSeries salesItemsCounterSrv2 = new TimeSeries("srv2.sales.itemsCounter"); for (int i = 0; i < rawIntegratedTimeSeries.getItemCount(); i++) { RegularTimePeriod period = salesRevenueInCentsCounter.getDataItem(i).getPeriod(); int ordersPriceInCents1 = salesRevenueInCentsCounterSrv1.getDataItem(i).getValue().intValue(); int ordersPriceInCents2 = salesRevenueInCentsCounterSrv2.getDataItem(i).getValue().intValue(); int value1 = ordersPriceInCents1 / 600; int value2 = ordersPriceInCents2 / 600; salesItemsCounter.add(period, value1 + value2); salesItemsCounterSrv1.add(period, value1); salesItemsCounterSrv2.add(period, value2); } graphiteDataInjector.exportMetrics(salesItemsCounter, salesItemsCounterSrv1, salesItemsCounterSrv2); // WEBSITE - VISITORS TimeSeries 
newVisitorsCounterSrv1 = new TimeSeries("srv1.website.visitors.newVisitorsCounter"); TimeSeries newVisitorsCounterSrv2 = new TimeSeries("srv1.website.visitors.newVisitorsCounter"); TimeSeries activeVisitorsGaugeSrv1 = new TimeSeries("srv1.website.visitors.activeGauge"); TimeSeries activeVisitorsGaugeSrv2 = new TimeSeries("srv2.website.visitors.activeGauge"); int integratedValue1 = 0; int integratedValue2 = 0; float activeVisitorsFactor = 1; for (int i = 0; i < rawTimeSeries.getItemCount(); i++) { TimeSeriesDataItem dataItem = rawTimeSeries.getDataItem(i); RegularTimePeriod period = dataItem.getPeriod(); int value = dataItem.getValue().intValue() / 20; integratedValue += value; int value1 = Math.min((int) (value * serverFairness / 2), value); integratedValue1 += value1; int value2 = value - value1; integratedValue2 += value2; newVisitorsCounterSrv1.add(period, integratedValue1); newVisitorsCounterSrv2.add(period, integratedValue2); if (i % 120 == 0) { activeVisitorsFactor = (10 + random.nextInt(3)) / 10; } activeVisitorsGaugeSrv1.add(period, Math.floor(value1 * activeVisitorsFactor)); activeVisitorsGaugeSrv2.add(period, Math.floor(value2 * activeVisitorsFactor)); } graphiteDataInjector.exportMetrics(newVisitorsCounterSrv1, newVisitorsCounterSrv2, activeVisitorsGaugeSrv1, activeVisitorsGaugeSrv2); }
From source file:org.jruby.ext.date.RubyDate.java
License:LGPL
private void adjustWithDayFraction(ThreadContext context, DateTime dt, final long[] rest) { final RubyFixnum zero = RubyFixnum.zero(context.runtime); int ival;// w w w .j a va 2 s . c o m ival = RubyDateTime.getHour(context, zero, rest); dt = dt.plusHours(ival); if (rest[0] != 0) { ival = RubyDateTime.getMinute(context, zero, rest); dt = dt.plusMinutes(ival); if (rest[0] != 0) { ival = RubyDateTime.getSecond(context, zero, rest); dt = dt.plusSeconds(ival); final long r0 = rest[0], r1 = rest[1]; if (r0 != 0) { long millis = (1000 * r0) / r1; dt = dt.plusMillis((int) millis); subMillisNum = ((1000 * r0) - (millis * r1)); subMillisDen = r1; normalizeSubMillis(); } } } this.dt = dt; }
From source file:org.kairosdb.util.Util.java
License:Apache License
/** Computes the duration of the sampling (value * unit) starting at timestamp. //from www . j ava2s .c om @param timestamp unix timestamp of the start time. @return the duration of the sampling in millisecond. */ public static long getSamplingDuration(long timestamp, Sampling sampling, DateTimeZone timeZone) { long ret = sampling.getValue(); DateTime dt = new DateTime(timestamp, timeZone); switch (sampling.getUnit()) { case YEARS: ret = new org.joda.time.Duration(dt, dt.plusYears((int) sampling.getValue())).getMillis(); break; case MONTHS: ret = new org.joda.time.Duration(dt, dt.plusMonths((int) sampling.getValue())).getMillis(); break; case WEEKS: ret = new org.joda.time.Duration(dt, dt.plusWeeks((int) sampling.getValue())).getMillis(); break; case DAYS: ret = new org.joda.time.Duration(dt, dt.plusDays((int) sampling.getValue())).getMillis(); break; case HOURS: ret = new org.joda.time.Duration(dt, dt.plusHours((int) sampling.getValue())).getMillis(); break; case MINUTES: ret = new org.joda.time.Duration(dt, dt.plusMinutes((int) sampling.getValue())).getMillis(); break; case SECONDS: ret = new org.joda.time.Duration(dt, dt.plusSeconds((int) sampling.getValue())).getMillis(); break; case MILLISECONDS: ret = (long) sampling.getValue(); break; } return ret; }