List of usage examples for org.joda.time DateTime getMillis
public long getMillis()
From source file:com.ning.arecibo.collector.persistent.EventReplayingLoadGenerator.java
License:Apache License
private DateTime getAdjustedSampleTime(final DateTime timestamp) { if (firstReplayEventTimestamp == null) { firstReplayEventTimestamp = timestamp; }/*from ww w.jav a 2 s .com*/ final int addend = (int) (timestamp.getMillis() - firstReplayEventTimestamp.getMillis()); //log.info("In processSamples(), timestamp %s, replayIterationStartTime %s, firstReplayEventTimestamp %s, addend %d", // timestamp.toString(), replayIterationStartTime.toString(), firstReplayEventTimestamp.toString(), addend); return replayIterationStartTime.plusMillis(addend); }
From source file:com.ning.arecibo.util.timeline.DecimatingSampleFilter.java
License:Apache License
@Override
public void processOneSample(DateTime time, SampleOpcode opcode, Object value) {
    // Decimates the incoming sample stream down to roughly outputCount output
    // samples, emitting either a peak-picked sample or an average per output
    // window depending on decimationMode.
    if (!initialized) {
        // Estimate the sampleCount, assuming that there are no gaps between this
        // sample and the end time, then size the circular history accordingly.
        final long adjustedEndMillis = Math.min(getEndTime().getMillis(), System.currentTimeMillis());
        final long millisTilEnd = adjustedEndMillis - time.getMillis();
        final int sampleCount = Math.max(outputCount, (int) (millisTilEnd / pollingInterval.getMillis()));
        initializeFilterHistory(sampleCount);
    }
    sampleNumber++;
    final SampleState sampleState = new SampleState(opcode, value, ScalarSample.getDoubleValue(opcode, value), time);
    // NOTE(review): the write index is modulo filterHistory.length but all reads
    // below are modulo ceilSamplesPerOutput — these only line up if the buffer
    // length is (a multiple of) ceilSamplesPerOutput; TODO confirm
    // initializeFilterHistory() guarantees that.
    final int historyIndex = sampleNumber % filterHistory.length;
    filterHistory[historyIndex] = sampleState;
    // Accumulate the fractional number of outputs owed; emit one output sample
    // each time the accumulator crosses 1.0.
    runningSum += outputsPerSample;
    if (runningSum >= 1.0) {
        runningSum -= 1.0;
        if (opcode == SampleOpcode.STRING) {
            // We don't have interpolation for strings, so just output this one.
            sampleConsumer.consumeSample(sampleNumber, opcode, value, time);
        } else {
            // Time to output a sample - compare the sum of the first samples with
            // the sum of the last samples making up the output, choosing the lowest
            // value if the first samples are larger, and the highest value if the
            // last samples are larger.
            final int samplesInAverage = ceilSamplesPerOutput > 5 ?
                    ceilSamplesPerOutput * 2 / 3 : Math.max(1, ceilSamplesPerOutput - 1);
            final int samplesLeftOut = ceilSamplesPerOutput - samplesInAverage;
            // NOTE(review): Double.MIN_VALUE is the smallest *positive* double, so
            // max never updates if every sample is negative. Presumably sample
            // values are non-negative here — TODO confirm (otherwise the seeds
            // should be -/+ Infinity).
            double max = Double.MIN_VALUE;
            int maxIndex = 0;
            int minIndex = 0;
            double min = Double.MAX_VALUE;
            double sum = 0.0;
            double firstSum = 0.0;
            double lastSum = 0.0;
            // Walk the last ceilSamplesPerOutput history slots (newest first),
            // tracking min/max and the partial sums of the trailing (lastSum) and
            // leading (firstSum) sub-windows.
            for (int i = 0; i < ceilSamplesPerOutput; i++) {
                final int index = (sampleNumber + ceilSamplesPerOutput - i) % ceilSamplesPerOutput;
                final SampleState sample = filterHistory[index];
                if (sample != null) {
                    final double doubleValue = sample.getDoubleValue();
                    sum += doubleValue;
                    if (doubleValue > max) {
                        max = doubleValue;
                        maxIndex = index;
                    }
                    if (doubleValue < min) {
                        min = doubleValue;
                        minIndex = index;
                    }
                    if (i < samplesInAverage) {
                        lastSum += doubleValue;
                    }
                    if (i >= samplesLeftOut) {
                        firstSum += doubleValue;
                    }
                }
            }
            final SampleState firstSample = filterHistory[(sampleNumber + ceilSamplesPerOutput
                    - (ceilSamplesPerOutput - 1)) % ceilSamplesPerOutput];
            final SampleState lastSample = filterHistory[sampleNumber % ceilSamplesPerOutput];
            // Midpoint of the first and last sample times. NOTE(review): when
            // firstSample is null this assumes lastSample is non-null (the slot
            // written this call) — TODO confirm, else this NPEs.
            final DateTime centerTime = firstSample != null ?
                    new DateTime((firstSample.getTime().getMillis() + lastSample.getTime().getMillis()) >> 1) : lastSample.getTime();
            switch (decimationMode) {
            case PEAK_PICK:
                if (firstSum > lastSum) {
                    // The sample window is generally down with time - - pick the minimum
                    final SampleState minSample = filterHistory[minIndex];
                    sampleConsumer.consumeSample(sampleNumber, minSample.getSampleOpcode(), minSample.getValue(), centerTime);
                } else {
                    // The sample window is generally up with time - - pick the maximum
                    final SampleState maxSample = filterHistory[maxIndex];
                    sampleConsumer.consumeSample(sampleNumber, maxSample.getSampleOpcode(), maxSample.getValue(), centerTime);
                }
                break;
            case AVERAGE:
                // NOTE(review): this divides by ceilSamplesPerOutput even when some
                // slots were null, and passes minIndex (not sampleNumber) as the
                // sample-number argument — both look suspicious; the billing-meter
                // copy of this filter differs here. TODO confirm intent.
                final double average = sum / ceilSamplesPerOutput;
                sampleConsumer.consumeSample(minIndex, SampleOpcode.DOUBLE, average, centerTime);
                break;
            default:
                throw new IllegalStateException(
                        String.format("The decimation filter mode %s is not recognized", decimationMode));
            }
        }
    }
}
From source file:com.ning.arecibo.util.timeline.persistent.Replayer.java
License:Apache License
public int readAll(final boolean deleteFiles, final @Nullable DateTime minStartTime, final Function<HostSamplesForTimestamp, Void> fn) { final Collection<File> files = FileUtils.listFiles(new File(path), new String[] { "bin" }, false); int filesSkipped = 0; for (final File file : FILE_ORDERING.sortedCopy(files)) { try {//from w ww .ja va2 s.c om // Skip files whose last modification date is is earlier than the first start time. if (minStartTime != null && file.lastModified() < minStartTime.getMillis()) { filesSkipped++; continue; } read(file, fn); if (shuttingDown.get()) { break; } if (deleteFiles) { if (!file.delete()) { log.warn("Unable to delete file: {}", file.getAbsolutePath()); } } } catch (IOException e) { log.warn("Exception replaying file: {}", file.getAbsolutePath(), e); } } return filesSkipped; }
From source file:com.ning.arecibo.util.timeline.persistent.Replayer.java
License:Apache License
/**
 * Deletes every ".bin" spool file under {@code path} that is older than the
 * given cutoff. Failed deletions are logged and otherwise ignored.
 *
 * @param purgeIfOlderDate files last modified before this instant are removed
 */
public void purgeOldFiles(final DateTime purgeIfOlderDate) {
    // Build the cutoff once; FileUtils.isFileOlder() expects a java.util.Date.
    final Date cutoff = new Date(purgeIfOlderDate.getMillis());
    final Collection<File> spoolFiles = FileUtils.listFiles(new File(path), new String[] { "bin" }, false);
    for (final File candidate : spoolFiles) {
        if (FileUtils.isFileOlder(candidate, cutoff) && !candidate.delete()) {
            log.warn("Unable to delete file: {}", candidate.getAbsolutePath());
        }
    }
}
From source file:com.ning.billing.entitlement.DefaultEntitlementTestInitializer.java
License:Apache License
private void resetClockToStartOfTest(final ClockMock clock) { clock.resetDeltaFromReality();/* w ww . j a va 2 s.c om*/ // Date at which all tests start-- we create the date object here after the system properties which set the JVM in UTC have been set. final DateTime testStartDate = new DateTime(2012, 5, 7, 0, 3, 42, 0); clock.setDeltaFromReality(testStartDate.getMillis() - clock.getUTCNow().getMillis()); }
From source file:com.ning.billing.meter.timeline.consumer.filter.DecimatingSampleFilter.java
License:Apache License
@Override
public void processOneSample(final DateTime time, final SampleOpcode opcode, final Object value) {
    // Decimates the incoming sample stream down to roughly outputCount output
    // samples, emitting either a peak-picked sample or an average per output
    // window depending on decimationMode.
    if (!initialized) {
        // Estimate the sampleCount, assuming that there are no gaps between this
        // sample and the end time, then size the circular history accordingly.
        final long adjustedEndMillis = Math.min(getEndTime().getMillis(), System.currentTimeMillis());
        final long millisTilEnd = adjustedEndMillis - time.getMillis();
        final int sampleCount = Math.max(outputCount, (int) (millisTilEnd / pollingInterval.getMillis()));
        initializeFilterHistory(sampleCount);
    }
    sampleNumber++;
    final SampleState sampleState = new SampleState(opcode, value, ScalarSample.getDoubleValue(opcode, value), time);
    // NOTE(review): the write index is modulo filterHistory.length but all reads
    // below are modulo ceilSamplesPerOutput — these only line up if the buffer
    // length is (a multiple of) ceilSamplesPerOutput; TODO confirm
    // initializeFilterHistory() guarantees that.
    final int historyIndex = sampleNumber % filterHistory.length;
    filterHistory[historyIndex] = sampleState;
    // Accumulate the fractional number of outputs owed; emit one output sample
    // each time the accumulator crosses 1.0.
    runningSum += outputsPerSample;
    if (runningSum >= 1.0) {
        runningSum -= 1.0;
        if (opcode == SampleOpcode.STRING) {
            // We don't have interpolation for strings, so just output this one.
            sampleProcessor.processOneSample(time, opcode, value);
        } else {
            // Time to output a sample - compare the sum of the first samples with
            // the sum of the last samples making up the output, choosing the lowest
            // value if the first samples are larger, and the highest value if the
            // last samples are larger.
            final int samplesInAverage = ceilSamplesPerOutput > 5 ?
                    ceilSamplesPerOutput * 2 / 3 : Math.max(1, ceilSamplesPerOutput - 1);
            final int samplesLeftOut = ceilSamplesPerOutput - samplesInAverage;
            // NOTE(review): Double.MIN_VALUE is the smallest *positive* double, so
            // max never updates if every sample is negative. Presumably sample
            // values are non-negative here — TODO confirm (otherwise the seeds
            // should be -/+ Infinity).
            double max = Double.MIN_VALUE;
            int maxIndex = 0;
            int minIndex = 0;
            double min = Double.MAX_VALUE;
            double sum = 0.0;
            double firstSum = 0.0;
            double lastSum = 0.0;
            // Walk the last ceilSamplesPerOutput history slots (newest first),
            // tracking min/max and the partial sums of the trailing (lastSum) and
            // leading (firstSum) sub-windows.
            for (int i = 0; i < ceilSamplesPerOutput; i++) {
                final int index = (sampleNumber + ceilSamplesPerOutput - i) % ceilSamplesPerOutput;
                final SampleState sample = filterHistory[index];
                if (sample != null) {
                    final double doubleValue = sample.getDoubleValue();
                    sum += doubleValue;
                    if (doubleValue > max) {
                        max = doubleValue;
                        maxIndex = index;
                    }
                    if (doubleValue < min) {
                        min = doubleValue;
                        minIndex = index;
                    }
                    if (i < samplesInAverage) {
                        lastSum += doubleValue;
                    }
                    if (i >= samplesLeftOut) {
                        firstSum += doubleValue;
                    }
                }
            }
            final SampleState firstSample = filterHistory[(sampleNumber + ceilSamplesPerOutput
                    - (ceilSamplesPerOutput - 1)) % ceilSamplesPerOutput];
            final SampleState lastSample = filterHistory[sampleNumber % ceilSamplesPerOutput];
            // Midpoint of the first and last sample times. NOTE(review): when
            // firstSample is null this assumes lastSample is non-null (the slot
            // written this call) — TODO confirm, else this NPEs.
            final DateTime centerTime = firstSample != null ?
                    new DateTime((firstSample.getTime().getMillis() + lastSample.getTime().getMillis()) >> 1) : lastSample.getTime();
            switch (decimationMode) {
            case PEAK_PICK:
                if (firstSum > lastSum) {
                    // The sample window is generally down with time - - pick the minimum
                    final SampleState minSample = filterHistory[minIndex];
                    sampleProcessor.processOneSample(centerTime, minSample.getSampleOpcode(), minSample.getValue());
                } else {
                    // The sample window is generally up with time - - pick the maximum
                    final SampleState maxSample = filterHistory[maxIndex];
                    sampleProcessor.processOneSample(centerTime, maxSample.getSampleOpcode(), maxSample.getValue());
                }
                break;
            case AVERAGE:
                // NOTE(review): divides by ceilSamplesPerOutput even when some
                // history slots were null (count may be smaller) — TODO confirm.
                final double average = sum / ceilSamplesPerOutput;
                sampleProcessor.processOneSample(centerTime, SampleOpcode.DOUBLE, average);
                break;
            default:
                throw new IllegalStateException(
                        String.format("The decimation filter mode %s is not recognized", decimationMode));
            }
        }
    }
}
From source file:com.ning.billing.meter.timeline.persistent.Replayer.java
License:Apache License
public int readAll(final boolean deleteFiles, @Nullable final DateTime minStartTime, final Function<SourceSamplesForTimestamp, Void> fn) { final List<File> files = findCandidates(); int filesSkipped = 0; for (final File file : FILE_ORDERING.sortedCopy(files)) { try {//from w w w. ja v a 2 s.c o m // Skip files whose last modification date is is earlier than the first start time. if (minStartTime != null && file.lastModified() < minStartTime.getMillis()) { filesSkipped++; continue; } read(file, fn); if (shuttingDown.get()) { break; } if (deleteFiles) { if (!file.delete()) { log.warn("Unable to delete file: {}", file.getAbsolutePath()); } } } catch (IOException e) { log.warn("Exception replaying file: {}", file.getAbsolutePath(), e); } } return filesSkipped; }
From source file:com.ning.billing.meter.timeline.persistent.Replayer.java
License:Apache License
public void purgeOldFiles(final DateTime purgeIfOlderDate) { final List<File> candidates = findCandidates(); for (final File file : candidates) { if (file.lastModified() <= purgeIfOlderDate.getMillis()) { if (!file.delete()) { log.warn("Unable to delete file: {}", file.getAbsolutePath()); }//from w ww. j ava 2 s . c o m } } }
From source file:com.ning.billing.util.notificationq.NotificationQueueDispatcher.java
License:Apache License
private void handleNotificationWithMetrics(final NotificationQueueHandler handler, final Notification notification, final NotificationKey key) { // Create specific metric name because: // - ':' is not allowed for metric name // - name would be too long (e.g entitlement-service:subscription-events-process-time -> ent-subscription-events-process-time) ////from w w w . j a v a 2s. co m final String[] parts = notification.getQueueName().split(":"); final String metricName = new StringBuilder(parts[0].substring(0, 3)).append("-").append(parts[1]) .append("-process-time").toString(); final Histogram perQueueHistogramProcessingTime; synchronized (perQueueProcessingTime) { if (!perQueueProcessingTime.containsKey(notification.getQueueName())) { perQueueProcessingTime.put(notification.getQueueName(), Metrics.newHistogram(NotificationQueueDispatcher.class, metricName)); } perQueueHistogramProcessingTime = perQueueProcessingTime.get(notification.getQueueName()); } final DateTime beforeProcessing = clock.getUTCNow(); handler.handleReadyNotification(key, notification.getEffectiveDate(), notification.getFutureUserToken(), notification.getAccountRecordId(), notification.getTenantRecordId()); final DateTime afterProcessing = clock.getUTCNow(); perQueueHistogramProcessingTime.update(afterProcessing.getMillis() - beforeProcessing.getMillis()); processedNotificationsSinceStart.inc(); }
From source file:com.ning.metrics.serialization.event.SmileEnvelopeEvent.java
License:Apache License
/**
 * Given a map ("json-like"), create an event with hourly granularity.
 *
 * The map is serialized to Smile immediately: a synthetic object is written with
 * the event timestamp and granularity fields first, followed by every map entry.
 *
 * @param eventName     name of the event
 * @param eventDateTime event timestamp
 * @param map           event data
 * @throws IOException generic serialization exception
 */
public SmileEnvelopeEvent(String eventName, DateTime eventDateTime, HashMap<String, Object> map) throws IOException {
    this.eventName = eventName;
    this.eventDateTime = eventDateTime;
    this.granularity = Granularity.HOURLY;

    final ByteArrayOutputStream stream = new ByteArrayOutputStream();
    final JsonGenerator g = smileFactory.createJsonGenerator(stream);
    g.writeStartObject();
    g.writeNumberField(SmileEnvelopeEvent.SMILE_EVENT_DATETIME_TOKEN_NAME, eventDateTime.getMillis());
    g.writeStringField(SmileEnvelopeEvent.SMILE_EVENT_GRANULARITY_TOKEN_NAME, granularity.toString());
    // Iterate entries directly instead of keySet()+get(): one hash lookup per
    // entry and no possibility of a key/value mismatch.
    for (final java.util.Map.Entry<String, Object> entry : map.entrySet()) {
        // writeObjectField will hopefully do the right thing (e.g. take care of numerics)
        g.writeObjectField(entry.getKey(), entry.getValue());
    }
    g.writeEndObject();
    g.close(); // important: forces flushing of output, closes underlying output stream
    setPayloadFromByteArray(stream.toByteArray());
}