Usage examples for the org.joda.time.Interval method getStart(), collected from open-source projects.
public DateTime getStart()
From source file:com.marand.thinkmed.medications.task.impl.MedicationsTasksProviderImpl.java
License:Open Source License
/**
 * Returns the administration task with the latest due time for the given therapy,
 * or an empty Opt when no matching task exists.
 */
private Opt<TaskDto> findLastTaskForTherapy(final String patientId, final String therapyId,
        final Interval searchInterval, final boolean findHistoric) {
    // A null search interval means the lookup is not restricted in time.
    final DateTime searchStart;
    final DateTime searchEnd;
    if (searchInterval == null) {
        searchStart = null;
        searchEnd = null;
    } else {
        searchStart = searchInterval.getStart();
        searchEnd = searchInterval.getEnd();
    }

    final List<TaskDto> tasks = findAdministrationTasks(
            Collections.singletonList(AdministrationTaskDef.getTaskTypeEnum().buildKey(patientId)),
            Collections.singletonList(therapyId),
            searchStart,
            searchEnd,
            null,
            findHistoric);

    // "Last" task == maximum due time among the matches.
    return Opt.from(tasks.stream().max(Comparator.comparing(TaskDto::getDueTime)));
}
From source file:com.marand.thinkmed.medications.task.impl.MedicationsTasksProviderImpl.java
License:Open Source License
/**
 * Loads administration tasks for the given patients inside the search interval, keeps at most
 * {@code maxNumberOfTasks} of the earliest-due ones, converts them to patient-task DTOs and
 * returns them ordered by planned time.
 */
@Override
public List<AdministrationPatientTaskDto> findAdministrationTasks(
        final Map<String, PatientDisplayWithLocationDto> patientWithLocationMap, final Interval searchInterval,
        final int maxNumberOfTasks, final Locale locale, final DateTime when) {
    final List<TaskDto> tasks = findAdministrationTasks(patientWithLocationMap.keySet(),
            searchInterval.getStart(), searchInterval.getEnd());

    // Order by due time so that truncation keeps the earliest-due tasks.
    tasks.sort(Comparator.comparing(TaskDto::getDueTime));
    final List<TaskDto> limitedTasks = tasks.size() > maxNumberOfTasks
            ? tasks.subList(0, maxNumberOfTasks)
            : tasks;

    final List<AdministrationPatientTaskDto> patientTasks = administrationTaskConverter
            .convertTasksToAdministrationPatientTasks(limitedTasks, patientWithLocationMap, locale, when);

    // Conversion may not preserve order, so sort the result by planned time.
    patientTasks.sort(Comparator.comparing(AdministrationPatientTaskDto::getPlannedTime));
    return patientTasks;
}
From source file:com.metamx.common.Granularity.java
License:Apache License
/** * Round out Interval such that it becomes granularity-aligned and nonempty. */// w ww. ja v a 2 s .c o m public final Interval widen(Interval interval) { final DateTime start = truncate(interval.getStart()); final DateTime end; if (interval.getEnd().equals(start)) { // Empty with aligned start/end; expand into a granularity-sized interval end = increment(start); } else if (truncate(interval.getEnd()).equals(interval.getEnd())) { // Non-empty with aligned end; keep the same end end = interval.getEnd(); } else { // Non-empty with non-aligned end; push it out end = increment(truncate(interval.getEnd())); } return new Interval(start, end); }
From source file:com.metamx.common.JodaUtils.java
License:Apache License
/**
 * Condenses a collection of intervals into the minimal sorted list of disjoint intervals
 * covering the same instants: overlapping or abutting intervals are merged.
 *
 * @param intervals intervals to condense; may be empty
 * @return sorted, disjoint, merged intervals (empty list for empty input)
 */
public static ArrayList<Interval> condenseIntervals(Iterable<Interval> intervals) {
    ArrayList<Interval> retVal = Lists.newArrayList();

    // Sort by start then end so a single forward sweep can merge neighbors.
    TreeSet<Interval> sortedIntervals = Sets.newTreeSet(Comparators.intervalsByStartThenEnd());
    for (Interval interval : intervals) {
        sortedIntervals.add(interval);
    }

    if (sortedIntervals.isEmpty()) {
        return Lists.newArrayList();
    }

    Iterator<Interval> intervalsIter = sortedIntervals.iterator();
    Interval currInterval = intervalsIter.next();
    while (intervalsIter.hasNext()) {
        Interval next = intervalsIter.next();

        if (currInterval.overlaps(next) || currInterval.abuts(next)) {
            // FIX: "next" can be fully nested inside currInterval (e.g. [0,10] then [1,2],
            // valid under start-then-end ordering). Taking next.getEnd() unconditionally
            // would SHRINK the merged interval and drop coverage, so keep the later end.
            DateTime mergedEnd = next.getEnd().isAfter(currInterval.getEnd())
                    ? next.getEnd()
                    : currInterval.getEnd();
            currInterval = new Interval(currInterval.getStart(), mergedEnd);
        } else {
            // Disjoint: flush the current run and start a new one.
            retVal.add(currInterval);
            currInterval = next;
        }
    }
    retVal.add(currInterval);

    return retVal;
}
From source file:com.metamx.common.JodaUtils.java
License:Apache License
/**
 * Returns the smallest single interval covering all of the given intervals
 * (earliest start through latest end).
 *
 * @throws IllegalArgumentException if {@code intervals} is empty
 */
public static Interval umbrellaInterval(Iterable<Interval> intervals) {
    ArrayList<DateTime> startDates = Lists.newArrayList();
    ArrayList<DateTime> endDates = Lists.newArrayList();

    for (Interval interval : intervals) {
        startDates.add(interval.getStart());
        endDates.add(interval.getEnd());
    }

    DateTime minStart = minDateTime(startDates.toArray(new DateTime[0]));
    DateTime maxEnd = maxDateTime(endDates.toArray(new DateTime[0]));

    // minDateTime/maxDateTime yield null only for empty input.
    if (minStart == null || maxEnd == null) {
        throw new IllegalArgumentException("Empty list of intervals");
    }
    return new Interval(minStart, maxEnd);
}
From source file:com.metamx.druid.client.CachingClusteredClient.java
License:Open Source License
/**
 * Runs the query against the cluster, serving per-segment results from cache where possible
 * and querying the owning servers for the rest; cache population happens as server results
 * stream through. NOTE(review): reconstructed from a line-wrapped scrape — verify against the
 * original CachingClusteredClient source.
 */
@Override
public Sequence<T> run(final Query<T> query) {
    final QueryToolChest<T, Query<T>> toolChest = warehouse.getToolChest(query);
    // strategy == null means this query type is not cacheable at all.
    final CacheStrategy<T, Object, Query<T>> strategy = toolChest.getCacheStrategy(query);

    final Map<DruidServer, List<SegmentDescriptor>> serverSegments = Maps.newTreeMap();

    final List<Pair<DateTime, byte[]>> cachedResults = Lists.newArrayList();
    final Map<String, CachePopulator> cachePopulatorMap = Maps.newHashMap();

    // Cache use/population can be disabled per-query via context flags; both require a strategy.
    final boolean useCache = Boolean.parseBoolean(query.getContextValue("useCache", "true")) && strategy != null;
    final boolean populateCache = Boolean.parseBoolean(query.getContextValue("populateCache", "true"))
            && strategy != null;
    final boolean isBySegment = Boolean.parseBoolean(query.getContextValue("bySegment", "false"));

    // When populating the cache we need per-segment results back from the servers.
    final Query<T> rewrittenQuery;
    if (populateCache) {
        rewrittenQuery = query
                .withOverriddenContext(ImmutableMap.of("bySegment", "true", "intermediate", "true"));
    } else {
        rewrittenQuery = query.withOverriddenContext(ImmutableMap.of("intermediate", "true"));
    }

    VersionedIntervalTimeline<String, ServerSelector> timeline = serverView.getTimeline(query.getDataSource());
    if (timeline == null) {
        // Unknown data source: nothing to query.
        return Sequences.empty();
    }

    // Build the set of (server-selector, segment) pairs covering the query intervals.
    Set<Pair<ServerSelector, SegmentDescriptor>> segments = Sets.newLinkedHashSet();
    for (Interval interval : rewrittenQuery.getIntervals()) {
        List<TimelineObjectHolder<String, ServerSelector>> serversLookup = timeline.lookup(interval);
        for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) {
            for (PartitionChunk<ServerSelector> chunk : holder.getObject()) {
                ServerSelector selector = chunk.getObject();
                final SegmentDescriptor descriptor = new SegmentDescriptor(holder.getInterval(),
                        holder.getVersion(), chunk.getChunkNumber());
                segments.add(Pair.of(selector, descriptor));
            }
        }
    }

    final byte[] queryCacheKey;
    if (strategy != null) {
        queryCacheKey = strategy.computeCacheKey(query);
    } else {
        queryCacheKey = null;
    }

    // Pull cached segments from cache and remove them from the set of segments to query.
    if (useCache && queryCacheKey != null) {
        Map<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> cacheKeys = Maps.newHashMap();
        for (Pair<ServerSelector, SegmentDescriptor> e : segments) {
            cacheKeys.put(e, computeSegmentCacheKey(e.lhs.getSegment().getIdentifier(), e.rhs, queryCacheKey));
        }

        // Single bulk fetch for all candidate segment keys.
        Map<Cache.NamedKey, byte[]> cachedValues = cache.getBulk(cacheKeys.values());

        for (Map.Entry<Pair<ServerSelector, SegmentDescriptor>, Cache.NamedKey> entry : cacheKeys.entrySet()) {
            Pair<ServerSelector, SegmentDescriptor> segment = entry.getKey();
            Cache.NamedKey segmentCacheKey = entry.getValue();

            final ServerSelector selector = segment.lhs;
            final SegmentDescriptor descriptor = segment.rhs;
            final Interval segmentQueryInterval = descriptor.getInterval();

            final byte[] cachedValue = cachedValues.get(segmentCacheKey);

            if (cachedValue != null) {
                cachedResults.add(Pair.of(segmentQueryInterval.getStart(), cachedValue));

                // Cache hit: no need to ask any server for this segment.
                segments.remove(segment);
            } else {
                // Cache miss: remember where to write this segment's results when they arrive.
                final String segmentIdentifier = selector.getSegment().getIdentifier();
                cachePopulatorMap.put(String.format("%s_%s", segmentIdentifier, segmentQueryInterval),
                        new CachePopulator(cache, objectMapper, segmentCacheKey));
            }
        }
    }

    // Group the remaining (uncached) segments by the server chosen to serve each one.
    for (Pair<ServerSelector, SegmentDescriptor> segment : segments) {
        final DruidServer server = segment.lhs.pick();
        List<SegmentDescriptor> descriptors = serverSegments.get(server);

        if (descriptors == null) {
            descriptors = Lists.newArrayList();
            serverSegments.put(server, descriptors);
        }

        descriptors.add(segment.rhs);
    }

    // Defer all fetching until the sequence is actually consumed.
    return new LazySequence<T>(new Supplier<Sequence<T>>() {
        @Override
        public Sequence<T> get() {
            ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences = Lists.newArrayList();
            addSequencesFromServer(listOfSequences);
            addSequencesFromCache(listOfSequences);

            // Merge requires the per-interval sequences in time order.
            Collections.sort(listOfSequences, Ordering.natural().onResultOf(Pair.<DateTime, Sequence<T>>lhsFn()));

            final Sequence<Sequence<T>> seq = Sequences
                    .simple(Iterables.transform(listOfSequences, Pair.<DateTime, Sequence<T>>rhsFn()));
            if (strategy == null) {
                return toolChest.mergeSequences(seq);
            } else {
                return strategy.mergeSequences(seq);
            }
        }

        // Deserializes each cached byte[] into a lazy sequence of results.
        private void addSequencesFromCache(ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences) {
            if (strategy == null) {
                return;
            }

            final Function<Object, T> pullFromCacheFunction = strategy.pullFromCache();
            final TypeReference<Object> cacheObjectClazz = strategy.getCacheObjectClazz();
            for (Pair<DateTime, byte[]> cachedResultPair : cachedResults) {
                final byte[] cachedResult = cachedResultPair.rhs;
                Sequence<Object> cachedSequence = new BaseSequence<Object, Iterator<Object>>(
                        new BaseSequence.IteratorMaker<Object, Iterator<Object>>() {
                            @Override
                            public Iterator<Object> make() {
                                try {
                                    if (cachedResult.length == 0) {
                                        return Iterators.emptyIterator();
                                    }

                                    return objectMapper.readValues(
                                            objectMapper.getJsonFactory().createJsonParser(cachedResult),
                                            cacheObjectClazz);
                                } catch (IOException e) {
                                    throw Throwables.propagate(e);
                                }
                            }

                            @Override
                            public void cleanup(Iterator<Object> iterFromMake) {
                                // Nothing to release; the parser reads from an in-memory array.
                            }
                        });
                listOfSequences.add(
                        Pair.of(cachedResultPair.lhs, Sequences.map(cachedSequence, pullFromCacheFunction)));
            }
        }

        // Issues one query per server for its assigned segments, optionally populating the cache.
        @SuppressWarnings("unchecked")
        private void addSequencesFromServer(ArrayList<Pair<DateTime, Sequence<T>>> listOfSequences) {
            for (Map.Entry<DruidServer, List<SegmentDescriptor>> entry : serverSegments.entrySet()) {
                final DruidServer server = entry.getKey();
                final List<SegmentDescriptor> descriptors = entry.getValue();

                final QueryRunner clientQueryable = serverView.getQueryRunner(server);
                if (clientQueryable == null) {
                    throw new ISE("WTF!? server[%s] doesn't have a client Queryable?", server);
                }

                final Sequence<T> resultSeqToAdd;
                final MultipleSpecificSegmentSpec segmentSpec = new MultipleSpecificSegmentSpec(descriptors);
                List<Interval> intervals = segmentSpec.getIntervals();

                if ("realtime".equals(server.getType()) || !populateCache || isBySegment) {
                    // Realtime results are not cached; bySegment/non-populating queries pass through.
                    resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec));
                } else {
                    // Per-segment results: populate the cache as they stream by, then unwrap them.
                    resultSeqToAdd = toolChest.mergeSequences(
                            Sequences.map(clientQueryable.run(rewrittenQuery.withQuerySegmentSpec(segmentSpec)),
                                    new Function<Object, Sequence<T>>() {
                                        private final Function<T, Object> prepareForCache = strategy
                                                .prepareForCache();

                                        @Override
                                        public Sequence<T> apply(Object input) {
                                            Result<Object> result = (Result<Object>) input;
                                            final BySegmentResultValueClass<T> value = (BySegmentResultValueClass<T>) result
                                                    .getValue();
                                            String segmentIdentifier = value.getSegmentId();
                                            final Iterable<T> segmentResults = value.getResults();

                                            // Key must match the one built at cache-miss time above.
                                            cachePopulatorMap
                                                    .get(String.format("%s_%s", segmentIdentifier,
                                                            value.getInterval()))
                                                    .populate(Iterables.transform(segmentResults, prepareForCache));

                                            return Sequences.simple(Iterables.transform(segmentResults,
                                                    toolChest.makeMetricManipulatorFn(rewrittenQuery,
                                                            new MetricManipulationFn() {
                                                                @Override
                                                                public Object manipulate(AggregatorFactory factory,
                                                                        Object object) {
                                                                    return factory.deserialize(object);
                                                                }
                                                            })));
                                        }
                                    }));
                }

                listOfSequences.add(Pair.of(intervals.get(0).getStart(), resultSeqToAdd));
            }
        }
    });
}
From source file:com.metamx.druid.index.v1.IncrementalIndexStorageAdapter.java
License:Open Source License
/**
 * Creates one Cursor per granularity bucket over the part of {@code interval} that overlaps
 * this index's data interval; returns an empty iterable when there is no overlap.
 * NOTE(review): reconstructed from a line-wrapped scrape — verify against the original
 * IncrementalIndexStorageAdapter source.
 */
@Override
public Iterable<Cursor> makeCursors(final Filter filter, final Interval interval, final QueryGranularity gran) {
    Interval actualIntervalTmp = interval;

    Interval dataInterval = getInterval();
    if (!actualIntervalTmp.overlaps(dataInterval)) {
        return ImmutableList.of();
    }

    // Clamp the requested interval to the data interval.
    if (actualIntervalTmp.getStart().isBefore(dataInterval.getStart())) {
        actualIntervalTmp = actualIntervalTmp.withStart(dataInterval.getStart());
    }
    if (actualIntervalTmp.getEnd().isAfter(dataInterval.getEnd())) {
        actualIntervalTmp = actualIntervalTmp.withEnd(dataInterval.getEnd());
    }

    final Interval actualInterval = actualIntervalTmp;

    return new Iterable<Cursor>() {
        @Override
        public Iterator<Cursor> iterator() {
            // One cursor per granularity bucket inside the clamped interval.
            return FunctionalIterator.create(
                    gran.iterable(actualInterval.getStartMillis(), actualInterval.getEndMillis()).iterator())
                    .transform(new Function<Long, Cursor>() {
                        EntryHolder currEntry = new EntryHolder();
                        private final ValueMatcher filterMatcher;

                        {
                            filterMatcher = makeFilterMatcher(filter, currEntry);
                        }

                        @Override
                        public Cursor apply(@Nullable final Long input) {
                            final long timeStart = Math.max(input, actualInterval.getStartMillis());

                            return new Cursor() {
                                private Iterator<Map.Entry<IncrementalIndex.TimeAndDims, Aggregator[]>> baseIter;
                                private ConcurrentNavigableMap<IncrementalIndex.TimeAndDims, Aggregator[]> cursorMap;
                                final DateTime time;
                                // -1 marks "first reset not completed yet"; see reset().
                                int numAdvanced = -1;
                                boolean done;

                                {
                                    // Restrict the index view to rows inside this bucket.
                                    cursorMap = index.getSubMap(
                                            new IncrementalIndex.TimeAndDims(timeStart, new String[][] {}),
                                            new IncrementalIndex.TimeAndDims(Math
                                                    .min(actualInterval.getEndMillis(), gran.next(timeStart)),
                                                    new String[][] {}));
                                    time = gran.toDateTime(input);

                                    reset();
                                }

                                @Override
                                public DateTime getTime() {
                                    return time;
                                }

                                @Override
                                public void advance() {
                                    if (!baseIter.hasNext()) {
                                        done = true;
                                        return;
                                    }

                                    // Skip forward to the next row matching the filter.
                                    while (baseIter.hasNext()) {
                                        currEntry.set(baseIter.next());
                                        if (filterMatcher.matches()) {
                                            return;
                                        }
                                    }

                                    if (!filterMatcher.matches()) {
                                        done = true;
                                    }
                                }

                                @Override
                                public boolean isDone() {
                                    return done;
                                }

                                @Override
                                public void reset() {
                                    baseIter = cursorMap.entrySet().iterator();

                                    if (numAdvanced == -1) {
                                        // First pass: find the first filter-matching row and
                                        // remember how many rows were skipped to reach it.
                                        numAdvanced = 0;
                                        while (baseIter.hasNext()) {
                                            currEntry.set(baseIter.next());
                                            if (filterMatcher.matches()) {
                                                return;
                                            }
                                            numAdvanced++;
                                        }
                                    } else {
                                        // Subsequent resets: fast-forward past the known skip count.
                                        Iterators.skip(baseIter, numAdvanced);
                                        if (baseIter.hasNext()) {
                                            currEntry.set(baseIter.next());
                                        }
                                    }

                                    done = cursorMap.size() == 0 || !baseIter.hasNext();
                                }

                                @Override
                                public DimensionSelector makeDimensionSelector(String dimension) {
                                    final String dimensionName = dimension.toLowerCase();
                                    final IncrementalIndex.DimDim dimValLookup = index.getDimension(dimensionName);
                                    if (dimValLookup == null) {
                                        return null;
                                    }

                                    // Snapshot the id space: values added after selector creation
                                    // (id >= maxId) are excluded from rows.
                                    final int maxId = dimValLookup.size();
                                    final int dimIndex = index.getDimensionIndex(dimensionName);

                                    return new DimensionSelector() {
                                        @Override
                                        public IndexedInts getRow() {
                                            final ArrayList<Integer> vals = Lists.newArrayList();
                                            if (dimIndex < currEntry.getKey().getDims().length) {
                                                final String[] dimVals = currEntry.getKey().getDims()[dimIndex];
                                                if (dimVals != null) {
                                                    for (String dimVal : dimVals) {
                                                        int id = dimValLookup.getId(dimVal);
                                                        if (id < maxId) {
                                                            vals.add(id);
                                                        }
                                                    }
                                                }
                                            }

                                            return new IndexedInts() {
                                                @Override
                                                public int size() {
                                                    return vals.size();
                                                }

                                                @Override
                                                public int get(int index) {
                                                    return vals.get(index);
                                                }

                                                @Override
                                                public Iterator<Integer> iterator() {
                                                    return vals.iterator();
                                                }
                                            };
                                        }

                                        @Override
                                        public int getValueCardinality() {
                                            return dimValLookup.size();
                                        }

                                        @Override
                                        public String lookupName(int id) {
                                            return dimValLookup.getValue(id);
                                        }

                                        @Override
                                        public int lookupId(String name) {
                                            return dimValLookup.getId(name);
                                        }
                                    };
                                }

                                @Override
                                public FloatMetricSelector makeFloatMetricSelector(String metric) {
                                    final String metricName = metric.toLowerCase();
                                    final Integer metricIndexInt = index.getMetricIndex(metricName);
                                    if (metricIndexInt == null) {
                                        // Unknown metric: a constant-zero selector.
                                        return new FloatMetricSelector() {
                                            @Override
                                            public float get() {
                                                return 0.0f;
                                            }
                                        };
                                    }

                                    final int metricIndex = metricIndexInt;

                                    return new FloatMetricSelector() {
                                        @Override
                                        public float get() {
                                            return currEntry.getValue()[metricIndex].getFloat();
                                        }
                                    };
                                }

                                @Override
                                public ComplexMetricSelector makeComplexMetricSelector(String metric) {
                                    final String metricName = metric.toLowerCase();
                                    final Integer metricIndexInt = index.getMetricIndex(metricName);
                                    if (metricIndexInt == null) {
                                        return null;
                                    }

                                    final int metricIndex = metricIndexInt;
                                    final ComplexMetricSerde serde = ComplexMetrics
                                            .getSerdeForType(index.getMetricType(metricName));

                                    return new ComplexMetricSelector() {
                                        @Override
                                        public Class classOfObject() {
                                            return serde.getObjectStrategy().getClazz();
                                        }

                                        @Override
                                        public Object get() {
                                            return currEntry.getValue()[metricIndex].get();
                                        }
                                    };
                                }
                            };
                        }
                    });
        }
    };
}
From source file:com.metamx.druid.index.v1.IndexMerger.java
License:Open Source License
/**
 * Writes the "index.drd" metadata file into {@code inDir}: a one-byte format version,
 * the dimension and metric name lists, and the data interval as "start/end" text.
 *
 * @throws IOException if writing the file fails
 */
public static void createIndexDrdFile(byte versionId, File inDir, GenericIndexed<String> availableDimensions,
        GenericIndexed<String> availableMetrics, Interval dataInterval) throws IOException {
    final File indexFile = new File(inDir, "index.drd");

    FileChannel channel = null;
    try {
        channel = new FileOutputStream(indexFile).getChannel();

        // Leading byte identifies the index format version.
        channel.write(ByteBuffer.wrap(new byte[] { versionId }));
        availableDimensions.writeToChannel(channel);
        availableMetrics.writeToChannel(channel);

        final String intervalString = String.format("%s/%s", dataInterval.getStart(), dataInterval.getEnd());
        serializerUtils.writeString(channel, intervalString);
    } finally {
        // Best-effort close; write failures have already propagated from the try body.
        Closeables.closeQuietly(channel);
        channel = null;
    }

    IndexIO.checkFileSize(indexFile);
}
From source file:com.metamx.druid.index.v1.IndexStorageAdapter.java
License:Open Source License
/**
 * Creates one Cursor per granularity bucket over the part of {@code interval} that overlaps
 * the index's data interval; returns an empty iterable when there is no overlap.
 * NOTE(review): reconstructed from a line-wrapped scrape — verify against the original
 * IndexStorageAdapter source.
 */
@Override
public Iterable<Cursor> makeCursors(final Filter filter, final Interval interval, final QueryGranularity gran) {
    Interval actualIntervalTmp = interval;
    if (!actualIntervalTmp.overlaps(index.dataInterval)) {
        return ImmutableList.of();
    }

    // Clamp the requested interval to the data interval.
    if (actualIntervalTmp.getStart().isBefore(index.dataInterval.getStart())) {
        actualIntervalTmp = actualIntervalTmp.withStart(index.dataInterval.getStart());
    }
    if (actualIntervalTmp.getEnd().isAfter(index.dataInterval.getEnd())) {
        actualIntervalTmp = actualIntervalTmp.withEnd(index.dataInterval.getEnd());
    }

    final Interval actualInterval = actualIntervalTmp;

    final Pair<Integer, Integer> intervalStartAndEnd = computeTimeStartEnd(actualInterval);

    return new Iterable<Cursor>() {
        @Override
        public Iterator<Cursor> iterator() {
            // Without a filter, walk rows sequentially from the interval's start offset;
            // with one, walk the filter's bitmap starting from the same offset.
            final Offset baseOffset;
            if (filter == null) {
                baseOffset = new ArrayBasedOffset(ids, intervalStartAndEnd.lhs);
            } else {
                baseOffset = new StartLimitedOffset(
                        new ConciseOffset(filter.goConcise(new IndexBasedBitmapIndexSelector(index))),
                        intervalStartAndEnd.lhs);
            }

            // Shared across cursors so metric columns are materialized at most once.
            final Map<String, Object> metricHolderCache = Maps.newHashMap();

            // This after call is not perfect: if there is an exception during processing it will
            // never run, but it's better than nothing, and doing this properly all the time
            // requires a lot more rework.
            return MoreIterators.after(FunctionalIterator.create(
                    gran.iterable(actualInterval.getStartMillis(), actualInterval.getEndMillis()).iterator())
                    .keep(new Function<Long, Cursor>() {
                        @Override
                        public Cursor apply(final Long intervalStart) {
                            // Bound each cursor to its own granularity bucket.
                            final Offset offset = new TimestampCheckingOffset(baseOffset, index.timeOffsets,
                                    Math.min(actualInterval.getEndMillis(), gran.next(intervalStart)));

                            return new Cursor() {
                                private final Offset initOffset = offset.clone();
                                private Offset cursorOffset = offset;
                                private final DateTime timestamp = gran.toDateTime(intervalStart);

                                @Override
                                public DateTime getTime() {
                                    return timestamp;
                                }

                                @Override
                                public void advance() {
                                    cursorOffset.increment();
                                }

                                @Override
                                public boolean isDone() {
                                    return !cursorOffset.withinBounds();
                                }

                                @Override
                                public void reset() {
                                    cursorOffset = initOffset.clone();
                                }

                                @Override
                                public DimensionSelector makeDimensionSelector(String dimension) {
                                    final String dimensionName = dimension.toLowerCase();
                                    final String[] nameLookup = index.reverseDimLookup.get(dimensionName);
                                    if (nameLookup == null) {
                                        return null;
                                    }

                                    return new DimensionSelector() {
                                        final Map<String, Integer> dimValLookup = index.dimIdLookup
                                                .get(dimensionName);
                                        final DimensionColumn dimColumn = index.dimensionValues
                                                .get(dimensionName);
                                        final int[][] dimensionExpansions = dimColumn.getDimensionExpansions();
                                        final int[] dimensionRowValues = dimColumn.getDimensionRowValues();

                                        @Override
                                        public IndexedInts getRow() {
                                            return new ArrayBasedIndexedInts(
                                                    dimensionExpansions[dimensionRowValues[cursorOffset
                                                            .getOffset()]]);
                                        }

                                        @Override
                                        public int getValueCardinality() {
                                            return nameLookup.length;
                                        }

                                        @Override
                                        public String lookupName(int id) {
                                            return nameLookup[id];
                                        }

                                        @Override
                                        public int lookupId(String name) {
                                            final Integer retVal = dimValLookup.get(name);
                                            return retVal == null ? -1 : retVal;
                                        }
                                    };
                                }

                                @Override
                                public FloatMetricSelector makeFloatMetricSelector(String metric) {
                                    String metricName = metric.toLowerCase();
                                    // NOTE(review): the cache is probed with the raw "metric" but
                                    // populated under the lowercased "metricName" — for mixed-case
                                    // metric names the lookup always misses. Looks like a bug;
                                    // confirm before changing.
                                    IndexedFloats cachedFloats = (IndexedFloats) metricHolderCache.get(metric);
                                    if (cachedFloats == null) {
                                        MetricHolder holder = index.metricVals.get(metricName);
                                        if (holder == null) {
                                            // Unknown metric: a constant-zero selector.
                                            return new FloatMetricSelector() {
                                                @Override
                                                public float get() {
                                                    return 0.0f;
                                                }
                                            };
                                        }

                                        cachedFloats = holder.getFloatType();
                                        metricHolderCache.put(metricName, cachedFloats);
                                    }

                                    final IndexedFloats metricVals = cachedFloats;
                                    return new FloatMetricSelector() {
                                        @Override
                                        public float get() {
                                            return metricVals.get(cursorOffset.getOffset());
                                        }
                                    };
                                }

                                @Override
                                public ComplexMetricSelector makeComplexMetricSelector(String metric) {
                                    final String metricName = metric.toLowerCase();
                                    Indexed cachedComplex = (Indexed) metricHolderCache.get(metricName);
                                    if (cachedComplex == null) {
                                        MetricHolder holder = index.metricVals.get(metricName);
                                        if (holder != null) {
                                            cachedComplex = holder.getComplexType();
                                            metricHolderCache.put(metricName, cachedComplex);
                                        }
                                    }

                                    if (cachedComplex == null) {
                                        return null;
                                    }

                                    final Indexed vals = cachedComplex;
                                    return new ComplexMetricSelector() {
                                        @Override
                                        public Class classOfObject() {
                                            return vals.getClazz();
                                        }

                                        @Override
                                        public Object get() {
                                            return vals.get(cursorOffset.getOffset());
                                        }
                                    };
                                }
                            };
                        }
                    }), new Runnable() {
                        @Override
                        public void run() {
                            // Release any closeable metric columns materialized above.
                            for (Object object : metricHolderCache.values()) {
                                if (object instanceof Closeable) {
                                    Closeables.closeQuietly((Closeable) object);
                                }
                            }
                        }
                    });
        }
    };
}
From source file:com.metamx.druid.index.v1.IndexStorageAdapter.java
License:Open Source License
/**
 * Clamps the requested interval to the index's data interval and converts the resulting
 * endpoints into a pair of row offsets via {@code computeOffsets}.
 */
private Pair<Integer, Integer> computeTimeStartEnd(Interval interval) {
    // Use the requested endpoint when it lies inside the data interval,
    // otherwise fall back to the data interval's own endpoint.
    final DateTime clampedStart = index.dataInterval.contains(interval.getStart())
            ? interval.getStart()
            : index.dataInterval.getStart();
    final DateTime clampedEnd = index.dataInterval.contains(interval.getEnd())
            ? interval.getEnd()
            : index.dataInterval.getEnd();

    return computeOffsets(clampedStart.getMillis(), 0, clampedEnd.getMillis(), index.timeOffsets.length);
}