List of usage examples for org.joda.time Interval getStart
public DateTime getStart()
From source file:io.druid.indexing.overlord.IndexerDBCoordinator.java
License:Open Source License
public List<DataSegment> getUnusedSegmentsForInterval(final String dataSource, final Interval interval) { List<DataSegment> matchingSegments = dbConnector.getDBI() .withHandle(new HandleCallback<List<DataSegment>>() { @Override//from ww w. j av a 2s. c o m public List<DataSegment> withHandle(Handle handle) throws IOException, SQLException { return handle.createQuery(String.format(dbConnector.isPostgreSQL() ? "SELECT payload FROM %s WHERE dataSource = :dataSource and start >= :start and \"end\" <= :end and used = false" : "SELECT payload FROM %s WHERE dataSource = :dataSource and start >= :start and end <= :end and used = false", dbTables.getSegmentsTable())).bind("dataSource", dataSource) .bind("start", interval.getStart().toString()) .bind("end", interval.getEnd().toString()).fold(Lists.<DataSegment>newArrayList(), new Folder3<List<DataSegment>, Map<String, Object>>() { @Override public List<DataSegment> fold(List<DataSegment> accumulator, Map<String, Object> stringObjectMap, FoldController foldController, StatementContext statementContext) throws SQLException { try { DataSegment segment = jsonMapper.readValue( (String) stringObjectMap.get("payload"), DataSegment.class); accumulator.add(segment); return accumulator; } catch (Exception e) { throw Throwables.propagate(e); } } }); } }); log.info("Found %,d segments for %s for interval %s.", matchingSegments.size(), dataSource, interval); return matchingSegments; }
From source file:io.druid.indexing.overlord.IndexerMetadataStorageAdapter.java
License:Apache License
public int deletePendingSegments(String dataSource, Interval deleteInterval) { // Check the given interval overlaps the interval(minCreatedDateOfActiveTasks, MAX) final Optional<DateTime> minCreatedDateOfActiveTasks = taskStorageQueryAdapter.getActiveTasks().stream() .map(task -> Preconditions.checkNotNull(taskStorageQueryAdapter.getCreatedTime(task.getId()), "Can't find the createdTime for task[%s]", task.getId())) .min(Comparator.naturalOrder()); final Interval activeTaskInterval = new Interval(minCreatedDateOfActiveTasks.orElse(DateTimes.MAX), DateTimes.MAX);//from w w w. j a v a2s . c om Preconditions.checkArgument(!deleteInterval.overlaps(activeTaskInterval), "Cannot delete pendingSegments because there is at least one active task created at %s", activeTaskInterval.getStart()); return indexerMetadataStorageCoordinator.deletePendingSegments(dataSource, deleteInterval); }
From source file:io.druid.indexing.overlord.TaskLockbox.java
License:Apache License
/** * Return all locks that overlap some search interval. *///from ww w .j ava 2s. co m private List<TaskLockPosse> findLockPossesForInterval(final String dataSource, final Interval interval) { giant.lock(); try { final NavigableMap<Interval, TaskLockPosse> dsRunning = running.get(dataSource); if (dsRunning == null) { // No locks at all return Collections.emptyList(); } else { // Tasks are indexed by locked interval, which are sorted by interval start. Intervals are non-overlapping, so: final NavigableSet<Interval> dsLockbox = dsRunning.navigableKeySet(); final Iterable<Interval> searchIntervals = Iterables.concat( // Single interval that starts at or before ours Collections.singletonList(dsLockbox .floor(new Interval(interval.getStart(), new DateTime(JodaUtils.MAX_INSTANT)))), // All intervals that start somewhere between our start instant (exclusive) and end instant (exclusive) dsLockbox.subSet(new Interval(interval.getStart(), new DateTime(JodaUtils.MAX_INSTANT)), false, new Interval(interval.getEnd(), interval.getEnd()), false)); return Lists .newArrayList(FunctionalIterable.create(searchIntervals).filter(new Predicate<Interval>() { @Override public boolean apply(@Nullable Interval searchInterval) { return searchInterval != null && searchInterval.overlaps(interval); } }).transform(new Function<Interval, TaskLockPosse>() { @Override public TaskLockPosse apply(Interval interval) { return dsRunning.get(interval); } })); } } finally { giant.unlock(); } }
From source file:io.druid.java.util.common.Intervals.java
License:Apache License
public static boolean isEmpty(Interval interval) {
    // A zero-length interval has coinciding endpoints. Comparing millis is equivalent
    // to DateTime.equals here because both endpoints share the interval's chronology.
    return interval.getStartMillis() == interval.getEndMillis();
}
From source file:io.druid.java.util.common.JodaUtils.java
License:Apache License
public static ArrayList<Interval> condenseIntervals(Iterable<Interval> intervals) {
    // Merge abutting/overlapping intervals into the minimal sorted list of
    // disjoint intervals covering the same instants.
    final SortedSet<Interval> sorted;
    if (intervals instanceof SortedSet) {
        // Assumes the caller's SortedSet already orders by start-then-end — TODO confirm.
        sorted = (SortedSet<Interval>) intervals;
    } else {
        sorted = Sets.newTreeSet(Comparators.intervalsByStartThenEnd());
        for (Interval interval : intervals) {
            sorted.add(interval);
        }
    }

    if (sorted.isEmpty()) {
        return Lists.newArrayList();
    }

    final ArrayList<Interval> condensed = Lists.newArrayList();
    final Iterator<Interval> it = sorted.iterator();
    Interval current = it.next();
    while (it.hasNext()) {
        final Interval next = it.next();
        if (current.abuts(next)) {
            // Touching intervals fuse into one.
            current = new Interval(current.getStart(), next.getEnd());
        } else if (current.overlaps(next)) {
            // Overlapping intervals fuse, keeping the later end.
            final DateTime laterEnd =
                    next.getEnd().isAfter(current.getEnd()) ? next.getEnd() : current.getEnd();
            current = new Interval(current.getStart(), laterEnd);
        } else {
            // Gap found: emit the accumulated interval and start a new one.
            condensed.add(current);
            current = next;
        }
    }
    condensed.add(current);
    return condensed;
}
From source file:io.druid.metadata.IndexerSQLMetadataStorageCoordinator.java
License:Apache License
public List<DataSegment> getUsedSegmentsForInterval(final String dataSource, final Interval interval) throws IOException { final VersionedIntervalTimeline<String, DataSegment> timeline = connector.getDBI() .withHandle(new HandleCallback<VersionedIntervalTimeline<String, DataSegment>>() { @Override//from w w w . j a va 2 s .c o m public VersionedIntervalTimeline<String, DataSegment> withHandle(Handle handle) throws IOException { final VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<String, DataSegment>( Ordering.natural()); final ResultIterator<byte[]> dbSegments = handle.createQuery(String.format( "SELECT payload FROM %s WHERE used = true AND dataSource = :dataSource AND start <= :end and \"end\" >= :start AND used = true", dbTables.getSegmentsTable())).bind("dataSource", dataSource) .bind("start", interval.getStart().toString()) .bind("end", interval.getEnd().toString()).map(ByteArrayMapper.FIRST).iterator(); while (dbSegments.hasNext()) { final byte[] payload = dbSegments.next(); DataSegment segment = jsonMapper.readValue(payload, DataSegment.class); timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment)); } dbSegments.close(); return timeline; } }); return Lists.newArrayList(Iterables.concat(Iterables.transform(timeline.lookup(interval), new Function<TimelineObjectHolder<String, DataSegment>, Iterable<DataSegment>>() { @Override public Iterable<DataSegment> apply(TimelineObjectHolder<String, DataSegment> input) { return input.getObject().payloads(); } }))); }
From source file:io.druid.metadata.IndexerSQLMetadataStorageCoordinator.java
License:Apache License
public List<DataSegment> getUnusedSegmentsForInterval(final String dataSource, final Interval interval) {
    // Collect every unused (used = false) segment fully contained in the given interval.
    final List<DataSegment> unusedSegments = connector.getDBI()
            .withHandle(new HandleCallback<List<DataSegment>>() {
                @Override
                public List<DataSegment> withHandle(Handle handle) throws IOException, SQLException {
                    final String sql = String.format(
                            "SELECT payload FROM %s WHERE dataSource = :dataSource and start >= :start and \"end\" <= :end and used = false",
                            dbTables.getSegmentsTable());
                    return handle.createQuery(sql)
                            .bind("dataSource", dataSource)
                            .bind("start", interval.getStart().toString())
                            .bind("end", interval.getEnd().toString())
                            .map(ByteArrayMapper.FIRST)
                            .fold(Lists.<DataSegment>newArrayList(),
                                    new Folder3<List<DataSegment>, byte[]>() {
                                        @Override
                                        public List<DataSegment> fold(List<DataSegment> accumulator,
                                                byte[] payload, FoldController foldController,
                                                StatementContext statementContext) throws SQLException {
                                            try {
                                                // Each payload is a JSON-serialized DataSegment.
                                                accumulator.add(jsonMapper.readValue(payload, DataSegment.class));
                                                return accumulator;
                                            } catch (Exception e) {
                                                throw Throwables.propagate(e);
                                            }
                                        }
                                    });
                }
            });
    log.info("Found %,d segments for %s for interval %s.", unusedSegments.size(), dataSource, interval);
    return unusedSegments;
}
From source file:io.druid.segment.incremental.IncrementalIndexStorageAdapter.java
License:Apache License
// Builds one Cursor per granularity bucket over the in-memory incremental index,
// clipped to the overlap of the requested interval and the data's time range
// [minTime, gran.next(gran.truncate(maxTime))). Each Cursor iterates the index's
// time-sorted sub-map for its bucket and skips rows the filter's ValueMatcher rejects.
// NOTE(review): code preserved verbatim from the collapsed source listing; only
// comments changed (site watermark removed, line break restored after the
// "check for null entry" comment). Math.max(input, ...) unboxes `input` and assumes
// it is non-null despite the @Nullable annotation — TODO confirm callers never pass null.
@Override public Sequence<Cursor> makeCursors(final Filter filter, final Interval interval, final QueryGranularity gran) { if (index.isEmpty()) { return Sequences.empty(); } Interval actualIntervalTmp = interval; final Interval dataInterval = new Interval(getMinTime().getMillis(), gran.next(gran.truncate(getMaxTime().getMillis()))); if (!actualIntervalTmp.overlaps(dataInterval)) { return Sequences.empty(); } if (actualIntervalTmp.getStart().isBefore(dataInterval.getStart())) { actualIntervalTmp = actualIntervalTmp.withStart(dataInterval.getStart()); } if (actualIntervalTmp.getEnd().isAfter(dataInterval.getEnd())) { actualIntervalTmp = actualIntervalTmp.withEnd(dataInterval.getEnd()); } final Interval actualInterval = actualIntervalTmp; return Sequences.map( Sequences.simple(gran.iterable(actualInterval.getStartMillis(), actualInterval.getEndMillis())), new Function<Long, Cursor>() { EntryHolder currEntry = new EntryHolder(); private final ValueMatcher filterMatcher; { filterMatcher = makeFilterMatcher(filter, currEntry); } @Override public Cursor apply(@Nullable final Long input) { final long timeStart = Math.max(input, actualInterval.getStartMillis()); return new Cursor() { private Iterator<Map.Entry<IncrementalIndex.TimeAndDims, Integer>> baseIter; private ConcurrentNavigableMap<IncrementalIndex.TimeAndDims, Integer> cursorMap; final DateTime time; int numAdvanced = -1; boolean done; { cursorMap = index.getSubMap( new IncrementalIndex.TimeAndDims(timeStart, new String[][] {}), new IncrementalIndex.TimeAndDims( Math.min(actualInterval.getEndMillis(), gran.next(input)), new String[][] {})); time = gran.toDateTime(input); reset(); } @Override public DateTime getTime() { return time; } @Override public void advance() { if (!baseIter.hasNext()) { done = true; return; } while (baseIter.hasNext()) { if (Thread.interrupted()) { throw new QueryInterruptedException(); } currEntry.set(baseIter.next()); if (filterMatcher.matches()) { return; } } if 
(!filterMatcher.matches()) { done = true; } } @Override public void advanceTo(int offset) { int count = 0; while (count < offset && !isDone()) { advance(); count++; } } @Override public boolean isDone() { return done; } @Override public void reset() { baseIter = cursorMap.entrySet().iterator(); if (numAdvanced == -1) { numAdvanced = 0; } else { Iterators.advance(baseIter, numAdvanced); } if (Thread.interrupted()) { throw new QueryInterruptedException(); } boolean foundMatched = false; while (baseIter.hasNext()) { currEntry.set(baseIter.next()); if (filterMatcher.matches()) { foundMatched = true; break; } numAdvanced++; } done = !foundMatched && (cursorMap.size() == 0 || !baseIter.hasNext()); } @Override public DimensionSelector makeDimensionSelector(final String dimension, @Nullable final ExtractionFn extractionFn) { if (dimension.equals(Column.TIME_COLUMN_NAME)) { return new SingleScanTimeDimSelector(makeLongColumnSelector(dimension), extractionFn); } final IncrementalIndex.DimDim dimValLookup = index.getDimension(dimension); if (dimValLookup == null) { return NULL_DIMENSION_SELECTOR; } final int maxId = dimValLookup.size(); final int dimIndex = index.getDimensionIndex(dimension); return new DimensionSelector() { @Override public IndexedInts getRow() { final ArrayList<Integer> vals = Lists.newArrayList(); if (dimIndex < currEntry.getKey().getDims().length) { final String[] dimVals = currEntry.getKey().getDims()[dimIndex]; if (dimVals != null) { for (String dimVal : dimVals) { int id = dimValLookup.getId(dimVal); if (id < maxId) { vals.add(id); } } } } // check for null entry
 if (vals.isEmpty() && dimValLookup.contains(null)) { int id = dimValLookup.getId(null); if (id < maxId) { vals.add(id); } } return new IndexedInts() { @Override public int size() { return vals.size(); } @Override public int get(int index) { return vals.get(index); } @Override public Iterator<Integer> iterator() { return vals.iterator(); } @Override public void fill(int index, int[] toFill) { 
throw new UnsupportedOperationException("fill not supported"); } @Override public void close() throws IOException { } }; } @Override public int getValueCardinality() { return maxId; } @Override public String lookupName(int id) { final String value = dimValLookup.getValue(id); return extractionFn == null ? value : extractionFn.apply(value); } @Override public int lookupId(String name) { if (extractionFn != null) { throw new UnsupportedOperationException( "cannot perform lookup when applying an extraction function"); } return dimValLookup.getId(name); } }; } @Override public FloatColumnSelector makeFloatColumnSelector(String columnName) { final Integer metricIndexInt = index.getMetricIndex(columnName); if (metricIndexInt == null) { return new FloatColumnSelector() { @Override public float get() { return 0.0f; } }; } final int metricIndex = metricIndexInt; return new FloatColumnSelector() { @Override public float get() { return index.getMetricFloatValue(currEntry.getValue(), metricIndex); } }; } @Override public LongColumnSelector makeLongColumnSelector(String columnName) { if (columnName.equals(Column.TIME_COLUMN_NAME)) { return new LongColumnSelector() { @Override public long get() { return currEntry.getKey().getTimestamp(); } }; } final Integer metricIndexInt = index.getMetricIndex(columnName); if (metricIndexInt == null) { return new LongColumnSelector() { @Override public long get() { return 0L; } }; } final int metricIndex = metricIndexInt; return new LongColumnSelector() { @Override public long get() { return index.getMetricLongValue(currEntry.getValue(), metricIndex); } }; } @Override public ObjectColumnSelector makeObjectColumnSelector(String column) { if (column.equals(Column.TIME_COLUMN_NAME)) { return new ObjectColumnSelector<Long>() { @Override public Class classOfObject() { return Long.TYPE; } @Override public Long get() { return currEntry.getKey().getTimestamp(); } }; } final Integer metricIndexInt = index.getMetricIndex(column); if (metricIndexInt != 
null) { final int metricIndex = metricIndexInt; final ComplexMetricSerde serde = ComplexMetrics .getSerdeForType(index.getMetricType(column)); return new ObjectColumnSelector() { @Override public Class classOfObject() { return serde.getObjectStrategy().getClazz(); } @Override public Object get() { return index.getMetricObjectValue(currEntry.getValue(), metricIndex); } }; } final Integer dimensionIndexInt = index.getDimensionIndex(column); if (dimensionIndexInt != null) { final int dimensionIndex = dimensionIndexInt; return new ObjectColumnSelector<Object>() { @Override public Class classOfObject() { return Object.class; } @Override public Object get() { IncrementalIndex.TimeAndDims key = currEntry.getKey(); if (key == null) { return null; } String[][] dims = key.getDims(); if (dimensionIndex >= dims.length) { return null; } final String[] dimVals = dims[dimensionIndex]; if (dimVals == null || dimVals.length == 0) { return null; } if (dimVals.length == 1) { return dimVals[0]; } return dimVals; } }; } return null; } }; } }); }
From source file:io.druid.segment.IndexMerger.java
License:Apache License
public static void createIndexDrdFile(byte versionId, File inDir, GenericIndexed<String> availableDimensions, GenericIndexed<String> availableMetrics, Interval dataInterval, BitmapSerdeFactory bitmapSerdeFactory) throws IOException { File indexFile = new File(inDir, "index.drd"); try (FileChannel channel = new FileOutputStream(indexFile).getChannel()) { channel.write(ByteBuffer.wrap(new byte[] { versionId })); availableDimensions.writeToChannel(channel); availableMetrics.writeToChannel(channel); serializerUtils.writeString(channel, String.format("%s/%s", dataInterval.getStart(), dataInterval.getEnd())); serializerUtils.writeString(channel, mapper.writeValueAsString(bitmapSerdeFactory)); }//from w w w . ja va2 s . c o m IndexIO.checkFileSize(indexFile); }
From source file:io.druid.segment.QueryableIndexStorageAdapter.java
License:Apache License
@Override
public Sequence<Cursor> makeCursors(Filter filter, Interval interval, QueryGranularity gran) {
    // Clip the requested interval to the data's [minTime, gran-aligned maxTime) range.
    final long minDataTimestamp = getMinTime().getMillis();
    final long maxDataTimestamp = getMaxTime().getMillis();
    final Interval dataInterval =
            new Interval(minDataTimestamp, gran.next(gran.truncate(maxDataTimestamp)));

    if (!interval.overlaps(dataInterval)) {
        return Sequences.empty();
    }
    Interval clippedInterval = interval;
    if (clippedInterval.getStart().isBefore(dataInterval.getStart())) {
        clippedInterval = clippedInterval.withStart(dataInterval.getStart());
    }
    if (clippedInterval.getEnd().isAfter(dataInterval.getEnd())) {
        clippedInterval = clippedInterval.withEnd(dataInterval.getEnd());
    }

    // No filter: scan every row. Otherwise scan only rows selected by the filter's bitmap.
    final Offset offset;
    if (filter == null) {
        offset = new NoFilterOffset(0, index.getNumRows());
    } else {
        final ColumnSelectorBitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(
                index.getBitmapFactoryForDimensions(), index);
        offset = new BitmapOffset(selector.getBitmapFactory(), filter.getBitmapIndex(selector));
    }

    return Sequences.filter(
            new CursorSequenceBuilder(index, clippedInterval, gran, offset, maxDataTimestamp).build(),
            Predicates.<Cursor>notNull());
}