List of usage examples for org.joda.time Interval getStart
public DateTime getStart()
From source file: io.druid.segment.realtime.appenderator.SegmentIdentifier.java
License: Apache License
/**
 * Jackson-deserializable identity of a segment: data source, time interval, version and shard spec.
 * Every argument is mandatory; a null value fails fast with an NPE naming the offending field.
 */
@JsonCreator
public SegmentIdentifier(
    @JsonProperty("dataSource") String dataSource,
    @JsonProperty("interval") Interval interval,
    @JsonProperty("version") String version,
    @JsonProperty("shardSpec") ShardSpec shardSpec
)
{
  Preconditions.checkNotNull(dataSource, "dataSource");
  Preconditions.checkNotNull(interval, "interval");
  Preconditions.checkNotNull(version, "version");
  Preconditions.checkNotNull(shardSpec, "shardSpec");
  this.dataSource = dataSource;
  this.interval = interval;
  this.version = version;
  this.shardSpec = shardSpec;
  // Precompute the canonical string form once; all constituents are immutable after construction.
  this.asString = DataSegment.makeDataSegmentIdentifier(
      dataSource,
      interval.getStart(),
      interval.getEnd(),
      version,
      shardSpec
  );
}
From source file: io.druid.segment.realtime.plumber.RealtimePlumber.java
License: Apache License
/**
 * Restores sinks from segments previously persisted under this schema's base directory,
 * re-announcing each restored sink and rebuilding the in-memory sink timeline.
 * Corrupted segment directories are moved aside (not deleted outright) and skipped.
 *
 * @return the commit metadata object carried by the most recently committed segment,
 *         or null if the base directory is missing/empty or no metadata was found
 */
protected Object bootstrapSinksFromDisk() {
    final VersioningPolicy versioningPolicy = config.getVersioningPolicy();

    File baseDir = computeBaseDir(schema);
    if (baseDir == null || !baseDir.exists()) {
        return null;
    }

    File[] files = baseDir.listFiles();
    if (files == null) {
        return null;
    }

    Object metadata = null;
    long latestCommitTime = 0;
    for (File sinkDir : files) {
        // Sink directory names encode the interval with '_' in place of '/',
        // e.g. "2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z".
        Interval sinkInterval = new Interval(sinkDir.getName().replace("_", "/"));

        // Only accept numerically-named subdirectories, which skips the "merged" dir
        // (and anything else that is not an intermediate persist number).
        final File[] sinkFiles = sinkDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String fileName) {
                return !(Ints.tryParse(fileName) == null);
            }
        });
        // NOTE(review): sinkFiles can be null if sinkDir is not a directory; Arrays.sort
        // would then NPE — confirm whether non-directory entries can appear under baseDir.
        // Sort persist dirs by their numeric name so hydrants are rebuilt in persist order.
        Arrays.sort(sinkFiles, new Comparator<File>() {
            @Override
            public int compare(File o1, File o2) {
                try {
                    return Ints.compare(Integer.parseInt(o1.getName()), Integer.parseInt(o2.getName()));
                } catch (NumberFormatException e) {
                    log.error(e, "Couldn't compare as numbers? [%s][%s]", o1, o2);
                    return o1.compareTo(o2);
                }
            }
        });

        boolean isCorrupted = false;
        try {
            List<FireHydrant> hydrants = Lists.newArrayList();
            for (File segmentDir : sinkFiles) {
                log.info("Loading previously persisted segment at [%s]", segmentDir);

                // Although this has been tackled at start of this method,
                // this is a double-check to keep the "merged" dir out of the hydrants.
                // If 100% sure that this is not needed, this check can be removed.
                if (Ints.tryParse(segmentDir.getName()) == null) {
                    continue;
                }
                QueryableIndex queryableIndex = null;
                try {
                    queryableIndex = IndexIO.loadIndex(segmentDir);
                } catch (IOException e) {
                    log.error(e, "Problem loading segmentDir from disk.");
                    isCorrupted = true;
                }
                if (isCorrupted) {
                    // Quarantine the unreadable segment dir rather than deleting it, so the
                    // data is preserved for post-mortem inspection.
                    try {
                        File corruptSegmentDir = computeCorruptedFileDumpDir(segmentDir, schema);
                        log.info("Renaming %s to %s", segmentDir.getAbsolutePath(),
                                corruptSegmentDir.getAbsolutePath());
                        FileUtils.copyDirectory(segmentDir, corruptSegmentDir);
                        FileUtils.deleteDirectory(segmentDir);
                    } catch (Exception e1) {
                        log.error(e1, "Failed to rename %s", segmentDir.getAbsolutePath());
                    }
                    // Note: skipping a corrupted segment might drop some data. This strategy
                    // should be changed at some point.
                    continue;
                }
                // Track the newest commit metadata seen across all restored segments; the
                // winner (by commit timestamp) is what this method ultimately returns.
                Map<String, Object> segmentMetadata = queryableIndex.getMetaData();
                if (segmentMetadata != null) {
                    Object timestampObj = segmentMetadata.get(COMMIT_METADATA_TIMESTAMP_KEY);
                    if (timestampObj != null) {
                        long timestamp = ((Long) timestampObj).longValue();
                        if (timestamp > latestCommitTime) {
                            log.info(
                                "Found metaData [%s] with latestCommitTime [%s] greater than previous recorded [%s]",
                                queryableIndex.getMetaData(), timestamp, latestCommitTime);
                            latestCommitTime = timestamp;
                            metadata = queryableIndex.getMetaData().get(COMMIT_METADATA_KEY);
                        }
                    }
                }
                hydrants.add(
                    new FireHydrant(new QueryableIndexSegment(
                        DataSegment.makeDataSegmentIdentifier(schema.getDataSource(),
                            sinkInterval.getStart(), sinkInterval.getEnd(),
                            versioningPolicy.getVersion(sinkInterval), config.getShardSpec()),
                        queryableIndex), Integer.parseInt(segmentDir.getName())));
            }
            if (hydrants.isEmpty()) {
                // Probably encountered a corrupt sink directory; nothing restorable here.
                log.warn(
                    "Found persisted segment directory with no intermediate segments present at %s, skipping sink creation.",
                    sinkDir.getAbsolutePath());
                continue;
            }
            Sink currSink = new Sink(sinkInterval, schema, config,
                versioningPolicy.getVersion(sinkInterval), hydrants);
            // Register the restored sink by its interval start and re-announce its segment.
            sinks.put(sinkInterval.getStartMillis(), currSink);
            sinkTimeline.add(currSink.getInterval(), currSink.getVersion(),
                new SingleElementPartitionChunk<Sink>(currSink));
            segmentAnnouncer.announceSegment(currSink.getSegment());
        } catch (IOException e) {
            log.makeAlert(e, "Problem loading sink[%s] from disk.", schema.getDataSource())
                .addData("interval", sinkInterval).emit();
        }
    }
    return metadata;
}
From source file: io.druid.segment.SegmentDesc.java
License: Apache License
public static String withInterval(final String identifier, Interval newInterval) { String[] splits = identifier.split(DataSegment.delimiter); if (splits.length < 4) { // happens for test segments which has invalid segment id.. ignore for now LOGGER.warn("Invalid segment identifier " + identifier); return identifier; }/* w w w.ja va2 s . c o m*/ StringBuilder builder = new StringBuilder(); builder.append(splits[0]).append(DataSegment.delimiter); builder.append(newInterval.getStart()).append(DataSegment.delimiter); builder.append(newInterval.getEnd()).append(DataSegment.delimiter); for (int i = 3; i < splits.length - 1; i++) { builder.append(splits[i]).append(DataSegment.delimiter); } builder.append(splits[splits.length - 1]); return builder.toString(); }
From source file: io.druid.server.audit.SQLAuditManager.java
License: Apache License
/**
 * Fetches the audit history rows matching the given key and type whose created_date falls
 * within the interval (or the manager's default interval when null), ordered by created_date.
 */
@Override
public List<AuditEntry> fetchAuditHistory(final String key, final String type, Interval interval) {
    final Interval queryInterval = getIntervalOrDefault(interval);
    return dbi.withHandle(new HandleCallback<List<AuditEntry>>() {
        @Override
        public List<AuditEntry> withHandle(Handle handle) throws Exception {
            // Deserializes each stored payload blob back into an AuditEntry.
            final ResultSetMapper<AuditEntry> payloadMapper = new ResultSetMapper<AuditEntry>() {
                @Override
                public AuditEntry map(int index, ResultSet r, StatementContext ctx) throws SQLException {
                    try {
                        return jsonMapper.readValue(r.getBytes("payload"), AuditEntry.class);
                    } catch (IOException e) {
                        // Surface JSON failures as SQLException to honor the mapper contract.
                        throw new SQLException(e);
                    }
                }
            };
            final String sql = String.format(
                "SELECT payload FROM %s WHERE audit_key = :audit_key and type = :type and created_date between :start_date and :end_date ORDER BY created_date",
                getAuditTable());
            return handle.createQuery(sql)
                .bind("audit_key", key)
                .bind("type", type)
                .bind("start_date", queryInterval.getStart().toString())
                .bind("end_date", queryInterval.getEnd().toString())
                .map(payloadMapper)
                .list();
        }
    });
}
From source file: io.druid.server.audit.SQLAuditManager.java
License: Apache License
/**
 * Fetches all audit history rows of the given type whose created_date falls within the
 * interval (or the manager's default interval when null), ordered by created_date.
 */
@Override
public List<AuditEntry> fetchAuditHistory(final String type, Interval interval) {
    final Interval queryInterval = getIntervalOrDefault(interval);
    return dbi.withHandle(new HandleCallback<List<AuditEntry>>() {
        @Override
        public List<AuditEntry> withHandle(Handle handle) throws Exception {
            // Deserializes each stored payload blob back into an AuditEntry.
            final ResultSetMapper<AuditEntry> payloadMapper = new ResultSetMapper<AuditEntry>() {
                @Override
                public AuditEntry map(int index, ResultSet r, StatementContext ctx) throws SQLException {
                    try {
                        return jsonMapper.readValue(r.getBytes("payload"), AuditEntry.class);
                    } catch (IOException e) {
                        // Surface JSON failures as SQLException to honor the mapper contract.
                        throw new SQLException(e);
                    }
                }
            };
            final String sql = String.format(
                "SELECT payload FROM %s WHERE type = :type and created_date between :start_date and :end_date ORDER BY created_date",
                getAuditTable());
            return handle.createQuery(sql)
                .bind("type", type)
                .bind("start_date", queryInterval.getStart().toString())
                .bind("end_date", queryInterval.getEnd().toString())
                .map(payloadMapper)
                .list();
        }
    });
}
From source file: io.druid.server.coordinator.helper.SegmentCompactorUtil.java
License: Apache License
/** * Removes {@code smallInterval} from {@code largeInterval}. The end of both intervals should be same. * * @return an interval of {@code largeInterval} - {@code smallInterval}. */// www .j av a 2 s . c o m static Interval removeIntervalFromEnd(Interval largeInterval, Interval smallInterval) { Preconditions.checkArgument(largeInterval.getEnd().equals(smallInterval.getEnd()), "end should be same. largeInterval[%s] smallInterval[%s]", largeInterval, smallInterval); return new Interval(largeInterval.getStart(), smallInterval.getStart()); }
From source file: io.druid.timeline.VersionedIntervalTimeline.java
License: Apache License
/**
 * Inserts {@code entry} into {@code timeline} starting at {@code key}, walking forward through
 * successive overlapping keys and splitting either the new entry or the existing entries
 * depending on which has the higher version.
 *
 * @param timeline the navigable map of interval keys to timeline entries being mutated
 * @param key      the first timeline key to consider; must overlap the entry's true interval
 * @param entry    the entry being added
 * @return boolean flag indicating whether or not we inserted or discarded something
 * @throws UnsupportedOperationException when a distinct entry with the same version overlaps
 */
private boolean addAtKey(NavigableMap<Interval, TimelineEntry> timeline, Interval key, TimelineEntry entry) {
    boolean retVal = false;
    Interval currKey = key;
    // entryInterval shrinks as portions of the new entry are placed or yielded; null means
    // the entire entry has been consumed/discarded.
    Interval entryInterval = entry.getTrueInterval();

    if (!currKey.overlaps(entryInterval)) {
        return false;
    }

    while (entryInterval != null && currKey != null && currKey.overlaps(entryInterval)) {
        // Capture the next key before any removals below can disturb iteration order.
        Interval nextKey = timeline.higherKey(currKey);

        int versionCompare = versionComparator.compare(entry.getVersion(), timeline.get(currKey).getVersion());

        if (versionCompare < 0) {
            // Existing entry wins: keep it, and only place the parts of the new entry that
            // stick out beyond the existing key.
            if (currKey.contains(entryInterval)) {
                return true;
            } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
                entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
            } else {
                addIntervalToTimeline(new Interval(entryInterval.getStart(), currKey.getStart()), entry, timeline);

                if (entryInterval.getEnd().isAfter(currKey.getEnd())) {
                    entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
                } else {
                    entryInterval = null; // discard this entry
                }
            }
        } else if (versionCompare > 0) {
            // New entry wins: carve the overlap out of the old entry, re-adding only the
            // non-overlapping remnants of the old entry.
            TimelineEntry oldEntry = timeline.remove(currKey);

            if (currKey.contains(entryInterval)) {
                addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
                addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
                addIntervalToTimeline(entryInterval, entry, timeline);

                return true;
            } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
                addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
            } else if (entryInterval.getEnd().isBefore(currKey.getEnd())) {
                addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
            }
        } else {
            // Equal versions: only a re-add of the exact same entry is tolerated.
            if (timeline.get(currKey).equals(entry)) {
                // This occurs when restoring segments
                timeline.remove(currKey);
            } else {
                throw new UnsupportedOperationException(
                    String.format("Cannot add overlapping segments [%s and %s] with the same version [%s]",
                        currKey, entryInterval, entry.getVersion()));
            }
        }

        currKey = nextKey;
        retVal = true;
    }

    // Place whatever portion of the new entry survived the walk (no-op when null —
    // presumably addIntervalToTimeline tolerates null; TODO confirm).
    addIntervalToTimeline(entryInterval, entry, timeline);

    return retVal;
}
From source file: julian.lylly.model.Task.java
public void setIntervals(List<Interval> intervals) { checkDistinctness(intervals);//from ww w . j a v a 2s. c om Collections.sort(intervals, new Comparator<Interval>() { @Override public int compare(Interval lhs, Interval rhs) { return lhs.getStart().compareTo(rhs.getStart()); } }); this.intervals = intervals; }
From source file: Model.ModeloTabelaPerdas.java
@Override public Object getValueAt(int rowIndex, int columnIndex) { Perda perda = perdas.get(rowIndex);//from ww w .java 2 s .c o m switch (columnIndex) { case 0: return perda.getTipoDeFlor(); case 1: String numero1 = String.valueOf(perda.getSemanas().get(0).getNumero()); Interval intervalo1 = perda.getSemanas().get(0).getIntervalo(); return "<html>Semana " + numero1 + "<br/>" + intervalo1.getStart() + "-" + intervalo1.getEnd() + "</html>"; case 2: return ""; case 3: String numero2 = String.valueOf(perda.getSemanas().get(0).getNumero()); Interval intervalo2 = perda.getSemanas().get(0).getIntervalo(); return "<html>Semana " + numero2 + "<br/>" + intervalo2.getStart() + "-" + intervalo2.getEnd() + "</html>"; case 4: return ""; case 5: String numero3 = String.valueOf(perda.getSemanas().get(0).getNumero()); Interval intervalo3 = perda.getSemanas().get(0).getIntervalo(); return "<html>Semana " + numero3 + "<br/>" + intervalo3.getStart() + "-" + intervalo3.getEnd() + "</html>"; case 6: return ""; case 7: String numero4 = String.valueOf(perda.getSemanas().get(0).getNumero()); Interval intervalo4 = perda.getSemanas().get(0).getIntervalo(); return "<html>Semana " + numero4 + "<br/>" + intervalo4.getStart() + "-" + intervalo4.getEnd() + "</html>"; case 8: return ""; case 9: String numero5 = String.valueOf(perda.getSemanas().get(0).getNumero()); Interval intervalo5 = perda.getSemanas().get(0).getIntervalo(); return "<html>Semana " + numero5 + "<br/>" + intervalo5.getStart() + "-" + intervalo5.getEnd() + "</html>"; case 10: return ""; case 11: String numero6 = String.valueOf(perda.getSemanas().get(0).getNumero()); Interval intervalo6 = perda.getSemanas().get(0).getIntervalo(); return "<html>Semana " + numero6 + "<br/>" + intervalo6.getStart() + "-" + intervalo6.getEnd() + "</html>"; case 12: return ""; default: return null; } }
From source file: net.lshift.diffa.config.DailyPeriodUnit.java
License: Apache License
/**
 * Reports whether the interval fully covers at least one whole daily period: after rounding
 * the end down and the start up to period boundaries, at least one full day must remain.
 * A null interval covers nothing.
 */
@Override
public boolean isCovering(Interval interval) {
    if (interval == null) {
        return false;
    }
    return floor(interval.getEnd()).minusDays(1).compareTo(ceiling(interval.getStart())) >= 0;
}