List of usage examples for java.util.stream.LongStream.range
public static LongStream range(long startInclusive, long endExclusive)
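The returned range is half-open: startInclusive is produced, endExclusive is not, and the stream is empty when startInclusive >= endExclusive. A minimal illustration (rangeClosed is the JDK's bound-inclusive counterpart):

LongStream.range(1L, 5L).forEach(System.out::println);       // prints 1 2 3 4
LongStream.rangeClosed(1L, 5L).forEach(System.out::println); // prints 1 2 3 4 5
long n = LongStream.range(5L, 5L).count();                   // 0: empty when start >= end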
From source file:Main.java
public static void main(String[] args) {
    LongStream b = LongStream.range(1L, 5L);
    b.forEach(System.out::println); // prints 1, 2, 3, 4; the end bound 5 is exclusive
}
From source file:com.devicehive.handler.notification.NotificationSearchHandlerTest.java
@Override
public void setUp() throws Exception {
    super.setUp();
    guid = UUID.randomUUID().toString();
    // create notifications
    notifications = LongStream.range(0, 3)
            .mapToObj(i -> createNotification(i, guid))
            .collect(Collectors.toList());
    // insert notifications
    notifications.stream()
            .map(this::insertNotification)
            .forEach(this::waitForResponse);
}
From source file:io.mandrel.metrics.MetricsService.java
public Timeserie serie(String name) {
    Timeserie serie = metricsRepository.serie(name);

    LocalDateTime now = LocalDateTime.now();
    LocalDateTime minus4Hours = now.withMinute(0).withSecond(0).withNano(0).minusHours(4);
    LocalDateTime firstTime = CollectionUtils.isNotEmpty(serie) && serie.first() != null
            && serie.first().getTime().isBefore(minus4Hours) ? serie.first().getTime() : minus4Hours;
    LocalDateTime lastTime = now;

    // Pre-fill one zero-valued data point per minute between firstTime and lastTime,
    // then merge in the stored series so the result has no missing minutes.
    Set<Data> results = LongStream.range(0, Duration.between(firstTime, lastTime).toMinutes())
            .mapToObj(minutes -> firstTime.plusMinutes(minutes))
            .map(time -> Data.of(time, Long.valueOf(0)))
            .collect(TreeSet::new, TreeSet::add, (left, right) -> left.addAll(right));

    Timeserie serieWithBlank = new Timeserie();
    serieWithBlank.addAll(results);
    serieWithBlank.addAll(serie);
    return serieWithBlank;
}
From source file:pl.java.scalatech.generator.RandomPersonService.java
public Stream<User> generate(Long count) {
    // Build a lazy stream of `count` randomly generated users.
    return LongStream.range(0, count).mapToObj(x -> generate());
}
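A hypothetical call site (the randomPersonService variable and the collecting code are illustrative, not part of the source). Because the stream is lazy, no User is generated until a terminal operation runs:

List<User> users = randomPersonService.generate(10L).collect(Collectors.toList()); // 10 fresh users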
From source file:io.pivotal.strepsirrhini.chaosloris.docs.DocumentationUtilities.java
static <T> Page<T> page(Pageable pageable, LongFunction<T> function) {
    // Generate one element per index in [offset, offset + pageSize);
    // the third PageImpl argument (12) is the hardcoded total element count.
    return new PageImpl<>(
            LongStream.range(pageable.getOffset(), pageable.getOffset() + pageable.getPageSize())
                    .mapToObj(function)
                    .collect(Collectors.toList()),
            pageable, 12);
}
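A hypothetical invocation, assuming Spring Data's PageRequest (page index 1, size 5, so getOffset() == 5); the item-naming function is illustrative:

Page<String> page = DocumentationUtilities.page(PageRequest.of(1, 5), i -> "item-" + i);
// page content: "item-5" .. "item-9"; the reported total is the hardcoded 12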
From source file:io.divolte.server.hdfs.HdfsFlusherTest.java
@Test
public void shouldCreateAndPopulateFileWithSimpleStrategy() throws IOException {
    final Schema schema = schemaFromClassPath("/MinimalRecord.avsc");
    final Config config = ConfigFactory
            .parseString("divolte.hdfs_flusher.simple_rolling_file_strategy.roll_every = 1 day\n"
                    + "divolte.hdfs_flusher.simple_rolling_file_strategy.working_dir = \""
                    + tempInflightDir.toString() + "\"\n"
                    + "divolte.hdfs_flusher.simple_rolling_file_strategy.publish_dir = \""
                    + tempPublishDir.toString() + '"')
            .withFallback(ConfigFactory.parseResources("hdfs-flusher-test.conf"));
    final ValidatedConfiguration vc = new ValidatedConfiguration(() -> config);
    final HdfsFlusher flusher = new HdfsFlusher(vc, schema);

    // Ten records with increasing timestamps.
    final List<Record> records = LongStream.range(0, 10)
            .mapToObj((time) -> new GenericRecordBuilder(schema)
                    .set("ts", time).set("remoteHost", ARBITRARY_IP).build())
            .collect(Collectors.toList());

    records.forEach((record) -> flusher.process(AvroRecordBuffer.fromRecord(DivolteIdentifier.generate(),
            DivolteIdentifier.generate(), System.currentTimeMillis(), 0, record)));
    flusher.cleanup();

    Files.walk(tempPublishDir)
            .filter((p) -> p.toString().endsWith(".avro"))
            .findFirst()
            .ifPresent((p) -> verifyAvroFile(records, schema, p));
}
From source file:io.divolte.server.hdfs.HdfsFlusherTest.java
@Test
public void shouldWriteInProgressFilesWithNonAvroExtension() throws IOException {
    final Schema schema = schemaFromClassPath("/MinimalRecord.avsc");
    final Config config = ConfigFactory
            .parseString("divolte.hdfs_flusher.simple_rolling_file_strategy.roll_every = 1 day\n"
                    + "divolte.hdfs_flusher.simple_rolling_file_strategy.working_dir = \""
                    + tempInflightDir.toString() + "\"\n"
                    + "divolte.hdfs_flusher.simple_rolling_file_strategy.publish_dir = \""
                    + tempPublishDir.toString() + '"')
            .withFallback(ConfigFactory.parseResources("hdfs-flusher-test.conf"));
    final ValidatedConfiguration vc = new ValidatedConfiguration(() -> config);
    final HdfsFlusher flusher = new HdfsFlusher(vc, schema);

    final List<Record> records = LongStream.range(0, 10)
            .mapToObj((time) -> new GenericRecordBuilder(schema)
                    .set("ts", time).set("remoteHost", ARBITRARY_IP).build())
            .collect(Collectors.toList());

    records.forEach((record) -> flusher.process(AvroRecordBuffer.fromRecord(DivolteIdentifier.generate(),
            DivolteIdentifier.generate(), System.currentTimeMillis(), 0, record)));

    // Files still being written must carry the .avro.partial extension, not .avro.
    assertTrue(Files.walk(tempInflightDir)
            .filter((p) -> p.toString().endsWith(".avro.partial"))
            .findFirst()
            .isPresent());
}
From source file:com.epam.catgenome.dao.DaoHelper.java
/**
 * Returns a {@code List} which contains the next values for the sequence with the given name.
 *
 * @param sequenceName {@code String} specifies the fully-qualified name of the sequence
 *                     whose next values should be returned by a call
 * @param count int specifies the number of next values that should be retrieved
 * @return {@code List} list of next values for the sequence; list.size() == count
 */
@Transactional(propagation = Propagation.MANDATORY)
public List<Long> createIds(final String sequenceName, final int count) {
    Assert.isTrue(StringUtils.isNotBlank(sequenceName));
    if (count == 0) {
        return Collections.emptyList();
    }
    // creates a new temporary list: list.size() == count
    final List<Long> rows = LongStream.range(0L, count)
            .collect(LinkedList::new, LinkedList::add, LinkedList::addAll);
    final Long listId = createTempLongList(rows);
    // generates next values for the sequence with the given name
    final MapSqlParameterSource params = new MapSqlParameterSource();
    params.addValue(HelperParameters.LIST_ID.name(), listId);
    params.addValue(HelperParameters.SEQUENCE_NAME.name(), sequenceName.trim());
    final List<Long> list = getNamedParameterJdbcTemplate().queryForList(createIdsQuery, params, Long.class);
    // clears the temporary list
    clearTempList(listId);
    return list;
}
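The three-argument collect above is a mutable reduction that fills the LinkedList directly from the primitive stream. A sketch of an equivalent, arguably more common alternative using only JDK classes, which boxes the stream first and then applies a standard collector:

// Hypothetical equivalent: same resulting LinkedList,
// at the cost of an intermediate Stream<Long>.
final List<Long> rows = LongStream.range(0L, count)
        .boxed()
        .collect(Collectors.toCollection(LinkedList::new));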
From source file:io.divolte.server.filesinks.hdfs.FileFlusherLocalHdfsTest.java
private void setupFlusher(final String rollEvery, final int recordCount) throws IOException {
    final Config config = ConfigFactory
            .parseMap(ImmutableMap.of(
                    "divolte.sinks.hdfs.file_strategy.roll_every", rollEvery,
                    "divolte.sinks.hdfs.file_strategy.working_dir", tempInflightDir.toString(),
                    "divolte.sinks.hdfs.file_strategy.publish_dir", tempPublishDir.toString()))
            .withFallback(ConfigFactory.parseResources("hdfs-flusher-test.conf"))
            .withFallback(ConfigFactory.parseResources("reference-test.conf"));
    final ValidatedConfiguration vc = new ValidatedConfiguration(() -> config);

    records = LongStream.range(0, recordCount)
            .mapToObj((time) -> new GenericRecordBuilder(schema)
                    .set("ts", time).set("remoteHost", ARBITRARY_IP).build())
            .collect(Collectors.toList());

    flusher = new FileFlusher(
            vc.configuration().getSinkConfiguration("hdfs", FileSinkConfiguration.class).fileStrategy,
            HdfsFileManager.newFactory(vc, "hdfs", schema).create());
}
From source file:io.divolte.server.hdfs.HdfsFlusherTest.java
@Test
public void shouldRollFilesWithSimpleStrategy() throws IOException, InterruptedException {
    final Schema schema = schemaFromClassPath("/MinimalRecord.avsc");
    final Config config = ConfigFactory
            .parseString("divolte.hdfs_flusher.simple_rolling_file_strategy.roll_every = 1 second\n"
                    + "divolte.hdfs_flusher.simple_rolling_file_strategy.working_dir = \""
                    + tempInflightDir.toString() + "\"\n"
                    + "divolte.hdfs_flusher.simple_rolling_file_strategy.publish_dir = \""
                    + tempPublishDir.toString() + '"')
            .withFallback(ConfigFactory.parseResources("hdfs-flusher-test.conf"));
    final ValidatedConfiguration vc = new ValidatedConfiguration(() -> config);

    final List<Record> records = LongStream.range(0, 5)
            .mapToObj((time) -> new GenericRecordBuilder(schema)
                    .set("ts", time).set("remoteHost", ARBITRARY_IP).build())
            .collect(Collectors.toList());

    final HdfsFlusher flusher = new HdfsFlusher(vc, schema);

    records.forEach((record) -> flusher.process(AvroRecordBuffer.fromRecord(DivolteIdentifier.generate(),
            DivolteIdentifier.generate(), System.currentTimeMillis(), 0, record)));

    // Let the one-second roll interval elapse so the flusher rolls to a new file.
    for (int c = 0; c < 2; c++) {
        Thread.sleep(500);
        flusher.heartbeat();
    }

    records.forEach((record) -> flusher.process(AvroRecordBuffer.fromRecord(DivolteIdentifier.generate(),
            DivolteIdentifier.generate(), System.currentTimeMillis(), 0, record)));
    flusher.cleanup();

    // Both rolled files should be published and contain the full record set.
    final MutableInt count = new MutableInt(0);
    Files.walk(tempPublishDir).filter((p) -> p.toString().endsWith(".avro")).forEach((p) -> {
        verifyAvroFile(records, schema, p);
        count.increment();
    });
    assertEquals(2, count.intValue());
}