List of usage examples for java.util.stream.IntStream.range
public static IntStream range(int startInclusive, int endExclusive)
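The method returns a sequential IntStream over the half-open interval [startInclusive, endExclusive). Before the collected examples below, here is a minimal standalone sketch of that behavior; the class name and the "item-" labels are illustrative only and are not taken from any of the projects listed.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

// Minimal sketch: range(start, end) produces start, start + 1, ..., end - 1;
// the end bound is exclusive.
public class IntStreamRangeDemo {
    public static void main(String[] args) {
        // Prints "0 1 2 3 4" -- five values, 5 itself is excluded.
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // The index-to-object pattern (range + mapToObj + collect) that most
        // of the examples below rely on.
        List<String> labels = IntStream.range(0, 3)
                .mapToObj(i -> "item-" + i)
                .collect(Collectors.toList());
        System.out.println(labels); // [item-0, item-1, item-2]
    }
}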
From source file:io.yields.math.concepts.operator.SmoothnessTest.java
@Test(expected = IllegalArgumentException.class)
public void notEnoughSamples() {
    List<Tuple> tuples = IntStream.range(0, 5)
            .mapToObj(index -> new Tuple(index, index))
            .collect(Collectors.toList());
    new Smoothness(10).apply(tuples);
}
From source file:org.lenskit.data.store.PackedEntityCollection.java
public Stream<Entity> stream() {
    return IntStream.range(0, size).mapToObj(entityBuilder);
}
From source file:com.vmware.photon.controller.cloudstore.xenon.entity.SchedulingConstantGeneratorTest.java
/**
 * Test distribution of scheduling constants, creating hosts on multiple Xenon
 * hosts, one thread per Xenon host.
 */
@Test(dataProvider = "MultiHostHostCounts")
public void testSchedulingConstantVariationMultiHost(int xenonHostCount, int hostCount) throws Throwable {
    List<Long> schedulingConstants = Collections.synchronizedList(new ArrayList<>());
    TestEnvironment env = TestEnvironment.create(xenonHostCount);
    List<Thread> threads = new ArrayList<>();
    ServiceHost[] xenonHosts = env.getHosts();
    IntStream.range(0, xenonHostCount).forEach((xenonHostId) -> {
        Thread t = new Thread(() -> {
            List<Long> thisThreadSchedulingConstants = createHosts(xenonHosts[xenonHostId], hostCount);
            schedulingConstants.addAll(thisThreadSchedulingConstants);
        });
        t.start();
        threads.add(t);
    });
    for (Thread t : threads) {
        t.join();
    }
    env.stop();
    assertThat(schedulingConstants.size(), equalTo(hostCount * xenonHostCount));
    Collections.sort(schedulingConstants);
    double cv = schedulingConstantGapCV(schedulingConstants);
    logger.info("Scheduling constant gap coefficient of variation: {}", cv);
    assertThat(cv, lessThan(MAX_VARIATION));
}
From source file:com.github.mrenou.jacksonatic.internal.annotations.ClassAnnotationDecorator.java
private void setAnnotationsOnMemberWithParams(Map<Class<? extends Annotation>, Annotation> memberAnnotation,
        List<ParameterMapping> parametersMapping, AnnotatedWithParams constructorMember) {
    memberAnnotation.values().stream().forEach(constructorMember::addOrOverride);
    IntStream.range(0, parametersMapping.size())
            .forEach(index -> parametersMapping.get(index).getAnnotations().values().stream()
                    .forEach(annotation -> constructorMember.addOrOverrideParam(index, annotation)));
}
From source file:org.lightjason.agentspeak.action.builtin.TestCActionCollectionList.java
/**
 * test reverse action
 */
@Test
public final void reverse() {
    final List<ITerm> l_return = new ArrayList<>();
    final List<?> l_list = IntStream.range(0, 10).mapToObj(i -> Math.random()).collect(Collectors.toList());

    new CReverse().execute(false, IContext.EMPTYPLAN,
            Stream.of(CRawTerm.from(l_list)).collect(Collectors.toList()), l_return);

    Assert.assertArrayEquals(l_return.stream().map(ITerm::raw).toArray(), Lists.reverse(l_list).toArray());
}
From source file:alfio.model.PriceContainerTest.java
private Stream<Pair<Integer, PriceContainer>> generateTestStream(PriceContainer.VatStatus vatStatus) {
    List<BigDecimal> vatPercentages = IntStream.range(100, 3000)
            .mapToObj(vatCts -> new BigDecimal(vatCts).divide(new BigDecimal("100.00"), 2, RoundingMode.UNNECESSARY))
            .collect(Collectors.toList());
    return IntStream.range(1, 500_00).parallel().boxed().flatMap(i -> vatPercentages.stream()
            .map(vat -> Pair.of(i, new PriceContainerImpl(i, "CHF", vat, vatStatus))));
}
From source file:nl.salp.warcraft4j.casc.cdn.EncodingFileParser.java
public EncodingFile parse(DataReader reader, long encodingFileSize)
        throws DataReadingException, DataParsingException, CascParsingException {
    LOGGER.trace("Parsing {}-byte encoding file", encodingFileSize);
    if (reader.remaining() < encodingFileSize) {
        throw new CascParsingException(
                format("Tried to read a %d-byte encoding file from a stream with %d-bytes remaining.",
                        encodingFileSize, reader.remaining()));
    }
    long startOffset = reader.position();
    LOGGER.trace("Parsing encoding header from offset {}", startOffset);
    EncodingFileHeader header = parseHeader(reader);
    long segmentStartPosition = startOffset + header.getSegmentOffset() + HEADER_SIZE;
    LOGGER.trace("Parsed {} byte encoding header from position {} to {}: {}", HEADER_SIZE, startOffset,
            reader.position(), header);
    LOGGER.trace("Encoding file segments: {}", header.getSegmentCount());
    LOGGER.trace("Encoding file segment offset: {}", header.getSegmentOffset());
    LOGGER.trace("Encoding file requires {} bytes of data with a file size of {} bytes.",
            getEncodingFileSize(header), encodingFileSize);
    if (reader.position() != startOffset + HEADER_SIZE) {
        throw new CascParsingException(
                format("Error reading header segment, ended up on offset %d instead of %d",
                        reader.position(), startOffset + HEADER_SIZE));
    }
    if (getEncodingFileSize(header) > encodingFileSize) {
        throw new CascParsingException(format(
                "Invalid encoding file size: %d segments require %d bytes of data with %d bytes of data provided",
                header.getSegmentCount(), getEncodingFileSize(header), encodingFileSize));
    }
    long stringsStartPosition = reader.position();
    LOGGER.trace("Parsing encoding string segment from offset {}", (stringsStartPosition - startOffset));
    List<String> strings = parseStrings(segmentStartPosition, reader);
    LOGGER.trace("Read {} strings from position {} to {} ({} bytes)", strings.size(), stringsStartPosition,
            segmentStartPosition, segmentStartPosition - stringsStartPosition);
    if (reader.position() != segmentStartPosition) {
        throw new CascParsingException(
                format("Error reading string segment, ended up on offset %d instead of %d",
                        reader.position(), segmentStartPosition));
    }
    LOGGER.trace("Parsing {} encoding file segment checksums from offset {}", header.getSegmentCount(),
            segmentStartPosition - startOffset);
    List<EncodingFileSegmentChecksum> segmentChecksums = parseSegmentChecksums(header.getSegmentCount(), reader);
    LOGGER.trace("Read {} {}-byte segment checksums from position {} to {} ({} bytes)", segmentChecksums.size(),
            SEGMENT_CHECKSUMS_SIZE, segmentStartPosition, reader.position(),
            reader.position() - segmentStartPosition);
    if (reader.position() != (segmentStartPosition + (header.getSegmentCount() * SEGMENT_CHECKSUMS_SIZE))) {
        throw new CascParsingException(
                format("Error reading segment checksums, ended up on offset %d instead of %d",
                        reader.position(), segmentStartPosition));
    }
    long entryStartPosition = reader.position();
    LOGGER.trace("Parsing {} encoding file entry segments from offset {}", header.getSegmentCount(),
            entryStartPosition - startOffset);
    List<EncodingFileSegment> segments = parseSegments(header.getSegmentCount(), reader);
    LOGGER.trace("Parsed {} encoding file entry segments from position {} to {} ({} bytes)",
            header.getSegmentCount(), entryStartPosition, reader.position(),
            reader.position() - entryStartPosition);
    if (segments.size() != segmentChecksums.size()) {
        throw new CascParsingException(format(
                "Retrieved %d encoding file segments and %d segment checksums with %d segments expected.",
                segments.size(), segmentChecksums.size(), header.getSegmentCount()));
    }
    LOGGER.trace("Validating data integrity of {} segments", segments.size());
    List<Boolean> segmentValidity = IntStream.range(0, segments.size())
            .mapToObj(i -> validateSegment(segments.get(i), segmentChecksums.get(i)))
            .collect(Collectors.toList());
    if (segmentValidity.stream().anyMatch(valid -> valid == false)) {
        IntStream.range(0, segments.size()).filter(index -> segmentValidity.get(index) == false)
                .forEach(i -> LOGGER.trace("segment {} has checksum {} calculated with {} expected", i,
                        byteArrayToHexString(segments.get(i).getChecksum()),
                        byteArrayToHexString(segmentChecksums.get(i).getSegmentChecksum())));
        long invalidSegments = IntStream.range(0, segmentValidity.size())
                .filter(index -> segmentValidity.get(index) == false).count();
        throw new CascParsingException(format("Encoding file has %d invalid segments out of %d segments.",
                invalidSegments, segments.size()));
    }
    LOGGER.trace("Validated data integrity of {} segments", segments.size());
    List<EncodingEntry> entries = segments.stream().map(EncodingFileSegment::getEntries).flatMap(List::stream)
            .collect(Collectors.toList());
    LOGGER.trace("Successfully parsed encoding file with {} entries from {} segments", entries.size(),
            header.getSegmentCount());
    return new EncodingFile(entries);
}
From source file:org.lightjason.trafficsimulation.simulation.movable.IBaseMoveable.java
/**
 * skips the current n-elements of the routing queue
 *
 * @param p_value number of elements
 */
@IAgentActionFilter
@IAgentActionName(name = "route/skip")
private void routeskip(final Number p_value) {
    if (p_value.intValue() < 1)
        throw new RuntimeException("value must be greater than zero");

    IntStream.range(0, p_value.intValue()).filter(i -> !m_route.isEmpty()).forEach(i -> m_route.remove(0));
}
From source file:com.orange.ngsi2.server.Ngsi2BaseControllerTest.java
@Test
public void checkListAttrsInvalidSyntax() throws Exception {
    String invalidAttrs = IntStream.range(0, 257).mapToObj(x -> "?").collect(Collectors.joining());
    String message = "The incoming request is invalid in this context. " + invalidAttrs + " has a bad syntax.";
    mockMvc.perform(get("/v2/i/entities").param("attrs", invalidAttrs).contentType(MediaType.APPLICATION_JSON)
            .header("Host", "localhost").accept(MediaType.APPLICATION_JSON))
            .andExpect(MockMvcResultMatchers.jsonPath("$.error").value("400"))
            .andExpect(MockMvcResultMatchers.jsonPath("$.description").value(message))
            .andExpect(status().isBadRequest());
}
From source file:org.apache.hadoop.hbase.client.TestAsyncTable.java
@Test
public void testIncrement() throws InterruptedException, ExecutionException {
    AsyncTableBase table = getTable.get();
    int count = 100;
    CountDownLatch latch = new CountDownLatch(count);
    AtomicLong sum = new AtomicLong(0L);
    IntStream.range(0, count)
            .forEach(i -> table.incrementColumnValue(row, FAMILY, QUALIFIER, 1).thenAccept(x -> {
                sum.addAndGet(x);
                latch.countDown();
            }));
    latch.await();
    assertEquals(count, Bytes
            .toLong(table.get(new Get(row).addColumn(FAMILY, QUALIFIER)).get().getValue(FAMILY, QUALIFIER)));
    assertEquals((1 + count) * count / 2, sum.get());
}
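Note on the final assertion: each of the count increment calls resolves with the post-increment counter value, so the accumulated values are 1, 2, ..., count, and the expected total is the arithmetic series sum (1 + count) * count / 2 (5050 for count = 100).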