Usage examples for java.util.stream.IntStream.range, collected from open-source projects.

public static IntStream range(int startInclusive, int endExclusive)

Returns a sequential, ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive), incrementing by 1. If startInclusive >= endExclusive, the stream is empty.
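Before the project examples below, here is a minimal, self-contained sketch of the half-open semantics (the end bound is excluded). The class and variable names are illustrative only and do not come from any of the projects listed afterwards.

import java.util.stream.IntStream;

public class IntStreamRangeDemo {
    public static void main(String[] args) {
        // Prints 0 1 2 3 4 -- endExclusive (5) is not included.
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // range(0, 10) produces exactly 10 elements: the sum of 0..9 is 45.
        int sum = IntStream.range(0, 10).sum();
        System.out.println(sum);

        // An empty stream results when startInclusive >= endExclusive.
        System.out.println(IntStream.range(5, 5).count()); // 0
    }
}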
From source file: com.yahoo.bullet.storm.JoinBoltTest.java

@Test
public void testGroupBy() {
    final int entries = 16;
    BulletConfig bulletConfig = GroupByTest.makeConfiguration(entries);
    GroupBy groupBy = GroupByTest.makeGroupBy(bulletConfig, singletonMap("fieldA", "A"), entries,
            AggregationUtils.makeGroupOperation(COUNT, null, "cnt"),
            AggregationUtils.makeGroupOperation(SUM, "fieldB", "sumB"));

    IntStream.range(0, 256)
             .mapToObj(i -> RecordBox.get().add("fieldA", i % 16).add("fieldB", i / 16).getRecord())
             .forEach(groupBy::consume);
    byte[] first = groupBy.getData();

    groupBy = GroupByTest.makeGroupBy(bulletConfig, singletonMap("fieldA", "A"), entries,
            AggregationUtils.makeGroupOperation(COUNT, null, "cnt"),
            AggregationUtils.makeGroupOperation(SUM, "fieldB", "sumB"));
    IntStream.range(256, 1024)
             .mapToObj(i -> RecordBox.get().add("fieldA", i % 16).add("fieldB", i / 16).getRecord())
             .forEach(groupBy::consume);
    byte[] second = groupBy.getData();

    // Send generated data to JoinBolt
    bolt = new DonableJoinBolt(config, 2, true);
    setup(bolt);

    List<GroupOperation> operations = asList(new GroupOperation(COUNT, null, "cnt"),
            new GroupOperation(SUM, "fieldB", "sumB"));
    String queryString = makeGroupFilterQuery("ts", singletonList("1"), EQUALS, GROUP, entries, operations,
            Pair.of("fieldA", "A"));

    Tuple query = TupleUtils.makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", queryString, EMPTY);
    bolt.execute(query);
    sendRawByteTuplesTo(bolt, "42", asList(first, second));

    Tuple tick = TupleUtils.makeTuple(TupleClassifier.Type.TICK_TUPLE);
    bolt.execute(tick);
    for (int i = 0; i < BulletStormConfig.DEFAULT_JOIN_BOLT_QUERY_POST_FINISH_BUFFER_TICKS - 1; ++i) {
        bolt.execute(tick);
        Assert.assertEquals(collector.getEmittedCount(), 0);
    }
    bolt.execute(tick);
    Assert.assertEquals(collector.getEmittedCount(), 2);

    String response = (String) collector
            .getMthElementFromNthTupleEmittedTo(TopologyConstants.RESULT_STREAM, 1, 1).get();
    JsonParser parser = new JsonParser();
    JsonObject actual = parser.parse(response).getAsJsonObject();
    JsonArray actualRecords = actual.get(Clip.RECORDS_KEY).getAsJsonArray();
    Assert.assertEquals(actualRecords.size(), 16);
}
From source file: com.simiacryptus.mindseye.lang.Tensor.java

/**
 * Sets this tensor's values in parallel, computing each element from its flat index.
 *
 * @param f the function mapping a flat index to the value to store there
 */
public void setParallelByIndex(@Nonnull final IntToDoubleFunction f) {
    IntStream.range(0, length()).parallel().forEach(c -> set(c, f.applyAsDouble(c)));
}
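The same parallel index-fill pattern works on a plain array. The following standalone sketch is not taken from the Tensor class above; it only illustrates why the pattern is safe: each index is written by exactly one task, and the terminal forEach completes before the array is read.

import java.util.function.IntToDoubleFunction;
import java.util.stream.IntStream;

public class ParallelFillDemo {
    public static void main(String[] args) {
        double[] data = new double[1_000_000];
        IntToDoubleFunction f = Math::sqrt;

        // Distinct indices mean no two tasks touch the same element,
        // so the parallel writes do not conflict.
        IntStream.range(0, data.length).parallel().forEach(i -> data[i] = f.applyAsDouble(i));

        System.out.println(data[999_999]); // ~999.9995
    }
}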
From source file: com.yahoo.bullet.storm.JoinBoltTest.java

@Test
public void testDistribution() {
    BulletConfig bulletConfig = DistributionTest.makeConfiguration(10, 128);

    Distribution distribution = DistributionTest.makeDistribution(bulletConfig,
            makeAttributes(Distribution.Type.PMF, 3), "field", 10, null);
    IntStream.range(0, 50).mapToObj(i -> RecordBox.get().add("field", i).getRecord())
             .forEach(distribution::consume);
    byte[] first = distribution.getData();

    distribution = DistributionTest.makeDistribution(bulletConfig, makeAttributes(Distribution.Type.PMF, 3),
            "field", 10, null);
    IntStream.range(50, 101).mapToObj(i -> RecordBox.get().add("field", i).getRecord())
             .forEach(distribution::consume);
    byte[] second = distribution.getData();

    bolt = new DonableJoinBolt(config, 2, true);
    setup(bolt);

    Tuple query = TupleUtils.makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42",
            makeAggregationQuery(DISTRIBUTION, 10, Distribution.Type.PMF, "field", null, null, null, null, 3),
            EMPTY);
    bolt.execute(query);
    sendRawByteTuplesTo(bolt, "42", asList(first, second));

    BulletRecord expectedA = RecordBox.get()
            .add(RANGE_FIELD, NEGATIVE_INFINITY_START + SEPARATOR + 0.0 + END_EXCLUSIVE)
            .add(COUNT_FIELD, 0.0)
            .add(PROBABILITY_FIELD, 0.0).getRecord();
    BulletRecord expectedB = RecordBox.get()
            .add(RANGE_FIELD, START_INCLUSIVE + 0.0 + SEPARATOR + 50.0 + END_EXCLUSIVE)
            .add(COUNT_FIELD, 50.0)
            .add(PROBABILITY_FIELD, 50.0 / 101).getRecord();
    BulletRecord expectedC = RecordBox.get()
            .add(RANGE_FIELD, START_INCLUSIVE + 50.0 + SEPARATOR + 100.0 + END_EXCLUSIVE)
            .add(COUNT_FIELD, 50.0)
            .add(PROBABILITY_FIELD, 50.0 / 101).getRecord();
    BulletRecord expectedD = RecordBox.get()
            .add(RANGE_FIELD, START_INCLUSIVE + 100.0 + SEPARATOR + POSITIVE_INFINITY_END)
            .add(COUNT_FIELD, 1.0)
            .add(PROBABILITY_FIELD, 1.0 / 101).getRecord();
    List<BulletRecord> results = asList(expectedA, expectedB, expectedC, expectedD);
    Tuple expected = TupleUtils.makeTuple(TupleClassifier.Type.RESULT_TUPLE, "42", Clip.of(results).asJSON(),
            COMPLETED);

    Tuple tick = TupleUtils.makeTuple(TupleClassifier.Type.TICK_TUPLE);
    bolt.execute(tick);
    for (int i = 0; i < BulletStormConfig.DEFAULT_JOIN_BOLT_QUERY_POST_FINISH_BUFFER_TICKS - 1; ++i) {
        bolt.execute(tick);
        Assert.assertFalse(wasResultEmittedTo(TopologyConstants.RESULT_STREAM, expected));
    }
    bolt.execute(tick);
    Assert.assertTrue(wasResultEmittedTo(TopologyConstants.RESULT_STREAM, expected));

    Tuple metadata = TupleUtils.makeTuple(TupleClassifier.Type.FEEDBACK_TUPLE, "42",
            new Metadata(Metadata.Signal.COMPLETE, null));
    Assert.assertTrue(wasMetadataEmittedTo(TopologyConstants.FEEDBACK_STREAM, metadata));

    Assert.assertEquals(collector.getAllEmittedTo(TopologyConstants.RESULT_STREAM).count(), 1);
    Assert.assertEquals(collector.getAllEmittedTo(TopologyConstants.FEEDBACK_STREAM).count(), 1);
}
From source file: com.yahoo.bullet.storm.JoinBoltTest.java

@Test
public void testTopK() {
    BulletConfig bulletConfig = TopKTest.makeConfiguration(ErrorType.NO_FALSE_NEGATIVES, 16);

    Map<String, String> fields = new HashMap<>();
    fields.put("A", "");
    fields.put("B", "foo");

    TopK topK = TopKTest.makeTopK(bulletConfig, makeAttributes(null, 5L), fields, 2, null);
    IntStream.range(0, 32).mapToObj(i -> RecordBox.get().add("A", i % 8).getRecord()).forEach(topK::consume);
    byte[] first = topK.getData();

    topK = TopKTest.makeTopK(bulletConfig, makeAttributes(null, 5L), fields, 2, null);
    IntStream.range(0, 8).mapToObj(i -> RecordBox.get().add("A", i % 2).getRecord()).forEach(topK::consume);
    byte[] second = topK.getData();

    bolt = new DonableJoinBolt(config, 2, true);
    setup(bolt);

    String aggregationQuery = makeAggregationQuery(TOP_K, 2, 5L, "cnt", Pair.of("A", ""), Pair.of("B", "foo"));
    Tuple query = TupleUtils.makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", aggregationQuery, EMPTY);
    bolt.execute(query);
    sendRawByteTuplesTo(bolt, "42", asList(first, second));

    BulletRecord expectedA = RecordBox.get().add("A", "0").add("foo", "null").add("cnt", 8L).getRecord();
    BulletRecord expectedB = RecordBox.get().add("A", "1").add("foo", "null").add("cnt", 8L).getRecord();
    List<BulletRecord> results = asList(expectedA, expectedB);
    Tuple expected = TupleUtils.makeTuple(TupleClassifier.Type.RESULT_TUPLE, "42", Clip.of(results).asJSON(),
            COMPLETED);

    Tuple tick = TupleUtils.makeTuple(TupleClassifier.Type.TICK_TUPLE);
    bolt.execute(tick);
    for (int i = 0; i < BulletStormConfig.DEFAULT_JOIN_BOLT_QUERY_POST_FINISH_BUFFER_TICKS - 1; ++i) {
        bolt.execute(tick);
        Assert.assertFalse(wasResultEmittedTo(TopologyConstants.RESULT_STREAM, expected));
    }
    bolt.execute(tick);
    Assert.assertTrue(wasResultEmittedTo(TopologyConstants.RESULT_STREAM, expected));

    Tuple metadata = TupleUtils.makeTuple(TupleClassifier.Type.FEEDBACK_TUPLE, "42",
            new Metadata(Metadata.Signal.COMPLETE, null));
    Assert.assertTrue(wasMetadataEmittedTo(TopologyConstants.FEEDBACK_STREAM, metadata));

    Assert.assertEquals(collector.getAllEmittedTo(TopologyConstants.RESULT_STREAM).count(), 1);
    Assert.assertEquals(collector.getAllEmittedTo(TopologyConstants.FEEDBACK_STREAM).count(), 1);
}
From source file: org.apache.geode.cache.query.functional.IndexCreationJUnitTest.java

@Test
public void failedIndexCreationCorrectlyRemovesItself() throws Exception {
    QueryService qs = CacheUtils.getQueryService();
    Cache cache = CacheUtils.getCache();
    cache.createRegionFactory(RegionShortcut.PARTITION).create("portfoliosInPartitionedRegion");
    Region region = CacheUtils.getCache().getRegion("/portfoliosInPartitionedRegion");
    IntStream.range(0, 3).forEach((i) -> {
        region.put(i, new Portfolio(i));
    });

    Index i1 = qs.createIndex("statusIndex", "secId",
            "/portfoliosInPartitionedRegion p, p.positions pos, pos.secId secId");
    try {
        Index i2 = qs.createIndex("anotherIndex", "secId", "/portfoliosInPartitionedRegion p, p.positions");
        // index should fail to create
        fail();
    } catch (IndexInvalidException e) {
    }
    qs.removeIndex(i1);

    // This test should not throw an exception if i2 was properly cleaned up.
    Index i3 = qs.createIndex("anotherIndex", "secType",
            "/portfoliosInPartitionedRegion p, p.positions pos, pos.secType secType");
    assertNotNull(i3);
}
From source file: com.simiacryptus.mindseye.lang.Tensor.java

@Nonnull
private JsonElement toJson(@Nonnull final int[] coords) {
    if (coords.length == dimensions.length) {
        final double d = get(coords);
        return new JsonPrimitive(d);
    } else {
        @Nonnull final JsonArray jsonArray = new JsonArray();
        IntStream.range(0, dimensions[dimensions.length - (coords.length + 1)]).mapToObj(i -> {
            @Nonnull final int[] newCoord = new int[coords.length + 1];
            System.arraycopy(coords, 0, newCoord, 1, coords.length);
            newCoord[0] = i;
            return toJson(newCoord);
        }).forEach(l -> jsonArray.add(l));
        return jsonArray;
    }
}
From source file: org.jamocha.dn.compiler.pathblocks.PathBlocks.java

protected static List<PathRule> createOutput(final List<Either<Rule, ExistentialProxy>> rules,
        final PathBlockSet resultBlockSet) {
    final Function<? super Block, ? extends Integer> characteristicNumber =
            block -> block.getFlatFilterInstances().size() / block.getRulesOrProxies().size();
    final TreeMap<Integer, CursorableLinkedList<Block>> blockMap = resultBlockSet.getBlocks().stream()
            .collect(groupingBy(characteristicNumber, TreeMap::new, toCollection(CursorableLinkedList::new)));

    // iterate over all the filter proxies ever used
    for (final FilterProxy filterProxy : FilterProxy.getFilterProxies()) {
        final Set<ExistentialProxy> existentialProxies = filterProxy.getProxies();
        // determine the largest characteristic number of the blocks containing filter instances
        // of one of the existential proxies (choice is arbitrary, since the filters and the
        // conflicts are identical if they belong to the same filter).
        final OptionalInt optMax = resultBlockSet.getRuleInstanceToBlocks()
                .computeIfAbsent(Either.right(existentialProxies.iterator().next()), newHashSet()).stream()
                .mapToInt(composeToInt(characteristicNumber, Integer::intValue)).max();
        if (!optMax.isPresent())
            continue;
        final int eCN = optMax.getAsInt();
        // get the list to append the blocks using the existential closure filter INSTANCE to
        final CursorableLinkedList<Block> targetList = blockMap.get(eCN);
        // for every existential part
        for (final ExistentialProxy existentialProxy : existentialProxies) {
            final FilterInstance exClosure = existentialProxy.getExistentialClosure();
            // create a list storing the blocks to move
            final List<Block> toMove = new ArrayList<>();
            for (final CursorableLinkedList<Block> blockList : blockMap.headMap(eCN, true).values()) {
                // iterate over the blocks in the current list
                for (final ListIterator<Block> iterator = blockList.listIterator(); iterator.hasNext();) {
                    final Block current = iterator.next();
                    // if the current block uses the current existential closure filter
                    // INSTANCE, it has to be moved
                    if (current.getFlatFilterInstances().contains(exClosure)) {
                        iterator.remove();
                        toMove.add(current);
                    }
                }
            }
            // append the blocks to be moved (they were only removed so far)
            targetList.addAll(toMove);
        }
    }

    final Set<FilterInstance> constructedFIs = new HashSet<>();
    final Map<Either<Rule, ExistentialProxy>, Map<FilterInstance, Set<FilterInstance>>> ruleToJoinedWith =
            new HashMap<>();
    final Map<Set<FilterInstance>, PathFilterList> joinedWithToComponent = new HashMap<>();
    // at this point, the network can be constructed
    for (final CursorableLinkedList<Block> blockList : blockMap.values()) {
        for (final Block block : blockList) {
            final List<Either<Rule, ExistentialProxy>> blockRules = Lists.newArrayList(block.getRulesOrProxies());
            final Set<List<FilterInstance>> filterInstanceColumns =
                    Block.getFilterInstanceColumns(block.getFilters(), block.getRuleToFilterToRow(), blockRules);
            // since we are considering blocks, it is either the case that all filter
            // instances of the column have been constructed or none of them have
            final PathSharedListWrapper sharedListWrapper = new PathSharedListWrapper(blockRules.size());
            final Map<Either<Rule, ExistentialProxy>, PathSharedList> ruleToSharedList = IntStream
                    .range(0, blockRules.size()).boxed()
                    .collect(toMap(blockRules::get, sharedListWrapper.getSharedSiblings()::get));
            final List<List<FilterInstance>> columnsToConstruct, columnsAlreadyConstructed;
            {
                final Map<Boolean, List<List<FilterInstance>>> partition = filterInstanceColumns.stream()
                        .collect(partitioningBy(column -> Collections.disjoint(column, constructedFIs)));
                columnsAlreadyConstructed = partition.get(Boolean.FALSE);
                columnsToConstruct = partition.get(Boolean.TRUE);
            }
            if (!columnsAlreadyConstructed.isEmpty()) {
                final Map<PathSharedList, LinkedHashSet<PathFilterList>> sharedPart = new HashMap<>();
                for (final List<FilterInstance> column : columnsAlreadyConstructed) {
                    for (final FilterInstance fi : column) {
                        sharedPart.computeIfAbsent(ruleToSharedList.get(fi.getRuleOrProxy()), newLinkedHashSet())
                                .add(joinedWithToComponent
                                        .get(ruleToJoinedWith.get(fi.getRuleOrProxy()).get(fi)));
                    }
                }
                sharedListWrapper.addSharedColumns(sharedPart);
            }
            for (final List<FilterInstance> column : columnsToConstruct) {
                sharedListWrapper.addSharedColumn(column.stream().collect(
                        toMap(fi -> ruleToSharedList.get(fi.getRuleOrProxy()), FilterInstance::convert)));
            }
            constructedFIs.addAll(block.getFlatFilterInstances());
            for (final Entry<Either<Rule, ExistentialProxy>, Map<Filter, FilterInstancesSideBySide>> entry : block
                    .getRuleToFilterToRow().entrySet()) {
                final Either<Rule, ExistentialProxy> rule = entry.getKey();
                final Set<FilterInstance> joined = entry.getValue().values().stream()
                        .flatMap(sbs -> sbs.getInstances().stream()).collect(toSet());
                final Map<FilterInstance, Set<FilterInstance>> joinedWithMapForThisRule =
                        ruleToJoinedWith.computeIfAbsent(rule, newHashMap());
                joined.forEach(fi -> joinedWithMapForThisRule.put(fi, joined));
                joinedWithToComponent.put(joined, ruleToSharedList.get(rule));
            }
        }
    }

    final List<PathRule> pathRules = new ArrayList<>();
    for (final Either<Rule, ExistentialProxy> either : rules) {
        if (either.isRight()) {
            continue;
        }
        final List<PathFilterList> pathFilterLists = Stream
                .concat(either.left().get().existentialProxies.values().stream().map(p -> Either.right(p)),
                        Stream.of(either))
                .flatMap(e -> ruleToJoinedWith.getOrDefault(e, Collections.emptyMap()).values().stream()
                        .distinct())
                .map(joinedWithToComponent::get).collect(toList());
        pathRules.add(either.left().get().getOriginal().toPathRule(PathFilterList.toSimpleList(pathFilterLists),
                pathFilterLists.size() > 1 ? InitialFactPathsFinder.gather(pathFilterLists)
                        : Collections.emptySet()));
    }
    return pathRules;
}
From source file: com.simiacryptus.mindseye.lang.Tensor.java

private String toString(final boolean prettyPrint, @Nonnull final int... coords) {
    if (coords.length == dimensions.length) {
        return Double.toString(get(coords));
    } else {
        List<CharSequence> list = IntStream.range(0, dimensions[coords.length]).mapToObj(i -> {
            @Nonnull final int[] newCoord = Arrays.copyOf(coords, coords.length + 1);
            newCoord[coords.length] = i;
            return toString(prettyPrint, newCoord);
        }).limit(15).collect(Collectors.toList());
        if (list.size() > 10) {
            list = list.subList(0, 8);
            list.add("...");
        }
        if (prettyPrint) {
            if (coords.length < dimensions.length - 2) {
                final CharSequence str = list.stream().limit(10)
                        .map(s -> "\t" + s.toString().replaceAll("\n", "\n\t"))
                        .reduce((a, b) -> a + ",\n" + b).orElse("");
                return "[\n" + str + "\n]";
            } else {
                final CharSequence str = list.stream().reduce((a, b) -> a + ", " + b).orElse("");
                return "[ " + str + " ]";
            }
        } else {
            final CharSequence str = list.stream().reduce((a, b) -> a + "," + b).orElse("");
            return "[ " + str + " ]";
        }
    }
}
From source file: org.eclipse.hawkbit.repository.jpa.ControllerManagementTest.java

@Test
@Description("Verifies that the quota specifying the maximum number of messages per action status is enforced.")
@ExpectEvents({ @Expect(type = TargetCreatedEvent.class, count = 1),
        @Expect(type = DistributionSetCreatedEvent.class, count = 1),
        @Expect(type = ActionCreatedEvent.class, count = 1),
        @Expect(type = TargetUpdatedEvent.class, count = 1),
        @Expect(type = TargetAssignDistributionSetEvent.class, count = 1),
        @Expect(type = SoftwareModuleCreatedEvent.class, count = 3) })
public void createActionStatusWithTooManyMessages() {
    final int maxMessages = quotaManagement.getMaxMessagesPerActionStatus();

    final Long actionId = assignDistributionSet(testdataFactory.createDistributionSet("ds1"),
            testdataFactory.createTargets(1)).getActions().get(0);
    assertThat(actionId).isNotNull();

    final List<String> messages = Lists.newArrayList();
    IntStream.range(0, maxMessages).forEach(i -> messages.add(i, "msg"));

    assertThat(controllerManagement.addInformationalActionStatus(
            entityFactory.actionStatus().create(actionId).messages(messages).status(Status.WARNING)))
                    .isNotNull();

    messages.add("msg");
    assertThatExceptionOfType(QuotaExceededException.class)
            .isThrownBy(() -> controllerManagement.addInformationalActionStatus(
                    entityFactory.actionStatus().create(actionId).messages(messages).status(Status.WARNING)));
}