Example usage for java.util.stream IntStream range

List of usage examples for java.util.stream IntStream range

Introduction

On this page you can find example usage for java.util.stream IntStream range.

Prototype

public static IntStream range(int startInclusive, int endExclusive) 

Document

Returns a sequential ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive) by an incremental step of 1.
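
As a quick standalone illustration of those bounds (not taken from any of the sources below), this sketch prints 0 through 4; the end bound itself is never produced, and IntStream.rangeClosed is the variant that includes it:

import java.util.stream.IntStream;

public class RangeDemo {
    public static void main(String[] args) {
        // Prints: 0 1 2 3 4 -- the end bound 5 is exclusive.
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
    }
}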

Usage

From source file:com.yahoo.bullet.storm.JoinBoltTest.java

@Test
public void testCounting() {
    bolt = new DonableJoinBolt(config, 5, true);
    setup(bolt);

    String filterQuery = makeGroupFilterQuery("timestamp", asList("1", "2"), EQUALS, GROUP, 1,
            singletonList(new GroupOperation(COUNT, null, "cnt")));
    Tuple query = TupleUtils.makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", filterQuery, EMPTY);
    bolt.execute(query);

    // Send 5 GroupData with counts 1, 2, 3, 4, 5 to the JoinBolt
    IntStream.range(1, 6)
            .forEach(i -> sendRawByteTuplesTo(bolt, "42", singletonList(getGroupDataWithCount("cnt", i))));

    // 1 + 2 + 3 + 4 + 5
    List<BulletRecord> result = singletonList(RecordBox.get().add("cnt", 15L).getRecord());
    Tuple expected = TupleUtils.makeTuple(TupleClassifier.Type.RESULT_TUPLE, "42", Clip.of(result).asJSON(),
            COMPLETED);

    Tuple tick = TupleUtils.makeTuple(TupleClassifier.Type.TICK_TUPLE);
    // Should start buffering the query for the query tickout
    bolt.execute(tick);
    for (int i = 0; i < BulletStormConfig.DEFAULT_JOIN_BOLT_QUERY_POST_FINISH_BUFFER_TICKS - 1; ++i) {
        bolt.execute(tick);
        Assert.assertFalse(wasResultEmittedTo(TopologyConstants.RESULT_STREAM, expected));
    }
    bolt.execute(tick);

    Assert.assertTrue(wasResultEmittedTo(TopologyConstants.RESULT_STREAM, expected));
    Tuple metadata = TupleUtils.makeTuple(TupleClassifier.Type.FEEDBACK_TUPLE, "42",
            new Metadata(Metadata.Signal.COMPLETE, null));
    Assert.assertTrue(wasMetadataEmittedTo(TopologyConstants.FEEDBACK_STREAM, metadata));
    Assert.assertEquals(collector.getAllEmittedTo(TopologyConstants.RESULT_STREAM).count(), 1);
    Assert.assertEquals(collector.getAllEmittedTo(TopologyConstants.FEEDBACK_STREAM).count(), 1);
}

From source file:com.simiacryptus.mindseye.lang.Tensor.java

/**
 * Reduce parallel tensor.
 *
 * @param right the right-hand tensor
 * @param f     the binary operator applied to each pair of elements
 * @return the resulting tensor
 */
@Nullable
public Tensor reduceParallel(@Nonnull final Tensor right, @Nonnull final DoubleBinaryOperator f) {
    if (!Arrays.equals(right.getDimensions(), getDimensions())) {
        throw new IllegalArgumentException(
                Arrays.toString(right.getDimensions()) + " != " + Arrays.toString(getDimensions()));
    }
    @Nullable
    final double[] dataL = getData();
    @Nullable
    final double[] dataR = right.getData();
    return new Tensor(Tensor.getDoubles(
            IntStream.range(0, length()).mapToDouble(i -> f.applyAsDouble(dataL[i], dataR[i])), length()),
            dimensions);
}
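
A pattern worth noting in reduceParallel: since the stream API has no built-in zip, IntStream.range over the indices is the usual way to combine two arrays element by element. A minimal self-contained sketch of just that idiom, with illustrative sample arrays:

import java.util.Arrays;
import java.util.stream.IntStream;

public class ZipArraysSketch {
    public static void main(String[] args) {
        double[] left = { 1, 2, 3 };
        double[] right = { 10, 20, 30 };
        // Index-based "zip": combine left[i] and right[i] pairwise.
        double[] sums = IntStream.range(0, left.length)
                .mapToDouble(i -> left[i] + right[i])
                .toArray();
        System.out.println(Arrays.toString(sums)); // [11.0, 22.0, 33.0]
    }
}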

From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java

private List<double[]> obtainMAPVariableConditionalDistributions(
        List<List<UnivariateDistribution>> posteriorMAPVariableDistributions) {

    List<double[]> listCondDistributions = new ArrayList<>(nTimeSteps);

    int nStates = MAPvariable.getNumberOfStates();

    // Univariate distribution Y_0
    // UnivariateDistribution dist0_1 = posteriorMAPDistributionsEvenModel.get(0); // This variable Z_0 groups Y_0 and Y_1
    // UnivariateDistribution dist0 = posteriorMAPDistributionsOddModel.get(0); // This variable is just Y_0 (not a group)

    IntStream.range(0, nTimeSteps).forEachOrdered(timeStep -> {

        double[] combinedConditionalDistributionProbabilities, baseDistributionProbabilities;

        int baseModelIndex = (timeStep + 1) % nMergedClassVars;
        int baseDistributionIndex = (timeStep >= baseModelIndex)
                ? (baseModelIndex == 0 ? 0 : 1) + (timeStep - baseModelIndex) / nMergedClassVars
                : (timeStep - baseModelIndex) / nMergedClassVars;
        baseDistributionProbabilities = posteriorMAPVariableDistributions.get(baseModelIndex)
                .get(baseDistributionIndex).getParameters();
        int nStatesBaseDistribution = baseDistributionProbabilities.length;
        int baseDistrib_nMergedVars = (int) Math.round(Math.log(nStatesBaseDistribution) / Math.log(nStates));

        combinedConditionalDistributionProbabilities =

                IntStream.range(0, nMergedClassVars).mapToObj(modelNumber -> {

                    if (modelNumber == baseModelIndex) {
                        return baseDistributionProbabilities;
                    }

                    int distributionIndex = (timeStep >= modelNumber)
                            ? (modelNumber == 0 ? 0 : 1) + (timeStep - modelNumber) / nMergedClassVars
                            : (timeStep - modelNumber) / nMergedClassVars;
                    int currentVarIndex = (timeStep >= modelNumber)
                            ? (timeStep - modelNumber) % nMergedClassVars
                            : timeStep;

                    UnivariateDistribution currentDistrib = posteriorMAPVariableDistributions.get(modelNumber)
                            .get(distributionIndex);

                    double[] probabilities = new double[nStatesBaseDistribution];

                    int currentDistrib_nMergedVars = (int) Math.round(
                            Math.log(currentDistrib.getVariable().getNumberOfStates()) / Math.log(nStates));
                    int current_nMergedVarsBaseDist = (int) Math
                            .round(Math.log(baseDistributionProbabilities.length) / Math.log(nStates));

                    if (distributionIndex == 0) {

                        for (int m = 0; m < Math.pow(nStates, currentDistrib_nMergedVars); m++) {

                            String m_base_nStates = Integer.toString(m, nStates);
                            m_base_nStates = StringUtils.leftPad(m_base_nStates, currentDistrib_nMergedVars,
                                    '0');

                            int index_init = currentVarIndex + 1 - baseDistrib_nMergedVars;
                            int index_end = currentVarIndex + 1;

                            String statesSequence = m_base_nStates.substring(index_init, index_end);
                            int currentState = Integer.parseInt(statesSequence, nStates);

                            probabilities[currentState] += currentDistrib.getParameters()[m];
                        }
                    } else {
                        UnivariateDistribution previousDistrib = posteriorMAPVariableDistributions
                                .get(modelNumber).get(distributionIndex - 1);
                        int previousDistrib_nMergedVars = (int) Math
                                .round(Math.log(previousDistrib.getVariable().getNumberOfStates())
                                        / Math.log(nStates));

                        for (int n = 0; n < Math.pow(nStates, previousDistrib_nMergedVars); n++) {

                            String n_base_nStates = Integer.toString(n, nStates);
                            n_base_nStates = StringUtils.leftPad(n_base_nStates, previousDistrib_nMergedVars,
                                    '0');

                            for (int m = 0; m < Math.pow(nStates, currentDistrib_nMergedVars); m++) {

                                String m_base_nStates = Integer.toString(m, nStates);
                                m_base_nStates = StringUtils.leftPad(m_base_nStates, currentDistrib_nMergedVars,
                                        '0');

                                String n_concat_m_base_nStates = n_base_nStates.concat(m_base_nStates);
                                int index_init = previousDistrib_nMergedVars + currentVarIndex + 1
                                        - baseDistrib_nMergedVars;
                                int index_end = previousDistrib_nMergedVars + currentVarIndex + 1;
                                String statesSequence = n_concat_m_base_nStates.substring(index_init,
                                        index_end);

                                int currentState = Integer.parseInt(statesSequence, nStates);

                                probabilities[currentState] += previousDistrib.getParameters()[n]
                                        * currentDistrib.getParameters()[m];
                            }
                        }
                    }
                    return probabilities;
                }).reduce(new double[baseDistributionProbabilities.length], (doubleArray1, doubleArray2) -> {
                    if (doubleArray1.length != doubleArray2.length) {
                        System.exit(-40);
                    }
                    for (int i = 0; i < doubleArray1.length; i++)
                        doubleArray1[i] += ((double) 1 / nMergedClassVars) * doubleArray2[i];
                    return doubleArray1;
                });

        //System.out.println("Combined distribution " + Arrays.toString(combinedConditionalDistributionProbabilities));
        listCondDistributions.add(combinedConditionalDistributionProbabilities);
    });

    return listCondDistributions;
}
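
The mapToObj(...).reduce(...) shape above generates one double[] per model and then averages them into a single array. A minimal sketch of just that averaging step, with an illustrative three-model input (note that mutating the identity array inside reduce, as the source does, is only safe on a sequential stream):

import java.util.Arrays;
import java.util.stream.IntStream;

public class ArrayAverageSketch {
    public static void main(String[] args) {
        double[][] perModel = { { 0, 3 }, { 3, 6 }, { 6, 9 } };
        int nModels = perModel.length;
        // Element-wise average: start from a zero-filled array and
        // add each model's array scaled by 1/nModels.
        double[] averaged = IntStream.range(0, nModels)
                .mapToObj(m -> perModel[m])
                .reduce(new double[perModel[0].length], (acc, next) -> {
                    for (int i = 0; i < acc.length; i++) {
                        acc[i] += next[i] / nModels;
                    }
                    return acc;
                });
        System.out.println(Arrays.toString(averaged)); // [3.0, 6.0]
    }
}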

From source file:com.yahoo.bullet.storm.FilterBoltTest.java

@Test
public void testFilteringLatency() {
    config = new BulletStormConfig();
    // Don't use the overridden aggregation default size, but turn on built-in metrics
    config.set(BulletStormConfig.TOPOLOGY_METRICS_BUILT_IN_ENABLE, true);
    collector = new CustomCollector();
    CustomTopologyContext context = new CustomTopologyContext();
    bolt = new FilterBolt(TopologyConstants.RECORD_COMPONENT, config);
    ComponentUtils.prepare(new HashMap<>(), bolt, context, collector);

    Tuple query = makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42", makeFieldFilterQuery("bar"), METADATA);
    bolt.execute(query);

    BulletRecord record = RecordBox.get().add("field", "foo").getRecord();
    long start = System.currentTimeMillis();
    IntStream.range(0, 10).mapToObj(i -> makeRecordTuple(record, System.currentTimeMillis()))
            .forEach(bolt::execute);
    long end = System.currentTimeMillis();
    double actualLatency = context.getDoubleMetric(TopologyConstants.LATENCY_METRIC);
    Assert.assertTrue(actualLatency <= end - start);
}

From source file:org.eclipse.hawkbit.repository.test.util.TestdataFactory.java

/**
 * Builds a list of {@link Target} fixtures from the given parameters.
 *
 * @param numberOfTargets
 *            number of targets to create
 * @param controllerIdPrefix
 *            prefix used for the controller ID
 * @param descriptionPrefix
 *            prefix used for the description
 * @return list of {@link Target}
 */
public List<Target> createTargets(final int numberOfTargets, final String controllerIdPrefix,
        final String descriptionPrefix) {

    return targetManagement.create(IntStream.range(0, numberOfTargets)
            .mapToObj(i -> entityFactory.target().create()
                    .controllerId(String.format("%s-%05d", controllerIdPrefix, i))
                    .description(descriptionPrefix + i))
            .collect(Collectors.toList()));
}

From source file:io.pravega.controller.task.Stream.StreamMetadataTasks.java

public CompletableFuture<Void> notifyDeleteSegments(String scope, String stream, int count) {
    return Futures.allOf(IntStream.range(0, count).parallel()
            .mapToObj(segment -> notifyDeleteSegment(scope, stream, segment)).collect(Collectors.toList()));
}
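
The same fan-out shape can be reproduced with plain JDK futures. In this hedged sketch, deleteSegment is a hypothetical stand-in for the Pravega-specific notifyDeleteSegment above: each segment number is mapped to an asynchronous task, and the combined future completes once all of them do.

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class FanOutSketch {
    // Hypothetical stand-in for a per-segment asynchronous delete.
    static CompletableFuture<Void> deleteSegment(int segment) {
        return CompletableFuture.runAsync(() -> System.out.println("deleting segment " + segment));
    }

    public static void main(String[] args) {
        List<CompletableFuture<Void>> futures = IntStream.range(0, 4)
                .mapToObj(FanOutSketch::deleteSegment)
                .collect(Collectors.toList());
        // Completes when every per-segment future has completed.
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
    }
}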

From source file:com.yahoo.bullet.storm.JoinBoltTest.java

@Test
public void testCountDistinct() {
    BulletConfig bulletConfig = CountDistinctTest.makeConfiguration(8, 512);

    CountDistinct distinct = CountDistinctTest.makeCountDistinct(bulletConfig, singletonList("field"));

    IntStream.range(0, 256).mapToObj(i -> RecordBox.get().add("field", i).getRecord())
            .forEach(distinct::consume);
    byte[] first = distinct.getData();

    distinct = CountDistinctTest.makeCountDistinct(bulletConfig, singletonList("field"));

    IntStream.range(128, 256).mapToObj(i -> RecordBox.get().add("field", i).getRecord())
            .forEach(distinct::consume);
    byte[] second = distinct.getData();

    // Send generated data to JoinBolt
    bolt = new DonableJoinBolt(config, 2, true);
    setup(bolt);

    Tuple query = TupleUtils.makeIDTuple(TupleClassifier.Type.QUERY_TUPLE, "42",
            makeAggregationQuery(COUNT_DISTINCT, 1, null, Pair.of("field", "field")), EMPTY);
    bolt.execute(query);

    sendRawByteTuplesTo(bolt, "42", asList(first, second));

    List<BulletRecord> result = singletonList(
            RecordBox.get().add(CountDistinct.DEFAULT_NEW_NAME, 256.0).getRecord());
    Tuple expected = TupleUtils.makeTuple(TupleClassifier.Type.RESULT_TUPLE, "42", Clip.of(result).asJSON(),
            COMPLETED);

    Tuple tick = TupleUtils.makeTuple(TupleClassifier.Type.TICK_TUPLE);
    bolt.execute(tick);
    for (int i = 0; i < BulletStormConfig.DEFAULT_JOIN_BOLT_QUERY_POST_FINISH_BUFFER_TICKS - 1; ++i) {
        bolt.execute(tick);
        Assert.assertFalse(wasResultEmittedTo(TopologyConstants.RESULT_STREAM, expected));
    }
    bolt.execute(tick);

    Assert.assertTrue(wasResultEmittedTo(TopologyConstants.RESULT_STREAM, expected));
    Tuple metadata = TupleUtils.makeTuple(TupleClassifier.Type.FEEDBACK_TUPLE, "42",
            new Metadata(Metadata.Signal.COMPLETE, null));
    Assert.assertTrue(wasMetadataEmittedTo(TopologyConstants.FEEDBACK_STREAM, metadata));
    Assert.assertEquals(collector.getAllEmittedTo(TopologyConstants.RESULT_STREAM).count(), 1);
    Assert.assertEquals(collector.getAllEmittedTo(TopologyConstants.FEEDBACK_STREAM).count(), 1);
}

From source file:org.eclipse.hawkbit.repository.test.util.TestdataFactory.java

/**
 * Builds a list of {@link Target} fixtures from the given parameters.
 *
 * @param numberOfTargets
 *            number of targets to create
 * @param controllerIdPrefix
 *            prefix used for the controller ID
 * @param descriptionPrefix
 *            prefix used for the description
 * @param lastTargetQuery
 *            last time the target polled
 * @return list of {@link Target}
 */
public List<Target> createTargets(final int numberOfTargets, final String controllerIdPrefix,
        final String descriptionPrefix, final Long lastTargetQuery) {

    return targetManagement.create(IntStream.range(0, numberOfTargets)
            .mapToObj(i -> entityFactory.target().create()
                    .controllerId(String.format("%s-%05d", controllerIdPrefix, i))
                    .description(descriptionPrefix + i).lastTargetQuery(lastTargetQuery))
            .collect(Collectors.toList()));
}

From source file:org.talend.dataprep.preparation.service.PreparationService.java

/**
 * Extract all actions after a provided step
 *
 * @param stepsIds  The steps list
 * @param afterStep The (excluded) step id where to start the extraction
 * @return The actions after 'afterStep' to the end of the list
 */
private List<AppendStep> extractActionsAfterStep(final List<String> stepsIds, final String afterStep) {
    final int stepIndex = stepsIds.indexOf(afterStep);
    if (stepIndex == -1) {
        return emptyList();
    }

    final List<Step> steps;
    try (IntStream range = IntStream.range(stepIndex, stepsIds.size())) {
        steps = range.mapToObj(index -> getStep(stepsIds.get(index))).collect(toList());
    }

    final List<List<Action>> stepActions = steps.stream().map(this::getActions).collect(toList());

    try (IntStream filteredActions = IntStream.range(1, steps.size())) {
        return filteredActions.mapToObj(index -> {
            final List<Action> previous = stepActions.get(index - 1);
            final List<Action> current = stepActions.get(index);
            final Step step = steps.get(index);

            final AppendStep appendStep = new AppendStep();
            appendStep.setDiff(step.getDiff());
            appendStep.setActions(current.subList(previous.size(), current.size()));
            return appendStep;
        }).collect(toList());
    }
}
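
Two things stand out in extractActionsAfterStep. First, the try-with-resources blocks compile because BaseStream implements AutoCloseable, but closing is only needed for streams backed by I/O resources; for a plain IntStream.range it is harmless and usually omitted. Second, IntStream.range(1, size) is the standard way to pair each element with its predecessor. A minimal sketch of that pairing idiom alone, with an illustrative step list:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class AdjacentPairsSketch {
    public static void main(String[] args) {
        List<String> steps = List.of("root", "uppercase", "trim", "split");
        // range(1, size) visits each index alongside its predecessor,
        // mirroring the previous/current lookups above.
        List<String> transitions = IntStream.range(1, steps.size())
                .mapToObj(i -> steps.get(i - 1) + " -> " + steps.get(i))
                .collect(Collectors.toList());
        transitions.forEach(System.out::println);
    }
}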

From source file:com.thinkbiganalytics.feedmgr.service.template.RegisteredTemplateService.java

/**
 * Pass in the template IDs in order.
 */
public void orderTemplates(List<String> orderedTemplateIds, Set<String> exclude) {
    metadataAccess.commit(() -> {
        this.accessController.checkPermission(AccessController.SERVICES,
                FeedServicesAccessControl.EDIT_TEMPLATES);

        if (orderedTemplateIds != null && !orderedTemplateIds.isEmpty()) {
            IntStream.range(0, orderedTemplateIds.size()).forEach(i -> {
                String id = orderedTemplateIds.get(i);
                if (!"NEW".equals(id) && (exclude == null || (exclude != null && !exclude.contains(id)))) {
                    FeedManagerTemplate template = templateProvider.findById(templateProvider.resolveId(id));
                    if (template != null) {
                        if (!Long.valueOf(i).equals(template.getOrder())) {
                            // save the new order
                            template.setOrder(Long.valueOf(i));
                            templateProvider.update(template);
                        }
                    }
                }
            });
        }
    }, MetadataAccess.ADMIN);

}
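
Finally, IntStream.range(0, list.size()) as used in orderTemplates is the stream counterpart of an indexed for loop, handy whenever the body needs both the element and its position. A minimal sketch with an illustrative ID list:

import java.util.List;
import java.util.stream.IntStream;

public class IndexedIterationSketch {
    public static void main(String[] args) {
        List<String> templateIds = List.of("a", "b", "NEW", "c");
        // Walk the list by index so each ID is paired with its position.
        IntStream.range(0, templateIds.size())
                .forEach(i -> System.out.println("position " + i + " -> " + templateIds.get(i)));
    }
}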