Example usage for java.util Random nextDouble

Introduction

On this page you can find usage examples for java.util.Random.nextDouble().

Prototype

public double nextDouble() 

Document

Returns the next pseudorandom, uniformly distributed double value between 0.0 (inclusive) and 1.0 (exclusive) from this random number generator's sequence.
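
Since the returned value lies in [0.0, 1.0), two patterns recur throughout the examples below: scaling the value into an arbitrary range, and comparing it against a threshold to make a probabilistic decision. A minimal sketch of both (the seed and bounds here are illustrative, not taken from any of the examples):

import java.util.Random;

public class NextDoubleDemo {
    public static void main(String[] args) {
        Random rand = new Random(42L); // fixed seed for reproducible output
        double unit = rand.nextDouble(); // uniform in [0.0, 1.0)
        double scaled = 5.0 + unit * (10.0 - 5.0); // uniform in [5.0, 10.0)
        boolean happens = rand.nextDouble() < 0.3; // true with probability ~0.3
        System.out.println(unit + " " + scaled + " " + happens);
    }
}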

Usage

From source file:org.apache.hadoop.hbase.regionserver.TestMultiColumnScanner.java

@Test
public void testMultiColumnScanner() throws IOException {
    HRegion region = TEST_UTIL.createTestRegion(TABLE_NAME,
            new HColumnDescriptor(FAMILY).setCompressionType(comprAlgo).setBloomFilterType(bloomType)
                    .setMaxVersions(MAX_VERSIONS).setDataBlockEncoding(dataBlockEncoding));
    List<String> rows = sequentialStrings("row", NUM_ROWS);
    List<String> qualifiers = sequentialStrings("qual", NUM_COLUMNS);
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    Set<String> keySet = new HashSet<String>();

    // A map from <row>_<qualifier> to the most recent delete timestamp for
    // that column.
    Map<String, Long> lastDelTimeMap = new HashMap<String, Long>();

    Random rand = new Random(29372937L);
    Set<String> rowQualSkip = new HashSet<String>();

    // Skip some columns in some rows. We need to test scanning over a set
    // of columns when some of the columns are not there.
    for (String row : rows)
        for (String qual : qualifiers)
            if (rand.nextDouble() < COLUMN_SKIP_IN_ROW_PROB) {
                LOG.info("Skipping " + qual + " in row " + row);
                rowQualSkip.add(rowQualKey(row, qual));
            }

    // Also skip some columns in all rows.
    for (String qual : qualifiers)
        if (rand.nextDouble() < COLUMN_SKIP_EVERYWHERE_PROB) {
            LOG.info("Skipping " + qual + " in all rows");
            for (String row : rows)
                rowQualSkip.add(rowQualKey(row, qual));
        }

    for (int iFlush = 0; iFlush < NUM_FLUSHES; ++iFlush) {
        for (String qual : qualifiers) {
            // This is where we decide to include or not include this column into
            // this store file, regardless of row and timestamp.
            if (rand.nextDouble() < COLUMN_SKIP_IN_STORE_FILE_PROB)
                continue;

            byte[] qualBytes = Bytes.toBytes(qual);
            for (String row : rows) {
                Put p = new Put(Bytes.toBytes(row));
                for (long ts : TIMESTAMPS) {
                    String value = createValue(row, qual, ts);
                    KeyValue kv = KeyValueTestUtil.create(row, FAMILY, qual, ts, value);
                    assertEquals(kv.getTimestamp(), ts);
                    p.add(kv);
                    String keyAsString = kv.toString();
                    if (!keySet.contains(keyAsString)) {
                        keySet.add(keyAsString);
                        kvs.add(kv);
                    }
                }
                region.put(p);

                Delete d = new Delete(Bytes.toBytes(row));
                boolean deletedSomething = false;
                for (long ts : TIMESTAMPS)
                    if (rand.nextDouble() < DELETE_PROBABILITY) {
                        d.deleteColumns(FAMILY_BYTES, qualBytes, ts);
                        String rowAndQual = row + "_" + qual;
                        Long whenDeleted = lastDelTimeMap.get(rowAndQual);
                        lastDelTimeMap.put(rowAndQual, whenDeleted == null ? ts : Math.max(ts, whenDeleted));
                        deletedSomething = true;
                    }
                if (deletedSomething)
                    region.delete(d);
            }
        }
        region.flushcache();
    }

    Collections.sort(kvs, KeyValue.COMPARATOR);
    for (int maxVersions = 1; maxVersions <= TIMESTAMPS.length; ++maxVersions) {
        for (int columnBitMask = 1; columnBitMask <= MAX_COLUMN_BIT_MASK; ++columnBitMask) {
            Scan scan = new Scan();
            scan.setMaxVersions(maxVersions);
            Set<String> qualSet = new TreeSet<String>();
            {
                int columnMaskTmp = columnBitMask;
                for (String qual : qualifiers) {
                    if ((columnMaskTmp & 1) != 0) {
                        scan.addColumn(FAMILY_BYTES, Bytes.toBytes(qual));
                        qualSet.add(qual);
                    }
                    columnMaskTmp >>= 1;
                }
                assertEquals(0, columnMaskTmp);
            }

            InternalScanner scanner = region.getScanner(scan);
            List<Cell> results = new ArrayList<Cell>();

            int kvPos = 0;
            int numResults = 0;
            String queryInfo = "columns queried: " + qualSet + " (columnBitMask=" + columnBitMask
                    + "), maxVersions=" + maxVersions;

            while (scanner.next(results) || results.size() > 0) {
                for (Cell kv : results) {
                    while (kvPos < kvs.size()
                            && !matchesQuery(kvs.get(kvPos), qualSet, maxVersions, lastDelTimeMap)) {
                        ++kvPos;
                    }
                    String rowQual = getRowQualStr(kv);
                    String deleteInfo = "";
                    Long lastDelTS = lastDelTimeMap.get(rowQual);
                    if (lastDelTS != null) {
                        deleteInfo = "; last timestamp when row/column " + rowQual + " was deleted: "
                                + lastDelTS;
                    }
                    assertTrue("Scanner returned additional key/value: " + kv + ", " + queryInfo + deleteInfo
                            + ";", kvPos < kvs.size());
                    assertTrue("Scanner returned wrong key/value; " + queryInfo + deleteInfo + ";",
                            CellComparator.equalsIgnoreMvccVersion(kvs.get(kvPos), (kv)));
                    ++kvPos;
                    ++numResults;
                }
                results.clear();
            }
            for (; kvPos < kvs.size(); ++kvPos) {
                KeyValue remainingKV = kvs.get(kvPos);
                assertFalse(
                        "Matching column not returned by scanner: " + remainingKV + ", " + queryInfo
                                + ", results returned: " + numResults,
                        matchesQuery(remainingKV, qualSet, maxVersions, lastDelTimeMap));
            }
        }
    }
    assertTrue("This test is supposed to delete at least some row/column " + "pairs",
            lastDelTimeMap.size() > 0);
    LOG.info("Number of row/col pairs deleted at least once: " + lastDelTimeMap.size());
    HRegion.closeHRegion(region);
}

From source file:com.linkedin.pinot.query.transform.TransformExpressionOperatorTest.java

/**
 * Helper method to build a segment with {@link #NUM_METRICS} metrics filled with
 * random data as per the schema.
 *
 * @param segmentDirName Name of segment directory
 * @param segmentName Name of segment
 * @param schema Schema for segment
 * @return Schema built for the segment
 * @throws Exception
 */
private Schema buildSegment(String segmentDirName, String segmentName, Schema schema) throws Exception {

    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setOutDir(segmentDirName);
    config.setFormat(FileFormat.AVRO);
    config.setSegmentName(segmentName);

    Random random = new Random(RANDOM_SEED);
    final List<GenericRow> data = new ArrayList<>();

    _values = new double[NUM_ROWS][NUM_METRICS];
    for (int row = 0; row < NUM_ROWS; row++) {
        HashMap<String, Object> map = new HashMap<>();

        // Metric columns.
        for (int i = 0; i < NUM_METRICS; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            double value = random.nextInt(MAX_METRIC_VALUE) + random.nextDouble() + 1.0;
            map.put(metName, value);
            _values[row][i] = value;
        }

        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        data.add(genericRow);
    }

    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    RecordReader reader = new TestUtils.GenericRowRecordReader(schema, data);
    driver.init(config, reader);
    driver.build();

    LOGGER.info("Built segment {} at {}", segmentName, segmentDirName);
    return schema;
}

From source file:kuzki.net.exercisetracker.MainActivity.java

private void updateFakeLocation(Location location) {
    if (mPrevLatLng == null) {
        mPrevLatLng = new LatLng(location.getLatitude(), location.getLongitude());
    }
    Random r = new Random();
    // Pick a sign for each axis: 2.5 * nextDouble() - 1 > 0 holds with probability 0.6,
    // so each side is -1 with probability 0.6 and +1 otherwise.
    double side1 = 2.5 * r.nextDouble() - 1 > 0 ? -1 : 1;
    double side2 = 2.5 * r.nextDouble() - 1 > 0 ? -1 : 1;
    // Quantized random offset: truncate nextDouble() * 100000 to an int, then
    // scale by 1e-9, giving a step in [0, 0.0001) degrees.
    double dist1 = (double) ((int) (r.nextDouble() * 100000)) * .000000001;
    double dist2 = (double) ((int) (r.nextDouble() * 100000)) * .000000001;
    location.setLatitude(mPrevLatLng.latitude + dist1 * side1);
    location.setLongitude(mPrevLatLng.longitude + dist2 * side2);
    mPrevLatLng = new LatLng(location.getLatitude(), location.getLongitude());
}

From source file:de.hpi.isg.mdms.benchmark.ConstraintInsertPerfomanceBenchmark.java

@Test
public void testInsertInclusionDependenciesIntoDefaultMetadataStore() throws Exception {

    LOGGER.info("Creating Java-serialized metadata store...");
    File metadataStoreFile = createTempFile("ser");
    MetadataStore metadataStore = MetadataStoreFactory.createAndSaveDefaultMetadataStore(metadataStoreFile);

    LOGGER.info("Creating schema...");
    int numTables = 1000;
    int numColumnsPerTable = 100;
    int numColumns = numTables * numColumnsPerTable;
    Schema schema = createSchema(metadataStore, numTables, numColumnsPerTable);
    metadataStore.flush();

    LOGGER.info("Generating INDs...");
    int numDesiredInds = 100000;
    double indProbability = numDesiredInds / Math.pow(numTables * numColumnsPerTable, 2);
    // Boost the probability to speed up generation.
    indProbability = Math.sqrt(indProbability);

    Collection<Column[]> inclusionDependencies = new LinkedList<Column[]>();
    Random random = new Random();
    OuterLoop: for (final Table table1 : schema.getTables()) {
        for (final Table table2 : schema.getTables()) {
            for (final Column column1 : table1.getColumns()) {
                for (final Column column2 : table2.getColumns()) {
                    if (column1 != column2 && random.nextDouble() <= indProbability) {
                        inclusionDependencies.add(new Column[] { column1, column2 });
                        if (inclusionDependencies.size() >= numDesiredInds) {
                            break OuterLoop;
                        }
                    }
                }
            }
        }
    }

    LOGGER.info("Inserting the {} generated INDs...", inclusionDependencies.size());
    long startTimeGross = System.currentTimeMillis();
    ConstraintCollection<InclusionDependency> constraintCollection = metadataStore
            .createConstraintCollection(null, InclusionDependency.class);
    long startTimeNet = System.currentTimeMillis();
    for (Column[] columnPair : inclusionDependencies) {
        Collection<Column> dependentColumns = Collections.singleton(columnPair[0]);
        Collection<Column> referencedColumns = Collections.singletonList(columnPair[1]);
        final InclusionDependency.Reference reference = new InclusionDependency.Reference(
                dependentColumns.toArray(new Column[dependentColumns.size()]),
                referencedColumns.toArray(new Column[referencedColumns.size()]));
        InclusionDependency.buildAndAddToCollection(reference, constraintCollection);
    }
    long endTimeNet = System.currentTimeMillis();
    metadataStore.flush();
    long endTimeGross = System.currentTimeMillis();
    // Throughput is based on the number of INDs actually inserted.
    double numInsertsPerSecGross = 1000d * inclusionDependencies.size() / (endTimeGross - startTimeGross);
    double numInsertsPerSecNet = 1000d * inclusionDependencies.size() / (endTimeNet - startTimeNet);
    LOGGER.info("[gross] Inserted in {} ms ({} inserts/s)", endTimeGross - startTimeGross,
            numInsertsPerSecGross);
    LOGGER.info("[net]   Inserted in {} ms ({} inserts/s)", endTimeNet - startTimeNet, numInsertsPerSecNet);
    LOGGER.info("File size: {} MB", metadataStoreFile.length() / (1024 * 1024));
}

From source file:de.hpi.isg.mdms.benchmark.ConstraintInsertPerfomanceBenchmark.java

@Test
public void testInsertInclusionDependenciesIntoRDBMSMetadataStore() throws Exception {

    LOGGER.info("Creating RDBMS metadata store...");
    File metadataStoreFile = createTempFile("sqlite");
    MetadataStore metadataStore = RDBMSMetadataStore
            .createNewInstance(SQLiteInterface.createForFile(metadataStoreFile));

    LOGGER.info("Creating schema...");
    int numTables = 1000;
    int numColumnsPerTable = 100;
    int numColumns = numTables * numColumnsPerTable;
    Schema schema = createSchema(metadataStore, numTables, numColumnsPerTable);
    metadataStore.flush();

    LOGGER.info("Generating INDs...");
    int numDesiredInds = 100000;
    double indProbability = numDesiredInds / Math.pow(numTables * numColumnsPerTable, 2);
    // Boost the probability to speed up generation.
    indProbability = Math.sqrt(indProbability);

    Collection<Column[]> inclusionDependencies = new LinkedList<Column[]>();
    Random random = new Random();
    OuterLoop: for (final Table table1 : schema.getTables()) {
        for (final Table table2 : schema.getTables()) {
            for (final Column column1 : table1.getColumns()) {
                for (final Column column2 : table2.getColumns()) {
                    if (column1 != column2 && random.nextDouble() <= indProbability) {
                        inclusionDependencies.add(new Column[] { column1, column2 });
                        if (inclusionDependencies.size() >= numDesiredInds) {
                            break OuterLoop;
                        }
                    }
                }
            }
        }
    }

    LOGGER.info("Inserting the {} generated INDs...", inclusionDependencies.size());
    long startTimeGross = System.currentTimeMillis();
    ConstraintCollection<InclusionDependency> constraintCollection = metadataStore
            .createConstraintCollection(null, InclusionDependency.class);
    long startTimeNet = System.currentTimeMillis();
    for (Column[] columnPair : inclusionDependencies) {
        Collection<Column> dependentColumns = Collections.singleton(columnPair[0]);
        Collection<Column> referencedColumns = Collections.singletonList(columnPair[1]);
        final InclusionDependency.Reference reference = new InclusionDependency.Reference(
                dependentColumns.toArray(new Column[dependentColumns.size()]),
                referencedColumns.toArray(new Column[referencedColumns.size()]));
        InclusionDependency.buildAndAddToCollection(reference, constraintCollection);
    }
    long endTimeNet = System.currentTimeMillis();
    metadataStore.flush();
    long endTimeGross = System.currentTimeMillis();
    // Throughput is based on the number of INDs actually inserted.
    double numInsertsPerSecGross = 1000d * inclusionDependencies.size() / (endTimeGross - startTimeGross);
    double numInsertsPerSecNet = 1000d * inclusionDependencies.size() / (endTimeNet - startTimeNet);
    LOGGER.info("[gross] Inserted in {} ms ({} inserts/s)", endTimeGross - startTimeGross,
            numInsertsPerSecGross);
    LOGGER.info("[net]   Inserted in {} ms ({} inserts/s)", endTimeNet - startTimeNet, numInsertsPerSecNet);
    LOGGER.info("File size: {} MB", metadataStoreFile.length() / (1024 * 1024));
}

From source file:de.hpi.isg.mdms.benchmark.ConstraintInsertPerfomanceBenchmark.java

@Test
public void testInsertUniqueColumnCombinationsIntoDefaultMetadataStore() throws Exception {

    LOGGER.info("Creating Java-serialized metadata store...");
    File metadataStoreFile = createTempFile("ser");
    MetadataStore metadataStore = MetadataStoreFactory.createAndSaveDefaultMetadataStore(metadataStoreFile);

    LOGGER.info("Creating schema...");
    int numTables = 1000;
    int numColumnsPerTable = 100;
    int numColumns = numTables * numColumnsPerTable;
    Schema schema = createSchema(metadataStore, numTables, numColumnsPerTable);
    metadataStore.flush();

    LOGGER.info("Generating UCCs...");
    int numDesiredInds = 100000;
    double indProbability = numDesiredInds / Math.pow(numTables * numColumnsPerTable, 2);
    // Boost the probability to speed up generation.
    indProbability = Math.sqrt(indProbability);

    Collection<Column[]> inclusionDependencies = new LinkedList<Column[]>();
    Random random = new Random();
    OuterLoop: for (final Table table1 : schema.getTables()) {
        for (final Table table2 : schema.getTables()) {
            for (final Column column1 : table1.getColumns()) {
                for (final Column column2 : table2.getColumns()) {
                    if (column1 != column2 && random.nextDouble() <= indProbability) {
                        inclusionDependencies.add(new Column[] { column1, column2 });
                        if (inclusionDependencies.size() >= numDesiredInds) {
                            break OuterLoop;
                        }
                    }
                }
            }
        }
    }

    LOGGER.info("Inserting the {} generated UCCs...", inclusionDependencies.size());
    long startTimeGross = System.currentTimeMillis();
    ConstraintCollection<UniqueColumnCombination> constraintCollection = metadataStore
            .createConstraintCollection(null, UniqueColumnCombination.class);
    long startTimeNet = System.currentTimeMillis();
    for (Column[] columnPair : inclusionDependencies) {
        Collection<Column> uniqueColumns = Collections.singleton(columnPair[0]);
        List<Integer> ids = new ArrayList<>();
        for (Column c : uniqueColumns) {
            ids.add(c.getId());
        }
        int[] intArray = ArrayUtils.toPrimitive(ids.toArray(new Integer[ids.size()]));
        final UniqueColumnCombination.Reference reference = new UniqueColumnCombination.Reference(intArray);
        UniqueColumnCombination.buildAndAddToCollection(reference, constraintCollection);
    }
    long endTimeNet = System.currentTimeMillis();
    metadataStore.flush();
    long endTimeGross = System.currentTimeMillis();
    // Throughput is based on the number of constraints actually inserted.
    double numInsertsPerSecGross = 1000d * inclusionDependencies.size() / (endTimeGross - startTimeGross);
    double numInsertsPerSecNet = 1000d * inclusionDependencies.size() / (endTimeNet - startTimeNet);
    LOGGER.info("[gross] Inserted in {} ms ({} inserts/s)", endTimeGross - startTimeGross,
            numInsertsPerSecGross);
    LOGGER.info("[net]   Inserted in {} ms ({} inserts/s)", endTimeNet - startTimeNet, numInsertsPerSecNet);
    LOGGER.info("File size: {} MB", metadataStoreFile.length() / (1024 * 1024));

}

From source file:de.hpi.isg.mdms.benchmark.ConstraintInsertPerfomanceBenchmark.java

@Test
public void testInsertUniqueColumnCombinationsIntoRDBMSMetadataStore() throws Exception {

    LOGGER.info("Creating RDBMS metadata store...");
    File metadataStoreFile = createTempFile("sqlite");
    MetadataStore metadataStore = RDBMSMetadataStore
            .createNewInstance(SQLiteInterface.createForFile(metadataStoreFile));

    LOGGER.info("Creating schema...");
    int numTables = 1000;
    int numColumnsPerTable = 100;
    int numColumns = numTables * numColumnsPerTable;
    Schema schema = createSchema(metadataStore, numTables, numColumnsPerTable);
    metadataStore.flush();

    LOGGER.info("Generating UCCs...");
    int numDesiredInds = 100000;
    double indProbability = numDesiredInds / Math.pow(numTables * numColumnsPerTable, 2);
    // Boost the probability to speed up generation.
    indProbability = Math.sqrt(indProbability);

    Collection<Column[]> inclusionDependencies = new LinkedList<Column[]>();
    Random random = new Random();
    OuterLoop: for (final Table table1 : schema.getTables()) {
        for (final Table table2 : schema.getTables()) {
            for (final Column column1 : table1.getColumns()) {
                for (final Column column2 : table2.getColumns()) {
                    if (column1 != column2 && random.nextDouble() <= indProbability) {
                        inclusionDependencies.add(new Column[] { column1, column2 });
                        if (inclusionDependencies.size() >= numDesiredInds) {
                            break OuterLoop;
                        }
                    }
                }
            }
        }
    }

    LOGGER.info("Inserting the {} generated UCCs...", inclusionDependencies.size());
    long startTimeGross = System.currentTimeMillis();
    ConstraintCollection<UniqueColumnCombination> constraintCollection = metadataStore
            .createConstraintCollection(null, UniqueColumnCombination.class);
    long startTimeNet = System.currentTimeMillis();
    for (Column[] columnPair : inclusionDependencies) {
        Collection<Column> uniqueColumns = Collections.singleton(columnPair[0]);
        List<Integer> ids = new ArrayList<>();
        for (Column c : uniqueColumns) {
            ids.add(c.getId());
        }
        int[] intArray = ArrayUtils.toPrimitive(ids.toArray(new Integer[ids.size()]));
        final UniqueColumnCombination.Reference reference = new UniqueColumnCombination.Reference(intArray);
        UniqueColumnCombination.buildAndAddToCollection(reference, constraintCollection);
    }
    long endTimeNet = System.currentTimeMillis();
    metadataStore.flush();
    long endTimeGross = System.currentTimeMillis();
    // Throughput is based on the number of constraints actually inserted.
    double numInsertsPerSecGross = 1000d * inclusionDependencies.size() / (endTimeGross - startTimeGross);
    double numInsertsPerSecNet = 1000d * inclusionDependencies.size() / (endTimeNet - startTimeNet);
    LOGGER.info("[gross] Inserted in {} ms ({} inserts/s)", endTimeGross - startTimeGross,
            numInsertsPerSecGross);
    LOGGER.info("[net]   Inserted in {} ms ({} inserts/s)", endTimeNet - startTimeNet, numInsertsPerSecNet);
    LOGGER.info("File size: {} MB", metadataStoreFile.length() / (1024 * 1024));

}

From source file:eu.amidst.core.exponentialfamily.EF_Normal_NormalParents.java

/**
 * {@inheritDoc}
 */
@Override
public SufficientStatistics createInitSufficientStatistics() {
    CompoundVector vectorSS = this.createEmtpyCompoundVector();

    double[] Xarray = { 0.0 };

    double[] Yarray = this.parents.stream().mapToDouble(w -> 0.0).toArray();
    RealVector XYRealVector = new ArrayRealVector(Xarray, Yarray);
    vectorSS.setXYbaseVector(XYRealVector);

    RealMatrix covRealmatrix = new Array2DRowRealMatrix(Yarray.length + 1, Yarray.length + 1);

    // We perform the "Laplace" correction this way to break symmetric covariance matrices.
    Random rand = new Random(0);
    for (int i = 0; i < Yarray.length + 1; i++) {
        for (int j = 0; j < Yarray.length + 1; j++) {
            covRealmatrix.addToEntry(i, j, rand.nextDouble() + 0.01);
        }
    }
    //covRealmatrix = covRealmatrix.scalarAdd(1.0);

    vectorSS.setcovbaseVector(covRealmatrix);

    return vectorSS;
}

From source file:org.apache.mahout.classifier.sequencelearning.hmm.HmmModel.java

/**
 * Initialize a valid random set of HMM parameters
 *
 * @param seed seed to use for Random initialization; use 0 to fall back to the built-in default seeding.
 */
private void initRandomParameters(long seed) {
    Random rand;
    // initialize the random number generator
    if (seed == 0) {
        rand = RandomUtils.getRandom();
    } else {
        rand = RandomUtils.getRandom(seed);
    }
    // initialize the initial Probabilities
    double sum = 0; // used for normalization
    for (int i = 0; i < nrOfHiddenStates; i++) {
        double nextRand = rand.nextDouble();
        initialProbabilities.set(i, nextRand);
        sum += nextRand;
    }
    // "normalize" the vector to generate probabilities
    initialProbabilities = initialProbabilities.divide(sum);

    // initialize the transition matrix
    double[] values = new double[nrOfHiddenStates];
    for (int i = 0; i < nrOfHiddenStates; i++) {
        sum = 0;
        for (int j = 0; j < nrOfHiddenStates; j++) {
            values[j] = rand.nextDouble();
            sum += values[j];
        }
        // normalize the random values to obtain probabilities
        for (int j = 0; j < nrOfHiddenStates; j++) {
            values[j] /= sum;
        }
        // set this row of the transition matrix
        transitionMatrix.set(i, values);
    }

    // initialize the output matrix
    values = new double[nrOfOutputStates];
    for (int i = 0; i < nrOfHiddenStates; i++) {
        sum = 0;
        for (int j = 0; j < nrOfOutputStates; j++) {
            values[j] = rand.nextDouble();
            sum += values[j];
        }
        // normalize the random values to obtain probabilities
        for (int j = 0; j < nrOfOutputStates; j++) {
            values[j] /= sum;
        }
        // set this row of the output matrix
        emissionMatrix.set(i, values);
    }
}
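
The row-filling pattern above -- draw each entry with nextDouble() and then divide by the row sum -- yields a random stochastic vector, which is why it can seed valid transition and emission matrices. A standalone sketch of just that pattern (the dimension and seed are illustrative):

import java.util.Arrays;
import java.util.Random;

public class RandomStochasticRow {
    public static void main(String[] args) {
        Random rand = new Random(7L);
        double[] row = new double[4];
        double sum = 0;
        for (int j = 0; j < row.length; j++) {
            row[j] = rand.nextDouble(); // non-negative, strictly less than 1.0
            sum += row[j];
        }
        for (int j = 0; j < row.length; j++) {
            row[j] /= sum; // entries now sum to 1.0
        }
        System.out.println(Arrays.toString(row));
    }
}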

From source file:org.apache.camel.processor.RedeliveryPolicy.java

/**
 * Calculates the new redelivery delay based on the last one
 *
 * @param previousDelay  previous redelivery delay
 * @param redeliveryCounter  number of previous redelivery attempts
 * @return the calculated delay
 */
public long calculateRedeliveryDelay(long previousDelay, int redeliveryCounter) {
    if (ObjectHelper.isNotEmpty(delayPattern)) {
        // calculate delay using the pattern
        return calculateRedeliverDelayUsingPattern(delayPattern, redeliveryCounter);
    }

    // calculate the delay using the conventional parameters
    long redeliveryDelayResult;
    if (previousDelay == 0) {
        redeliveryDelayResult = redeliveryDelay;
    } else if (useExponentialBackOff && backOffMultiplier > 1) {
        redeliveryDelayResult = Math.round(backOffMultiplier * previousDelay);
    } else {
        redeliveryDelayResult = previousDelay;
    }

    if (useCollisionAvoidance) {

        /*
         * First random determines +/-, second random determines how far to
         * go in that direction. -cgs
         */
        Random random = getRandomNumberGenerator();
        double variance = (random.nextBoolean() ? collisionAvoidanceFactor : -collisionAvoidanceFactor)
                * random.nextDouble();
        redeliveryDelayResult += redeliveryDelayResult * variance;
    }

    // ensure the calculated result is not bigger than the max delay (if configured)
    if (maximumRedeliveryDelay > 0 && redeliveryDelayResult > maximumRedeliveryDelay) {
        redeliveryDelayResult = maximumRedeliveryDelay;
    }

    return redeliveryDelayResult;
}
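
In the collision-avoidance branch above, nextBoolean() picks the sign and nextDouble() picks the magnitude, so the delay is perturbed by a factor in (-collisionAvoidanceFactor, +collisionAvoidanceFactor). A self-contained sketch of that jitter combined with exponential back-off (the constants are illustrative, and this is not Camel's API):

import java.util.Random;

public class BackoffJitterDemo {
    public static void main(String[] args) {
        Random random = new Random();
        double collisionAvoidanceFactor = 0.15; // up to +/-15% jitter
        long delay = 1000L; // initial delay in ms
        for (int attempt = 1; attempt <= 5; attempt++) {
            double variance = (random.nextBoolean() ? collisionAvoidanceFactor : -collisionAvoidanceFactor)
                    * random.nextDouble();
            long jittered = delay + Math.round(delay * variance);
            System.out.println("attempt " + attempt + ": " + jittered + " ms");
            delay = Math.round(delay * 2.0); // back-off multiplier of 2
        }
    }
}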