Example usage for java.util Random nextDouble

List of usage examples for java.util Random nextDouble

Introduction

On this page you can find usage examples for java.util.Random.nextDouble().

Prototype

public double nextDouble() 

Document

Returns the next pseudorandom, uniformly distributed double value between 0.0 and 1.0 from this random number generator's sequence.
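
For reference, nextDouble() returns a value in the half-open range [0.0, 1.0), that is, 0.0 inclusive and 1.0 exclusive. The following minimal sketch is not taken from any of the projects below; it only illustrates the common patterns of scaling the result into a custom range and using it for probabilistic branching:

public class NextDoubleDemo {
    public static void main(String[] args) {
        java.util.Random random = new java.util.Random(42L); // fixed seed for reproducible output

        // raw value in [0.0, 1.0)
        double unit = random.nextDouble();

        // scale into an arbitrary range [min, max)
        double min = 5.0;
        double max = 10.0;
        double scaled = min + (max - min) * random.nextDouble();

        // probabilistic branching: true roughly 80% of the time
        boolean takeBranch = random.nextDouble() < 0.8;

        System.out.println(unit + " " + scaled + " " + takeBranch);
    }
}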

Usage

From source file:org.apache.hadoop.hbase.regionserver.wal.TestHLogFiltering.java

private void fillTable() throws IOException, InterruptedException {
    HTable table = TEST_UTIL.createTable(TABLE_NAME, FAMILIES, 3, Bytes.toBytes("row0"), Bytes.toBytes("row99"),
            NUM_RS);
    Random rand = new Random(19387129L);
    for (int iStoreFile = 0; iStoreFile < 4; ++iStoreFile) {
        for (int iRow = 0; iRow < 100; ++iRow) {
            final byte[] row = Bytes.toBytes("row" + iRow);
            Put put = new Put(row);
            Delete del = new Delete(row);
            for (int iCol = 0; iCol < 10; ++iCol) {
                final byte[] cf = rand.nextBoolean() ? CF1 : CF2;
                final long ts = Math.abs(rand.nextInt());
                final byte[] qual = Bytes.toBytes("col" + iCol);
                if (rand.nextBoolean()) {
                    final byte[] value = Bytes
                            .toBytes("value_for_row_" + iRow + "_cf_" + Bytes.toStringBinary(cf) + "_col_"
                                    + iCol + "_ts_" + ts + "_random_" + rand.nextLong());
                    put.add(cf, qual, ts, value);
                } else if (rand.nextDouble() < 0.8) {
                    del.deleteColumn(cf, qual, ts);
                } else {
                    del.deleteColumns(cf, qual, ts);
                }
            }
            table.put(put);
            table.delete(del);
            table.flushCommits();
        }
    }
    TEST_UTIL.waitUntilAllRegionsAssigned(TABLE_NAME);
}

From source file:de.upb.timok.run.GenericSmacPipeline.java

private void splitTrainTestFile(String timedInputFile, String timedInputTrainFile, String timedInputTestFile,
        double trainPercentage, double testPercentage, double anomalyPercentage, boolean isRti)
        throws IOException {
    logger.info("TimedInputFile=" + timedInputFile);
    final File f = new File(timedInputFile);
    System.out.println(f);
    final LineNumberReader lnr = new LineNumberReader(new FileReader(timedInputFile));
    lnr.skip(Long.MAX_VALUE);
    int samples = lnr.getLineNumber();
    lnr.close();
    final int trainingSamples = (int) (samples * trainPercentage);
    final int testSamples = (int) (samples * testPercentage);
    final int anomalies = (int) (anomalyPercentage * testSamples);
    int writtenTrainingSamples = 0;
    int writtenTestSamples = 0;
    int insertedAnomalies = 0;
    final BufferedReader br = Files.newBufferedReader(Paths.get(timedInputFile), StandardCharsets.UTF_8);
    String line = null;
    final BufferedWriter trainWriter = Files.newBufferedWriter(Paths.get(timedInputTrainFile),
            StandardCharsets.UTF_8);
    final BufferedWriter testWriter = Files.newBufferedWriter(Paths.get(timedInputTestFile),
            StandardCharsets.UTF_8);
    final Random r = new Random(MasterSeed.nextLong());
    final Random mutation = new Random(MasterSeed.nextLong());
    boolean force = false;
    int lineIndex = 0;
    int linesLeft;
    int anomaliesToInsert;
    if (isRti) {
        br.readLine();
        samples--;
    }
    while ((line = br.readLine()) != null) {
        if (writtenTrainingSamples < trainingSamples && writtenTestSamples < testSamples) {
            // choose randomly according to train/test percentage
            if (r.nextDouble() > testPercentage) {
                // write to train
                writeSample(new TimedSequence(line, true, false).toTrebaString(), trainWriter);
                writtenTrainingSamples++;
            } else {
                // write to test
                insertedAnomalies = testAndWriteAnomaly(anomalies, insertedAnomalies, anomalyPercentage, line,
                        testWriter, mutation, force);
                writtenTestSamples++;
            }
        } else if (writtenTrainingSamples >= trainingSamples) {
            // only write test samples from now on
            insertedAnomalies = testAndWriteAnomaly(anomalies, insertedAnomalies, anomalyPercentage, line,
                    testWriter, mutation, force);
            writtenTestSamples++;
        } else if (writtenTestSamples >= testSamples) {
            // only write train samples from now on
            writeSample(new TimedSequence(line, true, false).toTrebaString(), trainWriter);
            writtenTrainingSamples++;
        }
        lineIndex++;
        linesLeft = samples - lineIndex;
        anomaliesToInsert = anomalies - insertedAnomalies;
        if (linesLeft <= anomaliesToInsert) {
            force = true;
        }
    }
    br.close();
    trainWriter.close();
    testWriter.close();
}

From source file:edu.cornell.med.icb.goby.stats.TestStatistics.java

@Test
public void testFoldChange() {
    final Random randomEngine = new Random();
    final DifferentialExpressionCalculator deCalc = new DifferentialExpressionCalculator() {

        @Override
        public double getNormalizedExpressionValue(final String sample, final NormalizationMethod method,
                final MutableString elementId) {
            if (sample.startsWith("A")) {
                return 2 * Math.abs(randomEngine.nextDouble());
            } else {
                return Math.abs(randomEngine.nextDouble());
            }

            // fold change A/B = 2
        }
    };

    deCalc.defineElement("id-1");
    deCalc.defineElement("id-2");
    deCalc.defineGroup("A");
    deCalc.defineGroup("B");
    final int numReplicates = 20000;
    deCalc.reserve(2, numReplicates * 2);

    for (int i = 0; i < numReplicates; i++) {
        deCalc.associateSampleToGroup("A-" + i, "A");
        deCalc.associateSampleToGroup("B-" + i, "B");
    }

    final DifferentialExpressionInfo info = new DifferentialExpressionInfo("id-1");
    final DifferentialExpressionResults results = new DifferentialExpressionResults();
    final FoldChangeCalculator foldChange = new FoldChangeCalculator(results);
    final NormalizationMethod normalizationMethod = new AlignedCountNormalization();
    foldChange.evaluate(deCalc, normalizationMethod, results, info, "A", "B");
    assertEquals("fold-change does not match", 2d, results.getStatistic(info, foldChange.statisticIds.get(0)),
            .1);
}

From source file:edu.cornell.med.icb.goby.stats.TestStatistics.java

@Test
public void testAverage() throws IOException {
    final Random randomEngine = new Random();
    final DifferentialExpressionCalculator deCalc = new DifferentialExpressionCalculator() {

        @Override
        public double getNormalizedExpressionValue(final String sample, final NormalizationMethod method,
                final MutableString elementId) {
            if (sample.startsWith("A")) {
                return 2 * Math.abs(randomEngine.nextDouble());
            } else {
                return Math.abs(randomEngine.nextDouble());
            }

            // fold change A/B = 2
        }
    };

    deCalc.defineElement("id-1");
    deCalc.defineElement("id-2");
    deCalc.defineGroup("A");
    deCalc.defineGroup("B");
    final int numReplicates = 20000;
    deCalc.reserve(2, numReplicates * 2);

    for (int i = 0; i < numReplicates; i++) {
        deCalc.associateSampleToGroup("A-" + i, "A");
        deCalc.associateSampleToGroup("B-" + i, "B");
    }

    final DifferentialExpressionInfo info = new DifferentialExpressionInfo("id-1");
    final DifferentialExpressionResults results = new DifferentialExpressionResults();
    final AverageCalculator averageCalculator = new AverageCalculator(results);
    results.add(info);
    final NormalizationMethod normalizationMethod = new AlignedCountNormalization();
    averageCalculator.evaluate(deCalc, normalizationMethod, results, info, "A", "B");
    assertEquals("average A must be around 2", 1d,
            results.getStatistic(info, averageCalculator.getStatisticId("A", "RPKM", normalizationMethod)), .1);
    assertEquals("average B must be around 1", 0.5d,
            results.getStatistic(info, averageCalculator.getStatisticId("B", "RPKM", normalizationMethod)), .1);
    System.out.println(results);
    results.write(new PrintWriter("test-results/out-stats.tsv"), '\t', deCalc);
}

From source file:org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.java

public int randomFill(Random r, int randField, int field, ExtraTypeInfo extraTypeInfo) {
    myBool = chooseNull(r, randField, field++) ? null : Boolean.valueOf(r.nextInt(1) == 1);
    myByte = chooseNull(r, randField, field++) ? null : Byte.valueOf((byte) r.nextInt());
    myShort = chooseNull(r, randField, field++) ? null : Short.valueOf((short) r.nextInt());
    myInt = chooseNull(r, randField, field++) ? null : Integer.valueOf(r.nextInt());
    myLong = chooseNull(r, randField, field++) ? null : Long.valueOf(r.nextLong());
    myFloat = chooseNull(r, randField, field++) ? null : Float.valueOf(r.nextFloat() * 10 - 5);
    myDouble = chooseNull(r, randField, field++) ? null : Double.valueOf(r.nextDouble() * 10 - 5);
    myString = chooseNull(r, randField, field++) ? null : getRandString(r);
    myHiveChar = chooseNull(r, randField, field++) ? null : getRandHiveChar(r, extraTypeInfo);
    myHiveVarchar = chooseNull(r, randField, field++) ? null : getRandHiveVarchar(r, extraTypeInfo);
    myBinary = getRandBinary(r, r.nextInt(1000));
    myDecimal = chooseNull(r, randField, field++) ? null : getRandHiveDecimal(r, extraTypeInfo);
    myDate = chooseNull(r, randField, field++) ? null : getRandDate(r);
    myTimestamp = chooseNull(r, randField, field++) ? null : RandomTypeUtil.getRandTimestamp(r);
    myIntervalYearMonth = chooseNull(r, randField, field++) ? null : getRandIntervalYearMonth(r);
    myIntervalDayTime = chooseNull(r, randField, field++) ? null : getRandIntervalDayTime(r);
    return field;
}

From source file:org.apache.hadoop.io.TestArrayOutputStream.java

private void runComparison(ArrayOutputStream aos, DataOutputStream dos, ByteArrayOutputStream bos)
        throws IOException {
    Random r = new Random();
    // byte
    int b = r.nextInt(128);
    aos.write(b);
    dos.write(b);

    // byte[]
    byte[] bytes = new byte[10];
    r.nextBytes(bytes);
    aos.write(bytes, 0, 10);
    dos.write(bytes, 0, 10);

    // Byte
    aos.writeByte(b);
    dos.writeByte(b);

    // boolean
    boolean bool = r.nextBoolean();
    aos.writeBoolean(bool);
    dos.writeBoolean(bool);

    // short
    short s = (short) r.nextInt();
    aos.writeShort(s);
    dos.writeShort(s);

    // char
    int c = r.nextInt();
    aos.writeChar(c);
    dos.writeChar(c);

    // int
    int i = r.nextInt();
    aos.writeInt(i);
    dos.writeInt(i);

    // long
    long l = r.nextLong();
    aos.writeLong(l);
    dos.writeLong(l);

    // float
    float f = r.nextFloat();
    aos.writeFloat(f);
    dos.writeFloat(f);

    // double
    double d = r.nextDouble();
    aos.writeDouble(d);
    dos.writeDouble(d);

    // strings
    String str = RandomStringUtils.random(20);
    aos.writeBytes(str);
    aos.writeChars(str);
    aos.writeUTF(str);
    dos.writeBytes(str);
    dos.writeChars(str);
    dos.writeUTF(str);

    byte[] expected = bos.toByteArray();
    assertEquals(expected.length, aos.size());

    byte[] actual = new byte[aos.size()];
    System.arraycopy(aos.getBytes(), 0, actual, 0, actual.length);
    // serialized bytes should be the same
    assertTrue(Arrays.equals(expected, actual));
}

From source file:org.dellapenna.research.ldr.Popolazione.java

/**
 * Generates the random number used to select the individual to insert
 * into the mating pool.
 *
 * @param rDiv random generator used to select the type of division
 * @param r random generator that produces the double used for the selection
 * @return randomSelecter the number that drives the selection of the individual
 */
private Double generaRandomSelecter(Random rDiv, Random r) {
    switch (rDiv.nextInt(5)) {
    case 0:
        return (Double) r.nextDouble() / 10;
    case 1:
        return (Double) r.nextDouble() / 1;
    case 2:
        return (Double) r.nextDouble() / 1;
    case 3:
        return (Double) r.nextDouble() / 10;
    case 4:
        return (Double) r.nextDouble() / 1;
    default:
        System.out.println("errore random selecter");
        return 9999999.0;
    }
}

From source file:org.nd4j.linalg.ops.DerivativeTests.java

@Test
public void testSoftMaxDerivative() {
    assertTrue(Nd4j.getOpFactory().createTransform("softmax", Nd4j.ones(1))
            .derivative() instanceof SoftMaxDerivative);

    Random r = new Random(12345L);

    int[] mb = new int[] { 10, 2, 1 };
    for (int minibatch : mb) {
        System.out.println("Minibatch size: " + minibatch);
        INDArray z = Nd4j.zeros(minibatch, 5);
        double[][] in = new double[minibatch][5];
        double[][] softmax = new double[minibatch][5];
        double[][] expOut = new double[minibatch][5];
        for (int i = 0; i < minibatch; i++) {
            double rowSumExp = 0.0;
            for (int j = 0; j < 5; j++) {
                in[i][j] = 10 * r.nextDouble();
                z.putScalar(new int[] { i, j }, in[i][j]);
                rowSumExp += FastMath.exp(in[i][j]);
            }
            for (int j = 0; j < 5; j++) {
                softmax[i][j] = FastMath.exp(in[i][j]) / rowSumExp;
                expOut[i][j] = softmax[i][j] * (1.0 - softmax[i][j]);
            }
        }

        INDArray sm = Nd4j.getExecutioner()
                .execAndReturn(Nd4j.getOpFactory().createTransform("softmax", z.dup()));
        INDArray zPrime = Nd4j.getExecutioner()
                .execAndReturn(Nd4j.getOpFactory().createTransform("softmax", z).derivative());
        System.out.println(Arrays.toString(sm.data().asDouble()));
        System.out.println(Arrays.toString(zPrime.data().asDouble()));
        assertNotEquals(sm, zPrime);

        for (int i = 0; i < minibatch; i++) {
            for (int j = 0; j < 5; j++) {
                double relError = Math.abs(expOut[i][j] - zPrime.getDouble(i, j))
                        / (Math.abs(expOut[i][j]) + Math.abs(zPrime.getDouble(i, j)));
                //                    System.out.println("Error: " + relError);
                assertTrue(relError < REL_ERROR_TOLERANCE);
            }
        }
    }
}

From source file:org.elasticsearch.test.ESIntegTestCase.java

private static Settings.Builder setRandomIndexMergeSettings(Random random, Settings.Builder builder) {
    if (random.nextBoolean()) {
        builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT,
                random.nextBoolean() ? random.nextDouble() : random.nextBoolean());
    }
    switch (random.nextInt(4)) {
    case 3:
        final int maxThreadCount = RandomInts.randomIntBetween(random, 1, 4);
        final int maxMergeCount = RandomInts.randomIntBetween(random, maxThreadCount, maxThreadCount + 4);
        builder.put(MergeSchedulerConfig.MAX_MERGE_COUNT, maxMergeCount);
        builder.put(MergeSchedulerConfig.MAX_THREAD_COUNT, maxThreadCount);
        break;
    }

    return builder;
}

From source file:tinfour.gwr.GwrInterpolator.java

/**
 * Perform a variation of a statistical bootstrap analysis in which the
 * resampling is based on random selection of samples without repetition.
 * The result gives a mean and variance for the predicted value of
 * the surface at the query point.
 *
 * @param model the model to be used to represent the surface
 * @param bandwidthMethod the method used for selecting bandwidth
 * @param bandwidthParameter the input parameter for the specified method
 * @param qx X coordinate of query point
 * @param qy Y coordinate of query point
 * @param nSamples the number of samples for processing
 * @param samples an nSamples-by-3 array of samples for processing
 * @param nRepetitions number of sub-samples to evaluate for bootstrap
 * analysis.
 * @param threshold the probability of excluding any single sample from
 * the subsample (a sample is skipped when its random draw falls below this value).
 * @return if successful, a non-null result instance; otherwise, a null.
 */
public BootstrapResult bootstrap(SurfaceModel model, BandwidthSelectionMethod bandwidthMethod,
        double bandwidthParameter, double qx, double qy, int nSamples, double[][] samples, int nRepetitions,
        double threshold) {
    checkInputs(nSamples, samples);
    double[] distsq = new double[nSamples];
    double[] weights = new double[nSamples];
    double meanDist = prepDistances(qx, qy, nSamples, samples, distsq);

    double[][] sampleWeightsMatrix = null;
    if (bandwidthMethod == BandwidthSelectionMethod.OptimalAICc) {
        sampleWeightsMatrix = new double[nSamples][nSamples];
    }
    if (!prepWeights(model, bandwidthMethod, bandwidthParameter, qx, qy, nSamples, samples, distsq, weights,
            sampleWeightsMatrix, meanDist)) {
        return null;
    }

    double[][] jInputs = new double[nSamples][];
    double[] jWeights = new double[nSamples];
    Random jRand = new Random(0);
    double jSum = 0;
    double j2Sum = 0;
    int n = 0;

    for (int jN = 0; jN < nRepetitions; jN++) {
        int k = 0;
        for (int i = 0; i < nSamples; i++) {
            double d = jRand.nextDouble();
            if (d < threshold) {
                continue;
            }
            jInputs[k] = samples[i];
            jWeights[k] = weights[i];
            k++;
        }

        if (k < minRequiredSamples) {
            continue;
        }
        beta = gwr.computeRegression(model, qx, qy, k, jInputs, jWeights, null);
        if (beta == null || Double.isNaN(beta[0])) {
            continue;
        }
        jSum += beta[0];
        j2Sum += (beta[0] * beta[0]);
        n++;
    }
    double jMean = jSum / n;
    double s2 = (n * j2Sum - jSum * jSum) / (n * (n - 1));
    return new BootstrapResult(n, jMean, s2);

}
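
The bootstrap loop above uses nextDouble() as a per-sample inclusion test. As a stand-alone illustration of that pattern (hypothetical class and method names, not part of the Tinfour API), a random subsample can be drawn like this:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

public class RandomSubsampleDemo {

    // Keeps each sample only when the random draw is at or above the threshold,
    // mirroring the "if (d < threshold) continue;" test in the bootstrap loop above.
    static List<Double> subsample(List<Double> samples, double threshold, Random rand) {
        List<Double> subset = new ArrayList<>();
        for (Double sample : samples) {
            if (rand.nextDouble() < threshold) {
                continue; // excluded from this repetition
            }
            subset.add(sample);
        }
        return subset;
    }

    public static void main(String[] args) {
        Random rand = new Random(0L);
        List<Double> samples = Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0);
        // with threshold 0.25, each sample has roughly a 75% chance of being kept
        System.out.println(subsample(samples, 0.25, rand));
    }
}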