Example usage for java.util Random nextDouble

List of usage examples for java.util Random nextDouble

Introduction

This page collects example usages of java.util.Random.nextDouble() drawn from open-source projects.

Prototype

public double nextDouble() 

Document

Returns the next pseudorandom, uniformly distributed double value between 0.0 and 1.0 from this random number generator's sequence.
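
The returned value is greater than or equal to 0.0 and strictly less than 1.0. A common idiom, used in several of the examples below, scales this unit-interval value onto an arbitrary range. A minimal, self-contained sketch (the seed and range bounds here are illustrative):

import java.util.Random;

public class NextDoubleDemo {
    public static void main(String[] args) {
        Random r = new Random(42L); // fixed seed, so the sequence is reproducible

        // raw value in [0.0, 1.0)
        double unit = r.nextDouble();

        // scaled to an arbitrary range [min, max)
        double min = -100.0;
        double max = 100.0;
        double scaled = min + (max - min) * r.nextDouble();

        System.out.println(unit + " " + scaled);
    }
}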

Usage

From source file: org.nd4j.linalg.Nd4jTestsC.java

@Test
public void testMMulFTimesC() {
    int nRows = 3;
    int nCols = 3;
    java.util.Random r = new java.util.Random(12345);

    INDArray arrC = Nd4j.create(new int[] { nRows, nCols }, 'c');
    INDArray arrF = Nd4j.create(new int[] { nRows, nCols }, 'f');
    INDArray arrC2 = Nd4j.create(new int[] { nRows, nCols }, 'c');
    for (int i = 0; i < nRows; i++) {
        for (int j = 0; j < nCols; j++) {
            double rv = r.nextDouble();
            arrC.putScalar(new int[] { i, j }, rv);
            arrF.putScalar(new int[] { i, j }, rv);
            arrC2.putScalar(new int[] { i, j }, r.nextDouble());
        }
    }
    assertTrue(arrF.equals(arrC));

    INDArray fTimesC = arrF.mmul(arrC2);
    INDArray cTimesC = arrC.mmul(arrC2);

    assertEquals(fTimesC, cTimesC);
}
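
Note the fixed seed (12345) above: seeding java.util.Random makes nextDouble() produce the same sequence on every run, which keeps the test deterministic. A minimal sketch of the property the test relies on:

import java.util.Random;

public class SeededRandomDemo {
    public static void main(String[] args) {
        // Two generators seeded identically produce identical sequences.
        Random a = new Random(12345L);
        Random b = new Random(12345L);
        for (int i = 0; i < 3; i++) {
            double x = a.nextDouble();
            double y = b.nextDouble();
            System.out.println(x + " == " + y + " : " + (x == y)); // always true
        }
    }
}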

From source file: ml.shifu.shifu.core.dtrain.dt.DTWorker.java

/**
 * Add to training set or validation set according to validation rate.
 *
 * @param hashcode
 *            the hash code of the data
 * @param data
 *            data instance
 * @param isValidation
 *            if it is validation
 * @return true if the data goes to the training set, false otherwise.
 */
protected boolean addDataPairToDataSet(long hashcode, Data data, boolean isValidation) {
    if (this.isKFoldCV) {
        int k = this.modelConfig.getTrain().getNumKFold();
        if (hashcode % k == this.trainerId) {
            this.validationData.append(data);
            if (isPositive(data.label)) {
                this.positiveValidationCount += 1L;
            } else {
                this.negativeValidationCount += 1L;
            }
            return false;
        } else {
            this.trainingData.append(data);
            if (isPositive(data.label)) {
                this.positiveTrainCount += 1L;
            } else {
                this.negativeTrainCount += 1L;
            }
            return true;
        }
    }

    if (this.isManualValidation) {
        if (isValidation) {
            this.validationData.append(data);
            if (isPositive(data.label)) {
                this.positiveValidationCount += 1L;
            } else {
                this.negativeValidationCount += 1L;
            }
            return false;
        } else {
            this.trainingData.append(data);
            if (isPositive(data.label)) {
                this.positiveTrainCount += 1L;
            } else {
                this.negativeTrainCount += 1L;
            }
            return true;
        }
    } else {
        if (Double.compare(this.modelConfig.getValidSetRate(), 0d) != 0) {
            int classValue = (int) (data.label + 0.01f);
            Random random = null;
            if (this.isStratifiedSampling) {
                // each class uses its own random instance
                random = validationRandomMap.get(classValue);
                if (random == null) {
                    random = new Random();
                    this.validationRandomMap.put(classValue, random);
                }
            } else {
                // all data share one random instance
                random = validationRandomMap.get(0);
                if (random == null) {
                    random = new Random();
                    this.validationRandomMap.put(0, random);
                }
            }

            if (this.modelConfig.isFixInitialInput()) {
                // For fixed initial input: if hashcode % 100 is in [startHashCode, endHashCode),
                // the record goes to validation, otherwise to training. The start hashcode differs
                // between jobs to make sure bagging jobs get different data. If endHashCode is over
                // 100, check whether the hashcode is in [startHashCode, 100] or in [0, endHashCode].
                int startHashCode = (100 / this.modelConfig.getBaggingNum()) * this.trainerId;
                int endHashCode = startHashCode
                        + Double.valueOf(this.modelConfig.getValidSetRate() * 100).intValue();
                if (isInRange(hashcode, startHashCode, endHashCode)) {
                    this.validationData.append(data);
                    if (isPositive(data.label)) {
                        this.positiveValidationCount += 1L;
                    } else {
                        this.negativeValidationCount += 1L;
                    }
                    return false;
                } else {
                    this.trainingData.append(data);
                    if (isPositive(data.label)) {
                        this.positiveTrainCount += 1L;
                    } else {
                        this.negativeTrainCount += 1L;
                    }
                    return true;
                }
            } else {
                // Not fixed initial input: if the random value >= validRate, the record goes to
                // training, otherwise to validation.
                if (random.nextDouble() >= this.modelConfig.getValidSetRate()) {
                    this.trainingData.append(data);
                    if (isPositive(data.label)) {
                        this.positiveTrainCount += 1L;
                    } else {
                        this.negativeTrainCount += 1L;
                    }
                    return true;
                } else {
                    this.validationData.append(data);
                    if (isPositive(data.label)) {
                        this.positiveValidationCount += 1L;
                    } else {
                        this.negativeValidationCount += 1L;
                    }
                    return false;
                }
            }
        } else {
            this.trainingData.append(data);
            if (isPositive(data.label)) {
                this.positiveTrainCount += 1L;
            } else {
                this.negativeTrainCount += 1L;
            }
            return true;
        }
    }
}
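
The nextDouble() call in the final branch above is what drives the random train/validation split: because the value is uniform on [0.0, 1.0), comparing it to the validation rate sends roughly that fraction of records to validation. The idiom in isolation, with a hypothetical validRate of 0.2:

import java.util.Random;

public class ValidationSplitDemo {
    public static void main(String[] args) {
        Random random = new Random();
        double validRate = 0.2; // hypothetical: ~20% of records go to validation
        int train = 0;
        int validation = 0;
        for (int i = 0; i < 100000; i++) {
            if (random.nextDouble() >= validRate) {
                train++;        // ~80% of draws land here
            } else {
                validation++;   // ~20% of draws land here
            }
        }
        System.out.println("train=" + train + " validation=" + validation);
    }
}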

From source file: org.sakaiproject.tool.assessment.services.GradingService.java

/**
 * CALCULATED_QUESTION
 * Takes a map of ranges and randomly chooses values for those ranges and stores them in a new map.
 */
public Map<String, String> determineRandomValuesForRanges(Map<String, String> variableRangeMap, long itemId,
        long gradingId, String agentId, int validAnswersAttemptCount) {
    Map<String, String> variableValueMap = new HashMap<String, String>();

    // seed random number generator
    long seed = getCalcuatedQuestionSeed(itemId, gradingId, agentId, validAnswersAttemptCount);
    Random generator = new Random(seed);

    Iterator<Map.Entry<String, String>> i = variableRangeMap.entrySet().iterator();
    while (i.hasNext()) {
        Map.Entry<String, String> entry = i.next();

        String delimRange = entry.getValue().toString(); // e.g. "-100|100,2"

        double minVal = Double.valueOf(delimRange.substring(0, delimRange.indexOf('|')));
        double maxVal = Double
                .valueOf(delimRange.substring(delimRange.indexOf('|') + 1, delimRange.indexOf(',')));
        int decimalPlaces = Integer
                .valueOf(delimRange.substring(delimRange.indexOf(',') + 1, delimRange.length()));

        // This line does the magic of creating the random variable value within the range.
        Double randomValue = minVal + (maxVal - minVal) * generator.nextDouble();

        // Trim off excess decimal places based on the decimalPlaces value
        BigDecimal bd = new BigDecimal(randomValue);
        bd = bd.setScale(decimalPlaces, BigDecimal.ROUND_HALF_UP);
        randomValue = bd.doubleValue();

        String displayNumber = randomValue.toString();
        // Remove ".0" if decimalPlaces ==0
        if (decimalPlaces == 0) {
            displayNumber = displayNumber.replace(".0", "");
        }

        variableValueMap.put(entry.getKey(), displayNumber);
    }

    return variableValueMap;
}
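
The key line above is minVal + (maxVal - minVal) * generator.nextDouble(), which maps the unit interval onto [minVal, maxVal); BigDecimal then rounds to the requested number of decimal places. A self-contained sketch of just that transformation (the bounds and scale are illustrative); BigDecimal.valueOf is used here rather than the new BigDecimal(double) constructor so the rounding starts from the double's shortest decimal representation:

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Random;

public class RangeValueDemo {
    public static void main(String[] args) {
        Random generator = new Random();
        double minVal = -100.0;
        double maxVal = 100.0;
        int decimalPlaces = 2;

        // Map nextDouble()'s [0.0, 1.0) onto [minVal, maxVal).
        double randomValue = minVal + (maxVal - minVal) * generator.nextDouble();

        // Round half-up to the requested number of decimal places.
        BigDecimal bd = BigDecimal.valueOf(randomValue).setScale(decimalPlaces, RoundingMode.HALF_UP);

        System.out.println(bd.doubleValue());
    }
}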

From source file: org.apache.qpid.server.store.berkeleydb.AbstractBDBMessageStore.java

public void removeMessage(long messageId, boolean sync) throws AMQStoreException {

    boolean complete = false;
    com.sleepycat.je.Transaction tx = null;

    Random rand = null;
    int attempts = 0;
    try {
        do {
            tx = null;
            try {
                tx = _environment.beginTransaction(null, null);

                //remove the message meta data from the store
                DatabaseEntry key = new DatabaseEntry();
                LongBinding.longToEntry(messageId, key);

                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Removing message id " + messageId);
                }

                OperationStatus status = _messageMetaDataDb.delete(tx, key);
                if (status == OperationStatus.NOTFOUND) {
                    LOGGER.info(
                            "Message not found (attempt to remove failed - probably application initiated rollback) "
                                    + messageId);
                }

                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Deleted metadata for message " + messageId);
                }

                //now remove the content data from the store if there is any.
                DatabaseEntry contentKeyEntry = new DatabaseEntry();
                LongBinding.longToEntry(messageId, contentKeyEntry);
                _messageContentDb.delete(tx, contentKeyEntry);

                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Deleted content for message " + messageId);
                }

                commit(tx, sync);
                complete = true;
                tx = null;
            } catch (LockConflictException e) {
                try {
                    if (tx != null) {
                        tx.abort();
                    }
                } catch (DatabaseException e2) {
                    LOGGER.warn("Unable to abort transaction after LockConflictExcption", e2);
                    // rethrow the original log conflict exception, the secondary exception should already have
                    // been logged.
                    throw e;
                }

                LOGGER.warn("Lock timeout exception. Retrying (attempt " + (attempts + 1) + " of "
                        + LOCK_RETRY_ATTEMPTS + ") " + e);

                if (++attempts < LOCK_RETRY_ATTEMPTS) {
                    if (rand == null) {
                        rand = new Random();
                    }

                    try {
                        Thread.sleep(500L + (long) (500L * rand.nextDouble()));
                    } catch (InterruptedException e1) {
                        // ignored; the surrounding loop simply retries
                    }
                } else {
                    // rethrow the lock conflict exception since we could not resolve it by retrying
                    throw e;
                }
            }
        } while (!complete);
    } catch (DatabaseException e) {
        LOGGER.error("Unexpected BDB exception", e);

        if (tx != null) {
            try {
                tx.abort();
                tx = null;
            } catch (DatabaseException e1) {
                throw new AMQStoreException("Error aborting transaction " + e1, e1);
            }
        }

        throw new AMQStoreException(
                "Error removing message with id " + messageId + " from database: " + e.getMessage(), e);
    } finally {
        if (tx != null) {
            try {
                tx.abort();
                tx = null;
            } catch (DatabaseException e1) {
                throw new AMQStoreException("Error aborting transaction " + e1, e1);
            }
        }
    }
}
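
Here nextDouble() supplies retry jitter: each retry sleeps a fixed 500 ms base plus a uniformly random extra 0-500 ms, so concurrent writers hitting the same lock conflict do not all wake and collide again at the same instant. The sleep calculation in isolation:

import java.util.Random;

public class JitterDemo {
    public static void main(String[] args) throws InterruptedException {
        Random rand = new Random();
        // 500 ms base delay plus uniform jitter in [0, 500) ms
        long delayMs = 500L + (long) (500L * rand.nextDouble());
        System.out.println("sleeping " + delayMs + " ms before retrying");
        Thread.sleep(delayMs);
    }
}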

From source file: org.lilyproject.hadooptestfw.fork.HBaseTestingUtility.java

/**
 * Creates a random table with the given parameters
 */
public HTable createRandomTable(String tableName, final Collection<String> families, final int maxVersions,
        final int numColsPerRow, final int numFlushes, final int numRegions, final int numRowsPerFlush)
        throws IOException, InterruptedException {

    LOG.info("\n\nCreating random table " + tableName + " with " + numRegions + " regions, " + numFlushes
            + " storefiles per region, " + numRowsPerFlush + " rows per flush, maxVersions=" + maxVersions
            + "\n");

    final Random rand = new Random(tableName.hashCode() * 17L + 12938197137L);
    final int numCF = families.size();
    final byte[][] cfBytes = new byte[numCF][];
    final byte[] tableNameBytes = Bytes.toBytes(tableName);

    {
        int cfIndex = 0;
        for (String cf : families) {
            cfBytes[cfIndex++] = Bytes.toBytes(cf);
        }
    }

    final int actualStartKey = 0;
    final int actualEndKey = Integer.MAX_VALUE;
    final int keysPerRegion = (actualEndKey - actualStartKey) / numRegions;
    final int splitStartKey = actualStartKey + keysPerRegion;
    final int splitEndKey = actualEndKey - keysPerRegion;
    final String keyFormat = "%08x";
    final HTable table = createTable(tableNameBytes, cfBytes, maxVersions,
            Bytes.toBytes(String.format(keyFormat, splitStartKey)),
            Bytes.toBytes(String.format(keyFormat, splitEndKey)), numRegions);
    if (hbaseCluster != null) {
        getMiniHBaseCluster().flushcache(HConstants.META_TABLE_NAME);
    }

    for (int iFlush = 0; iFlush < numFlushes; ++iFlush) {
        for (int iRow = 0; iRow < numRowsPerFlush; ++iRow) {
            final byte[] row = Bytes.toBytes(
                    String.format(keyFormat, actualStartKey + rand.nextInt(actualEndKey - actualStartKey)));

            Put put = new Put(row);
            Delete del = new Delete(row);
            for (int iCol = 0; iCol < numColsPerRow; ++iCol) {
                final byte[] cf = cfBytes[rand.nextInt(numCF)];
                final long ts = rand.nextInt();
                final byte[] qual = Bytes.toBytes("col" + iCol);
                if (rand.nextBoolean()) {
                    final byte[] value = Bytes
                            .toBytes("value_for_row_" + iRow + "_cf_" + Bytes.toStringBinary(cf) + "_col_"
                                    + iCol + "_ts_" + ts + "_random_" + rand.nextLong());
                    put.add(cf, qual, ts, value);
                } else if (rand.nextDouble() < 0.8) {
                    del.deleteColumn(cf, qual, ts);
                } else {
                    del.deleteColumns(cf, qual, ts);
                }
            }

            if (!put.isEmpty()) {
                table.put(put);
            }

            if (!del.isEmpty()) {
                table.delete(del);
            }
        }
        LOG.info("Initiating flush #" + iFlush + " for table " + tableName);
        table.flushCommits();
        if (hbaseCluster != null) {
            getMiniHBaseCluster().flushcache(tableNameBytes);
        }
    }

    return table;
}
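
Note how the per-column branch uses random draws as weights: nextBoolean() selects a put about half the time, and of the remaining draws, nextDouble() < 0.8 picks deleteColumn (~40% overall), leaving ~10% for deleteColumns. The same thresholding idiom in isolation, with the weights from the code above:

import java.util.Random;

public class WeightedChoiceDemo {
    public static void main(String[] args) {
        Random rand = new Random();
        int puts = 0, deleteColumn = 0, deleteColumns = 0;
        for (int i = 0; i < 100000; i++) {
            if (rand.nextBoolean()) {
                puts++;             // ~50% of iterations
            } else if (rand.nextDouble() < 0.8) {
                deleteColumn++;     // ~40% of iterations (0.5 * 0.8)
            } else {
                deleteColumns++;    // ~10% of iterations (0.5 * 0.2)
            }
        }
        System.out.println("put=" + puts + " deleteColumn=" + deleteColumn
                + " deleteColumns=" + deleteColumns);
    }
}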

From source file: org.apache.hadoop.raid.DBStripeStore.java

@Override
public void putStripe(Codec codec, List<Block> parityBlks, List<Block> srcBlks) throws IOException {

    if (parityBlks.size() != codec.parityLength) {
        throw new IOException("Number of parity blocks " + parityBlks.size() + " doesn't match codec "
                + codec.id + " (" + codec.parityLength + ")");
    }
    if (srcBlks.size() > codec.stripeLength) {
        throw new IOException("Number of source blocks " + srcBlks.size() + " is greater than codec " + codec.id
                + " (" + codec.stripeLength + ")");
    }

    List<Object> getStripeSqlParams = constructGetStripeSqlParam(codec, parityBlks, srcBlks);
    String insertStripeSql = getInsertStripeSql(parityBlks, srcBlks);

    int waitMS = 3000; // wait for at least 3sec before next retry.
    Random rand = new Random();
    for (int i = 0; i < sqlNumRetries; ++i) {
        Connection conn = null;
        PreparedStatement getStripeStatement = null;
        ResultSet generatedKeys = null;
        PreparedStatement insertStripeStatement = null;
        String url = null;
        try {
            try {
                url = connectionFactory.getUrl(true);
            } catch (IOException ioe) {
                LOG.warn("Cannot get DB URL, fall back to the default one:" + defaultUrl, ioe);
                url = defaultUrl;
                if (url == null) {
                    throw ioe;
                }
            }
            LOG.info("Attepting connection with URL " + url);
            conn = connectionFactory.getConnection(url);
            conn.setAutoCommit(false);
            defaultUrl = url;
            getStripeStatement = DBUtils.getPreparedStatement(conn, NEW_STRIPE_ID_SQL, getStripeSqlParams,
                    true);
            int recordsUpdated = getStripeStatement.executeUpdate();
            LOG.info("rows inserted: " + recordsUpdated + " sql: " + NEW_STRIPE_ID_SQL);
            generatedKeys = getStripeStatement.getGeneratedKeys();
            List<List<Object>> results = DBUtils.getResults(generatedKeys);
            Long stripeId = (Long) results.get(0).get(0);
            List<Object> insertStripeSqlParams = constructInsertStripeSqlParam(codec, parityBlks, srcBlks,
                    stripeId);
            insertStripeStatement = DBUtils.getPreparedStatement(conn, insertStripeSql, insertStripeSqlParams,
                    false);
            recordsUpdated = insertStripeStatement.executeUpdate();
            conn.commit();
            LOG.info("rows inserted: " + recordsUpdated + " sql: " + insertStripeSql);
            StripeInfo si = new StripeInfo(codec, null, parityBlks, srcBlks);
            LOG.info("Put " + si + " into stripe store");
            Thread.sleep(putStripeSleepTime + rand.nextInt(1000));
            return;
        } catch (Exception e) {
            // We should catch a better exception than Exception, but since
            // DBConnectionUrlFactory.getUrl() defines throws Exception, it's hard
            // for us to figure out the complete set it can throw. We follow
            // DBConnectionUrlFactory.getUrl()'s definition to catch Exception.
            // It shouldn't be a big problem as after numRetries, we anyway exit.
            LOG.info("Exception " + e + ". Will retry " + (sqlNumRetries - i) + " times.");
            // Introducing a random factor to the wait time before another retry.
            // The wait time is dependent on # of failures and a random factor.
            // At the first time of getting a SQLException, the wait time
            // is a random number between [0,300] msec. If the first retry
            // still fails, we will wait 300 msec grace period before the 2nd retry.
            // Also at the second retry, the waiting window is expanded to 600 msec
            // alleviating the request rate from the server. Similarly the 3rd retry
            // will wait 600 msec grace period before retry and the waiting window is
            // expanded to 1200 msec.
            if (conn != null) {
                try {
                    conn.rollback();
                    LOG.info("putStripe Transaction was rolled back");
                } catch (SQLException excep) {
                    LOG.error(excep);
                }
            }
            waitMS += waitMS;
            if (waitMS > DBUtils.RETRY_MAX_INTERVAL_SEC * 1000) {
                waitMS = DBUtils.RETRY_MAX_INTERVAL_SEC * 1000;
            }
            double waitTime = waitMS + waitMS * rand.nextDouble();
            if (i + 1 == sqlNumRetries) {
                LOG.error("Still got Exception after " + sqlNumRetries + "  retries.", e);
                throw new IOException(e);
            }
            try {
                Thread.sleep((long) waitTime);
            } catch (InterruptedException ie) {
                throw new IOException(ie);
            }
        } finally {
            try {
                if (conn != null) {
                    conn.setAutoCommit(true);
                }
            } catch (SQLException sqlExp) {
                LOG.warn("Fail to set AutoCommit to true", sqlExp);
            }
            DBUtils.close(generatedKeys, new PreparedStatement[] { getStripeStatement, insertStripeStatement },
                    conn);
        }
    }
}
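
The retry loop above doubles waitMS after each failure (up to a cap) and then sleeps for waitMS + waitMS * rand.nextDouble(), i.e. a duration drawn uniformly from [waitMS, 2 * waitMS). A compact sketch of that backoff schedule, assuming a 30-second cap in place of DBUtils.RETRY_MAX_INTERVAL_SEC:

import java.util.Random;

public class BackoffDemo {
    public static void main(String[] args) {
        Random rand = new Random();
        int waitMS = 3000;          // initial base wait, as in the code above
        final int capMS = 30000;    // assumed stand-in for RETRY_MAX_INTERVAL_SEC * 1000
        for (int attempt = 1; attempt <= 5; attempt++) {
            waitMS = Math.min(waitMS + waitMS, capMS);              // double, then clamp
            double waitTime = waitMS + waitMS * rand.nextDouble();  // uniform in [waitMS, 2 * waitMS)
            System.out.println("attempt " + attempt + ": wait " + (long) waitTime + " ms");
        }
    }
}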

From source file: org.apache.pig.test.TestBuiltin.java

@Test
public void testMathFuncs() throws Exception {
    Random generator = new Random();
    generator.setSeed(System.currentTimeMillis());
    Double delta = 0.1;
    // We assume that UDFs are stored in org.apache.pig.builtin.
    // Change this test case if we add more hierarchy later.
    // Also, we assume each UDF has a corresponding java.lang.Math
    // method with the same name in lowercase.
    String[] mathFuncs = { "SIN", "SINH", "ASIN", "COS", "COSH", "ACOS", "TAN", "TANH", "ATAN", "LOG", "LOG10",
            "SQRT", "CEIL", "EXP", "FLOOR", "CBRT" };
    String udfPackage = "org.apache.pig.builtin.";
    //String[] mathNonStdFuncs = {};
    EvalFunc<Double> evalFunc;
    Tuple tup;
    Double input, actual, expected;
    Method mathMethod;
    String msg;
    for (String func : mathFuncs) {
        evalFunc = (EvalFunc<Double>) Class.forName(udfPackage + func).newInstance();
        tup = TupleFactory.getInstance().newTuple(1);
        // double value between 0.0 and 1.0
        input = generator.nextDouble();
        tup.set(0, input);
        mathMethod = Math.class.getDeclaredMethod(func.toLowerCase(), double.class);
        actual = evalFunc.exec(tup);
        expected = (Double) mathMethod.invoke(null, input);
        msg = "[Testing " + func + " on input: " + input + " ( (actual) " + actual + " == " + expected
                + " (expected) )]";
        assertEquals(msg, actual, expected, delta);
    }
}

From source file: org.apache.hadoop.hbase.HBaseTestingUtility.java

/** Creates a random table with the given parameters */
public HTable createRandomTable(String tableName, final Collection<String> families, final int maxVersions,
        final int numColsPerRow, final int numFlushes, final int numRegions, final int numRowsPerFlush)
        throws IOException, InterruptedException {

    LOG.info("\n\nCreating random table " + tableName + " with " + numRegions + " regions, " + numFlushes
            + " storefiles per region, " + numRowsPerFlush + " rows per flush, maxVersions=" + maxVersions
            + "\n");

    final Random rand = new Random(tableName.hashCode() * 17L + 12938197137L);
    final int numCF = families.size();
    final byte[][] cfBytes = new byte[numCF][];
    {
        int cfIndex = 0;
        for (String cf : families) {
            cfBytes[cfIndex++] = Bytes.toBytes(cf);
        }
    }

    final int actualStartKey = 0;
    final int actualEndKey = Integer.MAX_VALUE;
    final int keysPerRegion = (actualEndKey - actualStartKey) / numRegions;
    final int splitStartKey = actualStartKey + keysPerRegion;
    final int splitEndKey = actualEndKey - keysPerRegion;
    final String keyFormat = "%08x";
    final HTable table = createTable(tableName, cfBytes, maxVersions,
            Bytes.toBytes(String.format(keyFormat, splitStartKey)),
            Bytes.toBytes(String.format(keyFormat, splitEndKey)), numRegions);

    if (hbaseCluster != null) {
        getMiniHBaseCluster().flushcache(TableName.META_TABLE_NAME);
    }

    for (int iFlush = 0; iFlush < numFlushes; ++iFlush) {
        for (int iRow = 0; iRow < numRowsPerFlush; ++iRow) {
            final byte[] row = Bytes.toBytes(
                    String.format(keyFormat, actualStartKey + rand.nextInt(actualEndKey - actualStartKey)));

            Put put = new Put(row);
            Delete del = new Delete(row);
            for (int iCol = 0; iCol < numColsPerRow; ++iCol) {
                final byte[] cf = cfBytes[rand.nextInt(numCF)];
                final long ts = rand.nextInt();
                final byte[] qual = Bytes.toBytes("col" + iCol);
                if (rand.nextBoolean()) {
                    final byte[] value = Bytes
                            .toBytes("value_for_row_" + iRow + "_cf_" + Bytes.toStringBinary(cf) + "_col_"
                                    + iCol + "_ts_" + ts + "_random_" + rand.nextLong());
                    put.add(cf, qual, ts, value);
                } else if (rand.nextDouble() < 0.8) {
                    del.deleteColumn(cf, qual, ts);
                } else {
                    del.deleteColumns(cf, qual, ts);
                }
            }

            if (!put.isEmpty()) {
                table.put(put);
            }

            if (!del.isEmpty()) {
                table.delete(del);
            }
        }
        LOG.info("Initiating flush #" + iFlush + " for table " + tableName);
        table.flushCommits();
        if (hbaseCluster != null) {
            getMiniHBaseCluster().flushcache(table.getName());
        }
    }

    return table;
}