Example usage for java.util.Random.setSeed

List of usage examples for java.util.Random.setSeed

Introduction

This page collects usage examples for java.util.Random.setSeed.

Prototype

public synchronized void setSeed(long seed) 

Document

Sets the seed of this random number generator using a single long seed.
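
Before the project examples below, here is a minimal, self-contained sketch of that behavior (the class name SetSeedDemo and the seed value 42 are illustrative, not taken from any of the projects): seeding a generator with the same value twice makes it reproduce the same sequence.

import java.util.Random;

public class SetSeedDemo {
    public static void main(String[] args) {
        Random random = new Random();

        random.setSeed(42L);                 // fix the generator's internal state
        int first = random.nextInt(100);

        random.setSeed(42L);                 // same seed, same state, same sequence
        int second = random.nextInt(100);

        System.out.println(first == second); // always prints: true
    }
}

This reproducibility is what several of the tests below rely on: they draw a random seed, log or print it, then re-seed with it so a failing run can be replayed.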

Usage

From source file:com.ephesoft.dcma.encryption.core.EncryptorDecryptor.java

private byte[] generateSalt(int saltLength) {
    Random random = new Random();
    random.setSeed(System.currentTimeMillis());
    byte[] randomBytes = new byte[saltLength];
    random.nextBytes(randomBytes);
    return randomBytes;
}

From source file:io.kloudwork.controller.LoginController.java

private String generateCSRFToken(int length) {
    char[] validChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456879".toCharArray();
    Random random = new Random();
    char[] buffer = new char[length];
    for (int i = 0; i < length; ++i) {
        if ((i % 10) == 0) {
            random.setSeed(secureRandom.nextLong());
        }
        buffer[i] = validChars[random.nextInt(validChars.length)];
    }
    return new String(buffer);
}

From source file:org.apache.hadoop.mapreduce.lib.input.TestCombineSequenceFileInputFormat.java

@Test(timeout = 10000)
public void testFormat() throws IOException, InterruptedException {
    Job job = Job.getInstance(conf);

    Random random = new Random();
    long seed = random.nextLong();
    random.setSeed(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    // create files with a variety of lengths
    createFiles(length, numFiles, random, job);

    TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
    // create a combine split for the files
    InputFormat<IntWritable, BytesWritable> format = new CombineSequenceFileInputFormat<IntWritable, BytesWritable>();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        List<InputSplit> splits = format.getSplits(job);
        LOG.info("splitting: got =        " + splits.size());

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.size());
        InputSplit split = splits.get(0);
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        RecordReader<IntWritable, BytesWritable> reader = format.createRecordReader(split, context);
        MapContext<IntWritable, BytesWritable, IntWritable, BytesWritable> mcontext = new MapContextImpl<IntWritable, BytesWritable, IntWritable, BytesWritable>(
                job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);
        assertEquals("reader class is CombineFileRecordReader.", CombineFileRecordReader.class,
                reader.getClass());

        try {
            while (reader.nextKeyValue()) {
                IntWritable key = reader.getCurrentKey();
                BytesWritable value = reader.getCurrentValue();
                assertNotNull("Value should not be null.", value);
                final int k = key.get();
                LOG.debug("read " + k);
                assertFalse("Key in multiple partitions.", bits.get(k));
                bits.set(k);
            }
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}

From source file:org.apache.hadoop.fs.TestLocalFileSystem.java

void testFileCrcInternal(boolean inlineChecksum) throws IOException {
    ((Log4JLogger) HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);

    Random random = new Random(1);

    final long seed = random.nextLong();
    random.setSeed(seed);
    FileSystem fs = FileSystem.getLocal(new Configuration());

    // generate random data
    final byte[] data = new byte[1024 * 1024 + 512 * 7 + 66];
    random.nextBytes(data);

    // write data to a file
    Path foo = new Path(TEST_ROOT_DIR, "foo_" + inlineChecksum);
    {
        final FSDataOutputStream out = fs.create(foo, false, 512, (short) 2, 512);
        out.write(data);
        out.close();
    }

    // compute data CRC
    DataChecksum checksum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 1);
    checksum.update(data, 0, data.length);

    // compute checksum
    final int crc = fs.getFileCrc(foo);
    System.out.println("crc=" + crc);

    TestCase.assertEquals((int) checksum.getValue(), crc);
}

From source file:io.undertow.server.handlers.ChunkedRequestNotConsumedTestCase.java

@Test
public void testChunkedRequestNotConsumed() throws IOException {
    HttpPost post = new HttpPost(DefaultServer.getDefaultServerURL() + "/path");
    TestHttpClient client = new TestHttpClient();
    try {
        final Random random = new Random();
        final int seed = random.nextInt();
        System.out.print("Using Seed " + seed);
        random.setSeed(seed);

        for (int i = 0; i < 3; ++i) {
            post.setEntity(new StringEntity("") {
                @Override
                public long getContentLength() {
                    return -1;
                }

                @Override
                public boolean isChunked() {
                    return true;
                }

                @Override
                public void writeTo(OutputStream outstream) throws IOException {
                    outstream.flush();
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {

                    }
                    outstream.write(MESSAGE.getBytes(StandardCharsets.US_ASCII));
                }
            });
            HttpResponse result = client.execute(post);
            Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            HttpClientUtils.readResponse(result);
        }
    } finally {

        client.getConnectionManager().shutdown();
    }
}

From source file:org.apache.hadoop.mapred.TestCombineSequenceFileInputFormat.java

@Test(timeout = 10000)
public void testFormat() throws Exception {
    JobConf job = new JobConf(conf);

    Reporter reporter = Reporter.NULL;

    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);

    localFs.delete(workDir, true);

    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    // create files with a variety of lengths
    createFiles(length, numFiles, random);

    // create a combine split for the files
    InputFormat<IntWritable, BytesWritable> format = new CombineSequenceFileInputFormat<IntWritable, BytesWritable>();
    IntWritable key = new IntWritable();
    BytesWritable value = new BytesWritable();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / (SequenceFile.SYNC_INTERVAL / 20)) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        InputSplit[] splits = format.getSplits(job, numSplits);
        LOG.info("splitting: got =        " + splits.length);

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.length);
        InputSplit split = splits[0];
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check each split
        BitSet bits = new BitSet(length);
        RecordReader<IntWritable, BytesWritable> reader = format.getRecordReader(split, job, reporter);
        try {
            while (reader.next(key, value)) {
                assertFalse("Key in multiple partitions.", bits.get(key.get()));
                bits.set(key.get());
            }
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}

From source file:edu.cornell.med.icb.goby.algorithmic.algorithm.TestComputeStartCount.java

@Test
public void testComputeStarts5() throws IOException {
    initializeTestDirectory();
    final Random random = new Random();
    random.setSeed(023); // note: 023 is an octal literal (decimal 19)
    final ComputeStartCount computer = new ComputeStartCount(ComputeStartCount.POSITIVE_STRAND_ONLY);
    for (int i = 0; i < 100000; i++) {
        final int start = chooseRandom(random, 1, 10000);

        final int length = chooseRandom(random, 10, 100);
        computer.populate(start, start + length, true);

    }
    computer.populate(10010, 10020, true);
    computer.populate(10010, 10020, true);
    computer.populate(10011, 10020, true);

    final String filename = "test-results/start-counts/103.bin";
    final CountsWriterI writerI = new CountsWriter(new FileOutputStream(filename));
    computer.baseCount(writerI);
    writerI.close();

    final CountsReader reader = new CountsReader(new FileInputStream(filename));
    reader.skipTo(10003);

    assertEquals(2, reader.getCount());
    assertEquals(10010, reader.getPosition());
    reader.hasNextTransition();
    reader.nextTransition();

    assertEquals(10011, reader.getPosition());
    assertEquals(1, reader.getCount());
    reader.close();
}

From source file:org.apache.hadoop.mapreduce.lib.output.TestMRSequenceFileAsBinaryOutputFormat.java

public void testBinary() throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    Job job = new Job(conf);

    Path outdir = new Path(System.getProperty("test.build.data", "/tmp"), "outseq");
    Random r = new Random();
    long seed = r.nextLong();
    r.setSeed(seed);

    FileOutputFormat.setOutputPath(job, outdir);

    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, IntWritable.class);
    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, DoubleWritable.class);

    SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);
    SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    BytesWritable bkey = new BytesWritable();
    BytesWritable bval = new BytesWritable();

    TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
    OutputFormat<BytesWritable, BytesWritable> outputFormat = new SequenceFileAsBinaryOutputFormat();
    OutputCommitter committer = outputFormat.getOutputCommitter(context);
    committer.setupJob(job);
    RecordWriter<BytesWritable, BytesWritable> writer = outputFormat.getRecordWriter(context);

    IntWritable iwritable = new IntWritable();
    DoubleWritable dwritable = new DoubleWritable();
    DataOutputBuffer outbuf = new DataOutputBuffer();
    LOG.info("Creating data by SequenceFileAsBinaryOutputFormat");
    try {
        for (int i = 0; i < RECORDS; ++i) {
            iwritable = new IntWritable(r.nextInt());
            iwritable.write(outbuf);
            bkey.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            dwritable = new DoubleWritable(r.nextDouble());
            dwritable.write(outbuf);
            bval.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            writer.write(bkey, bval);
        }
    } finally {
        writer.close(context);
    }
    committer.commitTask(context);
    committer.commitJob(job);

    InputFormat<IntWritable, DoubleWritable> iformat = new SequenceFileInputFormat<IntWritable, DoubleWritable>();
    int count = 0;
    r.setSeed(seed);
    SequenceFileInputFormat.setInputPaths(job, outdir);
    LOG.info("Reading data by SequenceFileInputFormat");
    for (InputSplit split : iformat.getSplits(job)) {
        RecordReader<IntWritable, DoubleWritable> reader = iformat.createRecordReader(split, context);
        MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable> mcontext = new MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable>(
                job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);
        try {
            int sourceInt;
            double sourceDouble;
            while (reader.nextKeyValue()) {
                sourceInt = r.nextInt();
                sourceDouble = r.nextDouble();
                iwritable = reader.getCurrentKey();
                dwritable = reader.getCurrentValue();
                assertEquals("Keys don't match: " + "*" + iwritable.get() + ":" + sourceInt + "*", sourceInt,
                        iwritable.get());
                assertTrue("Vals don't match: " + "*" + dwritable.get() + ":" + sourceDouble + "*",
                        Double.compare(dwritable.get(), sourceDouble) == 0);
                ++count;
            }
        } finally {
            reader.close();
        }
    }
    assertEquals("Some records not found", RECORDS, count);
}

From source file:org.apache.hadoop.mapred.TestSequenceFileAsBinaryOutputFormat.java

public void testBinary() throws IOException {
    JobConf job = new JobConf();
    FileSystem fs = FileSystem.getLocal(job);

    Path dir = new Path(
            new Path(new Path(System.getProperty("test.build.data", ".")), FileOutputCommitter.TEMP_DIR_NAME),
            "_" + attempt);
    Path file = new Path(dir, "testbinary.seq");
    Random r = new Random();
    long seed = r.nextLong();
    r.setSeed(seed);

    fs.delete(dir, true);
    if (!fs.mkdirs(dir)) {
        fail("Failed to create output directory");
    }

    job.set("mapred.task.id", attempt);
    FileOutputFormat.setOutputPath(job, dir.getParent().getParent());
    FileOutputFormat.setWorkOutputPath(job, dir);

    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job, IntWritable.class);
    SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job, DoubleWritable.class);

    SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);
    SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    BytesWritable bkey = new BytesWritable();
    BytesWritable bval = new BytesWritable();

    RecordWriter<BytesWritable, BytesWritable> writer = new SequenceFileAsBinaryOutputFormat()
            .getRecordWriter(fs, job, file.toString(), Reporter.NULL);

    IntWritable iwritable = new IntWritable();
    DoubleWritable dwritable = new DoubleWritable();
    DataOutputBuffer outbuf = new DataOutputBuffer();
    LOG.info("Creating data by SequenceFileAsBinaryOutputFormat");
    try {
        for (int i = 0; i < RECORDS; ++i) {
            iwritable = new IntWritable(r.nextInt());
            iwritable.write(outbuf);
            bkey.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            dwritable = new DoubleWritable(r.nextDouble());
            dwritable.write(outbuf);
            bval.set(outbuf.getData(), 0, outbuf.getLength());
            outbuf.reset();
            writer.write(bkey, bval);
        }
    } finally {
        writer.close(Reporter.NULL);
    }

    InputFormat<IntWritable, DoubleWritable> iformat = new SequenceFileInputFormat<IntWritable, DoubleWritable>();
    int count = 0;
    r.setSeed(seed);
    DataInputBuffer buf = new DataInputBuffer();
    final int NUM_SPLITS = 3;
    SequenceFileInputFormat.addInputPath(job, file);
    LOG.info("Reading data by SequenceFileInputFormat");
    for (InputSplit split : iformat.getSplits(job, NUM_SPLITS)) {
        RecordReader<IntWritable, DoubleWritable> reader = iformat.getRecordReader(split, job, Reporter.NULL);
        try {
            int sourceInt;
            double sourceDouble;
            while (reader.next(iwritable, dwritable)) {
                sourceInt = r.nextInt();
                sourceDouble = r.nextDouble();
                assertEquals("Keys don't match: " + "*" + iwritable.get() + ":" + sourceInt + "*", sourceInt,
                        iwritable.get());
                assertTrue("Vals don't match: " + "*" + dwritable.get() + ":" + sourceDouble + "*",
                        Double.compare(dwritable.get(), sourceDouble) == 0);
                ++count;
            }
        } finally {
            reader.close();
        }
    }
    assertEquals("Some records not found", RECORDS, count);
}

From source file:com.ibm.bi.dml.runtime.matrix.data.LibMatrixDatagen.java

/**
 * Computes the expected number of non-zeros per block for a random
 * matrix of the given dimensions and sparsity.
 *
 * @param nrow     number of rows
 * @param ncol     number of columns
 * @param brlen    number of rows per block
 * @param bclen    number of columns per block
 * @param sparsity expected fraction of non-zero cells
 * @return number of non-zeros assigned to each block
 * @throws DMLRuntimeException if the number of blocks exceeds Integer.MAX_VALUE
 */
public static long[] computeNNZperBlock(long nrow, long ncol, int brlen, int bclen, double sparsity)
        throws DMLRuntimeException {
    // Compute the block count as a long first so the overflow check below can
    // actually trigger (casting to int before the check would silently wrap).
    long numBlocksLong = (long) (Math.ceil((double) nrow / brlen) * Math.ceil((double) ncol / bclen));

    // CURRENT:
    //       Total #of NNZ is set to the expected value (nrow*ncol*sparsity).
    // TODO:
    //      Instead of using the expected value, one should actually
    //       treat NNZ as a random variable and accordingly generate a random value.
    long nnz = (long) Math.ceil(nrow * (ncol * sparsity));

    if (numBlocksLong > Integer.MAX_VALUE) {
        throw new DMLRuntimeException(
                "A random matrix of size [" + nrow + "," + ncol + "] can not be created. Number of blocks ("
                        + numBlocksLong + ") exceeds the maximum integer size. Try to increase the block size.");
    }
    int numBlocks = (int) numBlocksLong;

    // Compute block-level NNZ
    long[] ret = new long[numBlocks];
    Arrays.fill(ret, 0);

    if (nnz < numBlocks) {
        // Ultra-sparse matrix

        // generate the number of blocks with at least one non-zero
        // = a random number between [1,nnz]
        Random runif = new Random(System.nanoTime());
        int numNZBlocks = 1;
        if (nnz - 1 > 0)
            numNZBlocks += runif.nextInt((int) (nnz - 1)); // To avoid exception from random.nextInt(0) 

        // distribute non-zeros across numNZBlocks

        // compute proportions for each nzblock 
        // - divide (0,1] interval into numNZBlocks portions of random size
        double[] blockNNZproportions = new double[numNZBlocks];

        runif.setSeed(System.nanoTime());
        for (int i = 0; i < numNZBlocks - 1; i++) {
            blockNNZproportions[i] = runif.nextDouble();
        }
        blockNNZproportions[numNZBlocks - 1] = 1;
        // sort the values in ascending order
        Arrays.sort(blockNNZproportions);

        // compute actual number of non zeros per block according to proportions
        long actualnnz = 0;
        int bid;
        runif.setSeed(System.nanoTime());
        for (int i = 0; i < numNZBlocks; i++) {
            bid = -1;
            do {
                bid = runif.nextInt(numBlocks);
            } while (ret[bid] != 0);

            double prop = (i == 0 ? blockNNZproportions[i]
                    : (blockNNZproportions[i] - blockNNZproportions[i - 1]));
            ret[bid] = (long) Math.floor(prop * nnz);
            actualnnz += ret[bid];
        }

        // Code to make sure exact number of non-zeros are generated
        while (actualnnz < nnz) {
            bid = runif.nextInt(numBlocks);
            ret[bid]++;
            actualnnz++;
        }
    } else {
        int bid = 0;

        //long actualnnz = 0;
        for (long r = 0; r < nrow; r += brlen) {
            long curBlockRowSize = Math.min(brlen, (nrow - r));
            for (long c = 0; c < ncol; c += bclen) {
                long curBlockColSize = Math.min(bclen, (ncol - c));
                ret[bid] = (long) (curBlockRowSize * curBlockColSize * sparsity);
                //actualnnz += ret[bid];
                bid++;
            }
        }
    }
    return ret;
}