Example usage for java.util Random nextLong

List of usage examples for java.util Random nextLong

Introduction

On this page you can find example usage of java.util.Random.nextLong().

Prototype

public long nextLong() 

Source Link

Document

Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence.

Usage

From source file:enumj.EnumerableGenerator.java

/**
 * Builds an enumerable that yields a fresh {@code EnumerableGenerator} on every pull.
 *
 * @return an Enumerable backed by a fixed-seed PRNG
 */
public static Enumerable<EnumerableGenerator> generators() {
    final Random seedSource = new Random(9691); // fixed seed keeps runs reproducible
    return Enumerable.of(() -> {
        final long seed = seedSource.nextLong();
        return Optional.of(new EnumerableGenerator(seed));
    });
}

From source file:org.apache.gora.goraci.Walker.java

/**
 * Picks a random starting node for a walk by querying for the first node whose
 * key is at or after a randomly chosen key.
 *
 * @param rand  source of the random start key
 * @param store data store holding the linked-list nodes
 * @return the first node found at or after the random key, or {@code null} if none
 * @throws IOException if the query cannot be created
 */
private static CINode findStartNode(Random rand, DataStore<Long, CINode> store) throws IOException {
    Query<Long, CINode> query = store.newQuery();
    query.setStartKey(rand.nextLong());
    query.setLimit(1);
    query.setFields(PREV_FIELD);

    long t1 = System.currentTimeMillis();
    Result<Long, CINode> rs = store.execute(query);
    long t2 = System.currentTimeMillis();

    try {
        if (rs.next()) {
            // BUGFIX: the original format string "FSR %d %016x\n {}" mixed
            // printf-style specifiers with an SLF4J placeholder and supplied only
            // two arguments; SLF4J does not interpret printf specifiers, so the
            // values were never substituted into the message.
            LOG.info("FSR {} {}", t2 - t1, String.format("%016x", rs.getKey()));
            return rs.get();
        }
    } catch (Exception e) {
        // Log the full stack trace (not just the message) before rethrowing.
        LOG.error("Error reading query result", e);
        throw new RuntimeException(e);
    }

    LOG.info("FSR {}", t2 - t1);

    return null;
}

From source file:org.openxdata.server.security.util.OpenXDataSecurityUtil.java

/**
 * Generates a secure random token.
 *
 * <p>BUGFIX: the original used {@link java.util.Random}, whose output is
 * predictable after observing a small amount of state and is therefore
 * unsuitable for security tokens; {@link java.security.SecureRandom} is
 * used instead (fully qualified to avoid touching the import list).
 *
 * @return a secure random token
 */
public static String getRandomToken() {
    java.security.SecureRandom rng = new java.security.SecureRandom();
    return encodeString(Long.toString(System.currentTimeMillis()) + Long.toString(rng.nextLong()));
}

From source file:org.apache.hadoop.mapred.gridmix.TestFilePool.java

/**
 * Creates a small randomized tree of test files under {@code base} on the
 * local filesystem before the test class runs.
 *
 * <p>File {@code i} is named by the character {@code 'A' + i}, filled with
 * that same byte value, and sized {@code ((i % 13) + 1)} KiB.
 *
 * @throws IOException if the filesystem cannot be prepared or a file written
 */
@BeforeClass
public static void setup() throws IOException {
    final Configuration conf = new Configuration();
    final FileSystem fs = FileSystem.getLocal(conf).getRaw();
    fs.delete(base, true);
    // Log the seed so a failing run can be reproduced exactly.
    final Random r = new Random();
    final long seed = r.nextLong();
    r.setSeed(seed);
    LOG.info("seed: " + seed);
    fs.mkdirs(base);
    for (int i = 0; i < NFILES; ++i) {
        // With decreasing probability, drop the file into a random subdirectory
        // ("0".."2"). NOTE(review): each pick resolves against base, so the tree
        // is at most one level deep — presumably intentional; confirm.
        Path file = base;
        for (double d = 0.6; d > 0.0; d *= 0.8) {
            if (r.nextDouble() < d) {
                file = new Path(base, Integer.toString(r.nextInt(3)));
                continue;
            }
            break;
        }
        // IMPROVED: try-with-resources replaces the original try/finally
        // null-check close boilerplate; the stream is closed on all paths.
        try (OutputStream out = fs.create(new Path(file, "" + (char) ('A' + i)))) {
            final byte[] b = new byte[1024];
            Arrays.fill(b, (byte) ('A' + i));
            for (int len = ((i % 13) + 1) * 1024; len > 0; len -= 1024) {
                out.write(b);
            }
        }
    }
}

From source file:org.apache.hadoop.hdfs.notifier.benchmark.TxnGenerator.java

/**
 * Assembles the map-only job configuration for the transaction generator.
 *
 * @param conf base Hadoop configuration to wrap
 * @return a JobConf ready for submission
 * @throws IOException if the split files cannot be created
 */
private static JobConf createJobConf(Configuration conf) throws IOException {
    final JobConf job = new JobConf(conf);
    job.setJobName("transaction_generator");

    final String splitDir = workplace + "split/";

    job.set(TEST_DIR_LABEL, workplace);

    job.setMapSpeculativeExecution(false);
    job.setJarByClass(TxnGenerator.class);
    job.setMapperClass(GeneratorMapper.class);
    job.setInputFormat(TextInputFormat.class);

    FileInputFormat.addInputPath(job, new Path(splitDir));
    // Random suffix keeps repeated runs from colliding on the output path.
    final Random rnd = new Random();
    FileOutputFormat.setOutputPath(job, new Path(workplace, "output" + rnd.nextLong()));

    job.setNumReduceTasks(0);
    job.setNumMapTasks(numMappers);

    createSplitFiles(conf, new Path(splitDir));

    return job;
}

From source file:org.apache.hadoop.mapred.gridmix.TestGridmixRecord.java

/**
 * Serializes x and y with identical per-iteration seeds across a range of
 * sizes and asserts that equal seeds yield equal records, equal hash codes,
 * and byte-identical serialized output.
 */
static void eqSeedTest(GridmixRecord x, GridmixRecord y, int max) throws Exception {
    final Random rng = new Random();
    final long replaySeed = rng.nextLong();
    rng.setSeed(replaySeed);
    // Logged so a failing run can be replayed with the same seed.
    LOG.info("eqSeed: " + replaySeed);
    assertEquals(x.fixedBytes(), y.fixedBytes());
    final DataOutputBuffer bufX = new DataOutputBuffer();
    final DataOutputBuffer bufY = new DataOutputBuffer();
    for (int size = x.fixedBytes() + 1; size < max; ++size) {
        final long seed = rng.nextLong();
        setSerialize(x, seed, size, bufX);
        setSerialize(y, seed, size, bufY);
        assertEquals(x, y);
        assertEquals(x.hashCode(), y.hashCode());

        // Identical seed and size must serialize to identical bytes.
        assertEquals(bufX.getLength(), bufY.getLength());
        // Assumes buffer growth is deterministic for identical write sequences.
        assertEquals("Bad test", bufX.getData().length, bufY.getData().length);
        assertArrayEquals(bufX.getData(), bufY.getData());
    }
}

From source file:org.apache.hadoop.mapred.gridmix.TestGridmixRecord.java

/**
 * Checks that a record's serialized length tracks the requested size exactly,
 * both per-record and when replaying a concatenated stream of records.
 */
static void lengthTest(GridmixRecord x, GridmixRecord y, int min, int max) throws Exception {
    final Random rng = new Random();
    final long replaySeed = rng.nextLong();
    rng.setSeed(replaySeed);
    // Logged so a failing run can be replayed with the same seed.
    LOG.info("length: " + replaySeed);
    final DataInputBuffer in = new DataInputBuffer();
    final DataOutputBuffer perRecord = new DataOutputBuffer();
    final DataOutputBuffer concatenated = new DataOutputBuffer();
    for (int size = min; size < max; ++size) {
        setSerialize(x, rng.nextLong(), size, perRecord);
        // Direct write must produce exactly `size` bytes.
        assertEquals(size, perRecord.getLength());
        // Also append to the shared stream for the replay check below.
        x.write(concatenated);
        // Round-trip through y and verify both sides agree on the size.
        in.reset(perRecord.getData(), 0, perRecord.getLength());
        y.readFields(in);
        assertEquals(size, x.getSize());
        assertEquals(size, y.getSize());
    }
    // Replay the concatenated stream and verify each record's size in order.
    in.reset(concatenated.getData(), 0, concatenated.getLength());
    for (int size = min; size < max; ++size) {
        y.readFields(in);
        assertEquals(size, y.getSize());
    }
}

From source file:org.apache.hadoop.hdfs.protocol.RandomObjectsGenerators.java

/**
 * Builds an array of {@code count} {@code HdfsFileStatus} objects whose fields
 * are populated with pseudorandom values drawn from {@code rnd}.
 *
 * NOTE(review): the constructor arguments rely on Java's left-to-right
 * evaluation of the {@code rnd.nextX()} calls; reordering them would change
 * the generated values for a given seed.
 */
static HdfsFileStatus[] rndHdfsFileStatusArr(Random rnd, int count) {
    HdfsFileStatus[] files = new HdfsFileStatus[count];
    for (int i = 0; i < files.length; i++) {
        // Positional args presumably map to (length, isdir, replication, blocksize,
        // mtime, atime, permission, owner, group, path) — confirm against the
        // HdfsFileStatus constructor, which is declared elsewhere.
        files[i] = new HdfsFileStatus(rnd.nextLong(), rnd.nextBoolean(), rnd.nextInt(5), rnd.nextLong(),
                rnd.nextLong(), rnd.nextLong(), new FsPermission((short) rnd.nextInt()),
                "owner" + rnd.nextInt(), "group" + rnd.nextInt(), rndByteArr(rnd, rnd.nextInt(37)));
    }
    return files;
}

From source file:org.apache.hadoop.mapred.gridmix.TestGridmixRecord.java

/**
 * Writes a run of randomly-seeded records, deserializes them back, re-writes
 * the deserialized copies, and asserts the two byte streams are identical.
 */
static void randomReplayTest(GridmixRecord x, GridmixRecord y, int min, int max) throws Exception {
    final Random rng = new Random();
    final long replaySeed = rng.nextLong();
    rng.setSeed(replaySeed);
    // Logged so a failing run can be replayed with the same seed.
    LOG.info("randReplay: " + replaySeed);
    final DataOutputBuffer written = new DataOutputBuffer();
    for (int size = min; size < max; ++size) {
        final int before = written.getLength();
        x.setSeed(rng.nextLong());
        x.setSize(size);
        x.write(written);
        // Each record must occupy exactly `size` bytes in the stream.
        assertEquals(size, written.getLength() - before);
    }
    final DataInputBuffer in = new DataInputBuffer();
    in.reset(written.getData(), 0, written.getLength());
    final DataOutputBuffer replayed = new DataOutputBuffer();
    // Deserialize each record and serialize it again into a second buffer.
    for (int size = min; size < max; ++size) {
        final int before = in.getPosition();
        y.readFields(in);
        assertEquals(size, in.getPosition() - before);
        y.write(replayed);
    }
    // The replayed stream must match the original byte-for-byte.
    assertEquals(written.getLength(), replayed.getLength());
    // Assumes buffer growth is deterministic for identical write sequences.
    assertEquals("Bad test", written.getData().length, replayed.getData().length);
    assertArrayEquals(written.getData(), replayed.getData());
}

From source file:org.apache.hadoop.mapred.gridmix.TestGridmixRecord.java

/**
 * Verifies that object comparison (compareTo), seed comparison (compareSeed),
 * and raw-byte comparison via the supplied WritableComparator all agree for
 * records serialized at sizes in [min, max).
 */
static void binSortTest(GridmixRecord x, GridmixRecord y, int min, int max, WritableComparator cmp)
        throws Exception {
    final Random r = new Random();
    final long s = r.nextLong();
    r.setSeed(s); // logged below so a failing run can be replayed
    LOG.info("sort: " + s);
    final DataOutputBuffer out1 = new DataOutputBuffer();
    final DataOutputBuffer out2 = new DataOutputBuffer();
    for (int i = min; i < max; ++i) {
        // Serialize x at size i and check its seed survives the round trip.
        final long seed1 = r.nextLong();
        setSerialize(x, seed1, i, out1);
        assertEquals(0, x.compareSeed(seed1, Math.max(0, i - x.fixedBytes())));

        final long seed2 = r.nextLong();
        setSerialize(y, seed2, i, out2);
        assertEquals(0, y.compareSeed(seed2, Math.max(0, i - x.fixedBytes())));

        // Object comparison and byte-level comparison must give the same sign.
        final int chk = WritableComparator.compareBytes(out1.getData(), 0, out1.getLength(), out2.getData(), 0,
                out2.getLength());
        assertEquals(chk, x.compareTo(y));
        assertEquals(chk,
                cmp.compare(out1.getData(), 0, out1.getLength(), out2.getData(), 0, out2.getLength()));
        // Append a second copy of each record; the two copies within a buffer
        // must compare equal, and cross-buffer copies must preserve `chk`.
        final int s1 = out1.getLength();
        x.write(out1);
        assertEquals(0, cmp.compare(out1.getData(), 0, s1, out1.getData(), s1, out1.getLength() - s1));
        final int s2 = out2.getLength();
        y.write(out2);
        assertEquals(0, cmp.compare(out2.getData(), 0, s2, out2.getData(), s2, out2.getLength() - s2));
        assertEquals(chk, cmp.compare(out1.getData(), 0, s1, out2.getData(), s2, out2.getLength() - s2));
    }
}