Example usage for java.util Random nextBytes

Introduction

This page collects example usages of java.util.Random.nextBytes from open-source projects.

Prototype

public void nextBytes(byte[] bytes) 

Documentation

Generates random bytes and places them into a user-supplied byte array.
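
Because nextBytes fills the entire array it is given, the length of the array determines how many random bytes are generated, and seeding the Random instance makes the resulting byte sequence reproducible. A minimal, self-contained sketch (the class name NextBytesExample is illustrative, not taken from any project below):

import java.util.Arrays;
import java.util.Random;

public class NextBytesExample {
    public static void main(String[] args) {
        // nextBytes fills the whole array, so the array length
        // determines how many random bytes are produced.
        byte[] buffer = new byte[16];
        new Random().nextBytes(buffer);
        System.out.println(Arrays.toString(buffer));

        // With a fixed seed the byte sequence is reproducible, which is
        // why many of the tests below construct Random with a seed.
        byte[] a = new byte[8];
        byte[] b = new byte[8];
        new Random(42).nextBytes(a);
        new Random(42).nextBytes(b);
        System.out.println(Arrays.equals(a, b)); // prints true
    }
}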

Usage

From source file: org.brekka.stillingar.example.FieldTypesDOMTest.java

private static Testing writeConfig() {
    Random r = new Random();
    ConfigurationDocument doc = ConfigurationDocument.Factory.newInstance();
    Configuration newConfiguration = doc.addNewConfiguration();
    FeatureFlagType featureFlag = newConfiguration.addNewFeatureFlag();
    featureFlag.setKey("turbo");
    featureFlag.setBooleanValue(true);
    Testing testing = newConfiguration.addNewTesting();
    testing.setAnyURI("http://brekka.org/" + RandomStringUtils.randomAlphanumeric(10));
    testing.setBoolean(r.nextBoolean());
    testing.setByte((byte) r.nextInt());
    Calendar cal = Calendar.getInstance();
    testing.setDate(cal);
    testing.setDateTime(cal);
    testing.setDecimal(BigDecimal.valueOf(r.nextDouble()));
    testing.setDouble(r.nextDouble());
    testing.setFloat(r.nextFloat());
    testing.setInt(r.nextInt());
    testing.setInteger(BigInteger.valueOf(r.nextLong()));
    testing.setLanguage("en");
    testing.setLong(r.nextLong());
    testing.setShort((short) r.nextInt());
    testing.setString(RandomStringUtils.randomAlphanumeric(24));
    testing.setTime(cal);
    testing.setUUID(UUID.randomUUID().toString());
    testing.setPeriod(new GDuration("P5Y2M10DT15H"));
    byte[] binary = new byte[32];
    r.nextBytes(binary);
    testing.setBinary(binary);
    TestSupport.write(doc);
    return testing;
}

From source file: org.apache.hadoop.hbase.regionserver.throttle.TestCompactionWithThroughputController.java

private Store prepareData() throws IOException {
    Admin admin = TEST_UTIL.getHBaseAdmin();
    if (admin.tableExists(tableName)) {
        admin.disableTable(tableName);
        admin.deleteTable(tableName);
    }
    Table table = TEST_UTIL.createTable(tableName, family);
    Random rand = new Random();
    for (int i = 0; i < 10; i++) {
        for (int j = 0; j < 10; j++) {
            byte[] value = new byte[128 * 1024];
            rand.nextBytes(value);
            table.put(new Put(Bytes.toBytes(i * 10 + j)).addColumn(family, qualifier, value));
        }
        admin.flush(tableName);
    }
    return getStoreWithName(tableName);
}

From source file: org.apache.hadoop.hdfs.TestRaidDfs.java

public static long createTestFile(FileSystem fileSys, Path name, int repl, long fileSize, long blockSize,
        int seed) throws IOException {
    CRC32 crc = new CRC32();
    Random rand = new Random(seed);
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blockSize);
    LOG.info("create file " + name + " size: " + fileSize + " blockSize: " + blockSize + " repl: " + repl);
    // fill random data into file
    byte[] b = new byte[(int) blockSize];
    long numBlocks = fileSize / blockSize;
    for (int i = 0; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    long lastBlock = fileSize - numBlocks * blockSize;
    if (lastBlock > 0) {
        b = new byte[(int) lastBlock];
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    stm.close();
    return crc.getValue();
}

From source file: org.apache.hadoop.fs.TestLocalFileSystem.java

void testFileCrcInternal(boolean inlineChecksum) throws IOException {
    ((Log4JLogger) HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);

    Random random = new Random(1);

    final long seed = random.nextLong();
    random.setSeed(seed);
    FileSystem fs = FileSystem.getLocal(new Configuration());

    // generate random data
    final byte[] data = new byte[1024 * 1024 + 512 * 7 + 66];
    random.nextBytes(data);

    // write data to a file
    Path foo = new Path(TEST_ROOT_DIR, "foo_" + inlineChecksum);
    {
        final FSDataOutputStream out = fs.create(foo, false, 512, (short) 2, 512);
        out.write(data);
        out.close();
    }

    // compute data CRC
    DataChecksum checksum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 1);
    checksum.update(data, 0, data.length);

    // compute checksum
    final int crc = fs.getFileCrc(foo);
    System.out.println("crc=" + crc);

    TestCase.assertEquals((int) checksum.getValue(), crc);
}

From source file: org.apache.jackrabbit.core.data.ConcurrentGcTest.java

void doTest(DataStore store) throws Exception {
    this.store = store;

    Random r = new Random();

    concurrentGcLoopStart();

    int len = 100;
    if (getTestScale() > 1) {
        len = 1000;
    }

    for (int i = 0; i < len && gcException == null; i++) {
        LOG.info("test " + i);
        byte[] data = new byte[3];
        r.nextBytes(data);
        DataRecord rec = store.addRecord(new ByteArrayInputStream(data));
        LOG.debug("  added " + rec.getIdentifier());
        if (r.nextBoolean()) {
            LOG.debug("  added " + rec.getIdentifier() + " -> keep reference");
            ids.add(rec.getIdentifier());
            store.getRecord(rec.getIdentifier());
        }
        if (r.nextInt(100) == 0) {
            LOG.debug("clear i: " + i);
            ids.clear();
        }
    }
    concurrentGcLoopStop();
    store.close();
}

From source file: org.apache.hadoop.hdfs.server.datanode.TestDataNodeRollingUpgrade.java

@Test(timeout = 600000)
// Test DatanodeXceiver has correct peer-dataxceiver pairs for sending OOB message
public void testDatanodePeersXceiver() throws Exception {
    try {
        startCluster();

        // Create files in DFS.
        String testFile1 = "/" + GenericTestUtils.getMethodName() + ".01.dat";
        String testFile2 = "/" + GenericTestUtils.getMethodName() + ".02.dat";
        String testFile3 = "/" + GenericTestUtils.getMethodName() + ".03.dat";

        DFSClient client1 = new DFSClient(NameNode.getAddress(conf), conf);
        DFSClient client2 = new DFSClient(NameNode.getAddress(conf), conf);
        DFSClient client3 = new DFSClient(NameNode.getAddress(conf), conf);

        DFSOutputStream s1 = (DFSOutputStream) client1.create(testFile1, true);
        DFSOutputStream s2 = (DFSOutputStream) client2.create(testFile2, true);
        DFSOutputStream s3 = (DFSOutputStream) client3.create(testFile3, true);

        byte[] toWrite = new byte[1024 * 1024 * 8];
        Random rb = new Random(1111);
        rb.nextBytes(toWrite);
        s1.write(toWrite, 0, 1024 * 1024 * 8);
        s1.flush();
        s2.write(toWrite, 0, 1024 * 1024 * 8);
        s2.flush();
        s3.write(toWrite, 0, 1024 * 1024 * 8);
        s3.flush();

        // the number of open peer connections should match the number of peer-xceiver pairs
        assertTrue(dn0.getXferServer().getNumPeers() == dn0.getXferServer().getNumPeersXceiver());
        s1.close();
        s2.close();
        s3.close();
        assertTrue(dn0.getXferServer().getNumPeers() == dn0.getXferServer().getNumPeersXceiver());
        client1.close();
        client2.close();
        client3.close();
    } finally {
        shutdownCluster();
    }
}

From source file: com.indeed.lsmtree.core.TestImmutableBTreeIndex.java

public void testLargeKeys() throws IOException {

    final TreeMap<String, Long> map = Maps.newTreeMap();
    final Random r = new Random(0);
    final String[] strings = new String[10000];
    for (int i = 0; i < strings.length; i++) {
        final byte[] bytes = new byte[16384];
        r.nextBytes(bytes);
        strings[i] = new String(Base64.encodeBase64(bytes));
    }
    Arrays.sort(strings);
    Iterator<Generation.Entry<String, Long>> iterator = new AbstractIterator<Generation.Entry<String, Long>>() {
        int index = 0;

        @Override
        protected Generation.Entry<String, Long> computeNext() {
            if (index >= strings.length)
                return endOfData();
            final String s = strings[index];
            final long l = r.nextLong();
            index++;
            map.put(s, l);
            return Generation.Entry.create(s, l);
        }
    };
    ImmutableBTreeIndex.Writer.write(tmpDir, iterator, new StringSerializer(), new LongSerializer(), 65536,
            false);
    ImmutableBTreeIndex.Reader<String, Long> index = new ImmutableBTreeIndex.Reader<String, Long>(tmpDir,
            new StringSerializer(), new LongSerializer(), false);
    Iterator<Generation.Entry<String, Long>> it1 = index.iterator();
    Iterator<Map.Entry<String, Long>> it2 = map.entrySet().iterator();
    int i = 0;
    while (it2.hasNext()) {
        i++;
        assertTrue(it1.hasNext());
        Generation.Entry<String, Long> next1 = it1.next();
        Map.Entry<String, Long> next2 = it2.next();
        assertEquals(next1.getKey(), next2.getKey());
        assertEquals(next1.getValue(), next2.getValue());
    }
    assertFalse(it1.hasNext());
}

From source file: org.apache.hadoop.hdfs.TestPread.java

private void datanodeRestartTest(MiniDFSCluster cluster, FileSystem fileSys, Path name) throws IOException {
    // skip this test if using simulated storage since simulated blocks
    // don't survive datanode restarts.
    if (simulatedStorage) {
        return;
    }
    int numBlocks = 1;
    assertTrue(numBlocks <= DFSConfigKeys.DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT);
    byte[] expected = new byte[numBlocks * blockSize];
    Random rand = new Random(seed);
    rand.nextBytes(expected);
    byte[] actual = new byte[numBlocks * blockSize];
    FSDataInputStream stm = fileSys.open(name);
    // read a block and get block locations cached as a result
    stm.readFully(0, actual);
    checkAndEraseData(actual, 0, expected, "Pread Datanode Restart Setup");
    // restart all datanodes. it is expected that they will
    // restart on different ports, hence, cached block locations
    // will no longer work.
    assertTrue(cluster.restartDataNodes());
    cluster.waitActive();
    // verify the block can be read again using the same InputStream 
    // (via re-fetching of block locations from namenode). there is a 
    // 3 sec sleep in chooseDataNode(), which can be shortened for 
    // this test if configurable.
    stm.readFully(0, actual);
    checkAndEraseData(actual, 0, expected, "Pread Datanode Restart Test");
}

From source file: org.apache.hadoop.hdfs.server.namenode.TestFSImageWithSnapshot.java

/** Append a file without closing the output stream */
private HdfsDataOutputStream appendFileWithoutClosing(Path file, int length) throws IOException {
    byte[] toAppend = new byte[length];
    Random random = new Random();
    random.nextBytes(toAppend);
    HdfsDataOutputStream out = (HdfsDataOutputStream) hdfs.append(file);
    out.write(toAppend);
    return out;
}

From source file: net.radai.garbanzo.GarbanzoTest.java

@Test
public void testRoundTrip() throws Exception {
    long seed = System.currentTimeMillis();
    Random random = new Random(seed);
    BeanClass original = new BeanClass();
    original.f1 = "";
    original.f2 = RandomStringUtils.randomAscii(10);
    original.f3 = random.nextDouble();
    original.f4 = null;
    original.f5 = UUID.randomUUID();
    original.f6 = new byte[1 + random.nextInt(10)];
    random.nextBytes(original.f6);
    original.f7 = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
        original.f7.add((long) random.nextInt(10));
    }
    original.f8 = new HashMap<>();
    original.f8.put(Enum1.V1, (short) 7);
    original.f8.put(Enum1.V2, null);
    original.f9 = new ArrayList<>();
    for (int i = 0; i < 3; i++) {
        InnerBeanClass inner = new InnerBeanClass();
        inner.f1 = "bob " + i;
        original.f9.add(inner);
    }
    original.f9.add(null);

    String serialized = Garbanzo.marshal(original);

    BeanClass deserialized = Garbanzo.unmarshall(BeanClass.class, serialized);

    Assert.assertEquals(original, deserialized);
}