Example usage for java.util Random nextBytes

List of usage examples for java.util Random nextBytes

Introduction

On this page you can find example usages of java.util.Random.nextBytes(byte[]).

Prototype

public void nextBytes(byte[] bytes) 

Source Link

Document

Generates random bytes and places them into a user-supplied byte array.

Usage

From source file:org.apache.hadoop.raid.TestDirectoryRaidEncoder.java

/**
 * Writes a "directory file": for each logical file i it writes
 * {@code fileSizes[i] / blockSizes[i]} full blocks of seeded random data
 * (seed {@code seeds[i]}), each zero-padded up to {@code blockSize}, followed
 * by any remaining partial block (zero-padded unless it belongs to the very
 * last logical file).
 *
 * @param fileSys    file system to create the file on
 * @param name       path of the file to create
 * @param repl       replication factor
 * @param fileSizes  logical size of each embedded file; same length as seeds
 * @param blockSizes per-file block size; each must be &lt;= blockSize
 * @param seeds      per-file PRNG seeds so content is reproducible
 * @param blockSize  physical block size used for padding
 * @return the CRC32 checksum of all bytes written (data plus padding)
 * @throws IOException if the file cannot be created or written
 */
private long createDirectoryFile(FileSystem fileSys, Path name, int repl, long[] fileSizes, long[] blockSizes,
        int[] seeds, long blockSize) throws IOException {
    CRC32 crc = new CRC32();
    assert fileSizes.length == blockSizes.length;
    assert fileSizes.length == seeds.length;
    // Java arrays are zero-initialized, so no explicit fill loop is needed.
    byte[] zeros = new byte[(int) blockSize];
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blockSize);
    try {
        // fill random data into file
        for (int i = 0; i < fileSizes.length; i++) {
            assert blockSizes[i] <= blockSize;
            byte[] b = new byte[(int) blockSizes[i]];
            long numBlocks = fileSizes[i] / blockSizes[i];
            Random rand = new Random(seeds[i]);
            for (int j = 0; j < numBlocks; j++) {
                rand.nextBytes(b);
                stm.write(b);
                crc.update(b);
                // Pad each data block out to the physical block size.
                int zeroLen = (int) (blockSize - blockSizes[i]);
                stm.write(zeros, 0, zeroLen);
                crc.update(zeros, 0, zeroLen);
            }
            long lastBlock = fileSizes[i] - numBlocks * blockSizes[i];
            if (lastBlock > 0) {
                b = new byte[(int) lastBlock];
                rand.nextBytes(b);
                stm.write(b);
                crc.update(b);
                if (i + 1 < fileSizes.length) {
                    // Not last block of file, write zero
                    int zeroLen = (int) (blockSize - lastBlock);
                    stm.write(zeros, 0, zeroLen);
                    crc.update(zeros, 0, zeroLen);
                }
            }
        }
    } finally {
        // Close even if a write failed, so the lease/handle is released.
        stm.close();
    }
    return crc.getValue();
}

From source file:org.apache.jackrabbit.oak.spi.blob.AbstractBlobStoreTest.java

/**
 * Writes 4 MiB of seeded random data to a temp file, stores it as a blob,
 * then spot-checks every 1024th byte via {@code readBlob}. Finally verifies
 * that writing a blob from a nonexistent file fails.
 */
@Test
public void testWriteFile() throws Exception {
    store.setBlockSize(1024 * 1024);
    byte[] data = new byte[4 * 1024 * 1024];
    Random r = new Random(0); // fixed seed keeps the test deterministic
    r.nextBytes(data);
    String tempFileName = "target/temp/test";
    File tempFile = new File(tempFileName);
    tempFile.getParentFile().mkdirs();
    // try-with-resources guarantees the stream is closed even if write() fails.
    try (OutputStream out = new FileOutputStream(tempFile, false)) {
        out.write(data);
    }
    String s = store.writeBlob(tempFileName);
    assertEquals(data.length, store.getBlobLength(s));
    byte[] buff = new byte[1];
    for (int i = 0; i < data.length; i += 1024) {
        store.readBlob(s, i, buff, 0, 1);
        assertEquals(data[i], buff[0]);
    }
    try {
        store.writeBlob(tempFileName + "_wrong");
        fail();
    } catch (Exception e) {
        // expected: storing a blob from a missing file must fail
    }
}

From source file:org.ejbca.core.protocol.ocsp.OCSPUnidClient.java

/**
 * @param keystore KeyStore client keystore used to authenticate TLS client authentication, or null if TLS is not used
 * @param pwd String password for the key store, or null if no keystore is used
 * @param ocspurl String url to the OCSP server, or null if we should try to use the AIA extension from the cert; e.g. http://127.0.0.1:8080/ejbca/publicweb/status/ocsp (or https for TLS)
 * @param certs certificate chain to signing key
 * @param _signKey signing key
 * @param getfnr true if FNR should be fetched
 * @throws NoSuchAlgorithmException
 * @throws IOException if ASN1 parsing error occurs
 */
private OCSPUnidClient(KeyStore keystore, String pwd, String ocspurl, Certificate[] certs, PrivateKey _signKey,
        boolean getfnr) throws NoSuchAlgorithmException, IOException {
    this.httpReqPath = ocspurl;
    this.passphrase = pwd;
    this.ks = keystore;
    this.signKey = _signKey;
    this.certChain = certs != null ? Arrays.asList(certs).toArray(new X509Certificate[0]) : null;
    this.nonce = new byte[16];
    {
        List<Extension> extensionList = new ArrayList<Extension>();
        // The OCSP nonce exists to prevent replay attacks, so it must be
        // unpredictable: use SecureRandom, never java.util.Random.
        // (SecureRandom extends Random, so the declared type is unchanged.)
        final Random randomSource = new java.security.SecureRandom();
        randomSource.nextBytes(nonce);
        extensionList
                .add(new Extension(OCSPObjectIdentifiers.id_pkix_ocsp_nonce, false, new DEROctetString(nonce)));
        // Don't bother adding Unid extension if we are not using client authentication
        if (getfnr) {
            extensionList.add(new Extension(FnrFromUnidExtension.FnrFromUnidOid, false,
                    new DEROctetString(new FnrFromUnidExtension("1"))));
        }
        extensions = new Extensions(extensionList.toArray(new Extension[extensionList.size()]));
    }
    CryptoProviderTools.installBCProviderIfNotAvailable();
}

From source file:org.apache.hadoop.hdfs.TestPread.java

/**
 * Reads back a 12-block file both sequentially and via positional reads
 * (preads), verifying the bytes against the expected content at every step.
 * Interleaves preads with normal reads to check that positional reads do not
 * disturb the stream position, re-opens the file to exercise block-location
 * caching, and finally confirms that reading past EOF throws IOException.
 *
 * @param fileSys the file system holding the file
 * @param name    path of the file to read; assumed to be 12 * blockSize bytes long
 * @throws IOException on any unexpected read failure
 */
private void pReadFile(FileSystem fileSys, Path name) throws IOException {
    FSDataInputStream stm = fileSys.open(name);
    byte[] expected = new byte[12 * blockSize];
    if (simulatedStorage) {
        // Simulated datanodes serve a fixed byte value instead of real data.
        for (int i = 0; i < expected.length; i++) {
            expected[i] = SimulatedFSDataset.DEFAULT_DATABYTE;
        }
    } else {
        // Regenerate the file's content from the same seed used when writing it.
        Random rand = new Random(seed);
        rand.nextBytes(expected);
    }
    // do a sanity check. Read first 4K bytes
    byte[] actual = new byte[4096];
    stm.readFully(actual);
    checkAndEraseData(actual, 0, expected, "Read Sanity Test");
    // now do a pread for the first 8K bytes
    actual = new byte[8192];
    doPread(stm, 0L, actual, 0, 8192);
    checkAndEraseData(actual, 0, expected, "Pread Test 1");
    // Now check to see if the normal read returns 4K-8K byte range
    // (i.e. the pread above must not have moved the stream position).
    actual = new byte[4096];
    stm.readFully(actual);
    checkAndEraseData(actual, 4096, expected, "Pread Test 2");
    // Now see if we can cross a single block boundary successfully
    // read 4K bytes from blockSize - 2K offset
    stm.readFully(blockSize - 2048, actual, 0, 4096);
    checkAndEraseData(actual, (blockSize - 2048), expected, "Pread Test 3");
    // now see if we can cross two block boundaries successfully
    // read blockSize + 4K bytes from blockSize - 2K offset
    actual = new byte[blockSize + 4096];
    stm.readFully(blockSize - 2048, actual);
    checkAndEraseData(actual, (blockSize - 2048), expected, "Pread Test 4");
    // now see if we can cross two block boundaries that are not cached
    // read blockSize + 4K bytes from 10*blockSize - 2K offset
    actual = new byte[blockSize + 4096];
    stm.readFully(10 * blockSize - 2048, actual);
    checkAndEraseData(actual, (10 * blockSize - 2048), expected, "Pread Test 5");
    // now check that even after all these preads, we can still read
    // bytes 8K-12K
    actual = new byte[4096];
    stm.readFully(actual);
    checkAndEraseData(actual, 8192, expected, "Pread Test 6");
    // done
    stm.close();
    // check block location caching
    stm = fileSys.open(name);
    stm.readFully(1, actual, 0, 4096);
    stm.readFully(4 * blockSize, actual, 0, 4096);
    stm.readFully(7 * blockSize, actual, 0, 4096);
    actual = new byte[3 * 4096];
    stm.readFully(0 * blockSize, actual, 0, 3 * 4096);
    checkAndEraseData(actual, 0, expected, "Pread Test 7");
    actual = new byte[8 * 4096];
    stm.readFully(3 * blockSize, actual, 0, 8 * 4096);
    checkAndEraseData(actual, 3 * blockSize, expected, "Pread Test 8");
    // read the tail
    stm.readFully(11 * blockSize + blockSize / 2, actual, 0, blockSize / 2);
    IOException res = null;
    try { // read beyond the end of the file
        stm.readFully(11 * blockSize + blockSize / 2, actual, 0, blockSize);
    } catch (IOException e) {
        // should throw an exception
        res = e;
    }
    assertTrue("Error reading beyond file boundary.", res != null);

    stm.close();
}

From source file:org.apache.jackrabbit.oak.spi.blob.AbstractBlobStoreTest.java

/**
 * Verifies that a blob id survives the id -> reference -> id round trip
 * once a 256-byte reference key has been installed on the store.
 */
@Test
public void testReference() throws Exception {
    // Reference round-tripping only applies to AbstractBlobStore implementations.
    assumeThat(store, instanceOf(AbstractBlobStore.class));

    Random random = new Random();
    byte[] referenceKey = new byte[256];
    random.nextBytes(referenceKey);
    ((AbstractBlobStore) store).setReferenceKey(referenceKey);

    byte[] content = new byte[1000];
    random.nextBytes(content);
    String storedId = store.writeBlob(new ByteArrayInputStream(content));

    String reference = store.getReference(storedId);
    String resolvedId = store.getBlobId(reference);
    assertEquals(storedId, resolvedId);
}

From source file:org.apache.hadoop.hbase.regionserver.wal.HLogPerformanceEvaluation.java

/**
 * Builds a Put with a freshly randomized row key, containing one random
 * value per (family, qualifier) pair. Both {@code key} and {@code value}
 * are overwritten in place with random bytes.
 */
private Put setupPut(Random rand, byte[] key, byte[] value, final int numFamilies) {
    rand.nextBytes(key);
    Put put = new Put(key);
    for (int family = 0; family < numFamilies; ++family) {
        // Family name is invariant across qualifiers; convert it once.
        byte[] familyName = Bytes.toBytes(FAMILY_PREFIX + family);
        for (int qualifier = 0; qualifier < numQualifiers; ++qualifier) {
            rand.nextBytes(value);
            put.add(familyName, Bytes.toBytes(QUALIFIER_PREFIX + qualifier), value);
        }
    }
    return put;
}

From source file:org.apache.hadoop.hdfs.server.namenode.TestNameEditsConfigs.java

/**
 * Creates a file of FILE_SIZE bytes filled with deterministic (SEED-based)
 * random data at the given replication level.
 *
 * @param fileSys file system to create the file on
 * @param name    path of the file to create (overwritten if present)
 * @param repl    replication factor
 * @throws IOException if the file cannot be created or written
 */
private void writeFile(FileSystem fileSys, Path name, int repl) throws IOException {
    byte[] buffer = new byte[FILE_SIZE];
    // Fixed seed makes the file content reproducible for later verification.
    Random rand = new Random(SEED);
    rand.nextBytes(buffer);
    // try-with-resources: the stream is closed even if write() throws.
    try (FSDataOutputStream stm = fileSys.create(name, true,
            fileSys.getConf().getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096), (short) repl,
            BLOCK_SIZE)) {
        stm.write(buffer);
    }
}

From source file:org.apache.hadoop.dynamodb.DynamoDBItemWritableTest.java

/**
 * Round-trips DynamoDB items containing binary attributes through Gson JSON
 * serialization many times and verifies nothing is lost or corrupted.
 */
@Test
public void testBinarySerialization() {
    final int iterations = 1000;
    final int bufferCount = 50;
    final int bufferLength = 1024;

    Random random = new Random();
    Gson gson = DynamoDBUtil.getGson();
    Type type = new TypeToken<Map<String, AttributeValue>>() {
    }.getType();

    // Pool of random binary payloads that the items below draw from.
    List<ByteBuffer> pool = new ArrayList<>();
    for (int i = 0; i < bufferCount; i++) {
        byte[] payload = new byte[bufferLength];
        random.nextBytes(payload);
        pool.add(ByteBuffer.wrap(payload));
    }

    for (int i = 0; i < iterations; i++) {
        Map<String, AttributeValue> item = new HashMap<>();
        item.put("hash", new AttributeValue().withB(pool.get(random.nextInt(bufferCount))));
        item.put("range", new AttributeValue().withB(pool.get(random.nextInt(bufferCount))));
        item.put("list", new AttributeValue().withBS(pool.get(random.nextInt(bufferCount)),
                pool.get(random.nextInt(bufferCount))));

        // Serialize to JSON, parse back, and verify the item is unchanged.
        Map<String, AttributeValue> roundTripped = gson.fromJson(gson.toJson(item, type), type);
        compare(item, roundTripped);
    }
}

From source file:com.blockwithme.longdb.test.util.JSONUtil.java

/** Generate random byte array.
 * /* w  w  w  .  j av a 2 s . c  o m*/
 * @param theBlobsize
 *        the blobsize
 * @return the byte[] */
private byte[] generateRandomByteArray(final int theBlobsize) {
    final byte[] bytesBuffer = new byte[theBlobsize];
    final Random rnd = new Random();
    rnd.nextBytes(bytesBuffer);
    return bytesBuffer;
}

From source file:org.apache.hadoop.hbase.util.TestFSUtils.java

/**
 * Reads back a 12-block file both sequentially and via positional reads
 * (preads), verifying the bytes against the seeded expected content at every
 * step. Interleaves preads with normal reads to check that positional reads
 * do not disturb the stream position, re-opens the file to exercise
 * block-location caching, and finally confirms that reading past EOF throws
 * IOException.
 *
 * @param fileSys the file system holding the file
 * @param name    path of the file to read; assumed to be 12 * blockSize bytes long
 * @throws IOException on any unexpected read failure
 */
private void pReadFile(FileSystem fileSys, Path name) throws IOException {
    FSDataInputStream stm = fileSys.open(name);
    byte[] expected = new byte[12 * blockSize];
    // Regenerate the file's content from the same seed used when writing it.
    Random rand = new Random(seed);
    rand.nextBytes(expected);
    // do a sanity check. Read first 4K bytes
    byte[] actual = new byte[4096];
    stm.readFully(actual);
    checkAndEraseData(actual, 0, expected, "Read Sanity Test");
    // now do a pread for the first 8K bytes
    actual = new byte[8192];
    doPread(stm, 0L, actual, 0, 8192);
    checkAndEraseData(actual, 0, expected, "Pread Test 1");
    // Now check to see if the normal read returns 4K-8K byte range
    // (i.e. the pread above must not have moved the stream position).
    actual = new byte[4096];
    stm.readFully(actual);
    checkAndEraseData(actual, 4096, expected, "Pread Test 2");
    // Now see if we can cross a single block boundary successfully
    // read 4K bytes from blockSize - 2K offset
    stm.readFully(blockSize - 2048, actual, 0, 4096);
    checkAndEraseData(actual, (blockSize - 2048), expected, "Pread Test 3");
    // now see if we can cross two block boundaries successfully
    // read blockSize + 4K bytes from blockSize - 2K offset
    actual = new byte[blockSize + 4096];
    stm.readFully(blockSize - 2048, actual);
    checkAndEraseData(actual, (blockSize - 2048), expected, "Pread Test 4");
    // now see if we can cross two block boundaries that are not cached
    // read blockSize + 4K bytes from 10*blockSize - 2K offset
    actual = new byte[blockSize + 4096];
    stm.readFully(10 * blockSize - 2048, actual);
    checkAndEraseData(actual, (10 * blockSize - 2048), expected, "Pread Test 5");
    // now check that even after all these preads, we can still read
    // bytes 8K-12K
    actual = new byte[4096];
    stm.readFully(actual);
    checkAndEraseData(actual, 8192, expected, "Pread Test 6");
    // done
    stm.close();
    // check block location caching
    stm = fileSys.open(name);
    stm.readFully(1, actual, 0, 4096);
    stm.readFully(4 * blockSize, actual, 0, 4096);
    stm.readFully(7 * blockSize, actual, 0, 4096);
    actual = new byte[3 * 4096];
    stm.readFully(0 * blockSize, actual, 0, 3 * 4096);
    checkAndEraseData(actual, 0, expected, "Pread Test 7");
    actual = new byte[8 * 4096];
    stm.readFully(3 * blockSize, actual, 0, 8 * 4096);
    checkAndEraseData(actual, 3 * blockSize, expected, "Pread Test 8");
    // read the tail
    stm.readFully(11 * blockSize + blockSize / 2, actual, 0, blockSize / 2);
    IOException res = null;
    try { // read beyond the end of the file
        stm.readFully(11 * blockSize + blockSize / 2, actual, 0, blockSize);
    } catch (IOException e) {
        // should throw an exception
        res = e;
    }
    assertTrue("Error reading beyond file boundary.", res != null);

    stm.close();
}