Example usage for java.util Random nextBytes

List of usage examples for java.util Random nextBytes

Introduction

On this page you can find example usage of java.util.Random.nextBytes.

Prototype

public void nextBytes(byte[] bytes) 

Source Link

Document

Generates random bytes and places them into a user-supplied byte array.

Usage

From source file:org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2.java

/**
 * Writes a random number of random-sized packets to a fresh DFS file, then
 * reads the file back and verifies the content byte-for-byte.
 *
 * 1. create files with dfs
 * 2. write MIN_N_PACKET to MAX_N_PACKET packets
 * 3. close file
 * 4. open the same file
 * 5. read the bytes and compare results
 *
 * NOTE(review): {@code bytes} and {@code toRead} are not declared in this
 * snippet — presumably fields of the enclosing test class sized at least
 * MAX_N_PACKET * PACKET_SIZE; confirm against the full source file.
 */
private static void writeSeveralPackets(String methodName) throws IOException {
    final Random r = FiTestUtil.RANDOM.get();
    // Random packet count and a random-sized last packet; every earlier
    // packet is exactly PACKET_SIZE bytes.
    final int nPackets = FiTestUtil.nextRandomInt(MIN_N_PACKET, MAX_N_PACKET + 1);
    final int lastPacketSize = FiTestUtil.nextRandomInt(1, PACKET_SIZE + 1);
    final int size = (nPackets - 1) * PACKET_SIZE + lastPacketSize;

    FiTestUtil.LOG.info("size=" + size + ", nPackets=" + nPackets + ", lastPacketSize=" + lastPacketSize);

    // REPLICATION + 2 datanodes so the pipeline can recover from injected faults.
    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(REPLICATION + 2).build();
    final FileSystem dfs = cluster.getFileSystem();
    try {
        final Path p = new Path("/" + methodName + "/foo");
        final FSDataOutputStream out = createFile(dfs, p);

        // Seed a fresh Random so the written data is reproducible from `seed`.
        final long seed = r.nextLong();
        final Random ran = new Random(seed);
        ran.nextBytes(bytes);
        out.write(bytes, 0, size);
        out.close();

        // Read everything back; read() returns 0 once `size` bytes are consumed.
        final FSDataInputStream in = dfs.open(p);
        int totalRead = 0;
        int nRead = 0;
        while ((nRead = in.read(toRead, totalRead, size - totalRead)) > 0) {
            totalRead += nRead;
        }
        Assert.assertEquals("Cannot read file.", size, totalRead);
        for (int i = 0; i < size; i++) {
            Assert.assertTrue("File content differ.", bytes[i] == toRead[i]);
        }
    } finally {
        dfs.close();
        cluster.shutdown();
    }
}

From source file:org.apache.kylin.job.tools.GridTableHBaseBenchmark.java

/** Returns a freshly generated random payload of CELL_SIZE bytes. */
private static byte[] randomBytes() {
    final byte[] payload = new byte[CELL_SIZE];
    new Random().nextBytes(payload);
    return payload;
}

From source file:org.apache.hadoop.dfs.DFSTestUtil.java

/**
 * Creates a file of {@code fileLen} bytes filled with pseudo-random data
 * that is reproducible from {@code seed}.
 *
 * @param fs         target file system
 * @param fileName   path of the file to create (parent dirs are created)
 * @param fileLen    total number of bytes to write
 * @param replFactor replication factor passed to {@code fs.create}
 * @param seed       RNG seed; the same seed regenerates the same content
 * @throws IOException if the parent directories or the file cannot be created
 */
static void createFile(FileSystem fs, Path fileName, long fileLen, short replFactor, long seed)
        throws IOException {
    if (!fs.mkdirs(fileName.getParent())) {
        throw new IOException("Mkdirs failed to create " + fileName.getParent().toString());
    }
    // try-with-resources closes the stream even if a write throws; the
    // original only closed on the success path and leaked the stream on error.
    try (FSDataOutputStream out = fs.create(fileName, replFactor)) {
        byte[] buffer = new byte[1024];
        Random random = new Random(seed);
        long remaining = fileLen;
        while (remaining > 0) {
            random.nextBytes(buffer);
            // Last chunk may be shorter than the 1 KiB buffer.
            int chunk = (int) Math.min(buffer.length, remaining);
            out.write(buffer, 0, chunk);
            remaining -= chunk;
        }
    }
}

From source file:org.apache.flink.runtime.blob.BlobServerCorruptionTest.java

/**
 * Checks the GET operation fails when the downloaded file (from HA store)
 * is corrupt, i.e. its content's hash does not match the {@link BlobKey}'s hash.
 *
 * @param config
 *       blob server configuration (including HA settings like {@link HighAvailabilityOptions#HA_STORAGE_PATH}
 *       and {@link HighAvailabilityOptions#HA_CLUSTER_ID}) used to set up <tt>blobStore</tt>
 * @param blobStore
 *       shared HA blob store to use
 * @param expectedException
 *       expected exception rule to use
 */
public static void testGetFailsFromCorruptFile(Configuration config, BlobStore blobStore,
        ExpectedException expectedException) throws IOException {

    Random rnd = new Random();
    JobID jobId = new JobID();

    // try-with-resources shuts the server down even if the test body throws.
    try (BlobServer server = new BlobServer(config, blobStore)) {

        server.start();

        // 2 MB of random payload — large enough to force a real transfer.
        byte[] data = new byte[2000000];
        rnd.nextBytes(data);

        // put content addressable (like libraries)
        BlobKey key = put(server, jobId, data, PERMANENT_BLOB);
        assertNotNull(key);

        // delete local file to make sure that the GET requests downloads from HA
        File blobFile = server.getStorageLocation(jobId, key);
        assertTrue(blobFile.delete());

        // change HA store file contents to make sure that GET requests fail
        // (flip one bit so the content hash no longer matches `key`)
        byte[] data2 = Arrays.copyOf(data, data.length);
        data2[0] ^= 1;
        File tmpFile = Files.createTempFile("blob", ".jar").toFile();
        try {
            FileUtils.writeByteArrayToFile(tmpFile, data2);
            blobStore.put(tmpFile, jobId, key);
        } finally {
            //noinspection ResultOfMethodCallIgnored
            tmpFile.delete();
        }

        // issue a GET request that fails
        // NOTE: the expectation must be registered before the failing call.
        expectedException.expect(IOException.class);
        expectedException.expectMessage("data corruption");

        get(server, jobId, key);
    }
}

From source file:org.apache.hadoop.fs.CopyFilesBase.java

/**
 * Overwrites the first {@code nupdate} files under {@code topdir} with fresh
 * random content; each file's new content is reproducible from the seed
 * recorded in its {@code MyFile} entry after {@code reset()}.
 *
 * @param fs      file system holding the files
 * @param topdir  root directory containing the files
 * @param files   file descriptors; entries {@code [0, nupdate)} are rewritten
 * @param nupdate number of files to overwrite (must be &lt;= NFILES)
 * @throws IOException if a file is missing or cannot be written
 */
protected static void updateFiles(FileSystem fs, String topdir, MyFile[] files, int nupdate)
        throws IOException {
    assert nupdate <= NFILES;

    Path root = new Path(topdir);

    for (int idx = 0; idx < nupdate; ++idx) {
        Path fPath = new Path(root, files[idx].getName());
        // overwrite file
        assertTrue(fPath.toString() + " does not exist", fs.exists(fPath));
        // try-with-resources closes the stream even if write() throws;
        // the original leaked the stream on a failed write.
        try (FSDataOutputStream out = fs.create(fPath)) {
            files[idx].reset();
            byte[] toWrite = new byte[files[idx].getSize()];
            Random rb = new Random(files[idx].getSeed());
            rb.nextBytes(toWrite);
            out.write(toWrite);
        }
    }
}

From source file:org.apache.hadoop.fs.CopyFilesBase.java

/**
 * Verifies that each file under {@code topdir} contains exactly the
 * pseudo-random bytes regenerated from its recorded seed and size.
 *
 * @param fs           file system holding the files
 * @param topdir       root directory containing the files
 * @param files        expected file descriptors (name, size, seed)
 * @param existingOnly if true, missing files are skipped instead of failing
 * @return true if every checked file matches its expected content
 * @throws IOException on read failure (EOF is rethrown with the path attached)
 */
protected static boolean checkFiles(FileSystem fs, String topdir, MyFile[] files, boolean existingOnly)
        throws IOException {
    Path root = new Path(topdir);

    for (int idx = 0; idx < files.length; idx++) {
        Path fPath = new Path(root, files[idx].getName());
        try {
            // Regenerate the expected content from the recorded seed.
            byte[] toCompare = new byte[files[idx].getSize()];
            Random rb = new Random(files[idx].getSeed());
            rb.nextBytes(toCompare);
            if (!checkContentOfFile(fs, fPath, toCompare)) {
                return false;
            }
        } catch (FileNotFoundException fnfe) {
            if (!existingOnly) {
                throw fnfe;
            }
        } catch (EOFException eofe) {
            // Attach the path and preserve the original exception as the
            // cause (the original discarded it and had a redundant cast).
            EOFException wrapped = new EOFException("Cannot read file " + fPath);
            wrapped.initCause(eofe);
            throw wrapped;
        }
    }

    return true;
}

From source file:org.apache.hadoop.tools.mapred.filechunk.TestCopyChunkMapper.java

/**
 * Appends {@code length} freshly generated random bytes to the file at
 * {@code p} on the test cluster's file system.
 */
private static void appendFile(Path p, int length) throws IOException {
    byte[] payload = new byte[length];
    new Random().nextBytes(payload);
    FSDataOutputStream out = cluster.getFileSystem().append(p);
    try {
        out.write(payload);
    } finally {
        // IOUtils.closeStream is a best-effort close that never throws.
        IOUtils.closeStream(out);
    }
}

From source file:org.syncany.tests.unit.util.TestFileUtil.java

/**
 * Builds a byte array of the given size filled from the supplied
 * random generator.
 */
public static byte[] createArray(int size, Random randomGen) {
    final byte[] buffer = new byte[size];
    randomGen.nextBytes(buffer);
    return buffer;
}

From source file:org.zkoss.poi.ss.util.SheetUtil.java

/**
 * Generates 16 random bytes and returns them encoded as a standard
 * Base64 string (24 characters, "=="-padded).
 *
 * @return Base64 encoding of 16 freshly generated random bytes
 */
public static String base64Random16Bytes() {
    // SecureRandom replaces new Random(new Date().getTime()): a
    // time-seeded Random is predictable, which matters if this value is
    // used as any kind of token; Date.getTime() is also legacy API.
    byte[] bytes = new byte[16];
    new java.security.SecureRandom().nextBytes(bytes);
    // java.util.Base64 (JDK 8+) produces the same standard encoding as
    // commons-codec's Base64.encodeAsString, without the dependency.
    return java.util.Base64.getEncoder().encodeToString(bytes);
}

From source file:org.sonar.server.user.UserUpdater.java

/**
 * Generates a fresh 32-byte random salt, stores its SHA-1 hex digest on the
 * user, and sets the password encrypted against that salt digest.
 */
private static void setEncryptedPassWord(String password, UserDto userDto) {
    byte[] salt = new byte[32];
    Random random = new SecureRandom();
    random.nextBytes(salt);
    // The hex digest of the salt — not the raw salt — is what gets persisted
    // and fed into password encryption.
    final String saltHex = DigestUtils.sha1Hex(salt);
    userDto.setSalt(saltHex);
    userDto.setCryptedPassword(encryptPassword(password, saltHex));
}