Example usage for java.util Random nextBytes

List of usage examples for java.util Random nextBytes

Introduction

On this page you can find an example of usage for java.util Random nextBytes.

Prototype

public void nextBytes(byte[] bytes) 

Source Link

Document

Generates random bytes and places them into a user-supplied byte array.

Usage

From source file:org.sakaiproject.content.impl.db.test.CheckBlobSafety.java

@Test
public void testBlob() {
    try {
        Random r = new Random();
        int blockSize = 4095; // use an odd size to get byte boundaries
        int nblocks = 512;
        int maxSize = blockSize * nblocks;
        byte[] b = new byte[maxSize];
        byte[] bin = new byte[maxSize];
        log.info("Loading Random Data " + maxSize);
        r.nextBytes(b);
        log.info("Loaded Random Data");

        log.info("Got Connection");
        PreparedStatement pstout = null;
        PreparedStatement pstin = null;
        InputStream instream = null;
        ResultSet rs = null;
        try {
            pstout = con.prepareStatement(p.getProperty("insert.statement"));
            pstin = con.prepareStatement(p.getProperty("select.statement"));
            // NOTE(review): the loop header advances i by 5 and the body adds
            // 1 more, giving an effective stride of 6; the read loop below
            // mirrors the same pattern so record ids match up.
            for (int i = 1; i < nblocks; i += 5) {
                int size = blockSize * i;
                pstout.clearParameters();

                pstout.setBinaryStream(1, new ByteArrayInputStream(b), size);
                pstout.setInt(2, i);
                pstout.executeUpdate();
                log.info("Loaded record  " + i + " of size " + (size) + " bytes");
                con.commit();
                i++;
            }
            for (int i = 1; i < nblocks; i += 5) {
                int size = blockSize * i;
                pstin.clearParameters();
                pstin.setInt(1, i);
                rs = pstin.executeQuery();
                if (rs.next()) {
                    instream = rs.getBinaryStream(1);
                    DataInputStream din = new DataInputStream(instream);
                    din.readFully(bin, 0, size);
                    for (int j = 0; j < size; j++) {
                        Assert.assertEquals("Byte Missmatch record " + i + " offset " + j, b[j], bin[j]);
                    }
                    log.info("Checked Record " + i + " of size " + size + " bytes");
                    din.close();
                    instream.close();
                    rs.close();
                    i++;
                } else {
                    // FIX: was Assert.assertEquals(msg, true, false) — fail()
                    // states the intent directly.
                    Assert.fail("Didnt get any record at " + i);
                }
                con.commit();
            }
        } finally {
            // FIX: guard every close with a null check so a failure in
            // prepareStatement() (leaving these references null) cannot throw
            // a NullPointerException out of this finally block and mask the
            // original exception.
            if (rs != null) {
                try {
                    rs.close();
                } catch (Exception ignored) {
                    // best-effort cleanup
                }
            }
            if (instream != null) {
                try {
                    instream.close();
                } catch (Exception ignored) {
                    // best-effort cleanup
                }
            }
            if (pstin != null) {
                try {
                    pstin.close();
                } catch (SQLException ignored) {
                    // best-effort cleanup
                }
            }
            if (pstout != null) {
                try {
                    pstout.close();
                } catch (SQLException ignored) {
                    // best-effort cleanup
                }
            }
        }
    } catch (Exception ex) {
        log.error("Failed ", ex);
    }

}

From source file:svnserver.TemporaryOutputStreamTest.java

@SuppressWarnings("OverlyLongMethod")
@Test(dataProvider = "providerReadWrite")
// Round-trip test: writes totalSize random bytes to a TemporaryOutputStream
// (byte by byte when blockSize == 0, otherwise in random-sized slices of a
// blockSize buffer), mirrors every write into an in-memory expected stream,
// then reads everything back and asserts the contents match.
public void checkReadWrite(int blockSize, int totalSize) throws IOException {
    final ByteArrayOutputStream expectedStream = new ByteArrayOutputStream();
    try (final TemporaryOutputStream outputStream = new TemporaryOutputStream(MAX_MEMORY_SIZE)) {
        // Fixed seed: the generated data is deterministic per run.
        final Random random = new Random(0);
        int writeSize = 0;
        while (writeSize < totalSize) {
            if (blockSize == 0) {
                // Single-byte write path.
                final byte data = (byte) random.nextInt();
                outputStream.write(data);
                expectedStream.write(data);
                writeSize++;
            } else {
                // Block write path: random offset and count within the buffer.
                // NOTE(review): nextInt(blockSize - 1) requires blockSize >= 2
                // here — confirm the data provider never supplies blockSize == 1.
                final byte[] data = new byte[blockSize];
                random.nextBytes(data);
                final int offset = random.nextInt(blockSize - 1);
                final int count = Math.min(random.nextInt(blockSize - offset - 1) + 1, totalSize - writeSize);
                outputStream.write(data, offset, count);
                expectedStream.write(data, offset, count);
                writeSize += count;
            }
        }
        // The stream must spill to a temp file exactly when the payload
        // exceeds the in-memory limit.
        Assert.assertEquals(outputStream.tempFile() == null, totalSize <= MAX_MEMORY_SIZE);
        Assert.assertEquals(expectedStream.size(), totalSize);
        Assert.assertEquals(outputStream.size(), totalSize);

        final ByteArrayOutputStream actualStream = new ByteArrayOutputStream();
        //noinspection NestedTryStatement
        try (final InputStream inputStream = outputStream.toInputStream()) {
            int readSize = 0;
            while (true) {
                Assert.assertTrue(readSize <= totalSize);
                if (blockSize == 0) {
                    // Single-byte read path; -1 signals end of stream.
                    final int data = inputStream.read();
                    if (data < 0)
                        break;
                    actualStream.write(data);
                    readSize++;
                } else {
                    // Block read path with random offset/count, mirroring the
                    // write phase's slicing.
                    final byte[] data = new byte[blockSize];
                    final int offset = random.nextInt(blockSize - 1);
                    final int count = random.nextInt(blockSize - offset - 1) + 1;
                    final int size = inputStream.read(data, offset, count);
                    // read() must never return 0 for a positive count.
                    Assert.assertTrue(size != 0);
                    if (size < 0) {
                        break;
                    }
                    actualStream.write(data, offset, size);
                    readSize += size;
                }
            }
            Assert.assertEquals(readSize, totalSize);
        }
        Assert.assertEquals(actualStream.size(), totalSize);

        // Byte-for-byte comparison of what was read vs what was written.
        ArrayAsserts.assertArrayEquals(actualStream.toByteArray(), expectedStream.toByteArray());
    }
}

From source file:org.apache.hadoop.hbase.quotas.TestRegionSizeUse.java

/**
 * Writes at least {@code sizeInBytes} bytes of data to HBase and returns the TableName used.
 *
 * @param sizeInBytes The amount of data to write in bytes.
 * @return The table the data was written to
 */
private TableName writeData(long sizeInBytes) throws IOException {
    final Connection conn = TEST_UTIL.getConnection();
    final Admin admin = TEST_UTIL.getAdmin();
    // The table is named after the currently running test method.
    final TableName tn = TableName.valueOf(testName.getMethodName());

    // Delete the old table
    if (admin.tableExists(tn)) {
        admin.disableTable(tn);
        admin.deleteTable(tn);
    }

    // Create the table, pre-split into NUM_SPLITS regions over keys "1".."9".
    HTableDescriptor tableDesc = new HTableDescriptor(tn);
    tableDesc.addFamily(new HColumnDescriptor(F1));
    admin.createTable(tableDesc, Bytes.toBytes("1"), Bytes.toBytes("9"), NUM_SPLITS);

    final Table table = conn.getTable(tn);
    try {
        List<Put> updates = new ArrayList<>();
        long bytesToWrite = sizeInBytes;
        long rowKeyId = 0L;
        final StringBuilder sb = new StringBuilder();
        final Random r = new Random();
        while (bytesToWrite > 0L) {
            sb.setLength(0);
            sb.append(Long.toString(rowKeyId));
            // Use the reverse counter as the rowKey to get even spread across all regions
            Put p = new Put(Bytes.toBytes(sb.reverse().toString()));
            byte[] value = new byte[SIZE_PER_VALUE];
            r.nextBytes(value);
            p.addColumn(Bytes.toBytes(F1), Bytes.toBytes("q1"), value);
            updates.add(p);

            // Batch 50K worth of updates
            // NOTE(review): flushes after 51 puts (size() > 50); the "50K"
            // figure presumably assumes SIZE_PER_VALUE ~ 1 KB — confirm.
            if (updates.size() > 50) {
                table.put(updates);
                updates.clear();
            }

            // Just count the value size, ignore the size of rowkey + column
            bytesToWrite -= SIZE_PER_VALUE;
            rowKeyId++;
        }

        // Write the final batch
        if (!updates.isEmpty()) {
            table.put(updates);
        }

        return tn;
    } finally {
        table.close();
    }
}

From source file:com.k42b3.neodym.oauth.Oauth.java

/**
 * Generates a random nonce for OAuth request signing, returned as an MD5 hex
 * string of 32 cryptographically strong random bytes.
 */
private String getNonce() {
    try {
        byte[] nonce = new byte[32];

        Random rand = SecureRandom.getInstance("SHA1PRNG");

        rand.nextBytes(nonce);

        // BUG FIX: the original hashed rand.toString() — the PRNG object's
        // identity string — which discarded the generated random bytes
        // entirely and made the nonce nearly constant per Random instance.
        // Hash the nonce bytes themselves instead.
        return DigestUtils.md5Hex(nonce);
    } catch (Exception e) {
        // Fallback when SHA1PRNG is unavailable: a weak, time-based nonce.
        return DigestUtils.md5Hex("" + System.currentTimeMillis());
    }
}

From source file:org.apache.jackrabbit.oak.spi.blob.AbstractBlobStoreTest.java

// Stores count pairs of identical random blobs (lengths drawn from
// [0, maxLength)), then resets the seed and regenerates the same sizes and
// contents to verify each stored blob's length and data.
private void doTest(int maxLength, int count) throws Exception {
    final String[] ids = new String[count * 2];
    final Random rnd = new Random(0);
    int idx = 0;
    while (idx < ids.length) {
        final byte[] payload = new byte[rnd.nextInt(maxLength)];
        rnd.nextBytes(payload);
        // Write the same payload twice; the store may deduplicate.
        ids[idx++] = store.writeBlob(new ByteArrayInputStream(payload));
        ids[idx++] = store.writeBlob(new ByteArrayInputStream(payload));
    }
    // Re-seed so the identical sequence of lengths and bytes is regenerated.
    rnd.setSeed(0);
    idx = 0;
    while (idx < ids.length) {
        final int wantLen = rnd.nextInt(maxLength);
        final byte[] wantData = new byte[wantLen];
        rnd.nextBytes(wantData);
        assertEquals(wantLen, store.getBlobLength(ids[idx++]));

        doTestRead(wantData, wantLen, ids[idx++]);
    }
}

From source file:com.nginious.http.websocket.WebSocketTestCase.java

// Returns a freshly allocated buffer of the requested length filled with
// random data.
private byte[] generateRandomBytes(int len) {
    final byte[] buffer = new byte[len];
    new Random().nextBytes(buffer);
    return buffer;
}

From source file:richtercloud.document.scanner.it.BlobStorageIT.java

@Test
@Ignore
//fails due to
//```
//[EL Warning]: 2017-07-31 00:11:40.03--UnitOfWork(178575564)--Exception [EclipseLink-32] (Eclipse Persistence Services - 2.6.4.v20160829-44060b6): org.eclipse.persistence.exceptions.DescriptorException
//Exception Description: Trying to set value [[B@44550792] for instance variable [data] of type [java.sql.Blob] in the object.  The specified object is not an instance of the class or interface declaring the underlying field, or an unwrapping conversion has failed.
//Internal Exception: java.lang.IllegalArgumentException: Can not set java.sql.Blob field richtercloud.document.scanner.it.entities.EntityBlob.data to [B
//Mapping: org.eclipse.persistence.mappings.DirectToFieldMapping[data-->ENTITYBLOB.DATA]
//Descriptor: RelationalDescriptor(richtercloud.document.scanner.it.entities.EntityBlob --> [DatabaseTable(ENTITYBLOB)])
//```
// Stores 20 entities with random Blob payloads (up to 256 MB each) into a
// MySQL-backed PersistenceStorage, restarts the storage, and queries the
// entities back to check persistence across application starts.
public void testBlobStorage() throws IOException, SQLException, StorageConfValidationException,
        StorageCreationException, InterruptedException, StorageException, FieldOrderValidationException {
    LOGGER.info("testBlobStorage");
    PersistenceStorage<Long> storage = null;
    try {
        IssueHandler issueHandler = new LoggerIssueHandler(LOGGER);
        Set<Class<?>> entityClasses = new HashSet<>(Arrays.asList(EntityImageWrapper.class));
        File databaseDir = Files.createTempDirectory("document-scanner-blob-it").toFile();
        FileUtils.forceDelete(databaseDir);
        //databaseDir mustn't exist for MySQL
        File schemeChecksumFile = File.createTempFile("document-scanner-blob-it", null);
        schemeChecksumFile.delete();
        String persistenceUnitName = "document-scanner-it";
        String username = "document-scanner";
        String password = "document-scanner";
        String databaseName = "document-scanner";
        //Testing PostgreSQL doesn't make sense because it doesn't implement
        //java.sql.Connection.createBlob (see persistence.xml in
        //document-scanner)
        //        PostgresqlAutoPersistenceStorageConf storageConf = new PostgresqlAutoPersistenceStorageConf(entityClasses,
        //                username,
        //                schemeChecksumFile,
        //                databaseDir.getAbsolutePath());
        //Apache Derby is extremely slow
        //        DerbyEmbeddedPersistenceStorageConf storageConf = new DerbyEmbeddedPersistenceStorageConf(entityClasses,
        //                databaseName,
        //                schemeChecksumFile);
        File myCnfFile = File.createTempFile("document-scanner-it-blob-it", null);
        myCnfFile.delete(); //need to delete in order to trigger creation of
        //my.cnf
        MySQLAutoPersistenceStorageConf storageConf = new MySQLAutoPersistenceStorageConf(entityClasses,
                "localhost", //hostname
                username, databaseName, databaseDir.getAbsolutePath(), schemeChecksumFile);
        storageConf.setBaseDir(
                new File(DocumentScannerConf.CONFIG_DIR_DEFAULT, "mysql-5.7.16-linux-glibc2.5-x86_64")
                        .getAbsolutePath());
        storageConf.setMyCnfFilePath(myCnfFile.getAbsolutePath());
        FieldRetriever fieldRetriever = new JPAOrderedCachedFieldRetriever(
                Constants.QUERYABLE_AND_EMBEDDABLE_CLASSES);
        storage = new MySQLAutoPersistenceStorage(storageConf, persistenceUnitName, 1, //parallelQueryCount
                issueHandler, fieldRetriever);
        storage.start();
        // Log the seed so a failing run can be reproduced.
        long randomSeed = System.currentTimeMillis();
        LOGGER.debug(String.format("random seed is %d", randomSeed));
        Random random = new Random(randomSeed);
        int entityCount = 20;
        for (int i = 0; i < entityCount; i++) {
            // NOTE(review): nextInt(256) can return 0, producing an empty
            // blob — presumably acceptable for this test; confirm.
            int mbSize = random.nextInt(256); //256 MB max.
            int byteCount = 1024 * 1024 * mbSize;
            LOGGER.debug(String.format("generating %d MB random bytes", mbSize));
            byte[] largeRandomBytes = new byte[byteCount];
            random.nextBytes(largeRandomBytes);
            EntityManager entityManager = storage.retrieveEntityManager();
            entityManager.getTransaction().begin();
            // Stream the random bytes into a JDBC Blob created on the
            // underlying connection.
            Blob blob = entityManager.unwrap(Connection.class).createBlob();
            OutputStream blobOutputStream = blob.setBinaryStream(1 //pos (begin
            //at 1)
            );
            ByteArrayInputStream largeRandomBytesInputStream = new ByteArrayInputStream(largeRandomBytes);
            IOUtils.copy(largeRandomBytesInputStream, blobOutputStream);
            EntityBlob entity1 = new EntityBlob(blob);
            LOGGER.debug(String.format("storing large binary entity (%d of %d)", i, entityCount));
            // NOTE(review): storage.store is invoked while the transaction
            // begun above is open, then committed explicitly — presumably
            // store() does not commit on its own; verify.
            storage.store(entity1);
            entityManager.getTransaction().commit();
        }
        //shutdown and restart storage in order to simulate persistence
        //across application starts
        storage.shutdown();
        Thread.sleep(5000);
        //workaround for remaining storage process after shutdown
        storage = new MySQLAutoPersistenceStorage(storageConf, persistenceUnitName, 1, //parallelQueryCount
                issueHandler, fieldRetriever);
        storage.start();
        LOGGER.debug("querying large binary entity");
        List<EntityBlob> queryResults = storage.runQueryAll(EntityBlob.class);
        LOGGER.debug(String.format("query completed with %d results", queryResults.size()));
        int i = 0;
        for (EntityBlob queryResult : queryResults) {
            int mbSize = (int) (queryResult.getData().length() / 1024 / 1024);
            LOGGER.debug(String.format("query result %d has length %d bytes", i, mbSize));
            i++;
        }
    } finally {
        if (storage != null) {
            storage.shutdown();
        }
    }
}

From source file:richtercloud.document.scanner.it.ImageStorageIT.java

@Test
// Renders a PDF to images, wraps them in several entity types, and measures
// store/query round trips against Derby-embedded and (auto-built) PostgreSQL
// storages, comparing timings for cached vs uncached stores.
public void testImageStorage() throws IOException, StorageException, SQLException, InterruptedException,
        StorageConfValidationException, StorageCreationException, OSNotRecognizedException,
        ArchitectureNotRecognizedException, ExtractionException, MissingSystemBinary, BuildFailureException,
        ModuleBuildFailureException, FieldOrderValidationException, ImageWrapperException {
    LOGGER.info("testImageStorage");
    try {
        new JFXPanel();
        //- necessary in order to avoid
        //`java.lang.IllegalStateException: Toolkit not initialized`
        //- should be run here in order to be catchable at a useful
        //location; figure out how to do this if there's more than this
        //one test
        File databaseDir = Files.createTempDirectory(ImageStorageIT.class.getSimpleName()).toFile();
        FileUtils.forceDelete(databaseDir);
        //databaseDir mustn't exist for Apache Derby
        LOGGER.debug(String.format("database directory is %s", databaseDir.getAbsolutePath()));
        File schemeChecksumFile = File.createTempFile(ImageStorageIT.class.getSimpleName(), "scheme-checksum");
        LOGGER.debug(String.format("scheme checksum file is %s", schemeChecksumFile.getAbsolutePath()));
        File imageStorageDir = Files.createTempDirectory(ImageStorageIT.class.getSimpleName()).toFile();
        LOGGER.debug(String.format("image storage directory is %s", imageStorageDir.getAbsolutePath()));
        // Pre-create the Derby database, then close the bootstrap connection.
        Connection connection = DriverManager
                .getConnection(String.format("jdbc:derby:%s;create=true", databaseDir.getAbsolutePath()));
        connection.close();
        Set<Class<?>> entityClasses = new HashSet<Class<?>>(
                Arrays.asList(EntityByteArray.class, EntityImageIcon.class));
        //        DerbyNetworkPersistenceStorageConf storageConf = new DerbyNetworkPersistenceStorageConf(entityClasses,
        //                "localhost",
        //                schemeChecksumFile);
        //        storageConf.setDatabaseDir(databaseDir);
        //        storageConf.setPassword("sa");
        //        PersistenceStorage storage = new DerbyNetworkPersistenceStorage(storageConf,
        //                "richtercloud_document-scanner-it_jar_1.0-SNAPSHOTPU");
        DerbyEmbeddedPersistenceStorageConf storageConf = new DerbyEmbeddedPersistenceStorageConf(entityClasses,
                databaseDir.getAbsolutePath(), schemeChecksumFile);
        String persistenceUnitName = "document-scanner-it";
        FieldRetriever fieldRetriever = new JPAOrderedCachedFieldRetriever(
                Constants.QUERYABLE_AND_EMBEDDABLE_CLASSES);
        final PersistenceStorage<Long> derbyEmbeddedStorage1 = new DerbyEmbeddedPersistenceStorage(storageConf,
                persistenceUnitName, 1, //parallelQueryCount
                fieldRetriever);
        derbyEmbeddedStorage1.start();
        // Shutdown hook: stop the storage and clean up both temp directories
        // even if the JVM exits abnormally.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            LOGGER.info("shutting down storage");
            derbyEmbeddedStorage1.shutdown();
            LOGGER.info("storage shut down");
            try {
                FileUtils.deleteDirectory(databaseDir);
                LOGGER.info(String.format("database directory '%s' deleted", databaseDir.getAbsolutePath()));
            } catch (IOException ex) {
                LOGGER.info(String.format(
                        "deletion of database directory '%s' failed, see nested exception for details",
                        databaseDir.getAbsolutePath()), ex);
            }
            try {
                FileUtils.deleteDirectory(imageStorageDir);
            } catch (IOException ex) {
                java.util.logging.Logger.getLogger(ImageStorageIT.class.getName()).log(Level.SEVERE, null, ex);
            }
        }, String.format("%s shutdown hook", ImageStorageIT.class.getSimpleName())));
        List<ImageIcon> imageIcons = new LinkedList<>();
        InputStream pdfInputStream = ImageStorageIT.class.getResourceAsStream("/image_data.pdf");
        assert pdfInputStream != null;
        PDDocument document = PDDocument.load(pdfInputStream);
        @SuppressWarnings("unchecked")
        List<OCRSelectPanel> oCRSelectPanels = new LinkedList<>();
        List<ImageWrapper> imageWrappers = new LinkedList<>();
        byte[] data;
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        ObjectOutputStream objectOutputStream = new ObjectOutputStream(outputStream);
        PDFRenderer pdfRenderer = new PDFRenderer(document);
        IssueHandler issueHandler = new LoggerIssueHandler(LOGGER);
        // Render every PDF page at 300 DPI and collect it in three forms:
        // an OCR panel, a serialized ImageIcon, and a CachingImageWrapper.
        for (int page = 0; page < document.getNumberOfPages(); page++) {
            BufferedImage image = pdfRenderer.renderImageWithDPI(page, 300, ImageType.RGB);
            ImageWrapper imageWrapper = new CachingImageWrapper(databaseDir, image, issueHandler);
            OCRSelectPanel oCRSelectPanel = new DefaultOCRSelectPanel(imageWrapper,
                    DocumentScannerConf.PREFERRED_SCAN_RESULT_PANEL_WIDTH_DEFAULT, issueHandler);
            oCRSelectPanels.add(oCRSelectPanel);
            ImageIcon imageIcon = new ImageIcon(image);
            objectOutputStream.writeObject(imageIcon);
            imageIcons.add(imageIcon);
            imageWrappers.add(new CachingImageWrapper(imageStorageDir, image, issueHandler));
        }
        document.close();
        data = outputStream.toByteArray();

        EntityByteArray entityA = new EntityByteArray(1L, data);
        EntityByteArray entityA2 = new EntityByteArray(3L, data);
        EntityImageIcon entityB = new EntityImageIcon(2L, imageIcons);
        EntityImageIcon entityB2 = new EntityImageIcon(4L, imageIcons);
        EntityImageWrapper entityC1 = new EntityImageWrapper(imageWrappers);
        EntityImageWrapper entityC2 = new EntityImageWrapper(imageWrappers);
        long time0 = System.currentTimeMillis();
        entityA.setId(MemorySequentialIdGenerator.getInstance().getNextId(entityA));
        derbyEmbeddedStorage1.store(entityA);
        long time1 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityA: %d ms", time1 - time0));
        entityB.setId(MemorySequentialIdGenerator.getInstance().getNextId(entityB));
        derbyEmbeddedStorage1.store(entityB);
        long time2 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityB: %d ms", time2 - time1));
        //store another time in order to figure out caching effects
        entityA2.setId(MemorySequentialIdGenerator.getInstance().getNextId(entityA2));
        derbyEmbeddedStorage1.store(entityA2);
        long time3 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityA2: %d ms", time3 - time2));
        entityB2.setId(MemorySequentialIdGenerator.getInstance().getNextId(entityB2));
        derbyEmbeddedStorage1.store(entityB2);
        long time4 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityB2: %d ms", time4 - time3));
        entityC1.setId(MemorySequentialIdGenerator.getInstance().getNextId(entityC1));
        derbyEmbeddedStorage1.store(entityC1);
        long time5 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityC1: %d ms", time5 - time4));
        entityC2.setId(MemorySequentialIdGenerator.getInstance().getNextId(entityC2));
        derbyEmbeddedStorage1.store(entityC2);
        long time6 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityC2: %d ms", time6 - time5));
        LOGGER.info(String.format("size of entityA's data: %d KiB", entityA.getData().length / 1024));

        // Log the seed so a failing run can be reproduced.
        long randomSeed = System.currentTimeMillis();
        LOGGER.info(String.format("random seed is: %d", randomSeed));
        Random random = new Random(randomSeed);
        byte[] referenceBytes = new byte[data.length];
        random.nextBytes(referenceBytes);
        EntityByteArray entityA3 = new EntityByteArray(5L, referenceBytes);
        EntityByteArray entityA4 = new EntityByteArray(6L, referenceBytes);
        long time7 = System.currentTimeMillis();
        entityA3.setId(MemorySequentialIdGenerator.getInstance().getNextId(entityA3));
        derbyEmbeddedStorage1.store(entityA3);
        long time8 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityA3: %d ms", time8 - time7));
        entityA4.setId(MemorySequentialIdGenerator.getInstance().getNextId(entityA4));
        derbyEmbeddedStorage1.store(entityA4);
        long time9 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityA4: %d ms", time9 - time8));
        derbyEmbeddedStorage1.shutdown();

        //test whether EntityImagerWrapper is deserializable
        PersistenceStorage<Long> derbyEmbeddedStorage2 = new DerbyEmbeddedPersistenceStorage(storageConf,
                persistenceUnitName, 1, //parallelQueryCount
                fieldRetriever);
        derbyEmbeddedStorage2.start();
        List<EntityImageWrapper> queryResults = derbyEmbeddedStorage2.runQueryAll(EntityImageWrapper.class);
        assert queryResults.size() == 2;
        EntityImageWrapper queryResult0 = queryResults.get(0);
        List<ImageWrapper> queryResult0Data = queryResult0.getData();
        for (ImageWrapper queryResult0Datum : queryResult0Data) {
            LOGGER.info(String.format("inspect image wrapper file %s ms", queryResult0Datum.getStorageFile()));
        }
        derbyEmbeddedStorage2.shutdown();

        //test PostgreSQL
        File databaseDirPostgresql = Files.createTempDirectory(ImageStorageIT.class.getSimpleName()).toFile();
        FileUtils.forceDelete(databaseDirPostgresql);
        //an inexisting database directory triggers creation of database
        //with initdb
        LOGGER.debug(
                String.format("PostgreSQL database directory is %s", databaseDirPostgresql.getAbsolutePath()));
        //build PostgreSQL
        File postgresqlInstallationPrefixDir = Files.createTempDirectory(ImageStorageIT.class.getSimpleName())
                .toFile();
        LOGGER.debug(String.format("using '%s' as PostgreSQL installation prefix",
                postgresqlInstallationPrefixDir.getAbsolutePath()));
        File downloadDir = Files.createTempDirectory(ImageStorageIT.class.getSimpleName()).toFile();
        //SystemUtils.getUserHome() causes trouble
        //($HOME/jhbuild/checkout might be jhbuilds default extraction
        //directory)
        LOGGER.debug(String.format("using '%s' as JHBuild Java wrapper download directory", downloadDir));
        JHBuildJavaWrapper jHBuildJavaWrapper = new JHBuildJavaWrapper(postgresqlInstallationPrefixDir, //installationPrefixDir
                downloadDir, //downloadDir
                ActionOnMissingBinary.DOWNLOAD, ActionOnMissingBinary.DOWNLOAD, new AutoDownloader(), //downloader
                false, true, //silenceStdout
                true, //silenceStderr
                issueHandler);
        String moduleName = "postgresql-9.6.3";
        LOGGER.info(
                String.format("building module %s from JHBuild Java wrapper's default moduleset", moduleName));
        jHBuildJavaWrapper.installModuleset(moduleName);
        //moduleset shipped with jhbuild-java-wrapper
        String initdb = new File(postgresqlInstallationPrefixDir, String.join(File.separator, "bin", "initdb"))
                .getAbsolutePath();
        String postgres = new File(postgresqlInstallationPrefixDir,
                String.join(File.separator, "bin", "postgres")).getAbsolutePath();
        String createdb = new File(postgresqlInstallationPrefixDir,
                String.join(File.separator, "bin", "createdb")).getAbsolutePath();
        String pgCtl = new File(postgresqlInstallationPrefixDir, String.join(File.separator, "bin", "pg_ctl"))
                .getAbsolutePath();
        String databaseName = "image-storage-it";
        String username = "docu";
        String password = "docu";
        PostgresqlAutoPersistenceStorageConf postgresqlPersistenceStorageConf = new PostgresqlAutoPersistenceStorageConf(
                entityClasses, "localhost", //hostname
                username, //username
                password, databaseName, schemeChecksumFile, databaseDirPostgresql.getAbsolutePath(), //databaseDir
                initdb, postgres, createdb, pgCtl);
        PersistenceStorage<Long> postgresqlStorage = new PostgresqlAutoPersistenceStorage(
                postgresqlPersistenceStorageConf, persistenceUnitName, 1, //parallelQueryCount
                fieldRetriever, issueHandler);
        postgresqlStorage.start();
        // Repeat the timing measurements against PostgreSQL, reusing the
        // earlier time variables.
        time0 = System.currentTimeMillis();
        postgresqlStorage.store(entityA);
        time1 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityA: %d ms", time1 - time0));
        postgresqlStorage.store(entityB);
        time2 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityB: %d ms", time2 - time1));
        //store another time in order to figure out caching effects
        postgresqlStorage.store(entityA2);
        time3 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityA2: %d ms", time3 - time2));
        postgresqlStorage.store(entityB2);
        time4 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityB2: %d ms", time4 - time3));
        LOGGER.info(String.format("size of entityA's data: %d KiB", entityA.getData().length / 1024));

        // NOTE(review): time9/time8 are reused here in reverse roles compared
        // to the Derby section above; the elapsed-time arithmetic (later minus
        // earlier timestamp) is still correct.
        time9 = System.currentTimeMillis();
        postgresqlStorage.store(entityA3);
        time8 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityA3: %d ms", time8 - time9));
        postgresqlStorage.store(entityA4);
        time9 = System.currentTimeMillis();
        LOGGER.info(String.format("time for storing entityA4: %d ms", time9 - time8));
        postgresqlStorage.shutdown();
        Caching.getCachingProvider().close();
    } catch (UnsupportedOperationException ex) {
        //`new JFXPanel()` for JavaFX toolkit initialization causes
        //`java.lang.UnsupportedOperationException: Unable to open DISPLAY`
        //instead of HeadlessException (which is a subclass of
        //UnsupportedOperationException
        LOGGER.warn(
                "UnsupportedOperationException indicates that the test is run on a headless machine, e.g. a CI service",
                ex);
    }
}

From source file:com.nginious.http.websocket.WebSocketTestCase.java

// Produces len random bytes XOR-masked with the given key, cycling through
// the first 4 mask bytes (presumably a 4-byte WebSocket masking key).
private byte[] generateMaskedBytes(int len, byte[] mask) {
    final byte[] result = new byte[len];
    new Random().nextBytes(result);

    for (int pos = 0; pos < len; pos++) {
        result[pos] = (byte) (result[pos] ^ mask[pos % 4]);
    }

    return result;
}

From source file:jetbrains.buildServer.clouds.azure.connector.AzureApiConnector.java

/**
 * Creates a temporary PKCS#12 keystore holding the given management
 * certificate, protected by a freshly generated password, and builds an
 * Azure {@code Configuration} from it.
 *
 * @param managementCertificate the certificate material to place in the keystore
 * @return the prepared configuration, or {@code null} if any step failed
 */
private Configuration prepareConfiguration(@NotNull final String managementCertificate)
        throws RuntimeException {
    try {
        final File tempFile = File.createTempFile("azk", null);
        // Best effort: don't leave the keystore file behind after the JVM exits.
        tempFile.deleteOnExit();
        // SECURITY FIX: the password protects the management certificate, so
        // generate it with SecureRandom instead of the predictable
        // java.util.Random used before.
        final byte[] pwdData = new byte[4];
        new java.security.SecureRandom().nextBytes(pwdData);
        final String base64pw = Base64.encode(pwdData).substring(0, 6);
        // FIX: the FileOutputStream was previously never closed (resource
        // leak); try-with-resources guarantees it is.
        try (FileOutputStream fOut = new FileOutputStream(tempFile)) {
            createKeyStorePKCS12(managementCertificate, fOut, base64pw);
        }
        return prepareConfiguration(tempFile, base64pw, KeyStoreType.pkcs12);
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}