List of usage examples for java.util.Random.nextBytes
public void nextBytes(byte[] bytes)
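Fills the supplied array with pseudorandom bytes; exactly bytes.length bytes are generated. For reference, a minimal self-contained sketch (the class name, seed value, and buffer size are illustrative, not taken from any of the projects below); passing a fixed seed to the Random constructor makes the byte sequence reproducible, which is why several of the test examples below do exactly that:

import java.util.Arrays;
import java.util.Random;

public class NextBytesDemo {
    public static void main(String[] args) {
        Random random = new Random(42L); // fixed seed => reproducible output
        byte[] buf = new byte[16];
        random.nextBytes(buf); // fills the entire array with pseudorandom bytes
        System.out.println(Arrays.toString(buf));
    }
}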
From source file:org.apache.hadoop.fs.TestEnhancedByteBufferAccess.java
/**
 * Test fallback reads on a stream which does not support the
 * ByteBufferReadable interface.
 */
@Test
public void testIndirectFallbackReads() throws Exception {
    final File TEST_DIR = new File(System.getProperty("test.build.data", "build/test/data"));
    final String TEST_PATH = TEST_DIR + File.separator + "indirectFallbackTestFile";
    final int TEST_FILE_LENGTH = 16385;
    final int RANDOM_SEED = 23453;
    FileOutputStream fos = null;
    FileInputStream fis = null;
    try {
        fos = new FileOutputStream(TEST_PATH);
        Random random = new Random(RANDOM_SEED);
        byte original[] = new byte[TEST_FILE_LENGTH];
        random.nextBytes(original);
        fos.write(original);
        fos.close();
        fos = null;
        fis = new FileInputStream(TEST_PATH);
        testFallbackImpl(fis, original);
    } finally {
        IOUtils.cleanup(LOG, fos, fis);
        new File(TEST_PATH).delete();
    }
}
From source file:org.apache.hadoop.io.file.tfile.TestTFileSeqFileComparison.java
private void setUpDictionary() {
    Random rng = new Random();
    dictionary = new byte[options.dictSize][];
    for (int i = 0; i < options.dictSize; ++i) {
        int len = rng.nextInt(options.maxWordLen - options.minWordLen) + options.minWordLen;
        dictionary[i] = new byte[len];
        rng.nextBytes(dictionary[i]);
    }
}
From source file:org.apache.hadoop.hbase.regionserver.TestPerColumnFamilyFlush.java
private void doPut(Table table, long memstoreFlushSize) throws IOException, InterruptedException {
    Region region = getRegionWithName(table.getName()).getFirst();
    // per-row value sizes: cf1 100B, cf2 200B and cf3 400B
    byte[] qf = Bytes.toBytes("qf");
    Random rand = new Random();
    byte[] value1 = new byte[100];
    byte[] value2 = new byte[200];
    byte[] value3 = new byte[400];
    for (int i = 0; i < 10000; i++) {
        Put put = new Put(Bytes.toBytes("row-" + i));
        rand.setSeed(i);
        rand.nextBytes(value1);
        rand.nextBytes(value2);
        rand.nextBytes(value3);
        put.addColumn(FAMILY1, qf, value1);
        put.addColumn(FAMILY2, qf, value2);
        put.addColumn(FAMILY3, qf, value3);
        table.put(put);
        // slow down to let the regionserver flush the region.
        while (region.getMemstoreSize() > memstoreFlushSize) {
            Thread.sleep(100);
        }
    }
}
From source file:org.apache.hadoop.hdfs.TestDecommission.java
private void writeFile(FileSystem fileSys, Path name, int repl) throws IOException {
    // create and write a file that contains three blocks of data
    FSDataOutputStream stm = fileSys.create(name, true,
            fileSys.getConf().getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096),
            (short) repl, blockSize);
    byte[] buffer = new byte[fileSize];
    Random rand = new Random(seed);
    rand.nextBytes(buffer);
    stm.write(buffer);
    stm.close();
    LOG.info("Created file " + name + " with " + repl + " replicas.");
}
From source file:richtercloud.document.scanner.it.LargeBinaryStorageIT.java
@Test
public void testLargeBinaryStorage() throws IOException, StorageConfValidationException,
        StorageCreationException, StorageException, InterruptedException, OSNotRecognizedException,
        ArchitectureNotRecognizedException, ExtractionException, MissingSystemBinary,
        BuildFailureException, ModuleBuildFailureException, FieldOrderValidationException {
    LOGGER.info("testLargeBinaryStorage");
    PersistenceStorage<Long> storage = null;
    Locale.setDefault(Locale.ENGLISH);
    try {
        Set<Class<?>> entityClasses = new HashSet<>(Arrays.asList(LargeBinaryEntity.class));
        File databaseDir = Files.createTempDirectory("document-scanner-large-binary-it").toFile();
        FileUtils.forceDelete(databaseDir);
        File schemeChecksumFile = File.createTempFile("document-scanner-large-binary-it", null);
        schemeChecksumFile.delete();
        String persistenceUnitName = "document-scanner-it";
        String username = "document-scanner";
        String password = "document-scanner";
        String databaseName = "document-scanner";
        // build PostgreSQL
        File postgresqlInstallationPrefixDir = Files
                .createTempDirectory(LargeBinaryStorageIT.class.getSimpleName()).toFile();
        LOGGER.debug(String.format("using '%s' as PostgreSQL installation prefix",
                postgresqlInstallationPrefixDir.getAbsolutePath()));
        File downloadDir = Files.createTempDirectory(LargeBinaryStorageIT.class.getSimpleName()).toFile();
        // SystemUtils.getUserHome() causes trouble
        // ($HOME/jhbuild/checkout might be jhbuild's default extraction directory)
        LOGGER.debug(String.format("using '%s' as JHBuild Java wrapper download directory", downloadDir));
        IssueHandler issueHandler = new LoggerIssueHandler(LOGGER);
        JHBuildJavaWrapper jHBuildJavaWrapper = new JHBuildJavaWrapper(
                postgresqlInstallationPrefixDir, // installationPrefixDir
                downloadDir, // downloadDir
                ActionOnMissingBinary.DOWNLOAD,
                ActionOnMissingBinary.DOWNLOAD,
                new AutoDownloader(), // downloader
                false,
                true, // silenceStdout
                true, // silenceStderr
                issueHandler);
        String moduleName = "postgresql-9.6.3";
        LOGGER.info(String.format("building module %s from JHBuild Java wrapper's default moduleset",
                moduleName));
        jHBuildJavaWrapper.installModuleset(moduleName); // moduleset shipped with jhbuild-java-wrapper
        String initdb = new File(postgresqlInstallationPrefixDir,
                String.join(File.separator, "bin", "initdb")).getAbsolutePath();
        String postgres = new File(postgresqlInstallationPrefixDir,
                String.join(File.separator, "bin", "postgres")).getAbsolutePath();
        String createdb = new File(postgresqlInstallationPrefixDir,
                String.join(File.separator, "bin", "createdb")).getAbsolutePath();
        String pgCtl = new File(postgresqlInstallationPrefixDir,
                String.join(File.separator, "bin", "pg_ctl")).getAbsolutePath();
        PostgresqlAutoPersistenceStorageConf storageConf = new PostgresqlAutoPersistenceStorageConf(
                entityClasses,
                "localhost", // hostname
                username,
                password,
                databaseName,
                schemeChecksumFile,
                databaseDir.getAbsolutePath(),
                initdb, // initdbBinaryPath
                postgres, // postgresBinaryPath
                createdb, // createdbBinaryPath
                pgCtl);
        FieldRetriever fieldRetriever = new JPAOrderedCachedFieldRetriever(
                Constants.QUERYABLE_AND_EMBEDDABLE_CLASSES);
        storage = new PostgresqlAutoPersistenceStorage(storageConf, persistenceUnitName,
                1, // parallelQueryCount
                fieldRetriever, issueHandler);
        storage.start();
        long randomSeed = System.currentTimeMillis();
        LOGGER.debug(String.format("random seed is %d", randomSeed));
        Random random = new Random(randomSeed);
        int entityCount = 20;
        for (int i = 0; i < entityCount; i++) {
            int mbSize = random.nextInt(64); // 64 MB max.
            // 128 MB causes trouble on Travis CI (crash because of limited
            // `vm.max_map_count`, which causes `Native memory allocation (mmap)
            // failed to map 109576192 bytes for committing reserved memory.`)
            // and it's not worth figuring this out for now
            int byteCount = 1024 * 1024 * mbSize;
            LOGGER.debug(String.format("generating %d MB random bytes", mbSize));
            byte[] largeRandomBytes = new byte[byteCount];
            random.nextBytes(largeRandomBytes);
            LargeBinaryEntity entity1 = new LargeBinaryEntity(largeRandomBytes);
            LOGGER.debug(String.format("storing large binary entity (%d of %d)", i, entityCount));
            entity1.setId(MemorySequentialIdGenerator.getInstance().getNextId(entity1));
            storage.store(entity1);
        }
        storage.shutdown();
        Thread.sleep(20000);
        // 10000 causes `Caused by: org.postgresql.util.PSQLException:
        // FATAL: the database system is starting up`
        storage = new PostgresqlAutoPersistenceStorage(storageConf, persistenceUnitName,
                1, // parallelQueryCount
                fieldRetriever, issueHandler);
        storage.start();
        LOGGER.debug("querying large binary entity");
        storage.runQueryAll(LargeBinaryEntity.class);
        LOGGER.debug("query completed");
    } finally {
        if (storage != null) {
            storage.shutdown();
        }
    }
}
From source file:org.sakaiproject.search.util.test.FileUtilsTest.java
private void createFlatFiles(File base) throws GeneralSecurityException, IOException {
    Random random = new Random();
    byte[] buffer = new byte[1024];
    log.info("Create Test Tree " + base.getAbsolutePath());
    for (int i = 0; i < 20; i++) {
        String name = FileUtils.digest(String.valueOf(System.currentTimeMillis() + i));
        File f = new File(base, name);
        if (!f.getParentFile().exists()) {
            if (!f.getParentFile().mkdirs()) {
                log.warn("createFlatFiles: can't create parent folder for " + f.getParentFile().getPath());
            }
        }
        FileOutputStream fw = new FileOutputStream(f);
        random.nextBytes(buffer);
        fw.write(buffer);
        fw.close();
    }
    assertEquals("Failed to create test tree ", true, base.exists());
}
From source file:org.apache.nifi.processors.standard.DebugFlow.java
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    FlowFile ff = session.get();

    // Make up to 2 passes to allow rollover from last cycle to first.
    // (This could be "while(true)" since responses should break out if selected, but this
    // prevents endless loops in the event of unexpected errors or future changes.)
    for (int pass = 2; pass > 0; pass--) {
        if (ff == null) {
            if (curr_noff_resp.state() == NoFlowFileResponseState.NO_FF_SKIP_RESPONSE) {
                if (noFlowFileCurrSkip < noFlowFileMaxSkip) {
                    noFlowFileCurrSkip += 1;
                    logger.info("DebugFlow skipping with no flow file");
                    return;
                } else {
                    noFlowFileCurrSkip = 0;
                    curr_noff_resp.getNextCycle();
                }
            }
            if (curr_noff_resp.state() == NoFlowFileResponseState.NO_FF_EXCEPTION_RESPONSE) {
                if (noFlowFileCurrException < noFlowFileMaxException) {
                    noFlowFileCurrException += 1;
                    logger.info("DebugFlow throwing NPE with no flow file");
                    String message = "forced by " + this.getClass().getName();
                    RuntimeException rte;
                    try {
                        rte = noFlowFileExceptionClass.getConstructor(String.class).newInstance(message);
                        throw rte;
                    } catch (InstantiationException | IllegalAccessException | InvocationTargetException
                            | NoSuchMethodException e) {
                        if (logger.isErrorEnabled()) {
                            logger.error("{} unexpected exception throwing DebugFlow exception: {}",
                                    new Object[] { this, e });
                        }
                    }
                } else {
                    noFlowFileCurrException = 0;
                    curr_noff_resp.getNextCycle();
                }
            }
            if (curr_noff_resp.state() == NoFlowFileResponseState.NO_FF_YIELD_RESPONSE) {
                if (noFlowFileCurrYield < noFlowFileMaxYield) {
                    noFlowFileCurrYield += 1;
                    logger.info("DebugFlow yielding with no flow file");
                    context.yield();
                    break;
                } else {
                    noFlowFileCurrYield = 0;
                    curr_noff_resp.getNextCycle();
                }
            }
            return;
        } else {
            final int writeIterations = context.getProperty(WRITE_ITERATIONS).asInteger();
            if (writeIterations > 0 && pass == 1) {
                final Random random = new Random();
                for (int i = 0; i < writeIterations; i++) {
                    final byte[] data = new byte[context.getProperty(CONTENT_SIZE)
                            .asDataSize(DataUnit.B).intValue()];
                    random.nextBytes(data);
                    ff = session.write(ff, new OutputStreamCallback() {
                        @Override
                        public void process(final OutputStream out) throws IOException {
                            out.write(data);
                        }
                    });
                }
            }
            if (curr_ff_resp.state() == FlowFileResponseState.FF_SUCCESS_RESPONSE) {
                if (flowFileCurrSuccess < flowFileMaxSuccess) {
                    flowFileCurrSuccess += 1;
                    logger.info("DebugFlow transferring to success file={} UUID={}",
                            new Object[] { ff.getAttribute(CoreAttributes.FILENAME.key()),
                                    ff.getAttribute(CoreAttributes.UUID.key()) });
                    session.transfer(ff, REL_SUCCESS);
                    session.commit();
                    break;
                } else {
                    flowFileCurrSuccess = 0;
                    curr_ff_resp.getNextCycle();
                }
            }
            if (curr_ff_resp.state() == FlowFileResponseState.FF_FAILURE_RESPONSE) {
                if (flowFileCurrFailure < flowFileMaxFailure) {
                    flowFileCurrFailure += 1;
                    logger.info("DebugFlow transferring to failure file={} UUID={}",
                            new Object[] { ff.getAttribute(CoreAttributes.FILENAME.key()),
                                    ff.getAttribute(CoreAttributes.UUID.key()) });
                    session.transfer(ff, REL_FAILURE);
                    session.commit();
                    break;
                } else {
                    flowFileCurrFailure = 0;
                    curr_ff_resp.getNextCycle();
                }
            }
            if (curr_ff_resp.state() == FlowFileResponseState.FF_ROLLBACK_RESPONSE) {
                if (flowFileCurrRollback < flowFileMaxRollback) {
                    flowFileCurrRollback += 1;
                    logger.info("DebugFlow rolling back (no penalty) file={} UUID={}",
                            new Object[] { ff.getAttribute(CoreAttributes.FILENAME.key()),
                                    ff.getAttribute(CoreAttributes.UUID.key()) });
                    session.rollback();
                    session.commit();
                    break;
                } else {
                    flowFileCurrRollback = 0;
                    curr_ff_resp.getNextCycle();
                }
            }
            if (curr_ff_resp.state() == FlowFileResponseState.FF_YIELD_RESPONSE) {
                if (flowFileCurrYield < flowFileMaxYield) {
                    flowFileCurrYield += 1;
                    logger.info("DebugFlow yielding file={} UUID={}",
                            new Object[] { ff.getAttribute(CoreAttributes.FILENAME.key()),
                                    ff.getAttribute(CoreAttributes.UUID.key()) });
                    session.rollback();
                    context.yield();
                    return;
                } else {
                    flowFileCurrYield = 0;
                    curr_ff_resp.getNextCycle();
                }
            }
            if (curr_ff_resp.state() == FlowFileResponseState.FF_PENALTY_RESPONSE) {
                if (flowFileCurrPenalty < flowFileMaxPenalty) {
                    flowFileCurrPenalty += 1;
                    logger.info("DebugFlow rolling back (with penalty) file={} UUID={}",
                            new Object[] { ff.getAttribute(CoreAttributes.FILENAME.key()),
                                    ff.getAttribute(CoreAttributes.UUID.key()) });
                    session.rollback(true);
                    session.commit();
                    break;
                } else {
                    flowFileCurrPenalty = 0;
                    curr_ff_resp.getNextCycle();
                }
            }
            if (curr_ff_resp.state() == FlowFileResponseState.FF_EXCEPTION_RESPONSE) {
                if (flowFileCurrException < flowFileMaxException) {
                    flowFileCurrException += 1;
                    String message = "forced by " + this.getClass().getName();
                    logger.info("DebugFlow throwing NPE file={} UUID={}",
                            new Object[] { ff.getAttribute(CoreAttributes.FILENAME.key()),
                                    ff.getAttribute(CoreAttributes.UUID.key()) });
                    RuntimeException rte;
                    try {
                        rte = flowFileExceptionClass.getConstructor(String.class).newInstance(message);
                        throw rte;
                    } catch (InstantiationException | IllegalAccessException | InvocationTargetException
                            | NoSuchMethodException e) {
                        if (logger.isErrorEnabled()) {
                            logger.error("{} unexpected exception throwing DebugFlow exception: {}",
                                    new Object[] { this, e });
                        }
                    }
                } else {
                    flowFileCurrException = 0;
                    curr_ff_resp.getNextCycle();
                }
            }
        }
    }
}
From source file:org.mule.util.queue.AbstractTransactionQueueManagerTestCase.java
@Test
public void testBench() throws Exception {
    TransactionalQueueManager mgr = createQueueManager();
    try {
        mgr.start();
        QueueSession s = mgr.getQueueSession();
        Queue q = s.getQueue("queue1");
        Random rnd = new Random();
        long t0 = System.currentTimeMillis();
        for (int i = 0; i < 1; i++) {
            for (int j = 0; j < 500; j++) {
                byte[] o = new byte[2048];
                rnd.nextBytes(o);
                q.put(o);
            }
            while (q.size() > 0) {
                q.take();
            }
        }
        long t1 = System.currentTimeMillis();
        logger.info("Time: " + (t1 - t0) + " ms");
        purgeQueue(q);
    } finally {
        mgr.stop(AbstractResourceManager.SHUTDOWN_MODE_NORMAL);
    }
}
From source file:org.apache.hadoop.raid.TestBlockCopier.java
private long[] createRandomFile(Path file, int repl, int numBlocks) throws IOException {
    long[] crcs = new long[numBlocks];
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(file, true,
            fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, BLOCK_SIZE);
    // Write whole blocks.
    byte[] b = new byte[(int) BLOCK_SIZE];
    for (int i = 1; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
        crcs[i - 1] = crc.getValue();
        crc.reset();
    }
    // Write partial block.
    b = new byte[(int) BLOCK_SIZE / 2 - 1];
    rand.nextBytes(b);
    stm.write(b);
    crc.update(b);
    crcs[crcs.length - 1] = crc.getValue();
    stm.close();
    return crcs;
}
From source file:com.jivesoftware.os.amza.service.storage.binary.BinaryRowReaderWriterTest.java
@Test
public void testOpenCloseAppend() throws Exception {
    File dir = Files.createTempDir();
    IoStats ioStats = new IoStats();
    Random rand = new Random();
    for (long i = 0; i < 1000; i++) {
        DiskBackedWALFiler filer = new DiskBackedWALFiler(new File(dir, "foo").getAbsolutePath(),
                "rw", false, 0);
        BinaryRowReader binaryRowReader = new BinaryRowReader(filer);
        BinaryRowWriter binaryRowWriter = new BinaryRowWriter(filer);
        ReadStream readStream = new ReadStream();
        if (i > 0) {
            binaryRowReader.reverseScan(ioStats, readStream);
            Assert.assertEquals(readStream.rows.size(), i);
        }
        readStream.clear();
        byte[] row = new byte[4];
        rand.nextBytes(row);
        binaryRowWriter.write(ioStats, i, RowType.primary, 1, row.length,
                stream -> stream.stream(row), indexableKeys, txKeyPointerFpStream, true, false);
        filer.close();
    }
}