List of usage examples for java.util.Random.nextBoolean()
public boolean nextBoolean()
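Returns the next pseudorandom, uniformly distributed boolean value from the generator's sequence. Before the real-world usages below, here is a minimal standalone sketch; the class name NextBooleanDemo and the fixed seed are illustrative choices, not taken from any of the listed sources:

import java.util.Random;

public class NextBooleanDemo {
    public static void main(String[] args) {
        Random random = new Random(42L); // fixed seed only to make the output reproducible
        for (int i = 0; i < 5; i++) {
            // each call draws one pseudorandom bit: true or false with equal probability
            boolean flag = random.nextBoolean();
            System.out.println("coin flip " + (i + 1) + ": " + (flag ? "heads" : "tails"));
        }
    }
}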
From source file:op.tools.SYSTools.java
public static Date anonymizeDate(Date in) {
    Date result = in;
    if (OPDE.isAnonym()) {
        Random rnd = new Random(System.nanoTime());
        DateTime dt = new DateTime(in);
        int factor = rnd.nextBoolean() ? -1 : 1;
        result = dt.plusDays(rnd.nextInt(300) * factor).plusYears(rnd.nextInt(5) * factor).toDate();
    }
    return result;
}
From source file:org.apache.hadoop.hbase.regionserver.wal.TestHLogFiltering.java
private void fillTable() throws IOException, InterruptedException {
    HTable table = TEST_UTIL.createTable(TABLE_NAME, FAMILIES, 3, Bytes.toBytes("row0"),
            Bytes.toBytes("row99"), NUM_RS);
    Random rand = new Random(19387129L);
    for (int iStoreFile = 0; iStoreFile < 4; ++iStoreFile) {
        for (int iRow = 0; iRow < 100; ++iRow) {
            final byte[] row = Bytes.toBytes("row" + iRow);
            Put put = new Put(row);
            Delete del = new Delete(row);
            for (int iCol = 0; iCol < 10; ++iCol) {
                final byte[] cf = rand.nextBoolean() ? CF1 : CF2;
                final long ts = Math.abs(rand.nextInt());
                final byte[] qual = Bytes.toBytes("col" + iCol);
                if (rand.nextBoolean()) {
                    final byte[] value = Bytes.toBytes("value_for_row_" + iRow + "_cf_"
                            + Bytes.toStringBinary(cf) + "_col_" + iCol + "_ts_" + ts
                            + "_random_" + rand.nextLong());
                    put.add(cf, qual, ts, value);
                } else if (rand.nextDouble() < 0.8) {
                    del.deleteColumn(cf, qual, ts);
                } else {
                    del.deleteColumns(cf, qual, ts);
                }
            }
            table.put(put);
            table.delete(del);
            table.flushCommits();
        }
    }
    TEST_UTIL.waitUntilAllRegionsAssigned(TABLE_NAME);
}
From source file:org.broadinstitute.gatk.tools.walkers.variantrecalibration.MultivariateGaussian.java
public void initializeRandomSigma(final Random rand) {
    final double[][] randSigma = new double[mu.length][mu.length];
    for (int iii = 0; iii < mu.length; iii++) {
        for (int jjj = iii; jjj < mu.length; jjj++) {
            randSigma[jjj][iii] = 0.55 + 1.25 * rand.nextDouble();
            if (rand.nextBoolean()) {
                randSigma[jjj][iii] *= -1.0;
            }
            if (iii != jjj) {
                randSigma[iii][jjj] = 0.0;
            }
            // Sigma is a symmetric, positive-definite matrix created by taking a lower diagonal matrix
            // and multiplying it by its transpose
        }
    }
    Matrix tmp = new Matrix(randSigma);
    tmp = tmp.times(tmp.transpose());
    sigma.setMatrix(0, mu.length - 1, 0, mu.length - 1, tmp);
}
From source file:org.apache.metron.stellar.dsl.functions.HashFunctionsTest.java
@Test
public void tlsh_multithread() throws Exception {
    // we want to ensure that everything is threadsafe, so we'll spin up some random data,
    // generate some hashes and then do it all in parallel and make sure it all matches.
    Map<Map.Entry<byte[], Map<String, Object>>, String> hashes = new HashMap<>();
    Random r = new Random(0);
    for (int i = 0; i < 20; ++i) {
        byte[] d = new byte[256];
        r.nextBytes(d);
        Map<String, Object> config = new HashMap<String, Object>() {
            {
                put(TLSHHasher.Config.BUCKET_SIZE.key, r.nextBoolean() ? 128 : 256);
                put(TLSHHasher.Config.CHECKSUM.key, r.nextBoolean() ? 1 : 3);
            }
        };
        String hash = (String) run("HASH(data, 'tlsh', config)", ImmutableMap.of("config", config, "data", d));
        Assert.assertNotNull(hash);
        hashes.put(new AbstractMap.SimpleEntry<>(d, config), hash);
    }
    ForkJoinPool forkJoinPool = new ForkJoinPool(5);
    forkJoinPool.submit(() -> hashes.entrySet().parallelStream().forEach(kv -> {
        Map<String, Object> config = kv.getKey().getValue();
        byte[] data = kv.getKey().getKey();
        String hash = (String) run("HASH(data, 'tlsh', config)",
                ImmutableMap.of("config", config, "data", data));
        Assert.assertEquals(hash, kv.getValue());
    }));
}
From source file:com.streamsets.pipeline.lib.jdbc.multithread.TestMultithreadedTableProvider.java
private static Map<TableRuntimeContext, Map<String, String>> createRandomPartitionsAndStoredOffsets(
        boolean enablePartitioning) {
    Random random = RandomTestUtils.getRandom();
    Map<TableRuntimeContext, Map<String, String>> partitions = new HashMap<>();
    List<Integer> sqlTypes = new ArrayList<>(TableContextUtil.PARTITIONABLE_TYPES);

    String schemaName = "schema";
    String offsetColName = "OFFSET_COL";

    int numTables = RandomTestUtils.nextInt(1, 8);
    for (int t = 0; t < numTables; t++) {
        String tableName = String.format("table%d", t);
        int type = sqlTypes.get(RandomTestUtils.nextInt(0, sqlTypes.size()));

        PartitioningMode partitioningMode = enablePartitioning && random.nextBoolean()
                ? PartitioningMode.BEST_EFFORT
                : PartitioningMode.DISABLED;
        final boolean partitioned = partitioningMode == PartitioningMode.BEST_EFFORT;
        int maxNumPartitions = partitioned ? RandomTestUtils.nextInt(1, 10) : 1;

        // an integer should be compatible with all partitionable types
        int partitionSize = RandomTestUtils.nextInt(1, 1000000);

        TableContext table = new TableContext(schemaName, tableName,
                Maps.newLinkedHashMap(Collections.singletonMap(offsetColName, type)),
                Collections.singletonMap(offsetColName, null),
                Collections.singletonMap(offsetColName, String.valueOf(partitionSize)),
                Collections.singletonMap(offsetColName, "0"),
                TableConfigBean.ENABLE_NON_INCREMENTAL_DEFAULT_VALUE, partitioningMode, maxNumPartitions, null);

        for (int p = 0; p < maxNumPartitions; p++) {
            if (partitioned && random.nextBoolean() && !(p == maxNumPartitions - 1 && partitions.isEmpty())) {
                // only create some partitions
                continue;
            }

            int startOffset = p * partitionSize;
            int maxOffset = (p + 1) * partitionSize;

            Map<String, String> partitionStoredOffsets = null;
            if (random.nextBoolean()) {
                // only simulate stored offsets sometimes
                int storedOffset = RandomTestUtils.nextInt(startOffset + 1, maxOffset + 1);
                partitionStoredOffsets = Collections.singletonMap(offsetColName, String.valueOf(storedOffset));
            }

            TableRuntimeContext partition = new TableRuntimeContext(table, false, partitioned,
                    partitioned ? p + 1 : TableRuntimeContext.NON_PARTITIONED_SEQUENCE,
                    Collections.singletonMap(offsetColName, String.valueOf(startOffset)),
                    Collections.singletonMap(offsetColName, String.valueOf(maxOffset)), partitionStoredOffsets);

            partitions.put(partition, partitionStoredOffsets);
        }
    }

    return partitions;
}
From source file:org.activiti.explorer.conf.DemoDataConfiguration.java
protected void generateReportData() {
    // Report data is generated in background thread
    Thread thread = new Thread(new Runnable() {

        public void run() {
            // We need to temporarily disable the job executor or it would interfere with the process execution
            processEngineConfiguration.getJobExecutor().shutdown();

            Random random = new Random();
            Date now = new Date(new Date().getTime() - (24 * 60 * 60 * 1000));
            processEngineConfiguration.getClock().setCurrentTime(now);

            for (int i = 0; i < 50; i++) {
                if (random.nextBoolean()) {
                    runtimeService.startProcessInstanceByKey("fixSystemFailure");
                }
                if (random.nextBoolean()) {
                    identityService.setAuthenticatedUserId("kermit");
                    Map<String, Object> variables = new HashMap<String, Object>();
                    variables.put("customerName", "testCustomer");
                    variables.put("details", "Looks very interesting!");
                    variables.put("notEnoughInformation", false);
                    runtimeService.startProcessInstanceByKey("reviewSaledLead", variables);
                }
                if (random.nextBoolean()) {
                    runtimeService.startProcessInstanceByKey("escalationExample");
                }
                if (random.nextInt(100) < 20) {
                    now = new Date(now.getTime() - ((24 * 60 * 60 * 1000) - (60 * 60 * 1000)));
                    processEngineConfiguration.getClock().setCurrentTime(now);
                }
            }

            List<Job> jobs = managementService.createJobQuery().list();
            for (int i = 0; i < jobs.size() / 2; i++) {
                processEngineConfiguration.getClock().setCurrentTime(jobs.get(i).getDuedate());
                managementService.executeJob(jobs.get(i).getId());
            }

            List<Task> tasks = taskService.createTaskQuery().list();
            while (!tasks.isEmpty()) {
                for (Task task : tasks) {
                    if (task.getAssignee() == null) {
                        String assignee = random.nextBoolean() ? "kermit" : "fozzie";
                        taskService.claim(task.getId(), assignee);
                    }
                    processEngineConfiguration.getClock().setCurrentTime(
                            new Date(task.getCreateTime().getTime() + random.nextInt(60 * 60 * 1000)));
                    taskService.complete(task.getId());
                }
                tasks = taskService.createTaskQuery().list();
            }

            processEngineConfiguration.getClock().reset();
            processEngineConfiguration.getJobExecutor().start();
            LOGGER.info("Demo report data generated");
        }
    });
    thread.start();
}
From source file:org.activiti.explorer.conf.DemoDataInitializer.java
protected void generateReportData() {
    // Report data is generated in background thread
    Thread thread = new Thread(new Runnable() {

        @Override
        public void run() {
            // We need to temporarily disable the job executor or it would interfere with the process execution
            processEngineConfiguration.getJobExecutor().shutdown();

            Random random = new Random();
            Date now = new Date(new Date().getTime() - (24 * 60 * 60 * 1000));
            processEngineConfiguration.getClock().setCurrentTime(now);

            for (int i = 0; i < 50; i++) {
                if (random.nextBoolean()) {
                    runtimeService.startProcessInstanceByKey("fixSystemFailure");
                }
                if (random.nextBoolean()) {
                    identityService.setAuthenticatedUserId("kermit");
                    Map<String, Object> variables = new HashMap<String, Object>();
                    variables.put("customerName", "testCustomer");
                    variables.put("details", "Looks very interesting!");
                    variables.put("notEnoughInformation", false);
                    runtimeService.startProcessInstanceByKey("reviewSaledLead", variables);
                }
                if (random.nextBoolean()) {
                    runtimeService.startProcessInstanceByKey("escalationExample");
                }
                if (random.nextInt(100) < 20) {
                    now = new Date(now.getTime() - ((24 * 60 * 60 * 1000) - (60 * 60 * 1000)));
                    processEngineConfiguration.getClock().setCurrentTime(now);
                }
            }

            List<Job> jobs = managementService.createJobQuery().list();
            for (int i = 0; i < jobs.size() / 2; i++) {
                processEngineConfiguration.getClock().setCurrentTime(jobs.get(i).getDuedate());
                managementService.executeJob(jobs.get(i).getId());
            }

            List<Task> tasks = taskService.createTaskQuery().list();
            while (!tasks.isEmpty()) {
                for (Task task : tasks) {
                    if (task.getAssignee() == null) {
                        String assignee = random.nextBoolean() ? "kermit" : "fozzie";
                        taskService.claim(task.getId(), assignee);
                    }
                    processEngineConfiguration.getClock().setCurrentTime(
                            new Date(task.getCreateTime().getTime() + random.nextInt(60 * 60 * 1000)));
                    taskService.complete(task.getId());
                }
                tasks = taskService.createTaskQuery().list();
            }

            processEngineConfiguration.getClock().reset();
            processEngineConfiguration.getJobExecutor().start();
            LOG.info("Demo report data generated");
        }
    });
    thread.start();
}
From source file:org.apache.hadoop.hdfs.qjournal.client.TestImageUploadStream.java
/**
 * Write random data by using write(byte) and write(byte[]).
 */
private byte[] writeData(OutputStream os, int size) throws IOException {
    Random r = new Random();
    int approxMaxLen = size;
    int bytesWritten = 0;
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    while (bytesWritten < approxMaxLen) {
        if (r.nextBoolean()) {
            int b = r.nextInt();
            os.write(b);
            bos.write(b);
            bytesWritten++;
        } else {
            byte[] rand = new byte[r.nextInt(10) + 1];
            r.nextBytes(rand);
            os.write(rand);
            bos.write(rand);
            bytesWritten += rand.length;
        }
    }
    return bos.toByteArray();
}
From source file:org.activiti.explorer.conf.DefaultDataConfiguration.java
protected void generateReportData() {
    // Report data is generated in background thread
    Thread thread = new Thread(new Runnable() {

        public void run() {
            // We need to temporarily disable the job executor or it would interfere with the process execution
            if (processEngineConfiguration.isAsyncExecutorEnabled()
                    && processEngineConfiguration.getAsyncExecutor() != null) {
                processEngineConfiguration.getAsyncExecutor().shutdown();
            } else if (processEngineConfiguration.isAsyncExecutorEnabled() == false
                    && processEngineConfiguration.getJobExecutor() != null) {
                processEngineConfiguration.getJobExecutor().shutdown();
            }

            Random random = new Random();
            Date now = new Date(new Date().getTime() - (24 * 60 * 60 * 1000));
            processEngineConfiguration.getClock().setCurrentTime(now);

            for (int i = 0; i < 50; i++) {
                if (random.nextBoolean()) {
                    runtimeService.startProcessInstanceByKey("fixSystemFailure");
                }
                if (random.nextBoolean()) {
                    identityService.setAuthenticatedUserId("kermit");
                    Map<String, Object> variables = new HashMap<String, Object>();
                    variables.put("customerName", "testCustomer");
                    variables.put("details", "Looks very interesting!");
                    variables.put("notEnoughInformation", false);
                    runtimeService.startProcessInstanceByKey("reviewSaledLead", variables);
                }
                if (random.nextBoolean()) {
                    runtimeService.startProcessInstanceByKey("escalationExample");
                }
                if (random.nextInt(100) < 20) {
                    now = new Date(now.getTime() - ((24 * 60 * 60 * 1000) - (60 * 60 * 1000)));
                    processEngineConfiguration.getClock().setCurrentTime(now);
                }
            }

            List<Job> jobs = managementService.createJobQuery().list();
            for (int i = 0; i < jobs.size() / 2; i++) {
                processEngineConfiguration.getClock().setCurrentTime(jobs.get(i).getDuedate());
                managementService.executeJob(jobs.get(i).getId());
            }

            List<Task> tasks = taskService.createTaskQuery().list();
            while (!tasks.isEmpty()) {
                for (Task task : tasks) {
                    if (task.getAssignee() == null) {
                        String assignee = random.nextBoolean() ? "kermit" : "fozzie";
                        taskService.claim(task.getId(), assignee);
                    }
                    processEngineConfiguration.getClock().setCurrentTime(
                            new Date(task.getCreateTime().getTime() + random.nextInt(60 * 60 * 1000)));
                    taskService.complete(task.getId());
                }
                tasks = taskService.createTaskQuery().list();
            }

            processEngineConfiguration.getClock().reset();

            if (processEngineConfiguration.isAsyncExecutorEnabled()
                    && processEngineConfiguration.getAsyncExecutor() != null) {
                processEngineConfiguration.getAsyncExecutor().start();
            } else if (processEngineConfiguration.isAsyncExecutorEnabled() == false
                    && processEngineConfiguration.getJobExecutor() != null) {
                processEngineConfiguration.getJobExecutor().start();
            }
            LOGGER.info("Demo report data generated");
        }
    });
    thread.start();
}
From source file:de.upb.timok.run.GenericSmacPipeline.java
private void changeTimeValue(Random mutation, TDoubleList timeValues, double changePercent, int i) {
    double newValue = timeValues.get(i);
    if (mutation.nextBoolean()) {
        newValue = newValue + newValue * changePercent;
    } else {
        newValue = newValue - newValue * changePercent;
    }
    timeValues.set(i, newValue);
}