List of usage examples for java.util.Random.nextLong()
public long nextLong()
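Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below; the class name NextLongDemo and the bound of 1,000,000 are illustrative). It shows that nextLong() returns a pseudorandom 64-bit value that may be negative, and one common way to derive a non-negative bounded value from it on JDKs where Random has no nextLong(long bound) overload (that overload arrived with RandomGenerator in Java 17):

import java.util.Random;

public class NextLongDemo {
  public static void main(String[] args) {
    Random random = new Random();

    // A pseudorandom 64-bit value; roughly half the results are negative.
    long raw = random.nextLong();
    System.out.println("raw value: " + raw);

    // Derive a value in [0, bound) by hand. Math.floorMod keeps the result
    // non-negative even when nextLong() is negative. This simple approach has
    // a slight modulo bias, which is usually acceptable for non-cryptographic use.
    long bound = 1_000_000L;
    long bounded = Math.floorMod(random.nextLong(), bound);
    System.out.println("bounded value: " + bounded);
  }
}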
From source file:com.linkedin.pinot.index.writer.FixedByteWidthRowColDataFileWriterTest.java
@Test
public void testSingleColLong() throws Exception {
  File wfile = new File("test_single_col_writer.dat");
  wfile.delete();
  final int rows = 100;
  final int cols = 1;
  final int[] columnSizes = new int[] { 8 };
  FixedByteSingleValueMultiColWriter writer =
      new FixedByteSingleValueMultiColWriter(wfile, rows, cols, columnSizes);
  final long[] data = new long[rows];
  Random r = new Random();
  for (int i = 0; i < rows; i++) {
    data[i] = r.nextLong();
    writer.setLong(i, 0, data[i]);
  }
  writer.close();

  File rfile = new File("test_single_col_writer.dat");
  PinotDataBuffer buffer = PinotDataBuffer.fromFile(rfile, ReadMode.mmap,
      FileChannel.MapMode.READ_WRITE, "testing");
  FixedByteSingleValueMultiColReader reader =
      new FixedByteSingleValueMultiColReader(buffer, rows, cols, columnSizes);
  for (int i = 0; i < rows; i++) {
    Assert.assertEquals(reader.getLong(i, 0), data[i]);
  }
  reader.close();
  rfile.delete();
}
From source file:com.denimgroup.threadfix.service.ManualFindingServiceImpl.java
private String getRandomNativeId() {
  Random random = new Random();
  // get next long value
  long value = random.nextLong();
  return String.valueOf(value);
}
From source file:com.ge.predix.test.utils.PolicyHelper.java
public PolicyEvaluationRequestV1 createRandomEvalRequest() {
  Random r = new Random(System.currentTimeMillis());
  Set<Attribute> subjectAttributes = Collections.emptySet();
  return this.createEvalRequest(ACTIONS[r.nextInt(4)], String.valueOf(r.nextLong()),
      "/alarms/sites/" + String.valueOf(r.nextLong()), subjectAttributes);
}
From source file:lirmm.inria.fr.math.OpenLongToDoubleHashMapTest.java
private Map<Long, Double> generate() {
  Map<Long, Double> map = new HashMap<>();
  Random r = new Random();
  for (int i = 0; i < 2000; ++i) {
    map.put(r.nextLong(), r.nextDouble());
  }
  return map;
}
From source file:com.google.cloud.metrics.MetricsUtils.java
/**
 * Creates the parameters required to record the Google Analytics event.
 *
 * @see https://developers.google.com/analytics/devguides/collection/protocol/v1/parameters
 *
 * @param analyticsId Google Analytics ID to receive the report data.
 * @param clientId Client ID - must not include PII.
 * @param virtualPageName Full relative URL of the virtual page for the event.
 * @param virtualPageTitle Title of the virtual page for the event.
 * @param eventType Full event type string.
 * @param eventName Event name.
 * @param isUserSignedIn Whether the event involves a signed-in user.
 * @param isUserInternal Whether the event involves an internal user.
 * @param isUserTrialEligible Whether the event involves a user eligible for free trial.
 *        Use {@link Optional#absent()} if not known.
 * @param projectNumberHash Hashed numeric project ID.
 * @param billingIdHash Hashed billing ID, if applicable.
 * @param clientHostname Hostname of the client where the event occurred, if any.
 * @param random Random number generator to use for cache busting.
 * @return immutable list of parameters as name-value pairs.
 */
static ImmutableList<NameValuePair> buildParameters(String analyticsId, String clientId,
    String virtualPageName, String virtualPageTitle, String eventType, String eventName,
    boolean isUserSignedIn, boolean isUserInternal, Optional<Boolean> isUserTrialEligible,
    Optional<String> projectNumberHash, Optional<String> billingIdHash,
    Optional<String> clientHostname, Random random) {
  checkNotNull(analyticsId);
  checkNotNull(clientId);
  checkNotNull(virtualPageTitle);
  checkNotNull(virtualPageName);
  checkNotNull(eventType);
  checkNotNull(eventName);
  checkNotNull(projectNumberHash);
  checkNotNull(billingIdHash);
  checkNotNull(clientHostname);
  checkNotNull(random);
  ImmutableList.Builder<NameValuePair> listBuilder = new ImmutableList.Builder<>();

  // Analytics information
  // Protocol version
  listBuilder.add(new BasicNameValuePair(PARAM_PROTOCOL, "1"));
  // Analytics ID to send report to
  listBuilder.add(new BasicNameValuePair(PARAM_PROPERTY_ID, analyticsId));
  // Always report as a pageview
  listBuilder.add(new BasicNameValuePair(PARAM_TYPE, VALUE_TYPE_PAGEVIEW));
  // Always report as interactive
  listBuilder.add(new BasicNameValuePair(PARAM_IS_NON_INTERACTIVE, VALUE_FALSE));
  // Add a randomly generated cache buster
  listBuilder.add(new BasicNameValuePair(PARAM_CACHEBUSTER, Long.toString(random.nextLong())));

  // Event information
  listBuilder.add(new BasicNameValuePair(PARAM_EVENT_TYPE, eventType));
  listBuilder.add(new BasicNameValuePair(PARAM_EVENT_NAME, eventName));
  if (clientHostname.isPresent() && !clientHostname.get().isEmpty()) {
    listBuilder.add(new BasicNameValuePair(PARAM_HOSTNAME, clientHostname.get()));
  }

  // User information
  listBuilder.add(new BasicNameValuePair(PARAM_CLIENT_ID, clientId));
  if (projectNumberHash.isPresent() && !projectNumberHash.get().isEmpty()) {
    listBuilder.add(new BasicNameValuePair(PARAM_PROJECT_NUM_HASH, projectNumberHash.get()));
  }
  if (billingIdHash.isPresent() && !billingIdHash.get().isEmpty()) {
    listBuilder.add(new BasicNameValuePair(PARAM_BILLING_ID_HASH, billingIdHash.get()));
  }
  listBuilder.add(new BasicNameValuePair(PARAM_USER_SIGNED_IN, toValue(isUserSignedIn)));
  listBuilder.add(new BasicNameValuePair(PARAM_USER_INTERNAL, toValue(isUserInternal)));
  if (isUserTrialEligible.isPresent()) {
    listBuilder.add(new BasicNameValuePair(PARAM_USER_TRIAL_ELIGIBLE, toValue(isUserTrialEligible.get())));
  }

  // Virtual page information
  listBuilder.add(new BasicNameValuePair(PARAM_IS_VIRTUAL, VALUE_TRUE));
  listBuilder.add(new BasicNameValuePair(PARAM_PAGE, virtualPageName));
  if (!virtualPageTitle.isEmpty()) {
    listBuilder.add(new BasicNameValuePair(PARAM_PAGE_TITLE, virtualPageTitle));
  }
  return listBuilder.build();
}
From source file:org.apache.hadoop.security.authentication.util.TestZKSignerSecretProvider.java
/**
 * @param order:
 *          1: secretProviderA wins both realRollSecret races
 *          2: secretProviderA wins 1st race, B wins 2nd
 * @throws Exception
 */
public void testMultiple(int order) throws Exception {
  long seedA = System.currentTimeMillis();
  Random rand = new Random(seedA);
  byte[] secretA2 = Long.toString(rand.nextLong()).getBytes();
  byte[] secretA1 = Long.toString(rand.nextLong()).getBytes();
  byte[] secretA3 = Long.toString(rand.nextLong()).getBytes();
  byte[] secretA4 = Long.toString(rand.nextLong()).getBytes();
  // use the same seed so we can predict the RNG
  long seedB = System.currentTimeMillis() + rand.nextLong();
  rand = new Random(seedB);
  byte[] secretB2 = Long.toString(rand.nextLong()).getBytes();
  byte[] secretB1 = Long.toString(rand.nextLong()).getBytes();
  byte[] secretB3 = Long.toString(rand.nextLong()).getBytes();
  byte[] secretB4 = Long.toString(rand.nextLong()).getBytes();
  MockZKSignerSecretProvider secretProviderA = spy(new MockZKSignerSecretProvider(seedA));
  MockZKSignerSecretProvider secretProviderB = spy(new MockZKSignerSecretProvider(seedB));
  Properties config = new Properties();
  config.setProperty(ZKSignerSecretProvider.ZOOKEEPER_CONNECTION_STRING, zkServer.getConnectString());
  config.setProperty(ZKSignerSecretProvider.ZOOKEEPER_PATH, "/secret");
  try {
    secretProviderA.init(config, getDummyServletContext(), rolloverFrequency);
    secretProviderB.init(config, getDummyServletContext(), rolloverFrequency);

    byte[] currentSecretA = secretProviderA.getCurrentSecret();
    byte[][] allSecretsA = secretProviderA.getAllSecrets();
    byte[] currentSecretB = secretProviderB.getCurrentSecret();
    byte[][] allSecretsB = secretProviderB.getAllSecrets();
    Assert.assertArrayEquals(secretA1, currentSecretA);
    Assert.assertArrayEquals(secretA1, currentSecretB);
    Assert.assertEquals(2, allSecretsA.length);
    Assert.assertEquals(2, allSecretsB.length);
    Assert.assertArrayEquals(secretA1, allSecretsA[0]);
    Assert.assertArrayEquals(secretA1, allSecretsB[0]);
    Assert.assertNull(allSecretsA[1]);
    Assert.assertNull(allSecretsB[1]);
    verify(secretProviderA, timeout(timeout).atLeastOnce()).rollSecret();
    verify(secretProviderB, timeout(timeout).atLeastOnce()).rollSecret();
    secretProviderA.realRollSecret();
    secretProviderB.realRollSecret();

    currentSecretA = secretProviderA.getCurrentSecret();
    allSecretsA = secretProviderA.getAllSecrets();
    Assert.assertArrayEquals(secretA2, currentSecretA);
    Assert.assertEquals(2, allSecretsA.length);
    Assert.assertArrayEquals(secretA2, allSecretsA[0]);
    Assert.assertArrayEquals(secretA1, allSecretsA[1]);

    currentSecretB = secretProviderB.getCurrentSecret();
    allSecretsB = secretProviderB.getAllSecrets();
    Assert.assertArrayEquals(secretA2, currentSecretB);
    Assert.assertEquals(2, allSecretsA.length);
    Assert.assertArrayEquals(secretA2, allSecretsB[0]);
    Assert.assertArrayEquals(secretA1, allSecretsB[1]);
    verify(secretProviderA, timeout(timeout).atLeast(2)).rollSecret();
    verify(secretProviderB, timeout(timeout).atLeastOnce()).rollSecret();

    switch (order) {
    case 1:
      secretProviderA.realRollSecret();
      secretProviderB.realRollSecret();
      secretProviderA.realRollSecret();
      secretProviderB.realRollSecret();
      break;
    case 2:
      secretProviderB.realRollSecret();
      secretProviderA.realRollSecret();
      secretProviderB.realRollSecret();
      secretProviderA.realRollSecret();
      break;
    default:
      throw new Exception("Invalid order selected");
    }

    currentSecretA = secretProviderA.getCurrentSecret();
    allSecretsA = secretProviderA.getAllSecrets();
    currentSecretB = secretProviderB.getCurrentSecret();
    allSecretsB = secretProviderB.getAllSecrets();
    Assert.assertArrayEquals(currentSecretA, currentSecretB);
    Assert.assertEquals(2, allSecretsA.length);
    Assert.assertEquals(2, allSecretsB.length);
    Assert.assertArrayEquals(allSecretsA[0], allSecretsB[0]);
    Assert.assertArrayEquals(allSecretsA[1], allSecretsB[1]);
    switch (order) {
    case 1:
      Assert.assertArrayEquals(secretA4, allSecretsA[0]);
      break;
    case 2:
      Assert.assertArrayEquals(secretB4, allSecretsA[0]);
      break;
    }
  } finally {
    secretProviderB.destroy();
    secretProviderA.destroy();
  }
}
From source file:org.apache.hadoop.hdfs.server.datanode.TestBatchIbr.java
static void runIbrTest(final long ibrInterval) throws Exception {
  final ExecutorService executor = createExecutor();
  final Random ran = new Random();
  final Configuration conf = newConf(ibrInterval);
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATANODES).build();
  final DistributedFileSystem dfs = cluster.getFileSystem();

  try {
    final String dirPathString = "/dir";
    final Path dir = new Path(dirPathString);
    dfs.mkdirs(dir);

    // start testing
    final long testStartTime = Time.monotonicNow();
    final ExecutorCompletionService<Path> createService = new ExecutorCompletionService<>(executor);
    final AtomicLong createFileTime = new AtomicLong();
    final AtomicInteger numBlockCreated = new AtomicInteger();

    // create files
    for (int i = 0; i < NUM_FILES; i++) {
      createService.submit(new Callable<Path>() {
        @Override
        public Path call() throws Exception {
          final long start = Time.monotonicNow();
          try {
            final long seed = ran.nextLong();
            final int numBlocks = ran.nextInt(MAX_BLOCK_NUM) + 1;
            numBlockCreated.addAndGet(numBlocks);
            return createFile(dir, numBlocks, seed, dfs);
          } finally {
            createFileTime.addAndGet(Time.monotonicNow() - start);
          }
        }
      });
    }

    // verify files
    final ExecutorCompletionService<Boolean> verifyService = new ExecutorCompletionService<>(executor);
    final AtomicLong verifyFileTime = new AtomicLong();
    for (int i = 0; i < NUM_FILES; i++) {
      final Path file = createService.take().get();
      verifyService.submit(new Callable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
          final long start = Time.monotonicNow();
          try {
            return verifyFile(file, dfs);
          } finally {
            verifyFileTime.addAndGet(Time.monotonicNow() - start);
          }
        }
      });
    }
    for (int i = 0; i < NUM_FILES; i++) {
      Assert.assertTrue(verifyService.take().get());
    }
    final long testEndTime = Time.monotonicNow();

    LOG.info("ibrInterval=" + ibrInterval + " ("
        + toConfString(DFS_BLOCKREPORT_INCREMENTAL_INTERVAL_MSEC_KEY, conf)
        + "), numBlockCreated=" + numBlockCreated);
    LOG.info("duration=" + toSecondString(testEndTime - testStartTime)
        + ", createFileTime=" + toSecondString(createFileTime.get())
        + ", verifyFileTime=" + toSecondString(verifyFileTime.get()));
    LOG.info("NUM_FILES=" + NUM_FILES + ", MAX_BLOCK_NUM=" + MAX_BLOCK_NUM
        + ", BLOCK_SIZE=" + BLOCK_SIZE + ", NUM_THREADS=" + NUM_THREADS
        + ", NUM_DATANODES=" + NUM_DATANODES);
    logIbrCounts(cluster.getDataNodes());
  } finally {
    executor.shutdown();
    cluster.shutdown();
  }
}
From source file:info.debatty.java.datasets.gaussian.Dataset.java
/**
 *
 */
public Dataset() {
  this.centers = new ArrayList<Center>();
  Random rand = new Random();
  this.random_seed = rand.nextLong();
}
From source file:com.github.lukaszbudnik.dqueue.QueueClientPerformanceTest.java
@Test
public void doIt5Filters() throws ExecutionException, InterruptedException {
  byte[] data = new byte[2045];
  Random r = new Random();
  r.nextBytes(data);
  ByteBuffer buffer = ByteBuffer.wrap(data);
  Map<String, String> filters = ImmutableMap.of(
      // f1
      "f1", Long.toHexString(r.nextLong()),
      // f2
      "f2", Long.toHexString(r.nextLong()),
      // f3
      "f3", Long.toHexString(r.nextLong()),
      // f4
      "f4", Long.toHexString(r.nextLong()),
      // f5
      "f5", Long.toHexString(r.nextLong()));

  IntStream.range(0, NUMBER_OF_ITERATIONS).forEach((i) -> {
    UUID startTime = UUIDs.timeBased();
    Future<UUID> id = queueClient.publish(new Item(startTime, buffer, filters));
    try {
      Assert.assertEquals(startTime, id.get());
    } catch (Exception e) {
      fail(e.getMessage());
    }
  });

  IntStream.range(0, NUMBER_OF_ITERATIONS).forEach((i) -> {
    Future<Optional<Item>> itemFuture = queueClient.consume(filters);
    Optional<Item> item = null;
    try {
      item = itemFuture.get();
    } catch (Exception e) {
      fail(e.getMessage());
    }
    assertTrue(item.isPresent());
  });
}
From source file:org.chiba.xml.xforms.connector.serializer.FormDataSerializer.java
/**
 * Serialize instance into multipart/form-data stream as defined in
 * http://www.w3.org/TR/xforms/slice11.html#serialize-form-data
 *
 * @param submission
 * @param instance
 * @param stream
 * @param defaultEncoding
 * @throws Exception on error
 */
public void serialize(Submission submission, Node instance, OutputStream stream, String defaultEncoding)
    throws Exception {
  // sanity checks
  if (instance == null)
    return;

  switch (instance.getNodeType()) {
  case Node.ELEMENT_NODE:
    break;
  case Node.DOCUMENT_NODE:
    instance = ((Document) instance).getDocumentElement();
    break;
  default:
    return;
  }

  String encoding = defaultEncoding;
  if (submission.getEncoding() != null) {
    encoding = submission.getEncoding();
  }

  // generate boundary
  Random rnd = new Random(System.currentTimeMillis());
  String boundary = DigestUtils.md5Hex(getClass().getName() + rnd.nextLong());

  // serialize the instance
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  PrintWriter writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(bos, encoding)));
  serializeElement(writer, (Element) instance, boundary, encoding);
  writer.print("\r\n--" + boundary + "--");
  writer.flush();

  // write to the stream
  String header = "Content-Type: multipart/form-data;\r\n"
      + "\tcharset=\"" + encoding + "\";\r\n"
      + "\tboundary=\"" + boundary + "\";\r\n"
      + "Content-Length: " + bos.size() + "\r\n\r\n";
  stream.write(header.getBytes(encoding));
  bos.writeTo(stream);
}