List of usage examples for java.util Random nextLong
public long nextLong()
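Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence. The result may be negative, and because Random uses a seed with only 48 bits, nextLong() will not return all possible long values. A minimal sketch of typical usage (names are illustrative, not taken from the examples below):

import java.util.Random;

public class NextLongDemo {
    public static void main(String[] args) {
        Random random = new Random();

        // Full-range value: anywhere in [Long.MIN_VALUE, Long.MAX_VALUE].
        long raw = random.nextLong();

        // A common way to derive a non-negative value in [0, bound).
        // Math.floorMod avoids the Math.abs(Long.MIN_VALUE) overflow trap.
        long bound = 1_000_000L;
        long bounded = Math.floorMod(raw, bound);

        System.out.println("raw = " + raw + ", bounded = " + bounded);
    }
}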
From source file:de.betterform.connector.serializer.FormDataSerializer.java
/**
 * Serialize instance into multipart/form-data stream as defined in
 * http://www.w3.org/TR/xforms/slice11.html#serialize-form-data
 *
 * @param submission
 * @param instance
 * @param wrapper
 * @param defaultEncoding
 * @throws Exception on error
 */
public void serialize(Submission submission, Node instance, SerializerRequestWrapper wrapper,
        String defaultEncoding) throws Exception {
    // sanity checks
    if (instance == null) {
        return;
    }

    switch (instance.getNodeType()) {
    case Node.ELEMENT_NODE:
    case Node.TEXT_NODE:
        break;
    case Node.DOCUMENT_NODE:
        instance = ((Document) instance).getDocumentElement();
        break;
    default:
        return;
    }

    String encoding = defaultEncoding;
    if (submission.getEncoding() != null) {
        encoding = submission.getEncoding();
    }

    // generate boundary
    Random rnd = new Random(System.currentTimeMillis());
    String boundary = DigestUtils.md5Hex(getClass().getName() + rnd.nextLong());

    // serialize the instance
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    PrintWriter writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(bos, encoding)));

    if (instance.getNodeType() == Node.ELEMENT_NODE) {
        serializeElement(writer, (Element) instance, boundary, encoding);
    } else {
        writer.print(instance.getTextContent());
    }

    writer.print("\r\n--" + boundary + "--");
    writer.flush();
    bos.writeTo(wrapper.getBodyStream());
    wrapper.addHeader("internal-boundary-mark", boundary);
}
From source file:org.apache.hadoop.mapreduce.lib.input.TestCombineTextInputFormat.java
@Test(timeout = 10000)
public void testFormat() throws Exception {
    Job job = Job.getInstance(new Configuration(defaultConf));

    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    // create files with various lengths
    createFiles(length, numFiles, random);

    // create a combined split for the files
    CombineTextInputFormat format = new CombineTextInputFormat();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        List<InputSplit> splits = format.getSplits(job);
        LOG.info("splitting: got = " + splits.size());

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.size());
        InputSplit split = splits.get(0);
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
        RecordReader<LongWritable, Text> reader = format.createRecordReader(split, context);
        assertEquals("reader class is CombineFileRecordReader.", CombineFileRecordReader.class,
                reader.getClass());
        MapContext<LongWritable, Text, LongWritable, Text> mcontext =
                new MapContextImpl<LongWritable, Text, LongWritable, Text>(job.getConfiguration(),
                        context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);

        try {
            int count = 0;
            while (reader.nextKeyValue()) {
                LongWritable key = reader.getCurrentKey();
                assertNotNull("Key should not be null.", key);
                Text value = reader.getCurrentValue();
                final int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.debug("split=" + split + " count=" + count);
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
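The seed handling at the top of this test (draw a seed with nextLong(), log it, then re-seed) is a recurring idiom in the Hadoop examples below: each run stays pseudo-random, yet a failing run can be reproduced by hard-coding the logged seed. Distilled to its core (a sketch, not code from the test):

    Random random = new Random();
    long seed = random.nextLong();        // or a hard-coded seed when reproducing a failure
    System.out.println("seed = " + seed); // log it so the run can be replayed
    random.setSeed(seed);                 // everything drawn from here on is replayable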
From source file:edu.vt.vbi.patric.portlets.SingleFIGfam.java
@SuppressWarnings("unchecked") public void serveResource(ResourceRequest request, ResourceResponse response) throws PortletException, IOException { String callType = request.getParameter("callType"); if (callType != null) { Map<String, String> key = new HashMap<>(); if (callType.equals("saveState")) { String genomeIds = request.getParameter("genomeIds"); String familyIds = request.getParameter("familyIds"); String familyType = request.getParameter("familyType"); key.put("genomeIds", genomeIds); key.put("familyIds", familyIds); key.put("familyType", familyType); Random g = new Random(); long pk = g.nextLong(); SessionHandler.getInstance().set(SessionHandler.PREFIX + pk, jsonWriter.writeValueAsString(key)); PrintWriter writer = response.getWriter(); writer.write("" + pk); writer.close();//from w w w . j av a2 s . com } else if (callType.equals("getData")) { Map data = processFeatureTab(request); int numFound = (Integer) data.get("numFound"); List<GenomeFeature> features = (List<GenomeFeature>) data.get("features"); JSONArray docs = new JSONArray(); for (GenomeFeature feature : features) { docs.add(feature.toJSONObject()); } JSONObject jsonResult = new JSONObject(); jsonResult.put("results", docs); jsonResult.put("total", numFound); response.setContentType("application/json"); PrintWriter writer = response.getWriter(); jsonResult.writeJSONString(writer); writer.close(); } else if (callType.equals("download")) { List<String> tableHeader = new ArrayList<>(); List<String> tableField = new ArrayList<>(); JSONArray tableSource = new JSONArray(); String fileName = "FeatureTable"; String fileFormat = request.getParameter("fileformat"); // features Map data = processFeatureTab(request); List<GenomeFeature> features = (List<GenomeFeature>) data.get("features"); for (GenomeFeature feature : features) { tableSource.add(feature.toJSONObject()); } tableHeader.addAll(DownloadHelper.getHeaderForFeatures()); tableField.addAll(DownloadHelper.getFieldsForFeatures()); ExcelHelper excel = new ExcelHelper("xssf", tableHeader, tableField, tableSource); excel.buildSpreadsheet(); if (fileFormat.equalsIgnoreCase("xlsx")) { response.setContentType("application/octetstream"); response.addProperty("Content-Disposition", "attachment; filename=\"" + fileName + "." + fileFormat + "\""); excel.writeSpreadsheettoBrowser(response.getPortletOutputStream()); } else if (fileFormat.equalsIgnoreCase("txt")) { response.setContentType("application/octetstream"); response.addProperty("Content-Disposition", "attachment; filename=\"" + fileName + "." + fileFormat + "\""); response.getPortletOutputStream().write(excel.writeToTextFile().getBytes()); } } } }
From source file:org.apache.hadoop.security.authentication.util.TestZKSignerSecretProvider.java
@Test
// Test just one ZKSignerSecretProvider to verify that it works in the
// simplest case
public void testOne() throws Exception {
    // use the same seed so we can predict the RNG
    long seed = System.currentTimeMillis();
    Random rand = new Random(seed);
    byte[] secret2 = Long.toString(rand.nextLong()).getBytes();
    byte[] secret1 = Long.toString(rand.nextLong()).getBytes();
    byte[] secret3 = Long.toString(rand.nextLong()).getBytes();
    MockZKSignerSecretProvider secretProvider = spy(new MockZKSignerSecretProvider(seed));
    Properties config = new Properties();
    config.setProperty(ZKSignerSecretProvider.ZOOKEEPER_CONNECTION_STRING, zkServer.getConnectString());
    config.setProperty(ZKSignerSecretProvider.ZOOKEEPER_PATH, "/secret");
    try {
        secretProvider.init(config, getDummyServletContext(), rolloverFrequency);

        byte[] currentSecret = secretProvider.getCurrentSecret();
        byte[][] allSecrets = secretProvider.getAllSecrets();
        Assert.assertArrayEquals(secret1, currentSecret);
        Assert.assertEquals(2, allSecrets.length);
        Assert.assertArrayEquals(secret1, allSecrets[0]);
        Assert.assertNull(allSecrets[1]);
        verify(secretProvider, timeout(timeout).atLeastOnce()).rollSecret();
        secretProvider.realRollSecret();

        currentSecret = secretProvider.getCurrentSecret();
        allSecrets = secretProvider.getAllSecrets();
        Assert.assertArrayEquals(secret2, currentSecret);
        Assert.assertEquals(2, allSecrets.length);
        Assert.assertArrayEquals(secret2, allSecrets[0]);
        Assert.assertArrayEquals(secret1, allSecrets[1]);
        verify(secretProvider, timeout(timeout).atLeast(2)).rollSecret();
        secretProvider.realRollSecret();

        currentSecret = secretProvider.getCurrentSecret();
        allSecrets = secretProvider.getAllSecrets();
        Assert.assertArrayEquals(secret3, currentSecret);
        Assert.assertEquals(2, allSecrets.length);
        Assert.assertArrayEquals(secret3, allSecrets[0]);
        Assert.assertArrayEquals(secret2, allSecrets[1]);
        verify(secretProvider, timeout(timeout).atLeast(3)).rollSecret();
        secretProvider.realRollSecret();
    } finally {
        secretProvider.destroy();
    }
}
From source file:com.ebay.erl.mobius.core.mapred.MobiusInputSampler.java
@Override
public Object[] getSample(InputFormat inf, JobConf job) throws IOException {
    // the following codes are copied from {@link InputSampler#RandomSampler},
    // but require some modifications.

    InputSplit[] splits = inf.getSplits(job, job.getNumMapTasks());
    ArrayList<DataJoinKey> samples = new ArrayList<DataJoinKey>(this.numSamples);
    int splitsToSample = Math.min(this.maxSplitsSampled, splits.length);

    Random r = new Random();
    long seed = r.nextLong();
    r.setSeed(seed);

    // get Sorters
    Sorter[] sorters = null;
    if (job.get(ConfigureConstants.SORTERS, null) != null) {
        // total sort job
        sorters = (Sorter[]) SerializableUtil.deserializeFromBase64(job.get(ConfigureConstants.SORTERS), job);
    } else {
        // there is no sorter, should be a reducer/join job
        Column[] keys = (Column[]) SerializableUtil
                .deserializeFromBase64(job.get(ConfigureConstants.ALL_GROUP_KEY_COLUMNS), job);
        sorters = new Sorter[keys.length];
        for (int i = 0; i < keys.length; i++) {
            sorters[i] = new Sorter(keys[i].getInputColumnName(), Ordering.ASC);
        }
    }

    long proportion = 10L;
    while ((int) (this.freq * proportion) == 0) {
        proportion = proportion * 10;
    }
    proportion = 5L * proportion;

    // shuffle splits
    for (int i = 0; i < splits.length; ++i) {
        InputSplit tmp = splits[i];
        int j = r.nextInt(splits.length);
        splits[i] = splits[j];
        splits[j] = tmp;
    }

    SamplingOutputCollector collector = new SamplingOutputCollector();
    for (int i = 0; i < splitsToSample || (i < splits.length && samples.size() < numSamples); i++) {
        LOGGER.info("Sampling from split #" + (i + 1) + ", collected samples:" + samples.size());
        RecordReader<WritableComparable, WritableComparable> reader = inf.getRecordReader(splits[i], job,
                Reporter.NULL);
        WritableComparable key = reader.createKey();
        WritableComparable value = reader.createValue();

        if (!(inf instanceof MobiusDelegatingInputFormat)) {
            // not mobius delegating input format, so the CURRENT_DATASET_ID
            // will not be set by inf#getRecordReader, we set them here.
            //
            // set the current dataset id, as the AbstractMobiusMapper#configure
            // method needs this property.
            job.set(ConfigureConstants.CURRENT_DATASET_ID, job.get(ConfigureConstants.ALL_DATASET_IDS));
        }
        Byte datasetID = Byte.valueOf(job.get(ConfigureConstants.CURRENT_DATASET_ID));
        LOGGER.info("Samples coming from dataset: " + datasetID.toString());

        AbstractMobiusMapper mapper = this.getMapper(inf, splits[i], job);
        mapper.configure(job);

        // reading elements from one split
        long readElement = 0;
        while (reader.next(key, value)) {
            collector.clear();
            Tuple tuple = mapper.parse(key, value);

            readElement++;
            if (readElement > (((long) numSamples) * ((long) proportion))) {
                // a split might be very big (ex: a large gz file),
                // so we only read a bounded number of records from it.
                break;
            }

            if (r.nextDouble() <= freq) {
                if (samples.size() < numSamples) {
                    mapper.joinmap(key, value, collector, Reporter.NULL);
                    // joinmap function might generate more than one output key
                    // per key input.
                    for (Tuple t : collector.getOutKey()) {
                        Tuple mt = Tuple.merge(tuple, t);
                        DataJoinKey nkey = this.getKey(mt, sorters, datasetID, mapper, job);
                        samples.add(nkey);
                    }
                } else {
                    // When exceeding the maximum number of samples, replace
                    // a random element with this one, then adjust the
                    // frequency to reflect the possibility of existing
                    // elements being pushed out
                    mapper.joinmap(key, value, collector, Reporter.NULL);
                    for (Tuple t : collector.getOutKey()) {
                        int ind = r.nextInt(numSamples);
                        if (ind != numSamples) {
                            Tuple mt = Tuple.merge(tuple, t);
                            DataJoinKey nkey = this.getKey(mt, sorters, datasetID, mapper, job);
                            samples.set(ind, nkey);
                        }
                    }
                    freq *= (numSamples - collector.getOutKey().size()) / (double) numSamples;
                }
                key = reader.createKey();
                value = reader.createValue();
            }
        }
        reader.close();
    }
    LOGGER.info("Samples have been collected, return.");
    return samples.toArray();
}
From source file:at.salzburgresearch.kmt.zkconfig.ZookeeperConfigurationTest.java
@Test
public void testLong() throws Exception {
    Configuration config = new ZookeeperConfiguration(zkConnection, 5000, "/test");

    final String key = UUID.randomUUID().toString();
    final Random random = new Random();
    final long val1 = random.nextLong();
    final Long val2 = random.nextLong();

    assertThat(config.getProperty(key), nullValue());

    config.setProperty(key, val1);
    assertEquals(val1, config.getLong(key));
    assertEquals(Long.valueOf(val1), config.getLong(key, val2));

    config.setProperty(key, val2);
    assertEquals(val2.longValue(), config.getLong(key));
    assertEquals(val2, config.getLong(key, Long.valueOf(val1)));
}
From source file:org.drools.util.BinaryHeapPriorityQueueTest.java
public void testOptimised() {
    final Random random = new Random();
    final List items = new LinkedList();

    final Queue queue = new BinaryHeapFifoQueue(NaturalComparator.INSTANCE, 100000);

    for (int i = 0; i < 100000; ++i) {
        items.add(new LongQueueable(random.nextLong()));
    }

    final long startEnqueue = System.currentTimeMillis();
    for (final Iterator i = items.iterator(); i.hasNext();) {
        queue.enqueue((Queueable) i.next());
    }
    final long elapsedEnqueue = System.currentTimeMillis() - startEnqueue;

    final long startDequeue = System.currentTimeMillis();
    for (final Iterator i = items.iterator(); i.hasNext();) {
        ((Queueable) i.next()).dequeue();
    }
    // while (!queue.isEmpty()) {
    //     queue.dequeue();
    // }
    final long elapsedDequeue = System.currentTimeMillis() - startDequeue;

    System.out.println("elapsedEnqueue = " + elapsedEnqueue);
    System.out.println("elapsedDequeue = " + elapsedDequeue);
}
From source file:org.apache.hadoop.hdfs.TestGetBlocks.java
@Test
public void testBlockKey() {
    Map<Block, Long> map = new HashMap<>();
    final Random RAN = new Random();
    final long seed = RAN.nextLong();
    System.out.println("seed=" + seed);
    RAN.setSeed(seed);

    long[] blkids = new long[10];
    for (int i = 0; i < blkids.length; i++) {
        blkids[i] = 1000L + RAN.nextInt(100000);
        map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
    }
    System.out.println("map=" + map.toString().replace(",", "\n "));

    for (long blkid : blkids) {
        Block b = new Block(blkid, 0, GenerationStamp.GRANDFATHER_GENERATION_STAMP);
        Long v = map.get(b);
        System.out.println(b + " => " + v);
        assertEquals(v.longValue(), blkid);
    }
}
From source file:com.jivesoftware.os.routing.bird.http.client.StableShuffleStrategy.java
@Override
public int[] getClients(ConnectionDescriptor[] connectionDescriptors) {
    Random random = new Random(seed);
    long[] hash = new long[connectionDescriptors.length];
    int[] indexes = new int[connectionDescriptors.length];
    for (int i = 0; i < hash.length; i++) {
        hash[i] = random.nextLong();
        indexes[i] = i;
    }
    mirrorSort(hash, indexes);
    return indexes;
}
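Because the Random here is constructed from a fixed seed field, every call draws the same sequence of nextLong() hashes, so sorting the index array by those hashes yields the same permutation each time: a deterministic, seed-controlled shuffle. mirrorSort is a project-local helper; a stand-in sketch of the same idea using only the JDK:

import java.util.Arrays;
import java.util.Comparator;
import java.util.Random;

public class StableShuffleSketch {
    public static void main(String[] args) {
        long seed = 42L; // fixed seed => identical order on every call
        int n = 5;
        long[] hash = new long[n];
        Integer[] indexes = new Integer[n];
        Random random = new Random(seed);
        for (int i = 0; i < n; i++) {
            hash[i] = random.nextLong();
            indexes[i] = i;
        }
        // sort indexes by their per-index hashes, mirroring what
        // mirrorSort does to the paired arrays
        Arrays.sort(indexes, Comparator.comparingLong(i -> hash[i]));
        System.out.println(Arrays.toString(indexes)); // same across runs
    }
}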
From source file:org.apache.hadoop.fs.TestFileSystem.java
public static void createControlFile(FileSystem fs, long megaBytes, int numFiles, long seed) throws Exception {
    LOG.info("creating control file: " + megaBytes + " bytes, " + numFiles + " files");

    Path controlFile = new Path(CONTROL_DIR, "files");
    fs.delete(controlFile, true);

    Random random = new Random(seed);

    SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, controlFile, UTF8.class,
            LongWritable.class, CompressionType.NONE);

    long totalSize = 0;
    long maxSize = ((megaBytes / numFiles) * 2) + 1;
    try {
        while (totalSize < megaBytes) {
            UTF8 name = new UTF8(Long.toString(random.nextLong()));

            long size = random.nextLong();
            if (size < 0)
                size = -size;
            size = size % maxSize;

            //LOG.info(" adding: name="+name+" size="+size);

            writer.append(name, new LongWritable(size));

            totalSize += size;
        }
    } finally {
        writer.close();
    }
    LOG.info("created control file for: " + totalSize + " bytes");
}
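One detail worth noting in the size computation above: negating a random long has an edge case, since -Long.MIN_VALUE overflows back to Long.MIN_VALUE, leaving size negative and making size % maxSize negative as well. A sketch of a variant that sidesteps the trap (illustrative, not part of the Hadoop test):

    long size = Math.floorMod(random.nextLong(), maxSize); // always in [0, maxSize)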