List of usage examples for java.util.Random.nextDouble()
public double nextDouble()
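nextDouble() returns the next pseudorandom, uniformly distributed double value between 0.0 (inclusive) and 1.0 (exclusive) from the generator's sequence. Before the project examples below, here is a minimal, self-contained sketch (not taken from any of those projects; the class name and range values are illustrative only) showing the raw value and the usual way of scaling it to another range:

import java.util.Random;

public class NextDoubleSketch {
    public static void main(String[] args) {
        Random random = new Random(42L); // fixed seed for a reproducible sequence

        // Raw value: uniformly distributed in [0.0, 1.0)
        double raw = random.nextDouble();

        // Scaled to an arbitrary range [min, max), e.g. 5.0 to 15.0
        double min = 5.0;
        double max = 15.0;
        double scaled = min + random.nextDouble() * (max - min);

        System.out.println("raw = " + raw + ", scaled = " + scaled);
    }
}

The same scaling pattern (offset plus nextDouble() times the range width) appears in several of the examples that follow.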
From source file:com.caseystella.analytics.outlier.streaming.mad.SketchyMovingMADTest.java
@Test
public void testSketchyMovingMAD() throws IOException {
    Random r = new Random(0);
    List<DataPoint> points = new ArrayList<>();
    DescriptiveStatistics stats = new DescriptiveStatistics();
    DescriptiveStatistics medianStats = new DescriptiveStatistics();
    OutlierConfig config = JSONUtil.INSTANCE.load(madConfig, OutlierConfig.class);
    SketchyMovingMAD madAlgo = ((SketchyMovingMAD) config.getSketchyOutlierAlgorithm()).withConfig(config);
    int i = 0;
    for (i = 0; i < 10000; ++i) {
        double val = r.nextDouble() * 1000 - 10000;
        stats.addValue(val);
        DataPoint dp = (new DataPoint(i, val, null, "foo"));
        madAlgo.analyze(dp);
        points.add(dp);
    }
    for (DataPoint dp : points) {
        medianStats.addValue(Math.abs(dp.getValue() - stats.getPercentile(50)));
    }
    double mad = medianStats.getPercentile(50);
    double median = stats.getPercentile(50);
    {
        double val = getValAtModifiedZScore(3.6, mad, median);
        System.out.println("MODERATE => " + val);
        DataPoint dp = (new DataPoint(i++, val, null, "foo"));
        Severity s = madAlgo.analyze(dp).getSeverity();
        Assert.assertTrue(s == Severity.MODERATE_OUTLIER);
    }
    {
        double val = getValAtModifiedZScore(6, mad, median);
        System.out.println("SEVERE => " + val);
        DataPoint dp = (new DataPoint(i++, val, null, "foo"));
        Severity s = madAlgo.analyze(dp).getSeverity();
        Assert.assertTrue(s == Severity.SEVERE_OUTLIER);
    }
    Assert.assertTrue(madAlgo.getMedianDistributions().get("foo").getAmount() <= 110);
    Assert.assertTrue(madAlgo.getMedianDistributions().get("foo").getChunks().size() <= 12);
}
From source file:com.google.android.apps.santatracker.presentquest.PlacesIntentService.java
private LatLng randomLatLng(LatLng center, int radius) {
    // Based on http://gis.stackexchange.com/questions/25877/how-to-generate-random-locations-nearby-my-location
    Random random = new Random();
    double radiusInDegrees = radius / 111000f;
    double u = random.nextDouble();
    double v = random.nextDouble();
    double w = radiusInDegrees * Math.sqrt(u);
    double t = 2 * Math.PI * v;
    double x = w * Math.cos(t);
    double y = w * Math.sin(t);
    double new_x = x / Math.cos(center.latitude);
    return new LatLng(y + center.latitude, new_x + center.longitude);
}
From source file:com.example.geomesa.lambda.LambdaQuickStart.java
@Override
public void run() {
    try {
        // create the schema
        final String sftName = "lambda-quick-start";
        final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
        SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
        if (ds.getSchema(sftName) != null) {
            out.println("'" + sftName + "' feature type already exists - quick start will not work correctly");
            out.println("Please delete it and re-run");
            return;
        }
        out.println("Creating feature type '" + sftName + "'");
        ds.createSchema(sft);
        out.println("Feature type created - register the layer '" + sftName
                + "' in geoserver then hit <enter> to continue");
        in.read();

        SimpleFeatureWriter writer = ds.getFeatureWriterAppend(sftName, Transaction.AUTO_COMMIT);

        out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");

        // creates and adds SimpleFeatures to the producer every 1/5th of a second
        final int COUNT = 1000;
        final int MIN_X = -180;
        final int MAX_X = 180;
        final int MIN_Y = -90;
        final int MAX_Y = 90;
        final int DX = 2;
        final String[] PEOPLE_NAMES = { "James", "John", "Peter", "Hannah", "Claire", "Gabriel" };
        final long SECONDS_PER_YEAR = 365L * 24L * 60L * 60L;
        ZonedDateTime MIN_DATE = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC);
        final Random random = new Random();

        int numUpdates = (MAX_X - MIN_X) / DX;
        for (int j = 0; j < numUpdates; j++) {
            for (int i = 0; i < COUNT; i++) {
                SimpleFeature feature = writer.next();
                feature.setAttribute(0, PEOPLE_NAMES[i % PEOPLE_NAMES.length]); // name
                feature.setAttribute(1, (int) Math.round(random.nextDouble() * 110)); // age
                feature.setAttribute(2, Date.from(MIN_DATE
                        .plusSeconds((int) Math.round(random.nextDouble() * SECONDS_PER_YEAR)).toInstant())); // dtg
                feature.setAttribute(3, "POINT(" + (MIN_X + (DX * j)) + " "
                        + (MIN_Y + ((MAX_Y - MIN_Y) / ((double) COUNT)) * i) + ")"); // geom
                feature.getUserData().put(Hints.PROVIDED_FID, String.format("%04d", i));
                writer.write();
            }
            Thread.sleep(200);
        }

        writer.close();

        out.println("Waiting for expiry and persistence...");

        long total = 0, persisted = 0;
        do {
            long newTotal = (long) ds.stats().getCount(sft, Filter.INCLUDE, true).get();
            long newPersisted = (long) ((AccumuloDataStore) ds.persistence()).stats()
                    .getCount(sft, Filter.INCLUDE, true).get();
            if (newTotal != total || newPersisted != persisted) {
                total = newTotal;
                persisted = newPersisted;
                out.println("Total features: " + total + ", features persisted to Accumulo: " + persisted);
            }
            Thread.sleep(100);
        } while (persisted < COUNT || total > COUNT);
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        ds.dispose();
    }
}
From source file:mase.app.allocation.AllocationProblem.java
private double[][] generateDispersedPoints(Random rand, int num, double distThreshold, double[] min, double[] max) {
    double[][] points = new double[num][];
    int filled = 0;
    while (filled < num) {
        // generate random
        double[] candidate = new double[dimensions];
        for (int i = 0; i < candidate.length; i++) {
            candidate[i] = min[i] + rand.nextDouble() * (max[i] - min[i]);
        }
        // check if it is not similar to a previous one
        boolean ok = true;
        if (distThreshold > 0) {
            for (int i = 0; i < filled; i++) {
                if (DIST.compute(points[i], candidate) < distThreshold) {
                    ok = false;
                    break;
                }
            }
        }
        // add it
        if (ok) {
            points[filled] = candidate;
            filled++;
        }
    }
    return points;
}
From source file:edu.washington.gs.skyline.model.quantification.WeightedRegressionTest.java
public void testWeighted() {
    Random random = new Random((int) new Date().getTime());
    SimpleRegression simpleRegressionWithIntercept = new SimpleRegression(true);
    SimpleRegression simpleRegressionWithoutIntercept = new SimpleRegression(false);
    final int nPoints = 10;
    double[][] xValues = new double[nPoints][];
    double[] yValues = new double[nPoints];
    double[] weights = new double[nPoints];
    for (int i = 0; i < nPoints; i++) {
        int weight = random.nextInt(10) + 1;
        weights[i] = weight;
        double x = random.nextDouble();
        double y = random.nextDouble();
        xValues[i] = new double[] { x };
        yValues[i] = y;
        for (int w = 0; w < weight; w++) {
            simpleRegressionWithIntercept.addData(x, y);
            simpleRegressionWithoutIntercept.addData(x, y);
        }
    }
    final double epsilon = 1E-12;
    double repeatedIntercept = simpleRegressionWithIntercept.getIntercept();
    double repeatedSlope = simpleRegressionWithIntercept.getSlope();
    double[] weightedRegression = WeightedRegression.weighted(xValues, yValues, weights, true);
    assertEquals(repeatedIntercept, weightedRegression[0], epsilon);
    assertEquals(repeatedSlope, weightedRegression[1], epsilon);
    double[] weightedRegressionWithoutIntercept = WeightedRegression.weighted(xValues, yValues, weights, false);
    double repeatedSlopeWithoutIntercept = simpleRegressionWithoutIntercept.getSlope();
    assertEquals(repeatedSlopeWithoutIntercept, weightedRegressionWithoutIntercept[0], epsilon);
}
From source file:tools.RunSingleTileTest.java
private GetVectorTileRequest createRequest() {
    GetVectorTileRequest request = new GetVectorTileRequest();
    Random rand = new Random();
    TileCode code = new TileCode(5, 16, 8);
    request.setCode(code);
    request.setLayerId(KtunaxaConstant.LAYER_REFERENCE_BASE_SERVER_ID);
    request.setCrs(KtunaxaConstant.MAP_CRS);
    // request.setScale(0.006541332273339661);
    request.setPanOrigin(new Coordinate(-1.2803202237767024E7, 6306054.833527042));
    NamedStyleInfo style = new NamedStyleInfo();
    style.setName("referenceBaseStyleInfo");
    request.setStyleInfo(style);
    request.setScale(0.05233065818671729 + rand.nextDouble() / 1000);
    request.setPaintGeometries(true);
    request.setPaintLabels(false);
    request.setFilter("layer_id = 1 or layer_id = 2 or layer_id = 5 or layer_id = 6 or layer_id = 7 or "
            + "layer_id = 8 or layer_id = 9 or layer_id = 19 or layer_id = 20 or layer_id = 20 or "
            + "layer_id = 22 or layer_id = 22 or layer_id = 24 or layer_id = 25 or layer_id = 26 or "
            + "layer_id = 27 or layer_id = 28 or layer_id = 34 or layer_id = 78 or layer_id = 79 or "
            + "layer_id = 82 or layer_id = 83");
    return request;
}
From source file:com.linkedin.pinot.segments.v1.creator.OnHeapDictionariesTest.java
/**
 * Helper method to build a segment with random data as per the schema.
 *
 * @param segmentDirName Name of segment directory
 * @param segmentName Name of segment
 * @param schema Schema for segment
 * @return Schema built for the segment
 * @throws Exception
 */
private Schema buildSegment(String segmentDirName, String segmentName, Schema schema) throws Exception {
    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setOutDir(segmentDirName);
    config.setFormat(FileFormat.AVRO);
    config.setSegmentName(segmentName);

    Random random = new Random(RANDOM_SEED);
    List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
    for (int rowId = 0; rowId < NUM_ROWS; rowId++) {
        HashMap<String, Object> map = new HashMap<>();
        map.put(INT_COLUMN, random.nextInt());
        map.put(LONG_COLUMN, random.nextLong());
        map.put(FLOAT_COLUMN, random.nextFloat());
        map.put(DOUBLE_COLUMN, random.nextDouble());
        map.put(STRING_COLUMN, RandomStringUtils.randomAscii(100));
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        rows.add(genericRow);
    }

    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    driver.init(config, new GenericRowRecordReader(rows, schema));
    driver.build();

    LOGGER.info("Built segment {} at {}", segmentName, segmentDirName);
    return schema;
}
From source file:org.apache.ctakes.ytex.kernel.FoldGeneratorImpl.java
/**
 * Iterate through the labels, split instances into folds.
 *
 * @param mapClassToInstanceId
 * @param nFolds
 * @param nMinPerClass
 * @param r
 * @return list with nFolds sets of instance ids corresponding to the folds
 */
private static List<Set<Long>> createFolds(Map<String, List<Long>> mapClassToInstanceId, int nFolds,
        int nMinPerClass, Random r) {
    List<Set<Long>> folds = new ArrayList<Set<Long>>(nFolds);
    Map<String, List<Set<Long>>> mapLabelFolds = new HashMap<String, List<Set<Long>>>();
    for (Map.Entry<String, List<Long>> classToInstanceId : mapClassToInstanceId.entrySet()) {
        List<Long> instanceIds = classToInstanceId.getValue();
        Collections.shuffle(instanceIds, r);
        List<Set<Long>> classFolds = new ArrayList<Set<Long>>(nFolds);
        int blockSize = instanceIds.size() / nFolds;
        for (int i = 0; i < nFolds; i++) {
            Set<Long> foldInstanceIds = new HashSet<Long>(blockSize);
            if (instanceIds.size() <= nMinPerClass) {
                // we don't have minPerClass for the given class
                // just add all of them to each fold
                foldInstanceIds.addAll(instanceIds);
            } else if (blockSize < nMinPerClass) {
                // too few of the given class - just randomly select nMinPerClass
                double fraction = (double) nMinPerClass / (double) instanceIds.size();
                // iterate through the list, start somewhere in the middle
                int instanceIdIndex = (int) (r.nextDouble() * instanceIds.size());
                while (foldInstanceIds.size() < nMinPerClass) {
                    // go back to beginning of list if we hit the end
                    if (instanceIdIndex >= instanceIds.size()) {
                        instanceIdIndex = 0;
                    }
                    // randomly select this line
                    if (r.nextDouble() <= fraction) {
                        long instanceId = instanceIds.get(instanceIdIndex);
                        foldInstanceIds.add(instanceId);
                    }
                    // go to next line
                    instanceIdIndex++;
                }
            } else {
                int nStart = i * blockSize;
                int nEnd = (i == nFolds - 1) ? instanceIds.size() : nStart + blockSize;
                for (int instanceIdIndex = nStart; instanceIdIndex < nEnd; instanceIdIndex++) {
                    foldInstanceIds.add(instanceIds.get(instanceIdIndex));
                }
            }
            classFolds.add(foldInstanceIds);
        }
        mapLabelFolds.put(classToInstanceId.getKey(), classFolds);
    }
    for (int i = 0; i < nFolds; i++) {
        Set<Long> foldInstanceIds = new HashSet<Long>();
        for (List<Set<Long>> labelFold : mapLabelFolds.values()) {
            foldInstanceIds.addAll(labelFold.get(i));
        }
        folds.add(foldInstanceIds);
    }
    return folds;
}
From source file:com.pivotal.gemfire.tools.pulse.testbed.PropMockDataUpdater.java
private Client initMemberClient(int count, String host) {
    Client memberClient = new Client();
    Random r = new Random(System.currentTimeMillis());
    memberClient.setName("Name_" + count);
    long processCpuTime = (long) (r.nextDouble() * 100);
    memberClient.setProcessCpuTime(processCpuTime);
    memberClient.setCpuUsage(0);
    memberClient.setGets(Math.abs(r.nextInt(100)));
    memberClient.setHost(host);
    memberClient.setId(String.valueOf(1000 + count));
    memberClient.setPuts(Math.abs(r.nextInt(100)));
    memberClient.setCpus(Math.abs(r.nextInt(20)));
    memberClient.setQueueSize(Math.abs(r.nextInt(100)));
    if ((count % 2) == 0) {
        memberClient.setStatus("up");
    } else {
        memberClient.setStatus("down");
    }
    memberClient.setThreads(Math.abs(r.nextInt(100)));
    memberClient.setUptime(Math.abs(System.currentTimeMillis() - r.nextLong()));
    return memberClient;
}
From source file:org.apache.hadoop.hbase.stargate.TestScannerResource.java
int insertData(String tableName, String column, double prob) throws IOException {
    Random rng = new Random();
    int count = 0;
    HTable table = new HTable(conf, tableName);
    byte[] k = new byte[3];
    byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
    for (byte b1 = 'a'; b1 < 'z'; b1++) {
        for (byte b2 = 'a'; b2 < 'z'; b2++) {
            for (byte b3 = 'a'; b3 < 'z'; b3++) {
                if (rng.nextDouble() < prob) {
                    k[0] = b1;
                    k[1] = b2;
                    k[2] = b3;
                    Put put = new Put(k);
                    if (famAndQf.length > 1 && famAndQf[1] != null) {
                        put.add(famAndQf[0], famAndQf[1], k);
                    } else {
                        put.add(famAndQf[0], null, k);
                    }
                    table.put(put);
                    count++;
                }
            }
        }
    }
    table.flushCommits();
    return count;
}