List of usage examples for java.util.Random.nextFloat()
public float nextFloat()
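nextFloat() returns the next pseudorandom, uniformly distributed float in [0.0f, 1.0f) from the generator's sequence. A minimal sketch of the two patterns that recur in the examples below, raw calls and scaling into a target range (class and variable names here are illustrative, not from the sources):

import java.util.Random;

public class NextFloatDemo {
    public static void main(String[] args) {
        Random rand = new Random(42L); // fixed seed, so the sequence is reproducible

        float unit = rand.nextFloat();                     // uniform in [0.0f, 1.0f)
        float scaled = rand.nextFloat() * 10.0f;           // uniform in [0.0f, 10.0f)
        float shifted = 10.0f + rand.nextFloat() * 10.0f;  // uniform in [10.0f, 20.0f)

        System.out.printf("%f %f %f%n", unit, scaled, shifted);
    }
}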
From source file: io.druid.segment.IndexMergerV9WithSpatialIndexTest.java

private static IncrementalIndex makeIncrementalIndex() throws IOException {
    IncrementalIndex theIndex = new IncrementalIndex.Builder()
            .setIndexSchema(new IncrementalIndexSchema.Builder()
                    .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                    .withQueryGranularity(Granularities.DAY)
                    .withMetrics(METRIC_AGGS)
                    .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                            new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                            new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                    .build())
            .setReportParseExceptions(false)
            .setMaxRowCount(NUM_POINTS)
            .buildOnheap();

    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS,
            ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-01").toString(),
                    "dim", "foo", "lat", 0.0f, "long", 0.0f, "val", 17L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-02").getMillis(), DIMS,
            ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-02").toString(),
                    "dim", "foo", "lat", 1.0f, "long", 3.0f, "val", 29L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-03").getMillis(), DIMS,
            ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-03").toString(),
                    "dim", "foo", "lat", 4.0f, "long", 2.0f, "val", 13L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-04").getMillis(), DIMS,
            ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-04").toString(),
                    "dim", "foo", "lat", 7.0f, "long", 3.0f, "val", 91L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
            ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-05").toString(),
                    "dim", "foo", "lat", 8.0f, "long", 6.0f, "val", 47L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
            ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-05").toString(),
                    "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", "val", 101L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
            ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-05").toString(),
                    "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)));
    theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
            ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-05").toString(),
                    "lat2", 0.0f, "long2", 0.0f, "val", 13L)));

    // Add a bunch of random points; nextFloat() * 10 + 10.0 is uniform in [10.0, 20.0)
    Random rand = new Random();
    for (int i = 8; i < NUM_POINTS; i++) {
        theIndex.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-01").toString(),
                        "dim", "boo",
                        "lat", (float) (rand.nextFloat() * 10 + 10.0),
                        "long", (float) (rand.nextFloat() * 10 + 10.0),
                        "val", i)));
    }
    return theIndex;
}
From source file: io.druid.segment.IndexMergerV9WithSpatialIndexTest.java

private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) {
    try {
        IncrementalIndex first = new IncrementalIndex.Builder()
                .setIndexSchema(new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build())
                .setReportParseExceptions(false)
                .setMaxRowCount(1000)
                .buildOnheap();
        IncrementalIndex second = new IncrementalIndex.Builder()
                .setIndexSchema(new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build())
                .setReportParseExceptions(false)
                .setMaxRowCount(1000)
                .buildOnheap();
        IncrementalIndex third = new IncrementalIndex.Builder()
                .setIndexSchema(new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build())
                .setReportParseExceptions(false)
                .setMaxRowCount(NUM_POINTS)
                .buildOnheap();

        first.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-01").toString(),
                        "dim", "foo", "lat", 0.0f, "long", 0.0f, "val", 17L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-02").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-02").toString(),
                        "dim", "foo", "lat", 1.0f, "long", 3.0f, "val", 29L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-03").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-03").toString(),
                        "dim", "foo", "lat", 4.0f, "long", 2.0f, "val", 13L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-05").toString(),
                        "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", "val", 101L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-05").toString(),
                        "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)));
        second.add(new MapBasedInputRow(DateTimes.of("2013-01-04").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-04").toString(),
                        "dim", "foo", "lat", 7.0f, "long", 3.0f, "val", 91L)));
        second.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-05").toString(),
                        "dim", "foo", "lat", 8.0f, "long", 6.0f, "val", 47L)));
        second.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
                ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-05").toString(),
                        "lat2", 0.0f, "long2", 0.0f, "val", 13L)));

        // Add a bunch of random points, uniform in [10.0, 20.0) on both axes
        Random rand = new Random();
        for (int i = 8; i < NUM_POINTS; i++) {
            third.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS,
                    ImmutableMap.<String, Object>of("timestamp", DateTimes.of("2013-01-01").toString(),
                            "dim", "boo",
                            "lat", (float) (rand.nextFloat() * 10 + 10.0),
                            "long", (float) (rand.nextFloat() * 10 + 10.0),
                            "val", i)));
        }

        File tmpFile = File.createTempFile("yay", "who");
        tmpFile.delete(); // reuse the unique temp name as a scratch directory
        File firstFile = new File(tmpFile, "first");
        File secondFile = new File(tmpFile, "second");
        File thirdFile = new File(tmpFile, "third");
        File mergedFile = new File(tmpFile, "merged");
        firstFile.mkdirs();
        secondFile.mkdirs();
        thirdFile.mkdirs();
        mergedFile.mkdirs();

        INDEX_MERGER_V9.persist(first, DATA_INTERVAL, firstFile, indexSpec);
        INDEX_MERGER_V9.persist(second, DATA_INTERVAL, secondFile, indexSpec);
        INDEX_MERGER_V9.persist(third, DATA_INTERVAL, thirdFile, indexSpec);

        try {
            QueryableIndex mergedRealtime = INDEX_IO.loadIndex(
                    INDEX_MERGER_V9.mergeQueryableIndex(
                            Arrays.asList(INDEX_IO.loadIndex(firstFile),
                                    INDEX_IO.loadIndex(secondFile),
                                    INDEX_IO.loadIndex(thirdFile)),
                            true, METRIC_AGGS, mergedFile, indexSpec));
            return mergedRealtime;
        } finally {
            FileUtils.deleteDirectory(firstFile);
            FileUtils.deleteDirectory(secondFile);
            FileUtils.deleteDirectory(thirdFile);
            FileUtils.deleteDirectory(mergedFile);
        }
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}
From source file: org.apache.druid.segment.IndexMergerV9WithSpatialIndexTest.java

private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec, IndexMergerV9 indexMergerV9,
        IndexIO indexIO) {
    try {
        IncrementalIndex first = new IncrementalIndex.Builder()
                .setIndexSchema(new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build())
                .setReportParseExceptions(false)
                .setMaxRowCount(1000)
                .buildOnheap();
        IncrementalIndex second = new IncrementalIndex.Builder()
                .setIndexSchema(new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build())
                .setReportParseExceptions(false)
                .setMaxRowCount(1000)
                .buildOnheap();
        IncrementalIndex third = new IncrementalIndex.Builder()
                .setIndexSchema(new IncrementalIndexSchema.Builder()
                        .withMinTimestamp(DATA_INTERVAL.getStartMillis())
                        .withQueryGranularity(Granularities.DAY)
                        .withMetrics(METRIC_AGGS)
                        .withDimensionsSpec(new DimensionsSpec(null, null, Arrays.asList(
                                new SpatialDimensionSchema("dim.geo", Arrays.asList("lat", "long")),
                                new SpatialDimensionSchema("spatialIsRad", Arrays.asList("lat2", "long2")))))
                        .build())
                .setReportParseExceptions(false)
                .setMaxRowCount(NUM_POINTS)
                .buildOnheap();

        first.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS,
                ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(),
                        "dim", "foo", "lat", 0.0f, "long", 0.0f, "val", 17L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-02").getMillis(), DIMS,
                ImmutableMap.of("timestamp", DateTimes.of("2013-01-02").toString(),
                        "dim", "foo", "lat", 1.0f, "long", 3.0f, "val", 29L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-03").getMillis(), DIMS,
                ImmutableMap.of("timestamp", DateTimes.of("2013-01-03").toString(),
                        "dim", "foo", "lat", 4.0f, "long", 2.0f, "val", 13L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
                ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(),
                        "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", "val", 101L)));
        first.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
                ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(),
                        "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L)));
        second.add(new MapBasedInputRow(DateTimes.of("2013-01-04").getMillis(), DIMS,
                ImmutableMap.of("timestamp", DateTimes.of("2013-01-04").toString(),
                        "dim", "foo", "lat", 7.0f, "long", 3.0f, "val", 91L)));
        second.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
                ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(),
                        "dim", "foo", "lat", 8.0f, "long", 6.0f, "val", 47L)));
        second.add(new MapBasedInputRow(DateTimes.of("2013-01-05").getMillis(), DIMS,
                ImmutableMap.of("timestamp", DateTimes.of("2013-01-05").toString(),
                        "lat2", 0.0f, "long2", 0.0f, "val", 13L)));

        // Add a bunch of random points. ThreadLocalRandom extends Random,
        // so it can stand in wherever a Random is expected.
        Random rand = ThreadLocalRandom.current();
        for (int i = 8; i < NUM_POINTS; i++) {
            third.add(new MapBasedInputRow(DateTimes.of("2013-01-01").getMillis(), DIMS,
                    ImmutableMap.of("timestamp", DateTimes.of("2013-01-01").toString(),
                            "dim", "boo",
                            "lat", (float) (rand.nextFloat() * 10 + 10.0),
                            "long", (float) (rand.nextFloat() * 10 + 10.0),
                            "val", i)));
        }

        File tmpFile = File.createTempFile("yay", "who");
        tmpFile.delete(); // reuse the unique temp name as a scratch directory
        File firstFile = new File(tmpFile, "first");
        File secondFile = new File(tmpFile, "second");
        File thirdFile = new File(tmpFile, "third");
        File mergedFile = new File(tmpFile, "merged");
        firstFile.mkdirs();
        secondFile.mkdirs();
        thirdFile.mkdirs();
        mergedFile.mkdirs();

        indexMergerV9.persist(first, DATA_INTERVAL, firstFile, indexSpec, null);
        indexMergerV9.persist(second, DATA_INTERVAL, secondFile, indexSpec, null);
        indexMergerV9.persist(third, DATA_INTERVAL, thirdFile, indexSpec, null);

        try {
            QueryableIndex mergedRealtime = indexIO.loadIndex(
                    indexMergerV9.mergeQueryableIndex(
                            Arrays.asList(indexIO.loadIndex(firstFile),
                                    indexIO.loadIndex(secondFile),
                                    indexIO.loadIndex(thirdFile)),
                            true, METRIC_AGGS, mergedFile, indexSpec, null));
            return mergedRealtime;
        } finally {
            FileUtils.deleteDirectory(firstFile);
            FileUtils.deleteDirectory(secondFile);
            FileUtils.deleteDirectory(thirdFile);
            FileUtils.deleteDirectory(mergedFile);
        }
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}
From source file: dataGen.DataGen.java

/**
 * Creates normally or uniformly distributed test data, without correlation.
 * The resulting data is stored in the resources folder.
 *
 * @param dimensions the dimension count of the resulting data
 * @param rowCount   how many data tuples should be created
 * @throws IOException if the stream to a file couldn't be written/closed
 */
public static void genData(int dimensions, int rowCount) throws IOException {
    logger.info("Generating uniform random Data with " + rowCount + " Tuples in " + dimensions + " dimensions");

    // Files should be created OS/language independent, so pin the decimal separator
    DecimalFormatSymbols dfs = DecimalFormatSymbols.getInstance();
    dfs.setDecimalSeparator('.');
    NumberFormat nf = new DecimalFormat("0.000000000", dfs);

    Random gen = new Random();
    try (Writer fw = new FileWriter("src/main/resources/random-" + rowCount + "-" + dimensions + ".dat")) {
        for (int i = 1; i <= rowCount; i++) {
            // Each row starts with the row number
            String row = i + " ";
            for (int j = 0; j < dimensions; j++) {
                float n = gen.nextFloat(); // uniform in [0.0f, 1.0f)
                row = row + nf.format(n) + " ";
            }
            fw.write(row);
            fw.append(System.getProperty("line.separator"));
        }
    }
    logger.info(rowCount + " entries generated");
}
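A usage sketch, assuming the surrounding project layout (an existing src/main/resources folder and a configured logger): this call would write 1000 three-dimensional tuples to random-1000-3.dat, each row a row number followed by three nine-decimal values drawn from [0, 1).

DataGen.genData(3, 1000); // writes src/main/resources/random-1000-3.dat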
From source file: org.latticesoft.util.common.NumeralUtil.java

/**
 * Generates the next random float.
 * @return the generated float
 */
public static float getRandomFloat() {
    Random rand = new Random(System.currentTimeMillis());
    return rand.nextFloat();
}
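Seeding a fresh Random with System.currentTimeMillis() on every call is a known pitfall: two calls within the same millisecond get the same seed and therefore return the identical "random" float. A safer sketch (a hypothetical rewrite, not the library's actual API) reuses a per-thread generator:

import java.util.concurrent.ThreadLocalRandom;

public final class SafeNumeralUtil { // hypothetical variant
    public static float getRandomFloat() {
        // ThreadLocalRandom is seeded once per thread, so rapid calls stay independent
        return ThreadLocalRandom.current().nextFloat();
    }
}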
From source file: hivemall.utils.lang.ArrayUtils.java

public static void fill(@Nonnull final float[] a, @Nonnull final Random rand) {
    for (int i = 0, len = a.length; i < len; i++) {
        a[i] = rand.nextFloat();
    }
}
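A usage sketch with an illustrative array name; passing a seeded Random makes the fill reproducible in tests:

float[] weights = new float[8];
ArrayUtils.fill(weights, new Random(123L)); // every element uniform in [0.0f, 1.0f)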
From source file: org.apache.hadoop.hbase.util.ChaosMonkey.java

/** Selects and returns ceil(ratio * items.length) random items from the given array */
static <T> List<T> selectRandomItems(T[] items, float ratio) {
    Random random = new Random();
    int remaining = (int) Math.ceil(items.length * ratio);

    List<T> selectedItems = new ArrayList<T>(remaining);
    for (int i = 0; i < items.length && remaining > 0; i++) {
        if (random.nextFloat() < ((float) remaining / (items.length - i))) {
            selectedItems.add(items[i]);
            remaining--;
        }
    }
    return selectedItems;
}
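The acceptance test random.nextFloat() < remaining / (items.length - i) is classic selection sampling (Knuth's Algorithm S): at each position the acceptance probability equals the number of items still needed divided by the number of items still available, which guarantees the method returns exactly ceil(ratio * items.length) items, in their original order. An illustrative call:

String[] actions = {"restart", "kill", "suspend", "resume"};
List<String> chosen = selectRandomItems(actions, 0.5f); // always exactly 2 items, order preserved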
From source file: org.brekka.stillingar.example.FieldTypesDOMTest.java

private static Testing writeConfig() {
    Random r = new Random();
    ConfigurationDocument doc = ConfigurationDocument.Factory.newInstance();
    Configuration newConfiguration = doc.addNewConfiguration();
    FeatureFlagType featureFlag = newConfiguration.addNewFeatureFlag();
    featureFlag.setKey("turbo");
    featureFlag.setBooleanValue(true);

    Testing testing = newConfiguration.addNewTesting();
    testing.setAnyURI("http://brekka.org/" + RandomStringUtils.randomAlphanumeric(10));
    testing.setBoolean(r.nextBoolean());
    testing.setByte((byte) r.nextInt());
    Calendar cal = Calendar.getInstance();
    testing.setDate(cal);
    testing.setDateTime(cal);
    testing.setDecimal(BigDecimal.valueOf(r.nextDouble()));
    testing.setDouble(r.nextDouble());
    testing.setFloat(r.nextFloat());
    testing.setInt(r.nextInt());
    testing.setInteger(BigInteger.valueOf(r.nextLong()));
    testing.setLanguage("en");
    testing.setLong(r.nextLong());
    testing.setShort((short) r.nextInt());
    testing.setString(RandomStringUtils.randomAlphanumeric(24));
    testing.setTime(cal);
    testing.setUUID(UUID.randomUUID().toString());
    testing.setPeriod(new GDuration("P5Y2M10DT15H"));
    byte[] binary = new byte[32];
    r.nextBytes(binary);
    testing.setBinary(binary);

    TestSupport.write(doc);
    return testing;
}
From source file: com.linkedin.pinot.query.aggregation.groupby.NoDictionaryGroupKeyGeneratorTest.java

/**
 * Helper method to build a segment as follows:
 * <ul>
 *   <li> One string column without dictionary. </li>
 *   <li> One integer column with dictionary. </li>
 * </ul>
 *
 * It also computes the unique group keys while it generates the index.
 *
 * @return Set containing unique group keys from the created segment.
 * @throws Exception
 */
private static RecordReader buildSegment() throws Exception {
    Schema schema = new Schema();
    for (int i = 0; i < COLUMN_NAMES.length; i++) {
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(COLUMN_NAMES[i], DATA_TYPES[i], true);
        schema.addField(dimensionFieldSpec);
    }

    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setRawIndexCreationColumns(Arrays.asList(NO_DICT_COLUMN_NAMES));
    config.setOutDir(INDEX_DIR_PATH);
    config.setSegmentName(SEGMENT_NAME);

    Random random = new Random();
    List<GenericRow> rows = new ArrayList<>(NUM_ROWS);
    for (int i = 0; i < NUM_ROWS; i++) {
        Map<String, Object> map = new HashMap<>(NUM_COLUMNS);
        for (FieldSpec fieldSpec : schema.getAllFieldSpecs()) {
            String column = fieldSpec.getName();
            FieldSpec.DataType dataType = fieldSpec.getDataType();
            switch (dataType) {
            case INT:
                map.put(column, random.nextInt());
                break;
            case LONG:
                map.put(column, random.nextLong());
                break;
            case FLOAT:
                map.put(column, random.nextFloat());
                break;
            case DOUBLE:
                map.put(column, random.nextDouble());
                break;
            case STRING:
                map.put(column, "value_" + i);
                break;
            default:
                throw new IllegalArgumentException("Illegal data type specified: " + dataType);
            }
        }
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        rows.add(genericRow);
    }

    RecordReader recordReader = new GenericRowRecordReader(rows, schema);
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    driver.init(config, recordReader);
    driver.build();
    return recordReader;
}
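The switch in the inner loop is the reusable piece: one random value per column, matched to the declared data type, with nextFloat() covering FLOAT columns. A standalone sketch of that pattern (the method name is illustrative, not from the Pinot test):

static Object randomValue(FieldSpec.DataType dataType, Random random, int rowId) {
    switch (dataType) {
    case INT:    return random.nextInt();
    case LONG:   return random.nextLong();
    case FLOAT:  return random.nextFloat();
    case DOUBLE: return random.nextDouble();
    case STRING: return "value_" + rowId;
    default:     throw new IllegalArgumentException("Illegal data type specified: " + dataType);
    }
}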
From source file: org.voidsink.anewjkuapp.utils.AppUtils.java

public static int getRandomColor() {
    Random rand = new Random(System.currentTimeMillis());
    float hue;
    do {
        hue = rand.nextFloat() * 360; // uniform in [0, 360)
    } while ((Math.abs(mLastHue - hue) < 45) || (hue > 280 && hue < 320));
    mLastHue = hue;

    float[] hsv = new float[3];
    hsv[0] = hue;
    hsv[1] = 0.95f;
    hsv[2] = 0.8f;
    return Color.HSVToColor(hsv);
}
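Here nextFloat() * 360 stretches the unit interval over the full hue circle [0, 360). The do/while loop rejects hues within 45 degrees of the previously returned one, plus the 280 to 320 band, so consecutive colors stay visually distinct; note that the per-call System.currentTimeMillis() seeding caveat from NumeralUtil above also applies if this runs twice within one millisecond. An illustrative Android usage:

int color = AppUtils.getRandomColor();
view.setBackgroundColor(color); // e.g. tint a calendar entry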