List of usage examples for org.apache.commons.math3.random RandomGenerator nextLong
long nextLong();
Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence.

From source file: it.unimi.dsi.sux4j.mph.TwoStepsLcpMonotoneMinimalPerfectHashFunction.java
/**
 * Creates a new two-steps LCP monotone minimal perfect hash function for the given keys.
 *
 * @param keys the keys to hash.
 * @param numKeys the number of keys, or -1 if the number of keys is not known (will be computed).
 * @param transform a transformation strategy for the keys.
 * @param signatureWidth a signature width, or 0 for no signature.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 */
@SuppressWarnings("unused")
protected TwoStepsLcpMonotoneMinimalPerfectHashFunction(final Iterable<? extends T> keys, final long numKeys,
        final TransformationStrategy<? super T> transform, final int signatureWidth, final File tempDir)
        throws IOException {
    // NOTE(review): tempDir is never referenced in this constructor body — confirm
    // whether it was meant to be passed to the hash store / offline iterable.
    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    this.transform = transform;
    final RandomGenerator r = new XorShift1024StarRandomGenerator();

    // Determine the number of keys; iterate over the whole collection only as a last resort.
    if (numKeys == -1) {
        if (keys instanceof Size64)
            n = ((Size64) keys).size64();
        else if (keys instanceof Collection)
            n = ((Collection<?>) keys).size();
        else {
            long c = 0;
            for (T dummy : keys)
                c++;
            n = c;
        }
    } else
        n = numKeys;

    defRetValue = -1; // For the very few cases in which we can decide

    // Degenerate case: with no keys every component stays empty.
    if (n == 0) {
        seed = bucketSize = bucketSizeMask = log2BucketSize = 0;
        lcp2Bucket = null;
        offsets = null;
        lcpLengths = null;
        signatureMask = 0;
        signatures = null;
        return;
    }

    // Choose the bucket size (a power of two, so offsets can be masked out).
    int t = (int) Math.ceil(1 + GOV3Function.C * Math.log(2) + Math.log(n) - Math.log(1 + Math.log(n)));
    log2BucketSize = Fast.ceilLog2(t);
    bucketSize = 1 << log2BucketSize;
    bucketSizeMask = bucketSize - 1;
    LOGGER.debug("Bucket size: " + bucketSize);
    final long numBuckets = (n + bucketSize - 1) / bucketSize;

    LongArrayBitVector prev = LongArrayBitVector.getInstance();
    LongArrayBitVector curr = LongArrayBitVector.getInstance();
    int currLcp = 0;
    @SuppressWarnings("resource")
    final OfflineIterable<BitVector, LongArrayBitVector> lcps = new OfflineIterable<BitVector, LongArrayBitVector>(
            BitVectors.OFFLINE_SERIALIZER, LongArrayBitVector.getInstance());
    final int[][] lcpLengths = IntBigArrays.newBigArray((n + bucketSize - 1) / bucketSize);
    int maxLcp = 0;
    long maxLength = 0;

    @SuppressWarnings("resource")
    final ChunkedHashStore<BitVector> chunkedHashStore = new ChunkedHashStore<BitVector>(
            TransformationStrategies.identity(), pl);
    chunkedHashStore.reset(r.nextLong());
    pl.expectedUpdates = n;
    pl.start("Scanning collection...");

    // Scan the keys bucket by bucket, feeding every bit vector to the store and
    // recording, for each bucket, the longest common prefix (LCP) of its keys.
    Iterator<? extends T> iterator = keys.iterator();
    for (long b = 0; b < numBuckets; b++) {
        prev.replace(transform.toBitVector(iterator.next()));
        chunkedHashStore.add(prev);
        pl.lightUpdate();
        maxLength = Math.max(maxLength, prev.length());
        currLcp = (int) prev.length();
        final int currBucketSize = (int) Math.min(bucketSize, n - b * bucketSize);

        for (int i = 0; i < currBucketSize - 1; i++) {
            curr.replace(transform.toBitVector(iterator.next()));
            chunkedHashStore.add(curr);
            pl.lightUpdate();
            final int prefix = (int) curr.longestCommonPrefixLength(prev);
            // Validate the input: keys must be distinct, prefix-free and sorted.
            if (prefix == prev.length() && prefix == curr.length())
                throw new IllegalArgumentException("The input bit vectors are not distinct");
            if (prefix == prev.length() || prefix == curr.length())
                throw new IllegalArgumentException("The input bit vectors are not prefix-free");
            if (prev.getBoolean(prefix))
                throw new IllegalArgumentException("The input bit vectors are not lexicographically sorted");
            currLcp = Math.min(prefix, currLcp);
            prev.replace(curr);
            maxLength = Math.max(maxLength, prev.length());
        }

        lcps.add(prev.subVector(0, currLcp));
        IntBigArrays.set(lcpLengths, b, currLcp);
        maxLcp = Math.max(maxLcp, currLcp);
    }

    pl.done();

    // We must be sure that both functions are built on the same store.
    chunkedHashStore.checkAndRetry(TransformationStrategies.wrap(keys, transform));
    this.seed = chunkedHashStore.seed();

    if (ASSERTS) {
        ObjectOpenHashSet<BitVector> s = new ObjectOpenHashSet<BitVector>();
        for (LongArrayBitVector bv : lcps)
            s.add(bv.copy());
        assert s.size() == lcps.size() : s.size() + " != " + lcps.size(); // No duplicates.
    }

    // Build function assigning each lcp to its bucket.
    lcp2Bucket = new GOV3Function.Builder<BitVector>().keys(lcps).transform(TransformationStrategies.identity())
            .build();

    if (DEBUG) {
        int p = 0;
        for (BitVector v : lcps)
            System.err.println(v + " " + v.length());
        for (BitVector v : lcps) {
            final long value = lcp2Bucket.getLong(v);
            if (p++ != value) {
                System.err.println("p: " + (p - 1) + " value: " + value + " key:" + v);
                throw new AssertionError();
            }
        }
    }

    lcps.close();

    // Build function assigning the bucket offset to each element.
    offsets = new GOV3Function.Builder<BitVector>().store(chunkedHashStore).values(new AbstractLongBigList() {
        public long getLong(long index) {
            return index & bucketSizeMask;
        }

        public long size64() {
            return n;
        }
    }, log2BucketSize).indirect().build();

    // Build function assigning the lcp length to each element.
    this.lcpLengths = new TwoStepsGOV3Function.Builder<BitVector>().store(chunkedHashStore)
            .values(new AbstractLongBigList() {
                public long getLong(long index) {
                    return IntBigArrays.get(lcpLengths, index >>> log2BucketSize);
                }

                public long size64() {
                    return n;
                }
            }).build();

    // Build function assigning the lcp length and the bucketing data to each element.
    final double p = 1.0 / (this.lcpLengths.rankMean + 1);
    final double s = s(p, this.lcpLengths.width);
    LOGGER.debug("Forecast best threshold: " + s);

    if (DEBUG) {
        // Sanity check: recombining bucket index and offset must reproduce each key's rank.
        int j = 0;
        for (T key : keys) {
            BitVector bv = transform.toBitVector(key);
            if (j++ != lcp2Bucket.getLong(bv.subVector(0, this.lcpLengths.getLong(bv))) * bucketSize
                    + offsets.getLong(bv)) {
                System.err.println("p: " + (j - 1) + " Key: " + key + " bucket size: " + bucketSize + " lcp "
                        + transform.toBitVector(key).subVector(0, this.lcpLengths.getLong(bv)) + " lcp length: "
                        + this.lcpLengths.getLong(bv) + " bucket "
                        + lcp2Bucket.getLong(transform.toBitVector(key).subVector(0, this.lcpLengths.getLong(bv)))
                        + " offset: " + offsets.getLong(bv));
                throw new AssertionError();
            }
        }
    }

    // Log the forecast vs. actual space usage (bits per element).
    double secondFunctionForecastBitsPerElement = (s + GOV3Function.C
            + (Math.pow(2, s) - 1) * this.lcpLengths.width / n
            + (this.lcpLengths.width + GOV3Function.C) * (Math.pow(1 - p, Math.pow(2, s) + 1)));
    LOGGER.debug("Forecast bit cost per element: "
            + (log2BucketSize + GOV3Function.C + secondFunctionForecastBitsPerElement + (Fast.log2(Math.E))));
    LOGGER.info("Actual bit cost per element: " + (double) numBits() / n);

    if (signatureWidth != 0) {
        signatureMask = -1L >>> Long.SIZE - signatureWidth;
        chunkedHashStore.filter(null); // two-steps functions use filtering.
        signatures = chunkedHashStore.signatures(signatureWidth, pl);
    } else {
        signatureMask = 0;
        signatures = null;
    }

    chunkedHashStore.close();
}
From source file: com.github.rinde.rinsim.scenario.generator.NHPoissonProcessTest.java
// Exploratory experiment (excluded from normal runs via @Ignore): sweeps a grid of
// sine-intensity relative heights and order counts, samples dynamism values from a
// non-homogeneous Poisson process, and dumps the results to a text file.
@Ignore
@Test
public void test() throws IOException {
    final int numSamples = 100;
    final long lengthOfScenario = 4 * 60 * 60 * 1000;
    final double period = 30 * 60 * 1000;
    final int[] orders = new int[] { 10, 20, 30, 40, 50, 75, 100, 150, 200, 500 };
    final List<Point> dataPoints = newArrayList();
    final RandomGenerator rng = new MersenneTwister(123);

    // Candidate relative heights for the sine intensity function.
    final List<Double> relHeights = newArrayList();
    for (int i = 0; i < 10; i++) {
        relHeights.add(-.999 + i * .001);
    }
    for (int i = 0; i < 100; i++) {
        relHeights.add(-.99 + i * .05);
    }
    // for (int i = 0; i < 50; i++) {
    // relHeights.add(3.99 + (i * .5));
    // }

    Files.createParentDirs(new File("files/test/times/relheight-dynamism.txt"));
    final BufferedWriter writer = Files.newWriter(new File("files/test/times/relheight-dynamism.txt"),
            Charsets.UTF_8);

    for (int k = 0; k < orders.length; k++) {
        for (int i = 0; i < relHeights.size(); i++) {
            final double d = relHeights.get(i);
            final double relHeight = d;// -.99 + (j * .05);
            // final double period = 3600d;
            final double ordersPerPeriod = orders[k] / (lengthOfScenario / period);
            final IntensityFunction intensity = IntensityFunctions.sineIntensity().height(d).period(period)
                    .area(ordersPerPeriod).build();
            System.out.printf("%1d relative height: %1.3f%n", i, relHeight);

            // final List<Double> sineTimes = FluentIterable
            // .from(
            // ContiguousSet.create(Range.closedOpen(0L, lengthOfScenario),
            // DiscreteDomain.longs()))
            // .transform(Conversion.LONG_TO_DOUBLE)
            // .transform(intensity)
            // .toList();
            // Analysis
            // .writeLoads(
            // sineTimes,
            // new File(
            // "files/test/times/sine/sine-"
            // + Strings.padStart(Integer.toString(i), 2, '0')
            // + ".intens"));

            final TimeSeriesGenerator generator = TimeSeries.nonHomogenousPoisson(lengthOfScenario, intensity);
            // NOTE(review): max is accumulated below but never read afterwards.
            double max = 0;
            double sum = 0;
            final StandardDeviation sd = new StandardDeviation();
            final List<Double> dynamismValues = newArrayList();

            // Draw numSamples time series, retrying until a series has at least 2 events.
            for (int j = 0; j < numSamples; j++) {
                List<Double> times = generator.generate(rng.nextLong());
                while (times.size() < 2) {
                    times = generator.generate(rng.nextLong());
                }
                final double dyn = Metrics.measureDynamism(times, lengthOfScenario);
                dynamismValues.add(dyn);
                sd.increment(dyn);
                sum += dyn;
                max = Math.max(max, dyn);
                // if (j < 3) {
                // // System.out.printf("%1.3f%% %d%n", dyn * 100, times.size());
                // Analysis.writeTimes(
                // lengthOfScenario,
                // times,
                // new File(
                // "files/test/times/orders"
                // + Strings.padStart(Integer.toString(i), 2, '0') + "_"
                // + j
                // + "-" + (dyn * 100)
                // + ".times"));
                // }
            }

            // Append one result row: relHeight, order count, all sampled dynamism values.
            try {
                writer.append(Double.toString(relHeight));
                writer.append(" ");
                writer.append(Integer.toString(orders[k]));
                writer.append(" ");
                writer.append(Joiner.on(" ").join(dynamismValues).toString());
                writer.append("\n");
            } catch (final IOException e) {
                // NOTE(review): checkState(false) converts the IOException into an
                // IllegalStateException and discards the original cause.
                checkState(false);
            }
            System.out.printf(" > dyn %1.3f+-%1.3f%n", +(sum / numSamples), sd.getResult());
            dataPoints.add(new Point(relHeight, sum / numSamples));
        }
    }
    writer.close();

    // Analysis.writeLocationList(dataPoints, new File(
    // "files/test/times/intensity-analysis.txt"));
}
From source file: com.github.rinde.datgen.pdptw.DatasetGenerator.java
// Builds the full dataset: enumerates every (urgency, scale, dynamism-level)
// combination, creates one scenario-generation job per repetition, submits all jobs
// to a fixed-size thread pool, and blocks until the dataset reaches its target size.
Dataset<GeneratedScenario> doGenerate() {
    final ListeningExecutorService service = MoreExecutors
            .listeningDecorator(Executors.newFixedThreadPool(builder.numThreads));
    final Dataset<GeneratedScenario> dataset = Dataset.naturalOrder();
    final List<ScenarioCreator> jobs = new ArrayList<>();

    final RandomGenerator rng = new MersenneTwister(builder.randomSeed);
    final Map<GeneratorSettings, IdSeedGenerator> rngMap = new LinkedHashMap<>();

    for (final Long urgency : builder.urgencyLevels) {
        for (final Double scale : builder.scaleLevels) {
            for (final Entry<TimeSeriesType, Collection<Range<Double>>> dynLevel : builder.dynamismLevels
                    .asMap().entrySet()) {
                final int reps = builder.numInstances * dynLevel.getValue().size();
                final long urg = urgency * 60 * 1000L;
                // The office hours is the period in which new orders are accepted,
                // it is defined as [0,officeHoursLength).
                final long officeHoursLength;
                if (urg < halfDiagTT) {
                    officeHoursLength = builder.scenarioLengthMs - twoDiagTT - PICKUP_DURATION
                            - DELIVERY_DURATION;
                } else {
                    officeHoursLength = builder.scenarioLengthMs - urg - oneAndHalfDiagTT - PICKUP_DURATION
                            - DELIVERY_DURATION;
                }

                final int numOrders = DoubleMath.roundToInt(scale * numOrdersPerScale,
                        RoundingMode.UNNECESSARY);

                // Human-readable properties describing this configuration.
                final ImmutableMap.Builder<String, String> props = ImmutableMap.builder();
                props.put("expected_num_orders", Integer.toString(numOrders));
                props.put("pickup_duration", Long.toString(PICKUP_DURATION));
                props.put("delivery_duration", Long.toString(DELIVERY_DURATION));
                props.put("width_height", String.format("%1.1fx%1.1f", AREA_WIDTH, AREA_WIDTH));

                // TODO store this in TimeSeriesType?
                final RangeSet<Double> rset = TreeRangeSet.create();
                for (final Range<Double> r : dynLevel.getValue()) {
                    rset.add(r);
                }
                // createTimeSeriesGenerator(dynLevel.getKey(), officeHoursLength,
                // numOrders, numOrdersPerScale, props);
                final GeneratorSettings set = GeneratorSettings.builder().setDayLength(builder.scenarioLengthMs)
                        .setOfficeHours(officeHoursLength).setTimeSeriesType(dynLevel.getKey())
                        .setDynamismRangeCenters(builder.dynamismRangeMap.subRangeMap(rset.span()))
                        .setUrgency(urg).setScale(scale).setNumOrders(numOrders).setProperties(props.build())
                        .build();

                // One id/seed generator per settings combination; shared by all its jobs.
                final IdSeedGenerator isg = new IdSeedGenerator(rng.nextLong());
                rngMap.put(set, isg);

                for (int i = 0; i < reps; i++) {
                    final LocationGenerator lg = Locations.builder().min(0d).max(AREA_WIDTH).buildUniform();
                    final TimeSeriesGenerator tsg2 = createTimeSeriesGenerator(dynLevel.getKey(),
                            officeHoursLength, numOrders, numOrdersPerScale,
                            ImmutableMap.<String, String>builder());
                    final ScenarioGenerator gen = createGenerator(officeHoursLength, urg, scale, tsg2,
                            set.getDynamismRangeCenters(), lg, builder, numOrdersPerScale);
                    jobs.add(ScenarioCreator.create(isg.next(), set, gen));
                }
            }
        }
    }

    final AtomicLong currentJobs = new AtomicLong(0L);
    final AtomicLong datasetSize = new AtomicLong(0L);
    LOGGER.info(" - Submitting " + jobs.size() + " Jobs");
    for (final ScenarioCreator job : jobs) {
        submitJob(currentJobs, service, job, builder.numInstances, dataset, rngMap, datasetSize);
    }

    final long targetSize = builder.numInstances * builder.dynamismLevels.values().size()
            * builder.scaleLevels.size() * builder.urgencyLevels.size();
    // NOTE(review): this is a polling busy-wait (sleep + recheck) rather than a
    // latch/future-based completion signal.
    while (datasetSize.get() < targetSize || dataset.size() < targetSize) {
        try {
            // LOGGER.info(" - Waiting, current size ==" + dataset.size());
            Thread.sleep(THREAD_SLEEP_DURATION);
        } catch (final InterruptedException e) {
            // NOTE(review): the interrupt flag is not re-set before rethrowing.
            throw new IllegalStateException(e);
        }
    }

    LOGGER.info(" - Shutdown Service, Awaiting Termination");
    service.shutdown();
    try {
        service.awaitTermination(1L, TimeUnit.HOURS);
    } catch (final InterruptedException e) {
        throw new IllegalStateException(e);
    }
    LOGGER.info(" - Returning dataset");
    return dataset;
}
From source file: org.asoem.greyfish.utils.collect.AbstractBitStringImplementationTest.java
@Test public void testRandom() throws Exception { // given/*from w ww .j a v a2s.c o m*/ final RandomGenerator generator = mock(RandomGenerator.class); given(generator.nextLong()).willReturn(105L); // when final BitString bitString = BitString.random(8, generator); // then verify(generator, only()).nextLong(); assertThat(bitString.toString(), is(equalTo("01101001"))); }
From source file: org.asoem.greyfish.utils.collect.BitString.java
/** * Create a random bit string of given {@code length}. * * @param length the length of the bit string * @param rng the random number generator to use * @return a new bit string of given length. */// w w w . j a v a 2 s . c o m public static BitString random(final int length, final RandomGenerator rng) { checkNotNull(rng); checkArgument(length >= 0); if (length == 0) { return emptyBitSequence(); } long[] longs = new long[(length + 63) / 64]; for (int i = 0; i < longs.length; i++) { longs[i] = rng.nextLong(); } longs[longs.length - 1] &= (~0L >>> (longs.length * 64 - length)); return new BitSetString(BitSet.valueOf(longs), length); }