Example usage for org.apache.commons.math3.random RandomGenerator nextLong

List of usage examples for org.apache.commons.math3.random RandomGenerator nextLong

Introduction

This page collects example usages of org.apache.commons.math3.random.RandomGenerator.nextLong().

Prototype

long nextLong();

Document

Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence.
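
Before the full examples, here is a minimal self-contained sketch of the call (assuming commons-math3 3.x is on the classpath; the class name NextLongSketch is purely illustrative). It shows the pattern that recurs in the examples below: drawing a long from a seeded master generator and using it to seed derived, reproducible computations.

import org.apache.commons.math3.random.MersenneTwister;
import org.apache.commons.math3.random.RandomGenerator;

public class NextLongSketch {
    public static void main(String[] args) {
        // A concrete RandomGenerator implementation, seeded for reproducibility.
        RandomGenerator rng = new MersenneTwister(123L);

        // nextLong() returns the next uniformly distributed 64-bit value.
        long seed = rng.nextLong();

        // Common pattern in the examples below: use the drawn value as the
        // seed of a derived generator or of a generate(seed, ...) call.
        RandomGenerator derived = new MersenneTwister(seed);
        System.out.println("seed = " + seed);
        System.out.println("derived.nextLong() = " + derived.nextLong());
    }
}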

Usage

From source file:com.github.rinde.rinsim.util.StochasticSuppliersTest.java

@Test
public void testUniform() {
    final RandomGenerator rng = new MersenneTwister(123L);
    final StochasticSupplier<Integer> sup = uniformInt(2, 10);
    final IntegerDistribution id = new UniformIntegerDistribution(2, 10);

    final Multiset<Integer> ms = TreeMultiset.create();
    for (int i = 0; i < 1000; i++) {
        ms.add(sup.get(rng.nextLong()));
    }
    final List<Integer> observations = newArrayList();
    final List<Double> expectations = newArrayList();
    for (final Multiset.Entry<Integer> entry : ms.entrySet()) {
        observations.add(entry.getCount());
        expectations.add(id.probability(entry.getElement()));
    }
    assertTrue(chiSquare(expectations, observations, .01));
}

From source file:com.github.rinde.rinsim.scenario.generator.TimeWindowsTest.java

/**
 * Tests the generated time windows on two properties:
 * <ul>
 * <li>The distance between pickupTW.begin and deliveryTW.begin</li>
 * <li>The distance between pickupTW.end and deliveryTW.end</li>
 * </ul>
 */
@Test
public void overlapTest() {
    final RandomGenerator rng = new MersenneTwister(123L);

    for (final TravelTimes tt : DistanceTT.values()) {
        for (final Parcel.Builder parcelBuilder : parcelBuilders()) {
            for (int i = 0; i < 10; i++) {
                timeWindowGenerator.generate(rng.nextLong(), parcelBuilder, tt, END_TIME);

                final long pickDelTT = tt.getShortestTravelTime(parcelBuilder.getPickupLocation(),
                        parcelBuilder.getDeliveryLocation());

                final long toDepotTT = tt.getTravelTimeToNearestDepot(parcelBuilder.getDeliveryLocation());

                final TimeWindow pickTW = parcelBuilder.getPickupTimeWindow();
                final TimeWindow delTW = parcelBuilder.getDeliveryTimeWindow();
                final long pickDur = parcelBuilder.getPickupDuration();

                assertTrue(pickTW.begin() >= 0);

                assertTrue(i + " " + tt + " " + pickTW + " " + delTW,
                        pickTW.end() <= delTW.end() + pickDelTT + pickDur);
                // FIXME update and re-enable this test
                // assertTrue(i + " " + tt + " " + pickTW + " " + delTW + " "
                // + pickDelTT + " " + pickDur,
                // delTW.begin >= pickTW.begin + pickDelTT + pickDur);
            }
        }
    }
}

From source file:com.github.rinde.rinsim.scenario.generator.PoissonIntensityTest.java

/**
 * Tests whether the Poisson process (crudely) approximates the intensity
 * function when a large number of runs is done.
 */
@Test
public void intensityApproximationPoissonProcessTest() {
    final RandomGenerator rng = new MersenneTwister(123);

    final TimeSeriesGenerator pp = TimeSeries.nonHomogenousPoisson(100d, intensityFunction);

    final Multiset<Double> ms = TreeMultiset.create();
    final int repetitions = 10000;
    for (int i = 0; i < repetitions; i++) {
        final List<Double> times = pp.generate(rng.nextLong());
        for (final Double d : times) {
            ms.add(Math.ceil(d));
        }
    }
    for (final Multiset.Entry<Double> entry : ms.entrySet()) {
        final double exp = IntensityFunctions.areaByIntegration(intensityFunction, entry.getElement() - 1d,
                entry.getElement());

        final double observation = entry.getCount() / (double) repetitions;
        assertEquals(exp, observation, 0.05);
    }
}

From source file:com.milaboratory.core.io.util.IOUtilTest.java

@Test
public void test2() throws Exception {
    RandomGenerator rg = new Well19937a();

    int count = 1000;
    long[] values = new long[count];

    for (int n = 0; n < 10; ++n) {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        for (int i = 0; i < count; ++i) {
            final int d = rg.nextInt(63) + 1;
            values[i] = (rg.nextLong() >>> d);
            IOUtil.writeRawVarint64(bos, values[i]);
        }

        byte[] data = bos.toByteArray();

        ByteArrayInputStream bis = new ByteArrayInputStream(data);
        for (int i = 0; i < count; ++i) {
            Assert.assertEquals(values[i], IOUtil.readRawVarint64(bis, -1));
        }
        Assert.assertEquals(-1L, IOUtil.readRawVarint64(bis, -1));
    }
}

From source file:com.milaboratory.core.io.util.IOUtilTest.java

@Test
public void test4() throws Exception {
    RandomGenerator rg = new Well19937a();

    int count = 1000;
    long[] values = new long[count];

    for (int n = 0; n < 10; ++n) {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        for (int i = 0; i < count; ++i) {
            final int d = rg.nextInt(63) + 1;

            values[i] = (rg.nextLong() >>> d);

            if (rg.nextBoolean())
                values[i] *= -1;

            final long encoded = IOUtil.encodeZigZag64(values[i]);
            Assert.assertTrue(-1L != encoded);
            IOUtil.writeRawVarint64(bos, encoded);
        }

        byte[] data = bos.toByteArray();

        ByteArrayInputStream bis = new ByteArrayInputStream(data);
        for (int i = 0; i < count; ++i) {
            Assert.assertEquals(values[i], IOUtil.decodeZigZag64(IOUtil.readRawVarint64(bis, -1)));
        }
        Assert.assertEquals(-1L, IOUtil.readRawVarint64(bis, -1));
    }
}

From source file:it.unimi.dsi.sux4j.mph.ZFastTrieDistributorMonotoneMinimalPerfectHashFunction.java

/** Creates a new monotone minimal perfect hash function based on a z-fast trie distributor using the given
 * keys, transformation strategy and bucket size. 
 *
 * @param keys the keys among which the trie must be able to rank.
 * @param transform a transformation strategy that must turn the keys into a list of
 * distinct, prefix-free, lexicographically increasing (in iteration order) bit vectors.
 * @param log2BucketSize the logarithm of the bucket size, or -1 for the default value.
 * @param signatureWidth a signature width, or 0 for no signature.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 */
protected ZFastTrieDistributorMonotoneMinimalPerfectHashFunction(final Iterable<? extends T> keys,
        final TransformationStrategy<? super T> transform, final int log2BucketSize, final int signatureWidth,
        final File tempDir) throws IOException {

    this.transform = transform;
    defRetValue = -1; // For the very few cases in which we can decide

    long maxLength = 0;
    long totalLength = 0;
    RandomGenerator r = new XorShift1024StarRandomGenerator();
    final ChunkedHashStore<BitVector> chunkedHashStore = new ChunkedHashStore<BitVector>(
            TransformationStrategies.identity(), tempDir);
    chunkedHashStore.reset(r.nextLong());
    final Iterable<BitVector> bitVectors = TransformationStrategies.wrap(keys, transform);
    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    pl.itemsName = "keys";
    pl.start("Scanning collection...");
    for (BitVector bv : bitVectors) {
        maxLength = Math.max(maxLength, bv.length());
        totalLength += bv.length();
        chunkedHashStore.add(bv);
        pl.lightUpdate();
    }

    pl.done();

    chunkedHashStore.checkAndRetry(bitVectors);
    size = chunkedHashStore.size();

    if (size == 0) {
        this.log2BucketSize = -1;
        distributor = null;
        offset = null;
        signatureMask = 0;
        signatures = null;
        chunkedHashStore.close();
        return;
    }

    final long averageLength = (totalLength + size - 1) / size;

    final long forecastBucketSize = (long) Math.ceil(
            10.5 + 4.05 * log(averageLength) + 2.43 * log(log(size) + 1) + 2.43 * log(log(averageLength) + 1));
    this.log2BucketSize = log2BucketSize == -1 ? Fast.mostSignificantBit(forecastBucketSize) : log2BucketSize;

    LOGGER.debug("Average length: " + averageLength);
    LOGGER.debug("Max length: " + maxLength);
    LOGGER.debug("Bucket size: " + (1L << this.log2BucketSize));
    LOGGER.info("Computing z-fast trie distributor...");
    distributor = new ZFastTrieDistributor<BitVector>(bitVectors, this.log2BucketSize,
            TransformationStrategies.identity(), chunkedHashStore);

    LOGGER.info("Computing offsets...");
    offset = new GOV3Function.Builder<BitVector>().store(chunkedHashStore).values(new AbstractLongBigList() {
        final long bucketSizeMask = (1L << ZFastTrieDistributorMonotoneMinimalPerfectHashFunction.this.log2BucketSize)
                - 1;

        public long getLong(long index) {
            return index & bucketSizeMask;
        }

        public long size64() {
            return size;
        }
    }, this.log2BucketSize).indirect().build();

    seed = chunkedHashStore.seed();
    double logU = averageLength * log(2);
    LOGGER.info("Forecast bit cost per element: " + 1.0 / forecastBucketSize
            * (-6 * log2(log(2)) + 5 * log2(logU) + 2 * log2(forecastBucketSize) + log2(log(logU) - log(log(2)))
                    + 6 * GOV3Function.C + 3 * log2(E) + 3 * log2(log(3.0 * size)) + 3
                    + GOV3Function.C * forecastBucketSize
                    + GOV3Function.C * forecastBucketSize * log2(forecastBucketSize)));

    LOGGER.info("Actual bit cost per element: " + (double) numBits() / size);

    if (signatureWidth != 0) {
        signatureMask = -1L >>> Long.SIZE - signatureWidth;
        signatures = chunkedHashStore.signatures(signatureWidth, pl);
    } else {
        signatureMask = 0;
        signatures = null;
    }

    chunkedHashStore.close();

}

From source file:com.github.rinde.rinsim.scenario.generator.ScenarioGenerator.java

/**
 * Generates a new {@link Scenario} instance.
 * @param rng The random number generator used for drawing random numbers.
 * @param id The id of this specific scenario.
 * @return A new instance.
 */
// TODO change rng to seed?
public Scenario generate(RandomGenerator rng, String id) {
    final ImmutableList.Builder<TimedEvent> b = ImmutableList.builder();
    // depots
    final Iterable<? extends AddDepotEvent> depots = depotGenerator.generate(rng.nextLong(),
            parcelGenerator.getCenter());
    b.addAll(depots);

    // vehicles
    final ImmutableList<AddVehicleEvent> vehicles = vehicleGenerator.generate(rng.nextLong(),
            parcelGenerator.getCenter(), builder.getTimeWindow().end());
    b.addAll(vehicles);

    final TravelTimes tm = createTravelTimes(modelBuilders, getTimeUnit(), depots, vehicles);

    // parcels
    b.addAll(parcelGenerator.generate(rng.nextLong(), tm, builder.getTimeWindow().end()));

    // time out
    b.add(TimeOutEvent.create(builder.getTimeWindow().end()));

    // create
    return Scenario.builder(builder, builder.problemClass).addModels(modelBuilders).addEvents(b.build())
            .instanceId(id).build();
}

From source file:it.unimi.dsi.sux4j.mph.PaCoTrieDistributorMonotoneMinimalPerfectHashFunction.java

/** Creates a new PaCo-trie-based monotone minimal perfect hash function using the given
 * elements and transformation strategy. 
 *
 * @param elements the elements among which the trie must be able to rank.
 * @param transform a transformation strategy that must turn the elements in <code>elements</code> into a list of
 * distinct, prefix-free, lexicographically increasing (in iteration order) bit vectors.
 */
public PaCoTrieDistributorMonotoneMinimalPerfectHashFunction(final Iterable<? extends T> elements,
        final TransformationStrategy<? super T> transform) throws IOException {

    this.transform = transform;
    defRetValue = -1; // For the very few cases in which we can decide

    long maxLength = 0;
    long totalLength = 0;
    BitVector bv;
    final RandomGenerator random = new XorShift1024StarRandomGenerator();

    ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    pl.itemsName = "keys";

    pl.start("Creating chunked hash store...");
    final ChunkedHashStore<BitVector> chunkedHashStore = new ChunkedHashStore<BitVector>(
            TransformationStrategies.identity());
    chunkedHashStore.reset(random.nextLong());
    for (T s : elements) {
        bv = transform.toBitVector(s);
        chunkedHashStore.add(bv);
        maxLength = Math.max(maxLength, bv.length());
        totalLength += bv.length();
        pl.lightUpdate();
    }

    pl.done();

    LOGGER.debug("Maximum length: " + maxLength);
    LOGGER.debug("Average length: " + totalLength / (double) chunkedHashStore.size());

    size = chunkedHashStore.size();

    if (size == 0) {
        bucketSize = log2BucketSize = 0;
        distributor = null;
        offset = null;
        chunkedHashStore.close();
        return;
    }

    final long averageLength = (totalLength + size - 1) / size;

    int t = Fast.mostSignificantBit(
            (int) Math.floor(averageLength - Math.log(size) - Math.log(averageLength - Math.log(size)) - 1));
    final int firstbucketSize = 1 << t;
    LOGGER.debug("First bucket size estimate: " + firstbucketSize);

    final Iterable<BitVector> bitVectors = TransformationStrategies.wrap(elements, transform);

    LOGGER.info("Creating distributor...");

    PaCoTrieDistributor<BitVector> firstDistributor = new PaCoTrieDistributor<BitVector>(bitVectors, t,
            TransformationStrategies.identity());

    if (firstDistributor.numBits() == 0 || firstbucketSize >= size)
        log2BucketSize = t;
    else {
        // Reassign bucket size based on empirical estimation
        log2BucketSize = t
                - Fast.mostSignificantBit((int) Math.ceil(size / (firstDistributor.numBits() * Math.log(2))));
    }

    bucketSize = 1 << log2BucketSize;
    LOGGER.debug("Second bucket size estimate: " + bucketSize);

    if (firstbucketSize == bucketSize)
        distributor = firstDistributor;
    else {
        firstDistributor = null;
        distributor = new PaCoTrieDistributor<BitVector>(bitVectors, log2BucketSize,
                TransformationStrategies.identity());
    }

    LOGGER.debug("Bucket size: " + bucketSize);
    final int bucketSizeMask = bucketSize - 1;

    LOGGER.info("Generating offset function...");

    offset = new GOV3Function.Builder<BitVector>().keys(bitVectors)
            .transform(TransformationStrategies.identity()).store(chunkedHashStore)
            .values(new AbstractLongBigList() {
                public long getLong(long index) {
                    return index & bucketSizeMask;
                }

                public long size64() {
                    return size;
                }
            }, log2BucketSize).indirect().build();

    chunkedHashStore.close();

    LOGGER.debug("Forecast distributor bit cost: "
            + (size / bucketSize) * (maxLength + log2BucketSize - Math.log(size)));
    LOGGER.debug("Actual distributor bit cost: " + distributor.numBits());
    LOGGER.debug("Forecast bit cost per element: " + (GOV3Function.C + Fast.log2(Math.E)
            - Fast.log2(Fast.log2(Math.E)) + Fast.log2(maxLength - Fast.log2(size))));
    LOGGER.info("Actual bit cost per element: " + (double) numBits() / size);
}

From source file:it.unimi.dsi.sux4j.mph.VLPaCoTrieDistributorMonotoneMinimalPerfectHashFunction.java

/** Creates a new PaCo-trie-based monotone minimal perfect hash function using the given
 * elements and transformation strategy. 
 *
 * @param elements the elements among which the trie must be able to rank.
 * @param transform a transformation strategy that must turn the elements in <code>elements</code> into a list of
 * distinct, prefix-free, lexicographically increasing (in iteration order) bit vectors.
 */
public VLPaCoTrieDistributorMonotoneMinimalPerfectHashFunction(final Iterable<? extends T> elements,
        final TransformationStrategy<? super T> transform) throws IOException {

    this.transform = transform;
    defRetValue = -1; // For the very few cases in which we can decide

    long maxLength = 0;
    long totalLength = 0;
    BitVector bv;
    final RandomGenerator random = new XorShift1024StarRandomGenerator();
    ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    pl.itemsName = "keys";

    pl.start("Creating chunked hash store...");
    final ChunkedHashStore<BitVector> chunkedHashStore = new ChunkedHashStore<BitVector>(
            TransformationStrategies.identity());
    chunkedHashStore.reset(random.nextLong());
    for (T s : elements) {
        bv = transform.toBitVector(s);
        chunkedHashStore.add(bv);
        maxLength = Math.max(maxLength, bv.length());
        totalLength += bv.length();
        pl.lightUpdate();
    }

    pl.done();

    size = chunkedHashStore.size();

    if (size == 0) {
        bucketSize = log2BucketSize = 0;
        distributor = null;
        offset = null;
        chunkedHashStore.close();
        return;
    }

    final long averageLength = (totalLength + size - 1) / size;

    int t = Fast.mostSignificantBit(
            (int) Math.floor(averageLength - Math.log(size) - Math.log(averageLength - Math.log(size)) - 1));
    final int firstbucketSize = 1 << t;
    LOGGER.debug("First bucket size estimate: " + firstbucketSize);

    final Iterable<BitVector> bitVectors = TransformationStrategies.wrap(elements, transform);

    VLPaCoTrieDistributor<BitVector> firstDistributor = new VLPaCoTrieDistributor<BitVector>(bitVectors, size,
            firstbucketSize, TransformationStrategies.identity());

    if (firstDistributor.numBits() == 0 || firstbucketSize >= size)
        log2BucketSize = t;
    else {
        // Reassign bucket size based on empirical estimation
        log2BucketSize = t
                - Fast.mostSignificantBit((int) Math.ceil(size / (firstDistributor.numBits() * Math.log(2))));
    }

    bucketSize = 1 << log2BucketSize;
    LOGGER.debug("Second bucket size estimate: " + bucketSize);

    if (firstbucketSize == bucketSize)
        distributor = firstDistributor;
    else {
        firstDistributor = null;
        distributor = new VLPaCoTrieDistributor<BitVector>(bitVectors, size, bucketSize,
                TransformationStrategies.identity());
    }

    LOGGER.info("Bucket size: " + bucketSize);

    final SparseRank sparseRank;
    if (size > 2 * bucketSize) {
        sparseRank = new SparseRank(distributor.offset.getLong(distributor.offset.size64() - 1) + 1,
                distributor.offset.size64(), distributor.offset.iterator());
        if (ASSERTS) {
            long i = 0;
            for (BitVector b : bitVectors) {
                final long d = distributor.getLong(b);
                assert sparseRank.rank(i) == d : "At " + i + ": " + sparseRank.rank(i) + " != " + d;
                i++;
            }
        }

        select = sparseRank.getSelect();
    } else {
        sparseRank = null;
        select = null;
    }

    if (size > 0) {
        offset = new GOV3Function.Builder<BitVector>().keys(bitVectors)
                .transform(TransformationStrategies.identity()).store(chunkedHashStore)
                .values(new AbstractLongBigList() {
                    public long getLong(long index) {
                        final long rank = sparseRank == null ? 0 : sparseRank.rank(index);
                        if (ASSERTS) {
                            assert rank == 0
                                    || distributor.offset.getLong(rank - 1) <= index : distributor.offset
                                            .getLong(rank - 1) + " >= " + index + "(rank=" + rank + ")";
                            assert rank == 0 && index < bucketSize * 2 || rank > 0
                                    && index - distributor.offset.getLong(rank - 1) < bucketSize * 2;
                        }
                        return rank == 0 ? index : index - distributor.offset.getLong(rank - 1);
                    }

                    public long size64() {
                        return size;
                    }
                }, log2BucketSize + 1).indirect().build();

    } else
        offset = null;

    chunkedHashStore.close();

    LOGGER.debug("Forecast distributor bit cost: "
            + (size / bucketSize) * (maxLength + log2BucketSize - Math.log(size)));
    LOGGER.debug("Actual distributor bit cost: " + distributor.numBits());
    LOGGER.debug("Forecast bit cost per element: " + (GOV3Function.C + Fast.log2(Math.E)
            - Fast.log2(Fast.log2(Math.E)) + Fast.log2(maxLength - Fast.log2(size))));
    LOGGER.info("Actual bit cost per element: " + (double) numBits() / size);
}

From source file:com.milaboratory.core.motif.MotifTest.java

@Test
public void testRandom1() throws Exception {
    RandomGenerator rg = new Well19937c();
    for (WildcardSymbol wildcardSymbol : NucleotideSequence.ALPHABET.getAllWildcards()) {
        int seqLength = 20 + rg.nextInt(100);
        int motifSize = rg.nextInt(20);
        StringBuilder builder = new StringBuilder(motifSize);
        for (int i = 0; i < motifSize; ++i)
            builder.append(wildcardSymbol.getSymbol());
        Motif<NucleotideSequence> motif = new Motif<>(NucleotideSequence.ALPHABET, builder.toString());
        NucleotideSequenceBuilder seqBuilder = new NucleotideSequenceBuilder().ensureCapacity(seqLength);
        for (int i = 0; i < seqLength; ++i)
            seqBuilder.append(wildcardSymbol.getUniformlyDistributedSymbol(rg.nextLong()));
        NucleotideSequence seq = seqBuilder.createAndDestroy();
        for (int i = 0; i < seq.size() - motif.size(); ++i)
            assertTrue(motif.matches(seq, i));
    }
}