Example usage for java.util.Random.nextLong()

List of usage examples for java.util.Random.nextLong()

Introduction

On this page you can find example usage for java.util.Random.nextLong().

Prototype

public long nextLong() 

Document

Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence.
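
The examples below cover common patterns: hashing random longs for cardinality sketches, seeding worker RNGs from a master RNG, and generating pseudorandom ids. First, a minimal, self-contained sketch of the call itself (class name is illustrative):

import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

public class NextLongDemo {
    public static void main(String[] args) {
        // Unseeded: a different sequence on every run.
        Random random = new Random();
        System.out.println(random.nextLong());

        // Seeded: the same sequence on every run, which is why the
        // test examples below construct new Random(0L).
        Random seeded = new Random(0L);
        System.out.println(seeded.nextLong());

        // ThreadLocalRandom is a Random subclass that avoids
        // contention when several threads draw random numbers.
        System.out.println(ThreadLocalRandom.current().nextLong());
    }
}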

Usage

From source file: org.apache.druid.hll.HyperLogLogCollectorTest.java

@Ignore
@Test
public void showErrorRate() {
    HashFunction fn = Hashing.murmur3_128();
    Random random = ThreadLocalRandom.current();

    double error = 0.0d;
    int count = 0;

    final int[] valsToCheck = { 10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000,
            2000000, 10000000, Integer.MAX_VALUE };

    for (int numThings : valsToCheck) {
        long startTime = System.currentTimeMillis();
        HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();

        for (int i = 0; i < numThings; ++i) {
            if (i != 0 && i % 100000000 == 0) {
                ++count;
                error = computeError(error, count, i, startTime, collector);
            }
            collector.add(fn.hashLong(random.nextLong()).asBytes());
        }

        ++count;
        error = computeError(error, count, numThings, startTime, collector);
    }
}
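
The loop above feeds each pseudorandom long through Guava's MurmurHash3 via fn.hashLong(random.nextLong()).asBytes() before adding it to the collector. A standalone sketch of just that step, assuming Guava is on the classpath (class name is illustrative):

import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

import java.util.Random;

public class HashedLongs {
    public static void main(String[] args) {
        HashFunction fn = Hashing.murmur3_128();
        Random random = new Random(0L); // fixed seed keeps the input reproducible

        for (int i = 0; i < 3; i++) {
            // hashLong() spreads the 64-bit input into a well-distributed
            // 128-bit hash; asBytes() yields its 16-byte representation.
            byte[] hashed = fn.hashLong(random.nextLong()).asBytes();
            System.out.println(hashed.length); // prints 16
        }
    }
}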

From source file: com.facebook.LinkBench.LinkBenchDriverInj.java

void sendrequests() throws IOException, InterruptedException, Throwable {

    if (!doRequest) {
        logger.info("Skipping request phase per the cmdline arg");
        return;
    }

    // config info for requests
    nrequesters = ConfigUtil.getInt(props, Config.NUM_REQUESTERS);
    if (nrequesters == 0) {
        logger.info("NO REQUEST PHASE CONFIGURED. ");
        return;
    }
    List<LinkBenchRequestInj> requesters = new LinkedList<LinkBenchRequestInj>();

    RequestProgress progress = LinkBenchRequestInj.createProgress(logger, props);

    Random masterRandom = createMasterRNG(props, Config.REQUEST_RANDOM_SEED);
    requestrate = ConfigUtil.getLong(props, Config.REQUEST_RATE, 0L);
    maxTime = ConfigUtil.getLong(props, Config.MAX_TIME);

    genQueue = new ArrayBlockingQueue<Long>(1000000); // 1000000 should be in Config really. TODO

    statsQueue = new ArrayBlockingQueue<StatMessage>(1000000); // 1000000 should be in Config. TODO

    // create GlobalStats thread

    GlobalStats gs = new GlobalStats(statsQueue, props, csvStreamFile);
    Thread t = new Thread(gs, "Global Stats Thread");
    t.start();

    // create requesters
    for (int i = 0; i < nrequesters; i++) {
        Stores stores = initStores();
        LinkBenchRequestInj l = new LinkBenchRequestInj(stores.linkStore, stores.nodeStore, props,
                csvStreamFile, progress, new Random(masterRandom.nextLong()), i, nrequesters, genQueue,
                statsQueue);
        requesters.add(l);
    }
    progress.startTimer();
    // run requesters
    concurrentExec(requesters, true, new Random(masterRandom.nextLong()));

    // stop Thread with global statistics
    t.interrupt();
    t.join();
    gs.printQuantileStats();

    long finishTime = System.currentTimeMillis();
    // Calculate duration accounting for warmup time
    long benchmarkTime = finishTime - progress.getBenchmarkStartTime();

    long requestsdone = 0;
    int abortedRequesters = 0;
    // wait for requesters
    for (LinkBenchRequestInj requester : requesters) {
        requestsdone += requester.getRequestsDone();
        if (requester.didAbort()) {
            abortedRequesters++;
        }
    }

    logger.info("REQUEST PHASE COMPLETED. " + requestsdone + " requests done in " + (benchmarkTime / 1000)
            + " seconds." + " Requests/second = " + (1000 * requestsdone) / benchmarkTime);
    if (abortedRequesters > 0) {
        logger.error(String.format(
                "Benchmark did not complete cleanly: %d/%d "
                        + "request threads aborted.  See error log entries for details.",
                abortedRequesters, nrequesters));
    }
}
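
The driver above seeds each requester with new Random(masterRandom.nextLong()), so one master seed reproducibly fans out into independent per-thread streams. A minimal sketch of that pattern (class name, worker count, and seed are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class MasterRngFanOut {
    public static void main(String[] args) {
        // One master RNG, seeded once; each worker gets its own Random
        // seeded from the master's stream.
        Random masterRandom = new Random(42L);

        List<Random> workerRandoms = new ArrayList<>();
        for (int i = 0; i < 4; i++) {
            workerRandoms.add(new Random(masterRandom.nextLong()));
        }

        // Re-running with the same master seed reproduces every worker's stream.
        for (Random r : workerRandoms) {
            System.out.println(r.nextLong());
        }
    }
}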

From source file: com.hubcap.task.TaskRunner.java

/**
 * Private Constructor, use 'createNew()' to make new task runners.
 */
private TaskRunner() {

    taskId = (new Date().getTime());
    Random r = new Random(taskId);

    // Add a pseudorandom offset to reduce the chance that two tasks
    // created in the same millisecond end up with the same id, since
    // getTime() only has millisecond resolution.
    taskId += Math.abs(r.nextLong());
}
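
One caveat with this pattern: Math.abs(Long.MIN_VALUE) returns Long.MIN_VALUE, so Math.abs(r.nextLong()) is still negative in one case out of 2^64. A sketch of a variant that sidesteps this (helper and class names are illustrative):

import java.util.Random;

public class NonNegativeNextLong {

    // Returns a non-negative pseudorandom long.
    static long nonNegativeLong(Random r) {
        // Clearing the sign bit handles Long.MIN_VALUE, which Math.abs does not.
        return r.nextLong() & Long.MAX_VALUE;
    }

    public static void main(String[] args) {
        System.out.println(nonNegativeLong(new Random()));
    }
}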

From source file: org.apache.pig.pen.DerivedDataVisitor.java

public DataBag evaluateIsolatedOperator(LOCogroup op) {
    // return null if the inputs are not already evaluated
    for (LogicalOperator in : op.getPlan().getPredecessors(op)) {
        if (derivedData.get(in) == null)
            return null;
    }

    LineageTracer oldLineage = this.lineage;
    this.lineage = new LineageTracer();

    PhysicalOperator physOp = LogToPhyMap.get(op);
    Random r = new Random();
    // get the list of original inputs
    // List<PhysicalOperator> inputs = physOp.getInputs();
    List<PhysicalOperator> inputs = new ArrayList<PhysicalOperator>();
    PhysicalPlan phy = new PhysicalPlan();
    phy.add(physOp);

    for (PhysicalOperator input : physOp.getInputs()) {
        inputs.add(input.getInputs().get(0));
        input.setInputs(null);
        phy.add(input);
        try {
            phy.connect(input, physOp);
        } catch (PlanException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            log.error("Error connecting " + input.name() + " to " + physOp.name());
        }
    }
    physOp.setLineageTracer(lineage);

    // replace the original inputs by POReads
    for (int i = 0; i < inputs.size(); i++) {
        DataBag bag = derivedData.get(op.getInputs().get(i));
        PORead por = new PORead(new OperatorKey("", r.nextLong()), bag);
        phy.add(por);
        try {
            phy.connect(por, physOp.getInputs().get(i));
        } catch (PlanException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            log.error("Error connecting " + por.name() + " to " + physOp.name());
        }
    }

    // replace the original inputs by POReads
    // for(LogicalOperator l : op.getPlan().getPredecessors(op)) {
    // DataBag bag = derivedData.get(l);
    // PORead por = new PORead(new OperatorKey("", r.nextLong()), bag);
    // phy.add(por);
    // try {
    // phy.connect(por, physOp);
    // } catch (PlanException e) {
    // // TODO Auto-generated catch block
    // e.printStackTrace();
    // log.error("Error connecting " + por.name() + " to " + physOp.name());
    // }
    // }

    DataBag output = BagFactory.getInstance().newDefaultBag();
    Tuple t = null;
    try {
        for (Result res = physOp.getNext(t); res.returnStatus != POStatus.STATUS_EOP; res = physOp.getNext(t)) {
            output.add((Tuple) res.result);
        }
    } catch (ExecException e) {
        log.error("Error evaluating operator : " + physOp.name());
    }

    this.lineage = oldLineage;

    physOp.setInputs(inputs);
    physOp.setLineageTracer(null);

    return output;
}

From source file: io.druid.hll.HyperLogLogCollectorTest.java

@Test
public void testEstimationReadOnlyByteBuffers() throws Exception {
    Random random = new Random(0L);

    final int[] valsToCheck = { 10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000,
            2000000 };
    final double[] expectedVals = { 11.029647221949576, 21.108407720752034, 51.64575281885815,
            100.42231726408892, 981.8579991802412, 1943.1337257462792, 4946.192042635218, 9935.088157579434,
            20366.1486889433, 49433.56029693898, 100615.26273314281, 980831.624899156000, 1982408.2608981386 };

    int valsToCheckIndex = 0;
    HyperLogLogCollector collector = HyperLogLogCollector
            .makeCollector(ByteBuffer.allocateDirect(HyperLogLogCollector.getLatestNumBytesForDenseStorage()));
    for (int i = 0; i < valsToCheck[valsToCheck.length - 1]; ++i) {
        collector.add(fn.hashLong(random.nextLong()).asBytes());
        if (i == valsToCheck[valsToCheckIndex]) {
            Assert.assertEquals(expectedVals[valsToCheckIndex], collector.estimateCardinality(), 0.0d);
            ++valsToCheckIndex;
        }
    }
    Assert.assertEquals(expectedVals.length, valsToCheckIndex + 1);
    Assert.assertEquals(expectedVals[valsToCheckIndex], collector.estimateCardinality(), 0.0d);
}

From source file: org.apache.druid.hll.HyperLogLogCollectorTest.java

@Test
public void testEstimationReadOnlyByteBuffers() {
    Random random = new Random(0L);

    final int[] valsToCheck = { 10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000,
            2000000 };
    final double[] expectedVals = { 11.029647221949576, 21.108407720752034, 51.64575281885815,
            100.42231726408892, 981.8579991802412, 1943.1337257462792, 4946.192042635218, 9935.088157579434,
            20366.1486889433, 49433.56029693898, 100615.26273314281, 980831.624899156000, 1982408.2608981386 };

    int valsToCheckIndex = 0;
    HyperLogLogCollector collector = HyperLogLogCollector
            .makeCollector(ByteBuffer.allocateDirect(HyperLogLogCollector.getLatestNumBytesForDenseStorage()));
    for (int i = 0; i < valsToCheck[valsToCheck.length - 1]; ++i) {
        collector.add(fn.hashLong(random.nextLong()).asBytes());
        if (i == valsToCheck[valsToCheckIndex]) {
            Assert.assertEquals(expectedVals[valsToCheckIndex], collector.estimateCardinality(), 0.0d);
            ++valsToCheckIndex;
        }
    }
    Assert.assertEquals(expectedVals.length, valsToCheckIndex + 1);
    Assert.assertEquals(expectedVals[valsToCheckIndex], collector.estimateCardinality(), 0.0d);
}

From source file: org.apache.pig.pen.DerivedDataVisitor.java

@Override
protected void visit(LOCogroup cg) throws VisitorException {
    // evaluateOperator(cg);
    // there is a slightly different code path for cogroup because of the
    // local rearranges
    PhysicalOperator physOp = LogToPhyMap.get(cg);
    Random r = new Random();
    // get the list of original inputs

    // List<PhysicalOperator> inputs = physOp.getInputs();
    List<PhysicalOperator> inputs = new ArrayList<PhysicalOperator>();
    PhysicalPlan phy = new PhysicalPlan();
    phy.add(physOp);

    // for(PhysicalOperator input : physOp.getInputs()) {
    for (PhysicalOperator input : physPlan.getPredecessors(physOp)) {
        inputs.add(input.getInputs().get(0));
        // input.setInputs(null);
        phy.add(input);
        try {
            phy.connect(input, physOp);
        } catch (PlanException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            log.error("Error connecting " + input.name() + " to " + physOp.name());
        }
    }

    physOp.setLineageTracer(lineage);

    // replace the original inputs by POReads
    for (int i = 0; i < inputs.size(); i++) {
        DataBag bag = derivedData.get(cg.getInputs().get(i));
        PORead por = new PORead(new OperatorKey("", r.nextLong()), bag);
        phy.add(por);
        try {
            phy.connect(por, physOp.getInputs().get(i));
        } catch (PlanException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            log.error("Error connecting " + por.name() + " to " + physOp.name());
        }
    }

    DataBag output = BagFactory.getInstance().newDefaultBag();
    Tuple t = null;
    try {
        for (Result res = physOp.getNext(t); res.returnStatus != POStatus.STATUS_EOP; res = physOp.getNext(t)) {
            output.add((Tuple) res.result);
        }
    } catch (ExecException e) {
        log.error("Error evaluating operator : " + physOp.name());
    }
    derivedData.put(cg, output);

    try {
        Collection<IdentityHashSet<Tuple>> eq = EquivalenceClasses.getEquivalenceClasses(cg, derivedData);
        EqClasses.addAll(eq);
        OpToEqClasses.put(cg, eq);
    } catch (ExecException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        log.error("Error updating equivalence classes while evaluating operators. \n" + e.getMessage());
    }

    // re-attach the original operators
    // for(int i = 0; i < inputs.size(); i++) {
    // try {
    // physPlan.connect(inputs.get(i), physOp.getInputs().get(i));
    //      
    // } catch (PlanException e) {
    // // TODO Auto-generated catch block
    // e.printStackTrace();
    // log.error("Error connecting " + inputs.get(i).name() + " to " +
    // physOp.getInputs().get(i).name());
    // }
    // }
    physOp.setLineageTracer(null);
}

From source file: org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.TestResourceLocalizationService.java

private static LocalResource getMockedResource(Random r, LocalResourceVisibility vis) {
    String name = Long.toHexString(r.nextLong());
    URL url = getPath("/local/PRIVATE/" + name);
    LocalResource rsrc = BuilderUtils.newLocalResource(url, LocalResourceType.FILE, vis,
            r.nextInt(1024) + 1024L, r.nextInt(1024) + 2048L, false);
    return rsrc;
}
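
Long.toHexString(r.nextLong()) above is a cheap way to mint a pseudorandom resource name. A standalone sketch (class name and path are illustrative):

import java.util.Random;

public class RandomHexName {
    public static void main(String[] args) {
        Random r = new Random();
        // Up to 16 hex digits; toHexString drops leading zero digits.
        String name = Long.toHexString(r.nextLong());
        System.out.println("/local/PRIVATE/" + name);
    }
}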

From source file: io.druid.hll.HyperLogLogCollectorTest.java

@Test
public void testEstimationLimitDifferentFromCapacity() throws Exception {
    Random random = new Random(0L);

    final int[] valsToCheck = { 10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000,
            2000000 };
    final double[] expectedVals = { 11.029647221949576, 21.108407720752034, 51.64575281885815,
            100.42231726408892, 981.8579991802412, 1943.1337257462792, 4946.192042635218, 9935.088157579434,
            20366.1486889433, 49433.56029693898, 100615.26273314281, 980831.624899156000, 1982408.2608981386 };

    int valsToCheckIndex = 0;
    HyperLogLogCollector collector = HyperLogLogCollector.makeCollector((ByteBuffer) ByteBuffer.allocate(10000)
            .position(0).limit(HyperLogLogCollector.getLatestNumBytesForDenseStorage()));
    for (int i = 0; i < valsToCheck[valsToCheck.length - 1]; ++i) {
        collector.add(fn.hashLong(random.nextLong()).asBytes());
        if (i == valsToCheck[valsToCheckIndex]) {
            Assert.assertEquals(expectedVals[valsToCheckIndex], collector.estimateCardinality(), 0.0d);
            ++valsToCheckIndex;
        }
    }
    Assert.assertEquals(expectedVals.length, valsToCheckIndex + 1);
    Assert.assertEquals(expectedVals[valsToCheckIndex], collector.estimateCardinality(), 0.0d);
}

From source file: org.apache.druid.hll.HyperLogLogCollectorTest.java

@Test
public void testEstimationLimitDifferentFromCapacity() {
    Random random = new Random(0L);

    final int[] valsToCheck = { 10, 20, 50, 100, 1000, 2000, 5000, 10000, 20000, 50000, 100000, 1000000,
            2000000 };
    final double[] expectedVals = { 11.029647221949576, 21.108407720752034, 51.64575281885815,
            100.42231726408892, 981.8579991802412, 1943.1337257462792, 4946.192042635218, 9935.088157579434,
            20366.1486889433, 49433.56029693898, 100615.26273314281, 980831.624899156000, 1982408.2608981386 };

    int valsToCheckIndex = 0;
    HyperLogLogCollector collector = HyperLogLogCollector.makeCollector((ByteBuffer) ByteBuffer.allocate(10000)
            .position(0).limit(HyperLogLogCollector.getLatestNumBytesForDenseStorage()));
    for (int i = 0; i < valsToCheck[valsToCheck.length - 1]; ++i) {
        collector.add(fn.hashLong(random.nextLong()).asBytes());
        if (i == valsToCheck[valsToCheckIndex]) {
            Assert.assertEquals(expectedVals[valsToCheckIndex], collector.estimateCardinality(), 0.0d);
            ++valsToCheckIndex;
        }
    }
    Assert.assertEquals(expectedVals.length, valsToCheckIndex + 1);
    Assert.assertEquals(expectedVals[valsToCheckIndex], collector.estimateCardinality(), 0.0d);
}