Example usage for java.util.Random.nextLong()

List of usage examples for java.util.Random.nextLong()

Introduction

This page collects example usages of java.util.Random.nextLong() drawn from open-source projects.

Prototype

public long nextLong() 

Document

Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence.
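
Before the project snippets below, here is a minimal, self-contained sketch of the method in isolation. Everything in it (class name, seed, bound) is illustrative rather than taken from any of the projects. Note that nextLong() draws all 64 bits, so the result can be negative, and seeding the generator makes the sequence reproducible.

import java.util.Random;

public class NextLongDemo {
    public static void main(String[] args) {
        Random seeded = new Random(42L);   // fixed seed: identical sequence on every run
        long value = seeded.nextLong();    // uniform over the full long range, may be negative

        // A common follow-up need is a non-negative value in [0, bound).
        // Math.floorMod avoids the Math.abs pitfall: Math.abs(Long.MIN_VALUE)
        // overflows and stays negative.
        long bound = 1_000_000L;
        long inRange = Math.floorMod(seeded.nextLong(), bound);

        System.out.println(value + " " + inRange);
    }
}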

Usage

From source file:org.apache.druid.hll.HyperLogLogCollectorTest.java

@Test
public void testSparseEstimation() {
    final Random random = new Random(0);
    HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();

    for (int i = 0; i < 100; ++i) {
        collector.add(fn.hashLong(random.nextLong()).asBytes());
    }

    Assert.assertEquals(collector.estimateCardinality(),
            HyperLogLogCollector.estimateByteBuffer(collector.toByteBuffer()), 0.0d);
}

From source file:org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.java

public int randomFill(Random r, int randField, int field, ExtraTypeInfo extraTypeInfo) {
    // nextInt(2) returns 0 or 1; nextInt(1) would always return 0 and never yield true
    myBool = chooseNull(r, randField, field++) ? null : Boolean.valueOf(r.nextInt(2) == 1);
    myByte = chooseNull(r, randField, field++) ? null : Byte.valueOf((byte) r.nextInt());
    myShort = chooseNull(r, randField, field++) ? null : Short.valueOf((short) r.nextInt());
    myInt = chooseNull(r, randField, field++) ? null : Integer.valueOf(r.nextInt());
    myLong = chooseNull(r, randField, field++) ? null : Long.valueOf(r.nextLong());
    myFloat = chooseNull(r, randField, field++) ? null : Float.valueOf(r.nextFloat() * 10 - 5);
    myDouble = chooseNull(r, randField, field++) ? null : Double.valueOf(r.nextDouble() * 10 - 5);
    myString = chooseNull(r, randField, field++) ? null : getRandString(r);
    myHiveChar = chooseNull(r, randField, field++) ? null : getRandHiveChar(r, extraTypeInfo);
    myHiveVarchar = chooseNull(r, randField, field++) ? null : getRandHiveVarchar(r, extraTypeInfo);
    myBinary = getRandBinary(r, r.nextInt(1000));
    myDecimal = chooseNull(r, randField, field++) ? null : getRandHiveDecimal(r, extraTypeInfo);
    myDate = chooseNull(r, randField, field++) ? null : getRandDate(r);
    myTimestamp = chooseNull(r, randField, field++) ? null : RandomTypeUtil.getRandTimestamp(r);
    myIntervalYearMonth = chooseNull(r, randField, field++) ? null : getRandIntervalYearMonth(r);
    myIntervalDayTime = chooseNull(r, randField, field++) ? null : getRandIntervalDayTime(r);
    return field;
}

From source file:org.apache.hadoop.mapred.split.TestGroupedSplits.java

@Test(timeout = 10000)
public void testFormat() throws Exception {
    JobConf job = new JobConf(defaultConf);

    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    createFiles(length, numFiles, random);

    // create a combined split for the files
    TextInputFormat wrappedFormat = new TextInputFormat();
    wrappedFormat.configure(job);
    TezGroupedSplitsInputFormat<LongWritable, Text> format = new TezGroupedSplitsInputFormat<LongWritable, Text>();
    format.setConf(job);
    format.setDesiredNumberOfSplits(1);
    format.setInputFormat(wrappedFormat);
    LongWritable key = new LongWritable();
    Text value = new Text();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        InputSplit[] splits = format.getSplits(job, numSplits);
        LOG.info("splitting: got =        " + splits.length);

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.length);
        InputSplit split = splits[0];
        assertEquals("It should be TezGroupedSplit", TezGroupedSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        RecordReader<LongWritable, Text> reader = format.getRecordReader(split, job, voidReporter);
        try {
            int count = 0;
            while (reader.next(key, value)) {
                int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                if (bits.get(v)) {
                    LOG.warn("conflict with " + v + " at position " + reader.getPos());
                }
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.info("splits=" + split + " count=" + count);
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}

From source file:org.apache.pig.pen.DerivedDataVisitor.java

private void evaluateOperator(LogicalOperator op) {
    PhysicalOperator physOp = LogToPhyMap.get(op);
    Random r = new Random();
    // get the list of original inputs

    List<PhysicalOperator> inputs = physOp.getInputs();
    physOp.setInputs(null);
    physOp.setLineageTracer(lineage);
    PhysicalPlan phy = new PhysicalPlan();
    phy.add(physOp);

    // replace the original inputs by POReads
    for (LogicalOperator l : op.getPlan().getPredecessors(op)) {
        DataBag bag = derivedData.get(l);
        PORead por = new PORead(new OperatorKey("", r.nextLong()), bag);
        phy.add(por);
        try {
            phy.connect(por, physOp);
        } catch (PlanException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            log.error("Error connecting " + por.name() + " to " + physOp.name());
        }
    }

    DataBag output = BagFactory.getInstance().newDefaultBag();
    Tuple t = null;
    try {
        for (Result res = physOp.getNext(t); res.returnStatus != POStatus.STATUS_EOP; res = physOp.getNext(t)) {
            output.add((Tuple) res.result);
        }
    } catch (ExecException e) {
        log.error("Error evaluating operator : " + physOp.name());
    }
    derivedData.put(op, output);

    try {
        Collection<IdentityHashSet<Tuple>> eq = EquivalenceClasses.getEquivalenceClasses(op, derivedData);
        EqClasses.addAll(eq);
        OpToEqClasses.put(op, eq);
    } catch (ExecException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        log.error("Error updating equivalence classes while evaluating operators. \n" + e.getMessage());
    }

    // re-attach the original operators
    physOp.setInputs(inputs);
    physOp.setLineageTracer(null);
}

From source file:org.apache.hadoop.hdfs.server.namenode.TestBlocksMap.java

private void insertBlocks(int numBlocks, boolean underConstruction) {
    Random r = new Random();
    map = new BlocksMap(1000, 0.75f, new MyNamesystem());
    Set<Long> ids = new HashSet<Long>(numBlocks);

    blockList = new HashSet<Block>(numBlocks);
    if (underConstruction) {
        INodeFile node = new INodeFile();
        iNode = new INodeFileUnderConstruction(node.getId(), node.getLocalNameBytes(), (short) 2,
                node.getModificationTime(), 0, node.getPreferredBlockSize(), node.getBlocks(),
                node.getPermissionStatus(), "", "", null);
    } else {
        iNode = new INodeFile();
    }
    int inserted = 0;

    while (inserted < numBlocks) {
        long id;
        // keep drawing until nextLong() produces an id not seen before
        while (ids.contains((id = r.nextLong())))
            ;
        ids.add(id);
        Block b = new Block(id, 0, GenerationStamp.FIRST_VALID_STAMP);
        blockList.add(b);
        BlockInfo info = map.addINode(b, iNode, iNode.getReplication());

        // create 2 datanode descriptors
        DatanodeDescriptor dd;

        dd = new DatanodeDescriptor();
        dd.addBlock(info);
        dd = new DatanodeDescriptor();
        dd.addBlock(info);

        inserted++;
    }
}

From source file:org.apache.hadoop.hbase.regionserver.wal.TestHLogFiltering.java

private void fillTable() throws IOException, InterruptedException {
    HTable table = TEST_UTIL.createTable(TABLE_NAME, FAMILIES, 3, Bytes.toBytes("row0"), Bytes.toBytes("row99"),
            NUM_RS);
    Random rand = new Random(19387129L);
    for (int iStoreFile = 0; iStoreFile < 4; ++iStoreFile) {
        for (int iRow = 0; iRow < 100; ++iRow) {
            final byte[] row = Bytes.toBytes("row" + iRow);
            Put put = new Put(row);
            Delete del = new Delete(row);
            for (int iCol = 0; iCol < 10; ++iCol) {
                final byte[] cf = rand.nextBoolean() ? CF1 : CF2;
                final long ts = Math.abs(rand.nextInt());
                final byte[] qual = Bytes.toBytes("col" + iCol);
                if (rand.nextBoolean()) {
                    final byte[] value = Bytes
                            .toBytes("value_for_row_" + iRow + "_cf_" + Bytes.toStringBinary(cf) + "_col_"
                                    + iCol + "_ts_" + ts + "_random_" + rand.nextLong());
                    put.add(cf, qual, ts, value);
                } else if (rand.nextDouble() < 0.8) {
                    del.deleteColumn(cf, qual, ts);
                } else {
                    del.deleteColumns(cf, qual, ts);
                }
            }
            table.put(put);
            table.delete(del);
            table.flushCommits();
        }
    }
    TEST_UTIL.waitUntilAllRegionsAssigned(TABLE_NAME);
}

From source file:com.indeed.lsmtree.core.TestImmutableBTreeIndex.java

public void testLargeKeys() throws IOException {

    final TreeMap<String, Long> map = Maps.newTreeMap();
    final Random r = new Random(0);
    final String[] strings = new String[10000];
    for (int i = 0; i < strings.length; i++) {
        final byte[] bytes = new byte[16384];
        r.nextBytes(bytes);
        strings[i] = new String(Base64.encodeBase64(bytes));
    }
    Arrays.sort(strings);
    Iterator<Generation.Entry<String, Long>> iterator = new AbstractIterator<Generation.Entry<String, Long>>() {
        int index = 0;

        @Override
        protected Generation.Entry<String, Long> computeNext() {
            if (index >= strings.length)
                return endOfData();
            final String s = strings[index];
            final long l = r.nextLong();
            index++;
            map.put(s, l);
            return Generation.Entry.create(s, l);
        }
    };
    ImmutableBTreeIndex.Writer.write(tmpDir, iterator, new StringSerializer(), new LongSerializer(), 65536,
            false);
    ImmutableBTreeIndex.Reader<String, Long> index = new ImmutableBTreeIndex.Reader<String, Long>(tmpDir,
            new StringSerializer(), new LongSerializer(), false);
    Iterator<Generation.Entry<String, Long>> it1 = index.iterator();
    Iterator<Map.Entry<String, Long>> it2 = map.entrySet().iterator();
    int i = 0;
    while (it2.hasNext()) {
        i++;
        assertTrue(it1.hasNext());
        Generation.Entry<String, Long> next1 = it1.next();
        Map.Entry<String, Long> next2 = it2.next();
        assertEquals(next1.getKey(), next2.getKey());
        assertEquals(next1.getValue(), next2.getValue());
    }
    assertFalse(it1.hasNext());
}

From source file:io.druid.query.aggregation.hyperloglog.HyperLogLogCollectorTest.java

@Test
public void testSparseEstimation() throws Exception {
    final Random random = new Random(0);
    HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();

    for (int i = 0; i < 100; ++i) {
        collector.add(fn.hashLong(random.nextLong()).asBytes());
    }

    Assert.assertEquals(collector.estimateCardinality(), collector.estimateByteBuffer(collector.toByteBuffer()),
            0.0d);
}

From source file:org.apache.flink.table.codegen.SortCodeGeneratorTest.java

private Object[] generateValues(InternalType type) {

    Random rnd = new Random();

    int seedNum = RECORD_NUM / 5;
    Object[] seeds = new Object[seedNum];
    seeds[0] = null;
    seeds[1] = value1(type, rnd);
    seeds[2] = value2(type, rnd);
    seeds[3] = value3(type, rnd);
    for (int i = 4; i < seeds.length; i++) {
        if (type.equals(InternalTypes.BOOLEAN)) {
            seeds[i] = rnd.nextBoolean();
        } else if (type.equals(InternalTypes.BYTE)) {
            seeds[i] = (byte) rnd.nextLong();
        } else if (type.equals(InternalTypes.SHORT)) {
            seeds[i] = (short) rnd.nextLong();
        } else if (type.equals(InternalTypes.INT)) {
            seeds[i] = rnd.nextInt();
        } else if (type.equals(InternalTypes.LONG)) {
            seeds[i] = rnd.nextLong();
        } else if (type.equals(InternalTypes.FLOAT)) {
            seeds[i] = rnd.nextFloat() * rnd.nextLong();
        } else if (type.equals(InternalTypes.DOUBLE)) {
            seeds[i] = rnd.nextDouble() * rnd.nextLong();
        } else if (type.equals(InternalTypes.STRING)) {
            seeds[i] = BinaryString.fromString(RandomStringUtils.random(rnd.nextInt(20)));
        } else if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            BigDecimal decimal = new BigDecimal(rnd.nextInt()).divide(
                    new BigDecimal(ThreadLocalRandom.current().nextInt(1, 256)),
                    ThreadLocalRandom.current().nextInt(1, 30), BigDecimal.ROUND_HALF_EVEN);
            seeds[i] = Decimal.fromBigDecimal(decimal, decimalType.precision(), decimalType.scale());
        } else if (type instanceof ArrayType || type.equals(InternalTypes.BINARY)) {
            byte[] bytes = new byte[rnd.nextInt(16) + 1];
            rnd.nextBytes(bytes);
            seeds[i] = type.equals(InternalTypes.BINARY) ? bytes : BinaryArray.fromPrimitiveArray(bytes);
        } else if (type instanceof RowType) {
            RowType rowType = (RowType) type;
            if (rowType.getTypeAt(0).equals(InternalTypes.INT)) {
                seeds[i] = GenericRow.of(rnd.nextInt());
            } else {
                seeds[i] = GenericRow.of(GenericRow.of(rnd.nextInt()));
            }
        } else if (type instanceof GenericType) {
            seeds[i] = new BinaryGeneric<>(rnd.nextInt(), IntSerializer.INSTANCE);
        } else {
            throw new RuntimeException("Not support!");
        }
    }

    // result values
    Object[] results = new Object[RECORD_NUM];
    for (int i = 0; i < RECORD_NUM; i++) {
        results[i] = seeds[rnd.nextInt(seedNum)];
    }
    return results;
}

From source file:us.mn.state.health.lims.result.action.util.ResultsLoadUtility.java

private void addUserSelectionReflexes(List<TestResultItem> testList) {
    TestReflexUtil reflexUtil = new TestReflexUtil();

    Map<String, TestResultItem> groupedSibReflexMapping = new HashMap<String, TestResultItem>();

    for (TestResultItem resultItem : testList) {
        //N.B. showSampleDetails should be renamed.  It means that it is the first result for that group of accession numbers
        if (resultItem.isShowSampleDetails()) {
            groupedSibReflexMapping = new HashMap<String, TestResultItem>();
            reflexGroup++;
        }

        if (resultItem.isReflexGroup()) {
            resultItem.setReflexParentGroup(reflexGroup);
        }

        List<TestReflex> reflexList = reflexUtil
                .getPossibleUserChoiceTestReflexsForTest(resultItem.getTestId());
        resultItem.setUserChoiceReflex(reflexList.size() > 0);

        boolean possibleSibs = !groupedSibReflexMapping.isEmpty();

        for (TestReflex testReflex : reflexList) {
            if (!GenericValidator.isBlankOrNull(testReflex.getSiblingReflexId())) {
                if (possibleSibs) {
                    TestResultItem sibTestResultItem = groupedSibReflexMapping
                            .get(testReflex.getSiblingReflexId());
                    if (sibTestResultItem != null) {
                        Random r = new Random();
                        String key1 = Long.toString(Math.abs(r.nextLong()), 36);
                        String key2 = Long.toString(Math.abs(r.nextLong()), 36);

                        sibTestResultItem.setThisReflexKey(key1);
                        sibTestResultItem.setSiblingReflexKey(key2);

                        resultItem.setThisReflexKey(key2);
                        resultItem.setSiblingReflexKey(key1);

                        break;
                    }
                }
                groupedSibReflexMapping.put(testReflex.getId(), resultItem);
            }

        }

    }

}
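
A note on the last example: Long.toString(Math.abs(r.nextLong()), 36) almost always yields a sign-free key, but Math.abs(Long.MIN_VALUE) overflows and remains negative, so about one draw in 2^64 can still produce a key starting with '-'. Below is a sketch of a variant that avoids that edge case; the class and method names are illustrative, not from the project.

import java.util.Random;

public class ReflexKeys {
    // Masking with Long.MAX_VALUE clears the sign bit, so the result is
    // always in [0, Long.MAX_VALUE] and the base-36 string never starts with '-'.
    static String randomKey(Random r) {
        return Long.toString(r.nextLong() & Long.MAX_VALUE, 36);
    }

    public static void main(String[] args) {
        Random r = new Random();
        System.out.println(randomKey(r));   // value varies per run
    }
}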