Example usage for java.util Random nextBoolean

List of usage examples for java.util Random nextBoolean

Introduction

On this page you can find usage examples for java.util Random nextBoolean.

Prototype

public boolean nextBoolean() 

Document

Returns the next pseudorandom, uniformly distributed boolean value from this random number generator's sequence.
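Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the projects listed): each call to nextBoolean() returns true or false with equal probability, and constructing the Random with a fixed seed makes the sequence reproducible.

import java.util.Random;

public class NextBooleanDemo {

    public static void main(String[] args) {
        // A fixed seed makes the generated sequence reproducible between runs.
        Random random = new Random(42L);

        int trueCount = 0;
        for (int i = 0; i < 10; i++) {
            // nextBoolean() returns true or false, each with probability 0.5.
            if (random.nextBoolean()) {
                trueCount++;
            }
        }
        System.out.println("true values in 10 calls: " + trueCount);
    }
}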

Usage

From source file:org.apache.flink.table.codegen.SortCodeGeneratorTest.java

private Object[] generateValues(InternalType type) {

    Random rnd = new Random();

    int seedNum = RECORD_NUM / 5;
    Object[] seeds = new Object[seedNum];
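    // The first entries are fixed (null plus type-specific values); the rest are filled with random values below.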
    seeds[0] = null;
    seeds[1] = value1(type, rnd);
    seeds[2] = value2(type, rnd);
    seeds[3] = value3(type, rnd);
    for (int i = 4; i < seeds.length; i++) {
        if (type.equals(InternalTypes.BOOLEAN)) {
            seeds[i] = rnd.nextBoolean();
        } else if (type.equals(InternalTypes.BYTE)) {
            seeds[i] = (byte) rnd.nextLong();
        } else if (type.equals(InternalTypes.SHORT)) {
            seeds[i] = (short) rnd.nextLong();
        } else if (type.equals(InternalTypes.INT)) {
            seeds[i] = rnd.nextInt();
        } else if (type.equals(InternalTypes.LONG)) {
            seeds[i] = rnd.nextLong();
        } else if (type.equals(InternalTypes.FLOAT)) {
            seeds[i] = rnd.nextFloat() * rnd.nextLong();
        } else if (type.equals(InternalTypes.DOUBLE)) {
            seeds[i] = rnd.nextDouble() * rnd.nextLong();
        } else if (type.equals(InternalTypes.STRING)) {
            seeds[i] = BinaryString.fromString(RandomStringUtils.random(rnd.nextInt(20)));
        } else if (type instanceof DecimalType) {
            DecimalType decimalType = (DecimalType) type;
            BigDecimal decimal = new BigDecimal(rnd.nextInt()).divide(
                    new BigDecimal(ThreadLocalRandom.current().nextInt(1, 256)),
                    ThreadLocalRandom.current().nextInt(1, 30), BigDecimal.ROUND_HALF_EVEN);
            seeds[i] = Decimal.fromBigDecimal(decimal, decimalType.precision(), decimalType.scale());
        } else if (type instanceof ArrayType || type.equals(InternalTypes.BINARY)) {
            byte[] bytes = new byte[rnd.nextInt(16) + 1];
            rnd.nextBytes(bytes);
            seeds[i] = type.equals(InternalTypes.BINARY) ? bytes : BinaryArray.fromPrimitiveArray(bytes);
        } else if (type instanceof RowType) {
            RowType rowType = (RowType) type;
            if (rowType.getTypeAt(0).equals(InternalTypes.INT)) {
                seeds[i] = GenericRow.of(rnd.nextInt());
            } else {
                seeds[i] = GenericRow.of(GenericRow.of(rnd.nextInt()));
            }
        } else if (type instanceof GenericType) {
            seeds[i] = new BinaryGeneric<>(rnd.nextInt(), IntSerializer.INSTANCE);
        } else {
            throw new RuntimeException("Not support!");
        }
    }

    // result values
    Object[] results = new Object[RECORD_NUM];
    for (int i = 0; i < RECORD_NUM; i++) {
        results[i] = seeds[rnd.nextInt(seedNum)];
    }
    return results;
}

From source file:com.facebook.presto.execution.resourceGroups.TestResourceGroups.java

@Test(timeOut = 10_000)
public void testPriorityScheduling() {
    RootInternalResourceGroup root = new RootInternalResourceGroup("root", (group, export) -> {
    }, directExecutor());
    root.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    root.setMaxQueuedQueries(100);
    // Start with zero capacity, so that nothing starts running until we've added all the queries
    root.setMaxRunningQueries(0);
    root.setSchedulingPolicy(QUERY_PRIORITY);
    InternalResourceGroup group1 = root.getOrCreateSubGroup("1");
    group1.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group1.setMaxQueuedQueries(100);
    group1.setMaxRunningQueries(1);
    InternalResourceGroup group2 = root.getOrCreateSubGroup("2");
    group2.setSoftMemoryLimit(new DataSize(1, MEGABYTE));
    group2.setMaxQueuedQueries(100);
    group2.setMaxRunningQueries(1);

    SortedMap<Integer, MockQueryExecution> queries = new TreeMap<>();

    Random random = new Random();
    for (int i = 0; i < 100; i++) {
        int priority;
        do {
            priority = random.nextInt(1_000_000) + 1;
        } while (queries.containsKey(priority));

        MockQueryExecution query = new MockQueryExecution(0, "query_id", priority);
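        // nextBoolean() randomly assigns each query to group1 or group2.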
        if (random.nextBoolean()) {
            group1.run(query);
        } else {
            group2.run(query);
        }
        queries.put(priority, query);
    }

    root.setMaxRunningQueries(1);

    List<MockQueryExecution> orderedQueries = new ArrayList<>(queries.values());
    reverse(orderedQueries);

    for (MockQueryExecution query : orderedQueries) {
        root.processQueuedQueries();
        assertEquals(query.getState(), RUNNING);
        query.complete();
    }
}

From source file:dremel.common.AvroTest.java

@SuppressWarnings(value = "unchecked")
private static Object generateRandomDataRecursive(Schema schema, Random random, int size) {
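    // Recursively generates random test data for an Avro schema; BOOLEAN fields are filled via random.nextBoolean().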
    switch (schema.getType()) {
    case RECORD:
        GenericRecord record = new GenericData.Record(schema);
        boolean isFieldsEmpty = true;
        for (Schema.Field field : schema.getFields()) {
            Object o = generateRandomDataRecursive(field.schema(), random, size);
            if (o != null) {
                record.put(field.name(), o);
                isFieldsEmpty = isFieldsEmpty && o instanceof GenericArray
                        && ((GenericArray<Object>) o).size() == 0;
            }
        }
        return isFieldsEmpty ? null : record;
    case ARRAY:
        int length = size + (random.nextInt(10));
        GenericArray<Object> array = new GenericData.Array<Object>(length <= 0 ? 0 : length, schema);
        Object o;
        for (int i = 0; i < length; i++) {
            o = generateRandomDataRecursive(schema.getElementType(), random, size > 0 ? size - 1 : 0);
            if (o != null)
                array.add(o);
        }
        return array;
    case STRING:
        return generateRandomUtf8(random, 40);
    case INT:
        return random.nextInt();
    case LONG:
        return random.nextLong();
    case FLOAT:
        return random.nextFloat();
    case DOUBLE:
        return random.nextDouble();
    case BOOLEAN:
        return random.nextBoolean();
    default:
        throw new RuntimeException("Unknown type: " + schema);
    }
}

From source file:org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableFast.java

private void testBinarySortableFast(SerdeRandomRowSource source, Object[][] rows,
        boolean[] columnSortOrderIsDesc, byte[] columnNullMarker, byte[] columnNotNullMarker,
        AbstractSerDe serde, StructObjectInspector rowOI, AbstractSerDe serde_fewer,
        StructObjectInspector writeRowOI, boolean ascending, TypeInfo[] typeInfos, boolean useIncludeColumns,
        boolean doWriteFewerColumns, Random r) throws Throwable {

    int rowCount = rows.length;
    int columnCount = typeInfos.length;

    boolean[] columnsToInclude = null;
    if (useIncludeColumns) {
        columnsToInclude = new boolean[columnCount];
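        // nextBoolean() randomly decides, per column, whether the field will be read or skipped during deserialization.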
        for (int i = 0; i < columnCount; i++) {
            columnsToInclude[i] = r.nextBoolean();
        }
    }

    int writeColumnCount = columnCount;
    if (doWriteFewerColumns) {
        writeColumnCount = writeRowOI.getAllStructFieldRefs().size();
    }

    BinarySortableSerializeWrite binarySortableSerializeWrite = new BinarySortableSerializeWrite(
            columnSortOrderIsDesc, columnNullMarker, columnNotNullMarker);

    // Try to serialize

    // One Writable per row.
    BytesWritable serializeWriteBytes[] = new BytesWritable[rowCount];

    int[][] perFieldWriteLengthsArray = new int[rowCount][];
    for (int i = 0; i < rowCount; i++) {
        Object[] row = rows[i];
        Output output = new Output();
        binarySortableSerializeWrite.set(output);

        int[] perFieldWriteLengths = new int[columnCount];
        for (int index = 0; index < writeColumnCount; index++) {
            VerifyFast.serializeWrite(binarySortableSerializeWrite, typeInfos[index], row[index]);
            perFieldWriteLengths[index] = output.getLength();
        }
        perFieldWriteLengthsArray[i] = perFieldWriteLengths;

        BytesWritable bytesWritable = new BytesWritable();
        bytesWritable.set(output.getData(), 0, output.getLength());
        serializeWriteBytes[i] = bytesWritable;
        if (i > 0) {
            BytesWritable previousBytesWritable = serializeWriteBytes[i - 1];
            int compareResult = previousBytesWritable.compareTo(bytesWritable);
            if ((compareResult < 0 && !ascending) || (compareResult > 0 && ascending)) {
                System.out.println("Test failed in " + (ascending ? "ascending" : "descending") + " order with "
                        + (i - 1) + " and " + i);
                System.out.println("serialized data [" + (i - 1) + "] = "
                        + TestBinarySortableSerDe.hexString(serializeWriteBytes[i - 1]));
                System.out.println("serialized data [" + i + "] = "
                        + TestBinarySortableSerDe.hexString(serializeWriteBytes[i]));
                fail("Sort order of serialized " + (i - 1) + " and " + i + " are reversed!");
            }
        }
    }

    // Try to deserialize using DeserializeRead our Writable row objects created by SerializeWrite.
    for (int i = 0; i < rowCount; i++) {
        Object[] row = rows[i];
        BinarySortableDeserializeRead binarySortableDeserializeRead = new BinarySortableDeserializeRead(
                typeInfos, /* useExternalBuffer */ false, columnSortOrderIsDesc, columnNullMarker,
                columnNotNullMarker);

        BytesWritable bytesWritable = serializeWriteBytes[i];
        binarySortableDeserializeRead.set(bytesWritable.getBytes(), 0, bytesWritable.getLength());

        for (int index = 0; index < columnCount; index++) {
            if (useIncludeColumns && !columnsToInclude[index]) {
                binarySortableDeserializeRead.skipNextField();
            } else if (index >= writeColumnCount) {
                // Should come back a null.
                VerifyFast.verifyDeserializeRead(binarySortableDeserializeRead, typeInfos[index], null);
            } else {
                verifyRead(binarySortableDeserializeRead, typeInfos[index], row[index]);
            }
        }
        if (writeColumnCount == columnCount) {
            TestCase.assertTrue(binarySortableDeserializeRead.isEndOfInputReached());
        }

        /*
         * Clip off one byte and expect to get an EOFException on the write field.
         */
        BinarySortableDeserializeRead binarySortableDeserializeRead2 = new BinarySortableDeserializeRead(
                typeInfos, /* useExternalBuffer */ false, columnSortOrderIsDesc, columnNullMarker,
                columnNotNullMarker);

        binarySortableDeserializeRead2.set(bytesWritable.getBytes(), 0, bytesWritable.getLength() - 1); // One fewer byte.

        for (int index = 0; index < writeColumnCount; index++) {
            if (index == writeColumnCount - 1) {
                boolean threw = false;
                try {
                    verifyRead(binarySortableDeserializeRead2, typeInfos[index], row[index]);
                } catch (EOFException e) {
                    //          debugDetailedReadPositionString = binarySortableDeserializeRead2.getDetailedReadPositionString();
                    //          debugStackTrace = e.getStackTrace();
                    threw = true;
                }

                if (!threw && row[index] != null) {
                    Assert.fail();
                }
            } else {
                if (useIncludeColumns && !columnsToInclude[index]) {
                    binarySortableDeserializeRead2.skipNextField();
                } else {
                    verifyRead(binarySortableDeserializeRead2, typeInfos[index], row[index]);
                }
            }
        }

    }

    // Try to deserialize using SerDe class our Writable row objects created by SerializeWrite.
    for (int i = 0; i < rowCount; i++) {
        BytesWritable bytesWritable = serializeWriteBytes[i];

        // Note that regular SerDe doesn't tolerate fewer columns.
        List<Object> deserializedRow;
        if (doWriteFewerColumns) {
            deserializedRow = (List<Object>) serde_fewer.deserialize(bytesWritable);
        } else {
            deserializedRow = (List<Object>) serde.deserialize(bytesWritable);
        }

        Object[] row = rows[i];
        for (int index = 0; index < writeColumnCount; index++) {
            Object expected = row[index];
            Object object = deserializedRow.get(index);
            if (expected == null || object == null) {
                if (expected != null || object != null) {
                    fail("SerDe deserialized NULL column mismatch");
                }
            } else {
                if (!object.equals(expected)) {
                    fail("SerDe deserialized value does not match (expected " + expected.getClass().getName()
                            + " " + expected.toString() + ", actual " + object.getClass().getName() + " "
                            + object.toString() + ")");
                }
            }
        }
    }

    // One Writable per row.
    BytesWritable serdeBytes[] = new BytesWritable[rowCount];

    // Serialize using the SerDe, then below deserialize using DeserializeRead.
    for (int i = 0; i < rowCount; i++) {
        Object[] row = rows[i];

        // Since SerDe reuses memory, we will need to make a copy.
        BytesWritable serialized;
        if (doWriteFewerColumns) {
            serialized = (BytesWritable) serde_fewer.serialize(row, rowOI);
        } else {
            serialized = (BytesWritable) serde.serialize(row, rowOI);
        }
        BytesWritable bytesWritable = new BytesWritable();
        bytesWritable.set(serialized);
        byte[] serDeOutput = Arrays.copyOfRange(bytesWritable.getBytes(), 0, bytesWritable.getLength());

        byte[] serializeWriteExpected = Arrays.copyOfRange(serializeWriteBytes[i].getBytes(), 0,
                serializeWriteBytes[i].getLength());
        if (!Arrays.equals(serDeOutput, serializeWriteExpected)) {
            int mismatchPos = -1;
            if (serDeOutput.length != serializeWriteExpected.length) {
                for (int b = 0; b < Math.min(serDeOutput.length, serializeWriteExpected.length); b++) {
                    if (serDeOutput[b] != serializeWriteExpected[b]) {
                        mismatchPos = b;
                        break;
                    }
                }
                fail("Different byte array lengths: serDeOutput.length " + serDeOutput.length
                        + ", serializeWriteExpected.length " + serializeWriteExpected.length + " mismatchPos "
                        + mismatchPos + " perFieldWriteLengths "
                        + Arrays.toString(perFieldWriteLengthsArray[i]));
            }
            List<Integer> differentPositions = new ArrayList<>();
            for (int b = 0; b < serDeOutput.length; b++) {
                if (serDeOutput[b] != serializeWriteExpected[b]) {
                    differentPositions.add(b);
                }
            }
            if (differentPositions.size() > 0) {
                List<String> serializeWriteExpectedFields = new ArrayList<String>();
                List<String> serDeFields = new ArrayList<String>();
                int f = 0;
                int lastBegin = 0;
                for (int b = 0; b < serDeOutput.length; b++) {
                    int writeLength = perFieldWriteLengthsArray[i][f];
                    if (b + 1 == writeLength) {
                        serializeWriteExpectedFields
                                .add(displayBytes(serializeWriteExpected, lastBegin, writeLength - lastBegin));
                        serDeFields.add(displayBytes(serDeOutput, lastBegin, writeLength - lastBegin));
                        f++;
                        lastBegin = b + 1;
                    }
                }
                fail("SerializeWrite and SerDe serialization does not match at positions "
                        + differentPositions.toString() + "\n(SerializeWrite: "
                        + serializeWriteExpectedFields.toString() + "\nSerDe: " + serDeFields.toString()
                        + "\nperFieldWriteLengths " + Arrays.toString(perFieldWriteLengthsArray[i])
                        + "\nprimitiveTypeInfos " + Arrays.toString(typeInfos) + "\nrow "
                        + Arrays.toString(row));
            }
        }
        serdeBytes[i] = bytesWritable;
    }

    // Try to deserialize using DeserializeRead our Writable row objects created by SerDe.
    for (int i = 0; i < rowCount; i++) {
        Object[] row = rows[i];
        BinarySortableDeserializeRead binarySortableDeserializeRead = new BinarySortableDeserializeRead(
                typeInfos, /* useExternalBuffer */ false, columnSortOrderIsDesc, columnNullMarker,
                columnNotNullMarker);

        BytesWritable bytesWritable = serdeBytes[i];
        binarySortableDeserializeRead.set(bytesWritable.getBytes(), 0, bytesWritable.getLength());

        for (int index = 0; index < columnCount; index++) {
            if (useIncludeColumns && !columnsToInclude[index]) {
                binarySortableDeserializeRead.skipNextField();
            } else if (index >= writeColumnCount) {
                // Should come back a null.
                verifyRead(binarySortableDeserializeRead, typeInfos[index], null);
            } else {
                verifyRead(binarySortableDeserializeRead, typeInfos[index], row[index]);
            }
        }
        if (writeColumnCount == columnCount) {
            TestCase.assertTrue(binarySortableDeserializeRead.isEndOfInputReached());
        }
    }
}

From source file:org.elasticsearch.test.ESIntegTestCase.java

private static Settings.Builder setRandomIndexNormsLoading(Random random, Settings.Builder builder) {
    if (random.nextBoolean()) {
        builder.put(SearchService.NORMS_LOADING_KEY, RandomPicks.randomFrom(random,
                Arrays.asList(MappedFieldType.Loading.EAGER, MappedFieldType.Loading.LAZY)));
    }
    return builder;
}

From source file:org.elasticsearch.test.ESIntegTestCase.java

private static Settings.Builder setRandomIndexMergeSettings(Random random, Settings.Builder builder) {
    if (random.nextBoolean()) {
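        // The compound-format setting accepts either a ratio (double) or an on/off flag (boolean); choose one form at random.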
        builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT,
                random.nextBoolean() ? random.nextDouble() : random.nextBoolean());
    }
    switch (random.nextInt(4)) {
    case 3:
        final int maxThreadCount = RandomInts.randomIntBetween(random, 1, 4);
        final int maxMergeCount = RandomInts.randomIntBetween(random, maxThreadCount, maxThreadCount + 4);
        builder.put(MergeSchedulerConfig.MAX_MERGE_COUNT, maxMergeCount);
        builder.put(MergeSchedulerConfig.MAX_THREAD_COUNT, maxThreadCount);
        break;
    }

    return builder;
}

From source file:org.elasticsearch.test.ESIntegTestCase.java

private static Settings.Builder setRandomIndexTranslogSettings(Random random, Settings.Builder builder) {
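    // Each nextBoolean() call below gives a 50% chance to override the corresponding translog setting.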
    if (random.nextBoolean()) {
        builder.put(TranslogService.INDEX_TRANSLOG_FLUSH_THRESHOLD_OPS,
                RandomInts.randomIntBetween(random, 1, 10000));
    }
    if (random.nextBoolean()) {
        builder.put(TranslogService.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE,
                new ByteSizeValue(RandomInts.randomIntBetween(random, 1, 300), ByteSizeUnit.MB));
    }
    if (random.nextBoolean()) {
        builder.put(TranslogService.INDEX_TRANSLOG_FLUSH_THRESHOLD_PERIOD,
                TimeValue.timeValueMinutes(RandomInts.randomIntBetween(random, 1, 60)));
    }
    if (random.nextBoolean()) {
        builder.put(TranslogService.INDEX_TRANSLOG_FLUSH_INTERVAL,
                TimeValue.timeValueMillis(RandomInts.randomIntBetween(random, 1, 10000)));
    }
    if (random.nextBoolean()) {
        builder.put(TranslogService.INDEX_TRANSLOG_DISABLE_FLUSH, random.nextBoolean());
    }
    if (random.nextBoolean()) {
        builder.put(TranslogConfig.INDEX_TRANSLOG_DURABILITY,
                RandomPicks.randomFrom(random, Translog.Durabilty.values()));
    }

    if (random.nextBoolean()) {
        builder.put(TranslogConfig.INDEX_TRANSLOG_FS_TYPE,
                RandomPicks.randomFrom(random, TranslogWriter.Type.values()));
        if (rarely(random)) {
            builder.put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL, 0); // 0 has special meaning to sync each op
        } else {
            builder.put(TranslogConfig.INDEX_TRANSLOG_SYNC_INTERVAL,
                    RandomInts.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS);
        }
    }

    return builder;
}

From source file:fr.xebia.demo.objectgrid.ticketing.test.AbstractTicketingGridTest.java

protected void loadData(EntityManager entityManager) {

    Random random = new Random();

    Date today = DateUtils.truncate(new Date(), Calendar.DAY_OF_MONTH);

    // PARIS-MARSEILLE
    for (int hour = 6; hour <= 24; hour++) {

        Train train = new Train(random.nextInt(), "PARIS-MARSEILLE-" + hour, Type.HIGH_SPEED);
        train.getTrainStops()
                .add(new TrainStop(random.nextInt(), DateUtils.addHours(today, hour), PARIS_GARE_DE_LYON));
        train.getTrainStops()
                .add(new TrainStop(random.nextInt(), DateUtils.addHours(today, 2 + hour), AVIGNON_TGV));
        train.getTrainStops().add(
                new TrainStop(random.nextInt(), DateUtils.addHours(today, 3 + hour), MARSEILLE_SAINT_CHARLES));
        for (int seatNumber = 1; seatNumber <= 10; seatNumber++) {
            train.getSeats().add(
                    new Seat(random.nextInt(), seatNumber, random.nextBoolean(), 100 + random.nextInt(100)));
        }

        logger.info("persist " + train);
        entityManager.getTransaction().begin();
        entityManager.persist(train);
        entityManager.getTransaction().commit();
        persistedTrainIds.add(train.getId());
    }

    // MARSEILLE-PARIS
    for (int hour = 6; hour <= 24; hour++) {

        Train train = new Train(random.nextInt(), "MARSEILLE-PARIS-" + hour, Type.HIGH_SPEED);
        train.getTrainStops()
                .add(new TrainStop(random.nextInt(), DateUtils.addHours(today, hour), MARSEILLE_SAINT_CHARLES));
        train.getTrainStops()
                .add(new TrainStop(random.nextInt(), DateUtils.addHours(today, 1 + hour), AVIGNON_TGV));
        train.getTrainStops()
                .add(new TrainStop(random.nextInt(), DateUtils.addHours(today, 3 + hour), PARIS_GARE_DE_LYON));
        for (int seatNumber = 1; seatNumber <= 10; seatNumber++) {
            train.getSeats().add(
                    new Seat(random.nextInt(), seatNumber, random.nextBoolean(), 200 + random.nextInt(100)));
        }

        logger.info("persist " + train);
        entityManager.getTransaction().begin();
        entityManager.persist(train);
        entityManager.getTransaction().commit();
        persistedTrainIds.add(train.getId());
    }

}

From source file:org.apache.flink.table.codegen.SortCodeGeneratorTest.java

private void randomKeysAndOrders() {
    Random rnd = new Random();
    fields = new int[rnd.nextInt(9) + 1];
    for (int i = 0; i < fields.length; i++) {
        fields[i] = rnd.nextInt(types.length);
    }

    keys = new int[rnd.nextInt(fields.length) + 1];
    LinkedList<Integer> indexQueue = new LinkedList<>();
    for (int i = 0; i < fields.length; i++) {
        indexQueue.add(i);
    }
    Collections.shuffle(indexQueue);
    orders = new boolean[keys.length];
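    // nextBoolean() randomly chooses the sort direction for each selected key.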
    for (int i = 0; i < keys.length; i++) {
        keys[i] = indexQueue.poll();
        orders[i] = rnd.nextBoolean();
    }
    nullsIsLast = SortUtil.getNullDefaultOrders(orders);
}

From source file:org.apache.jackrabbit.core.data.TestCaseBase.java

/**
 * Assert randomly read stream from record.
 */
void doTest(DataStore ds, int offset) throws Exception {
    ArrayList<DataRecord> list = new ArrayList<DataRecord>();
    HashMap<DataRecord, Integer> map = new HashMap<DataRecord, Integer>();
    for (int i = 0; i < 10; i++) {
        int size = 1000000 - (i * 100);
        RandomInputStream in = new RandomInputStream(size + offset, size);
        DataRecord rec = ds.addRecord(in);
        list.add(rec);
        map.put(rec, new Integer(size));
    }
    Random random = new Random(1);
    for (int i = 0; i < list.size(); i++) {
        int pos = random.nextInt(list.size());
        DataRecord rec = list.get(pos);
        int size = map.get(rec);
        rec = ds.getRecord(rec.getIdentifier());
        assertEquals(size, rec.getLength());
        InputStream in = rec.getStream();
        RandomInputStream expected = new RandomInputStream(size + offset, size);
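        // Half the time, re-read the stream through readInputStreamRandomly instead of using it directly.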
        if (random.nextBoolean()) {
            in = readInputStreamRandomly(in, random);
        }
        assertEquals(expected, in);
    }
}