Example usage for java.nio ByteBuffer rewind

Introduction

On this page you can find usage examples for java.nio.ByteBuffer.rewind().

Prototype

public final Buffer rewind() 

Document

Rewinds this buffer. The position is set to zero and the mark is discarded.
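
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects listed below) showing the typical write-rewind-read cycle: after filling a buffer, rewind() moves the position back to zero so the same data can be read from the start.

import java.nio.ByteBuffer;

public class RewindExample {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        buf.putLong(42L);                  // position is now 8
        buf.rewind();                      // position back to 0; limit is unchanged
        System.out.println(buf.getLong()); // prints 42
    }
}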

Usage

From source file:com.knewton.mapreduce.example.StudentEventMapperTest.java

/**
 * Test the end time range filters in the mapper.
 */
@Test
public void testEndRangeStudentEvents() throws Exception {
    conf.set(PropertyConstants.END_DATE.txt, "2013-03-28T23:03:02.394Z");
    underTest.setup(mockedContext);

    // send event outside of time range

    // Mar.29.2013.03:07:21.0.0
    DateTime dt = new DateTime(2013, 3, 29, 3, 7, 21, DateTimeZone.UTC);
    long eventId = dt.getMillis();
    ByteBuffer randomKey = RandomStudentEventGenerator.getRandomIdBuffer();
    ByteBuffer columnName = ByteBuffer.wrap(new byte[8]);
    columnName.putLong(eventId);
    columnName.rewind();
    CellName cellName = simpleDenseCellType.cellFromByteBuffer(columnName);
    Cell column = new BufferCell(cellName, ByteBuffer.wrap(Hex.decodeHex(eventDataString.toCharArray())));
    underTest.map(randomKey, column, mockedContext);
    verify(mockedContext, never()).write(any(LongWritable.class), any(StudentEventWritable.class));

    // send event inside of time range

    // Mar.28.2013.19:53:10.0.0
    dt = new DateTime(2013, 3, 28, 19, 53, 10, DateTimeZone.UTC);
    eventId = dt.getMillis();
    randomKey = RandomStudentEventGenerator.getRandomIdBuffer();
    columnName = ByteBuffer.wrap(new byte[8]);
    columnName.putLong(eventId);
    columnName.rewind();
    cellName = simpleDenseCellType.cellFromByteBuffer(columnName);
    column = new BufferCell(cellName, ByteBuffer.wrap(Hex.decodeHex(eventDataString.toCharArray())));
    underTest.map(randomKey, column, mockedContext);
    verify(mockedContext).write(any(LongWritable.class), any(StudentEventWritable.class));
}

From source file:com.knewton.mapreduce.example.StudentEventMapperTest.java

/**
 * Test the start time range filters in the mapper.
 */
@Test
public void testMapWithStartRangeStudentEvents() throws Exception {
    conf.set(PropertyConstants.START_DATE.txt, "2013-03-28T23:03:02.394Z");
    underTest.setup(mockedContext);

    // send event outside of time range

    // Mar.28.2013.19:53:10.0.0
    DateTime dt = new DateTime(2013, 3, 28, 19, 53, 10, DateTimeZone.UTC);
    long eventId = dt.getMillis();

    ByteBuffer randomKey = RandomStudentEventGenerator.getRandomIdBuffer();
    ByteBuffer columnName = ByteBuffer.wrap(new byte[8]);
    columnName.putLong(eventId);
    columnName.rewind();
    CellName cellName = simpleDenseCellType.cellFromByteBuffer(columnName);
    Cell column = new BufferCell(cellName, ByteBuffer.wrap(Hex.decodeHex(eventDataString.toCharArray())));

    underTest.map(randomKey, column, mockedContext);
    verify(mockedContext, never()).write(any(LongWritable.class), any(StudentEventWritable.class));

    // send event inside of time range

    // Mar.29.2013.03:07:21.0.0
    dt = new DateTime(2013, 3, 29, 3, 7, 21, DateTimeZone.UTC);
    eventId = dt.getMillis();

    columnName = ByteBuffer.wrap(new byte[8]);
    columnName.putLong(eventId);
    columnName.rewind();
    cellName = simpleDenseCellType.cellFromByteBuffer(columnName);
    column = new BufferCell(cellName, ByteBuffer.wrap(Hex.decodeHex(eventDataString.toCharArray())));
    randomKey = RandomStudentEventGenerator.getRandomIdBuffer();
    underTest.map(randomKey, column, mockedContext);
    verify(mockedContext).write(any(LongWritable.class), any(StudentEventWritable.class));
}

From source file:jext2.BlockGroupDescriptor.java

protected ByteBuffer allocateByteBuffer() {
    ByteBuffer buf = ByteBuffer.allocate(32);
    // Defensive: a freshly allocated buffer already has position 0, so this rewind is a no-op.
    buf.rewind();
    return buf;
}

From source file:org.cosmo.common.file.VariableFilePartition.java

public void checkIncompleteWrite(File readFile)
        throws FileNotFoundException, IOException, IncompleteWriteException {
    for (int i = 0; i < _readChannels.length; i++) {
        long size = _readChannels[i].size();
        if (size > MetaBytes) { // there is data
            ByteBuffer buf = ByteBuffer.wrap(new byte[4]);
            _readChannels[i].read(buf, _readChannels[i].size() - 4);
            buf.rewind();
            int marker = buf.getInt();
            if (marker != StoreCompleteMarker) {
                throw new IncompleteWriteException(New.str("File [", Util.addSuffixToFile(readFile, i),
                        "] did not complete last write operation"));
            }
        }
    }
}

From source file:com.knewton.mapreduce.SSTableColumnMapperTest.java

/**
 * Test the start time range filters in the mapper.
 *
 * @throws IOException
 * @throws InterruptedException
 * @throws DecoderException
 */
@Test
public void testStartRangeStudentEvents() throws IOException, InterruptedException, DecoderException {

    // Mar.28.2013.19:53:10.0.0
    DateTime dt = new DateTime(2013, 3, 28, 19, 53, 10, DateTimeZone.UTC);
    long eventId1 = dt.getMillis();
    // Mar.29.2013.03:07:21.0.0
    dt = new DateTime(2013, 3, 29, 3, 7, 21, DateTimeZone.UTC);
    long eventId5 = dt.getMillis();

    ByteBuffer columnName = ByteBuffer.wrap(new byte[8]);
    columnName.putLong(eventId1);
    columnName.rewind();
    IColumn column = new Column(columnName, ByteBuffer.wrap(Hex.decodeHex(eventDataString.toCharArray())));
    Configuration conf = new Configuration();
    conf.set(StudentEventAbstractMapper.START_DATE_PARAMETER_NAME, "2013-03-28T23:03:02.394-04:00");
    DoNothingStudentEventMapper dnsem = new DoNothingStudentEventMapper();
    Mapper<ByteBuffer, IColumn, LongWritable, StudentEventWritable>.Context context = dnsem.new Context(conf,
            new TaskAttemptID(), null, null, null, new DoNothingStatusReporter(), null);
    dnsem.setup(context);
    dnsem.map(RandomStudentEventGenerator.getRandomIdBuffer(), column, context);
    assertNull(dnsem.getRowKey());
    columnName = ByteBuffer.wrap(new byte[8]);
    columnName.putLong(eventId5);
    columnName.rewind();
    column = new Column(columnName, ByteBuffer.wrap(Hex.decodeHex(eventDataString.toCharArray())));
    ByteBuffer randomKey = RandomStudentEventGenerator.getRandomIdBuffer();
    dnsem.map(randomKey, column, context);
    assertEquals(dnsem.getRowKey(), randomKey);
}

From source file:com.knewton.mapreduce.SSTableColumnMapperTest.java

/**
 * Test the end time range filters in the mapper.
 *
 * @throws IOException
 * @throws InterruptedException
 * @throws DecoderException
 */
@Test
public void testEndRangeStudentEvents() throws IOException, InterruptedException, DecoderException {

    // Mar.28.2013.19:53:10.0.0
    DateTime dt = new DateTime(2013, 3, 28, 19, 53, 10, DateTimeZone.UTC);
    long eventId1 = dt.getMillis();
    // Mar.29.2013.03:07:21.0.0
    dt = new DateTime(2013, 3, 29, 3, 7, 21, DateTimeZone.UTC);
    long eventId5 = dt.getMillis();

    ByteBuffer columnName = ByteBuffer.wrap(new byte[8]);
    columnName.putLong(eventId5);
    columnName.rewind();
    IColumn column = new Column(columnName, ByteBuffer.wrap(Hex.decodeHex(eventDataString.toCharArray())));
    Configuration conf = new Configuration();
    conf.set(StudentEventAbstractMapper.END_DATE_PARAMETER_NAME, "2013-03-28T23:03:02.394-04:00");
    DoNothingStudentEventMapper dnsem = new DoNothingStudentEventMapper();
    Mapper<ByteBuffer, IColumn, LongWritable, StudentEventWritable>.Context context = dnsem.new Context(conf,
            new TaskAttemptID(), null, null, null, new DoNothingStatusReporter(), null);
    dnsem.setup(context);
    dnsem.map(RandomStudentEventGenerator.getRandomIdBuffer(), column, context);
    assertNull(dnsem.getRowKey());
    ByteBuffer randomKey = RandomStudentEventGenerator.getRandomIdBuffer();
    columnName = ByteBuffer.wrap(new byte[8]);
    columnName.putLong(eventId1);
    columnName.rewind();
    column = new Column(columnName, ByteBuffer.wrap(Hex.decodeHex(eventDataString.toCharArray())));
    dnsem.map(randomKey, column, context);
    assertEquals(dnsem.getRowKey(), randomKey);
}

From source file:org.cosmo.common.file.VariableFilePartition.java

/**
 *   Writes an entry whose metadata adds 8 bytes: a 4-byte length prefix, followed by
 *   the actual content and a 4-byte completion marker, so an entry of 100 bytes
 *   takes 108 bytes on disk.
 *
 *   Also, this operation appends to the end of the last file and returns the file position.
 */
public synchronized long writeSizedEntry(byte[] b, int off, int len) throws IOException {
    // 8 bytes extra for Len (int4) + Marker (int4)
    int chunkId = chunkIdForWrite(size(), MetaBytes + len);
    long writePosition = _writeChannels[chunkId].position();
    ByteBuffer buf = ByteBuffer.allocate(MetaBytes + len);
    buf.putInt(len);
    buf.put(b, off, len);
    buf.putInt(StoreCompleteMarker);
    buf.rewind();
    _writeChannels[chunkId].write(buf);
    return writePosition + (_sizePerFile * chunkId);
}
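
For illustration, a sketch of how such an entry might be read back, assuming the layout described above (4-byte length prefix, content, 4-byte marker). The readSizedEntry method and its readChannel parameter are hypothetical and not part of the original class.

// Hypothetical inverse of writeSizedEntry; assumes java.nio.channels.FileChannel
// and the entry layout described above.
public static byte[] readSizedEntry(FileChannel readChannel, long position) throws IOException {
    ByteBuffer lenBuf = ByteBuffer.allocate(4);
    readChannel.read(lenBuf, position);
    lenBuf.rewind();                 // reset position so the int can be read back
    int len = lenBuf.getInt();
    ByteBuffer content = ByteBuffer.allocate(len);
    readChannel.read(content, position + 4);
    return content.array();
}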

From source file:com.sastix.cms.server.services.cache.CacheFileUtilsServiceImpl.java

@Override
public byte[] downloadResource(URL url) throws IOException {
    // stream that accumulates the downloaded bytes
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        ByteBuffer bufIn = ByteBuffer.allocate(1024);
        ReadableByteChannel rbc = Channels.newChannel(url.openStream());
        int bytesRead;
        while ((bytesRead = rbc.read(bufIn)) > 0) {
            baos.write(bufIn.array(), 0, bytesRead);
            bufIn.rewind();
        }
        bufIn.clear();
        return baos.toByteArray();
    } finally {
        baos.close();
    }
}
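
A side note on this example: rewind() works here only because the buffer is never flipped, so its limit always stays at capacity and only the position needs resetting. A sketch of the same loop using clear(), which states that intent more directly:

while ((bytesRead = rbc.read(bufIn)) > 0) {
    baos.write(bufIn.array(), 0, bytesRead);
    bufIn.clear(); // resets position (limit was already at capacity)
}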

From source file:org.apache.hadoop.hbase.io.encoding.EncodedDataBlock.java

/**
 * Do the encoding, but do not cache the encoded data.
 * @return encoded data block with header and checksum
 */
public byte[] encodeData() {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        baos.write(HConstants.HFILEBLOCK_DUMMY_HEADER);
        DataOutputStream out = new DataOutputStream(baos);
        this.dataBlockEncoder.startBlockEncoding(encodingCtx, out);
        ByteBuffer in = getUncompressedBuffer();
        in.rewind();
        int klength, vlength;
        short tagsLength = 0;
        long memstoreTS = 0L;
        KeyValue kv = null;
        while (in.hasRemaining()) {
            int kvOffset = in.position();
            klength = in.getInt();
            vlength = in.getInt();
            ByteBufferUtils.skip(in, klength + vlength);
            if (this.meta.isIncludesTags()) {
                tagsLength = in.getShort();
                ByteBufferUtils.skip(in, tagsLength);
            }
            if (this.meta.isIncludesMvcc()) {
                memstoreTS = ByteBufferUtils.readVLong(in);
            }
            kv = new KeyValue(in.array(), kvOffset,
                    (int) KeyValue.getKeyValueDataStructureSize(klength, vlength, tagsLength));
            kv.setMvccVersion(memstoreTS);
            this.dataBlockEncoder.encode(kv, encodingCtx, out);
        }
        BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream();
        baos.writeTo(stream);
        this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.buf);
    } catch (IOException e) {
        throw new RuntimeException(String.format("Bug in encoding part of algorithm %s. "
                + "Probably it requested more bytes than are available.", toString()), e);
    }
    return baos.toByteArray();
}

From source file:com.linkedin.haivvreo.AvroSerializer.java

private Object serializeAvroBytes(ListTypeInfo typeInfo, ListObjectInspector fieldOI, Object structFieldData,
        Schema schema) throws HaivvreoException {
    ByteBuffer bb = ByteBuffer.wrap(extraByteArray(fieldOI, structFieldData));
    return bb.rewind();
}