Example usage for java.nio ByteBuffer array

List of usage examples for java.nio ByteBuffer array

Introduction

On this page you can find usage examples for the java.nio ByteBuffer.array() method, collected from open-source projects.

Prototype

public final byte[] array() 

Document

Returns the byte array which this buffer is based on, if there is one.
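
Note that array() is an optional operation: it throws a ReadOnlyBufferException if the buffer is read-only, and an UnsupportedOperationException if the buffer is not backed by an accessible byte array (a direct buffer, for example). A minimal sketch of the usual hasArray() guard (printBackingArrayLength is an illustrative name):

public static void printBackingArrayLength(ByteBuffer buf) {
    // hasArray() reports whether array() can be called safely
    if (buf.hasArray()) {
        System.out.println("backing array length: " + buf.array().length);
    } else {
        // e.g. a buffer from ByteBuffer.allocateDirect(8) typically has no accessible array
        System.out.println("no accessible backing array");
    }
}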

Usage

From source file:com.alibaba.jstorm.utils.JStormUtils.java

public static byte[] longToBytes(long x) {
    // Long.BYTES is 8, the size of a long in bytes (Long.SIZE would be 64, its size in bits)
    ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
    buffer.putLong(x);
    return buffer.array();
}
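
For the reverse direction, the encoded array can be wrapped back into a buffer and read out. A minimal companion sketch (bytesToLong is an illustrative name, not taken from JStormUtils):

public static long bytesToLong(byte[] bytes) {
    // wrap() exposes the existing array through a buffer without copying
    ByteBuffer buffer = ByteBuffer.wrap(bytes);
    return buffer.getLong();
}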

From source file:com.facebook.hive.orc.ReaderImpl.java

public ReaderImpl(FileSystem fs, Path path, Configuration conf) throws IOException {
    try {
        this.fileSystem = fs;
        this.path = path;
        this.conf = conf;
        FSDataInputStream file = fs.open(path);
        long size = fs.getFileStatus(path).getLen();
        int readSize = (int) Math.min(size, DIRECTORY_SIZE_GUESS);
        ByteBuffer buffer = ByteBuffer.allocate(readSize);
        InStream.read(file, size - readSize, buffer.array(), buffer.arrayOffset() + buffer.position(),
                buffer.remaining());
        int psLen = buffer.get(readSize - 1) & 0xff; // mask so the length byte is read as unsigned
        int psOffset = readSize - 1 - psLen;
        CodedInputStream in = CodedInputStream.newInstance(buffer.array(), buffer.arrayOffset() + psOffset,
                psLen);
        OrcProto.PostScript ps = OrcProto.PostScript.parseFrom(in);
        int footerSize = (int) ps.getFooterLength();
        bufferSize = (int) ps.getCompressionBlockSize();
        switch (ps.getCompression()) {
        case NONE:
            compressionKind = CompressionKind.NONE;
            break;
        case ZLIB:
            compressionKind = CompressionKind.ZLIB;
            break;
        case SNAPPY:
            compressionKind = CompressionKind.SNAPPY;
            break;
        case LZO:
            compressionKind = CompressionKind.LZO;
            break;
        default:
            throw new IllegalArgumentException("Unknown compression");
        }
        codec = WriterImpl.createCodec(compressionKind);

        InputStream instream = InStream.create("footer", file, size - 1 - psLen - footerSize, footerSize, codec,
                bufferSize);
        footer = OrcProto.Footer.parseFrom(instream);
        inspector = new OrcLazyRowObjectInspector(0, footer.getTypesList());
        file.close();
    } catch (IndexOutOfBoundsException e) {
        /**
         * When a non-ORC file is read by the ORC reader, an IndexOutOfBoundsException is thrown
         * while creating the reader. Catch that exception and check the file header to see
         * whether the input file is ORC. If it is not, throw a NotAnORCFileException naming the
         * file that was being read (thus helping to figure out which table/partition it was).
         */
        checkIfORC(fs, path);
        throw new IOException("Failed to create record reader for file " + path, e);
    } catch (IOException e) {
        throw new IOException("Failed to create record reader for file " + path, e);
    }
}

From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryPackingStringReader.java

public void ensureDecompress() throws IOException {
    org.apache.hadoop.io.compress.Decompressor decompressor = this.compressAlgo.getDecompressor();
    InputStream is = this.compressAlgo.createDecompressionStream(inBuf, decompressor, 0);
    ByteBuffer buf = ByteBuffer.allocate(decompressedSize);
    // ByteBuffer buf = ByteBuffer.allocate(is.available());
    IOUtils.readFully(is, buf.array(), 0, buf.capacity());
    is.close();
    this.compressAlgo.returnDecompressor(decompressor);
    inBuf.reset(buf.array(), offset, buf.capacity());
}

From source file:au.org.ala.delta.io.BinaryKeyFile.java

public String readString(int recordNum, int numChars) {
    seek(recordOffset(recordNum));
    ByteBuffer bb = readByteBuffer(numChars);

    return BinFileEncoding.decode(bb.array());
}

From source file:com.alibaba.napoli.metamorphosis.client.extension.producer.LocalMessageStorageManager.java

@Override
public void append(final Message message, final Partition partition) throws IOException {
    final Store store = this.getOrCreateStore(message.getTopic(), partition);
    final ByteBuffer buf = ByteBuffer.allocate(16);
    buf.putLong(this.idWorker.nextId());
    // array() returns the full 16-byte backing array: the 8-byte id followed by 8 zero bytes
    store.add(buf.array(), this.serializer.encodeObject(message));
}

From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryBitPackingZigZarIntReader.java

public byte[] CompressensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 0, inBuf.getLength());
    FlexibleEncoding.Parquet.DeltaBinaryPackingValuesReader reader = new FlexibleEncoding.Parquet.DeltaBinaryPackingValuesReader();
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    reader.initFromPage(numPairs, byteBuf.array(), 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {

        int tmp = reader.readInteger();
        decoding.writeInt(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}

From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryBitPackingZigZarIntReader.java

@Override
public byte[] ensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    FlexibleEncoding.Parquet.DeltaBinaryPackingValuesReader reader = new FlexibleEncoding.Parquet.DeltaBinaryPackingValuesReader();
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    reader.initFromPage(numPairs, byteBuf.array(), 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        int tmp = reader.readInteger();
        decoding.writeInt(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}

From source file:com.bigdata.dastor.db.RowMutation.java

public void delete(QueryPath path, long timestamp) {
    assert path.columnFamilyName != null;
    String cfName = path.columnFamilyName;

    int localDeleteTime = (int) (System.currentTimeMillis() / 1000);

    ColumnFamily columnFamily = modifications_.get(cfName);
    if (columnFamily == null)
        columnFamily = ColumnFamily.create(table_, cfName);

    if (path.superColumnName == null && path.columnName == null) {
        columnFamily.delete(localDeleteTime, timestamp);
    } else if (path.columnName == null) {
        SuperColumn sc = new SuperColumn(path.superColumnName,
                DatabaseDescriptor.getSubComparator(table_, cfName));
        sc.markForDeleteAt(localDeleteTime, timestamp);
        columnFamily.addColumn(sc);
    } else {
        ByteBuffer bytes = ByteBuffer.allocate(4);
        bytes.putInt(localDeleteTime);
        columnFamily.addColumn(path, bytes.array(), timestamp, true);
    }

    modifications_.put(cfName, columnFamily);
}

From source file:edu.umn.cs.spatialHadoop.visualization.FrequencyMap.java

@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    GZIPOutputStream gzos = new GZIPOutputStream(baos);
    ByteBuffer bbuffer = ByteBuffer.allocate(getHeight() * 4 + 8);
    bbuffer.putInt(getWidth());
    bbuffer.putInt(getHeight());
    gzos.write(bbuffer.array(), 0, bbuffer.position());
    for (int x = 0; x < getWidth(); x++) {
        bbuffer.clear();
        for (int y = 0; y < getHeight(); y++) {
            bbuffer.putFloat(frequencies[x][y]);
        }
        gzos.write(bbuffer.array(), 0, bbuffer.position());
    }
    gzos.close();

    byte[] serializedData = baos.toByteArray();
    out.writeInt(serializedData.length);
    out.write(serializedData);
}

From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryArrayZigZarByteReader.java

public byte[] CompressensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 0, inBuf.getLength());
    FlexibleEncoding.Parquet.DeltaByteArrayReader reader = new FlexibleEncoding.Parquet.DeltaByteArrayReader();
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    reader.initFromPage(numPairs, byteBuf.array(), 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = Byte.parseByte(reader.readBytes().toStringUsingUTF8());
        decoding.writeByte(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}