List of usage examples for java.nio.ByteBuffer.array()
public final byte[] array()
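array() returns the backing array of a heap buffer directly, not a copy: writes through the buffer are visible in the array and vice versa. It is only usable when hasArray() reports true; direct and read-only buffers throw instead. A minimal standalone sketch of those semantics (not taken from any of the sources below):

import java.nio.ByteBuffer;

public class ArrayDemo {
    public static void main(String[] args) {
        ByteBuffer heap = ByteBuffer.allocate(4);
        heap.put((byte) 42);

        byte[] backing = heap.array();    // same storage, not a copy
        System.out.println(backing[0]);   // 42
        backing[1] = 7;
        System.out.println(heap.get(1));  // 7

        // Direct buffers have no accessible backing array;
        // calling array() on one throws UnsupportedOperationException.
        ByteBuffer direct = ByteBuffer.allocateDirect(4);
        System.out.println(direct.hasArray()); // false
    }
}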
From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryArrayZigZarByteReader.java
@Override
public byte[] ensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    FlexibleEncoding.Parquet.DeltaByteArrayReader reader = new FlexibleEncoding.Parquet.DeltaByteArrayReader();
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    reader.initFromPage(numPairs, byteBuf.array(), 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = Byte.parseByte(reader.readBytes().toStringUsingUTF8());
        decoding.writeByte(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}
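Note that the flip() above only moves the buffer's position and limit; array() always exposes the entire backing array, and the offset argument of 0 is correct only because a buffer from ByteBuffer.allocate() has arrayOffset() == 0. A one-line illustration of that behavior (sketch, independent of the class above):

ByteBuffer b = ByteBuffer.allocate(8);
b.putInt(0xCAFEBABE);
b.flip();
// position and limit changed, but array() is still the full 8-byte store
assert b.array().length == 8 && b.arrayOffset() == 0;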
From source file:com.cloudera.sqoop.testutil.LobAvroImportTestCase.java
/**
 * Import blob data that is smaller than inline lob limit. Blob data
 * should be saved as Avro bytes.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobAvroImportInline() throws IOException, SQLException {
    String[] types = { getBlobType() };
    String expectedVal = "This is short BLOB data";
    String[] vals = { getBlobInsertStr(expectedVal) };

    createTableWithColTypes(types, vals);

    runImport(getArgv());

    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
    DataFileReader<GenericRecord> reader = read(outputFile);
    GenericRecord record = reader.next();

    // Verify that blob data is imported as Avro bytes.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    String returnVal = new String(buf.array());
    assertEquals(getColName(0), expectedVal, returnVal);
}
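new String(buf.array()) assumes the buffer is heap-backed, starts at index 0 of its backing array, and fills it exactly; in general, array() ignores position, limit, and arrayOffset. A more defensive conversion (hypothetical helper, not part of the Sqoop test) might look like:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

static String bufferToString(ByteBuffer buf) {
    if (buf.hasArray()) {
        // honor the buffer's actual window into its backing array
        return new String(buf.array(), buf.arrayOffset() + buf.position(),
                buf.remaining(), StandardCharsets.UTF_8);
    }
    byte[] bytes = new byte[buf.remaining()];
    buf.duplicate().get(bytes); // duplicate() leaves the original position untouched
    return new String(bytes, StandardCharsets.UTF_8);
}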
From source file:com.cloudera.sqoop.testutil.LobAvroImportTestCase.java
/**
 * Import blob data that is smaller than inline lob limit and compress with
 * deflate codec. Blob data should be encoded and saved as Avro bytes.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobCompressedAvroImportInline() throws IOException, SQLException {
    String[] types = { getBlobType() };
    String expectedVal = "This is short BLOB data";
    String[] vals = { getBlobInsertStr(expectedVal) };

    createTableWithColTypes(types, vals);

    runImport(getArgv("--compression-codec", CodecMap.DEFLATE));

    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
    DataFileReader<GenericRecord> reader = read(outputFile);
    GenericRecord record = reader.next();

    // Verify that the data block of the Avro file is compressed with
    // deflate codec.
    assertEquals(CodecMap.DEFLATE, reader.getMetaString(DataFileConstants.CODEC));

    // Verify that all columns are imported correctly.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    String returnVal = new String(buf.array());
    assertEquals(getColName(0), expectedVal, returnVal);
}
From source file:com.github.neoio.net.message.staging.memory.TestMemoryMessageStaging.java
@Test
public void test_tempRead() {
    ByteBuffer buffer = ByteBuffer.allocate(1024);

    buffer.put("Hello World".getBytes());
    buffer.rewind();

    staging.writeTempReadBytes(buffer);
    Assert.assertTrue(staging.hasTempReadBytes());

    buffer.clear();
    staging.readTempReadBytes(buffer);
    Assert.assertEquals("Hello World",
            new String(ArrayUtils.subarray(buffer.array(), 0, "Hello World".getBytes().length)));

    staging.resetTempReadBytes();
    Assert.assertFalse(staging.hasTempReadBytes());
}
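ArrayUtils.subarray (Commons Lang) copies a range out of the backing array exposed by array(). Assuming the staging implementation leaves the read bytes at the start of the buffer, a plain-JDK equivalent of the assertion is:

byte[] read = java.util.Arrays.copyOfRange(buffer.array(), 0,
        "Hello World".getBytes().length);
Assert.assertEquals("Hello World", new String(read));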
From source file:de.hofuniversity.iisys.neo4j.websock.query.encoding.safe.TSafeDeflateJsonQueryHandler.java
@Override
public boolean willDecode(ByteBuffer buff) {
    boolean valid = true;

    // TODO: actually check whether it's a query
    try {
        // decompress
        final Inflater inflater = new Inflater(true);
        inflater.setInput(buff.array());

        int read = 0;
        int totalSize = 0;
        final List<byte[]> buffers = new LinkedList<byte[]>();

        byte[] buffer = new byte[BUFFER_SIZE];
        read = inflater.inflate(buffer);

        while (read > 0) {
            totalSize += read;
            buffers.add(buffer);

            buffer = new byte[BUFFER_SIZE];
            read = inflater.inflate(buffer);
        }

        final byte[] data = fuse(buffers, totalSize).array();
        new JSONObject(new String(data));
    } catch (Exception e) {
        valid = false;
    }

    return valid;
}
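buff.array() assumes the WebSocket container delivers a heap buffer; a direct buffer would make the call throw UnsupportedOperationException (caught here and reported as "not decodable"), and a sliced buffer would feed the Inflater the wrong window. A defensive version of the setInput call (sketch):

if (buff.hasArray()) {
    // respect the buffer's actual window into its backing array
    inflater.setInput(buff.array(), buff.arrayOffset() + buff.position(), buff.remaining());
} else {
    byte[] copy = new byte[buff.remaining()];
    buff.duplicate().get(copy); // duplicate() keeps buff's position intact
    inflater.setInput(copy);
}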
From source file:org.eclipse.jgit.lfs.server.fs.LfsServerTest.java
private void checkResponseStatus(HttpResponse response) {
    StatusLine statusLine = response.getStatusLine();
    int status = statusLine.getStatusCode();
    if (statusLine.getStatusCode() >= 400) {
        String error;
        try {
            ByteBuffer buf = IO.readWholeStream(
                    new BufferedInputStream(response.getEntity().getContent()), 1024);
            if (buf.hasArray()) {
                error = new String(buf.array(), buf.arrayOffset() + buf.position(),
                        buf.remaining(), UTF_8);
            } else {
                final byte[] b = new byte[buf.remaining()];
                buf.duplicate().get(b);
                error = new String(b, UTF_8);
            }
        } catch (IOException e) {
            error = statusLine.getReasonPhrase();
        }
        throw new RuntimeException("Status: " + status + " " + error);
    }
    assertEquals(200, status);
}
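Of the examples on this page, this is the most defensive use of array(): it checks hasArray() first, honors arrayOffset() and position() when reading from the backing array, and falls back to copying through duplicate().get() for direct buffers. The same pattern is worth borrowing whenever the buffer's origin is not under your control.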
From source file:com.github.neoio.net.message.staging.memory.TestMemoryMessageStaging.java
@Test
public void test_tempWrite() {
    ByteBuffer buffer = ByteBuffer.allocate(1024);

    buffer.put("Hello World".getBytes());
    buffer.rewind();

    staging.writeTempWriteBytes(buffer);
    Assert.assertTrue(staging.hasTempWriteBytes());

    buffer.clear();
    staging.readTempWriteBytes(buffer);
    Assert.assertEquals("Hello World",
            new String(ArrayUtils.subarray(buffer.array(), 0, "Hello World".getBytes().length)));

    staging.resetTempWriteBytes();
    Assert.assertFalse(staging.hasTempWriteBytes());
}
From source file:com.netflix.astyanax.thrift.AbstractThriftMutationBatchImpl.java
@Override
public void deserialize(ByteBuffer data) throws Exception {
    ByteArrayInputStream in = new ByteArrayInputStream(data.array());
    TIOStreamTransport transport = new TIOStreamTransport(in);
    batch_mutate_args args = new batch_mutate_args();
    try {
        TBinaryProtocol bp = new TBinaryProtocol(transport);
        bp.setReadLength(data.remaining());
        args.read(bp);
        mutationMap = args.getMutation_map();
    } catch (TException e) {
        throw ThriftConverter.ToConnectionPoolException(e);
    }
}
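data.array() here assumes the buffer's content starts at index 0 of its backing array; a slice, or a buffer with a non-zero position, would put stray bytes in front of the Thrift payload. ByteArrayInputStream has a ranged constructor that respects the buffer's window (sketch using the same classes):

ByteArrayInputStream in = new ByteArrayInputStream(
        data.array(), data.arrayOffset() + data.position(), data.remaining());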
From source file:net.beaconpe.jraklib.protocol.Packet.java
protected byte[] get(int len) {
    if (len < 0) {
        offset = buffer.length - 1;
        return new byte[] {};
    } else {
        ByteBuffer bb = ByteBuffer.allocate(len);
        while (len > 0) {
            bb.put(buffer[offset]);
            len = len - 1;
            if (len != 0) {
                offset = offset + 1;
            }
        }
        return bb.array();
    }
}
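The ByteBuffer is used here purely as a byte accumulator, and bb.array() is safe because the buffer comes straight from allocate() (arrayOffset 0) and is filled to capacity. Assuming the slightly unusual offset bookkeeping is intentional, the same result for positive len is available without a ByteBuffer (sketch):

// equivalent for len > 0; the original stops incrementing offset
// before the last byte, hence the len - 1
byte[] out = java.util.Arrays.copyOfRange(buffer, offset, offset + len);
offset += len - 1;
return out;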
From source file:com.cloudera.sqoop.testutil.LobAvroImportTestCase.java
/**
 * Import multiple columns of blob data. Blob data should be saved as Avro
 * bytes.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobAvroImportMultiCols() throws IOException, SQLException {
    String[] types = { getBlobType(), getBlobType(), getBlobType(), };
    String expectedVal1 = "This is short BLOB data1";
    String expectedVal2 = "This is short BLOB data2";
    String expectedVal3 = "This is short BLOB data3";
    String[] vals = { getBlobInsertStr(expectedVal1), getBlobInsertStr(expectedVal2),
            getBlobInsertStr(expectedVal3), };

    createTableWithColTypes(types, vals);

    runImport(getArgv());

    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
    DataFileReader<GenericRecord> reader = read(outputFile);
    GenericRecord record = reader.next();

    // Verify that all columns are imported correctly.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    String returnVal = new String(buf.array());
    assertEquals(getColName(0), expectedVal1, returnVal);

    buf = (ByteBuffer) record.get(getColName(1));
    returnVal = new String(buf.array());
    assertEquals(getColName(1), expectedVal2, returnVal);

    buf = (ByteBuffer) record.get(getColName(2));
    returnVal = new String(buf.array());
    assertEquals(getColName(2), expectedVal3, returnVal);
}