List of usage examples for java.nio.ByteBuffer.array()
public final byte[] array()
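Before the examples, a minimal self-contained sketch of what array() returns and the caveats the examples below work around (the class and variable names here are illustrative, not taken from any of the sources):

import java.nio.ByteBuffer;

public class ByteBufferArrayExample {
    public static void main(String[] args) {
        // Heap buffers from allocate() always have an accessible backing array.
        ByteBuffer buffer = ByteBuffer.allocate(16);
        buffer.put("Hello".getBytes());
        buffer.flip(); // position = 0, limit = 5

        // hasArray() is false for direct and read-only buffers; calling
        // array() on those throws instead of returning data.
        if (buffer.hasArray()) {
            byte[] backing = buffer.array();
            // array() exposes the full 16-byte capacity, not just the 5
            // valid bytes, and valid data may start at a non-zero arrayOffset().
            int start = buffer.arrayOffset() + buffer.position();
            int length = buffer.remaining();
            System.out.println(new String(backing, start, length)); // prints "Hello"
        }
    }
}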
From source file:net.fenyo.mail4hotspot.service.HttpProxy.java
public void sendData(final byte data[]) throws IOException {
    // log.debug("sendData on id " + local_port);
    last_use = System.currentTimeMillis();
    // log.debug("sendData(): " + data.length + " bytes");
    if (closed)
        return;
    try {
        final ByteBuffer bb = ByteBuffer.allocate(to_socket_array.length + data.length);
        bb.put(to_socket_array);
        bb.put(data);
        bb.flip();
        final int nbytes = socket_channel.write(bb);
        to_socket_array = ArrayUtils.subarray(bb.array(), nbytes, bb.array().length);
    } catch (final IOException ex) {
        log.warn(ex);
        ex.printStackTrace();
        socket_channel.close();
        closed = true;
        throw ex;
    }
}
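Two details make the remainder bookkeeping above work: ByteBuffer.allocate() always returns a heap buffer with an accessible backing array, and the buffer is sized to exactly the payload, so bb.array().length equals bb.limit(). Since write() advances the position by the number of bytes consumed, the same unsent tail could also be read off the buffer itself as the range [bb.position(), bb.limit()).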
From source file:com.streamsets.pipeline.stage.origin.maprjson.MapRJsonOriginSource.java
private String getDocumentKey(Document document) {
    String ans = "";
    if (jsonDataTypes.get(MAPR_ID) == Value.Type.BINARY) {
        ByteBuffer bb = document.getBinary(MAPR_ID);
        ans = new String(b64.encode(bb.array()));
    } else if (jsonDataTypes.get(MAPR_ID) == Value.Type.STRING) {
        ans = document.getString(MAPR_ID);
    }
    return ans;
}
From source file:io.druid.indexer.IndexGeneratorJobTest.java
private void writeDataToLocalSequenceFile(File outputFile, List<String> data) throws IOException {
    Configuration conf = new Configuration();
    LocalFileSystem fs = FileSystem.getLocal(conf);
    Writer fileWriter = SequenceFile.createWriter(fs, conf, new Path(outputFile.getAbsolutePath()),
            BytesWritable.class, BytesWritable.class, SequenceFile.CompressionType.NONE,
            (CompressionCodec) null);

    int keyCount = 10;
    for (String line : data) {
        ByteBuffer buf = ByteBuffer.allocate(4);
        buf.putInt(keyCount);
        BytesWritable key = new BytesWritable(buf.array());
        BytesWritable value = new BytesWritable(line.getBytes(Charsets.UTF_8));
        fileWriter.append(key, value);
        keyCount += 1;
    }

    fileWriter.close();
}
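Here buf.array() yields an exact key because the buffer is allocated at precisely four bytes and putInt() fills all of them, using the buffer's default big-endian byte order; there is no spare capacity to leak into the BytesWritable.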
From source file:jeeves.utils.Xml.java
/**
 * Decodes a byte array using the specified charset, then converts it to a
 * UTF-8 encoded byte array.
 *
 * @param buf byte array to decode and convert to UTF-8
 * @param charsetName charset to decode the byte array with
 * @return the UTF-8 encoded bytes
 * @throws CharacterCodingException
 */
public synchronized static byte[] convertByteArrayToUTF8ByteArray(byte[] buf, String charsetName)
        throws CharacterCodingException {
    Charset cset = Charset.forName(charsetName); // detected character set name
    CharsetDecoder csetDecoder = cset.newDecoder();

    Charset utf8 = Charset.forName("UTF-8");
    CharsetEncoder utf8Encoder = utf8.newEncoder();

    ByteBuffer inputBuffer = ByteBuffer.wrap(buf);

    // decode as detected character set
    CharBuffer data = csetDecoder.decode(inputBuffer);

    // encode as UTF-8
    ByteBuffer outputBuffer = utf8Encoder.encode(data);

    // array() returns the encoder's whole backing array, which may be larger
    // than the encoded data (capacity > limit), so the tail can be unused
    // zero bytes; strip them before returning.
    byte[] out = outputBuffer.array();
    int length = out.length;
    while (out[length - 1] == 0)
        length--;
    byte[] result = new byte[length];
    System.arraycopy(out, 0, result, 0, length);

    // now return the converted bytes
    return result;
}
From source file:com.cloudera.sqoop.testutil.LobAvroImportTestCase.java
/**
 * Import blob data that is larger than the inline lob limit. The reference
 * file should be saved as Avro bytes. Blob data should be saved in LOB file
 * format.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobAvroImportExternal() throws IOException, SQLException {
    String[] types = { getBlobType() };
    String data = "This is short BLOB data";
    String[] vals = { getBlobInsertStr(data) };
    createTableWithColTypes(types, vals);

    // Set inline lob limit to a small value so that blob data will be
    // written to an external file.
    runImport(getArgv("--inline-lob-limit", "1"));

    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
    DataFileReader<GenericRecord> reader = read(outputFile);
    GenericRecord record = reader.next();

    // Verify that the reference file is written in Avro bytes.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    String returnVal = new String(buf.array());
    String expectedVal = "externalLob(lf,_lob/large_obj_task_local_000" + getTableNum()
            + "_m_0000000.lob,68," + data.length() + ")";
    assertEquals(expectedVal, returnVal);

    // Verify that blob data stored in the external lob file is correct.
    BlobRef br = BlobRef.parse(returnVal);
    Path lobFileDir = new Path(getWarehouseDir(), getTableName());
    InputStream in = br.getDataStream(getConf(), lobFileDir);
    byte[] bufArray = new byte[data.length()];
    int chars = in.read(bufArray);
    in.close();

    assertEquals(chars, data.length());

    returnVal = new String(bufArray);
    expectedVal = data;
    assertEquals(getColName(0), returnVal, expectedVal);
}
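One caveat on new String(buf.array()): it decodes the entire backing array with the platform default charset and assumes the valid region starts at index 0 and runs to the end (arrayOffset 0, limit equal to capacity). The test relies on the buffer returned by record.get() satisfying that; the general-purpose pattern is to copy only arrayOffset() + position() through arrayOffset() + limit().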
From source file:com.cloudera.sqoop.testutil.LobAvroImportTestCase.java
/**
 * Import blob data that is larger than the inline lob limit and compress it
 * with the deflate codec. The reference file should be encoded and saved as
 * Avro bytes. Blob data should be saved in LOB file format without
 * compression.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobCompressedAvroImportExternal() throws IOException, SQLException {
    String[] types = { getBlobType() };
    String data = "This is short BLOB data";
    String[] vals = { getBlobInsertStr(data) };
    createTableWithColTypes(types, vals);

    // Set inline lob limit to a small value so that blob data will be
    // written to an external file.
    runImport(getArgv("--inline-lob-limit", "1", "--compression-codec", CodecMap.DEFLATE));

    Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
    DataFileReader<GenericRecord> reader = read(outputFile);
    GenericRecord record = reader.next();

    // Verify that the data block of the Avro file is compressed with the
    // deflate codec.
    assertEquals(CodecMap.DEFLATE, reader.getMetaString(DataFileConstants.CODEC));

    // Verify that the reference file is written in Avro bytes.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    String returnVal = new String(buf.array());
    String expectedVal = "externalLob(lf,_lob/large_obj_task_local_000" + getTableNum()
            + "_m_0000000.lob,68," + data.length() + ")";
    assertEquals(expectedVal, returnVal);

    // Verify that blob data stored in the external lob file is correct.
    BlobRef br = BlobRef.parse(returnVal);
    Path lobFileDir = new Path(getWarehouseDir(), getTableName());
    InputStream in = br.getDataStream(getConf(), lobFileDir);
    byte[] bufArray = new byte[data.length()];
    int chars = in.read(bufArray);
    in.close();

    assertEquals(chars, data.length());

    returnVal = new String(bufArray);
    expectedVal = data;
    assertEquals(getColName(0), returnVal, expectedVal);
}
From source file:com.github.neoio.net.message.staging.file.TestFileMessageStaging.java
@Test
public void test_tempRead() {
    ByteBuffer buffer = ByteBuffer.allocate(1024);

    buffer.put("Hello World".getBytes());
    buffer.flip();

    staging.writeTempReadBytes(buffer);
    Assert.assertTrue(staging.hasTempReadBytes());

    buffer.clear();
    staging.readTempReadBytes(buffer);
    Assert.assertEquals("Hello World",
            new String(ArrayUtils.subarray(buffer.array(), 0, "Hello World".getBytes().length)));

    staging.resetTempReadBytes();
    Assert.assertFalse(staging.hasTempReadBytes());
}
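Note that buffer.clear() only resets the position and limit; it does not zero the contents. The assertion then reads straight from buffer.array(), which assumes readTempReadBytes() deposits the staged bytes starting at array index 0; ArrayUtils.subarray copies that range without disturbing the buffer.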
From source file:com.rackspacecloud.blueflood.io.SerializationTest.java
@Test
public void testRollupSerializationAndDeserialization() throws IOException {
    // works the same way as testFullResSerializationAndDeserialization
    if (System.getProperty("GENERATE_ROLLUP_SERIALIZATION") != null) {
        OutputStream os = new FileOutputStream(
                "src/test/resources/serializations/rollup_version_" + Constants.VERSION_1_ROLLUP + ".bin",
                false);
        for (Rollup rollup : toSerializeRollup) {
            for (Granularity g : Granularity.rollupGranularities()) {
                ByteBuffer bb = NumericSerializer.get(g).toByteBuffer(rollup);
                os.write(Base64.encodeBase64(bb.array()));
                os.write("\n".getBytes());
            }
        }
        os.close();
    }

    Assert.assertTrue(new File("src/test/resources/serializations").exists());

    // ensure we can read historical serializations.
    int version = 0;
    int maxVersion = Constants.VERSION_1_ROLLUP;
    while (version <= maxVersion) {
        BufferedReader reader = new BufferedReader(
                new FileReader("src/test/resources/serializations/rollup_version_" + version + ".bin"));
        for (int i = 0; i < toSerializeRollup.length; i++) {
            for (Granularity g : Granularity.rollupGranularities()) {
                ByteBuffer bb = ByteBuffer.wrap(Base64.decodeBase64(reader.readLine().getBytes()));
                Rollup rollup = (Rollup) NumericSerializer.get(g).fromByteBuffer(bb);
                Assert.assertTrue(String.format("Deserialization for rollup broken at %d", version),
                        toSerializeRollup[i].equals(rollup));
            }
            version += 1;
        }
    }

    // current round tripping.
    for (Rollup rollup : toSerializeRollup) {
        for (Granularity g : Granularity.rollupGranularities()) {
            ByteBuffer bb = NumericSerializer.get(g).toByteBuffer(rollup);
            Assert.assertTrue(rollup.equals(NumericSerializer.get(g).fromByteBuffer(bb)));
        }
    }
}
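On the decode side, ByteBuffer.wrap() produces a buffer whose array() is the very array passed in, with arrayOffset 0 and capacity equal to its length, so fromByteBuffer() sees exactly the Base64-decoded payload. The encode side correspondingly assumes toByteBuffer() returns buffers with no spare capacity, since Base64.encodeBase64(bb.array()) encodes the whole backing array.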
From source file:com.github.neoio.net.message.staging.file.TestFileMessageStaging.java
@Test
public void test_tempWrite() {
    ByteBuffer buffer = ByteBuffer.allocate(1024);

    buffer.put("Hello World".getBytes());
    buffer.flip();

    staging.writeTempWriteBytes(buffer);
    Assert.assertTrue(staging.hasTempWriteBytes());

    buffer.clear();
    staging.readTempWriteBytes(buffer);
    Assert.assertEquals("Hello World",
            new String(ArrayUtils.subarray(buffer.array(), 0, "Hello World".getBytes().length)));

    staging.resetTempWriteBytes();
    Assert.assertFalse(staging.hasTempWriteBytes());
}