List of usage examples for java.nio.ByteBuffer.flip()
public final Buffer flip()
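flip() switches a buffer from filling to draining: it sets the limit to the current position, resets the position to zero, and discards the mark. The usual pattern is to put or read data into the buffer, call flip(), and then get or write the data back out, which is what every snippet below does. Before the real-world examples, here is a minimal, self-contained sketch (not taken from any of the source files listed here; the class name FlipExample is just for illustration):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class FlipExample {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(64);

        // Fill the buffer: position advances with each put, limit stays at capacity.
        buf.put("hello".getBytes(StandardCharsets.UTF_8));

        // flip() sets limit = current position and position = 0,
        // so the bytes just written become the readable region.
        buf.flip();

        byte[] out = new byte[buf.remaining()];
        buf.get(out);
        System.out.println(new String(out, StandardCharsets.UTF_8)); // prints "hello"
    }
}

After draining, clear() or compact() puts the buffer back into filling mode; the channel-copy example further down relies on exactly that flip/compact cycle.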
From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryBitPackingZigZarIntReader.java
@Override
public byte[] ensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    FlexibleEncoding.Parquet.DeltaBinaryPackingValuesReader reader =
            new FlexibleEncoding.Parquet.DeltaBinaryPackingValuesReader();
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    reader.initFromPage(numPairs, byteBuf.array(), 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        int tmp = reader.readInteger();
        decoding.writeInt(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}
From source file:org.alfresco.patch.PatchServiceImpl.java
@Override
public PatchDocument getPatch(String nodeId, long nodeVersion) throws IOException {
    if (contentStore.exists(nodeId, nodeVersion - 1, true)) {
        // previous version
        NodeChecksums nodeChecksums = checksumService.getChecksums(nodeId, nodeVersion - 1);
        if (nodeChecksums != null) {
            // parameters version
            NodeInfo nodeInfo1 = contentDAO.getByNodeId(nodeId, nodeVersion, true);
            String contentPath1 = nodeInfo1.getContentPath();
            FileChannel inChannel = contentStore.getChannel(contentPath1);
            ByteBuffer buffer = ByteBuffer.allocate(1024 * 100);
            inChannel.read(buffer);
            // flip so the bytes just read from the channel become the readable region
            buffer.flip();
            PatchDocument patchDocument = checksumService.createPatchDocument(nodeChecksums, buffer);
            return patchDocument;
        } else {
            throw new RuntimeException("No patches available, no checksums for node " + nodeId
                    + ", nodeVersion " + (nodeVersion - 1));
        }
    } else {
        throw new RuntimeException("No patches available, only a single version of the node");
    }
}
From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryArrayZigZarByteReader.java
public byte[] CompressensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 0, inBuf.getLength());
    FlexibleEncoding.Parquet.DeltaByteArrayReader reader = new FlexibleEncoding.Parquet.DeltaByteArrayReader();
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    reader.initFromPage(numPairs, byteBuf.array(), 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = Byte.parseByte(reader.readBytes().toStringUsingUTF8());
        decoding.writeByte(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}
From source file:com.robonobo.eon.DEONConnection.java
void receivePacket(EONPacket eonPacket) {
    DEONPacket packet = (DEONPacket) eonPacket;
    ByteBuffer buf = ByteBuffer.allocate(packet.getPayload().limit());
    buf.put(packet.getPayload());
    buf.flip();
    EonSocketAddress addr = packet.getSourceSocketAddress();
    receiveLock.lock();
    try {
        incomingDataBufs.add(buf);
        incomingDataAddrs.add(addr);
        if (dataReceiver == null)
            canReceive.signal();
        else {
            // Async read
            if (dataReceiverRunning)
                // This will be picked up by the already-running receiver
                return;
            else
                fireAsyncReceiver();
        }
    } finally {
        receiveLock.unlock();
    }
}
From source file:org.apache.hadoop.hdfs.hoss.db.HotStore.java
/**
 * @param objId
 * @param createTime
 * @param lastTime
 * @param size object size (unit: MB)
 * @return
 */
public boolean put(long objId, long createTime, long lastTime, long size) {
    final WriteBuffer wbuf = fbs.set((int) objId);
    final ByteBuffer buf = wbuf.buf();
    // create time
    buf.putLong(createTime);
    // last access time
    buf.putLong(lastTime);
    long sizeMB = size < 0 ? -1 : convertMB(size);
    // object size (unit: MB)
    buf.putLong(sizeMB);
    buf.flip();
    boolean ret = wbuf.save();
    return ret;
}
From source file:cn.ac.ncic.mastiff.io.coding.DeltaBinaryArrayZigZarByteReader.java
@Override
public byte[] ensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 12, inBuf.getLength() - 12);
    FlexibleEncoding.Parquet.DeltaByteArrayReader reader = new FlexibleEncoding.Parquet.DeltaByteArrayReader();
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    reader.initFromPage(numPairs, byteBuf.array(), 0);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = Byte.parseByte(reader.readBytes().toStringUsingUTF8());
        decoding.writeByte(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}
From source file:tachyon.master.RawTables.java
/**
 * Update the metadata of the specified raw table. It will check if the table exists.
 *
 * @param tableId The id of the raw table
 * @param metadata The new metadata of the raw table
 * @throws TachyonException
 */
// TODO add version number.
public synchronized void updateMetadata(int tableId, ByteBuffer metadata) throws TachyonException {
    Pair<Integer, ByteBuffer> data = mData.get(tableId);
    if (null == data) {
        throw new TachyonException("The raw table " + tableId + " does not exist.");
    }
    if (metadata == null) {
        data.setSecond(ByteBuffer.allocate(0));
    } else {
        long maxVal = mTachyonConf.getBytes(Constants.MAX_TABLE_METADATA_BYTE, 0L);
        if (metadata.limit() - metadata.position() >= maxVal) {
            throw new TachyonException("Too big table metadata: " + metadata.toString());
        }
        ByteBuffer tMetadata = ByteBuffer.allocate(metadata.limit() - metadata.position());
        tMetadata.put(metadata.array(), metadata.position(), metadata.limit() - metadata.position());
        tMetadata.flip();
        data.setSecond(tMetadata);
    }
}
From source file:org.alfresco.cacheserver.dropwizard.resources.CacheServerResource.java
private void fastChannelCopy(final ReadableByteChannel src, final WritableByteChannel dest) throws IOException {
    final ByteBuffer buffer = ByteBuffer.allocateDirect(16 * 1024);
    while (src.read(buffer) != -1) {
        // prepare the buffer to be drained
        buffer.flip();
        // write to the channel, may block
        dest.write(buffer);
        // If partial transfer, shift remainder down
        // If buffer is empty, same as doing clear()
        buffer.compact();
    }
    // EOF will leave buffer in fill state
    buffer.flip();
    // make sure the buffer is fully drained.
    while (buffer.hasRemaining()) {
        dest.write(buffer);
    }
}
From source file:org.apache.asterix.experiment.builder.AbstractExperiment8Builder.java
protected String getPointLookUpAQL(int round) {
    ByteBuffer bb = ByteBuffer.allocate(8);
    bb.put((byte) 0);
    bb.put((byte) randGen.nextInt(N_PARTITIONS));
    bb.putShort((short) 0);
    bb.putInt(randGen.nextInt((int) (((1 + round) * dataInterval) / 1000)));
    // flip so the 8 bytes just written can be read back as a single long key
    bb.flip();
    long key = bb.getLong();
    return pointQueryTemplate.replaceAll("\\$KEY\\$", Long.toString(key));
}
From source file:cn.ac.ncic.mastiff.io.coding.RunLengthEncodingByteReader.java
public byte[] CompressensureDecompressed() throws IOException {
    FlexibleEncoding.ORC.DynamicByteArray dynamicBuffer = new FlexibleEncoding.ORC.DynamicByteArray();
    dynamicBuffer.add(inBuf.getData(), 0, inBuf.getLength());
    ByteBuffer byteBuf = ByteBuffer.allocate(dynamicBuffer.size());
    dynamicBuffer.setByteBuffer(byteBuf, 0, dynamicBuffer.size());
    byteBuf.flip();
    FlexibleEncoding.ORC.InStream instream =
            FlexibleEncoding.ORC.InStream.create("test", byteBuf, null, dynamicBuffer.size());
    RunLengthByteReader rlein = new RunLengthByteReader(instream);
    DataOutputBuffer decoding = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    for (int i = 0; i < numPairs; i++) {
        byte tmp = rlein.next();
        decoding.writeByte(tmp);
    }
    byteBuf.clear();
    inBuf.close();
    return decoding.getData();
}