List of usage examples for java.nio.ByteBuffer.limit
public final Buffer limit(int newLimit)
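Before the examples, a minimal JDK-only sketch (not taken from any of the source files below) of what limit(int) actually does: it fences off the accessible region, pulls position back if it now exceeds the limit, and rejects values outside [0, capacity].

import java.nio.ByteBuffer;

public class LimitDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);   // capacity 16, position 0, limit 16
        buf.put(new byte[] { 1, 2, 3, 4 });         // position 4

        buf.limit(buf.position());                  // cap the buffer at the bytes written so far
        buf.position(0);                            // ...then rewind: the two-step equivalent of buf.flip()
        System.out.println(buf.remaining());        // 4

        buf.get(); buf.get(); buf.get();            // position 3
        buf.limit(2);                               // position > new limit, so position is pulled back to 2
        System.out.println(buf.position());         // 2

        try {
            buf.limit(buf.capacity() + 1);          // out of range
        } catch (IllegalArgumentException e) {
            System.out.println("newLimit must lie in [0, capacity]");
        }
    }
}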
From source file:com.glaf.core.util.ByteBufferUtils.java
/**
 * Clears the buffer: position and limit are both set to 0.
 *
 * @param buffer
 */
public static void clear(ByteBuffer buffer) {
    if (buffer != null) {
        buffer.position(0);
        buffer.limit(0);
    }
}
From source file:com.glaf.core.util.ByteBufferUtils.java
/**
 * Clears the buffer for filling: position is reset to 0 and the limit is
 * raised to the full capacity.
 *
 * @param buffer
 */
public static void clearToFill(ByteBuffer buffer) {
    if (buffer != null) {
        buffer.position(0);
        buffer.limit(buffer.capacity());
    }
}
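A hypothetical caller for the two helpers above, to make the contrast concrete: clear(...) leaves nothing readable or writable (limit 0), while clearToFill(...) reopens the whole capacity. The ClearDemo class and its values are illustrative only.

import java.nio.ByteBuffer;
import com.glaf.core.util.ByteBufferUtils;

public class ClearDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(64);
        buf.put(new byte[] { 1, 2, 3 });             // position 3

        ByteBufferUtils.clear(buf);                  // position 0, limit 0: nothing readable or writable
        System.out.println(buf.remaining());         // 0

        ByteBufferUtils.clearToFill(buf);            // position 0, limit 64: full capacity writable again
        System.out.println(buf.remaining());         // 64
    }
}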
From source file:org.gephi.io.importer.api.ImportUtils.java
/**
 * Uncompress a GZIP file.
 */
public static File getGzFile(FileObject in, File out, boolean isTar) throws IOException {
    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];
    GZIPInputStream inputStream = null;
    FileOutputStream outStream = null;
    try {
        inputStream = new GZIPInputStream(new FileInputStream(in.getPath()));
        outStream = new FileOutputStream(out);

        if (isTar) {
            // Read Tar header
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }
    return out;
}
From source file:org.gephi.io.importer.api.ImportUtils.java
/**
 * Uncompress a Bzip2 file.
 */
public static File getBzipFile(FileObject in, File out, boolean isTar) throws IOException {
    // Stream buffer
    final int BUFF_SIZE = 8192;
    final byte[] buffer = new byte[BUFF_SIZE];
    BZip2CompressorInputStream inputStream = null;
    FileOutputStream outStream = null;
    try {
        FileInputStream is = new FileInputStream(in.getPath());
        inputStream = new BZip2CompressorInputStream(is);
        outStream = new FileOutputStream(out.getAbsolutePath());

        if (isTar) {
            // Read Tar header
            int remainingBytes = readTarHeader(inputStream);

            // Read content
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * BUFF_SIZE);
            byte[] tmpCache = new byte[BUFF_SIZE];
            int nRead, nGet;
            while ((nRead = inputStream.read(tmpCache)) != -1) {
                if (nRead == 0) {
                    continue;
                }
                bb.put(tmpCache);
                bb.position(0);
                bb.limit(nRead);
                while (bb.hasRemaining() && remainingBytes > 0) {
                    nGet = Math.min(bb.remaining(), BUFF_SIZE);
                    nGet = Math.min(nGet, remainingBytes);
                    bb.get(buffer, 0, nGet);
                    outStream.write(buffer, 0, nGet);
                    remainingBytes -= nGet;
                }
                bb.clear();
            }
        } else {
            int len;
            while ((len = inputStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, len);
            }
        }
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } finally {
        if (inputStream != null) {
            inputStream.close();
        }
        if (outStream != null) {
            outStream.close();
        }
    }
    return out;
}
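Both Gephi methods above lean on the same put / position(0) / limit(nRead) sequence: only the first nRead bytes of the staging array are meaningful, and the limit fences off everything beyond them. A stripped-down, JDK-only sketch of just that window pattern; the in-memory stream is a stand-in for the decompressor, and it narrows the put to nRead bytes where the originals put the whole array:

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;

public class WindowDemo {
    public static void main(String[] args) throws Exception {
        InputStream in = new ByteArrayInputStream("hello, limit".getBytes());
        ByteBuffer bb = ByteBuffer.allocateDirect(8192);
        byte[] tmp = new byte[8192];

        int nRead;
        while ((nRead = in.read(tmp)) != -1) {
            bb.put(tmp, 0, nRead);   // stage only the valid bytes
            bb.position(0);
            bb.limit(nRead);         // expose exactly the nRead valid bytes for draining
            byte[] out = new byte[bb.remaining()];
            bb.get(out);
            System.out.print(new String(out));
            bb.clear();              // ready for the next chunk
        }
    }
}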
From source file:LamportBasicVersion.java
private static String byteToString(ByteBuffer byteBufferFromNeighbor, MessageInfo messageInfoFromNeighbor) {
    byteBufferFromNeighbor.position(0);
    byteBufferFromNeighbor.limit(messageInfoFromNeighbor.bytes());
    byte[] bufArr = new byte[byteBufferFromNeighbor.remaining()];
    byteBufferFromNeighbor.get(bufArr);
    return new String(bufArr);
}
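One caveat: new String(bufArr) above decodes with the platform default charset. A sketch of the same position(0) / limit(n) extraction pinned to UTF-8; messageBytes stands in for messageInfoFromNeighbor.bytes() and the payload is illustrative:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class DecodeDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(64);
        buf.put("ping".getBytes(StandardCharsets.UTF_8));

        int messageBytes = 4;                 // stand-in for the received message size
        buf.position(0);
        buf.limit(messageBytes);              // expose exactly the received message
        byte[] bufArr = new byte[buf.remaining()];
        buf.get(bufArr);
        System.out.println(new String(bufArr, StandardCharsets.UTF_8)); // "ping"
    }
}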
From source file:org.apache.orc.impl.RecordReaderUtils.java
/**
 * Read the list of ranges from the file.
 * @param file the file to read
 * @param base the base of the stripe
 * @param range the disk ranges within the stripe to read
 * @return the bytes read for each disk range, which is the same length as
 *         ranges
 * @throws IOException
 */
static DiskRangeList readDiskRanges(FSDataInputStream file, HadoopShims.ZeroCopyReaderShim zcr, long base,
        DiskRangeList range, boolean doForceDirect) throws IOException {
    if (range == null) {
        return null;
    }
    DiskRangeList prev = range.prev;
    if (prev == null) {
        prev = new MutateHelper(range);
    }
    while (range != null) {
        if (range.hasData()) {
            range = range.next;
            continue;
        }
        int len = (int) (range.getEnd() - range.getOffset());
        long off = range.getOffset();
        if (zcr != null) {
            file.seek(base + off);
            boolean hasReplaced = false;
            while (len > 0) {
                ByteBuffer partial = zcr.readBuffer(len, false);
                BufferChunk bc = new BufferChunk(partial, off);
                if (!hasReplaced) {
                    range.replaceSelfWith(bc);
                    hasReplaced = true;
                } else {
                    range.insertAfter(bc);
                }
                range = bc;
                int read = partial.remaining();
                len -= read;
                off += read;
            }
        } else {
            // Don't use HDFS ByteBuffer API because it has no readFully, and is buggy and pointless.
            byte[] buffer = new byte[len];
            file.readFully((base + off), buffer, 0, buffer.length);
            ByteBuffer bb = null;
            if (doForceDirect) {
                bb = ByteBuffer.allocateDirect(len);
                bb.put(buffer);
                bb.position(0);
                bb.limit(len);
            } else {
                bb = ByteBuffer.wrap(buffer);
            }
            range = range.replaceSelfWith(new BufferChunk(bb, range.getOffset()));
        }
        range = range.next;
    }
    return prev.next;
}
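In the doForceDirect branch, the put(buffer) / position(0) / limit(len) triple is exactly what flip() would do, because the direct buffer was allocated at len. A small JDK-only check of that equivalence:

import java.nio.ByteBuffer;

public class FlipEquivalence {
    public static void main(String[] args) {
        byte[] data = { 10, 20, 30 };

        ByteBuffer a = ByteBuffer.allocateDirect(data.length);
        a.put(data);
        a.position(0);          // the explicit two-step form used above
        a.limit(data.length);

        ByteBuffer b = ByteBuffer.allocateDirect(data.length);
        b.put(data);
        b.flip();               // same position/limit result in one call

        System.out.println(a.position() == b.position() && a.limit() == b.limit()); // true
    }
}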
From source file:com.glaf.core.util.ByteBufferUtils.java
public static void readFrom(InputStream is, int needed, ByteBuffer buffer) throws IOException {
    ByteBuffer tmp = allocate(8192);
    // note: 'needed' is never decremented, so in practice the loop runs
    // until EOF or until 'buffer' has no room left
    while (needed > 0 && buffer.hasRemaining()) {
        int l = is.read(tmp.array(), 0, 8192);
        if (l < 0) {
            break;
        }
        tmp.position(0);
        tmp.limit(l);
        buffer.put(tmp);
    }
}
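Worth knowing when reusing this pattern: the bulk buffer.put(tmp) throws BufferOverflowException if tmp holds more remaining bytes than buffer has room for. A defensive JDK-only sketch that caps the transfer by shrinking the source's limit first (names and sizes are illustrative):

import java.nio.ByteBuffer;

public class GuardedPut {
    public static void main(String[] args) {
        ByteBuffer src = ByteBuffer.wrap(new byte[] { 1, 2, 3, 4, 5 });
        ByteBuffer dst = ByteBuffer.allocate(3);      // smaller than src

        // shrink src's limit so the bulk put can never overflow dst
        if (src.remaining() > dst.remaining()) {
            src.limit(src.position() + dst.remaining());
        }
        dst.put(src);                                 // safe: transfers exactly 3 bytes
        System.out.println(dst.position());           // 3
    }
}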
From source file:org.apache.hadoop.hbase.KeyValueUtil.java
public static void appendToByteBuffer(final ByteBuffer bb, final KeyValue kv,
        final boolean includeMvccVersion) {
    // keep pushing the limit out. assume enough capacity
    bb.limit(bb.position() + kv.getLength());
    bb.put(kv.getBuffer(), kv.getOffset(), kv.getLength());
    if (includeMvccVersion) {
        int numMvccVersionBytes = WritableUtils.getVIntSize(kv.getSequenceId());
        ByteBufferUtils.extendLimit(bb, numMvccVersionBytes);
        ByteBufferUtils.writeVLong(bb, kv.getSequenceId());
    }
}
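The "push the limit out" idiom above presumes the buffer arrives with its limit below capacity, so each append must first widen the window by exactly the bytes it will write. A JDK-only sketch of the idiom without the HBase types:

import java.nio.ByteBuffer;

public class ExtendLimit {
    public static void main(String[] args) {
        ByteBuffer bb = ByteBuffer.allocate(128);
        bb.limit(0);                                  // start fully fenced off

        byte[] record = { 1, 2, 3, 4 };
        bb.limit(bb.position() + record.length);      // open a window exactly as wide as the append
        bb.put(record);

        System.out.println(bb.position() + " " + bb.limit()); // 4 4
    }
}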
From source file:org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.MappableBlock.java
/**
 * Verifies the block's checksum. This is an I/O intensive operation.
 *
 * @throws ChecksumException if the block fails checksum verification
 */
private static void verifyChecksum(long length, FileInputStream metaIn, FileChannel blockChannel,
        String blockFileName) throws IOException, ChecksumException {
    // Verify the checksum from the block's meta file
    // Get the DataChecksum from the meta file header
    BlockMetadataHeader header = BlockMetadataHeader.readHeader(
            new DataInputStream(new BufferedInputStream(metaIn, BlockMetadataHeader.getHeaderSize())));
    FileChannel metaChannel = null;
    try {
        metaChannel = metaIn.getChannel();
        if (metaChannel == null) {
            throw new IOException("Block InputStream meta file has no FileChannel.");
        }
        DataChecksum checksum = header.getChecksum();
        final int bytesPerChecksum = checksum.getBytesPerChecksum();
        final int checksumSize = checksum.getChecksumSize();
        final int numChunks = (8 * 1024 * 1024) / bytesPerChecksum;
        ByteBuffer blockBuf = ByteBuffer.allocate(numChunks * bytesPerChecksum);
        ByteBuffer checksumBuf = ByteBuffer.allocate(numChunks * checksumSize);
        // Verify the checksum
        int bytesVerified = 0;
        while (bytesVerified < length) {
            Preconditions.checkState(bytesVerified % bytesPerChecksum == 0,
                    "Unexpected partial chunk before EOF");
            assert bytesVerified % bytesPerChecksum == 0;
            int bytesRead = fillBuffer(blockChannel, blockBuf);
            if (bytesRead == -1) {
                throw new IOException("checksum verification failed: premature EOF");
            }
            blockBuf.flip();
            // Number of read chunks, including partial chunk at end
            int chunks = (bytesRead + bytesPerChecksum - 1) / bytesPerChecksum;
            checksumBuf.limit(chunks * checksumSize);
            fillBuffer(metaChannel, checksumBuf);
            checksumBuf.flip();
            checksum.verifyChunkedSums(blockBuf, checksumBuf, blockFileName, bytesVerified);
            // Success
            bytesVerified += bytesRead;
            blockBuf.clear();
            checksumBuf.clear();
        }
    } finally {
        IOUtils.closeQuietly(metaChannel);
    }
}
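Setting checksumBuf's limit before the fillBuffer(...) call is what caps the read: a channel read never advances past the buffer's limit. A self-contained JDK-only sketch with an in-memory channel standing in for the meta file:

import java.io.ByteArrayInputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;

public class CappedRead {
    public static void main(String[] args) throws Exception {
        ReadableByteChannel ch = Channels.newChannel(
                new ByteArrayInputStream(new byte[100]));

        ByteBuffer buf = ByteBuffer.allocate(64);
        buf.limit(16);                     // cap this round at 16 bytes, e.g. chunks * checksumSize
        int n = ch.read(buf);              // reads at most 16 bytes regardless of what's available
        System.out.println(n);             // 16
    }
}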
From source file:de.csdev.ebus.command.EBusCommandUtils.java
/**
 * Builds an escaped master telegram part, or, if slaveData is used, a complete
 * telegram including master ACK and SYN.
 *
 * @param source
 * @param target
 * @param command
 * @param masterData
 * @return
 * @throws EBusTypeException
 */
public static ByteBuffer buildPartMasterTelegram(byte source, byte target, byte[] command, byte[] masterData)
        throws EBusTypeException {
    ByteBuffer buf = ByteBuffer.allocate(50);

    buf.put(source);                    // QQ - Source
    buf.put(target);                    // ZZ - Target
    buf.put(command);                   // PB SB - Command
    buf.put((byte) masterData.length);  // NN - Length, will be set later

    // add the escaped bytes
    for (byte b : masterData) {
        buf.put(escapeSymbol(b));
    }

    // calculate crc
    byte crc8 = EBusUtils.crc8(buf.array(), buf.position());
    buf.put(escapeSymbol(crc8));

    // set limit and reset position
    buf.limit(buf.position());
    buf.position(0);

    return buf;
}
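A hypothetical consumer of the returned buffer, showing why the closing limit(position()) / position(0) pair matters: remaining() then reports exactly the telegram's length and none of the unused capacity. The byte values below are illustrative, not a valid eBus telegram:

import java.nio.ByteBuffer;

public class DrainDemo {
    public static void main(String[] args) {
        // stand-in for the ByteBuffer returned by buildPartMasterTelegram(...)
        ByteBuffer buf = ByteBuffer.allocate(50);
        buf.put(new byte[] { (byte) 0xFF, 0x08, 0x50, 0x22 }); // hypothetical QQ ZZ PB SB bytes
        buf.limit(buf.position());   // seal at the bytes written, as the method above does
        buf.position(0);             // rewind for the consumer

        byte[] telegram = new byte[buf.remaining()];
        buf.get(telegram);           // exactly the telegram bytes, none of the unused capacity
        System.out.println(telegram.length); // 4
    }
}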