Usage examples for the java.nio.ByteBuffer method capacity()
public final int capacity()
From source file:io.blobkeeper.file.util.FileUtils.java
public static ByteBuffer readFile(@NotNull File file, long offset, long length) { ByteBuffer byteBuffer = ByteBuffer.allocate((int) length); int bytesRead = 0; try {/*from w w w .j a v a 2 s .com*/ // TODO: for exclusively read of file channel by single thread we can use read w/o offset (avoid additional seeks?) while ((bytesRead = file.getFileChannel().read(byteBuffer, offset)) != -1) { if (!byteBuffer.hasRemaining()) { byteBuffer.flip(); return byteBuffer; } offset += bytesRead; } } catch (Exception e) { log.error("Can't read file", e); } if (bytesRead < byteBuffer.capacity()) { String error = String.format("File read error for file %s", file); log.error(error); throw new IllegalArgumentException(error); } byteBuffer.flip(); return byteBuffer; }
From source file:Main.java
public static List<ByteBuffer> mergeAdjacentBuffers(List<ByteBuffer> samples) { ArrayList<ByteBuffer> nuSamples = new ArrayList<ByteBuffer>(samples.size()); for (ByteBuffer buffer : samples) { int lastIndex = nuSamples.size() - 1; if (lastIndex >= 0 && buffer.hasArray() && nuSamples.get(lastIndex).hasArray() && buffer.array() == nuSamples.get(lastIndex).array() && nuSamples.get(lastIndex).arrayOffset() + nuSamples.get(lastIndex).limit() == buffer.arrayOffset()) { ByteBuffer oldBuffer = nuSamples.remove(lastIndex); ByteBuffer nu = ByteBuffer .wrap(buffer.array(), oldBuffer.arrayOffset(), oldBuffer.limit() + buffer.limit()).slice(); // We need to slice here since wrap([], offset, length) just sets position and not the arrayOffset. nuSamples.add(nu);//from w w w .j av a2 s. co m } else if (lastIndex >= 0 && buffer instanceof MappedByteBuffer && nuSamples.get(lastIndex) instanceof MappedByteBuffer && nuSamples.get(lastIndex) .limit() == nuSamples.get(lastIndex).capacity() - buffer.capacity()) { // This can go wrong - but will it? ByteBuffer oldBuffer = nuSamples.get(lastIndex); oldBuffer.limit(buffer.limit() + oldBuffer.limit()); } else { buffer.reset(); nuSamples.add(buffer); } } return nuSamples; }
From source file:org.apache.htrace.impl.TestPackedBuffer.java
@Test(timeout = 60000) public void testPackSpans() throws Exception { Random rand = new Random(123); byte[] arr = new byte[16384]; ByteBuffer bb = ByteBuffer.wrap(arr); bb.limit(bb.capacity()); PackedBuffer buf = new PackedBuffer(bb); final int NUM_TEST_SPANS = 5; Span[] spans = new Span[NUM_TEST_SPANS]; for (int i = 0; i < NUM_TEST_SPANS; i++) { spans[i] = TestUtil.randomSpan(rand); }// www.ja v a 2 s . c om for (int i = 0; i < NUM_TEST_SPANS; i++) { buf.writeSpan(spans[i]); } LOG.info("wrote " + buf.toHexString()); MessagePack msgpack = new MessagePack(PackedBuffer.MSGPACK_CONF); MessageUnpacker unpacker = msgpack.newUnpacker(arr, 0, bb.position()); Span[] respans = new Span[NUM_TEST_SPANS]; for (int i = 0; i < NUM_TEST_SPANS; i++) { respans[i] = PackedBuffer.readSpan(unpacker); } for (int i = 0; i < NUM_TEST_SPANS; i++) { Assert.assertEquals("Failed to read back span " + i, spans[i].toJson(), respans[i].toJson()); } }
From source file:byps.BWire.java
/** * Reads a ByteBuffer from an InputStream * Closes the InputStream./*from w w w. j a v a2 s. c o m*/ * @param is * @return * @throws IOException */ public static ByteBuffer bufferFromStream(InputStream is, Boolean gzip) throws IOException { if (is == null) return null; try { ByteBuffer ibuf = ByteBuffer.allocate(10 * 1000); if (gzip != null) { if (gzip) { is = new GZIPInputStream(is); } } else { if (!is.markSupported()) is = new BufferedInputStream(is, 2); is.mark(2); int magic = is.read() | (is.read() << 8); is.reset(); if (magic == GZIPInputStream.GZIP_MAGIC) { is = new GZIPInputStream(is); } } ReadableByteChannel rch = Channels.newChannel(is); while (rch.read(ibuf) != -1) { if (ibuf.remaining() == 0) { ByteBuffer nbuf = ByteBuffer.allocate(ibuf.capacity() * 2); ibuf.flip(); nbuf.put(ibuf); ibuf = nbuf; } } ibuf.flip(); return ibuf; } finally { is.close(); } }
From source file:org.apache.hadoop.hbase.io.ByteBufferPool.java
/**
 * Returns a ByteBuffer to the pool after use. Buffers that were not obtained
 * from this pool (wrong capacity or wrong direct/heap kind) are ignored with a
 * warning rather than pooled.
 *
 * @param buf ByteBuffer to return.
 */
public void putbackBuffer(ByteBuffer buf) {
    boolean sizeMatches = buf.capacity() == this.bufferSize;
    boolean kindMatches = buf.isDirect() == this.directByteBuffer;
    if (sizeMatches && kindMatches) {
        buffers.offer(buf);
    } else {
        LOG.warn("Trying to put a buffer, not created by this pool! Will be just ignored");
    }
}
From source file:org.apache.hadoop.hbase.io.hfile.slab.Slab.java
/**
 * Returns a previously handed-out block to this slab's free list.
 *
 * @param toBeFreed block to recycle; must have exactly this slab's block size
 * @throws IllegalArgumentException if the buffer's capacity does not match blockSize
 */
void free(ByteBuffer toBeFreed) {
    // Equivalent to Preconditions.checkArgument(...): reject buffers that were
    // not carved from this slab.
    if (toBeFreed.capacity() != blockSize) {
        throw new IllegalArgumentException();
    }
    buffers.add(toBeFreed);
}
From source file:com.openteach.diamond.network.waverider.network.Packet.java
/**
 * Reassembles one complete Packet from a stream of ByteBuffer fragments queued
 * in {@code inputBuffer}.
 *
 * <p>The wire format (as this code consumes it) is a fixed-size header of
 * {@code Packet.getHeaderSize()} bytes containing, at
 * {@code Packet.getLengthPosition()}, an int giving the total packet size.
 * The method first fills the header, then allocates a buffer of that size,
 * copies the header into it, and fills the remainder from the queue before
 * handing the whole thing to {@code Packet.unmarshall}.
 *
 * <p>Queue discipline: a fragment is only {@code remove()}d once fully
 * consumed; a partially consumed fragment is left at the head via
 * {@code peek()} so the next fill loop continues from its current position.
 *
 * @param inputBuffer queue of received network fragments, oldest first
 * @param endPoint    endpoint used to request more data when the queue is empty
 * @param channel     channel the fragments originate from
 * @return the fully unmarshalled Packet
 * @throws IOException          if the socket was closed by another thread
 * @throws InterruptedException if interrupted while sleeping for more data
 */
public static Packet parse(BlockingQueue<ByteBuffer> inputBuffer, NetWorkEndPoint endPoint,
        SocketChannel channel) throws IOException, InterruptedException {
    // Scratch array for bulk copies between queue fragments and the target buffers.
    byte[] tmpBuf = new byte[NetWorkConstants.DEFAULT_NETWORK_BUFFER_SIZE];
    // Buffer for packet header
    ByteBuffer header = ByteBuffer.allocate(Packet.getHeaderSize());
    ByteBuffer currentBuffer = null;
    int rest = 0;
    boolean isRemove = false;
    // Phase 1: fill the fixed-size header from queued fragments.
    while (true) {
        // Busy-wait (yielding) until a fragment is available; notifyRead returning
        // false means the socket was closed by another thread.
        while ((currentBuffer = inputBuffer.peek()) == null) {
            if (!endPoint.notifyRead(channel)) {
                throw new IOException("Socket closed by other thread");
            }
            Thread.yield();
        }
        isRemove = false;
        // Bytes still needed to complete the header.
        rest = header.capacity() - header.position();
        if (currentBuffer.remaining() >= rest) {
            // This fragment completes the header; remove it from the queue only
            // if it is consumed exactly to its end.
            if (currentBuffer.remaining() == rest) {
                isRemove = true;
            }
            currentBuffer.get(tmpBuf, 0, rest);
            header.put(tmpBuf, 0, rest);
            if (isRemove) {
                inputBuffer.remove();
            }
            break;
        } else {
            // Fragment is smaller than what's missing: consume it whole and continue.
            header.put(currentBuffer);
            inputBuffer.remove();
        }
    }
    header.flip();
    // Total packet length is encoded as an int inside the header.
    // NOTE(review): boxed Integer here forces an unboxing at allocate(); a plain
    // int would do. Left untouched to keep behavior identical.
    Integer size = header.getInt(Packet.getLengthPosition());
    ByteBuffer buffer = ByteBuffer.allocate(size);
    // The full packet buffer starts with a copy of the header bytes.
    buffer.put(header);
    header.clear();
    // Phase 2: fill the rest of the packet body, same consume logic as phase 1.
    while (true) {
        // NOTE(review): unlike phase 1, this wait loop ignores notifyRead's return
        // value (no closed-socket detection) and sleeps a full second instead of
        // yielding — presumably intentional back-off, but worth confirming.
        while ((currentBuffer = inputBuffer.peek()) == null) {
            endPoint.notifyRead(channel);
            Thread.sleep(1000);
        }
        isRemove = false;
        rest = buffer.capacity() - buffer.position();
        if (currentBuffer.remaining() >= rest) {
            if (currentBuffer.remaining() == rest) {
                isRemove = true;
            }
            currentBuffer.get(tmpBuf, 0, rest);
            buffer.put(tmpBuf, 0, rest);
            if (isRemove) {
                inputBuffer.remove();
            }
            break;
        } else {
            buffer.put(currentBuffer);
            inputBuffer.remove();
        }
    }
    buffer.flip();
    Packet packet = Packet.unmarshall(buffer);
    return packet;
}
From source file:Main.java
/**
 * Coalesces buffers that are physically contiguous in memory.
 *
 * <p>This is a decompiled duplicate of the array/mapped-buffer merge routine;
 * rewritten with generics and an enhanced for-loop (the decompiler output used
 * raw types with a cast on every access). Behavior is unchanged: heap buffers
 * sharing one backing array and laid end-to-end are fused via wrap+slice, and
 * back-to-back MappedByteBuffers are fused by extending the previous limit.
 * Non-merged buffers are reset to their mark, so each must have a mark set.
 *
 * @param samples buffers in file order
 * @return a new list in which adjacent, contiguous buffers have been fused
 */
public static List<ByteBuffer> mergeAdjacentBuffers(List<ByteBuffer> samples) {
    List<ByteBuffer> merged = new ArrayList<ByteBuffer>(samples.size());
    for (ByteBuffer buffer : samples) {
        int lastIndex = merged.size() - 1;
        if (lastIndex >= 0 && buffer.hasArray() && merged.get(lastIndex).hasArray()
                && buffer.array() == merged.get(lastIndex).array()
                && merged.get(lastIndex).arrayOffset() + merged.get(lastIndex).limit()
                        == buffer.arrayOffset()) {
            ByteBuffer previous = merged.remove(lastIndex);
            // slice() is required: wrap(array, offset, length) sets position only,
            // not arrayOffset.
            merged.add(ByteBuffer
                    .wrap(buffer.array(), previous.arrayOffset(),
                            previous.limit() + buffer.limit())
                    .slice());
        } else if (lastIndex >= 0 && buffer instanceof MappedByteBuffer
                && merged.get(lastIndex) instanceof MappedByteBuffer
                && merged.get(lastIndex).limit()
                        == merged.get(lastIndex).capacity() - buffer.capacity()) {
            ByteBuffer previous = merged.get(lastIndex);
            previous.limit(buffer.limit() + previous.limit());
        } else {
            buffer.reset();
            merged.add(buffer);
        }
    }
    return merged;
}
From source file:de.undercouch.bson4jackson.io.StaticBuffers.java
/**
 * Hands out a ByteBuffer of at least {@code minSize} bytes, reusing the cached
 * buffer for {@code key} when it is large enough; otherwise allocates a fresh one.
 *
 * @see #charBuffer(Key, int)
 */
public ByteBuffer byteBuffer(Key key, int minSize) {
    int size = Math.max(minSize, GLOBAL_MIN_SIZE);
    int slot = key.ordinal();
    ByteBuffer cached = _byteBuffers[slot];
    if (cached != null && cached.capacity() >= size) {
        // Take the cached buffer out of the slot so it cannot be handed out twice.
        _byteBuffers[slot] = null;
        cached.clear();
        return cached;
    }
    return ByteBuffer.allocate(size);
}
From source file:org.apache.hadoop.hbase.io.hfile.slab.Slab.java
private void allocateAndSlice(int size, int sliceSize) { ByteBuffer newSlab = ByteBuffer.allocateDirect(size); slabs.add(newSlab);//from www. j av a 2 s .c o m for (int j = 0; j < newSlab.capacity(); j += sliceSize) { newSlab.limit(j + sliceSize).position(j); ByteBuffer aSlice = newSlab.slice(); buffers.add(aSlice); heapSize += ClassSize.estimateBase(aSlice.getClass(), false); } }