List of usage examples for java.nio.ByteBuffer#capacity()
public final int capacity()
From source file:com.intel.hadoop.hbase.dot.doc.serializer.AvroData.java
byte[] getValue(String field) { Object value = this.record.get(field); if (null != value) { ByteBuffer buf = (ByteBuffer) value; // when reusing record, ByteBuffer size could be larger than actual data // need to check the limit and return byte[] only contain the real data. return buf.limit() == buf.capacity() ? buf.array() : Bytes.getBytes(buf); }//from w w w .j a v a2 s .c o m return null; }
From source file:org.wso2.carbon.inbound.endpoint.protocol.hl7.codec.HL7Codec.java
/**
 * Copies response bytes into {@code byteBuffer}, emitting the MLLP start-of-
 * message header byte first when the codec is still in the WRITE_HEADER state.
 * Advances {@code responseReadPosition} so that a payload larger than the
 * buffer is written across successive calls; flips the buffer before return.
 * NOTE(review): assumes {@code byteBuffer} arrives cleared (position 0,
 * limit == capacity) — verify against callers.
 *
 * @param byteBuffer    destination buffer to fill (flipped on return)
 * @param responseBytes HL7 payload to send; {@code null} writes nothing
 * @return number of payload bytes copied (excluding the header byte)
 */
private int fillBuffer(ByteBuffer byteBuffer, byte[] responseBytes) {
    if (responseBytes == null) {
        return 0;
    }
    byte b;
    int count = 0;
    int headerPosition = 0;
    if (this.state == WRITE_HEADER) {
        // The start-of-message marker consumes one byte of buffer space.
        byteBuffer.put(MLLPConstants.HL7_HEADER[0]);
        headerPosition = 1;
        this.state = WRITE_CONTENT;
    }
    // MAX is the total byte budget for this call: the full buffer capacity,
    // shrunk to the remaining payload (plus any header byte) when it fits.
    int MAX = byteBuffer.capacity();
    if (byteBuffer.capacity() - (responseBytes.length - responseReadPosition + headerPosition) > 0) {
        MAX = responseBytes.length - responseReadPosition + headerPosition;
    }
    // Copy MAX - headerPosition payload bytes starting at the saved offset.
    for (int i = responseReadPosition; i < MAX + responseReadPosition - headerPosition; i++) {
        count++;
        b = responseBytes[i];
        byteBuffer.put(b);
    }
    responseReadPosition += count;
    if (responseReadPosition == responseBytes.length) {
        // Payload fully written; subsequent call(s) emit the MLLP trailer.
        this.state = WRITE_TRAILER;
        responseReadPosition = 0;
    }
    byteBuffer.flip();
    return count;
}
From source file:com.github.cambierr.lorawanpacket.semtech.Txpk.java
public JSONObject toJson() throws MalformedPacketException { JSONObject output = new JSONObject(); output.put("imme", isImme()); output.put("tmst", getTmst()); output.put("time", getTime()); output.put("freq", getFreq()); output.put("rfch", getRfch()); output.put("powe", getPowe()); output.put("modu", getModu().name()); if (getModu().equals(Modulation.LORA)) { output.put("codr", getCodr()); output.put("ipol", isIpol()); } else {//w w w . j a va2 s . c om output.put("fdev", getFdev()); } output.put("datr", getDatr()); output.put("prea", getPrea()); output.put("size", getSize()); output.put("ncrc", isNcrc()); ByteBuffer bb = ByteBuffer.allocate(384); getData().toRaw(bb); output.put("data", Base64.getEncoder() .encodeToString(Arrays.copyOfRange(bb.array(), 0, bb.capacity() - bb.remaining()))); return output; }
From source file:org.exist.versioning.svn.core.internal.io.fs.FSFile.java
private int readFromInputStream(long fromPos, ByteBuffer buffer) throws IOException { resetInputStream().skip(fromPos);/*from ww w .j av a2s . c o m*/ int capacity = buffer.capacity(); byte[] b; if (myFile.length() == -1) { b = new byte[capacity]; int readed = myInputStream.read(b); } else { int bufferSize = capacity; //BUG: buffer limit by int! long longSize = myFile.length() - fromPos; if (longSize > capacity) bufferSize = capacity; else bufferSize = (int) longSize; b = new byte[bufferSize]; myInputStream.read(b); } buffer.put(b); return b.length; }
From source file:edu.uci.ics.hyracks.dataflow.std.sort.util.DeletableFrameTupleAppenderTest.java
/**
 * Appends every tuple of a populated frame into an empty one and verifies
 * the destination frame matches the source byte-for-byte.
 */
@Test
public void testAppend() throws Exception {
    final int tupleCount = 10;
    ByteBuffer source = makeAFrame(cap, tupleCount, 0);
    DeletableFrameTupleAppender reader = new DeletableFrameTupleAppender(recordDescriptor);
    reader.reset(source);
    ByteBuffer target = ByteBuffer.allocate(cap);
    appender.clear(target);
    // Copy each tuple from the source frame into the target frame.
    final int total = reader.getTupleCount();
    for (int t = 0; t < total; t++) {
        appender.append(reader, t);
    }
    // Every byte of the rebuilt frame must equal the original.
    for (int pos = 0, limit = source.capacity(); pos < limit; pos++) {
        assertEquals(source.get(pos), target.get(pos));
    }
}
From source file:org.apache.parquet.hadoop.DirectCodecFactory.java
private ByteBuffer ensure(ByteBuffer buffer, int size) { if (buffer == null) { buffer = allocator.allocate(size); } else if (buffer.capacity() >= size) { buffer.clear();//ww w . j av a 2s . com } else { release(buffer); buffer = allocator.allocate(size); } return buffer; }
From source file:org.bytesoft.bytetcc.work.CleanupWork.java
/**
 * Appends a forget record for {@code xid}/{@code resource} to the cleanup
 * log file and registers it in the in-memory per-resource index, growing the
 * backing file when needed. Threads blocked on {@code condition} are woken
 * so they can observe the new end-of-log position.
 * NOTE(review): the record header size is taken from {@code buffer.capacity()},
 * not {@code remaining()} — assumes the caller sized the buffer exactly.
 *
 * @param xid      transaction id the record belongs to
 * @param resource resource identifier keying the in-memory record index
 * @param buffer   fully written record bytes (flipped here before writing)
 * @throws IllegalStateException if growing or writing the file fails
 */
private void invokeForget(Xid xid, String resource, ByteBuffer buffer) throws IllegalStateException {
    try {
        this.lock.lock();
        // New end-of-log offset once this record is appended.
        int position = buffer.capacity() + this.endIndex;
        if (position > this.sizeOfRaf) {
            // Grow the file up-front so the channel write cannot run short.
            try {
                this.raf.setLength(position);
                this.sizeOfRaf = position;
            } catch (IOException ex) {
                throw new IllegalStateException(ex.getMessage());
            }
        }
        try {
            this.channel.position(this.endIndex);
            buffer.flip();
            this.channel.write(buffer);
        } catch (Exception ex) {
            throw new IllegalStateException(ex.getMessage());
        }
        int current = this.endIndex;
        this.endIndex = position;
        // Persist the new end offset into the file header
        // (skipping identifier, version and the preceding int field).
        this.header.position(IDENTIFIER.length + 2 + 4);
        this.header.putInt(position);
        Record record = new Record();
        record.resource = resource;
        record.xid = xid;
        record.startIndex = current;
        // Index the record under its resource for later lookup/cleanup.
        List<Record> recordList = this.recordMap.get(record.resource);
        if (recordList == null) {
            recordList = new ArrayList<Record>();
            this.recordMap.put(record.resource, recordList);
        }
        recordList.add(record);
        this.condition.signalAll();
    } finally {
        this.lock.unlock();
    }
}
From source file:io.github.dsheirer.record.wave.WaveWriter.java
/** * Writes the buffer contents to the file. Assumes that the buffer is full * and the first byte of data is at position 0. *///from w w w . ja v a 2s. c om public void writeData(ByteBuffer buffer) throws IOException { buffer.position(0); openDataChunk(); /* Write the full buffer if there is room, respecting the max file size */ if (mFileChannel.size() + buffer.capacity() < mMaxSize) { while (buffer.hasRemaining()) { mDataChunkSize += mFileChannel.write(buffer); } updateTotalSize(); updateDataChunkSize(); } else { /* Split the buffer to finish filling the current file and then put * the leftover into a new file */ int remaining = (int) (mMaxSize - mFileChannel.size()); /* Ensure we write full frames to fill up the remaining size */ remaining -= (int) (remaining % mAudioFormat.getFrameSize()); byte[] bytes = buffer.array(); ByteBuffer current = ByteBuffer.wrap(Arrays.copyOf(bytes, remaining)); ByteBuffer next = ByteBuffer.wrap(Arrays.copyOfRange(bytes, remaining, bytes.length)); while (current.hasRemaining()) { mDataChunkSize += mFileChannel.write(current); } updateTotalSize(); updateDataChunkSize(); rollover(); openDataChunk(); while (next.hasRemaining()) { mDataChunkSize += mFileChannel.write(next); } updateTotalSize(); updateDataChunkSize(); } }
From source file:org.wso2.msf4j.internal.router.TestMicroservice.java
/**
 * Receives an uploaded body in aggregated form and reports its size.
 *
 * @param request the inbound request whose full message body is merged
 * @return a string of the form {@code "Uploaded:<byteCount>"}
 */
@Path("/aggregate/upload")
@PUT
public String aggregatedUpload(@Context Request request) {
    // Merge all body chunks into one buffer; its capacity is the total size.
    final ByteBuffer merged = BufferUtil.merge(request.getFullMessageBody());
    return "Uploaded:" + merged.capacity();
}
From source file:org.jtrfp.trcl.core.Texture.java
/**
 * Vector-quantization compresses a square RGBA8888 image, deriving the side
 * length from the buffer size (capacity / 4 bytes per pixel), then delegates
 * to the vector-list overload.
 *
 * @param imageRGBA8888  pixel data, 4 bytes per pixel; expected to describe
 *                       a square power-of-two image (warned about otherwise)
 * @param imageESTuTv8888 optional secondary channel data; when {@code null}
 *                        a constant zero vector list is substituted
 */
private void vqCompress(ByteBuffer imageRGBA8888, ByteBuffer imageESTuTv8888) {
    // side = sqrt(pixelCount); non-square or non-power-of-two inputs are
    // tolerated but warned about below.
    final double fuzzySideLength = Math.sqrt((imageRGBA8888.capacity() / 4));
    final int sideLength = (int) Math.floor(fuzzySideLength);
    if (!SpecialRAWDimensions.isPowerOfTwo(sideLength))
        System.err.println("WARNING: Calculated dimensions are not power-of-two. Trouble ahead.");
    if (Math.abs(fuzzySideLength - sideLength) > .001)
        System.err.println("WARNING: Calculated dimensions are not perfectly square. Trouble ahead.");
    // Break down into 4x4 blocks
    final ByteBufferVectorList bbvl = new ByteBufferVectorList(imageRGBA8888);
    final RGBA8888VectorList rgba8888vl = new RGBA8888VectorList(bbvl);
    final VectorList bbvlESTuTv = imageESTuTv8888 != null ? new ByteBufferVectorList(imageESTuTv8888) : new ConstantVectorList(0, bbvl);
    // NOTE(review): bbvlESTuTv can never be null here — both ternary arms
    // above construct an object — so the null arm below is dead code.
    final RGBA8888VectorList esTuTv8888vl = bbvlESTuTv != null ? new RGBA8888VectorList(bbvlESTuTv) : null;
    vqCompress(rgba8888vl, esTuTv8888vl, sideLength);
}