List of usage examples for java.nio.ByteBuffer.limit()
public final int limit()
From source file:com.glaf.core.util.BinaryUtils.java
/** * Returns a copy of all the bytes from the given <code>ByteBuffer</code>, * from the beginning to the buffer's limit; or null if the input is null. * <p>/*from w w w .j av a 2 s. c o m*/ * The internal states of the given byte buffer will be restored when this * method completes execution. * <p> * When handling <code>ByteBuffer</code> from user's input, it's typical to * call the {@link #copyBytesFrom(ByteBuffer)} instead of * {@link #copyAllBytesFrom(ByteBuffer)} so as to account for the position * of the input <code>ByteBuffer</code>. The opposite is typically true, * however, when handling <code>ByteBuffer</code> from withint the * unmarshallers of the low-level clients. */ public static byte[] copyAllBytesFrom(ByteBuffer bb) { if (bb == null) return null; if (bb.hasArray()) return Arrays.copyOf(bb.array(), bb.limit()); bb.mark(); // the default ByteBuffer#mark() and reset() won't work, as the // rewind would discard the mark position final int marked = bb.position(); try { byte[] dst = new byte[bb.rewind().remaining()]; bb.get(dst); return dst; } finally { bb.position(marked); } }
From source file:com.icloud.framework.core.nio.ByteBufferUtil.java
/**
 * Compares two array-backed buffers byte-by-byte as unsigned values,
 * from each buffer's position to its limit, by delegating to the
 * array-range {@code compareUnsigned} overload.
 */
public static int compareUnsigned(ByteBuffer o1, ByteBuffer o2) {
    // Translate buffer-relative position/limit into backing-array indexes.
    final int start1 = o1.arrayOffset() + o1.position();
    final int start2 = o2.arrayOffset() + o2.position();
    final int end1 = o1.arrayOffset() + o1.limit();
    final int end2 = o2.arrayOffset() + o2.limit();
    return compareUnsigned(o1.array(), o2.array(), start1, start2, end1, end2);
}
From source file:alluxio.util.FormatUtils.java
/** * Parses a {@link ByteBuffer} into a {@link String}. In particular, the function prints the * content of the buffer in 4-byte increments as space separated integers. * * @param buf buffer to use/* ww w . ja v a2 s . co m*/ * @return the String representation of the {@link ByteBuffer} */ public static String byteBufferToString(ByteBuffer buf) { StringBuilder sb = new StringBuilder(); for (int k = 0; k < buf.limit() / 4; k++) { if (k != 0) { sb.append(" "); } sb.append(buf.getInt()); } return sb.toString(); }
From source file:com.silverpeas.ical.StringUtils.java
static byte[] encodeArray(char[] chars, Charset encoding) throws CharacterCodingException { if (CharEncoding.US_ASCII.equals(encoding.name())) { byte[] array = new byte[chars.length]; for (int i = 0; i < array.length; i++) { array[i] = (byte) chars[i]; }//w ww.j a v a 2 s. c o m return array; } ByteBuffer buffer = encoding.newEncoder().encode(CharBuffer.wrap(chars)); byte[] array = new byte[buffer.limit()]; System.arraycopy(buffer.array(), 0, array, 0, array.length); return array; }
From source file:Main.java
/**
 * Tests position-to-limit equality of two array-backed buffers by
 * delegating to the array-range {@code equals} helper.
 * NOTE(review): the indexes are passed without adding {@code arrayOffset()},
 * so this assumes zero-offset backing arrays — confirm against callers.
 */
public final static boolean equalsP(final ByteBuffer bb1, final ByteBuffer bb2) {
    return equals(bb1.array(), bb1.position(), bb1.limit(),
            bb2.array(), bb2.position(), bb2.limit());
}
From source file:org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.java
/**
 * Drains an entire {@link ByteBuffer} into a new byte array.
 * The buffer is rewound first, so bytes [0, limit) are returned; the buffer
 * IS mutated — its position ends up at the limit.
 *
 * @param byteBuffer the buffer to read (rewound and consumed)
 * @return a new array holding all bytes up to the buffer's limit
 */
public static byte[] getBytesFromByteBuffer(ByteBuffer byteBuffer) {
    byteBuffer.rewind();
    final byte[] out = new byte[byteBuffer.limit()];
    byteBuffer.get(out);
    return out;
}
From source file:com.glaf.core.util.BinaryUtils.java
/**
 * Returns a copy of the bytes from the given <code>ByteBuffer</code>,
 * ranging from the buffer's current position to the buffer's limit; or
 * null if the input is null.
 * <p>
 * The internal states of the given byte buffer will be restored when this
 * method completes execution.
 * <p>
 * When handling <code>ByteBuffer</code> from user's input, it's typical to
 * call the {@link #copyBytesFrom(ByteBuffer)} instead of
 * {@link #copyAllBytesFrom(ByteBuffer)} so as to account for the position
 * of the input <code>ByteBuffer</code>. The opposite is typically true,
 * however, when handling <code>ByteBuffer</code> from within the
 * unmarshallers of the low-level clients.
 *
 * @param bb the buffer to copy; may be null
 * @return a new array containing bytes [position, limit) of the buffer, or null
 */
public static byte[] copyBytesFrom(ByteBuffer bb) {
    if (bb == null) {
        return null;
    }
    if (bb.hasArray()) {
        // Include arrayOffset(): position/limit are buffer-relative, while
        // copyOfRange indexes the backing array directly.
        int start = bb.arrayOffset() + bb.position();
        int end = bb.arrayOffset() + bb.limit();
        return Arrays.copyOfRange(bb.array(), start, end);
    }
    bb.mark();
    try {
        byte[] dst = new byte[bb.remaining()];
        bb.get(dst);
        return dst;
    } finally {
        bb.reset();
    }
}
From source file:com.buaa.cfs.common.oncrpc.XDR.java
/** Write an XDR message to a TCP ChannelBuffer */ public static ChannelBuffer writeMessageTcp(XDR request, boolean last) { Preconditions.checkState(request.state == XDR.State.WRITING); ByteBuffer b = request.buf.duplicate(); b.flip();/* w w w . j av a 2 s .c om*/ byte[] fragmentHeader = XDR.recordMark(b.limit(), last); ByteBuffer headerBuf = ByteBuffer.wrap(fragmentHeader); // TODO: Investigate whether making a copy of the buffer is necessary. return ChannelBuffers.copiedBuffer(headerBuf, b); }
From source file:cn.ctyun.amazonaws.util.StringUtils.java
/** * Base64 encodes the data in the specified byte buffer and returns it as a * base64 encoded string./*w w w . j av a 2 s . c o m*/ * * @param byteBuffer * The data to base64 encode and return as a string. * * @return The base64 encoded contents of the specified byte buffer. */ public static String fromByteBuffer(ByteBuffer byteBuffer) { byte[] encodedBytes = null; if (byteBuffer.hasArray()) { encodedBytes = Base64.encodeBase64(byteBuffer.array()); } else { byte[] binaryData = new byte[byteBuffer.limit()]; byteBuffer.get(binaryData); encodedBytes = Base64.encodeBase64(binaryData); } return new String(encodedBytes); }
From source file:org.commoncrawl.service.queryserver.master.S3Helper.java
/**
 * Downloads a single ARC-file entry from S3 and decodes it into an
 * {@link ArcFileItem}.
 * <p>
 * The download is performed asynchronously via {@code S3Downloader}; this
 * method blocks on a semaphore until the download completes or fails, then
 * feeds the accumulated buffers into a {@code StreamingArcFileReader}.
 *
 * @param archiveInfo identifies the crawl, ARC file, offset and size of the entry
 * @param eventLoop   event loop driving the asynchronous S3 download
 * @return the decoded item, or null if no content was received
 * @throws IOException on download/decode failure
 */
public static ArcFileItem retrieveArcFileItem(ArchiveInfo archiveInfo, EventLoop eventLoop) throws IOException {
    // the default bucket id
    String bucketId = "commoncrawl-crawl-002";
    // ok, see if we need to switch buckets (crawl #1 lives in the legacy bucket)
    if (archiveInfo.getCrawlNumber() == 1) {
        bucketId = "commoncrawl";
    }
    S3Downloader downloader = new S3Downloader(bucketId, "", "", false);
    // Released once by either downloadComplete or downloadFailed; the main
    // thread blocks on it below.
    final Semaphore downloadCompleteSemaphore = new Semaphore(0);
    final StreamingArcFileReader arcFileReader = new StreamingArcFileReader(false);
    //arcFileReader.setArcFileHasHeaderItemFlag(false);
    // create a buffer list we will append incoming content into ...
    final LinkedList<ByteBuffer> bufferList = new LinkedList<ByteBuffer>();
    downloader.initialize(new S3Downloader.Callback() {

        // Drains every available chunk into bufferList as content streams in.
        @Override
        public boolean contentAvailable(int itemId, String itemKey, NIOBufferList contentBuffer) {
            LOG.info("ContentQuery contentAvailable called for Item:" + itemKey + " totalBytesAvailable:"
                    + contentBuffer.available());
            try {
                while (contentBuffer.available() != 0) {
                    bufferList.add(contentBuffer.read());
                }
                return true;
            } catch (IOException e) {
                LOG.error(CCStringUtils.stringifyException(e));
                return false;
            }
        }

        @Override
        public void downloadComplete(int itemId, String itemKey) {
            LOG.info("S3 Download Complete for item:" + itemKey);
            downloadCompleteSemaphore.release();
        }

        @Override
        public void downloadFailed(int itemId, String itemKey, String errorCode) {
            LOG.info("S3 Download Failed for item:" + itemKey);
            downloadCompleteSemaphore.release();
        }

        @Override
        public boolean downloadStarting(int itemId, String itemKey, int contentLength) {
            LOG.info("ContentQuery DownloadStarting for Item:" + itemKey + " contentLength:" + contentLength);
            return true;
        }
    }, eventLoop);

    LOG.info("Starting request for Item:"
            + hdfsNameToS3ArcFileName(archiveInfo.getArcfileDate(), archiveInfo.getArcfileIndex()) + " Offset:"
            + archiveInfo.getArcfileOffset());

    // Fall back to 30000 bytes when the compressed size is unknown (0);
    // the +10 / -10 padding widens the fetch window around the recorded
    // offset — presumably to tolerate imprecise offsets; confirm against
    // scanForGZIPHeader below, which re-locates the exact entry start.
    int sizeToRetrieve = (archiveInfo.getCompressedSize() != 0) ? archiveInfo.getCompressedSize() : 30000;
    sizeToRetrieve += 10;

    downloader.fetchPartialItem(
            hdfsNameToS3ArcFileName(archiveInfo.getArcfileDate(), archiveInfo.getArcfileIndex()),
            archiveInfo.getArcfileOffset() - 10, sizeToRetrieve);
    // Block (uninterruptibly) until the async download completes or fails.
    downloadCompleteSemaphore.acquireUninterruptibly();

    if (bufferList.size() == 0) {
        return null;
    }

    // Because the fetch started 10 bytes early, scan the first buffer for the
    // real GZIP header and position past any leading garbage.
    ByteBuffer firstBuffer = bufferList.getFirst();
    if (firstBuffer != null) {
        int offsetToGZIPHeader = scanForGZIPHeader(firstBuffer.duplicate());
        if (offsetToGZIPHeader != -1) {
            firstBuffer.position(offsetToGZIPHeader);
            LOG.info("*** Offset to GZIP Header:" + offsetToGZIPHeader);
        } else {
            LOG.error("*** Failed to find GZIP Header offset");
        }
    }

    // now try to decode content if possible
    for (ByteBuffer buffer : bufferList) {
        LOG.info("Adding Buffer of Size:" + buffer.remaining() + " Position:" + buffer.position() + " Limit:"
                + buffer.limit());
        arcFileReader.available(buffer);
    }

    ArcFileItem item = arcFileReader.getNextItem();

    if (item != null) {
        LOG.info("Request Returned item:" + item.getUri());
        LOG.info("Uncompressed Size:" + item.getContent().getCount());
    }

    return item;
}