List of usage examples for java.nio.ByteBuffer.position()
public final int position()
Returns this buffer's current position, i.e. the index of the next element to be read or written.
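Before the project-level examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of how position() moves as a buffer is written, flipped, and read:

import java.nio.ByteBuffer;

public class PositionDemo {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(16);
        System.out.println(buf.position());      // 0 right after allocation

        buf.putInt(42);                          // relative writes advance the position
        System.out.println(buf.position());      // 4

        buf.flip();                              // limit = old position, position = 0
        int value = buf.getInt();                // relative reads advance it as well
        System.out.println(buf.position() + " " + value); // "4 42"

        buf.rewind();                            // position back to 0, limit unchanged
        buf.position(2);                         // absolute repositioning
        System.out.println(buf.position());      // 2
    }
}

The snippets that follow use these same primitives: position() to measure how many bytes have been produced or consumed, and position(int), rewind(), flip() or clear() to move the cursor explicitly.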
From source file:com.yobidrive.diskmap.needles.Needle.java
public boolean getNeedleHeaderFromBuffer(ByteBuffer input) throws Exception {
    try {
        // Reinit needle
        keyBytes = null;
        version = null;
        flags = 0x00;
        size = 0;
        data = null;
        previousNeedle = null; // Chaining
        readBytes = 0;
        // Processes reading
        input.rewind();
        int startPosition = input.position();
        int magic = input.getInt();
        if (magic == MAGICSTART_BADENDIAN) {
            if (input.order().equals(ByteOrder.BIG_ENDIAN))
                input.order(ByteOrder.LITTLE_ENDIAN);
            else
                input.order(ByteOrder.BIG_ENDIAN);
        } else if (magic != MAGICSTART) {
            logger.error("Buffer not starting with needle");
            return false;
        }
        needleNumber = input.getLong();
        flags = input.get();
        int keyLen = input.getInt();
        if (keyLen > 2028) {
            logger.error("Crazy needle key len");
            return false;
        }
        keyBytes = new byte[keyLen];
        input.get(keyBytes);
        int versionLen = input.getInt();
        if (versionLen > 1024 * 16) {
            logger.error("Crazy needle version len");
            return false;
        }
        if (versionLen == 0)
            version = null;
        else {
            byte[] versionBytes = new byte[versionLen];
            input.get(versionBytes);
            version = new VectorClock(versionBytes);
        }
        int previousLogNumber = input.getInt(); // Chaining
        long previousNeedleOffset = input.getLong(); // Chaining
        if (previousLogNumber != -1 && previousNeedleOffset != -1L) {
            previousNeedle = new NeedlePointer();
            previousNeedle.setNeedleFileNumber(previousLogNumber);
            previousNeedle.setNeedleOffset(previousNeedleOffset);
        }
        originalFileNumber = input.getInt(); // Original needle location (for cleaning)
        originalSize = input.getInt(); // Original needle size (for cleaning)
        size = input.getInt();
        // Header length = bytes consumed so far, measured as the distance from the start position
        readBytes = input.position() - startPosition;
        input.rewind();
        // input.mark() ;
        return true;
    } catch (BufferUnderflowException bue) {
        return false;
    }
}
From source file:org.wso2.carbon.http2.transport.util.http2Encoder.java
@Override
public int write(ByteBuffer src) throws IOException {
    int l = 0;
    //channel.newPromise();
    while (src.hasRemaining()) {
        byte[] b; //= new byte[chContext.channel().alloc().buffer().capacity()];
        // if(src.remaining()<b.length){
        b = new byte[src.remaining()];
        src.get(b);
        // request.replace(Unpooled.wrappedBuffer(b));
        if (src.hasRemaining())
            encoder.writeData(chContext, streamId, Unpooled.wrappedBuffer(b), 0, false, promise);
        else {
            encoder.writeData(chContext, streamId, Unpooled.wrappedBuffer(b), 0, true, promise);
            isComplete = true;
        }
    }
    // src has been fully drained above, so its position now sits at its limit
    return src.position();
}
From source file:io.warp10.continuum.gts.GTSDecoder.java
public static GTSDecoder fromBlock(byte[] block, byte[] key) throws IOException {
    if (block.length < 6) {
        throw new IOException("Invalid block.");
    }

    ByteBuffer buffer = ByteBuffer.wrap(block);

    //
    // Extract size
    //

    buffer.order(ByteOrder.BIG_ENDIAN);
    int size = buffer.getInt();

    // Check size
    if (block.length != size) {
        throw new IOException("Invalid block size, expected " + size + ", block is " + block.length);
    }

    // Extract compression
    byte comp = buffer.get();
    boolean compress = false;
    if (0 == comp) {
        compress = false;
    } else if (1 == comp) {
        compress = true;
    } else {
        throw new IOException("Invalid compression flag");
    }

    // Extract base timestamp
    long base = Varint.decodeSignedLong(buffer);

    InputStream in;

    ByteArrayInputStream bain = new ByteArrayInputStream(block, buffer.position(), buffer.remaining());

    if (compress) {
        in = new GZIPInputStream(bain);
    } else {
        in = bain;
    }

    byte[] buf = new byte[1024];
    ByteArrayOutputStream out = new ByteArrayOutputStream(buffer.remaining());

    while (true) {
        int len = in.read(buf);
        if (len <= 0) {
            break;
        }
        out.write(buf, 0, len);
    }

    GTSDecoder decoder = new GTSDecoder(base, key, ByteBuffer.wrap(out.toByteArray()));
    return decoder;
}
From source file:edu.uci.ics.hyracks.dataflow.std.sort.util.DeletableFrameTupleAppenderTest.java
ByteBuffer makeAFrame(int capacity, int count, int deletedBytes) throws HyracksDataException {
    ByteBuffer buffer = ByteBuffer.allocate(capacity);
    int metaOffset = capacity - 4;
    buffer.putInt(metaOffset, deletedBytes);
    metaOffset -= 4;
    buffer.putInt(metaOffset, count);
    metaOffset -= 4;
    for (int i = 0; i < count; i++, metaOffset -= 4) {
        makeARecord(builder, i);
        for (int x = 0; x < builder.getFieldEndOffsets().length; x++) {
            buffer.putInt(builder.getFieldEndOffsets()[x]);
        }
        buffer.put(builder.getByteArray(), 0, builder.getSize());
        assert (metaOffset > buffer.position());
        // putInt(index, value) is an absolute write and does not move the position;
        // the relative puts above do, so buffer.position() here marks where this record ends
        buffer.putInt(metaOffset, buffer.position());
    }
    return buffer;
}
From source file:me.carpela.network.pt.cracker.tools.ttorrent.Torrent.java
private static String hashFiles(List<File> files, int pieceLenght)
        throws InterruptedException, IOException, NoSuchAlgorithmException {
    int threads = getHashingThreadsCount();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    ByteBuffer buffer = ByteBuffer.allocate(pieceLenght);
    List<Future<String>> results = new LinkedList<Future<String>>();
    StringBuilder hashes = new StringBuilder();

    long length = 0L;
    int pieces = 0;

    long start = System.nanoTime();
    for (File file : files) {
        length += file.length();

        FileInputStream fis = new FileInputStream(file);
        FileChannel channel = fis.getChannel();
        int step = 10;

        try {
            while (channel.read(buffer) > 0) {
                if (buffer.remaining() == 0) {
                    buffer.clear();
                    results.add(executor.submit(new CallableChunkHasher(buffer)));
                }

                if (results.size() >= threads) {
                    pieces += accumulateHashes(hashes, results);
                }

                if (channel.position() / (double) channel.size() * 100f > step) {
                    step += 10;
                }
            }
        } finally {
            channel.close();
            fis.close();
        }
    }

    // Hash the last bit, if any: the buffer's position tells how many bytes of the
    // final (partial) piece were read, so limit/position are adjusted to expose
    // exactly that range before handing the buffer to the hasher.
    if (buffer.position() > 0) {
        buffer.limit(buffer.position());
        buffer.position(0);
        results.add(executor.submit(new CallableChunkHasher(buffer)));
    }

    pieces += accumulateHashes(hashes, results);

    // Request orderly executor shutdown and wait for hashing tasks to complete.
    executor.shutdown();
    while (!executor.isTerminated()) {
        Thread.sleep(10);
    }
    long elapsed = System.nanoTime() - start;

    int expectedPieces = (int) (Math.ceil((double) length / pieceLenght));
    return hashes.toString();
}
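The "hash the last bit" step above is the manual equivalent of flip(): setting the limit to the current position and the position back to zero exposes exactly the bytes of the final, partial piece. A standalone sketch of that idiom (independent of the Torrent class above):

import java.nio.ByteBuffer;

public class ManualFlipDemo {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(8);
        buffer.put(new byte[] { 1, 2, 3 });   // partial fill: position = 3, limit = 8

        buffer.limit(buffer.position());      // limit = 3, the number of bytes written
        buffer.position(0);                   // back to the start of the data

        byte[] lastPiece = new byte[buffer.remaining()];
        buffer.get(lastPiece);                // reads exactly the 3 bytes written above
        System.out.println(lastPiece.length); // prints 3
    }
}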
From source file:org.apache.cxf.transport.http.asyncclient.SharedOutputBuffer.java
public int write(ByteBuffer b) throws IOException {
    if (b == null) {
        return 0;
    }
    this.lock.lock();
    try {
        if (this.shutdown || this.endOfStream) {
            throw new IllegalStateException("Buffer already closed for writing");
        }
        setInputMode();
        if (!this.buffer.hasRemaining()) {
            flushContent();
            setInputMode();
        }
        // limit - position = bytes remaining in b, i.e. the count reported as written
        int c = b.limit() - b.position();
        largeWrapper = b;
        while (largeWrapper.hasRemaining()) {
            flushContent();
        }
        largeWrapper = null;
        return c;
    } finally {
        this.lock.unlock();
    }
}
From source file:com.tinspx.util.io.ChannelSourceTest.java
@Test
public void testByteBufferSource() throws IOException {
    int off = 443, len = 17167;
    ByteBuffer buf, direct;
    direct = ByteBuffer.allocateDirect(INPUT.length);
    assertTrue(direct.isDirect());
    direct.put(INPUT);
    byte[] sub = Arrays.copyOfRange(INPUT, off, off + len);

    //full input
    buf = ByteBuffer.wrap(INPUT);
    ByteSourceTests.testByteSource(ChannelSource.of(buf), INPUT);
    assertEquals(0, buf.position());
    assertEquals(INPUT.length, buf.limit());

    buf = ByteBuffer.wrap(INPUT).asReadOnlyBuffer();
    ByteSourceTests.testByteSource(ChannelSource.of(buf), INPUT);
    assertEquals(0, buf.position());
    assertEquals(INPUT.length, buf.limit());

    direct.clear();
    buf = direct;
    ByteSourceTests.testByteSource(ChannelSource.of(buf), INPUT);
    assertEquals(0, buf.position());
    assertEquals(INPUT.length, buf.limit());

    //sub range of input
    buf = ByteBuffer.wrap(INPUT);
    buf.clear().position(off).limit(off + len);
    ByteSourceTests.testByteSource(ChannelSource.of(buf), sub);
    assertEquals(off, buf.position());
    assertEquals(off + len, buf.limit());

    buf = ByteBuffer.wrap(INPUT).asReadOnlyBuffer();
    buf.clear().position(off).limit(off + len);
    ByteSourceTests.testByteSource(ChannelSource.of(buf), sub);
    assertEquals(off, buf.position());
    assertEquals(off + len, buf.limit());

    direct.clear();
    buf = direct;
    buf.clear().position(off).limit(off + len);
    ByteSourceTests.testByteSource(ChannelSource.of(buf), sub);
    assertEquals(off, buf.position());
    assertEquals(off + len, buf.limit());
}
From source file:net.beaconpe.jraklib.server.Session.java
private void handleSplit(EncapsulatedPacket packet) throws IOException {
    if (packet.splitCount >= 128) {
        return;
    }
    if (!splitPackets.containsKey(packet.splitID)) {
        Map<Integer, EncapsulatedPacket> map = new ConcurrentHashMap<>();
        map.put(packet.splitIndex, packet);
        splitPackets.put(packet.splitID, map);
    } else {
        Map<Integer, EncapsulatedPacket> map = splitPackets.get(packet.splitID);
        map.put(packet.splitIndex, packet);
        splitPackets.put(packet.splitID, map);
    }
    if (splitPackets.get(packet.splitID).values().size() == packet.splitCount) {
        EncapsulatedPacket pk = new EncapsulatedPacket();
        ByteBuffer bb = ByteBuffer.allocate(1024 * 1024);
        for (int i = 0; i < packet.splitCount; i++) {
            bb.put(splitPackets.get(packet.splitID).get(i).buffer);
        }
        // bb.position() is the number of bytes written so far, i.e. the length of the
        // reassembled payload inside the backing array
        pk.buffer = ArrayUtils.subarray(bb.array(), 0, bb.position());
        bb = null;
        pk.length = pk.buffer.length;
        splitPackets.remove(packet.splitID);
        handleEncapsulatedPacketRoute(pk);
    }
}
From source file:com.smartitengineering.util.simple.io.BufferedInputStream.java
@Override
public int read() throws IOException {
    ByteBuffer buffer = getCurrentBuffer();
    if (available() > 0) {
        return buffer.get();
    }
    int remaining = buffer.remaining();
    if (remaining <= 0) {
        if (hasNextBuffer()) {
            currentBuffer = nextBuffer();
            return read();
        } else if (eofReached) {
            return -1;
        } else {
            remaining = initializeNewBuffer();
            buffer = getCurrentBuffer();
        }
    }
    byte[] readBuffer = new byte[remaining];
    int read = wrappedStream.read(readBuffer);
    if (read > 0) {
        // Remember the current position, append the freshly read bytes, then restore
        // the position so the following get() returns the first newly buffered byte.
        int position = buffer.position();
        buffer.put(readBuffer, 0, read);
        buffer.position(position);
        get(buffer).add(read);
        return buffer.get();
    } else {
        eofReached = true;
        return -1;
    }
}
From source file:de.rwhq.btree.LeafNode.java
@Override
public List<V> get(final K key) {
    final List<V> result = new ArrayList<V>();

    final byte[] keyBuf = keySerializer.serialize(key);
    final byte[] tmpKeyBuf = new byte[keySerializer.getSerializedLength()];
    final byte[] tmpValBuf = new byte[valueSerializer.getSerializedLength()];

    final int pos = offsetOfKey(key);
    if (pos == NOT_FOUND)
        return result;

    final ByteBuffer buffer = rawPage().bufferForReading(pos);
    while (buffer.position() < offsetBehindLastEntry()) {
        buffer.get(tmpKeyBuf);
        if (Arrays.equals(tmpKeyBuf, keyBuf)) {
            buffer.get(tmpValBuf);
            result.add(valueSerializer.deserialize(tmpValBuf));
        }
    }
    return result;
}