List of usage examples for java.nio.ByteBuffer#position()
public final int position()
From source file:com.act.lcms.v2.fullindex.BuilderTest.java
@Test public void testAppendOrRealloc() throws Exception { ByteBuffer dest = ByteBuffer.allocate(4); assertEquals("Initial buffer capacity matches expected", 4, dest.capacity()); dest = Utils.appendOrRealloc(dest, ByteBuffer.wrap(new byte[] { 'a', 'b', 'c', 'd' })); // No need to flip w/ wrap(). assertEquals("Post-append (fits) buffer capacity matches expected", 4, dest.capacity()); assertEquals("Post-append (fits) buffer position matches expected", 4, dest.position()); dest = Utils.appendOrRealloc(dest, ByteBuffer.wrap(new byte[] { 'e' })); assertEquals("Post-append (too large) buffer capacity has doubled", 8, dest.capacity()); assertEquals("Post-append (too large) buffer position matches expected", 5, dest.position()); dest = Utils.appendOrRealloc(dest, ByteBuffer.wrap(new byte[] { 'f', 'g', 'h' })); assertEquals("Post-append (fits) buffer capacity matches expected", 8, dest.capacity()); assertEquals("Post-append (fits) buffer position matches expected", 8, dest.position()); dest = Utils.appendOrRealloc(dest, ByteBuffer.wrap(new byte[] { 'i' })); assertEquals("Post-append (too large) buffer capacity has doubled", 16, dest.capacity()); assertEquals("Post-append (too large) buffer position matches expected", 9, dest.position()); }
From source file:com.slytechs.capture.file.editor.FileEditorImpl.java
public void add(final ByteBuffer b, final long global) throws IOException { final long length = b.limit() - b.position(); // Create a partial loader for our cache memory buffer and do the insert final PartialLoader record = new MemoryCacheLoader(b, true, headerReader); this.edits.insert(global, length, record); this.autoflushChange(length); }
From source file:edu.umn.cs.spatialHadoop.indexing.BTRPartitioner.java
@Override
public void write(DataOutput out) throws IOException {
    // Serialize the MBR and the grid dimensions first.
    mbr.write(out);
    out.writeInt(columns);
    out.writeInt(rows);

    // Pack every split coordinate into a single buffer: 8 bytes per double.
    final int payloadSize = (xSplits.length + ySplits.length) * 8;
    ByteBuffer packed = ByteBuffer.allocate(payloadSize);
    for (double split : xSplits) {
        packed.putDouble(split);
    }
    for (double split : ySplits) {
        packed.putDouble(split);
    }

    // Sanity check: the buffer must be exactly full at this point.
    if (packed.hasRemaining()) {
        throw new RuntimeException("Did not calculate buffer size correctly");
    }
    out.write(packed.array(), packed.arrayOffset(), packed.position());
}
From source file:edu.uci.ics.hyracks.dataflow.std.sort.util.DeletableFrameTupleAppenderTest.java
@Test public void testReset() throws Exception { ByteBuffer buffer = ByteBuffer.allocate(cap); appender.reset(buffer);//from w ww . java2s . c o m assertTrue(appender.getBuffer() == buffer); assertTrue(appender.getTupleCount() == 0); assertTrue(appender.getContiguousFreeSpace() == cap - 4 - 4); int count = 10; int deleted = 7; buffer = makeAFrame(cap, count, deleted); int pos = buffer.position(); appender.reset(buffer); assertTrue(appender.getBuffer() == buffer); assertTrue(appender.getTupleCount() == count); assertTrue(appender.getContiguousFreeSpace() == cap - 4 - 4 - count * 4 - pos); assertTrue(appender.getTotalFreeSpace() == appender.getContiguousFreeSpace() + deleted); int dataOffset = 0; for (int i = 0; i < count; i++) { dataOffset += assertTupleIsExpected(i, dataOffset); } }
From source file:edu.umn.cs.spatialHadoop.nasa.HDFRasterLayer.java
@Override
public void write(DataOutput out) throws IOException {
    super.write(out);
    out.writeLong(timestamp);

    // Gzip-compress the raster payload into an in-memory array so that its
    // exact compressed length can be written ahead of the data.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    GZIPOutputStream gzip = new GZIPOutputStream(compressed);

    // Scratch buffer: sized for one column of (sum, count) long pairs, which
    // is also large enough for the 8-byte width/height header.
    ByteBuffer scratch = ByteBuffer.allocate(getHeight() * 2 * 8 + 8);
    scratch.putInt(getWidth());
    scratch.putInt(getHeight());
    gzip.write(scratch.array(), 0, scratch.position());

    // Emit the grid column by column, reusing the scratch buffer.
    for (int x = 0; x < getWidth(); x++) {
        scratch.clear();
        for (int y = 0; y < getHeight(); y++) {
            scratch.putLong(sum[x][y]);
            scratch.putLong(count[x][y]);
        }
        gzip.write(scratch.array(), 0, scratch.position());
    }
    gzip.close();

    // Length-prefix the compressed block so a reader knows how much to consume.
    byte[] payload = compressed.toByteArray();
    out.writeInt(payload.length);
    out.write(payload);
}
From source file:de.csdev.ebus.command.EBusCommandUtils.java
/** * @param commandChannel//from www . j ava2s. c o m * @return */ public static ByteBuffer getMasterTelegramMask(IEBusCommandMethod commandChannel) { // byte len = 0; ByteBuffer buf = ByteBuffer.allocate(50); buf.put(commandChannel.getSourceAddress() == null ? (byte) 0x00 : (byte) 0xFF); // QQ - Source buf.put(commandChannel.getDestinationAddress() == null ? (byte) 0x00 : (byte) 0xFF); // ZZ - Target buf.put(new byte[] { (byte) 0xFF, (byte) 0xFF }); // PB SB - Command buf.put((byte) 0xFF); // NN - Length if (commandChannel.getMasterTypes() != null) { for (IEBusValue entry : commandChannel.getMasterTypes()) { IEBusType<?> type = entry.getType(); if (entry.getName() == null && type instanceof EBusTypeBytes && entry.getDefaultValue() != null) { for (int i = 0; i < type.getTypeLength(); i++) { buf.put((byte) 0xFF); } } else { for (int i = 0; i < type.getTypeLength(); i++) { buf.put((byte) 0x00); } } } } buf.put((byte) 0x00); // Master CRC // set limit and reset position buf.limit(buf.position()); buf.position(0); return buf; }
From source file:cn.iie.haiep.hbase.value.Bytes.java
/**
 * Copies the bytes between the buffer's current position and its limit into a
 * fresh array, leaving the buffer's position untouched.
 *
 * @param buf source buffer; its position, limit, and mark are not modified
 * @return a new array holding the remaining bytes of {@code buf}
 */
public static byte[] getBytes(ByteBuffer buf) {
    // Read through an independent view of the same content so the original
    // buffer's cursor never moves.
    ByteBuffer view = buf.duplicate();
    byte[] copy = new byte[view.remaining()];
    view.get(copy);
    return copy;
}
From source file:edu.tsinghua.lumaqq.qq.packets.Packet.java
/**
 * Parses a packet from the readable region (position..limit) of the given
 * buffer.
 *
 * @param buf  buffer positioned at the start of the packet data
 * @param user the QQ user context used while parsing
 * @throws PacketParseException if the buffer does not contain a valid packet
 */
protected Packet(ByteBuffer buf, QQUser user) throws PacketParseException {
    // remaining() == limit() - position(): the number of readable bytes.
    this(buf, buf.remaining(), user);
}
From source file:com.talis.storage.s3.S3StoreTest.java
@Override protected Store getStore() throws Exception { bucketname = UUID.randomUUID().toString(); Provider<S3Service> serviceProvider = initialiseServiceProvider(); final S3Object stubObject = new S3Object("foo"); objectFactory = new S3ObjectFactory(bucketname) { @Override// w w w .j av a2 s . c om public ExternalizableS3Object newObject(String key, int index, MediaType mediaType, ByteBuffer buffer) throws IOException { ExternalizableS3Object obj = new ExternalizableS3Object(); obj.setKey(key + "/" + index); obj.setContentType(S3Store.TMB_CHUNK_TYPE.toString()); obj.addMetadata(S3Store.ACTUAL_CONTENT_TYPE_HEADER, mediaType.toString()); byte[] bytes = new byte[buffer.position()]; buffer.rewind(); buffer.get(bytes); obj.setDataInputStream(new ByteArrayInputStream(bytes)); return obj; } }; chunkHandler = new StubChunkHandler(); return new S3Store(objectFactory, chunkHandler); }