List of usage examples for java.nio ByteBuffer put
public ByteBuffer put(byte[] src, int off, int len)
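The bulk put(byte[] src, int off, int len) transfers len bytes from src, starting at src[off], into the buffer at its current position and advances the position by len. It throws BufferOverflowException if fewer than len bytes remain in the buffer and IndexOutOfBoundsException if off and len do not describe a valid range of src. A minimal standalone sketch (class and variable names are illustrative, not from any of the examples below):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class BulkPutExample {
    public static void main(String[] args) {
        byte[] src = "hello, bytebuffer".getBytes(StandardCharsets.US_ASCII);

        ByteBuffer buf = ByteBuffer.allocate(5);
        buf.put(src, 7, 5);          // copy src[7..11] ("byteb") into the buffer
        buf.flip();                  // switch from filling to draining

        byte[] out = new byte[buf.remaining()];
        buf.get(out);
        System.out.println(new String(out, StandardCharsets.US_ASCII)); // prints "byteb"
    }
}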
From source file: eu.europa.esig.dss.DSSUtils.java

public static long toLong(final byte[] bytes) {
    // Long.valueOf(new String(bytes)).longValue();
    ByteBuffer buffer = ByteBuffer.allocate(8);
    buffer.put(bytes, 0, Long.SIZE / 8);
    // TODO: (Bob: 2014 Jan 22) To be checked if it is not platform dependent?
    buffer.flip(); // need flip before reading back
    return buffer.getLong();
}
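On the TODO above: a freshly allocated ByteBuffer always defaults to big-endian (network) byte order regardless of the platform, so getLong() here is deterministic. If the byte order needs to be explicit, or little-endian input must be supported, it can be set before reading. A minimal variant sketch, with an extra order parameter that is purely illustrative:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public final class ByteOrderSafeToLong {
    // Variant of toLong with the byte order made explicit.
    public static long toLong(final byte[] bytes, final ByteOrder order) {
        ByteBuffer buffer = ByteBuffer.allocate(Long.BYTES);
        buffer.order(order);                 // the default would be BIG_ENDIAN anyway
        buffer.put(bytes, 0, Long.BYTES);    // requires bytes.length >= 8
        buffer.flip();
        return buffer.getLong();
    }
}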
From source file: experts.net.ip6.ULUA.java

/**
 * Generate Global ID according to RFC 4193 Section 3.2.2.
 *
 * @param timeStamp
 *            64-bit NTP format
 */
public final void generateGlobalID(long timeStamp) {
    ByteBuffer buf = ByteBuffer.allocate(16);
    buf.putLong(timeStamp);
    interfaceID.forEach(buf::putShort);

    byte[] digest = DigestUtils.sha1(buf.array());

    buf = ByteBuffer.allocate(6);
    buf.put(GLOBAL_ID_PREFIX).put(digest, 15, 5);

    globalID = toList(buf);
}
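A note on the slice: DigestUtils.sha1 returns a 20-byte digest, so put(digest, 15, 5) copies its last five bytes, i.e. the least significant 40 bits of the hash that RFC 4193 Section 3.2.2 uses as the Global ID. Together with the GLOBAL_ID_PREFIX byte they fill the 6-byte (48-bit) buffer that becomes the ULA prefix.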
From source file: com.openteach.diamond.network.waverider.network.Packet.java

/**
 * Parse one Packet from the network input buffer.
 *
 * @param inputBuffer
 * @return
 * @throws IOException, InterruptedException
 */
public static Packet parse(BlockingQueue<ByteBuffer> inputBuffer, NetWorkEndPoint endPoint,
        SocketChannel channel) throws IOException, InterruptedException {
    // Buffer for packet header
    byte[] tmpBuf = new byte[NetWorkConstants.DEFAULT_NETWORK_BUFFER_SIZE];
    ByteBuffer header = ByteBuffer.allocate(Packet.getHeaderSize());
    ByteBuffer currentBuffer = null;
    int rest = 0;
    boolean isRemove = false;

    // Accumulate the fixed-size header
    while (true) {
        while ((currentBuffer = inputBuffer.peek()) == null) {
            if (!endPoint.notifyRead(channel)) {
                throw new IOException("Socket closed by other thread");
            }
            // Wait for more data to arrive
            //endPoint.waitMoreData(5);
            // FIXME 2ms
            //Thread.sleep(1);
            Thread.yield();
        }
        isRemove = false;
        rest = header.capacity() - header.position();
        if (currentBuffer.remaining() >= rest) {
            if (currentBuffer.remaining() == rest) {
                isRemove = true;
            }
            currentBuffer.get(tmpBuf, 0, rest);
            header.put(tmpBuf, 0, rest);
            if (isRemove) {
                inputBuffer.remove();
            }
            break;
        } else {
            header.put(currentBuffer);
            inputBuffer.remove();
        }
    }
    header.flip();

    // Read the total packet length from the header and allocate the full packet buffer
    Integer size = header.getInt(Packet.getLengthPosition());
    // For test
    /*if(size < 0 || size > 100000) {
        logger.info("Error");
    }*/
    //logger.debug(new StringBuilder("Try to allocate ").append(size).append(" bytes memory"));
    ByteBuffer buffer = ByteBuffer.allocate(size);
    buffer.put(header);
    header.clear();

    // Accumulate the packet body
    while (true) {
        while ((currentBuffer = inputBuffer.peek()) == null) {
            endPoint.notifyRead(channel);
            Thread.sleep(1000);
        }
        isRemove = false;
        rest = buffer.capacity() - buffer.position();
        if (currentBuffer.remaining() >= rest) {
            if (currentBuffer.remaining() == rest) {
                isRemove = true;
            }
            currentBuffer.get(tmpBuf, 0, rest);
            buffer.put(tmpBuf, 0, rest);
            if (isRemove) {
                inputBuffer.remove();
            }
            break;
        } else {
            buffer.put(currentBuffer);
            inputBuffer.remove();
        }
    }
    //buffer.position(0);
    buffer.flip();
    Packet packet = Packet.unmarshall(buffer);
    //logger.info("Parse one packet from network");
    //packet.dump();
    return packet;
}
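The method implements simple length-prefixed framing over a queue of ByteBuffers: the first loop accumulates exactly Packet.getHeaderSize() bytes into the header buffer, the total packet length is then read out of the header at Packet.getLengthPosition(), a buffer of that size is allocated and seeded with the header bytes, and the second loop keeps draining the queue until the body is complete before unmarshalling the Packet.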
From source file: cn.ac.ncic.mastiff.io.coding.RedBlackTreeStringReader.java

@Override
public byte[] ensureDecompressed() throws IOException {
    DataOutputBuffer transfer = new DataOutputBuffer();
    transfer.write(inBuf.getData(), 12, inBuf.getLength() - 12);
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(transfer.getData(), 0, transfer.getLength());
    int dictionarySize = dib.readInt();
    int length1 = dib.readInt();
    byte[] data = transfer.getData();
    transfer.close();
    dib.reset(data, Integer.SIZE + Integer.SIZE, length1);
    FlexibleEncoding.ORC.StreamName name = new FlexibleEncoding.ORC.StreamName(0,
            OrcProto.Stream.Kind.DICTIONARY_DATA);
    ByteBuffer inBuf1 = ByteBuffer.allocate(length1);
    inBuf1.put(dib.getData(), 0, dib.getLength());
    inBuf1.flip();
    InStream in = InStream.create("test1", inBuf1, null, dictionarySize);
    if (in.available() > 0) {
        dictionaryBuffer = new DynamicByteArray(64, in.available());
        dictionaryBuffer.readAll(in);
        in.close();
        // read the lengths google proto buffer
        name = new StreamName(1, OrcProto.Stream.Kind.LENGTH);
        dib.reset(data, 4 + 4 + length1, 4);
        int length2 = dib.readInt();
        dib.reset(data, 4 + 4 + length1 + 4, length2);
        // in = streams.get(name);
        ByteBuffer inBuf2 = ByteBuffer.allocate(length2);
        inBuf2.put(dib.getData(), 0, length2);
        inBuf2.flip();
        in = InStream.create("test2", inBuf2, null, dictionarySize);
        // IntegerReader lenReader = createIntegerReader(encodings.get(columnId)
        //     .getKind(), in, false);
        IntegerReader lenReader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
        int offset = 0;
        dictionaryOffsets = new int[dictionarySize + 1];
        for (int i = 0; i < dictionarySize; ++i) {
            dictionaryOffsets[i] = offset;
            offset += (int) lenReader.next();
        }
        dictionaryOffsets[dictionarySize] = offset;
        in.close();
        name = new FlexibleEncoding.ORC.StreamName(2, OrcProto.Stream.Kind.DATA);
        dib.reset(data, 4 + 4 + length1 + 4 + length2, 4);
        int length3 = dib.readInt();
        dib.reset(data, 4 + 4 + length1 + 4 + length2 + 4, length3);
        ByteBuffer inBuf3 = ByteBuffer.allocate(length3);
        inBuf3.put(dib.getData(), 0, length3);
        inBuf3.flip();
        in = InStream.create("test3", inBuf3, null, dictionarySize);
        reader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
    }
    inBuf.close();
    DataOutputBuffer decoding = new DataOutputBuffer();
    DataOutputBuffer offsets = new DataOutputBuffer();
    decoding.writeInt(decompressedSize);
    decoding.writeInt(numPairs);
    decoding.writeInt(startPos);
    int dataoffset = 12;
    String str;
    for (int i = 0; i < numPairs; i++) {
        str = readEachValue(null);
        decoding.writeUTF(str);
        // if(i<5){
        //     System.out.println("304 bin[i] "+str+" decoding "+ decoding.size());
        // }
        dataoffset = decoding.size();
        offsets.writeInt(dataoffset);
    }
    System.out.println("315 offset.size() " + offsets.size() + " decoding.szie " + decoding.size());
    System.out.println("316 dataoffet " + dataoffset);
    decoding.write(offsets.getData(), 0, offsets.size());
    inBuf.close();
    offsets.close();
    dib.close();
    System.out.println("316 decoding " + decoding.size() + decoding.getLength() + " decoding.getData() "
            + decoding.getData().length);
    inBuf1.clear();
    return decoding.getData();
}
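At a high level this decodes a dictionary-encoded string column: the dictionary bytes, a lengths stream (used to rebuild dictionaryOffsets) and a data stream are each copied into their own ByteBuffer via put(byte[], int, int) and wrapped in an InStream; the method then re-emits the numPairs decoded values with writeUTF and appends an int offset table to the output buffer.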
From source file: com.intel.chimera.stream.AbstractCryptoStreamTest.java

private void doByteBufferWrite(String cipherClass, ByteArrayOutputStream baos, boolean withChannel)
        throws Exception {
    baos.reset();
    CryptoOutputStream out = getCryptoOutputStream(baos, getCipher(cipherClass), defaultBufferSize, iv,
            withChannel);
    ByteBuffer buf = ByteBuffer.allocateDirect(dataLen / 2);
    buf.put(data, 0, dataLen / 2);
    buf.flip();
    int n1 = out.write(buf);

    buf.clear();
    buf.put(data, n1, dataLen / 3);
    buf.flip();
    int n2 = out.write(buf);

    buf.clear();
    buf.put(data, n1 + n2, dataLen - n1 - n2);
    buf.flip();
    int n3 = out.write(buf);

    Assert.assertEquals(dataLen, n1 + n2 + n3);
    out.flush();

    InputStream in = getCryptoInputStream(new ByteArrayInputStream(encData), getCipher(cipherClass),
            defaultBufferSize, iv, withChannel);
    buf = ByteBuffer.allocate(dataLen + 100);
    byteBufferReadCheck(in, buf, 0);
    in.close();
}
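The test above stages successive slices of a byte array through one direct buffer using the put -> flip -> write -> clear cycle. A minimal standalone sketch of that same cycle, here writing an arbitrary byte[] to a WritableByteChannel in fixed-size chunks (the class name, channel and chunk size are illustrative, not part of the test):

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;

public final class ChunkedWriter {
    public static void writeInChunks(byte[] data, WritableByteChannel channel, int chunkSize)
            throws IOException {
        ByteBuffer buf = ByteBuffer.allocateDirect(chunkSize);
        int written = 0;
        while (written < data.length) {
            int len = Math.min(chunkSize, data.length - written);
            buf.clear();                     // reset position/limit for the next chunk
            buf.put(data, written, len);     // stage the next slice of the array
            buf.flip();                      // switch from filling to draining
            while (buf.hasRemaining()) {     // a channel may write less than requested
                channel.write(buf);
            }
            written += len;
        }
    }
}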
From source file: de.hpi.fgis.hdrs.Triple.java

public static void writeTripleData(ByteBuffer out, Triple t) {
    if (0 < t.getSubjectLength())
        out.put(t.getBuffer(), t.getOffset(), t.getSubjectLength());
    if (0 < t.getPredicateLength())
        out.put(t.getBuffer(), t.getPredicateOffset(), t.getPredicateLength());
    if (0 < t.getObjectLength())
        out.put(t.getBuffer(), t.getObjectOffset(), t.getObjectLength());
}
From source file: com.rs.worldserver.io.IOClient.java

private void directFlushOutStream() throws java.io.IOException {
    ByteBuffer buf = ByteBuffer.allocate(outStream.currentOffset);
    buf.put(outStream.buffer, 0, outStream.currentOffset);
    buf.flip();
    Server.getIoThread().writeReq(Server.getIoThread().socketFor(this), buf);
    outStream.currentOffset = 0; // reset the output stream
}
From source file: guru.benson.pinch.Pinch.java

/**
 * Get a {@link java.net.HttpURLConnection} that has its {@link java.io.InputStream} pointing at
 * the file data of the given {@link guru.benson.pinch.ExtendedZipEntry}.
 *
 * @throws IOException
 */
private HttpURLConnection getEntryInputStream(ExtendedZipEntry entry) throws IOException {
    HttpURLConnection conn;
    InputStream is;

    // Define the local header range
    long start = entry.getOffset();
    long end = start + ZipConstants.LOCHDR;

    conn = openConnection();
    conn.setRequestProperty("Range", "bytes=" + start + "-" + end);
    conn.setInstanceFollowRedirects(true);
    conn.connect();

    int responseCode = conn.getResponseCode();
    if (responseCode != HttpURLConnection.HTTP_PARTIAL) {
        throw new IOException("Unexpected HTTP server response: " + responseCode);
    }

    byte[] dataBuffer = new byte[2048];
    int read, bytes = 0;

    is = conn.getInputStream();
    while ((read = is.read(dataBuffer)) != -1) {
        bytes += read;
    }
    close(is);
    disconnect(conn);

    if (bytes < ZipConstants.LOCHDR) {
        throw new IOException("Unable to fetch the local header");
    }

    ByteBuffer buffer = ByteBuffer.allocate(ZipConstants.LOCHDR);
    buffer.order(ByteOrder.LITTLE_ENDIAN);
    buffer.put(dataBuffer, 0, ZipConstants.LOCHDR);

    final int headerSignature = buffer.getInt(0);
    if (headerSignature != 0x04034b50) {
        disconnect(conn);
        throw new IOException("Local file header signature mismatch");
    }

    final int localCompressedSize = buffer.getInt(ZipConstants.LOCSIZ);
    final short localFileNameLength = buffer.getShort(ZipConstants.LOCNAM);
    final short localExtraLength = buffer.getShort(ZipConstants.LOCEXT);

    // Define the local file range
    start = entry.getOffset() + ZipConstants.LOCHDR + localFileNameLength + localExtraLength;
    end = start + localCompressedSize;

    // Open a new connection with a Range header covering the entry's file data
    conn = openConnection();
    conn.setRequestProperty("Range", "bytes=" + start + "-" + end);
    conn.setInstanceFollowRedirects(true);
    conn.connect();

    responseCode = conn.getResponseCode();
    if (responseCode != HttpURLConnection.HTTP_PARTIAL) {
        disconnect(conn);
        close(is);
        throw new IOException("Unexpected HTTP server response: " + responseCode);
    }

    return conn;
}
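ZIP metadata is stored little-endian, which is why the buffer is switched to ByteOrder.LITTLE_ENDIAN before parsing: 0x04034b50 is the local file header signature ("PK\3\4"), and LOCSIZ, LOCNAM and LOCEXT are the standard offsets of the compressed size, file name length and extra field length within that header. Those three fields locate the entry's actual file data for the second ranged request.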
From source file: edu.uci.ics.hyracks.dataflow.std.sort.util.DeletableFrameTupleAppenderTest.java

ByteBuffer makeAFrame(int capacity, int count, int deletedBytes) throws HyracksDataException {
    ByteBuffer buffer = ByteBuffer.allocate(capacity);
    int metaOffset = capacity - 4;
    buffer.putInt(metaOffset, deletedBytes);
    metaOffset -= 4;
    buffer.putInt(metaOffset, count);
    metaOffset -= 4;
    for (int i = 0; i < count; i++, metaOffset -= 4) {
        makeARecord(builder, i);
        for (int x = 0; x < builder.getFieldEndOffsets().length; x++) {
            buffer.putInt(builder.getFieldEndOffsets()[x]);
        }
        buffer.put(builder.getByteArray(), 0, builder.getSize());
        assert (metaOffset > buffer.position());
        buffer.putInt(metaOffset, buffer.position());
    }
    return buffer;
}
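The frame built above keeps tuple data growing forward from position 0 and a small metadata directory growing backward from the end of the buffer: the deleted-byte count sits at capacity - 4, the tuple count at capacity - 8, and the end offset of tuple i at capacity - 12 - 4 * i. A minimal sketch of walking that directory back out, assuming the same layout (the method name is illustrative):

// Hypothetical reader for the frame layout produced by makeAFrame above.
static void dumpFrame(ByteBuffer frame) {
    int capacity = frame.capacity();
    int deletedBytes = frame.getInt(capacity - 4);
    int count = frame.getInt(capacity - 8);
    System.out.println("tuples=" + count + ", deletedBytes=" + deletedBytes);
    int recordStart = 0;
    for (int i = 0; i < count; i++) {
        int recordEnd = frame.getInt(capacity - 12 - 4 * i);   // absolute end offset of tuple i
        System.out.println("tuple " + i + " occupies [" + recordStart + ", " + recordEnd + ")");
        recordStart = recordEnd;
    }
}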
From source file: de.hpi.fgis.hdrs.Triple.java

public static void writeTriple(ByteBuffer buffer, Triple t) {
    buffer.putShort(t.getSubjectLength());
    buffer.putShort(t.getPredicateLength());
    buffer.putInt(t.getObjectLength());
    buffer.putInt(t.getMultiplicity());
    if (0 < t.getSubjectLength())
        buffer.put(t.getBuffer(), t.getOffset(), t.getSubjectLength());
    if (0 < t.getPredicateLength())
        buffer.put(t.getBuffer(), t.getPredicateOffset(), t.getPredicateLength());
    if (0 < t.getObjectLength())
        buffer.put(t.getBuffer(), t.getObjectOffset(), t.getObjectLength());
}