List of usage examples for io.netty.buffer ByteBuf getByte
public abstract byte getByte(int index);
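getByte performs an absolute, non-consuming read: it returns the byte at the given index without moving readerIndex, and it throws IndexOutOfBoundsException when the index lies outside the buffer's capacity. A minimal sketch of the peek-versus-read distinction (the buffer contents are made up for illustration):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class GetBytePeek {
    public static void main(String[] args) {
        ByteBuf buf = Unpooled.buffer();
        buf.writeBytes(new byte[] { 0x02, 0x41, 0x42 });

        byte peeked = buf.getByte(0);            // absolute access: readerIndex is untouched
        System.out.println(peeked);              // 2
        System.out.println(buf.readerIndex());   // 0

        byte consumed = buf.readByte();          // relative access: readerIndex advances
        System.out.println(consumed);            // 2
        System.out.println(buf.readerIndex());   // 1

        buf.release();                           // ByteBuf is reference counted
    }
}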
From source file:org.openremote.agent.protocol.velbus.VelbusPacketEncoderDecoder.java
License:Open Source License
public static void decode(ByteBuf buf, List<VelbusPacket> messages) {
    int startIndex = buf.indexOf(0, buf.capacity() - 1, VelbusPacket.STX);

    if (startIndex < 0) {
        return;
    }

    if (startIndex > 0) {
        buf.readerIndex(startIndex);
        buf.discardReadBytes();
    }

    if (buf.readableBytes() < 4) {
        return;
    }

    int dataSize = buf.getByte(3);

    if (buf.readableBytes() < 6 + dataSize) {
        return;
    }

    // Find end of packet
    int endIndex = buf.indexOf(4 + dataSize, MAX_PACKET_SIZE, VelbusPacket.ETX);

    if (endIndex < 0) {
        if (buf.readableBytes() > MAX_PACKET_SIZE) {
            buf.readerIndex(MAX_PACKET_SIZE);
            buf.discardReadBytes();
        }
        return;
    }

    byte[] packetBytes = new byte[endIndex + 1];
    buf.readBytes(packetBytes);
    buf.discardReadBytes();

    VelbusPacket packet = new VelbusPacket(packetBytes);

    if (packet.isValid()) {
        messages.add(packet);
    }
}
From source file:org.proton.plug.handler.impl.ProtonHandlerImpl.java
License:Apache License
@Override
public void inputBuffer(ByteBuf buffer) {
    dataReceived = true;
    synchronized (lock) {
        while (buffer.readableBytes() > 0) {
            int capacity = transport.capacity();

            if (!receivedFirstPacket) {
                try {
                    if (buffer.getByte(4) == 0x03) {
                        dispatchSASL();
                    }
                } catch (Throwable ignored) {
                    ignored.printStackTrace();
                }
                receivedFirstPacket = true;
            }

            if (capacity > 0) {
                ByteBuffer tail = transport.tail();
                int min = Math.min(capacity, buffer.readableBytes());
                tail.limit(min);
                buffer.readBytes(tail);
                flush();
            } else {
                if (capacity == 0) {
                    System.out.println("abandoning: " + buffer.readableBytes());
                } else {
                    System.out.println("transport closed, discarding: " + buffer.readableBytes()
                            + " capacity = " + transport.capacity());
                }
                break;
            }
        }
    }
}
From source file:org.restcomm.media.rtp.netty.RtpDemultiplexer.java
License:Open Source License
@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception {
    // Differentiate between RTP, STUN and DTLS packets in the pipeline
    // https://tools.ietf.org/html/rfc5764#section-5.1.2
    final int offset = msg.arrayOffset();
    final byte b0 = msg.getByte(offset);
    final int b0Int = b0 & 0xff;

    if (b0Int < 2) {
        handleStunPacket(ctx, msg);
    } else if (b0Int > 19 && b0Int < 64) {
        handleDtlsPacket(ctx, msg);
    } else if (b0Int > 127 && b0Int < 192) {
        handleRtpPacket(ctx, msg);
    } else {
        // Unsupported packet type. Drop it.
        ReferenceCountUtil.release(msg);
        if (log.isDebugEnabled()) {
            log.debug("Channel " + ctx.channel().localAddress() + " dropped unsupported packet type " + b0Int);
        }
    }
}
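A detail the example above relies on: getByte returns Java's signed byte, so the result is masked with & 0xff before it is compared against the unsigned ranges from RFC 5764. getUnsignedByte(index) gives the same value already widened. A small sketch of the equivalence (the buffer contents are made up for illustration):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class UnsignedPeek {
    public static void main(String[] args) {
        ByteBuf buf = Unpooled.wrappedBuffer(new byte[] { (byte) 0x80 });

        int masked = buf.getByte(0) & 0xff;      // getByte returns -128; masking yields 128
        short unsigned = buf.getUnsignedByte(0); // same octet as an unsigned value in [0, 255]

        System.out.println(masked);              // 128
        System.out.println(masked == unsigned);  // true
        buf.release();
    }
}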
From source file:org.restcomm.media.rtp.netty.RtpDemultiplexer.java
License:Open Source License
private void handleRtpPacket(ChannelHandlerContext ctx, ByteBuf buffer) {
    // Retrieve data from network
    final int offset = buffer.arrayOffset();

    /*
     * When RTP and RTCP packets are multiplexed onto a single port, the RTCP packet type field occupies the same
     * position in the packet as the combination of the RTP marker (M) bit and the RTP payload type (PT). This field
     * can be used to distinguish RTP and RTCP packets when two restrictions are observed:
     *
     * 1) the RTP payload type values used are distinct from the RTCP packet types used.
     *
     * 2) for each RTP payload type (PT), PT+128 is distinct from the RTCP packet types used. The first constraint
     * precludes a direct conflict between RTP payload type and RTCP packet type; the second constraint precludes a
     * conflict between an RTP data packet with the marker bit set and an RTCP packet.
     */
    int type = buffer.getByte(offset + 1) & 0xff & 0x7f;
    int rtcpType = type + 128;

    // RTP payload types 72-76 conflict with the RTCP SR, RR, SDES, BYE and APP packets defined in the RTP specification
    switch (rtcpType) {
        case RtcpHeader.RTCP_SR:
        case RtcpHeader.RTCP_RR:
        case RtcpHeader.RTCP_SDES:
        case RtcpHeader.RTCP_BYE:
        case RtcpHeader.RTCP_APP:
            RtcpPacket rtcpPacket = buildRtcpPacket(buffer);
            ctx.fireChannelRead(rtcpPacket);
            break; // RTCP handled; do not fall through and emit the same buffer as RTP as well
        default:
            RtpPacket rtpPacket = buildRtpPacket(buffer);
            ctx.fireChannelRead(rtpPacket);
            break;
    }
}
From source file:org.springframework.cloud.gateway.rsocket.support.Metadata.java
License:Apache License
private static String decodeString(ByteBuf byteBuf, AtomicInteger offset) {
    int length = byteBuf.getByte(offset.get());
    int index = offset.addAndGet(Byte.BYTES);
    String s = byteBuf.toString(index, length, StandardCharsets.UTF_8);
    offset.addAndGet(length);
    return s;
}
From source file:org.springframework.core.codec.support.JsonObjectDecoder.java
License:Apache License
@Override
public Flux<DataBuffer> decode(Publisher<DataBuffer> inputStream, ResolvableType type, MimeType mimeType,
        Object... hints) {

    return Flux.from(inputStream).flatMap(new Function<DataBuffer, Publisher<? extends DataBuffer>>() {

        int openBraces;
        int index;
        int state;
        boolean insideString;
        ByteBuf input;
        Integer writerIndex;

        @Override
        public Publisher<? extends DataBuffer> apply(DataBuffer b) {
            List<DataBuffer> chunks = new ArrayList<>();

            if (this.input == null) {
                this.input = Unpooled.copiedBuffer(b.asByteBuffer());
                this.writerIndex = this.input.writerIndex();
            } else {
                this.input = Unpooled.copiedBuffer(this.input, Unpooled.copiedBuffer(b.asByteBuffer()));
                this.writerIndex = this.input.writerIndex();
            }

            if (this.state == ST_CORRUPTED) {
                this.input.skipBytes(this.input.readableBytes());
                return Flux.error(new IllegalStateException("Corrupted stream"));
            }

            if (this.writerIndex > maxObjectLength) {
                // buffer size exceeded maxObjectLength; discarding the complete buffer.
                this.input.skipBytes(this.input.readableBytes());
                reset();
                return Flux.error(new IllegalStateException("object length exceeds " + maxObjectLength + ": "
                        + this.writerIndex + " bytes discarded"));
            }

            for (/* use current index */; this.index < this.writerIndex; this.index++) {
                byte c = this.input.getByte(this.index);
                if (this.state == ST_DECODING_NORMAL) {
                    decodeByte(c, this.input, this.index);

                    // All opening braces/brackets have been closed. That's enough to conclude
                    // that the JSON object/array is complete.
                    if (this.openBraces == 0) {
                        ByteBuf json = extractObject(this.input, this.input.readerIndex(),
                                this.index + 1 - this.input.readerIndex());
                        if (json != null) {
                            chunks.add(allocator.wrap(json.nioBuffer()));
                        }

                        // The JSON object/array was extracted => discard the bytes from
                        // the input buffer.
                        this.input.readerIndex(this.index + 1);
                        // Reset the object state to get ready for the next JSON object/text
                        // coming along the byte stream.
                        reset();
                    }
                } else if (this.state == ST_DECODING_ARRAY_STREAM) {
                    decodeByte(c, this.input, this.index);

                    if (!this.insideString
                            && (this.openBraces == 1 && c == ',' || this.openBraces == 0 && c == ']')) {
                        // skip leading spaces. No range check is needed and the loop will terminate
                        // because the byte at position index is not a whitespace.
                        for (int i = this.input.readerIndex(); Character.isWhitespace(this.input.getByte(i)); i++) {
                            this.input.skipBytes(1);
                        }

                        // skip trailing spaces.
                        int idxNoSpaces = this.index - 1;
                        while (idxNoSpaces >= this.input.readerIndex()
                                && Character.isWhitespace(this.input.getByte(idxNoSpaces))) {
                            idxNoSpaces--;
                        }

                        ByteBuf json = extractObject(this.input, this.input.readerIndex(),
                                idxNoSpaces + 1 - this.input.readerIndex());
                        if (json != null) {
                            chunks.add(allocator.wrap(json.nioBuffer()));
                        }

                        this.input.readerIndex(this.index + 1);

                        if (c == ']') {
                            reset();
                        }
                    }
                }
                // JSON object/array detected. Accumulate bytes until all braces/brackets are closed.
                else if (c == '{' || c == '[') {
                    initDecoding(c, streamArrayElements);

                    if (this.state == ST_DECODING_ARRAY_STREAM) {
                        // Discard the array bracket
                        this.input.skipBytes(1);
                    }
                }
                // Discard leading spaces in front of a JSON object/array.
                else if (Character.isWhitespace(c)) {
                    this.input.skipBytes(1);
                } else {
                    this.state = ST_CORRUPTED;
                    return Flux.error(new IllegalStateException("invalid JSON received at byte position "
                            + this.index + ": " + ByteBufUtil.hexDump(this.input)));
                }
            }

            if (this.input.readableBytes() == 0) {
                this.index = 0;
            }
            return Flux.fromIterable(chunks);
        }

        /**
         * Override this method if you want to filter the json objects/arrays that
         * get passed through the pipeline.
         */
        @SuppressWarnings("UnusedParameters")
        protected ByteBuf extractObject(ByteBuf buffer, int index, int length) {
            return buffer.slice(index, length).retain();
        }

        private void decodeByte(byte c, ByteBuf input, int index) {
            if ((c == '{' || c == '[') && !this.insideString) {
                this.openBraces++;
            } else if ((c == '}' || c == ']') && !this.insideString) {
                this.openBraces--;
            } else if (c == '"') {
                // start of a new JSON string. It's necessary to detect strings as they may
                // also contain braces/brackets and that could lead to incorrect results.
                if (!this.insideString) {
                    this.insideString = true;
                }
                // If the double quote wasn't escaped then this is the end of a string.
                else if (input.getByte(index - 1) != '\\') {
                    this.insideString = false;
                }
            }
        }

        private void initDecoding(byte openingBrace, boolean streamArrayElements) {
            this.openBraces = 1;
            if (openingBrace == '[' && streamArrayElements) {
                this.state = ST_DECODING_ARRAY_STREAM;
            } else {
                this.state = ST_DECODING_NORMAL;
            }
        }

        private void reset() {
            this.insideString = false;
            this.state = ST_INIT;
            this.openBraces = 0;
        }
    });
}
From source file:org.springframework.http.codec.json.JsonObjectDecoder.java
License:Apache License
@Override
public Flux<DataBuffer> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType, MimeType mimeType,
        Map<String, Object> hints) {

    return Flux.from(inputStream).flatMap(new Function<DataBuffer, Publisher<? extends DataBuffer>>() {

        int openBraces;
        int index;
        int state;
        boolean insideString;
        ByteBuf input;
        Integer writerIndex;

        @Override
        public Publisher<? extends DataBuffer> apply(DataBuffer buffer) {
            List<DataBuffer> chunks = new ArrayList<>();

            if (this.input == null) {
                this.input = Unpooled.copiedBuffer(buffer.asByteBuffer());
                DataBufferUtils.release(buffer);
                this.writerIndex = this.input.writerIndex();
            } else {
                this.index = this.index - this.input.readerIndex();
                this.input = Unpooled.copiedBuffer(this.input, Unpooled.copiedBuffer(buffer.asByteBuffer()));
                DataBufferUtils.release(buffer);
                this.writerIndex = this.input.writerIndex();
            }

            if (this.state == ST_CORRUPTED) {
                this.input.skipBytes(this.input.readableBytes());
                return Flux.error(new IllegalStateException("Corrupted stream"));
            }

            if (this.writerIndex > maxObjectLength) {
                // buffer size exceeded maxObjectLength; discarding the complete buffer.
                this.input.skipBytes(this.input.readableBytes());
                reset();
                return Flux.error(new IllegalStateException("object length exceeds " + maxObjectLength + ": "
                        + this.writerIndex + " bytes discarded"));
            }

            DataBufferFactory dataBufferFactory = buffer.factory();

            for (/* use current index */; this.index < this.writerIndex; this.index++) {
                byte c = this.input.getByte(this.index);
                if (this.state == ST_DECODING_NORMAL) {
                    decodeByte(c, this.input, this.index);

                    // All opening braces/brackets have been closed. That's enough to conclude
                    // that the JSON object/array is complete.
                    if (this.openBraces == 0) {
                        ByteBuf json = extractObject(this.input, this.input.readerIndex(),
                                this.index + 1 - this.input.readerIndex());
                        if (json != null) {
                            chunks.add(dataBufferFactory.wrap(json.nioBuffer()));
                        }

                        // The JSON object/array was extracted => discard the bytes from
                        // the input buffer.
                        this.input.readerIndex(this.index + 1);
                        // Reset the object state to get ready for the next JSON object/text
                        // coming along the byte stream.
                        reset();
                    }
                } else if (this.state == ST_DECODING_ARRAY_STREAM) {
                    decodeByte(c, this.input, this.index);

                    if (!this.insideString
                            && (this.openBraces == 1 && c == ',' || this.openBraces == 0 && c == ']')) {
                        // skip leading spaces. No range check is needed and the loop will terminate
                        // because the byte at position index is not a whitespace.
                        for (int i = this.input.readerIndex(); Character.isWhitespace(this.input.getByte(i)); i++) {
                            this.input.skipBytes(1);
                        }

                        // skip trailing spaces.
                        int idxNoSpaces = this.index - 1;
                        while (idxNoSpaces >= this.input.readerIndex()
                                && Character.isWhitespace(this.input.getByte(idxNoSpaces))) {
                            idxNoSpaces--;
                        }

                        ByteBuf json = extractObject(this.input, this.input.readerIndex(),
                                idxNoSpaces + 1 - this.input.readerIndex());
                        if (json != null) {
                            chunks.add(dataBufferFactory.wrap(json.nioBuffer()));
                        }

                        this.input.readerIndex(this.index + 1);

                        if (c == ']') {
                            reset();
                        }
                    }
                }
                // JSON object/array detected. Accumulate bytes until all braces/brackets are closed.
                else if (c == '{' || c == '[') {
                    initDecoding(c, streamArrayElements);

                    if (this.state == ST_DECODING_ARRAY_STREAM) {
                        // Discard the array bracket
                        this.input.skipBytes(1);
                    }
                }
                // Discard leading spaces in front of a JSON object/array.
                else if (Character.isWhitespace(c)) {
                    this.input.skipBytes(1);
                } else {
                    this.state = ST_CORRUPTED;
                    return Flux.error(new IllegalStateException("invalid JSON received at byte position "
                            + this.index + ": " + ByteBufUtil.hexDump(this.input)));
                }
            }

            return Flux.fromIterable(chunks);
        }

        /**
         * Override this method if you want to filter the json objects/arrays that
         * get passed through the pipeline.
         */
        protected ByteBuf extractObject(ByteBuf buffer, int index, int length) {
            return buffer.slice(index, length).retain();
        }

        private void decodeByte(byte c, ByteBuf input, int index) {
            if ((c == '{' || c == '[') && !this.insideString) {
                this.openBraces++;
            } else if ((c == '}' || c == ']') && !this.insideString) {
                this.openBraces--;
            } else if (c == '"') {
                // start of a new JSON string. It's necessary to detect strings as they may
                // also contain braces/brackets and that could lead to incorrect results.
                if (!this.insideString) {
                    this.insideString = true;
                }
                // If the double quote wasn't escaped then this is the end of a string.
                else if (input.getByte(index - 1) != '\\') {
                    this.insideString = false;
                }
            }
        }

        private void initDecoding(byte openingBrace, boolean streamArrayElements) {
            this.openBraces = 1;
            if (openingBrace == '[' && streamArrayElements) {
                this.state = ST_DECODING_ARRAY_STREAM;
            } else {
                this.state = ST_DECODING_NORMAL;
            }
        }

        private void reset() {
            this.insideString = false;
            this.state = ST_INIT;
            this.openBraces = 0;
        }
    });
}
From source file:org.springframework.reactive.codec.decoder.JsonObjectDecoder.java
License:Apache License
@Override
public Publisher<ByteBuffer> decode(Publisher<ByteBuffer> inputStream, ResolvableType type, MediaType mediaType,
        Object... hints) {

    return Streams.wrap(inputStream).flatMap(new Function<ByteBuffer, Publisher<? extends ByteBuffer>>() {

        int openBraces;
        int idx;
        int state;
        boolean insideString;
        ByteBuf in;
        Integer wrtIdx;

        @Override
        public Publisher<? extends ByteBuffer> apply(ByteBuffer b) {
            List<ByteBuffer> chunks = new ArrayList<>();

            if (in == null) {
                in = Unpooled.copiedBuffer(b);
                wrtIdx = in.writerIndex();
            } else {
                in = Unpooled.copiedBuffer(in, Unpooled.copiedBuffer(b));
                wrtIdx = in.writerIndex();
            }

            if (state == ST_CORRUPTED) {
                in.skipBytes(in.readableBytes());
                return Streams.fail(new IllegalStateException("Corrupted stream"));
            }

            if (wrtIdx > maxObjectLength) {
                // buffer size exceeded maxObjectLength; discarding the complete buffer.
                in.skipBytes(in.readableBytes());
                reset();
                return Streams.fail(new IllegalStateException(
                        "object length exceeds " + maxObjectLength + ": " + wrtIdx + " bytes discarded"));
            }

            for (/* use current idx */; idx < wrtIdx; idx++) {
                byte c = in.getByte(idx);
                if (state == ST_DECODING_NORMAL) {
                    decodeByte(c, in, idx);

                    // All opening braces/brackets have been closed. That's enough to conclude
                    // that the JSON object/array is complete.
                    if (openBraces == 0) {
                        ByteBuf json = extractObject(in, in.readerIndex(), idx + 1 - in.readerIndex());
                        if (json != null) {
                            chunks.add(json.nioBuffer());
                        }

                        // The JSON object/array was extracted => discard the bytes from
                        // the input buffer.
                        in.readerIndex(idx + 1);
                        // Reset the object state to get ready for the next JSON object/text
                        // coming along the byte stream.
                        reset();
                    }
                } else if (state == ST_DECODING_ARRAY_STREAM) {
                    decodeByte(c, in, idx);

                    if (!insideString && (openBraces == 1 && c == ',' || openBraces == 0 && c == ']')) {
                        // skip leading spaces. No range check is needed and the loop will terminate
                        // because the byte at position idx is not a whitespace.
                        for (int i = in.readerIndex(); Character.isWhitespace(in.getByte(i)); i++) {
                            in.skipBytes(1);
                        }

                        // skip trailing spaces.
                        int idxNoSpaces = idx - 1;
                        while (idxNoSpaces >= in.readerIndex() && Character.isWhitespace(in.getByte(idxNoSpaces))) {
                            idxNoSpaces--;
                        }

                        ByteBuf json = extractObject(in, in.readerIndex(), idxNoSpaces + 1 - in.readerIndex());
                        if (json != null) {
                            chunks.add(json.nioBuffer());
                        }

                        in.readerIndex(idx + 1);

                        if (c == ']') {
                            reset();
                        }
                    }
                }
                // JSON object/array detected. Accumulate bytes until all braces/brackets are closed.
                else if (c == '{' || c == '[') {
                    initDecoding(c, streamArrayElements);

                    if (state == ST_DECODING_ARRAY_STREAM) {
                        // Discard the array bracket
                        in.skipBytes(1);
                    }
                }
                // Discard leading spaces in front of a JSON object/array.
                else if (Character.isWhitespace(c)) {
                    in.skipBytes(1);
                } else {
                    state = ST_CORRUPTED;
                    return Streams.fail(new IllegalStateException(
                            "invalid JSON received at byte position " + idx + ": " + ByteBufUtil.hexDump(in)));
                }
            }

            if (in.readableBytes() == 0) {
                idx = 0;
            }
            return Streams.from(chunks);
        }

        /**
         * Override this method if you want to filter the json objects/arrays that get passed through the pipeline.
         */
        @SuppressWarnings("UnusedParameters")
        protected ByteBuf extractObject(ByteBuf buffer, int index, int length) {
            return buffer.slice(index, length).retain();
        }

        private void decodeByte(byte c, ByteBuf in, int idx) {
            if ((c == '{' || c == '[') && !insideString) {
                openBraces++;
            } else if ((c == '}' || c == ']') && !insideString) {
                openBraces--;
            } else if (c == '"') {
                // start of a new JSON string. It's necessary to detect strings as they may
                // also contain braces/brackets and that could lead to incorrect results.
                if (!insideString) {
                    insideString = true;
                }
                // If the double quote wasn't escaped then this is the end of a string.
                else if (in.getByte(idx - 1) != '\\') {
                    insideString = false;
                }
            }
        }

        private void initDecoding(byte openingBrace, boolean streamArrayElements) {
            openBraces = 1;
            if (openingBrace == '[' && streamArrayElements) {
                state = ST_DECODING_ARRAY_STREAM;
            } else {
                state = ST_DECODING_NORMAL;
            }
        }

        private void reset() {
            insideString = false;
            state = ST_INIT;
            openBraces = 0;
        }
    });
}
From source file:org.starnub.starnubserver.servers.starbound.TCPProxyServerPacketDecoder.java
License:Open Source License
@SuppressWarnings("unchecked") @Override/* w w w. ja v a 2 s . c o m*/ public void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception { switch (state()) { case READ_PACKET_ID: { packetIDIndex = in.readerIndex(); byte packetId = in.getByte(packetIDIndex); packet = PACKET_POOL.get(packetId); checkpoint(DecoderState.READ_VLQ); } case READ_VLQ: { clearVLQ(); int tempIndexMarker = packetIDIndex + 1; while (vlqLength <= 10) { int tmpByte = in.getByte(tempIndexMarker); payloadLength = (payloadLength << 7) | (tmpByte & 0x7f); vlqLength++; if ((tmpByte & 0x80) == 0) { break; } tempIndexMarker++; } if ((payloadLength & 1) == 0x00) { payloadLength = payloadLength >> 1; } else { payloadLength = -((payloadLength >> 1) + 1); } compressed = payloadLength < 0; if (compressed) { payloadLength = -payloadLength; } checkpoint(DecoderState.READ_PAYLOAD); } case READ_PAYLOAD: { if (packet != null) { CopyOnWriteArrayList<EventSubscription> hashSet = PACKET_EVENT_ROUTER.getEVENT_SUBSCRIPTION_MAP() .get(packet.getClass()); /* Handle Packet if there is an events handler for it, else do not create objects */ if (hashSet != null) { in.skipBytes(1 + vlqLength); if (compressed) { packet.read(Unpooled.wrappedBuffer(1, decompress(in.readBytes(payloadLength).array()))); } else { packet.read(in.readBytes(payloadLength)); } for (EventSubscription<Packet> packetEventSubscription : hashSet) { if (packet.isRecycle()) { break; } try { packetEventSubscription.getEVENT_HANDLER().onEvent(packet); } catch (Exception e) { e.printStackTrace(); } } /* Write packet out, if not recycling */ if (!packet.isRecycle()) { packet.routeToDestination(); } else { packet.resetRecycle(); } } else { destinationCTX.writeAndFlush(in.readSlice(1 + vlqLength + payloadLength).retain(), destinationCTX.voidPromise()); } } else { destinationCTX.writeAndFlush(in.readSlice(1 + vlqLength + payloadLength).retain(), destinationCTX.voidPromise()); } checkpoint(DecoderState.READ_PACKET_ID); break; } default: throw new Error("Error Decoding - Reached the unreachable void."); } }
From source file:org.traccar.protocol.AplicomFrameDecoder.java
License:Apache License
@Override
protected Object decode(ChannelHandlerContext ctx, Channel channel, ByteBuf buf) throws Exception {

    // Skip Alive message
    while (buf.isReadable() && Character.isDigit(buf.getByte(buf.readerIndex()))) {
        buf.readByte();
    }

    // Check minimum length
    if (buf.readableBytes() < 11) {
        return null;
    }

    // Read flags
    int version = buf.getUnsignedByte(buf.readerIndex() + 1);
    int offset = 1 + 1 + 3;
    if ((version & 0x80) != 0) {
        offset += 4;
    }

    // Get data length
    int length = buf.getUnsignedShort(buf.readerIndex() + offset);
    offset += 2;
    if ((version & 0x40) != 0) {
        offset += 3;
    }
    length += offset; // add header

    // Return buffer
    if (buf.readableBytes() >= length) {
        return buf.readRetainedSlice(length);
    }

    return null;
}