List of usage examples for io.netty.buffer ByteBuf readSlice
public abstract ByteBuf readSlice(int length);
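readSlice(length) returns a slice of this buffer's sub-region starting at the current readerIndex and advances the readerIndex by length. The slice shares the underlying memory and reference count with the parent buffer and is not retained; use readRetainedSlice(int) when the slice must outlive the parent. A minimal standalone sketch before the real-world examples below (class name and buffer contents are illustrative):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.CharsetUtil;

public class ReadSliceExample {
    public static void main(String[] args) {
        ByteBuf buf = Unpooled.copiedBuffer("HEADERpayload", CharsetUtil.UTF_8);

        // Consume the first 6 bytes as a slice: readerIndex advances to 6,
        // and the slice shares the underlying memory with 'buf'.
        ByteBuf header = buf.readSlice(6);
        System.out.println(header.toString(CharsetUtil.UTF_8)); // HEADER

        // The remaining readable bytes start where the slice ended.
        System.out.println(buf.toString(CharsetUtil.UTF_8));    // payload

        // The slice is NOT retained: releasing the parent invalidates it.
        // Use readRetainedSlice(int) when the slice must outlive the parent.
        buf.release();
    }
}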
From source file: org.eclipse.scada.protocol.syslog.time.PatternTimestampParser.java
License: Open Source License

@Override
public Calendar parseTimestamp(final ByteBuf data) {
    final int index = data.bytesBefore(this.endMarker);
    if (index < 0) {
        throw new CodecException("Unable to find timestamp");
    }

    final String timestampString = data.readSlice(index).toString(this.charset);
    logger.debug("Timestamp string: '{}'", timestampString);

    final Matcher m = this.pattern.matcher(timestampString);
    if (!m.matches()) {
        throw new CodecException("Timestamp string does not match pattern: " + this.pattern.pattern());
    }

    final int year = Integer.parseInt(m.group("year"));
    final int month = Integer.parseInt(m.group("month")) - 1;
    final int day = Integer.parseInt(m.group("day"));
    final int hour = Integer.parseInt(m.group("hour"));
    final int minute = Integer.parseInt(m.group("minute"));
    final int second = Integer.parseInt(m.group("second"));
    final int ms = Integer.parseInt(m.group("subsec")) / 1000;

    TimeZone timezone = TimeZone.getDefault();
    final String tz = m.group("tz");
    if (!tz.isEmpty()) {
        // FIXME: implement
        if ("Z".equals(tz)) {
            timezone = TimeZone.getTimeZone("UTC");
        } else {
            timezone = TimeZone.getTimeZone("GMT" + tz);
        }
    }

    final Calendar c = new GregorianCalendar(year, month, day, hour, minute, second);
    c.setTimeZone(timezone);
    c.set(Calendar.MILLISECOND, ms);

    // skip marker byte
    data.skipBytes(1);

    return c;
}
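The example above combines bytesBefore with readSlice to cut a delimiter-terminated token off the front of the buffer. A reduced sketch of that pattern, assuming a single-byte delimiter (helper name and charset are illustrative):

import io.netty.buffer.ByteBuf;
import java.nio.charset.StandardCharsets;

public final class DelimitedToken {
    /** Reads the token before 'delimiter' and consumes the delimiter, or returns null if absent. */
    static String readToken(ByteBuf data, byte delimiter) {
        int index = data.bytesBefore(delimiter);  // offset relative to readerIndex, -1 if not found
        if (index < 0) {
            return null;                          // delimiter not found: leave the buffer untouched
        }
        String token = data.readSlice(index).toString(StandardCharsets.US_ASCII);
        data.skipBytes(1);                        // consume the delimiter itself
        return token;
    }
}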
From source file: org.graylog2.gelfclient.encoder.GelfMessageChunkEncoder.java
License: Apache License

/** {@inheritDoc} */
@Override
protected void encode(ChannelHandlerContext ctx, ByteBuf buf, List<Object> out) throws Exception {
    if (buf.readableBytes() > MAX_MESSAGE_SIZE) {
        throw new EncoderException(
                "Message too big. " + buf.readableBytes() + " bytes (max " + MAX_MESSAGE_SIZE + ")");
    }

    if (buf.readableBytes() <= MAX_CHUNK_SIZE) {
        // Need to retain() the buffer here to avoid releasing the buffer too early.
        out.add(buf.retain());
    } else {
        final Chunker chunker = new Chunker(buf.readableBytes());
        try {
            while (buf.readableBytes() > 0) {
                if (buf.readableBytes() >= MAX_CHUNK_SIZE) {
                    out.add(chunker.nextChunk(buf.readSlice(MAX_CHUNK_SIZE)));
                } else {
                    out.add(chunker.nextChunk(buf.readSlice(buf.readableBytes())));
                }
            }
        } catch (Exception e) {
            LOG.error("Chunk encoder error", e);
            buf.release();
        }
    }
}
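The two-branch slice in the loop above can be condensed with Math.min; either way each chunk returned by readSlice is only a view into buf, so a chunk passed on unchanged (rather than copied, as Chunker presumably does) must be retained. A hedged sketch under those assumptions (class name and MAX_CHUNK value are illustrative):

import io.netty.buffer.ByteBuf;
import java.util.List;

final class ChunkingSketch {
    private static final int MAX_CHUNK = 1420; // illustrative chunk size

    /** Splits buf into slices of at most MAX_CHUNK bytes. */
    static void chunk(ByteBuf buf, List<Object> out) {
        while (buf.readableBytes() > 0) {
            int len = Math.min(buf.readableBytes(), MAX_CHUNK);
            // Each slice shares buf's memory and reference count; retain it if it
            // must outlive buf (e.g. when handed to another pipeline stage).
            out.add(buf.readSlice(len).retain());
        }
    }
}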
From source file: org.neo4j.bolt.v1.transport.BoltV1Dechunker.java
License: Open Source License

public void handle(ByteBuf data) throws IOException {
    while (data.readableBytes() > 0) {
        switch (state) {
        case AWAITING_CHUNK: {
            if (data.readableBytes() >= 2) {
                // Whole header available, read that
                chunkSize = data.readUnsignedShort();
                handleHeader();
            } else {
                // Only one byte available, read that and wait for the second byte
                chunkSize = data.readUnsignedByte() << 8;
                state = State.IN_HEADER;
            }
            break;
        }
        case IN_HEADER: {
            // First header byte read, now we read the next one
            chunkSize = chunkSize | data.readUnsignedByte();
            handleHeader();
            break;
        }
        case IN_CHUNK: {
            if (chunkSize < data.readableBytes()) {
                // Current packet is larger than the current chunk, slice off the chunk
                input.append(data.readSlice(chunkSize));
                state = State.AWAITING_CHUNK;
            } else if (chunkSize == data.readableBytes()) {
                // Current packet maps exactly onto the current chunk
                input.append(data);
                state = State.AWAITING_CHUNK;
                return;
            } else {
                // Current packet is smaller than the chunk we're reading, split the current chunk itself up
                chunkSize -= data.readableBytes();
                input.append(data);
                return;
            }
            break;
        }
        case CLOSED: {
            // No-op
            return;
        }
        }
    }
}
From source file: org.neo4j.ndp.transport.socket.SocketProtocolV1.java
License: Open Source License

/**
 * Handle an incoming network packet. We currently deal with the chunked input by building up full messages in
 * RAM before we deserialize them. This is fine for most messages, but will become a problem with very large
 * parameters and so on. The next step will be to write a new protocol V1 deserializer that can do incremental
 * deserialization; see the Netty HTTP parser for an example.
 */
@Override
public void handle(ChannelHandlerContext channelContext, ByteBuf data) {
    onBatchOfMessagesStarted();
    while (data.readableBytes() > 0) {
        int chunkSize = data.readUnsignedShort();
        if (chunkSize > 0) {
            if (chunkSize <= data.readableBytes()) {
                // Incoming buffer contains the whole chunk, forward it to our chunked input handling
                input.addChunk(data.readSlice(chunkSize));
            } else {
                throw new UnsupportedOperationException("Chunks split across packets not yet supported"); // TODO
            }
        } else {
            processChunkedMessage(channelContext);
        }
    }
    onBatchOfMessagesDone();
}
From source file: org.onosproject.ovsdb.lib.utils.JsonRpcReaderUtil.java
License: Apache License

/**
 * Decodes the bytes into JSON objects.
 *
 * @param in input bytes
 * @param out output list of JSON objects
 * @param jrContext context for the last decoding process
 */
public static void readToJsonNode(ByteBuf in, List<Object> out, JsonReadContext jrContext) throws Exception {
    int lastReadBytes = jrContext.getLastReadBytes();
    if (lastReadBytes == 0) {
        if (in.readableBytes() < 4) {
            return;
        }
        checkEncoding(in);
    }

    int i = lastReadBytes + in.readerIndex();
    Stack<Byte> bufStack = jrContext.getBufStack();
    for (; i < in.writerIndex(); i++) {
        byte b = in.getByte(i);
        switch (b) {
        case '{':
            if (!isDoubleQuote(bufStack)) {
                bufStack.push(b);
                jrContext.setStartMatch(true);
            }
            break;
        case '}':
            if (!isDoubleQuote(bufStack)) {
                bufStack.pop();
            }
            break;
        case '"':
            if (in.getByte(i - 1) != '\\') {
                if (!bufStack.isEmpty() && bufStack.peek() != '"') {
                    bufStack.push(b);
                } else {
                    bufStack.pop();
                }
            }
            break;
        default:
            break;
        }

        if (jrContext.isStartMatch() && bufStack.isEmpty()) {
            ByteBuf buf = in.readSlice(i - in.readerIndex() + 1);
            JsonParser jf = new MappingJsonFactory().createParser(new ByteBufInputStream(buf));
            JsonNode jsonNode = jf.readValueAsTree();
            out.add(jsonNode);
            lastReadBytes = 0;
            jrContext.setLastReadBytes(lastReadBytes);
            break;
        }
    }

    if (i >= in.writerIndex()) {
        lastReadBytes = in.readableBytes();
        jrContext.setLastReadBytes(lastReadBytes);
    }
}
From source file: org.onosproject.ovsdb.rfc.utils.JsonRpcReaderUtil.java
License: Apache License

/**
 * Decodes the bytes into JSON objects.
 *
 * @param in input bytes
 * @param out output list of JSON objects
 * @param jrContext context for the last decoding process
 * @throws IOException on read failure
 * @throws JsonParseException on malformed JSON
 */
public static void readToJsonNode(ByteBuf in, List<Object> out, JsonReadContext jrContext) throws IOException {
    int lastReadBytes = jrContext.getLastReadBytes();
    if (lastReadBytes == 0) {
        if (in.readableBytes() < 4) {
            return;
        }
        checkEncoding(in);
    }

    int i = lastReadBytes + in.readerIndex();
    Stack<Byte> bufStack = jrContext.getBufStack();
    for (; i < in.writerIndex(); i++) {
        byte b = in.getByte(i);
        switch (b) {
        case '{':
            if (!isDoubleQuote(bufStack)) {
                bufStack.push(b);
                jrContext.setStartMatch(true);
            }
            break;
        case '}':
            if (!isDoubleQuote(bufStack)) {
                bufStack.pop();
            }
            break;
        case '"':
            if (in.getByte(i - 1) != '\\') {
                if (!bufStack.isEmpty() && bufStack.peek() != '"') {
                    bufStack.push(b);
                } else {
                    bufStack.pop();
                }
            }
            break;
        default:
            break;
        }

        if (jrContext.isStartMatch() && bufStack.isEmpty()) {
            ByteBuf buf = in.readSlice(i - in.readerIndex() + 1);
            JsonParser jf = new MappingJsonFactory().createParser(new ByteBufInputStream(buf));
            JsonNode jsonNode = jf.readValueAsTree();
            out.add(jsonNode);
            lastReadBytes = 0;
            jrContext.setLastReadBytes(lastReadBytes);
            break;
        }
    }

    if (i >= in.writerIndex()) {
        lastReadBytes = in.readableBytes();
        jrContext.setLastReadBytes(lastReadBytes);
    }
}
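Both readToJsonNode variants above, like the JsonRpcDecoder examples further down, follow a scan-then-slice scheme: peek ahead with getByte, which leaves the readerIndex untouched, and only once a complete frame is balanced consume exactly that many bytes with readSlice. A stripped-down sketch of the scheme using a newline as the frame delimiter (class name and delimiter are illustrative):

import io.netty.buffer.ByteBuf;

final class ScanThenSlice {
    /** Returns one newline-terminated frame as a slice, or null if no full frame arrived yet. */
    static ByteBuf nextFrame(ByteBuf buf) {
        for (int i = buf.readerIndex(); i < buf.writerIndex(); i++) {
            if (buf.getByte(i) == '\n') {                        // peek only: readerIndex is unchanged
                return buf.readSlice(i - buf.readerIndex() + 1); // consume the frame incl. delimiter
            }
        }
        return null; // incomplete frame: wait for more data
    }
}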
From source file: org.opendaylight.groupbasedpolicy.jsonrpc.JsonRpcDecoder.java
License: Open Source License

@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf buf, List<Object> out) throws Exception {
    logger.trace("readable bytes {}, records read {}, incomplete record bytes {}",
            buf.readableBytes(), recordsRead, lastRecordBytes);

    if (lastRecordBytes == 0) {
        if (buf.readableBytes() < 4) {
            return; // wait for more data
        }
        skipSpaces(buf);
        byte[] buff = new byte[4];
        buf.getBytes(buf.readerIndex(), buff);
        ByteSourceJsonBootstrapper strapper = new ByteSourceJsonBootstrapper(jacksonIOContext, buff, 0, 4);
        JsonEncoding jsonEncoding = strapper.detectEncoding();
        if (!JsonEncoding.UTF8.equals(jsonEncoding)) {
            throw new InvalidEncodingException(jsonEncoding.getJavaName(), "currently only UTF-8 is supported");
        }
    }

    int i = lastRecordBytes + buf.readerIndex();
    for (; i < buf.writerIndex(); i++) {
        switch (buf.getByte(i)) {
        case '{':
            if (!inS) {
                leftCurlies++;
            }
            break;
        case '}':
            if (!inS) {
                rightCurlies++;
            }
            break;
        case '"':
            if (buf.getByte(i - 1) != '\\') {
                inS = !inS;
            }
            break;
        default:
            break;
        }

        if (leftCurlies != 0 && leftCurlies == rightCurlies && !inS) {
            ByteBuf slice = buf.readSlice(1 + i - buf.readerIndex());
            JsonParser jp = jacksonJsonFactory.createParser(new ByteBufInputStream(slice));
            JsonNode root = jp.readValueAsTree();
            out.add(root);
            leftCurlies = rightCurlies = lastRecordBytes = 0;
            recordsRead++;
            break;
        }

        if (i - buf.readerIndex() >= maxFrameLength) {
            fail(ctx, i - buf.readerIndex());
        }
    }

    // End of stream: save the incomplete record index to avoid reexamining the whole buffer on the next run.
    if (i >= buf.writerIndex()) {
        lastRecordBytes = buf.readableBytes();
        return;
    }
}
From source file: org.opendaylight.openflowjava.protocol.impl.deserialization.factories.MultipartReplyMessageFactory.java
License: Open Source License

private static MultipartReplyFlowCase setFlow(ByteBuf input) {
    MultipartReplyFlowCaseBuilder caseBuilder = new MultipartReplyFlowCaseBuilder();
    MultipartReplyFlowBuilder flowBuilder = new MultipartReplyFlowBuilder();
    List<FlowStats> flowStatsList = new ArrayList<>();
    while (input.readableBytes() > 0) {
        FlowStatsBuilder flowStatsBuilder = new FlowStatsBuilder();
        int flowRecordLength = input.readUnsignedShort();
        ByteBuf subInput = input.readSlice(flowRecordLength - EncodeConstants.SIZE_OF_SHORT_IN_BYTES);
        flowStatsBuilder.setTableId(subInput.readUnsignedByte());
        subInput.skipBytes(PADDING_IN_FLOW_STATS_HEADER_01);
        flowStatsBuilder.setDurationSec(subInput.readUnsignedInt());
        flowStatsBuilder.setDurationNsec(subInput.readUnsignedInt());
        flowStatsBuilder.setPriority(subInput.readUnsignedShort());
        flowStatsBuilder.setIdleTimeout(subInput.readUnsignedShort());
        flowStatsBuilder.setHardTimeout(subInput.readUnsignedShort());
        flowStatsBuilder.setFlags(createFlowModFlagsFromBitmap(subInput.readUnsignedShort()));
        subInput.skipBytes(PADDING_IN_FLOW_STATS_HEADER_02);
        byte[] cookie = new byte[EncodeConstants.SIZE_OF_LONG_IN_BYTES];
        subInput.readBytes(cookie);
        flowStatsBuilder.setCookie(new BigInteger(1, cookie));
        byte[] packetCount = new byte[EncodeConstants.SIZE_OF_LONG_IN_BYTES];
        subInput.readBytes(packetCount);
        flowStatsBuilder.setPacketCount(new BigInteger(1, packetCount));
        byte[] byteCount = new byte[EncodeConstants.SIZE_OF_LONG_IN_BYTES];
        subInput.readBytes(byteCount);
        flowStatsBuilder.setByteCount(new BigInteger(1, byteCount));
        flowStatsBuilder.setMatch(MatchDeserializer.createMatch(subInput));
        flowStatsBuilder.setInstruction(
                InstructionsDeserializer.createInstructions(subInput, subInput.readableBytes()));
        flowStatsList.add(flowStatsBuilder.build());
    }
    flowBuilder.setFlowStats(flowStatsList);
    caseBuilder.setMultipartReplyFlow(flowBuilder.build());
    return caseBuilder.build();
}
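The OpenFlow example above also shows the standard way to parse length-prefixed records with readSlice: because the slice is bounded by the declared record length, the per-record field parsers cannot read past the record, and the outer loop stays aligned even if a parser under-consumes. A minimal sketch of that pattern (the 2-byte length prefix that counts itself, and parseRecord, are illustrative assumptions):

import io.netty.buffer.ByteBuf;

public final class LengthPrefixedRecords {
    /** Splits a buffer of [length][body] records into bounded per-record slices. */
    public static void readRecords(ByteBuf input) {
        while (input.readableBytes() >= 2) {
            // Assumed: the length prefix counts the whole record, including the 2-byte prefix itself.
            int recordLength = input.readUnsignedShort();
            ByteBuf record = input.readSlice(recordLength - 2);
            parseRecord(record); // cannot read past the record boundary
        }
    }

    private static void parseRecord(ByteBuf record) {
        // Illustrative: consume the record's fields here.
        record.skipBytes(record.readableBytes());
    }
}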
From source file: org.opendaylight.ovsdb.lib.jsonrpc.JsonRpcDecoder.java
License: Open Source License

@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf buf, List<Object> out) throws Exception {
    logger.trace("readable bytes {}, records read {}, incomplete record bytes {}",
            buf.readableBytes(), recordsRead, lastRecordBytes);

    if (lastRecordBytes == 0) {
        if (buf.readableBytes() < 4) {
            return; // wait for more data
        }
        skipSpaces(buf);
        byte[] buff = new byte[4];
        buf.getBytes(buf.readerIndex(), buff);
        ByteSourceJsonBootstrapper strapper = new ByteSourceJsonBootstrapper(jacksonIOContext, buff, 0, 4);
        JsonEncoding jsonEncoding = strapper.detectEncoding();
        if (!JsonEncoding.UTF8.equals(jsonEncoding)) {
            throw new InvalidEncodingException(jsonEncoding.getJavaName(), "currently only UTF-8 is supported");
        }
    }

    int index = lastRecordBytes + buf.readerIndex();
    for (; index < buf.writerIndex(); index++) {
        switch (buf.getByte(index)) {
        case '{':
            if (!inS) {
                leftCurlies++;
            }
            break;
        case '}':
            if (!inS) {
                rightCurlies++;
            }
            break;
        case '"':
            if (buf.getByte(index - 1) != '\\') {
                inS = !inS;
            }
            break;
        default:
            break;
        }

        if (leftCurlies != 0 && leftCurlies == rightCurlies && !inS) {
            ByteBuf slice = buf.readSlice(1 + index - buf.readerIndex());
            JsonParser jp = jacksonJsonFactory.createParser(new ByteBufInputStream(slice));
            JsonNode root = jp.readValueAsTree();
            out.add(root);
            leftCurlies = rightCurlies = lastRecordBytes = 0;
            recordsRead++;
            break;
        }

        if (index - buf.readerIndex() >= maxFrameLength) {
            fail(ctx, index - buf.readerIndex());
        }
    }

    // End of stream: save the incomplete record index to avoid reexamining the whole buffer on the next run.
    if (index >= buf.writerIndex()) {
        lastRecordBytes = buf.readableBytes();
        return;
    }
}
From source file: org.opendaylight.protocol.bgp.evpn.impl.nlri.EthADRParser.java
License: Open Source License

@Override
public EvpnChoice parseEvpn(final ByteBuf buffer) {
    Preconditions.checkArgument(buffer.readableBytes() == CONTENT_LENGTH,
            "Wrong length of array of bytes. Passed: %s ;", buffer);
    final Esi esi = SimpleEsiTypeRegistry.getInstance().parseEsi(buffer.readSlice(ESI_SIZE));
    final EthernetTagId eti = new EthernetTagIdBuilder().setVlanId(buffer.readUnsignedInt()).build();
    final MplsLabel label = mplsLabelForByteBuf(buffer);
    final EthernetADRouteBuilder builder = new EthernetADRouteBuilder()
            .setEsi(esi)
            .setEthernetTagId(eti)
            .setMplsLabel(label);
    return new EthernetADRouteCaseBuilder().setEthernetADRoute(builder.build()).build();
}