List of usage examples for io.netty.channel ChannelHandlerContext alloc
ByteBufAllocator alloc();
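ctx.alloc() returns the ByteBufAllocator configured for the channel, so a handler can allocate outbound buffers from the channel's own allocator (pooled or unpooled, direct or heap, depending on bootstrap options) instead of going through Unpooled. A minimal sketch of the common pattern shared by the examples below; the handler class name and the "ack:" prefix are made up for illustration:

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.CharsetUtil;

public class AllocExampleHandler extends ChannelInboundHandlerAdapter {
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        ByteBuf in = (ByteBuf) msg;
        try {
            // Obtain a buffer from the channel's configured ByteBufAllocator.
            ByteBuf reply = ctx.alloc().buffer(in.readableBytes() + 4);
            reply.writeBytes("ack:".getBytes(CharsetUtil.US_ASCII));
            reply.writeBytes(in);
            ctx.writeAndFlush(reply); // Netty releases 'reply' once it is written out.
        } finally {
            in.release(); // release the inbound buffer this handler took ownership of.
        }
    }
}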
From source file:openbns.commons.net.codec.sts.HttpObjectEncoder.java
License:Apache License
@Override
protected void encode(ChannelHandlerContext ctx, Object msg, List<Object> out) throws Exception {
    ByteBuf buf = null;
    if (msg instanceof StsMessage) {
        if (state != ST_INIT) {
            throw new IllegalStateException("unexpected message type: " + StringUtil.simpleClassName(msg));
        }

        @SuppressWarnings({ "unchecked", "CastConflictsWithInstanceof" })
        H m = (H) msg;

        buf = ctx.alloc().buffer();

        // Encode the message.
        encodeInitialLine(buf, m);
        StsHeaders.encode(m.headers(), buf);
        buf.writeBytes(CRLF);
        state = ST_CONTENT_NON_CHUNK;
    }
    if (msg instanceof StsContent || msg instanceof ByteBuf || msg instanceof FileRegion) {
        if (state == ST_INIT) {
            throw new IllegalStateException("unexpected message type: " + StringUtil.simpleClassName(msg));
        }

        int contentLength = contentLength(msg);
        if (state == ST_CONTENT_NON_CHUNK) {
            if (contentLength > 0) {
                if (buf != null && buf.writableBytes() >= contentLength && msg instanceof StsContent) {
                    // merge into other buffer for performance reasons
                    buf.writeBytes(((StsContent) msg).content());
                    out.add(buf);
                } else {
                    if (buf != null) {
                        out.add(buf);
                    }
                    out.add(encodeAndRetain(msg));
                }
            } else {
                if (buf != null) {
                    out.add(buf);
                } else {
                    // Need to produce some output otherwise an
                    // IllegalStateException will be thrown
                    out.add(EMPTY_BUFFER);
                }
            }

            if (msg instanceof LastStsContent) {
                state = ST_INIT;
            }
        } else {
            throw new Error();
        }
    } else {
        if (buf != null) {
            out.add(buf);
        }
    }
}
From source file:org.apache.camel.component.netty4.codec.DatagramPacketObjectEncoder.java
License:Apache License
@Override
protected void encode(ChannelHandlerContext ctx, AddressedEnvelope<Object, InetSocketAddress> msg, List<Object> out) throws Exception {
    if (msg.content() instanceof Serializable) {
        Serializable payload = (Serializable) msg.content();
        ByteBuf buf = ctx.alloc().heapBuffer();
        delegateObjectEncoder.encode(ctx, payload, buf);
        AddressedEnvelope<Object, InetSocketAddress> addressedEnvelop =
                new DefaultAddressedEnvelope<Object, InetSocketAddress>(buf.retain(), msg.recipient(), msg.sender());
        out.add(addressedEnvelop);
    }
}
From source file:org.apache.camel.component.netty4.codec.DatagramPacketStringEncoder.java
License:Apache License
@Override
protected void encode(ChannelHandlerContext ctx, AddressedEnvelope<Object, InetSocketAddress> msg, List<Object> out) throws Exception {
    if (msg.content() instanceof CharSequence) {
        CharSequence payload = (CharSequence) msg.content();
        if (payload.length() == 0) {
            return;
        }
        AddressedEnvelope<Object, InetSocketAddress> addressedEnvelop =
                new DefaultAddressedEnvelope<Object, InetSocketAddress>(
                        ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(payload), charset),
                        msg.recipient(), msg.sender());
        out.add(addressedEnvelop);
    }
}
From source file:org.apache.carbondata.core.dictionary.server.DictionaryServerHandler.java
License:Apache License
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
    try {
        ByteBuf data = (ByteBuf) msg;
        DictionaryMessage key = new DictionaryMessage();
        key.readData(data);
        data.release();
        int outPut = processMessage(key);
        key.setDictionaryValue(outPut);
        // Send back the response
        ByteBuf buffer = ctx.alloc().buffer();
        key.writeData(buffer);
        ctx.writeAndFlush(buffer);
    } catch (Exception e) {
        LOGGER.error(e);
        throw e;
    }
}
From source file:org.apache.carbondata.core.dictionary.server.NonSecureDictionaryServerHandler.java
License:Apache License
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
    try {
        ByteBuf data = (ByteBuf) msg;
        DictionaryMessage key = new DictionaryMessage();
        key.readSkipLength(data);
        data.release();
        int outPut = processMessage(key);
        key.setDictionaryValue(outPut);
        // Send back the response
        ByteBuf buffer = ctx.alloc().buffer();
        key.writeData(buffer);
        ctx.writeAndFlush(buffer);
    } catch (Exception e) {
        LOGGER.error(e);
        throw e;
    }
}
From source file:org.apache.directory.server.dhcp.netty.Dhcp6Handler.java
@Override
protected void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Incomming DHCP : {}, from: {}", ByteBufUtil.hexDump(msg.content()), msg.sender());
    }

    final Dhcp6Message incommingMsg;
    try {
        incommingMsg = dhcp6MessageDecoder.decode(msg.content().nioBuffer());
    } catch (final Dhcp6Exception.UnknownMsgException e) {
        LOG.warn("Unknown DHCP message type: {}. Ignoring", ByteBufUtil.hexDump(msg.content()), e);
        return;
    }

    final Optional<Dhcp6Message> reply =
            dhcpService.getReplyFor(new Dhcp6RequestContext(msg.sender().getAddress()), incommingMsg);
    if (reply.isPresent()) {
        LOG.debug("Responding with message: {}", reply.get());

        // TODO what size to allocate the buffer to ?
        ByteBuf buf = ctx.alloc().buffer(1024);
        ByteBuffer buffer = buf.nioBuffer(buf.writerIndex(), buf.writableBytes());
        dhcp6MessageEncoder.encode(buffer, reply.get());
        buffer.flip();
        buf.writerIndex(buf.writerIndex() + buffer.remaining());

        DatagramPacket packet = new DatagramPacket(buf, msg.sender());
        ctx.write(packet);
    } else {
        LOG.warn("No response from DHCP service received for: {}. Ignoring.", incommingMsg);
    }
}
From source file:org.apache.directory.server.dhcp.netty.DhcpHandler.java
@Override
protected void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
    DhcpMessage request = decoder.decode(msg.content().nioBuffer());
    DhcpRequestContext context = interfaceManager.newRequestContext(
            (InetSocketAddress) ctx.channel().localAddress(), msg.sender(), msg.recipient(), request);
    if (context == null) {
        debug("IGNQUERY", msg.sender(), msg.recipient(), request);
        return;
    }
    // debug("READ", msg.sender(), msg.recipient(), request);

    MDCUtils.init(context, request);
    try {
        DhcpMessage reply = dhcpService.getReplyFor(context, request);
        if (reply == null) {
            debug("NOREPLY", msg.sender(), msg.recipient(), request);
            return;
        }

        InterfaceAddress localAddress = interfaceManager.getResponseInterface(request.getRelayAgentAddress(),
                request.getCurrentClientAddress(), msg.sender().getAddress(), reply);
        if (localAddress == null) {
            debug("NOIFACE", msg.recipient(), msg.sender(), reply);
            return;
        }

        debug("READ", msg.sender(), msg.recipient(), request);
        InetSocketAddress isa = DhcpInterfaceUtils.determineMessageDestination(request, reply, localAddress,
                msg.sender().getPort());

        ByteBuf buf = ctx.alloc().buffer(1024);
        ByteBuffer buffer = buf.nioBuffer(buf.writerIndex(), buf.writableBytes());
        encoder.encode(buffer, reply);
        buffer.flip();
        buf.writerIndex(buf.writerIndex() + buffer.remaining());

        DatagramPacket packet = new DatagramPacket(buf, isa);
        debug("WRITE", packet.sender(), packet.recipient(), reply);
        ctx.write(packet, ctx.voidPromise());
    } finally {
        MDCUtils.fini();
    }
}
From source file:org.apache.drill.exec.rpc.ChunkCreationHandler.java
License:Apache License
@Override
protected void encode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("ChunkCreationHandler called with msg {} of size {} with chunkSize {}",
                msg, msg.readableBytes(), chunkSize);
    }

    if (!ctx.channel().isOpen()) {
        logger.debug("Channel closed, skipping encode inside {}.", RpcConstants.CHUNK_CREATION_HANDLER);
        msg.release();
        return;
    }

    // Calculate the number of chunks based on configured chunk size and input msg size
    int numChunks = (int) Math.ceil((double) msg.readableBytes() / chunkSize);

    // Initialize a composite buffer to hold numChunks chunk.
    final CompositeByteBuf cbb = ctx.alloc().compositeBuffer(numChunks);

    int cbbWriteIndex = 0;
    int currentChunkLen = min(msg.readableBytes(), chunkSize);

    // Create slices of chunkSize from input msg and add it to the composite buffer.
    while (numChunks > 0) {
        final ByteBuf chunkBuf = msg.slice(msg.readerIndex(), currentChunkLen);
        chunkBuf.retain();
        cbb.addComponent(chunkBuf);
        cbbWriteIndex += currentChunkLen;
        msg.skipBytes(currentChunkLen);
        --numChunks;
        currentChunkLen = min(msg.readableBytes(), chunkSize);
    }

    // Update the writerIndex of composite byte buffer. Netty doesn't do it automatically.
    cbb.writerIndex(cbbWriteIndex);

    // Add the final composite bytebuf into output buffer.
    out.add(cbb);
}
From source file:org.apache.drill.exec.rpc.RpcEncoder.java
License:Apache License
@Override
protected void encode(ChannelHandlerContext ctx, OutboundRpcMessage msg, List<Object> out) throws Exception {
    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("Rpc Encoder called with msg {}", msg);
    }

    if (!ctx.channel().isOpen()) {
        //output.add(ctx.alloc().buffer(0));
        logger.debug("Channel closed, skipping encode.");
        msg.release();
        return;
    }

    try {
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Encoding outbound message {}", msg);
        }
        // first we build the RpcHeader
        RpcHeader header = RpcHeader.newBuilder() //
                .setMode(msg.mode) //
                .setCoordinationId(msg.coordinationId) //
                .setRpcType(msg.rpcType).build();

        // figure out the full length
        int headerLength = header.getSerializedSize();
        int protoBodyLength = msg.pBody.getSerializedSize();
        int rawBodyLength = msg.getRawBodySize();
        int fullLength = //
                HEADER_TAG_LENGTH + getRawVarintSize(headerLength) + headerLength + //
                PROTOBUF_BODY_TAG_LENGTH + getRawVarintSize(protoBodyLength) + protoBodyLength; //

        if (rawBodyLength > 0) {
            fullLength += (RAW_BODY_TAG_LENGTH + getRawVarintSize(rawBodyLength) + rawBodyLength);
        }

        ByteBuf buf = ctx.alloc().buffer();
        OutputStream os = new ByteBufOutputStream(buf);
        CodedOutputStream cos = CodedOutputStream.newInstance(os);

        // write full length first (this is length delimited stream).
        cos.writeRawVarint32(fullLength);

        // write header
        cos.writeRawVarint32(HEADER_TAG);
        cos.writeRawVarint32(headerLength);
        header.writeTo(cos);

        // write protobuf body length and body
        cos.writeRawVarint32(PROTOBUF_BODY_TAG);
        cos.writeRawVarint32(protoBodyLength);
        msg.pBody.writeTo(cos);

        // if exists, write data body and tag.
        if (msg.getRawBodySize() > 0) {
            if (RpcConstants.EXTRA_DEBUGGING) {
                logger.debug("Writing raw body of size {}", msg.getRawBodySize());
            }

            cos.writeRawVarint32(RAW_BODY_TAG);
            cos.writeRawVarint32(rawBodyLength);
            cos.flush(); // need to flush so that dbody goes after if cos is caching.

            CompositeByteBuf cbb = new CompositeByteBuf(buf.alloc(), true, msg.dBodies.length + 1);
            cbb.addComponent(buf);
            int bufLength = buf.readableBytes();
            for (ByteBuf b : msg.dBodies) {
                cbb.addComponent(b);
                bufLength += b.readableBytes();
            }
            cbb.writerIndex(bufLength);
            out.add(cbb);
        } else {
            cos.flush();
            out.add(buf);
        }

        if (RpcConstants.SOME_DEBUGGING) {
            logger.debug("Wrote message length {}:{} bytes (head:body). Message: " + msg,
                    getRawVarintSize(fullLength), fullLength);
        }
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Sent message. Ending writer index was {}.", buf.writerIndex());
        }
    } finally {
        // make sure to release Rpc Messages underlying byte buffers.
        //msg.release();
    }
}
From source file:org.apache.drill.exec.rpc.SaslDecryptionHandler.java
License:Apache License
public void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws IOException {
    if (!ctx.channel().isOpen()) {
        logger.trace("Channel closed before decoding the message of {} bytes", msg.readableBytes());
        msg.skipBytes(msg.readableBytes());
        return;
    }

    try {
        if (logger.isTraceEnabled()) {
            logger.trace("Trying to decrypt the encrypted message of size: {} with maxWrappedSize", msg.readableBytes());
        }

        // All the encrypted blocks are prefixed with it's length in network byte order (or BigEndian format). Netty's
        // default Byte order of ByteBuf is Little Endian, so we cannot just do msg.getInt() as that will read the 4
        // octets in little endian format.
        //
        // We will read the length of one complete encrypted chunk and decode that.
        msg.getBytes(msg.readerIndex(), lengthOctets.array(), 0, RpcConstants.LENGTH_FIELD_LENGTH);
        final int wrappedMsgLength = lengthOctets.getInt(0);
        msg.skipBytes(RpcConstants.LENGTH_FIELD_LENGTH);

        // Since lengthBasedFrameDecoder will ensure we have enough bytes it's good to have this check here.
        assert (msg.readableBytes() == wrappedMsgLength);

        // Uncomment the below code if msg can contain both of Direct and Heap ByteBuf. Currently Drill only supports
        // DirectByteBuf so the below condition will always be false. If the msg are always HeapByteBuf then in
        // addition also remove the allocation of encodedMsg from constructor.
        /*if (msg.hasArray()) {
            wrappedMsg = msg.array();
        } else {
            if (RpcConstants.EXTRA_DEBUGGING) {
                logger.debug("The input bytebuf is not backed by a byte array so allocating a new one");
            }*/

        // Check if the wrappedMsgLength doesn't exceed agreed upon maxWrappedSize. As per SASL RFC 2222/4422 we
        // should close the connection since it represents a security attack.
        if (wrappedMsgLength > maxWrappedSize) {
            throw new RpcException(String.format("Received encoded buffer size: %d is larger than negotiated "
                    + "maxWrappedSize: %d. Closing the connection as this is unexpected.",
                    wrappedMsgLength, maxWrappedSize));
        }

        final byte[] wrappedMsg = encodedMsg;
        // Copy the wrappedMsgLength of bytes into the byte array
        msg.getBytes(msg.readerIndex(), wrappedMsg, 0, wrappedMsgLength);
        //}

        // SASL library always copies the origMsg internally to a new byte array
        // and return another new byte array after decrypting the message. The memory for this
        // will be Garbage collected by JVM since SASL Library releases it's reference after
        // returning the byte array.
        final byte[] decodedMsg = saslCodec.unwrap(wrappedMsg, 0, wrappedMsgLength);

        if (logger.isTraceEnabled()) {
            logger.trace("Successfully decrypted incoming message. Length after decryption: {}", decodedMsg.length);
        }

        // Update the msg reader index since we have decrypted this chunk
        msg.skipBytes(wrappedMsgLength);

        // Allocate a new Bytebuf to copy the decrypted chunk.
        final ByteBuf decodedMsgBuf = ctx.alloc().buffer(decodedMsg.length);
        decodedMsgBuf.writeBytes(decodedMsg);

        // Add the decrypted chunk to output buffer for next handler to take care of it.
        out.add(decodedMsgBuf);
    } catch (OutOfMemoryException e) {
        logger.warn("Failure allocating buffer on incoming stream due to memory limits.");
        msg.resetReaderIndex();
        outOfMemoryHandler.handle();
    } catch (IOException e) {
        logger.error("Something went wrong while unwrapping the message: {} with MaxEncodeSize: {} and "
                + "error: {}", msg, maxWrappedSize, e.getMessage());
        throw e;
    }
}