Example usage for io.netty.buffer CompositeByteBuf addComponent

List of usage examples for io.netty.buffer CompositeByteBuf addComponent

Introduction

On this page you can find example usages of io.netty.buffer CompositeByteBuf addComponent.

Prototype

public CompositeByteBuf addComponent(ByteBuf buffer) 

Document

Add the given ByteBuf.
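
The examples below share a common pattern: retain the component, add it with addComponent(ByteBuf), then advance the composite's writer index by hand, because this overload does not advance it itself. A minimal, self-contained sketch of that pattern (the buffer contents are invented for illustration):

import io.netty.buffer.ByteBuf;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.Unpooled;

public class AddComponentSketch {
    public static void main(String[] args) {
        ByteBuf header = Unpooled.copiedBuffer(new byte[] { 1, 2, 3, 4 });
        ByteBuf payload = Unpooled.copiedBuffer(new byte[] { 5, 6, 7, 8 });

        // Compose the two buffers without copying their contents.
        CompositeByteBuf composite = Unpooled.compositeBuffer(2);
        composite.addComponent(header);
        composite.addComponent(payload);

        // addComponent(ByteBuf) does not move the writer index, so advance it
        // manually to make the added bytes readable.
        composite.writerIndex(header.readableBytes() + payload.readableBytes());

        System.out.println(composite.readableBytes()); // prints 8

        // The composite now owns both components; releasing it releases them too.
        composite.release();
    }
}

Newer Netty 4.1 releases also provide an addComponent overload taking an increaseWriterIndex flag, which removes the need for the manual writerIndex call.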

Usage

From source file:openbns.commons.net.codec.sts.HttpObjectAggregator.java

License:Apache License

@Override
protected void decode(final ChannelHandlerContext ctx, StsObject msg, List<Object> out) throws Exception {
    FullStsMessage currentMessage = this.currentMessage;

    if (msg instanceof StsMessage) {
        tooLongFrameFound = false;
        assert currentMessage == null;

        StsMessage m = (StsMessage) msg;

        if (!m.getDecoderResult().isSuccess()) {
            this.currentMessage = null;
            out.add(ReferenceCountUtil.retain(m));
            return;
        }
        if (msg instanceof StsRequest) {
            StsRequest header = (StsRequest) msg;
            this.currentMessage = currentMessage = new DefaultFullStsRequest(header.getProtocolVersion(),
                    header.getMethod(), header.getUri(),
                    Unpooled.compositeBuffer(maxCumulationBufferComponents));
        } else if (msg instanceof StsResponse) {
            StsResponse header = (StsResponse) msg;
            this.currentMessage = currentMessage = new DefaultFullStsResponse(header.getStatus(),
                    Unpooled.compositeBuffer(maxCumulationBufferComponents));
        } else {
            throw new Error();
        }

        currentMessage.headers().set(m.headers());
    } else if (msg instanceof StsContent) {
        if (tooLongFrameFound) {
            if (msg instanceof LastStsContent) {
                this.currentMessage = null;
            }
            // already detect the too long frame so just discard the content
            return;
        }
        assert currentMessage != null;

        // Merge the received chunk into the content of the current message.
        StsContent chunk = (StsContent) msg;
        CompositeByteBuf content = (CompositeByteBuf) currentMessage.content();

        if (content.readableBytes() > maxContentLength - chunk.content().readableBytes()) {
            tooLongFrameFound = true;

            // release current message to prevent leaks
            currentMessage.release();
            this.currentMessage = null;

            throw new TooLongFrameException("HTTP content length exceeded " + maxContentLength + " bytes.");
        }

        // Append the content of the chunk
        if (chunk.content().isReadable()) {
            chunk.retain();
            content.addComponent(chunk.content());
            content.writerIndex(content.writerIndex() + chunk.content().readableBytes());
        }

        final boolean last;
        if (!chunk.getDecoderResult().isSuccess()) {
            currentMessage.setDecoderResult(DecoderResult.failure(chunk.getDecoderResult().cause()));
            last = true;
        } else {
            last = chunk instanceof LastStsContent;
        }

        if (last) {
            this.currentMessage = null;

            // Merge trailing headers into the message.
            if (chunk instanceof LastStsContent) {
                LastStsContent trailer = (LastStsContent) chunk;
                currentMessage.headers().add(trailer.trailingHeaders());
            }

            // Set the 'Content-Length' header.
            currentMessage.headers().set(StsHeaders.Names.CONTENT_LENGTH,
                    String.valueOf(content.readableBytes()));

            // All done
            out.add(currentMessage);
        }
    } else {
        throw new Error();
    }
}

From source file:org.apache.distributedlog.io.IdentityCompressionCodec.java

License:Apache License

@Override
public ByteBuf compress(ByteBuf uncompressed, int headerLen) {
    checkNotNull(uncompressed);
    checkArgument(uncompressed.readableBytes() >= 0);
    if (headerLen == 0) {
        return uncompressed.retain();
    } else {
        CompositeByteBuf composited = PooledByteBufAllocator.DEFAULT.compositeBuffer(2);
        composited.addComponent(PooledByteBufAllocator.DEFAULT.buffer(headerLen, headerLen));
        composited.addComponent(uncompressed.retain());
        return composited;
    }
}

From source file:org.apache.drill.exec.rpc.ChunkCreationHandler.java

License:Apache License

@Override
protected void encode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {

    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("ChunkCreationHandler called with msg {} of size {} with chunkSize {}", msg,
                msg.readableBytes(), chunkSize);
    }

    if (!ctx.channel().isOpen()) {
        logger.debug("Channel closed, skipping encode inside {}.", RpcConstants.CHUNK_CREATION_HANDLER);
        msg.release();
        return;
    }

    // Calculate the number of chunks based on configured chunk size and input msg size
    int numChunks = (int) Math.ceil((double) msg.readableBytes() / chunkSize);

    // Initialize a composite buffer to hold numChunks chunk.
    final CompositeByteBuf cbb = ctx.alloc().compositeBuffer(numChunks);

    int cbbWriteIndex = 0;
    int currentChunkLen = min(msg.readableBytes(), chunkSize);

    // Create slices of chunkSize from input msg and add it to the composite buffer.
    while (numChunks > 0) {
        final ByteBuf chunkBuf = msg.slice(msg.readerIndex(), currentChunkLen);
        chunkBuf.retain();
        cbb.addComponent(chunkBuf);
        cbbWriteIndex += currentChunkLen;
        msg.skipBytes(currentChunkLen);
        --numChunks;
        currentChunkLen = min(msg.readableBytes(), chunkSize);
    }

    // Update the writerIndex of composite byte buffer. Netty doesn't do it automatically.
    cbb.writerIndex(cbbWriteIndex);

    // Add the final composite bytebuf into output buffer.
    out.add(cbb);
}

From source file:org.apache.drill.exec.rpc.RpcEncoder.java

License:Apache License

@Override
protected void encode(ChannelHandlerContext ctx, OutboundRpcMessage msg, List<Object> out) throws Exception {
    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("Rpc Encoder called with msg {}", msg);
    }

    if (!ctx.channel().isOpen()) {
        //output.add(ctx.alloc().buffer(0));
        logger.debug("Channel closed, skipping encode.");
        msg.release();
        return;
    }

    try {
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Encoding outbound message {}", msg);
        }
        // first we build the RpcHeader
        RpcHeader header = RpcHeader.newBuilder() //
                .setMode(msg.mode) //
                .setCoordinationId(msg.coordinationId) //
                .setRpcType(msg.rpcType).build();

        // figure out the full length
        int headerLength = header.getSerializedSize();
        int protoBodyLength = msg.pBody.getSerializedSize();
        int rawBodyLength = msg.getRawBodySize();
        int fullLength = //
                HEADER_TAG_LENGTH + getRawVarintSize(headerLength) + headerLength + //
                        PROTOBUF_BODY_TAG_LENGTH + getRawVarintSize(protoBodyLength) + protoBodyLength; //

        if (rawBodyLength > 0) {
            fullLength += (RAW_BODY_TAG_LENGTH + getRawVarintSize(rawBodyLength) + rawBodyLength);
        }

        ByteBuf buf = ctx.alloc().buffer();
        OutputStream os = new ByteBufOutputStream(buf);
        CodedOutputStream cos = CodedOutputStream.newInstance(os);

        // write full length first (this is length delimited stream).
        cos.writeRawVarint32(fullLength);

        // write header
        cos.writeRawVarint32(HEADER_TAG);
        cos.writeRawVarint32(headerLength);
        header.writeTo(cos);

        // write protobuf body length and body
        cos.writeRawVarint32(PROTOBUF_BODY_TAG);
        cos.writeRawVarint32(protoBodyLength);
        msg.pBody.writeTo(cos);

        // if exists, write data body and tag.
        if (msg.getRawBodySize() > 0) {
            if (RpcConstants.EXTRA_DEBUGGING) {
                logger.debug("Writing raw body of size {}", msg.getRawBodySize());
            }

            cos.writeRawVarint32(RAW_BODY_TAG);
            cos.writeRawVarint32(rawBodyLength);
            cos.flush(); // need to flush so that dbody goes after if cos is caching.

            CompositeByteBuf cbb = new CompositeByteBuf(buf.alloc(), true, msg.dBodies.length + 1);
            cbb.addComponent(buf);
            int bufLength = buf.readableBytes();
            for (ByteBuf b : msg.dBodies) {
                cbb.addComponent(b);
                bufLength += b.readableBytes();
            }
            cbb.writerIndex(bufLength);
            out.add(cbb);
        } else {
            cos.flush();
            out.add(buf);
        }

        if (RpcConstants.SOME_DEBUGGING) {
            logger.debug("Wrote message length {}:{} bytes (head:body).  Message: " + msg,
                    getRawVarintSize(fullLength), fullLength);
        }
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Sent message.  Ending writer index was {}.", buf.writerIndex());
        }
    } finally {
        // make sure to release Rpc Messages underlying byte buffers.
        //msg.release();
    }
}

From source file:org.apache.helix.ipc.netty.NettyHelixIPCService.java

License:Apache License

/**
 * Sends a message to all partitions with a given state in the cluster.
 */
@Override
public void send(HelixAddress destination, int messageType, UUID messageId, ByteBuf message) {
    if (LOG.isTraceEnabled()) {
        LOG.trace("Sending " + messageId);
    }
    // Send message
    try {
        // Get list of channels
        List<Channel> channels = channelMap.get(destination.getSocketAddress());
        if (channels == null) {
            synchronized (channelMap) {
                channels = channelMap.get(destination.getSocketAddress());
                if (channels == null) {
                    channels = new ArrayList<Channel>(config.getNumConnections());
                    for (int i = 0; i < config.getNumConnections(); i++) {
                        channels.add(null);
                    }
                    channelMap.put(destination.getSocketAddress(), channels);
                }
            }
        }

        // Pick the channel for this scope
        int idx = (Integer.MAX_VALUE & destination.getScope().hashCode()) % channels.size();
        Channel channel = channels.get(idx);
        if (channel == null || !channel.isOpen()) {
            synchronized (channelMap) {
                channel = channels.get(idx);
                if (channel == null || !channel.isOpen()) {
                    channel = clientBootstrap.connect(destination.getSocketAddress()).sync().channel();
                    channels.set(idx, channel);
                    statChannelOpen.inc();
                }
            }
        }

        // Compute total length
        int headerLength = NUM_LENGTH_FIELDS * (Integer.SIZE / 8) + (Integer.SIZE / 8) * 2 // version, type
                + (Long.SIZE / 8) * 2 // 128 bit UUID
                + getLength(destination.getScope().getCluster())
                + getLength(destination.getScope().getResource())
                + getLength(destination.getScope().getPartition())
                + getLength(destination.getScope().getState()) + getLength(config.getInstanceName())
                + getLength(destination.getInstanceName());
        int messageLength = message == null ? 0 : message.readableBytes();

        // Build message header
        ByteBuf headerBuf = channel.alloc().buffer(headerLength);
        headerBuf.writeInt(MESSAGE_VERSION).writeInt(messageType).writeLong(messageId.getMostSignificantBits())
                .writeLong(messageId.getLeastSignificantBits());
        writeStringWithLength(headerBuf, destination.getScope().getCluster());
        writeStringWithLength(headerBuf, destination.getScope().getResource());
        writeStringWithLength(headerBuf, destination.getScope().getPartition());
        writeStringWithLength(headerBuf, destination.getScope().getState());
        writeStringWithLength(headerBuf, config.getInstanceName());
        writeStringWithLength(headerBuf, destination.getInstanceName());

        // Compose message header and payload
        headerBuf.writeInt(messageLength);
        CompositeByteBuf fullByteBuf = channel.alloc().compositeBuffer(2);
        fullByteBuf.addComponent(headerBuf);
        fullByteBuf.writerIndex(headerBuf.readableBytes());
        if (message != null) {
            fullByteBuf.addComponent(message);
            fullByteBuf.writerIndex(fullByteBuf.writerIndex() + message.readableBytes());
        }

        // Send
        NettyHelixIPCBackPressureHandler backPressureHandler = channel.pipeline()
                .get(NettyHelixIPCBackPressureHandler.class);
        backPressureHandler.waitUntilWritable(channel);
        channel.writeAndFlush(fullByteBuf);

        statTxMsg.mark();
        statTxBytes.mark(fullByteBuf.readableBytes());
    } catch (Exception e) {
        statError.inc();
        throw new IllegalStateException("Could not send message to " + destination, e);
    }
}

From source file:org.apache.spark.network.util.TransportFrameDecoder.java

License:Apache License

private ByteBuf decodeNext() {
    long frameSize = decodeFrameSize();
    if (frameSize == UNKNOWN_FRAME_SIZE || totalSize < frameSize) {
        return null;
    }

    // Reset size for next frame.
    nextFrameSize = UNKNOWN_FRAME_SIZE;

    Preconditions.checkArgument(frameSize < MAX_FRAME_SIZE, "Too large frame: %s", frameSize);
    Preconditions.checkArgument(frameSize > 0, "Frame length should be positive: %s", frameSize);

    // If the first buffer holds the entire frame, return it.
    int remaining = (int) frameSize;
    if (buffers.getFirst().readableBytes() >= remaining) {
        return nextBufferForFrame(remaining);
    }

    // Otherwise, create a composite buffer.
    CompositeByteBuf frame = buffers.getFirst().alloc().compositeBuffer(Integer.MAX_VALUE);
    while (remaining > 0) {
        ByteBuf next = nextBufferForFrame(remaining);
        remaining -= next.readableBytes();
        frame.addComponent(next).writerIndex(frame.writerIndex() + next.readableBytes());
    }
    assert remaining == 0;
    return frame;
}

From source file:org.apache.spark.util.TransportFrameDecoder.java

License:Apache License

private ByteBuf decodeNext() throws Exception {
    long frameSize = decodeFrameSize();
    if (frameSize == UNKNOWN_FRAME_SIZE || totalSize < frameSize) {
        return null;
    }

    // Reset size for next frame.
    nextFrameSize = UNKNOWN_FRAME_SIZE;

    Preconditions.checkArgument(frameSize < MAX_FRAME_SIZE, "Too large frame: %s", frameSize);
    Preconditions.checkArgument(frameSize > 0, "Frame length should be positive: %s", frameSize);

    // If the first buffer holds the entire frame, return it.
    int remaining = (int) frameSize;
    if (buffers.getFirst().readableBytes() >= remaining) {
        return nextBufferForFrame(remaining);
    }

    // Otherwise, create a composite buffer.
    CompositeByteBuf frame = buffers.getFirst().alloc().compositeBuffer(Integer.MAX_VALUE);
    while (remaining > 0) {
        ByteBuf next = nextBufferForFrame(remaining);
        remaining -= next.readableBytes();
        frame.addComponent(next).writerIndex(frame.writerIndex() + next.readableBytes());
    }
    assert remaining == 0;
    return frame;
}

From source file:org.asynchttpclient.netty.util.ByteBufUtils.java

License:Open Source License

public static String byteBuf2String(Charset charset, ByteBuf... bufs) throws CharacterCodingException {
    if (charset == UTF_8 || charset == US_ASCII) {
        return Utf8ByteBufCharsetDecoder.decodeUtf8(bufs);
    } else {
        CompositeByteBuf composite = Unpooled.compositeBuffer(bufs.length);

        try {
            for (ByteBuf buf : bufs) {
                buf.retain();
                composite.addComponent(buf);
            }

            return composite.toString(charset);

        } finally {
            composite.release();
        }
    }
}

From source file:org.cloudfoundry.reactor.util.MultipartHttpOutbound.java

License:Apache License

public Mono<Void> done() {
    AsciiString boundary = generateMultipartBoundary();
    ByteBufAllocator allocator = this.outbound.delegate().alloc();

    CompositeByteBuf bodyBuf = allocator.compositeBuffer();
    this.partConsumers
            .forEach(partConsumer -> bodyBuf.addComponent(getPart(allocator, boundary, partConsumer)));
    bodyBuf.addComponent(getCloseDelimiter(allocator, boundary));

    return this.outbound.removeTransferEncodingChunked()
            .addHeader(CONTENT_TYPE, MULTIPART_FORM_DATA.concat(BOUNDARY_PREAMBLE).concat(boundary))
            .addHeader(CONTENT_LENGTH, String.valueOf(bodyBuf.capacity()))
            .sendOne(bodyBuf.writerIndex(bodyBuf.capacity()));
}

From source file:org.cloudfoundry.reactor.util.MultipartHttpOutbound.java

License:Apache License

private static ByteBuf getPart(ByteBufAllocator allocator, AsciiString boundary,
        Consumer<PartHttpOutbound> partConsumer) {
    PartHttpOutbound part = new PartHttpOutbound();
    partConsumer.accept(part);

    CompositeByteBuf body = allocator.compositeBuffer();
    body.addComponent(getDelimiter(allocator, boundary));
    body.addComponent(getHeaders(allocator, part.getHeaders()));
    body.addComponent(getData(allocator, part.getInputStream()));

    return body.writerIndex(body.capacity());
}