List of usage examples for io.netty.buffer ByteBuf readBytes
Primary signature: public abstract int readBytes(GatheringByteChannel out, int length) throws IOException; — note that the examples below also use the related overloads readBytes(ByteBuf dst, int length) and readBytes(OutputStream out, int length).
From source file:com.replaymod.sponge.recording.AbstractRecorder.java
License:MIT License
/**
 * Write the specified packet data to the output streams.
 *
 * @param fromServer Whether the packet is client or server bound
 * @param data The packet data (packet id and payload)
 * @throws IOException If writing to the combined output fails
 */
protected synchronized void writePacket(boolean fromServer, ByteBuf data) throws IOException {
    DataOutputStream stream = getCombinedOutput();
    // Pack the timestamp and direction into one var-int: the lowest bit is the
    // direction flag (0 = server bound per the ternary, 1 = client bound).
    long directionBit = fromServer ? 0 : 1;
    writeVar(stream, getDuration() << 1 | directionBit);
    // Length-prefix the payload, then stream the packet bytes straight out.
    int payloadLength = data.readableBytes();
    writeVar(stream, payloadLength);
    data.readBytes(stream, payloadLength);
}
From source file:com.sohu.jafka.http.HttpServerHandler.java
License:Apache License
/**
 * Handles one inbound HTTP message part. On the {@link HttpRequest} part it rejects
 * non-POST requests, initializes the {@code body} accumulator and copies selected
 * headers into {@code args}; on {@link HttpContent} parts it appends the content bytes
 * to {@code body}, and on the final part it hands the completed request to the server
 * handler and writes the response.
 */
@Override
protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
    if (msg instanceof HttpRequest) {
        HttpRequest request = this.request = (HttpRequest) msg;
        if (HttpHeaders.is100ContinueExpected(request)) {
            send100Continue(ctx);
        }
        // Fresh accumulators for this request's body bytes and routing arguments.
        body = new ByteArrayOutputStream(64);
        args = new HashMap<String, String>(4);
        // Only POST is accepted; anything else is answered with 405 immediately.
        if (request.getMethod() != HttpMethod.POST) {
            sendStatusMessage(ctx, HttpResponseStatus.METHOD_NOT_ALLOWED, "POST METHOD REQUIRED");
            return;
        }
        HttpHeaders headers = request.headers();
        // NOTE(review): contentType is read but never used here — presumably intended
        // to distinguish text vs octet-stream payloads; confirm before removing.
        String contentType = headers.get("Content-Type");
        // Routing metadata is carried in custom request headers.
        args.put("request_key", headers.get("request_key"));
        args.put("topic", headers.get("topic"));
        args.put("partition", headers.get("partition"));
    }
    if (msg instanceof HttpContent) {
        HttpContent httpContent = (HttpContent) msg;
        ByteBuf content = httpContent.content();
        if (content.isReadable()) {
            // Drain this chunk's readable bytes into the body accumulator.
            content.readBytes(body, content.readableBytes());
        }
        if (msg instanceof LastHttpContent) {
            // Last chunk received: dispatch the complete request body.
            if (server.handler != null) {
                server.handler.handle(args, body.toByteArray());
            }
            if (!writeResponse(ctx)) {
                // If keep-alive is off, close the connection once the content is fully written.
                ctx.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
            }
            // Release per-request state so it cannot leak across requests.
            body = null;
            args = null;
        }
    }
}
From source file:com.tesora.dve.db.mysql.portal.protocol.Packet.java
License:Open Source License
/**
 * Consumes bytes from {@code input}, accumulating one framed packet across calls.
 * The frame header is 4 bytes: a 3-byte payload length followed by a 1-byte sequence
 * number (read via getUnsignedMedium(0) / getUnsignedByte(3)). Payload chunks are
 * collected into {@code payload} as retained slices. Returns {@code true} once the
 * packet is complete ("sealed"); a chunk of exactly MAX_PAYLOAD bytes signals an
 * extended packet, so the header is cleared and decoding continues with the next chunk.
 *
 * NOTE(review): framing matches the MySQL wire protocol packet layout — this class
 * lives under db.mysql, but confirm against the protocol spec before relying on it.
 */
public boolean decodeMore(ByteBuf input) {
    if (!sealed) {
        // Fill the fixed-size header buffer first; it may take several calls.
        int transferToHeader = Math.min(header.writableBytes(), input.readableBytes());
        input.readBytes(header, transferToHeader);
        if (header.readableBytes() < HEADER_LENGTH) {
            return false; // we don't have enough to read the header.
        }
        int chunkLength = header.getUnsignedMedium(0);
        lastDecodedSeq = header.getUnsignedByte(3);
        // How much of the current chunk is already buffered beyond the last full packet.
        int chunkAlreadyReceived = payload.writerIndex() - totalFullBytes;
        int chunkExpecting = chunkLength - chunkAlreadyReceived;
        int payloadTransfer = Math.min(chunkExpecting, input.readableBytes());
        // Retain the slice: it becomes a component of the composite payload buffer.
        ByteBuf slice = input.readSlice(payloadTransfer).retain();
        payload.addComponent(slice);
        // Need to move the writer index; addComponent doesn't advance it automatically.
        payload.writerIndex(payload.writerIndex() + slice.readableBytes());
        // Recalculate how much we are still expecting for this chunk.
        chunkAlreadyReceived = payload.writerIndex() - totalFullBytes;
        chunkExpecting = chunkLength - chunkAlreadyReceived;
        if (chunkExpecting == 0) {
            // Chunk complete: validate the sequence number, but only warn on mismatch.
            if (lastDecodedSeq != expectedSequence) {
                String message = context + " , sequence problem decoding packet, expected=" + expectedSequence + " , decoded=" + lastDecodedSeq;
                logger.warn(message);
            }
            expectedSequence++;
            // Finished this packet; mark how many full packet bytes we've read.
            totalFullBytes = payload.writerIndex();
            if (chunkLength < MAX_PAYLOAD) {
                sealed = true;
            } else {
                // An extended packet was indicated; prepare the read of the next header.
                header.clear();
            }
        }
    }
    return sealed;
}
From source file:com.yahoo.pulsar.client.impl.ProducerImpl.java
License:Apache License
/**
 * Strips the checksum from the {@link OpSendMsg} command frame if one is present;
 * otherwise leaves the frame untouched. The frame layout handled here is
 * [total-size(4)] [cmd-size(4)] [cmd(cmdSize)] [magic(4)+checksum(2)] [metadata/payload...];
 * removal rewrites the total-size field, shifts the metadata left over the checksum
 * bytes, and shrinks the header frame's capacity accordingly.
 *
 * @param op the pending send operation whose command buffer may carry a checksum
 */
private void stripChecksum(OpSendMsg op) {
    op.cmd.markReaderIndex();
    int totalMsgBufSize = op.cmd.readableBytes();
    DoubleByteBuf msg = getDoubleByteBuf(op.cmd);
    if (msg != null) {
        ByteBuf headerFrame = msg.getFirst();
        msg.markReaderIndex();
        headerFrame.markReaderIndex();
        try {
            headerFrame.skipBytes(4); // skip [total-size]
            int cmdSize = (int) headerFrame.readUnsignedInt();
            // Verify a checksum is actually present after the command section.
            headerFrame.skipBytes(cmdSize);
            if (!hasChecksum(headerFrame)) {
                headerFrame.resetReaderIndex();
                return;
            }
            int headerSize = 4 + 4 + cmdSize; // [total-size] [cmd-length] [cmd-size]
            int checksumSize = 4 + 2; // [magic-number] [checksum-size]
            int checksumMark = (headerSize + checksumSize); // offset where metadata starts
            int metaPayloadSize = (totalMsgBufSize - checksumMark); // metadataPayload = totalSize - checksumMark
            int newTotalFrameSizeLength = 4 + cmdSize + metaPayloadSize; // new total-size without checksum
            headerFrame.resetReaderIndex();
            int headerFrameSize = headerFrame.readableBytes();
            headerFrame.setInt(0, newTotalFrameSizeLength); // rewrite new [total-size]
            // Slice covering only the metadata that follows the checksum bytes.
            ByteBuf metadata = headerFrame.slice(checksumMark, headerFrameSize - checksumMark);
            // Set write-index back so the metadata is copied over the checksum bytes.
            headerFrame.writerIndex(headerSize);
            metadata.readBytes(headerFrame, metadata.readableBytes());
            headerFrame.capacity(headerFrameSize - checksumSize); // reduce capacity by removed checksum bytes
            headerFrame.resetReaderIndex();
        } finally {
            // Always restore the outer command buffer's reader index.
            op.cmd.resetReaderIndex();
        }
    } else {
        log.warn("[{}] Failed while casting {} into DoubleByteBuf", producerName, op.cmd.getClass().getName());
    }
}
From source file:io.datty.msgpack.core.AbstractMessageReader.java
License:Apache License
/**
 * Reads a length-prefixed binary value from {@code buffer}.
 * The length comes from {@link #readBinaryHeader(byte, ByteBuf)} applied to the
 * already-consumed format byte {@code b}.
 *
 * @param b      the format byte previously read from the stream
 * @param buffer the source buffer positioned at the binary payload
 * @param copy   when {@code true} the payload is copied into a new buffer;
 *               otherwise a zero-copy slice over {@code buffer} is returned
 * @return the payload as a new buffer or a slice, per {@code copy}
 * @throws MessageParseException if the buffer holds fewer than {@code length} bytes
 */
public ByteBuf readBytes(byte b, ByteBuf buffer, boolean copy) {
    int length = readBinaryHeader(b, buffer);
    if (length > buffer.readableBytes()) {
        throw new MessageParseException(
                "insufficient buffer length: " + buffer.readableBytes() + ", required length: " + length);
    }
    if (!copy) {
        // Zero-copy path: expose a view and manually advance past the payload.
        ByteBuf view = buffer.slice(buffer.readerIndex(), length);
        buffer.readerIndex(buffer.readerIndex() + length);
        return view;
    }
    // Copy path: transfer the payload into a freshly allocated buffer.
    ByteBuf copied = buffer.alloc().buffer(length);
    buffer.readBytes(copied, length);
    return copied;
}
From source file:io.datty.msgpack.core.ValueMessageReader.java
License:Apache License
/**
 * Reads a length-prefixed binary value from {@code source}.
 *
 * @param source the buffer positioned at a binary header followed by the payload
 * @param copy   when {@code true} the payload is copied into a new buffer;
 *               otherwise a zero-copy slice over {@code source} is returned
 * @return the payload as a new buffer or a slice, per {@code copy}
 * @throws MessageParseException if fewer than {@code length} bytes are readable
 */
public ByteBuf readBinary(ByteBuf source, boolean copy) {
    int length = readBinaryHeader(source);
    if (length > source.readableBytes()) {
        throw new MessageParseException(
                "insufficient buffer length: " + source.readableBytes() + ", required length: " + length);
    }
    if (!copy) {
        // Zero-copy path: hand out a view, then skip the payload in the source.
        ByteBuf view = source.slice(source.readerIndex(), length);
        source.skipBytes(length);
        return view;
    }
    // Copy path: move the payload into a freshly allocated buffer.
    ByteBuf copied = source.alloc().buffer(length);
    source.readBytes(copied, length);
    return copied;
}
From source file:io.gatling.http.client.body.multipart.impl.PartImpl.java
License:Apache License
/**
 * Transfers as many readable bytes as possible from {@code source} to {@code target}.
 * Uses the gathering fast path when the channel supports it; otherwise writes the
 * buffer's NIO views one at a time, stopping at the first partial write. If bytes
 * remain unread afterwards the target is flagged slow; otherwise the part advances
 * to {@code sourceFullyWrittenState}.
 *
 * @param source                  buffer holding the bytes to send
 * @param target                  destination channel
 * @param sourceFullyWrittenState state to enter once the source is drained
 * @return the number of bytes actually written
 * @throws IOException if the channel write fails
 */
long transferTo(ByteBuf source, WritableByteChannel target, PartImplState sourceFullyWrittenState) throws IOException {
    int transferred = 0;
    if (target instanceof GatheringByteChannel) {
        // Fast path: readBytes advances the reader index itself.
        transferred = source.readBytes((GatheringByteChannel) target, source.readableBytes());
    } else {
        for (ByteBuffer byteBuffer : source.nioBuffers()) {
            int len = byteBuffer.remaining();
            int written = target.write(byteBuffer);
            transferred += written;
            if (written != len) {
                // couldn't write full buffer, exit loop
                break;
            }
        }
        // assume this is a basic single ByteBuf
        // NOTE(review): advancing by the total written only matches a single-component
        // buffer; a true composite would need per-component accounting — confirm callers.
        source.readerIndex(source.readerIndex() + transferred);
    }
    if (source.isReadable()) {
        // Partial write: the channel could not absorb everything this round.
        slowTarget = true;
    } else {
        state = sourceFullyWrittenState;
    }
    return transferred;
}
From source file:io.haze.message.framed.FramedMessageDecoder.java
License:Apache License
/**
 * Decode a {@link MessageContext} to get the node id and message length, then build a
 * {@link io.haze.message.Message} and send it to the {@link Node}.
 *
 * Frames are [nodeId:short][length:int][payload:length bytes]; partially received
 * frames persist their nodeId/length in per-client {@code State} (-1 = not yet read)
 * so decoding can resume on the next buffer.
 *
 * @param context  The channel handler context.
 * @param message  The input message.
 * @param messages The output messages.
 *
 * @throws Exception If an error has occurred.
 */
@Override
public void decode(ChannelHandlerContext context, MessageContext message, List<Object> messages) throws Exception {
    ByteBuf buffer = ((BufferMessage) message.getMessage()).getBuffer();
    MessageReader reader = message.getTransportClient().getMessageReader();
    State state = (State) message.getTransportClient().getState(State.class);
    if (state == null) {
        state = new State();
    }
    try {
        while (true) {
            // set this each time, since we use a temporary buffer for each message
            reader.setBuffer(buffer);
            // Read the frame header fields only once each; -1 means "not read yet".
            if (state.nodeId < 0 && reader.hasShort()) {
                state.nodeId = reader.readShort();
            }
            if (state.length < 0 && reader.hasInt()) {
                state.length = reader.readInt();
            }
            if (state.length > -1 && state.length <= buffer.readableBytes()) {
                if (state.nodeId > -1 && state.nodeId < nodes.length) {
                    try {
                        // place the entire message into a temp buffer
                        ByteBuf tempBuffer = Unpooled.buffer();
                        buffer.readBytes(tempBuffer, state.length);
                        reader.setBuffer(tempBuffer);
                        messages.add(new FramedMessageContext(message.getTransportClient(),
                                nodes[state.nodeId].buildMessage(reader), state.nodeId));
                    } catch (Exception e) {
                        // invalid message: report it but keep decoding subsequent frames
                        dispatcher.writeException(message.getTransportClient(), e);
                        logger.error(e.getMessage(), e);
                    }
                } else {
                    // invalid node id — out of range for the configured nodes
                    dispatcher.writeError(message.getTransportClient(), "Invalid node: " + state.nodeId);
                }
                // reset values for the next message
                state.length = -1;
                state.nodeId = -1;
            } else {
                // no more complete messages in this buffer
                if (logger.isDebugEnabled()) {
                    for (Object object : messages) {
                        logger.debug(((MessageContext) object).getMessage().toString());
                    }
                }
                return;
            }
        }
    } catch (Exception e) {
        // the message couldn't be read, which means the rest of the stream is foobarred
        dispatcher.writeError(message.getTransportClient(), "Invalid message");
        // throw the exception to make sure the global handler catches it and disconnects the client
        throw e;
    } finally {
        // Persist partial-frame progress for the next decode call.
        message.getTransportClient().setState(state.getClass(), state);
    }
}
From source file:io.haze.message.framed.FramedMessageEncoder.java
License:Apache License
/**
 * Encode the encapsulated {@link io.haze.message.Message} into a {@link BufferMessage}
 * and then overwrite the old {@link io.haze.message.Message}. The output frame is
 * [nodeId:short][payloadLength:int][payload].
 *
 * @param context  The channel handler context.
 * @param message  The input message.
 * @param messages The output messages.
 */
@Override
public void encode(ChannelHandlerContext context, MessageContext message, List<Object> messages)
        throws Exception {
    BufferMessage bufferMessage = new BufferMessage(Unpooled.buffer());
    ByteBuf payloadBuffer = Unpooled.buffer();
    MessageWriter writer = message.getTransportClient().getMessageWriter();
    if (logger.isDebugEnabled()) {
        logger.debug(message.getMessage().toString());
    }
    // write node id
    writer.setBuffer(bufferMessage.getBuffer());
    writer.writeShort((short) ((FramedMessageContext) message).getNodeId());
    // serialize message into a separate buffer so we can extract the payload length
    writer.setBuffer(payloadBuffer);
    ((FramedMessageContext) message).getMessage().serialize(writer);
    // write payload length, then append the serialized message
    writer.setBuffer(bufferMessage.getBuffer());
    writer.writeInt(payloadBuffer.readableBytes());
    // grow the message buffer to hold header + payload, then copy the payload in
    bufferMessage.getBuffer()
            .capacity(bufferMessage.getBuffer().readableBytes() + payloadBuffer.readableBytes());
    payloadBuffer.readBytes(bufferMessage.getBuffer(), payloadBuffer.readableBytes());
    // Replace the logical message with its wire-format form.
    message.setMessage(bufferMessage);
    messages.add(message);
}
From source file:io.haze.transport.udp.UDPEncoder.java
License:Apache License
/**
 * Encode a {@link MessageContext} into a {@link ByteBuf}, splitting the message body
 * into datagrams of at most SAFE_PACKET_SIZE bytes. Each packet carries a fixed
 * header — flags(1), packetId(2), startPacketId(2), endPacketId(2), payloadLength(2) —
 * followed by the payload chunk. The running packet id is persisted in per-client
 * {@code State} across calls.
 *
 * @param context The channel handler context.
 * @param message The input message.
 * @param buffer  The output buffer.
 */
@Override
public void encode(ChannelHandlerContext context, MessageContext message, ByteBuf buffer) throws Exception {
    ByteBuf messageBuffer = ((BufferMessage) message.getMessage()).getBuffer();
    // Number of packets needed: payload size divided by chunk size, rounded up.
    int count = messageBuffer.readableBytes() / SAFE_PACKET_SIZE
            + ((messageBuffer.readableBytes() % SAFE_PACKET_SIZE == 0) ? 0 : 1);
    State state = (State) message.getTransportClient().getState(State.class);
    if (state == null) {
        state = new State();
    }
    try {
        int endPacketId = state.packetId - 1 + count;
        int startPacketId = state.packetId;
        while (messageBuffer.readableBytes() > 0) {
            int capacity = Math.min(SAFE_PACKET_SIZE, messageBuffer.readableBytes() + HEADERS_BYTE_SIZE);
            // Fixed-capacity buffer: one datagram, headers plus payload chunk.
            ByteBuf packetBuffer = Unpooled.buffer(capacity, capacity);
            // write flags, current packet id, start packet id, end packet id, payload length
            packetBuffer.writeByte(0);
            packetBuffer.writeShort(state.packetId++);
            packetBuffer.writeShort(startPacketId);
            packetBuffer.writeShort(endPacketId);
            packetBuffer.writeShort((short) (capacity - HEADERS_BYTE_SIZE));
            // read one packet worth of data from buffer and handle the packet
            messageBuffer.readBytes(packetBuffer, capacity - HEADERS_BYTE_SIZE);
            // NOTE(review): state.packetId was post-incremented above, so handlePacket
            // receives the NEXT id, not the id written into this packet — confirm intended.
            handlePacket((UDPTransportClient) message.getTransportClient(), state.packetId,
                    new DatagramPacket(packetBuffer, message.getTransportClient().getAddress()));
        }
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    } finally {
        // Persist the advanced packet id for the client's next message.
        message.getTransportClient().setState(state.getClass(), state);
    }
}