Example usage for io.netty.buffer ByteBufInputStream ByteBufInputStream

List of usage examples for io.netty.buffer ByteBufInputStream ByteBufInputStream

Introduction

In this page you can find the example usage for io.netty.buffer ByteBufInputStream ByteBufInputStream.

Prototype

public ByteBufInputStream(ByteBuf buffer) 

Source Link

Document

Creates a new stream which reads data from the specified buffer, starting at the current readerIndex and ending at the current writerIndex.

Usage

From source file:netty.WebSocketFrameHandler.java

License:Apache License

/**
 * Handles an inbound WebSocket frame. Text frames are deserialized from JSON
 * into an {@code ActionEvent} and dispatched to the game server; any other
 * frame type is rejected (ping/pong frames are already handled upstream).
 *
 * @param ctx   context of the channel the frame arrived on
 * @param frame the inbound WebSocket frame
 * @throws Exception if JSON deserialization of the frame body fails
 * @throws UnsupportedOperationException if the frame is not a text frame
 */
@Override
protected void channelRead0(ChannelHandlerContext ctx, WebSocketFrame frame) throws Exception {
    if (frame instanceof TextWebSocketFrame) {
        String request = ((TextWebSocketFrame) frame).text();
        // Fixed: the original logged "{} received {}" with only one argument,
        // leaving the second placeholder unfilled. One correct log line suffices.
        logger.info("{} received {}", ctx.channel(), request);

        // Deserialize directly from the frame's backing buffer (reads from
        // readerIndex to writerIndex) without copying into a String first.
        ByteBufInputStream byteBufInputStream = new ByteBufInputStream(frame.content());
        ActionEvent received = mapper.readValue(byteBufInputStream, ActionEvent.class);

        gameServer.dispatchAction(received, ctx.channel());
    } else {
        String message = "unsupported frame type: " + frame.getClass().getName();
        throw new UnsupportedOperationException(message);
    }
}

From source file:org.apache.bookkeeper.client.LedgerEntry.java

License:Apache License

/**
 * Exposes the content of this entry as an {@link InputStream}.
 *
 * <p>May be invoked only a single time: the first call hands ownership of the
 * internal buffer over to the returned stream (under the v2 wire protocol the
 * buffer is released automatically when the stream is closed) and clears the
 * internal reference.
 *
 * @return a stream giving access to the entry content
 * @throws IllegalStateException on any call after the first
 */
public InputStream getEntryInputStream() {
    checkState(null != data, "entry content can be accessed only once");
    final ByteBuf content = data;
    data = null;
    return new ByteBufInputStream(content);
}

From source file:org.apache.bookkeeper.common.coder.VarIntCoder.java

License:Apache License

/**
 * Decodes a variable-length encoded integer from the given buffer.
 *
 * @param buf buffer positioned at a var-int encoded value; must not be null
 * @return the decoded integer
 * @throws NullPointerException if {@code buf} is null
 * @throws IllegalStateException if the buffer does not contain a valid var-int
 */
@Override
public Integer decode(ByteBuf buf) {
    checkNotNull(buf, "Can not decode into a null input buffer");

    ByteBufInputStream input = new ByteBufInputStream(buf);

    try {
        return VarInt.decodeInt(input);
    } catch (IOException e) {
        // Fixed: preserve the underlying cause (the original dropped it) and
        // correct the message typo ("integration" -> "integer").
        throw new IllegalStateException("Failed to decode an integer from the provided buffer", e);
    }
}

From source file:org.apache.camel.component.netty4.NettyConverter.java

License:Apache License

/**
 * Camel type converter: wraps a Netty {@code ByteBuf} in an
 * {@link InputStream} view without copying its contents.
 *
 * @param buffer   the buffer to read (from readerIndex to writerIndex)
 * @param exchange the current exchange (unused; required by the converter signature)
 * @return a stream over the buffer's readable bytes
 */
@Converter
public static InputStream toInputStream(ByteBuf buffer, Exchange exchange) {
    final InputStream stream = new ByteBufInputStream(buffer);
    return stream;
}

From source file:org.apache.drill.exec.rpc.InboundRpcMessage.java

License:Apache License

/**
 * Returns the protobuf body of this message as a stream over the backing
 * buffer; no copy of the payload is made.
 *
 * @return a stream positioned at the protobuf payload
 */
public InputStream getProtobufBodyAsIS() {
    final InputStream protobufBody = new ByteBufInputStream(pBody);
    return protobufBody;
}

From source file:org.apache.drill.exec.rpc.RpcBus.java

License:Apache License

/**
 * Decodes a protobuf message of type {@code T} from the given buffer.
 *
 * @param pBody  buffer containing the serialized protobuf payload
 * @param parser protobuf parser for the target message type
 * @return the decoded message
 * @throws RpcException if the payload cannot be parsed
 */
public static <T> T get(ByteBuf pBody, Parser<T> parser) throws RpcException {
    final ByteBufInputStream stream = new ByteBufInputStream(pBody);
    try {
        return parser.parseFrom(stream);
    } catch (InvalidProtocolBufferException e) {
        final String message = String.format("Failure while decoding message with parser of type. %s",
                parser.getClass().getCanonicalName());
        throw new RpcException(message, e);
    }
}

From source file:org.apache.drill.exec.rpc.security.ServerAuthenticationHandler.java

License:Apache License

/**
 * Handles messages arriving on a connection that has not yet authenticated.
 * Only SASL handshake messages ({@code saslRequestTypeValue}) are accepted;
 * any other request type causes the connection to be dropped by throwing.
 *
 * @param connection the not-yet-authenticated client connection
 * @param rpcType    numeric type of the incoming request
 * @param pBody      protobuf-encoded request body
 * @param dBody      data body (unused here)
 * @param sender     callback used to respond to the client
 * @throws RpcException if a non-SASL request arrives before authentication
 */
@Override
public void handle(S connection, int rpcType, ByteBuf pBody, ByteBuf dBody, ResponseSender sender)
        throws RpcException {
    final String remoteAddress = connection.getRemoteAddress().toString();

    // exchange involves server "challenges" and client "responses" (initiated by client)
    if (saslRequestTypeValue == rpcType) {
        final SaslMessage saslResponse;
        try {
            saslResponse = SaslMessage.PARSER.parseFrom(new ByteBufInputStream(pBody));
        } catch (final InvalidProtocolBufferException e) {
            // Malformed SASL payload: report a failure response rather than throwing.
            handleAuthFailure(remoteAddress, sender, e, saslResponseType);
            return;
        }

        logger.trace("Received SASL message {} from {}", saslResponse.getStatus(), remoteAddress);
        // Dispatch on the SASL status; unknown statuses abort authentication.
        final SaslResponseProcessor processor = RESPONSE_PROCESSORS.get(saslResponse.getStatus());
        if (processor == null) {
            logger.info("Unknown message type from client from {}. Will stop authentication.", remoteAddress);
            handleAuthFailure(remoteAddress, sender, new SaslException("Received unexpected message"),
                    saslResponseType);
            return;
        }

        final SaslResponseContext<S, T> context = new SaslResponseContext<>(saslResponse, connection,
                remoteAddress, sender, requestHandler, saslResponseType);
        try {
            processor.process(context);
        } catch (final Exception e) {
            // Any processing failure is surfaced to the client as an auth failure.
            handleAuthFailure(remoteAddress, sender, e, saslResponseType);
        }
    } else {

        // this handler only handles messages of SASL_MESSAGE_VALUE type

        // the response type for this request type is likely known from UserRpcConfig,
        // but the client should not be making any requests before authenticating.
        // drop connection
        throw new RpcException(String.format(
                "Request of type %d is not allowed without authentication. "
                        + "Client on %s must authenticate before making requests. Connection dropped.",
                rpcType, remoteAddress));
    }
}

From source file:org.apache.drill.exec.rpc.user.UserServer.java

License:Apache License

/**
 * Dispatches a user RPC to the matching worker method based on its numeric
 * type. Each protobuf request body is decoded from the raw buffer and either
 * answered synchronously via {@code responseSender} or submitted to
 * {@code worker} for asynchronous completion.
 *
 * <p>Note: each {@code break} sits inside its {@code try} block; the catch
 * clauses all rethrow, so control never falls through between cases.
 *
 * @param connection     the client connection the request arrived on
 * @param rpcType        numeric RPC type identifying the request kind
 * @param pBody          protobuf-encoded request body
 * @param dBody          data body (unused by these request types)
 * @param responseSender callback used to send the response to the client
 * @throws RpcException if a request body cannot be decoded
 * @throws UnsupportedOperationException if {@code rpcType} is unrecognized
 */
@Override
protected void handle(UserClientConnectionImpl connection, int rpcType, ByteBuf pBody, ByteBuf dBody,
        ResponseSender responseSender) throws RpcException {
    switch (rpcType) {

    case RpcType.RUN_QUERY_VALUE:
        logger.debug("Received query to run.  Returning query handle.");
        try {
            final RunQuery query = RunQuery.PARSER.parseFrom(new ByteBufInputStream(pBody));
            final QueryId queryId = worker.submitWork(connection, query);
            // Respond immediately with the handle; results stream separately.
            responseSender.send(new Response(RpcType.QUERY_HANDLE, queryId));
            break;
        } catch (InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding RunQuery body.", e);
        }

    case RpcType.CANCEL_QUERY_VALUE:
        try {
            final QueryId queryId = QueryId.PARSER.parseFrom(new ByteBufInputStream(pBody));
            final Ack ack = worker.cancelQuery(queryId);
            responseSender.send(new Response(RpcType.ACK, ack));
            break;
        } catch (InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding QueryId body.", e);
        }

    case RpcType.RESUME_PAUSED_QUERY_VALUE:
        try {
            final QueryId queryId = QueryId.PARSER.parseFrom(new ByteBufInputStream(pBody));
            final Ack ack = worker.resumeQuery(queryId);
            responseSender.send(new Response(RpcType.ACK, ack));
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding QueryId body.", e);
        }
    case RpcType.GET_QUERY_PLAN_FRAGMENTS_VALUE:
        try {
            final GetQueryPlanFragments req = GetQueryPlanFragments.PARSER
                    .parseFrom(new ByteBufInputStream(pBody));
            responseSender
                    .send(new Response(RpcType.QUERY_PLAN_FRAGMENTS, worker.getQueryPlan(connection, req)));
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetQueryPlanFragments body.", e);
        }
    // Metadata requests below are submitted to the worker, which responds
    // asynchronously through responseSender.
    case RpcType.GET_CATALOGS_VALUE:
        try {
            final GetCatalogsReq req = GetCatalogsReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitCatalogMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetCatalogsReq body.", e);
        }
    case RpcType.GET_SCHEMAS_VALUE:
        try {
            final GetSchemasReq req = GetSchemasReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitSchemasMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetSchemasReq body.", e);
        }
    case RpcType.GET_TABLES_VALUE:
        try {
            final GetTablesReq req = GetTablesReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitTablesMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetTablesReq body.", e);
        }
    case RpcType.GET_COLUMNS_VALUE:
        try {
            final GetColumnsReq req = GetColumnsReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitColumnsMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetColumnsReq body.", e);
        }
    case RpcType.CREATE_PREPARED_STATEMENT_VALUE:
        try {
            final CreatePreparedStatementReq req = CreatePreparedStatementReq.PARSER
                    .parseFrom(new ByteBufInputStream(pBody));
            worker.submitPreparedStatementWork(connection, req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding CreatePreparedStatementReq body.", e);
        }
    default:
        throw new UnsupportedOperationException(
                String.format("UserServer received rpc of unknown type.  Type was %d.", rpcType));
    }
}

From source file:org.apache.drill.exec.rpc.user.UserServerRequestHandler.java

License:Apache License

/**
 * Dispatches a user RPC to the matching worker method based on its numeric
 * type. Each protobuf request body is decoded from the raw buffer and either
 * answered synchronously via {@code responseSender} or submitted to
 * {@code worker} for asynchronous completion.
 *
 * <p>Note: each {@code break} sits inside its {@code try} block; the catch
 * clauses all rethrow, so control never falls through between cases.
 *
 * @param connection     the client connection the request arrived on
 * @param rpcType        numeric RPC type identifying the request kind
 * @param pBody          protobuf-encoded request body
 * @param dBody          data body (unused by these request types)
 * @param responseSender callback used to send the response to the client
 * @throws RpcException if a request body cannot be decoded
 * @throws UnsupportedOperationException if {@code rpcType} is unrecognized
 */
@Override
public void handle(BitToUserConnection connection, int rpcType, ByteBuf pBody, ByteBuf dBody,
        ResponseSender responseSender) throws RpcException {
    switch (rpcType) {

    case RpcType.RUN_QUERY_VALUE:
        logger.debug("Received query to run.  Returning query handle.");
        try {
            final RunQuery query = RunQuery.PARSER.parseFrom(new ByteBufInputStream(pBody));
            final QueryId queryId = worker.submitWork(connection, query);
            // Respond immediately with the handle; results stream separately.
            responseSender.send(new Response(RpcType.QUERY_HANDLE, queryId));
            break;
        } catch (InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding RunQuery body.", e);
        }

    case RpcType.CANCEL_QUERY_VALUE:
        try {
            final QueryId queryId = QueryId.PARSER.parseFrom(new ByteBufInputStream(pBody));
            final Ack ack = worker.cancelQuery(queryId);
            responseSender.send(new Response(RpcType.ACK, ack));
            break;
        } catch (InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding QueryId body.", e);
        }

    case RpcType.RESUME_PAUSED_QUERY_VALUE:
        try {
            final QueryId queryId = QueryId.PARSER.parseFrom(new ByteBufInputStream(pBody));
            final Ack ack = worker.resumeQuery(queryId);
            responseSender.send(new Response(RpcType.ACK, ack));
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding QueryId body.", e);
        }
    case RpcType.GET_QUERY_PLAN_FRAGMENTS_VALUE:
        try {
            final GetQueryPlanFragments req = GetQueryPlanFragments.PARSER
                    .parseFrom(new ByteBufInputStream(pBody));
            responseSender
                    .send(new Response(RpcType.QUERY_PLAN_FRAGMENTS, worker.getQueryPlan(connection, req)));
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetQueryPlanFragments body.", e);
        }
    // Metadata requests below are submitted to the worker, which responds
    // asynchronously through responseSender.
    case RpcType.GET_CATALOGS_VALUE:
        try {
            final GetCatalogsReq req = GetCatalogsReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitCatalogMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetCatalogsReq body.", e);
        }
    case RpcType.GET_SCHEMAS_VALUE:
        try {
            final GetSchemasReq req = GetSchemasReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitSchemasMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetSchemasReq body.", e);
        }
    case RpcType.GET_TABLES_VALUE:
        try {
            final GetTablesReq req = GetTablesReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitTablesMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetTablesReq body.", e);
        }
    case RpcType.GET_COLUMNS_VALUE:
        try {
            final GetColumnsReq req = GetColumnsReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitColumnsMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding GetColumnsReq body.", e);
        }
    case RpcType.CREATE_PREPARED_STATEMENT_VALUE:
        try {
            final CreatePreparedStatementReq req = CreatePreparedStatementReq.PARSER
                    .parseFrom(new ByteBufInputStream(pBody));
            worker.submitPreparedStatementWork(connection, req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            throw new RpcException("Failure while decoding CreatePreparedStatementReq body.", e);
        }
    case RpcType.GET_SERVER_META_VALUE:
        try {
            final GetServerMetaReq req = GetServerMetaReq.PARSER.parseFrom(new ByteBufInputStream(pBody));
            worker.submitServerMetadataWork(connection.getSession(), req, responseSender);
            break;
        } catch (final InvalidProtocolBufferException e) {
            // Fixed copy-paste error: this case decodes GetServerMetaReq, not
            // CreatePreparedStatementReq, so the error message now says so.
            throw new RpcException("Failure while decoding GetServerMetaReq body.", e);
        }
    default:
        throw new UnsupportedOperationException(
                String.format("UserServerRequestHandler received rpc of unknown type. Type was %d.", rpcType));
    }
}

From source file:org.apache.drill.exec.store.parquet.PageReadStatus.java

License:Apache License

/**
 * Advances this reader to the next page in the column chunk, reading the page
 * header, decompressing the page body, and initializing the definition-level
 * and value readers for the new page.
 *
 * @return true if another page was present and loaded, false at end of chunk
 * @throws java.io.IOException on failure to read or decompress the page
 */
public boolean next() throws IOException {

    // Skip the 4-byte "PAR1" magic at the start of the file for the first row group.
    int shift = 0;
    if (rowGroupIndex == 0)
        shift = 0;
    else
        shift = 4;
    // first ROW GROUP has a different endpoint, because there are four bytes at the beginning of the file "PAR1"
    if (parentColumnReader.readPositionInBuffer
            + shift == parentColumnReader.columnChunkMetaData.getFirstDataPageOffset()
                    + parentColumnReader.columnChunkMetaData.getTotalSize()) {
        return false;
    }
    // TODO - in the JIRA for parquet steven put a stack trace for an error with a row group with 3 values in it
    // the Math.min with the end of the buffer should fix it but now I'm not getting results back, leaving it here for now
    // because it is needed, but there might be a problem with it
    // Slice up to 200 bytes just to decode the page header; Math.min guards
    // against running past the end of the backing buffer.
    ByteBufInputStream f = new ByteBufInputStream(parentColumnReader.parentReader.getBufferWithAllData().slice(
            (int) parentColumnReader.readPositionInBuffer,
            Math.min(200, parentColumnReader.parentReader.getBufferWithAllData().capacity()
                    - (int) parentColumnReader.readPositionInBuffer)));
    int before = f.available();
    PageHeader pageHeader = readPageHeader(f);
    // Header length = bytes consumed from the stream while decoding the header.
    int length = before - f.available();
    // Re-slice to exactly the compressed page body, which begins right after the header.
    f = new ByteBufInputStream(parentColumnReader.parentReader.getBufferWithAllData().slice(
            (int) parentColumnReader.readPositionInBuffer + length, pageHeader.getCompressed_page_size()));

    BytesInput bytesIn = parentColumnReader.parentReader.getCodecFactoryExposer().decompress(
            BytesInput.from(f, pageHeader.compressed_page_size), pageHeader.getUncompressed_page_size(),
            parentColumnReader.columnChunkMetaData.getCodec());
    currentPage = new Page(bytesIn, pageHeader.data_page_header.num_values, pageHeader.uncompressed_page_size,
            ParquetStorageEngine.parquetMetadataConverter
                    .getEncoding(pageHeader.data_page_header.repetition_level_encoding),
            ParquetStorageEngine.parquetMetadataConverter
                    .getEncoding(pageHeader.data_page_header.definition_level_encoding),
            ParquetStorageEngine.parquetMetadataConverter.getEncoding(pageHeader.data_page_header.encoding));

    // Advance past the header plus the compressed body for the next call.
    parentColumnReader.readPositionInBuffer += pageHeader.compressed_page_size + length;
    byteLength = pageHeader.uncompressed_page_size;

    // NOTE(review): currentPage was just assigned from `new Page(...)` above,
    // so this check appears unreachable — confirm before removing.
    if (currentPage == null) {
        return false;
    }

    // if the buffer holding each page's data is not large enough to hold the current page, re-allocate, with a little extra space
    if (pageHeader.getUncompressed_page_size() > pageDataByteArray.length) {
        pageDataByteArray = new byte[pageHeader.getUncompressed_page_size() + 100];
    }
    // TODO - would like to get this into the mainline, hopefully before alpha
    // NOTE(review): this assignment replaces the array reference entirely,
    // making the re-allocation above appear redundant — confirm intent.
    pageDataByteArray = currentPage.getBytes().toByteArray();

    if (parentColumnReader.columnDescriptor.getMaxDefinitionLevel() != 0) {
        definitionLevels = currentPage.getDlEncoding().getValuesReader(parentColumnReader.columnDescriptor,
                ValuesType.DEFINITION_LEVEL);
        valueReader = currentPage.getValueEncoding().getValuesReader(parentColumnReader.columnDescriptor,
                ValuesType.VALUES);
        int endOfDefinitionLevels = definitionLevels.initFromPage(currentPage.getValueCount(),
                pageDataByteArray, 0);
        valueReader.initFromPage(currentPage.getValueCount(), pageDataByteArray, endOfDefinitionLevels);
        readPosInBytes = endOfDefinitionLevels;
    }

    // NOTE(review): this unconditionally overwrites the readPosInBytes value
    // set just above from endOfDefinitionLevels — looks suspicious; confirm
    // whether the reset to 0 is intended.
    readPosInBytes = 0;
    valuesRead = 0;
    return true;
}