List of usage examples for io.netty.buffer ByteBufInputStream available
@Override public int available() throws IOException
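Before the full examples, here is a minimal standalone sketch (not taken from the sources below) of what available() reports on a ByteBufInputStream: the readable bytes remaining in the wrapped ByteBuf, or, when the length-bounded constructor is used, at most the supplied limit.

    import io.netty.buffer.ByteBufInputStream;
    import io.netty.buffer.Unpooled;
    import java.io.IOException;

    public class AvailableBasics {
        public static void main(String[] args) throws IOException {
            byte[] data = {1, 2, 3, 4, 5};

            // available() reports the readable bytes remaining in the wrapped ByteBuf.
            ByteBufInputStream in = new ByteBufInputStream(Unpooled.wrappedBuffer(data));
            System.out.println(in.available()); // 5
            in.read();
            System.out.println(in.available()); // 4

            // A length-bounded stream (the pattern used in RpcDecoder below):
            // available() never exceeds the limit passed to the constructor,
            // and reading past that limit fails instead of blocking.
            ByteBufInputStream bounded = new ByteBufInputStream(Unpooled.wrappedBuffer(data), 3);
            System.out.println(bounded.available()); // 3
        }
    }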
From source file:org.apache.drill.exec.rpc.RpcDecoder.java
License:Apache License
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> out) throws Exception {
    if (!ctx.channel().isOpen()) {
        return;
    }

    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("Inbound rpc message received.");
    }

    // now, we know the entire message is in the buffer and the buffer is constrained to this message. Additionally,
    // this process should avoid reading beyond the end of this buffer so we inform the ByteBufInputStream to throw an
    // exception if we go beyond readable bytes (as opposed to blocking).
    final ByteBufInputStream is = new ByteBufInputStream(buffer, buffer.readableBytes());

    // read the rpc header, saved in delimited format.
    checkTag(is, RpcEncoder.HEADER_TAG);
    final RpcHeader header = RpcHeader.parseDelimitedFrom(is);

    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug(" post header read index {}", buffer.readerIndex());
    }

    // read the protobuf body into a buffer.
    checkTag(is, RpcEncoder.PROTOBUF_BODY_TAG);
    final int pBodyLength = readRawVarint32(is);
    final ByteBuf pBody = buffer.slice(buffer.readerIndex(), pBodyLength);
    buffer.skipBytes(pBodyLength);
    pBody.retain(1);
    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("Read protobuf body of length {} into buffer {}.", pBodyLength, pBody);
    }

    if (RpcConstants.EXTRA_DEBUGGING) {
        logger.debug("post protobufbody read index {}", buffer.readerIndex());
    }

    ByteBuf dBody = null;
    int dBodyLength = 0;

    // read the data body.
    if (buffer.readableBytes() > 0) {
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Reading raw body, buffer has {} bytes available, is available {}.",
                buffer.readableBytes(), is.available());
        }
        checkTag(is, RpcEncoder.RAW_BODY_TAG);
        dBodyLength = readRawVarint32(is);
        if (buffer.readableBytes() != dBodyLength) {
            throw new CorruptedFrameException(String.format(
                "Expected to receive a raw body of %d bytes but received a buffer with %d bytes.",
                dBodyLength, buffer.readableBytes()));
        }
        dBody = buffer.slice();
        dBody.retain(1);
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("Read raw body of {}", dBody);
        }
    } else {
        if (RpcConstants.EXTRA_DEBUGGING) {
            logger.debug("No need to read raw body, no readable bytes left.");
        }
    }

    // return the rpc message.
    InboundRpcMessage m = new InboundRpcMessage(header.getMode(), header.getRpcType(),
        header.getCoordinationId(), pBody, dBody);

    // move the reader index forward so the next rpc call won't try to work with it.
    buffer.skipBytes(dBodyLength);
    messageCounter.incrementAndGet();
    if (RpcConstants.SOME_DEBUGGING) {
        logger.debug("Inbound Rpc Message Decoded {}.", m);
    }
    out.add(m);
}
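Note on the example above: because the stream is constructed as new ByteBufInputStream(buffer, buffer.readableBytes()), is.available() in the debug log reflects only the bytes left in this frame, and reads past that bound fail fast rather than blocking on the channel.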
From source file:org.apache.drill.exec.store.parquet.PageReadStatus.java
License:Apache License
/**
 * Grab the next page.
 *
 * @return - if another page was present
 * @throws java.io.IOException
 */
public boolean next() throws IOException {
    int shift = 0;
    if (rowGroupIndex == 0) shift = 0;
    else shift = 4;
    // first ROW GROUP has a different endpoint, because there are four bytes at the beginning of the file "PAR1"
    if (parentColumnReader.readPositionInBuffer + shift == parentColumnReader.columnChunkMetaData
            .getFirstDataPageOffset() + parentColumnReader.columnChunkMetaData.getTotalSize()) {
        return false;
    }
    // TODO - in the JIRA for parquet steven put a stack trace for an error with a row group with 3 values in it
    // the Math.min with the end of the buffer should fix it but now I'm not getting results back, leaving it here for now
    // because it is needed, but there might be a problem with it
    ByteBufInputStream f = new ByteBufInputStream(parentColumnReader.parentReader.getBufferWithAllData().slice(
        (int) parentColumnReader.readPositionInBuffer,
        Math.min(200, parentColumnReader.parentReader.getBufferWithAllData().capacity()
            - (int) parentColumnReader.readPositionInBuffer)));
    int before = f.available();
    PageHeader pageHeader = readPageHeader(f);
    int length = before - f.available();
    f = new ByteBufInputStream(parentColumnReader.parentReader.getBufferWithAllData().slice(
        (int) parentColumnReader.readPositionInBuffer + length, pageHeader.getCompressed_page_size()));
    BytesInput bytesIn = parentColumnReader.parentReader.getCodecFactoryExposer().decompress(
        BytesInput.from(f, pageHeader.compressed_page_size), pageHeader.getUncompressed_page_size(),
        parentColumnReader.columnChunkMetaData.getCodec());
    currentPage = new Page(bytesIn, pageHeader.data_page_header.num_values, pageHeader.uncompressed_page_size,
        ParquetStorageEngine.parquetMetadataConverter
            .getEncoding(pageHeader.data_page_header.repetition_level_encoding),
        ParquetStorageEngine.parquetMetadataConverter
            .getEncoding(pageHeader.data_page_header.definition_level_encoding),
        ParquetStorageEngine.parquetMetadataConverter.getEncoding(pageHeader.data_page_header.encoding));

    parentColumnReader.readPositionInBuffer += pageHeader.compressed_page_size + length;
    byteLength = pageHeader.uncompressed_page_size;

    if (currentPage == null) {
        return false;
    }

    // if the buffer holding each page's data is not large enough to hold the current page, re-allocate, with a little extra space
    if (pageHeader.getUncompressed_page_size() > pageDataByteArray.length) {
        pageDataByteArray = new byte[pageHeader.getUncompressed_page_size() + 100];
    }
    // TODO - would like to get this into the mainline, hopefully before alpha
    pageDataByteArray = currentPage.getBytes().toByteArray();

    if (parentColumnReader.columnDescriptor.getMaxDefinitionLevel() != 0) {
        definitionLevels = currentPage.getDlEncoding().getValuesReader(parentColumnReader.columnDescriptor,
            ValuesType.DEFINITION_LEVEL);
        valueReader = currentPage.getValueEncoding().getValuesReader(parentColumnReader.columnDescriptor,
            ValuesType.VALUES);
        int endOfDefinitionLevels = definitionLevels.initFromPage(currentPage.getValueCount(),
            pageDataByteArray, 0);
        valueReader.initFromPage(currentPage.getValueCount(), pageDataByteArray, endOfDefinitionLevels);
        readPosInBytes = endOfDefinitionLevels;
    }

    readPosInBytes = 0;
    valuesRead = 0;
    return true;
}
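The page reader above also shows a handy pattern: because the stream is bounded by the slice it wraps, the drop in available() across a call (before - f.available()) tells you exactly how many bytes readPageHeader consumed, without the parser having to report its own length. Below is a minimal sketch of that pattern, with a hypothetical parseHeader helper standing in for the Parquet header reader.

    import io.netty.buffer.ByteBufInputStream;
    import io.netty.buffer.Unpooled;
    import java.io.IOException;
    import java.io.InputStream;

    public class ConsumedBytesSketch {

        // Hypothetical header parser for illustration only: reads a 4-byte
        // big-endian length prefix from the stream.
        static int parseHeader(InputStream in) throws IOException {
            int length = 0;
            for (int i = 0; i < 4; i++) {
                length = (length << 8) | in.read();
            }
            return length;
        }

        public static void main(String[] args) throws IOException {
            byte[] frame = {0, 0, 0, 3, 10, 20, 30}; // 4-byte header, then a 3-byte body
            ByteBufInputStream in = new ByteBufInputStream(Unpooled.wrappedBuffer(frame));

            int before = in.available();                 // 7
            int bodyLength = parseHeader(in);            // parser consumes the prefix it needs
            int headerLength = before - in.available();  // 7 - 3 = 4 bytes consumed by the parser

            System.out.println("header bytes consumed: " + headerLength
                + ", declared body length: " + bodyLength);
        }
    }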