Example usage for io.netty.buffer ByteBuf array


Introduction

On this page you can find usage examples for io.netty.buffer ByteBuf array().

Prototype

public abstract byte[] array();

Document

Returns the backing byte array of this buffer. Throws UnsupportedOperationException if there is no accessible backing byte array, so callers should check hasArray() first.
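
Because the backing array can also be larger than the buffer's readable region, code that uses array() usually combines it with arrayOffset() and readerIndex(), and falls back to copying when hasArray() is false. The following is a minimal sketch of that pattern (not taken from any of the sources below; the class name is illustrative):

import io.netty.buffer.ByteBuf;

public final class ByteBufArrayExample {

    /** Copies the readable bytes of any ByteBuf into a fresh byte[]. */
    static byte[] toByteArray(ByteBuf buf) {
        byte[] copy = new byte[buf.readableBytes()];
        if (buf.hasArray()) {
            // Heap buffer: the backing array may begin before the readable region,
            // so offset by arrayOffset() + readerIndex() instead of starting at 0.
            System.arraycopy(buf.array(), buf.arrayOffset() + buf.readerIndex(), copy, 0, copy.length);
        } else {
            // Direct or composite buffer: no accessible array, copy the bytes out.
            buf.getBytes(buf.readerIndex(), copy);
        }
        return copy;
    }
}

The ByteBufUtils.getArray() example below applies a similar check before returning the backing array directly.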

Usage

From source file:org.apache.camel.component.netty4.DatagramPacketByteArrayCodecTest.java

License:Apache License

@Test
public void testEncoder() {
    ByteBuf buf = Unpooled.buffer();
    buf.writeBytes(VALUE.getBytes());
    AddressedEnvelope<Object, InetSocketAddress> addressedEnvelop = new DefaultAddressedEnvelope<Object, InetSocketAddress>(
            VALUE.getBytes(), new InetSocketAddress(8888));
    EmbeddedChannel channel = new EmbeddedChannel(
            ChannelHandlerFactories.newByteArrayEncoder("udp").newChannelHandler());
    Assert.assertTrue(channel.writeOutbound(addressedEnvelop));
    Assert.assertTrue(channel.finish());
    AddressedEnvelope output = (AddressedEnvelope) channel.readOutbound();
    Assert.assertTrue(output.content() instanceof ByteBuf);
    ByteBuf resultContent = (ByteBuf) output.content();
    Assert.assertEquals(VALUE, new String(resultContent.array()));
    Assert.assertNull(channel.readOutbound());
}

From source file:org.apache.distributedlog.common.util.ByteBufUtils.java

License:Apache License

public static byte[] getArray(ByteBuf buffer) {
    if (buffer.hasArray() && buffer.arrayOffset() == 0 && buffer.writableBytes() == 0) {
        return buffer.array();
    }
    byte[] data = new byte[buffer.readableBytes()];
    buffer.getBytes(buffer.readerIndex(), data);
    return data;
}

From source file:org.apache.drill.exec.store.hbase.CompareFunctionsProcessor.java

License:Apache License

@Override
public Boolean visitConvertExpression(ConvertExpression e, LogicalExpression valueArg) throws RuntimeException {
    if (e.getConvertFunction() == ConvertExpression.CONVERT_FROM) {

        String encodingType = e.getEncodingType();
        int prefixLength = 0;

        // Handle scan pruning in the following scenario:
        // The row-key is a composite key and the CONVERT_FROM() function has byte_substr() as input function which is
        // querying for the first few bytes of the row-key(start-offset 1)
        // Example WHERE clause:
        // CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'DATE_EPOCH_BE') < DATE '2015-06-17'
        if (e.getInput() instanceof FunctionCall) {

            // We can prune scan range only for big-endian encoded data
            if (encodingType.endsWith("_BE") == false) {
                return false;
            }

            FunctionCall call = (FunctionCall) e.getInput();
            String functionName = call.getName();
            if (!functionName.equalsIgnoreCase("byte_substr")) {
                return false;
            }

            LogicalExpression nameArg = call.args.get(0);
            LogicalExpression valueArg1 = call.args.size() >= 2 ? call.args.get(1) : null;
            LogicalExpression valueArg2 = call.args.size() >= 3 ? call.args.get(2) : null;

            if (((nameArg instanceof SchemaPath) == false) || (valueArg1 == null)
                    || ((valueArg1 instanceof IntExpression) == false) || (valueArg2 == null)
                    || ((valueArg2 instanceof IntExpression) == false)) {
                return false;
            }

            boolean isRowKey = ((SchemaPath) nameArg).getAsUnescapedPath().equals(DrillHBaseConstants.ROW_KEY);
            int offset = ((IntExpression) valueArg1).getInt();

            if (!isRowKey || (offset != 1)) {
                return false;
            }

            this.path = (SchemaPath) nameArg;
            prefixLength = ((IntExpression) valueArg2).getInt();
            this.isRowKeyPrefixComparison = true;
            return visitRowKeyPrefixConvertExpression(e, prefixLength, valueArg);
        }

        if (e.getInput() instanceof SchemaPath) {
            ByteBuf bb = null;
            switch (encodingType) {
            case "INT_BE":
            case "INT":
            case "UINT_BE":
            case "UINT":
            case "UINT4_BE":
            case "UINT4":
                if (valueArg instanceof IntExpression && (isEqualityFn || encodingType.startsWith("U"))) {
                    bb = newByteBuf(4, encodingType.endsWith("_BE"));
                    bb.writeInt(((IntExpression) valueArg).getInt());
                }
                break;
            case "BIGINT_BE":
            case "BIGINT":
            case "UINT8_BE":
            case "UINT8":
                if (valueArg instanceof LongExpression && (isEqualityFn || encodingType.startsWith("U"))) {
                    bb = newByteBuf(8, encodingType.endsWith("_BE"));
                    bb.writeLong(((LongExpression) valueArg).getLong());
                }
                break;
            case "FLOAT":
                if (valueArg instanceof FloatExpression && isEqualityFn) {
                    bb = newByteBuf(4, true);
                    bb.writeFloat(((FloatExpression) valueArg).getFloat());
                }
                break;
            case "DOUBLE":
                if (valueArg instanceof DoubleExpression && isEqualityFn) {
                    bb = newByteBuf(8, true);
                    bb.writeDouble(((DoubleExpression) valueArg).getDouble());
                }
                break;
            case "TIME_EPOCH":
            case "TIME_EPOCH_BE":
                if (valueArg instanceof TimeExpression) {
                    bb = newByteBuf(8, encodingType.endsWith("_BE"));
                    bb.writeLong(((TimeExpression) valueArg).getTime());
                }
                break;
            case "DATE_EPOCH":
            case "DATE_EPOCH_BE":
                if (valueArg instanceof DateExpression) {
                    bb = newByteBuf(8, encodingType.endsWith("_BE"));
                    bb.writeLong(((DateExpression) valueArg).getDate());
                }
                break;
            case "BOOLEAN_BYTE":
                if (valueArg instanceof BooleanExpression) {
                    bb = newByteBuf(1, false /* does not matter */);
                    bb.writeByte(((BooleanExpression) valueArg).getBoolean() ? 1 : 0);
                }
                break;
            case "DOUBLE_OB":
            case "DOUBLE_OBD":
                if (valueArg instanceof DoubleExpression) {
                    bb = newByteBuf(9, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, 9);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br,
                                ((DoubleExpression) valueArg).getDouble(), Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br,
                                ((DoubleExpression) valueArg).getDouble(), Order.ASCENDING);
                    }
                }
                break;
            case "FLOAT_OB":
            case "FLOAT_OBD":
                if (valueArg instanceof FloatExpression) {
                    bb = newByteBuf(5, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, 5);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br,
                                ((FloatExpression) valueArg).getFloat(), Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br,
                                ((FloatExpression) valueArg).getFloat(), Order.ASCENDING);
                    }
                }
                break;
            case "BIGINT_OB":
            case "BIGINT_OBD":
                if (valueArg instanceof LongExpression) {
                    bb = newByteBuf(9, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, 9);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br,
                                ((LongExpression) valueArg).getLong(), Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br,
                                ((LongExpression) valueArg).getLong(), Order.ASCENDING);
                    }
                }
                break;
            case "INT_OB":
            case "INT_OBD":
                if (valueArg instanceof IntExpression) {
                    bb = newByteBuf(5, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, 5);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br,
                                ((IntExpression) valueArg).getInt(), Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br,
                                ((IntExpression) valueArg).getInt(), Order.ASCENDING);
                    }
                }
                break;
            case "UTF8":
                // let visitSchemaPath() handle this.
                return e.getInput().accept(this, valueArg);
            }

            if (bb != null) {
                this.value = bb.array();
                this.path = (SchemaPath) e.getInput();
                return true;
            }
        }
    }
    return false;
}

From source file:org.apache.drill.exec.store.mapr.db.binary.CompareFunctionsProcessor.java

License:Apache License

@Override
public Boolean visitConvertExpression(ConvertExpression e, LogicalExpression valueArg) throws RuntimeException {
    if (e.getConvertFunction() == ConvertExpression.CONVERT_FROM) {

        String encodingType = e.getEncodingType();
        int prefixLength = 0;

        // Handle scan pruning in the following scenario:
        // The row-key is a composite key and the CONVERT_FROM() function has byte_substr() as input function which is
        // querying for the first few bytes of the row-key(start-offset 1)
        // Example WHERE clause:
        // CONVERT_FROM(BYTE_SUBSTR(row_key, 1, 8), 'DATE_EPOCH_BE') < DATE '2015-06-17'
        if (e.getInput() instanceof FunctionCall) {

            // We can prune scan range only for big-endian encoded data
            if (encodingType.endsWith("_BE") == false) {
                return false;
            }

            FunctionCall call = (FunctionCall) e.getInput();
            String functionName = call.getName();
            if (!functionName.equalsIgnoreCase("byte_substr")) {
                return false;
            }

            LogicalExpression nameArg = call.args.get(0);
            LogicalExpression valueArg1 = call.args.size() >= 2 ? call.args.get(1) : null;
            LogicalExpression valueArg2 = call.args.size() >= 3 ? call.args.get(2) : null;

            if (((nameArg instanceof SchemaPath) == false) || (valueArg1 == null)
                    || ((valueArg1 instanceof IntExpression) == false) || (valueArg2 == null)
                    || ((valueArg2 instanceof IntExpression) == false)) {
                return false;
            }

            boolean isRowKey = ((SchemaPath) nameArg).getAsUnescapedPath().equals(DrillHBaseConstants.ROW_KEY);
            int offset = ((IntExpression) valueArg1).getInt();

            if (!isRowKey || (offset != 1)) {
                return false;
            }

            this.path = (SchemaPath) nameArg;
            prefixLength = ((IntExpression) valueArg2).getInt();
            this.isRowKeyPrefixComparison = true;
            return visitRowKeyPrefixConvertExpression(e, prefixLength, valueArg);
        }

        if (e.getInput() instanceof SchemaPath) {
            ByteBuf bb = null;

            switch (encodingType) {
            case "INT_BE":
            case "INT":
            case "UINT_BE":
            case "UINT":
            case "UINT4_BE":
            case "UINT4":
                if (valueArg instanceof IntExpression && (isEqualityFn || encodingType.startsWith("U"))) {
                    bb = newByteBuf(4, encodingType.endsWith("_BE"));
                    bb.writeInt(((IntExpression) valueArg).getInt());
                }
                break;
            case "BIGINT_BE":
            case "BIGINT":
            case "UINT8_BE":
            case "UINT8":
                if (valueArg instanceof LongExpression && (isEqualityFn || encodingType.startsWith("U"))) {
                    bb = newByteBuf(8, encodingType.endsWith("_BE"));
                    bb.writeLong(((LongExpression) valueArg).getLong());
                }
                break;
            case "FLOAT":
                if (valueArg instanceof FloatExpression && isEqualityFn) {
                    bb = newByteBuf(4, true);
                    bb.writeFloat(((FloatExpression) valueArg).getFloat());
                }
                break;
            case "DOUBLE":
                if (valueArg instanceof DoubleExpression && isEqualityFn) {
                    bb = newByteBuf(8, true);
                    bb.writeDouble(((DoubleExpression) valueArg).getDouble());
                }
                break;
            case "TIME_EPOCH":
            case "TIME_EPOCH_BE":
                if (valueArg instanceof TimeExpression) {
                    bb = newByteBuf(8, encodingType.endsWith("_BE"));
                    bb.writeLong(((TimeExpression) valueArg).getTime());
                }
                break;
            case "DATE_EPOCH":
            case "DATE_EPOCH_BE":
                if (valueArg instanceof DateExpression) {
                    bb = newByteBuf(8, encodingType.endsWith("_BE"));
                    bb.writeLong(((DateExpression) valueArg).getDate());
                }
                break;
            case "BOOLEAN_BYTE":
                if (valueArg instanceof BooleanExpression) {
                    bb = newByteBuf(1, false /* does not matter */);
                    bb.writeByte(((BooleanExpression) valueArg).getBoolean() ? 1 : 0);
                }
                break;
            case "DOUBLE_OB":
            case "DOUBLE_OBD":
                if (valueArg instanceof DoubleExpression) {
                    bb = newByteBuf(9, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, 9);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br,
                                ((DoubleExpression) valueArg).getDouble(), Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat64(br,
                                ((DoubleExpression) valueArg).getDouble(), Order.ASCENDING);
                    }
                }
                break;
            case "FLOAT_OB":
            case "FLOAT_OBD":
                if (valueArg instanceof FloatExpression) {
                    bb = newByteBuf(5, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, 5);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br,
                                ((FloatExpression) valueArg).getFloat(), Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeFloat32(br,
                                ((FloatExpression) valueArg).getFloat(), Order.ASCENDING);
                    }
                }
                break;
            case "BIGINT_OB":
            case "BIGINT_OBD":
                if (valueArg instanceof LongExpression) {
                    bb = newByteBuf(9, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, 9);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br,
                                ((LongExpression) valueArg).getLong(), Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeInt64(br,
                                ((LongExpression) valueArg).getLong(), Order.ASCENDING);
                    }
                }
                break;
            case "INT_OB":
            case "INT_OBD":
                if (valueArg instanceof IntExpression) {
                    bb = newByteBuf(5, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, 5);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br,
                                ((IntExpression) valueArg).getInt(), Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeInt32(br,
                                ((IntExpression) valueArg).getInt(), Order.ASCENDING);
                    }
                }
                break;
            case "UTF8_OB":
            case "UTF8_OBD":
                if (valueArg instanceof QuotedString) {
                    int stringLen = ((QuotedString) valueArg).value.getBytes(Charsets.UTF_8).length;
                    bb = newByteBuf(stringLen + 2, true);
                    PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, stringLen + 2);
                    if (encodingType.endsWith("_OBD")) {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br,
                                ((QuotedString) valueArg).value, Order.DESCENDING);
                        this.sortOrderAscending = false;
                    } else {
                        org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br,
                                ((QuotedString) valueArg).value, Order.ASCENDING);
                    }
                }
                break;
            case "UTF8":
                // let visitSchemaPath() handle this.
                return e.getInput().accept(this, valueArg);
            }

            if (bb != null) {
                this.value = bb.array();
                this.path = (SchemaPath) e.getInput();
                return true;
            }
        }
    }
    return false;
}

From source file:org.apache.drill.exec.store.mapr.db.binary.MaprDBCompareFunctionsProcessor.java

License:Apache License

@Override
protected ByteBuf getByteBuf(LogicalExpression valueArg, String encodingType) {
    switch (encodingType) {
    case "UTF8_OB":
    case "UTF8_OBD":
        if (valueArg instanceof ValueExpressions.QuotedString) {
            int stringLen = ((ValueExpressions.QuotedString) valueArg).value.getBytes(Charsets.UTF_8).length;
            ByteBuf bb = newByteBuf(stringLen + 2, true);
            PositionedByteRange br = new SimplePositionedMutableByteRange(bb.array(), 0, stringLen + 2);
            if (encodingType.endsWith("_OBD")) {
                org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br,
                        ((ValueExpressions.QuotedString) valueArg).value, Order.DESCENDING);
                setSortOrderAscending(false);
            } else {
                org.apache.hadoop.hbase.util.OrderedBytes.encodeString(br,
                        ((ValueExpressions.QuotedString) valueArg).value, Order.ASCENDING);
            }
            return bb;
        }
    }
    return null;
}

From source file:org.apache.flink.runtime.webmonitor.handlers.TaskManagerLogHandlerTest.java

License:Apache License

@Test
public void testLogFetchingFailure() throws Exception {
    // ========= setup TaskManager =================================================================================
    InstanceID tmID = new InstanceID();
    ResourceID tmRID = new ResourceID(tmID.toString());
    TaskManagerGateway taskManagerGateway = mock(TaskManagerGateway.class);
    when(taskManagerGateway.getAddress()).thenReturn("/tm/address");

    Instance taskManager = mock(Instance.class);
    when(taskManager.getId()).thenReturn(tmID);
    when(taskManager.getTaskManagerID()).thenReturn(tmRID);
    when(taskManager.getTaskManagerGateway()).thenReturn(taskManagerGateway);
    CompletableFuture<BlobKey> future = new FlinkCompletableFuture<>();
    future.completeExceptionally(new IOException("failure"));
    when(taskManagerGateway.requestTaskManagerLog(any(Time.class))).thenReturn(future);

    // ========= setup JobManager ==================================================================================

    ActorGateway jobManagerGateway = mock(ActorGateway.class);
    Object registeredTaskManagersAnswer = new JobManagerMessages.RegisteredTaskManagers(JavaConverters
            .collectionAsScalaIterableConverter(Collections.singletonList(taskManager)).asScala());

    when(jobManagerGateway.ask(isA(JobManagerMessages.RequestRegisteredTaskManagers$.class),
            any(FiniteDuration.class))).thenReturn(Future$.MODULE$.successful(registeredTaskManagersAnswer));
    when(jobManagerGateway.ask(isA(JobManagerMessages.getRequestBlobManagerPort().getClass()),
            any(FiniteDuration.class))).thenReturn(Future$.MODULE$.successful((Object) 5));
    when(jobManagerGateway.ask(isA(JobManagerMessages.RequestTaskManagerInstance.class),
            any(FiniteDuration.class)))
                    .thenReturn(Future$.MODULE$.successful(
                            (Object) new JobManagerMessages.TaskManagerInstance(Option.apply(taskManager))));
    when(jobManagerGateway.path()).thenReturn("/jm/address");

    JobManagerRetriever retriever = mock(JobManagerRetriever.class);
    when(retriever.getJobManagerGatewayAndWebPort())
            .thenReturn(Option.apply(new scala.Tuple2<ActorGateway, Integer>(jobManagerGateway, 0)));

    TaskManagerLogHandler handler = new TaskManagerLogHandler(retriever,
            ExecutionContext$.MODULE$.fromExecutor(Executors.directExecutor()),
            Future$.MODULE$.successful("/jm/address"), AkkaUtils.getDefaultClientTimeout(),
            TaskManagerLogHandler.FileMode.LOG, new Configuration(), false);

    final AtomicReference<String> exception = new AtomicReference<>();

    ChannelHandlerContext ctx = mock(ChannelHandlerContext.class);
    when(ctx.write(isA(ByteBuf.class))).thenAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
            ByteBuf data = invocationOnMock.getArgumentAt(0, ByteBuf.class);
            exception.set(new String(data.array(), ConfigConstants.DEFAULT_CHARSET));
            return null;
        }
    });

    Map<String, String> pathParams = new HashMap<>();
    pathParams.put(TaskManagersHandler.TASK_MANAGER_ID_KEY, tmID.toString());
    Routed routed = mock(Routed.class);
    when(routed.pathParams()).thenReturn(pathParams);
    when(routed.request()).thenReturn(
            new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/taskmanagers/" + tmID + "/log"));

    handler.respondAsLeader(ctx, routed, jobManagerGateway);

    Assert.assertEquals("Fetching TaskManager log failed.", exception.get());
}

From source file:org.apache.hadoop.hbase.security.SaslClientHandler.java

License:Apache License

@Override
public void write(final ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
    // If not complete, try to negotiate
    if (!saslClient.isComplete()) {
        super.write(ctx, msg, promise);
    } else {
        ByteBuf in = (ByteBuf) msg;

        try {
            saslToken = saslClient.wrap(in.array(), in.readerIndex(), in.readableBytes());
        } catch (SaslException se) {
            try {
                saslClient.dispose();
            } catch (SaslException ignored) {
                LOG.debug("Ignoring SASL exception", ignored);
            }
            promise.setFailure(se);
        }
        if (saslToken != null) {
            ByteBuf out = ctx.channel().alloc().buffer(4 + saslToken.length);
            out.writeInt(saslToken.length);
            out.writeBytes(saslToken, 0, saslToken.length);

            ctx.write(out).addListener(new ChannelFutureListener() {
                @Override
                public void operationComplete(ChannelFuture future) throws Exception {
                    if (!future.isSuccess()) {
                        exceptionCaught(ctx, future.cause());
                    }
                }
            });

            saslToken = null;
        }
    }
}

From source file:org.apache.jackrabbit.oak.plugins.segment.standby.codec.ReplyDecoder.java

License:Apache License

private Segment decodeSegment(ByteBuf in, int len, byte type) {
    long msb = in.readLong();
    long lsb = in.readLong();
    long hash = in.readLong();

    // #readBytes throws a 'REPLAY' exception if there are not enough bytes
    // available for reading
    ByteBuf data = in.readBytes(len - 25);
    byte[] segment;
    if (data.hasArray()) {
        segment = data.array();
    } else {
        segment = new byte[len - 25];
        // read from the slice that was just taken, not from the source buffer again
        data.readBytes(segment);
    }

    Hasher hasher = Hashing.murmur3_32().newHasher();
    long check = hasher.putBytes(segment).hash().padToLong();
    if (hash == check) {
        SegmentId id = new SegmentId(store.getTracker(), msb, lsb);
        Segment s = new Segment(store.getTracker(), id, ByteBuffer.wrap(segment));
        log.debug("received segment with id {} and size {}", id, s.size());
        return s;
    }
    log.debug("received corrupted segment {}, ignoring", new UUID(msb, lsb));
    return null;
}

From source file:org.apache.jackrabbit.oak.plugins.segment.standby.codec.ReplyDecoder.java

License:Apache License

private IdArrayBasedBlob decodeBlob(ByteBuf in, int length, byte type) {
    int inIdLen = in.readInt();
    byte[] bid = new byte[inIdLen];
    in.readBytes(bid);
    String id = new String(bid, Charset.forName("UTF-8"));

    long hash = in.readLong();
    // #readBytes throws a 'REPLAY' exception if there are not enough bytes
    // available for reading
    ByteBuf data = in.readBytes(length);
    byte[] blob;
    if (data.hasArray()) {
        blob = data.array();
    } else {
        blob = new byte[length];
        data.readBytes(blob);
    }

    Hasher hasher = Hashing.murmur3_32().newHasher();
    long check = hasher.putBytes(blob).hash().padToLong();
    if (hash == check) {
        log.debug("received blob with id {} and size {}", id, blob.length);
        return new IdArrayBasedBlob(blob, id);
    }
    log.debug("received corrupted binary {}, ignoring", id);
    return null;
}

From source file:org.apache.pulsar.broker.admin.impl.PersistentTopicsBase.java

License:Apache License

protected Response internalPeekNthMessage(String subName, int messagePosition, boolean authoritative) {
    if (topicName.isGlobal()) {
        validateGlobalNamespaceOwnership(namespaceName);
    }
    PartitionedTopicMetadata partitionMetadata = getPartitionedTopicMetadata(topicName, authoritative);
    if (partitionMetadata.partitions > 0) {
        throw new RestException(Status.METHOD_NOT_ALLOWED,
                "Peek messages on a partitioned topic is not allowed");
    }
    validateAdminAccessForSubscriber(subName, authoritative);
    if (!(getTopicReference(topicName) instanceof PersistentTopic)) {
        log.error("[{}] Not supported operation of non-persistent topic {} {}", clientAppId(), topicName,
                subName);
        throw new RestException(Status.METHOD_NOT_ALLOWED,
                "Skip messages on a non-persistent topic is not allowed");
    }
    PersistentTopic topic = (PersistentTopic) getTopicReference(topicName);
    PersistentReplicator repl = null;
    PersistentSubscription sub = null;
    Entry entry = null;
    if (subName.startsWith(topic.replicatorPrefix)) {
        repl = getReplicatorReference(subName, topic);
    } else {
        sub = (PersistentSubscription) getSubscriptionReference(subName, topic);
    }
    try {
        if (subName.startsWith(topic.replicatorPrefix)) {
            entry = repl.peekNthMessage(messagePosition).get();
        } else {
            entry = sub.peekNthMessage(messagePosition).get();
        }
        checkNotNull(entry);
        PositionImpl pos = (PositionImpl) entry.getPosition();
        ByteBuf metadataAndPayload = entry.getDataBuffer();

        // moves the readerIndex to the payload
        MessageMetadata metadata = Commands.parseMessageMetadata(metadataAndPayload);

        ResponseBuilder responseBuilder = Response.ok();
        responseBuilder.header("X-Pulsar-Message-ID", pos.toString());
        for (KeyValue keyValue : metadata.getPropertiesList()) {
            responseBuilder.header("X-Pulsar-PROPERTY-" + keyValue.getKey(), keyValue.getValue());
        }
        if (metadata.hasPublishTime()) {
            responseBuilder.header("X-Pulsar-publish-time", DateFormatter.format(metadata.getPublishTime()));
        }
        if (metadata.hasEventTime()) {
            responseBuilder.header("X-Pulsar-event-time", DateFormatter.format(metadata.getEventTime()));
        }
        if (metadata.hasNumMessagesInBatch()) {
            responseBuilder.header("X-Pulsar-num-batch-message", metadata.getNumMessagesInBatch());
        }

        // Decode if needed
        CompressionCodec codec = CompressionCodecProvider.getCompressionCodec(metadata.getCompression());
        ByteBuf uncompressedPayload = codec.decode(metadataAndPayload, metadata.getUncompressedSize());

        // Copy into a heap buffer for output stream compatibility
        ByteBuf data = PooledByteBufAllocator.DEFAULT.heapBuffer(uncompressedPayload.readableBytes(),
                uncompressedPayload.readableBytes());
        data.writeBytes(uncompressedPayload);
        uncompressedPayload.release();

        StreamingOutput stream = new StreamingOutput() {

            @Override
            public void write(OutputStream output) throws IOException, WebApplicationException {
                output.write(data.array(), data.arrayOffset(), data.readableBytes());
                data.release();
            }
        };

        return responseBuilder.entity(stream).build();
    } catch (NullPointerException npe) {
        throw new RestException(Status.NOT_FOUND, "Message not found");
    } catch (Exception exception) {
        log.error("[{}] Failed to get message at position {} from {} {}", clientAppId(), messagePosition,
                topicName, subName, exception);
        throw new RestException(exception);
    } finally {
        if (entry != null) {
            entry.release();
        }
    }
}