Example usage for io.netty.buffer ByteBuf readableBytes

List of usage examples for io.netty.buffer ByteBuf readableBytes

Introduction

On this page you can find example usage of io.netty.buffer.ByteBuf.readableBytes().

Prototype

public abstract int readableBytes();

Document

Returns the number of readable bytes, which is equal to (this.writerIndex - this.readerIndex).
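
As a quick, self-contained illustration of that contract, the following sketch (not taken from any of the source files below; the class name is illustrative only) writes and reads a few bytes through an Unpooled buffer and prints readableBytes() at each step.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

public class ReadableBytesDemo {
    public static void main(String[] args) {
        ByteBuf buf = Unpooled.buffer(16);          // fresh buffer: readerIndex = writerIndex = 0
        System.out.println(buf.readableBytes());    // 0
        buf.writeBytes(new byte[] { 1, 2, 3, 4 });  // advances writerIndex to 4
        System.out.println(buf.readableBytes());    // 4 = writerIndex(4) - readerIndex(0)
        buf.readByte();                             // advances readerIndex to 1
        System.out.println(buf.readableBytes());    // 3 = writerIndex(4) - readerIndex(1)
        buf.release();                              // release the buffer when finished
    }
}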

Usage

From source file:com.addthis.hydra.query.web.DetailedStatusHandler.java

License:Apache License

private void onSuccess(QueryEntryInfo queryEntryInfo) throws Exception {
    JSONObject entryJSON = CodecJSON.encodeJSON(queryEntryInfo);
    writer.write(entryJSON.toString());
    ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(writer.getBuilder()),
            CharsetUtil.UTF_8);
    HttpContent content = new DefaultHttpContent(textResponse);
    response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes());
    if (HttpHeaders.isKeepAlive(request)) {
        response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
    }
    ctx.write(response);
    ctx.write(content);
    ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
    if (!HttpHeaders.isKeepAlive(request)) {
        lastContentFuture.addListener(ChannelFutureListener.CLOSE);
    }
}

From source file:com.addthis.hydra.query.web.GoogleDriveAuthentication.java

License:Apache License

/**
 * Send an HTML formatted error message.
 */
private static void sendErrorMessage(ChannelHandlerContext ctx, String message) throws IOException {
    HttpResponse response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK);
    response.headers().set(CONTENT_TYPE, "text/html; charset=utf-8");
    StringBuilderWriter writer = new StringBuilderWriter(50);
    writer.append("<html><head><title>Hydra Query Master</title></head><body>");
    writer.append("<h3>");
    writer.append(message);
    writer.append("</h3></body></html>");
    ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(writer.getBuilder()),
            CharsetUtil.UTF_8);
    HttpContent content = new DefaultHttpContent(textResponse);
    response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes());
    ctx.write(response);
    ctx.write(content);
    ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
    lastContentFuture.addListener(ChannelFutureListener.CLOSE);
}

From source file:com.addthis.hydra.query.web.HttpQueryHandler.java

License:Apache License

private void fastHandle(ChannelHandlerContext ctx, FullHttpRequest request, String target, KVPairs kv)
        throws Exception {
    StringBuilderWriter writer = new StringBuilderWriter(50);
    HttpResponse response = HttpUtils.startResponse(writer);
    response.headers().add("Access-Control-Allow-Origin", "*");

    switch (target) {
    case "/metrics":
        fakeMetricsServlet.writeMetrics(writer, kv);
        break;
    case "/query/list":
        writer.write("[\n");
        for (QueryEntryInfo stat : tracker.getRunning()) {
            writer.write(CodecJSON.encodeString(stat).concat(",\n"));
        }
        writer.write("]");
        break;
    case "/completed/list":
        writer.write("[\n");
        for (QueryEntryInfo stat : tracker.getCompleted()) {
            writer.write(CodecJSON.encodeString(stat).concat(",\n"));
        }
        writer.write("]");
        break;
    case "/v2/host/list":
    case "/host/list":
        String queryStatusUuid = kv.getValue("uuid");
        QueryEntry queryEntry = tracker.getQueryEntry(queryStatusUuid);
        if (queryEntry != null) {
            DetailedStatusHandler hostDetailsHandler = new DetailedStatusHandler(writer, response, ctx, request,
                    queryEntry);
            hostDetailsHandler.handle();
            return;
        } else {
            QueryEntryInfo queryEntryInfo = tracker.getCompletedQueryInfo(queryStatusUuid);
            if (queryEntryInfo != null) {
                JSONObject entryJSON = CodecJSON.encodeJSON(queryEntryInfo);
                writer.write(entryJSON.toString());
            } else {
                throw new RuntimeException("could not find query");
            }
            break;
        }
    case "/query/cancel":
        if (tracker.cancelRunning(kv.getValue("uuid"))) {
            writer.write("canceled " + kv.getValue("uuid"));
        } else {
            writer.write("canceled failed for " + kv.getValue("uuid"));
            response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        }
        break;
    case "/query/encode": {
        Query q = new Query(null, kv.getValue("query", kv.getValue("path", "")), null);
        JSONArray path = CodecJSON.encodeJSON(q).getJSONArray("path");
        writer.write(path.toString());
        break;
    }
    case "/query/decode": {
        String qo = "{path:" + kv.getValue("query", kv.getValue("path", "")) + "}";
        Query q = CodecJSON.decodeString(new Query(), qo);
        writer.write(q.getPaths()[0]);
        break;
    }
    case "/v2/queries/finished.list": {
        JSONArray runningEntries = new JSONArray();
        for (QueryEntryInfo entryInfo : tracker.getCompleted()) {
            JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo);
            //TODO: replace this with some high level summary
            entryJSON.put("hostInfoSet", "");
            runningEntries.put(entryJSON);
        }
        writer.write(runningEntries.toString());
        break;
    }
    case "/v2/queries/running.list": {
        JSONArray runningEntries = new JSONArray();
        for (QueryEntryInfo entryInfo : tracker.getRunning()) {
            JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo);
            //TODO: replace this with some high level summary
            entryJSON.put("hostInfoSet", "");
            runningEntries.put(entryJSON);
        }
        writer.write(runningEntries.toString());
        break;
    }
    case "/v2/queries/workers": {
        JSONObject jsonObject = new JSONObject();
        for (WorkerData workerData : meshQueryMaster.worky().values()) {
            jsonObject.put(workerData.hostName, workerData.queryLeases.availablePermits());
        }
        writer.write(jsonObject.toString());
        break;
    }
    case "/v2/queries/list":
        JSONArray queries = new JSONArray();
        for (QueryEntryInfo entryInfo : tracker.getCompleted()) {
            JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo);
            entryJSON.put("state", 0);
            queries.put(entryJSON);
        }
        for (QueryEntryInfo entryInfo : tracker.getRunning()) {
            JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo);
            entryJSON.put("state", 3);
            queries.put(entryJSON);
        }
        writer.write(queries.toString());
        break;
    case "/v2/job/list": {
        StringWriter swriter = new StringWriter();
        final JsonGenerator json = QueryServer.factory.createJsonGenerator(swriter);
        json.writeStartArray();
        for (IJob job : meshQueryMaster.keepy().getJobs()) {
            if (job.getQueryConfig() != null && job.getQueryConfig().getCanQuery()) {
                List<JobTask> tasks = job.getCopyOfTasks();
                String uuid = job.getId();
                json.writeStartObject();
                json.writeStringField("id", uuid);
                json.writeStringField("description", Optional.fromNullable(job.getDescription()).or(""));
                json.writeNumberField("state", job.getState().ordinal());
                json.writeStringField("creator", job.getCreator());
                json.writeNumberField("submitTime", Optional.fromNullable(job.getSubmitTime()).or(-1L));
                json.writeNumberField("startTime", Optional.fromNullable(job.getStartTime()).or(-1L));
                json.writeNumberField("endTime", Optional.fromNullable(job.getStartTime()).or(-1L));
                json.writeNumberField("replicas", Optional.fromNullable(job.getReplicas()).or(0));
                json.writeNumberField("backups", Optional.fromNullable(job.getBackups()).or(0));
                json.writeNumberField("nodes", tasks.size());
                json.writeEndObject();
            }
        }
        json.writeEndArray();
        json.close();
        writer.write(swriter.toString());
        break;
    }
    case "/v2/settings/git.properties": {
        StringWriter swriter = new StringWriter();
        final JsonGenerator json = QueryServer.factory.createJsonGenerator(swriter);
        Properties gitProperties = new Properties();
        json.writeStartObject();
        try {
            InputStream in = queryServer.getClass().getResourceAsStream("/git.properties");
            gitProperties.load(in);
            in.close();
            json.writeStringField("commitIdAbbrev", gitProperties.getProperty("git.commit.id.abbrev"));
            json.writeStringField("commitUserEmail", gitProperties.getProperty("git.commit.user.email"));
            json.writeStringField("commitMessageFull", gitProperties.getProperty("git.commit.message.full"));
            json.writeStringField("commitId", gitProperties.getProperty("git.commit.id"));
            json.writeStringField("commitUserName", gitProperties.getProperty("git.commit.user.name"));
            json.writeStringField("buildUserName", gitProperties.getProperty("git.build.user.name"));
            json.writeStringField("commitIdDescribe", gitProperties.getProperty("git.commit.id.describe"));
            json.writeStringField("buildUserEmail", gitProperties.getProperty("git.build.user.email"));
            json.writeStringField("branch", gitProperties.getProperty("git.branch"));
            json.writeStringField("commitTime", gitProperties.getProperty("git.commit.time"));
            json.writeStringField("buildTime", gitProperties.getProperty("git.build.time"));
        } catch (Exception ex) {
            log.warn("Error loading git.properties, possibly jar was not compiled with maven.");
        }
        json.writeEndObject();
        json.close();
        writer.write(swriter.toString());
        break;
    }
    default:
        // forward to static file server
        ctx.pipeline().addLast(staticFileHandler);
        request.retain();
        ctx.fireChannelRead(request);
        return; // don't do text response clean up
    }
    log.trace("response being sent {}", writer);
    ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(writer.getBuilder()),
            CharsetUtil.UTF_8);
    HttpContent content = new DefaultHttpContent(textResponse);
    response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes());
    if (HttpHeaders.isKeepAlive(request)) {
        response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
    }
    ctx.write(response);
    ctx.write(content);
    ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
    log.trace("response pending");
    if (!HttpHeaders.isKeepAlive(request)) {
        log.trace("Setting close listener");
        lastContentFuture.addListener(ChannelFutureListener.CLOSE);
    }
}

From source file:com.addthis.hydra.query.web.LegacyHandler.java

License:Apache License

public static Query handleQuery(Query query, KVPairs kv, HttpRequest request, ChannelHandlerContext ctx)
        throws IOException, QueryException {

    String async = kv.getValue("async");
    if (async == null) {
        return query;
    } else if (async.equals("new")) {
        StringBuilderWriter writer = new StringBuilderWriter(50);
        HttpResponse response = HttpUtils.startResponse(writer);
        String asyncUuid = genAsyncUuid();
        asyncCache.put(asyncUuid, query);
        if (query.isTraced()) {
            Query.emitTrace("async create " + asyncUuid + " from " + query);
        }
        writer.write("{\"id\":\"" + asyncUuid + "\"}");
        ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(), CharBuffer.wrap(writer.getBuilder()),
                CharsetUtil.UTF_8);
        HttpContent content = new DefaultHttpContent(textResponse);
        response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes());
        if (HttpHeaders.isKeepAlive(request)) {
            response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
        }
        ctx.write(response);
        ctx.write(content);
        ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
        if (!HttpHeaders.isKeepAlive(request)) {
            lastContentFuture.addListener(ChannelFutureListener.CLOSE);
        }
        return null;
    } else {
        Query asyncQuery = asyncCache.getIfPresent(async);
        asyncCache.invalidate(async);
        if (query.isTraced()) {
            Query.emitTrace("async restore " + async + " as " + asyncQuery);
        }
        if (asyncQuery != null) {
            return asyncQuery;
        } else {
            throw new QueryException("Missing Async Id");
        }
    }
}

From source file:com.addthis.hydra.store.db.DBKey.java

License:Apache License

public static DBKey deltaDecode(byte[] encoding, @Nonnull IPageDB.Key baseKey) {
    ByteBuf buffer = Unpooled.copiedBuffer(encoding);
    long offset = Varint.readSignedVarLong(buffer);
    long id = offset + baseKey.id();
    Raw key;
    if (buffer.readableBytes() == 0) {
        key = null;
    } else {
        byte[] data = new byte[buffer.readableBytes()];
        buffer.readBytes(data);
        key = Raw.get(data);
    }
    return new DBKey(id, key);
}

From source file:com.addthis.hydra.store.db.DBKey.java

License:Apache License

@Override
public byte[] deltaEncode(@Nonnull IPageDB.Key baseKey) {
    long offset = id - baseKey.id();
    ByteBuf buffer = Unpooled.buffer();
    Varint.writeSignedVarLong(offset, buffer);
    if (key != null) {
        buffer.writeBytes(key.toBytes());
    }
    return Arrays.copyOf(buffer.array(), buffer.readableBytes());
}

From source file:com.addthis.hydra.task.source.Mark.java

License:Apache License

@Override
public byte[] bytesEncode(long version) {
    byte[] retBytes = null;
    ByteBuf buffer = PooledByteBufAllocator.DEFAULT.buffer();
    try {
        byte[] valBytes = getValue().getBytes();
        Varint.writeUnsignedVarInt(valBytes.length, buffer);
        buffer.writeBytes(valBytes);
        Varint.writeUnsignedVarLong(getIndex(), buffer);
        buffer.writeByte(isEnd() ? 1 : 0);
        Varint.writeUnsignedVarInt(error, buffer);
        retBytes = new byte[buffer.readableBytes()];
        buffer.readBytes(retBytes);
    } finally {
        buffer.release();
    }
    return retBytes;
}

From source file:com.addthis.hydra.task.source.SimpleMark.java

License:Apache License

@Override
public byte[] bytesEncode(long version) {
    byte[] retBytes = null;
    ByteBuf buffer = PooledByteBufAllocator.DEFAULT.buffer();
    try {
        byte[] valBytes = val.getBytes();
        Varint.writeUnsignedVarInt(valBytes.length, buffer);
        buffer.writeBytes(valBytes);
        Varint.writeUnsignedVarLong(index, buffer);
        buffer.writeByte(end ? 1 : 0);
        retBytes = new byte[buffer.readableBytes()];
        buffer.readBytes(retBytes);
    } finally {
        buffer.release();
    }
    return retBytes;
}

From source file:com.addthis.meshy.ChannelState.java

License:Apache License

private int estimateMaxQueued(ByteBuf sendBuffer) {
    return max(1, min(64, (channel.config().getWriteBufferHighWaterMark() / sendBuffer.readableBytes()) >> 6));
}

From source file:com.addthis.meshy.ChannelState.java

License:Apache License

public void messageReceived(ByteBuf in) {
    log.trace("{} recv msg={}", this, in);
    meshy.recvBytes(in.readableBytes());
    buffer.writeBytes(in);
    in.release();
    loop: while (true) {
        switch (mode) {
        // zero signifies a reply to a source
        case ReadType:
            if (buffer.readableBytes() < 4) {
                break loop;
            }
            type = buffer.readInt();
            mode = READMODE.ReadSession;
            continue;
        case ReadSession:
            if (buffer.readableBytes() < 4) {
                break loop;
            }
            session = buffer.readInt();
            mode = READMODE.ReadLength;
            continue;
        // zero length signifies end of session
        case ReadLength:
            if (buffer.readableBytes() < 4) {
                break loop;
            }
            length = buffer.readInt();
            mode = READMODE.ReadData;
            continue;
        case ReadData:
            int readable = buffer.readableBytes();
            if (readable < length) {
                break loop;
            }
            SessionHandler handler = null;
            if (type == MeshyConstants.KEY_RESPONSE) {
                handler = sourceHandlers.get(session);
            } else {
                handler = targetHandlers.get(session);
                if ((handler == null) && (meshy instanceof MeshyServer)) {
                    if (type != MeshyConstants.KEY_EXISTING) {
                        handler = meshy.createHandler(type);
                        ((TargetHandler) handler).setContext((MeshyServer) meshy, this, session);
                        log.debug("{} createHandler {} session={}", this, handler, session);
                        if (targetHandlers.put(session, handler) != null) {
                            log.debug("clobbered session {} with {}", session, handler);
                        }
                        if (targetHandlers.size() >= excessiveTargets) {
                            log.debug("excessive targets reached, current targetHandlers = {}",
                                    targetHandlers.size());
                            if (log.isTraceEnabled()) {
                                debugSessions();
                            }
                        }
                    } else {
                        log.debug("Ignoring bad handler creation request for session {} type {}", session,
                                type); // happens with fast streams and send-mores
                    }
                }
            }
            if (handler != null) {
                if (length == 0) {
                    sessionComplete(handler, type, session);
                } else {
                    try {
                        handler.receive(this, session, length, buffer);
                    } catch (Exception ex) {
                        log.error("suppressing handler exception during receive; trying receiveComplete", ex);
                        sessionComplete(handler, type, session);
                    }
                }
            }
            int read = readable - buffer.readableBytes();
            if (read < length) {
                if ((handler != null) || log.isDebugEnabled()) {
                    log.debug("{} recv type={} handler={} ssn={} did not consume all bytes (read={} of {})",
                            this, type, handler, session, read, length);
                }
                buffer.skipBytes(length - read);
            }
            mode = READMODE.ReadType;
            continue;
        default:
            throw new RuntimeException("invalid state");
        }
    }
    buffer.discardReadBytes();
}