List of usage examples for io.netty.channel.ChannelHandlerContext#pipeline()
ChannelPipeline pipeline();
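ChannelHandlerContext.pipeline() returns the ChannelPipeline the handler is registered with. Every example below uses it for the same purpose: reshaping the pipeline at runtime, adding, removing, or replacing handlers while the channel is live. As a minimal sketch of that idea (the class and handler names here are illustrative, not taken from the examples below):

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.LineBasedFrameDecoder;
import io.netty.handler.codec.string.StringDecoder;

public class PipelineSetupHandler extends ChannelInboundHandlerAdapter {
    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        ChannelPipeline p = ctx.pipeline();   // the pipeline this handler belongs to
        p.addLast("framer", new LineBasedFrameDecoder(8192));
        p.addLast("decoder", new StringDecoder());
        ctx.fireChannelActive();              // propagate the event downstream
        p.remove(this);                       // one-shot setup: detach afterwards
    }
}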
From source file: com.addthis.hydra.query.loadbalance.HttpQueryCallHandler.java
License: Apache License
/** special handler for query */
public static ChannelFuture handleQuery(ChannelHandler queryToQueryResultsEncoder, KVPairs kv,
        HttpRequest request, ChannelHandlerContext ctx, EventExecutor executor) throws Exception {
    String job = kv.getValue("job");
    String path = kv.getValue("path", kv.getValue("q", ""));
    Query query = new Query(job, new String[] { path },
            new String[] { kv.getValue("ops"), kv.getValue("rops") });
    query.setTraced(kv.getIntValue("trace", 0) == 1);
    query.setParameterIfNotYetSet("hosts", kv.getValue("hosts"));
    query.setParameterIfNotYetSet("gate", kv.getValue("gate"));
    query.setParameterIfNotYetSet("originalrequest", kv.getValue("originalrequest"));
    SocketAddress remoteIP = ctx.channel().remoteAddress();
    if (remoteIP instanceof InetSocketAddress) { // only log implementations with known methods
        query.setParameterIfNotYetSet("remoteip",
                ((InetSocketAddress) remoteIP).getAddress().getHostAddress());
    }
    query.setParameterIfNotYetSet("allocator", kv.getValue("allocator"));
    query.setParameterIfNotYetSet("allowPartial", kv.getValue("allowPartial"));
    String filename = kv.getValue("filename", "query");
    String format = kv.getValue("format", "json");
    String gdriveAccessToken = kv.getValue("accesstoken");
    int timeout = Math.min(kv.getIntValue("timeout", maxQueryTime), maxQueryTime);
    query.setParameterIfNotYetSet("timeout", timeout);
    query.setParameter("sender", kv.getValue("sender"));
    if (log.isDebugEnabled()) {
        log.debug(new StringMapHelper()
                .put("type", "query.starting")
                .put("query.path", query.getPaths()[0])
                .put("query.hosts", query.getParameter("hosts"))
                .put("query.ops", query.getOps())
                .put("trace", query.isTraced())
                .put("sources", query.getParameter("sources"))
                .put("time", System.currentTimeMillis())
                .put("job.id", query.getJob())
                .put("query.id", query.uuid())
                .put("sender", query.getParameter("sender"))
                .put("format", format)
                .put("filename", filename)
                .put("originalrequest", query.getParameter("originalrequest"))
                .put("timeout", query.getParameter("timeout"))
                .put("requestIP", query.getParameter("remoteip"))
                .put("allocator", query.getParameter("allocator"))
                .put("allowPartial", query.getParameter("allowPartial"))
                .createKVPairs().toString());
    }
    // support legacy async query semantics
    query = LegacyHandler.handleQuery(query, kv, request, ctx);
    if (query == null) {
        return ctx.newSucceededFuture();
    }
    if (query.getJob() == null) {
        sendError(ctx, new HttpResponseStatus(500, "missing job"));
        return ctx.newSucceededFuture();
    }
    switch (format) {
    case "json":
        ctx.pipeline().addLast(executor, "format", new JsonBundleEncoder());
        break;
    case "html":
        ctx.pipeline().addLast(executor, "format", new HtmlBundleEncoder());
        break;
    case "gdrive":
        ctx.pipeline().addLast(executor, "stringer", stringer);
        ctx.pipeline().addLast(executor, "format",
                GoogleDriveBundleEncoder.create(filename, gdriveAccessToken));
        break;
    default:
        ctx.pipeline().addLast(executor, "format", DelimitedBundleEncoder.create(filename, format));
        break;
    }
    ctx.pipeline().addLast(executor, "mqm", queryToQueryResultsEncoder);
    return ctx.pipeline().write(query, new DefaultChannelProgressivePromise(ctx.channel(), executor));
}
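The detail worth copying here is the three-argument addLast: passing an EventExecutor(Group) makes Netty dispatch that handler's callbacks on those threads instead of the channel's I/O thread, which is why the potentially slow format encoders are all registered with `executor`. A minimal sketch of just that pattern, using StringEncoder as a stand-in for the bundle encoders:

import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.string.StringEncoder;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import io.netty.util.concurrent.EventExecutorGroup;

public final class EncoderAttach {
    // a small pool dedicated to serialization work
    private static final EventExecutorGroup FORMAT_EXECUTOR = new DefaultEventExecutorGroup(4);

    static void attachFormat(ChannelHandlerContext ctx) {
        // events for "format" run on FORMAT_EXECUTOR threads, keeping slow
        // encoding off the channel's event loop
        ctx.pipeline().addLast(FORMAT_EXECUTOR, "format", new StringEncoder());
    }
}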
From source file: com.addthis.hydra.query.MeshQueryMaster.java
License: Apache License
protected void writeQuery(ChannelHandlerContext ctx, Query query, ChannelPromise promise) throws Exception {
    String[] opsLog = query.getOps(); // being able to log and monitor rops is kind of important
    // creates query for worker and updates local query ops (!mutates query!)
    // TODO: fix this pipeline interface
    Query remoteQuery = query.createPipelinedQuery();
    if (keepy != null) {
        keepy.resolveAlias(query);
        keepy.validateJobForQuery(query);
    }
    Map<Integer, Set<FileReferenceWrapper>> fileReferenceMap;
    try {
        fileReferenceMap = cachey.get(query.getJob());
        if ((fileReferenceMap == null) || fileReferenceMap.isEmpty()) {
            throw new QueryException("[MeshQueryMaster] No file references found for job: " + query.getJob());
        }
    } catch (ExecutionException e) {
        log.warn("", e);
        throw new QueryException("Exception getting file references: " + e.getMessage());
    }
    int canonicalTasks = 0;
    if (keepy != null) {
        canonicalTasks = keepy.validateTaskCount(query, fileReferenceMap);
    } else {
        for (Integer taskId : fileReferenceMap.keySet()) {
            if (taskId > canonicalTasks) {
                canonicalTasks = taskId;
            }
        }
        // tasks are zero indexed
        canonicalTasks += 1;
    }
    QueryTaskSource[] sourcesByTaskID = new QueryTaskSource[canonicalTasks];
    for (int i = 0; i < canonicalTasks; i++) {
        Set<FileReferenceWrapper> sourceOptions = fileReferenceMap.get(i);
        QueryTaskSourceOption[] taskSourceOptions = new QueryTaskSourceOption[sourceOptions.size()];
        int taskSourceOptionsIndex = 0;
        for (FileReferenceWrapper wrapper : sourceOptions) {
            FileReference queryReference = wrapper.fileReference;
            WorkerData workerData = worky.get(queryReference.getHostUUID());
            taskSourceOptions[taskSourceOptionsIndex] =
                    new QueryTaskSourceOption(queryReference, workerData.queryLeases);
            taskSourceOptionsIndex += 1;
        }
        sourcesByTaskID[i] = new QueryTaskSource(taskSourceOptions);
    }
    MeshSourceAggregator aggregator = new MeshSourceAggregator(sourcesByTaskID, meshy, this, remoteQuery);
    ctx.pipeline().addLast(ctx.executor(), "query aggregator", aggregator);
    TrackerHandler trackerHandler = new TrackerHandler(tracker, opsLog);
    ctx.pipeline().addLast(ctx.executor(), "query tracker", trackerHandler);
    ctx.pipeline().remove(this);
    ctx.pipeline().write(query, promise);
}
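The last four lines show a hand-off idiom: the handler appends its successors, removes itself, then re-issues the write through ctx.pipeline() (i.e. from the tail) so the message traverses the handlers it just installed. A hedged sketch of only that mechanism, with ChannelDuplexHandler as a no-op stand-in for the aggregator:

import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelOutboundHandlerAdapter;
import io.netty.channel.ChannelPromise;

public class PipelineHandOff extends ChannelOutboundHandlerAdapter {
    @Override
    public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
        // install the handlers that will actually process the message
        ctx.pipeline().addLast(ctx.executor(), "aggregator", new ChannelDuplexHandler());
        ctx.pipeline().remove(this); // avoid re-entering this handler
        // write via the pipeline tail, not ctx, so the new handlers see msg
        ctx.pipeline().write(msg, promise);
    }
}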
From source file: com.addthis.hydra.query.tracker.TrackerHandler.java
License: Apache License
protected void writeQuery(final ChannelHandlerContext ctx, Query msg, ChannelPromise promise) throws Exception {
    this.requestPromise = promise;
    this.queryUser = new DataChannelOutputToNettyBridge(ctx, promise);
    this.query = msg;
    query.queryPromise = queryPromise;
    // create a processor chain based in query ops terminating the query user
    this.opProcessorConsumer = query.newProcessor(queryUser, opPromise);
    queryEntry = new QueryEntry(query, opsLog, requestPromise, this);
    // Check if the uuid is repeated, then make a new one
    if (queryTracker.running.putIfAbsent(query.uuid(), queryEntry) != null) {
        String old = query.uuid();
        query.useNextUUID();
        log.warn("Query uuid was already in use in running. Going to try assigning a new one. old:{} new:{}",
                old, query.uuid());
        if (queryTracker.running.putIfAbsent(query.uuid(), queryEntry) != null) {
            throw new QueryException("Query uuid was STILL somehow already in use : " + query.uuid());
        }
    }
    log.debug("Executing.... {} {}", query.uuid(), queryEntry.queryDetails);
    ctx.pipeline().remove(this);
    opPromise.addListener(this);
    queryPromise.addListener(this);
    requestPromise.addListener(this);
    ctx.write(opProcessorConsumer, queryPromise);
}
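Aside from the pipeline removal (same idiom as the previous example), the uuid handling is worth a note: putIfAbsent on a concurrent map registers the entry and detects collisions atomically, with one retry under a fresh id. A generic sketch of that retry, with illustrative names and plain JDK types:

import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class QueryRegistry<E> {
    private final ConcurrentMap<String, E> running = new ConcurrentHashMap<>();

    /** Registers the entry, retrying once with a fresh uuid on collision. */
    String register(String uuid, E entry) {
        if (running.putIfAbsent(uuid, entry) != null) {
            uuid = UUID.randomUUID().toString(); // collision: take a new id
            if (running.putIfAbsent(uuid, entry) != null) {
                throw new IllegalStateException("uuid still in use: " + uuid);
            }
        }
        return uuid;
    }
}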
From source file: com.addthis.hydra.query.web.AbstractBufferingHttpBundleEncoder.java
License: Apache License
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
    if (msg instanceof Bundle) {
        send(ctx, (Bundle) msg);
    } else if (msg == DataChannelOutputToNettyBridge.SEND_COMPLETE) {
        sendComplete(ctx);
        ctx.pipeline().remove(this);
    } else {
        super.write(ctx, msg, promise); // forward write to next handler
    }
}
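The pattern here is sentinel-then-detach: the encoder handles its own message type, and when an end-of-stream marker arrives it finishes up and removes itself so later writes bypass it. A minimal sketch of the same shape (SENTINEL and the class are illustrative):

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelOutboundHandlerAdapter;
import io.netty.channel.ChannelPromise;

public class DetachingEncoder extends ChannelOutboundHandlerAdapter {
    public static final Object SENTINEL = new Object();

    @Override
    public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
        if (msg == SENTINEL) {
            promise.setSuccess();           // nothing to write for the marker itself
            ctx.pipeline().remove(this);    // encoding finished: detach
        } else {
            super.write(ctx, msg, promise); // forward all other writes
        }
    }
}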
From source file: com.addthis.hydra.query.web.HttpQueryCallHandler.java
License: Apache License
/** special handler for query */
public static ChannelFuture handleQuery(ChannelHandler queryToQueryResultsEncoder, KVPairs kv,
        HttpRequest request, ChannelHandlerContext ctx, EventExecutor executor) throws Exception {
    String job = kv.getValue("job");
    // support either job=id/dir or job=id&dir=dir for convenience (and don't punish doing both)
    String dir = kv.getValue("dir");
    if ((dir != null) && !job.endsWith(dir)) {
        String[] jobs = job.split(",");
        String[] dirs = dir.split(",");
        job = Arrays.stream(jobs)
                .flatMap(subJob -> Arrays.stream(dirs).map(subDir -> subJob + '/' + subDir))
                .collect(Collectors.joining(","));
    }
    String path = kv.getValue("path", kv.getValue("q", ""));
    Query query = new Query(job, new String[] { path },
            new String[] { kv.getValue("ops"), kv.getValue("rops") });
    query.setTraced(kv.getIntValue("trace", 0) == 1);
    query.setParameterIfNotYetSet("hosts", kv.getValue("hosts"));
    query.setParameterIfNotYetSet("gate", kv.getValue("gate"));
    query.setParameterIfNotYetSet("originalrequest", kv.getValue("originalrequest"));
    SocketAddress remoteIP = ctx.channel().remoteAddress();
    if (remoteIP instanceof InetSocketAddress) { // only log implementations with known methods
        query.setParameterIfNotYetSet("remoteip",
                ((InetSocketAddress) remoteIP).getAddress().getHostAddress());
    }
    query.setParameterIfNotYetSet("allocator", kv.getValue("allocator"));
    query.setParameterIfNotYetSet("allowPartial", kv.getValue("allowPartial"));
    query.setParameterIfNotYetSet("tasks", kv.getValue("tasks"));
    String filename = kv.getValue("filename", "query");
    String format = kv.getValue("format", "json");
    String gdriveAccessToken = kv.getValue("accesstoken");
    int timeout = Math.min(kv.getIntValue("timeout", maxQueryTime), maxQueryTime);
    query.setParameterIfNotYetSet("timeout", timeout);
    query.setParameter("sender", kv.getValue("sender"));
    query.setParameter("injectSource", kv.getValue("injectSource"));
    if (log.isDebugEnabled()) {
        log.debug(new StringMapHelper()
                .put("type", "query.starting")
                .put("query.path", query.getPaths()[0])
                .put("query.hosts", query.getParameter("hosts"))
                .put("query.ops", query.getOps())
                .put("trace", query.isTraced())
                .put("sources", query.getParameter("sources"))
                .put("time", System.currentTimeMillis())
                .put("job.id", query.getJob())
                .put("query.id", query.uuid())
                .put("sender", query.getParameter("sender"))
                .put("format", format)
                .put("filename", filename)
                .put("originalrequest", query.getParameter("originalrequest"))
                .put("timeout", query.getParameter("timeout"))
                .put("requestIP", query.getParameter("remoteip"))
                .put("allocator", query.getParameter("allocator"))
                .put("allowPartial", query.getParameter("allowPartial"))
                .put("tasks", query.getParameter("tasks"))
                .createKVPairs().toString());
    }
    // support legacy async query semantics
    query = LegacyHandler.handleQuery(query, kv, request, ctx);
    if (query == null) {
        return ctx.newSucceededFuture();
    }
    if (query.getJob() == null) {
        sendError(ctx, new HttpResponseStatus(500, "missing job"));
        return ctx.newSucceededFuture();
    }
    switch (format) {
    case "json":
        ctx.pipeline().addLast(executor, "format", new JsonBundleEncoder());
        break;
    case "html":
        ctx.pipeline().addLast(executor, "format", new HtmlBundleEncoder());
        break;
    case "gdrive":
        ctx.pipeline().addLast(executor, "stringer", stringer);
        ctx.pipeline().addLast(executor, "format",
                GoogleDriveBundleEncoder.create(filename, gdriveAccessToken));
        break;
    case "csv2":
    case "psv2":
    case "tsv2":
        ctx.pipeline().addLast(executor, "format", DelimitedEscapedBundleEncoder.create(filename, format));
        break;
    default:
        ctx.pipeline().addLast(executor, "format", DelimitedBundleEncoder.create(filename, format));
        break;
    }
    ctx.pipeline().addLast(executor, "mqm", queryToQueryResultsEncoder);
    return ctx.pipeline().write(query, new DefaultChannelProgressivePromise(ctx.channel(), executor));
}
From source file: com.addthis.hydra.query.web.HttpQueryHandler.java
License: Apache License
private void fastHandle(ChannelHandlerContext ctx, FullHttpRequest request, String target, KVPairs kv)
        throws Exception {
    StringBuilderWriter writer = new StringBuilderWriter(50);
    HttpResponse response = HttpUtils.startResponse(writer);
    response.headers().add("Access-Control-Allow-Origin", "*");
    switch (target) {
    case "/metrics":
        fakeMetricsServlet.writeMetrics(writer, kv);
        break;
    case "/query/list":
        writer.write("[\n");
        for (QueryEntryInfo stat : tracker.getRunning()) {
            writer.write(CodecJSON.encodeString(stat).concat(",\n"));
        }
        writer.write("]");
        break;
    case "/completed/list":
        writer.write("[\n");
        for (QueryEntryInfo stat : tracker.getCompleted()) {
            writer.write(CodecJSON.encodeString(stat).concat(",\n"));
        }
        writer.write("]");
        break;
    case "/v2/host/list":
    case "/host/list":
        String queryStatusUuid = kv.getValue("uuid");
        QueryEntry queryEntry = tracker.getQueryEntry(queryStatusUuid);
        if (queryEntry != null) {
            DetailedStatusHandler hostDetailsHandler =
                    new DetailedStatusHandler(writer, response, ctx, request, queryEntry);
            hostDetailsHandler.handle();
            return;
        } else {
            QueryEntryInfo queryEntryInfo = tracker.getCompletedQueryInfo(queryStatusUuid);
            if (queryEntryInfo != null) {
                JSONObject entryJSON = CodecJSON.encodeJSON(queryEntryInfo);
                writer.write(entryJSON.toString());
            } else {
                throw new RuntimeException("could not find query");
            }
            break;
        }
    case "/query/cancel":
        if (tracker.cancelRunning(kv.getValue("uuid"))) {
            writer.write("canceled " + kv.getValue("uuid"));
        } else {
            writer.write("canceled failed for " + kv.getValue("uuid"));
            response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        }
        break;
    case "/query/encode": {
        Query q = new Query(null, kv.getValue("query", kv.getValue("path", "")), null);
        JSONArray path = CodecJSON.encodeJSON(q).getJSONArray("path");
        writer.write(path.toString());
        break;
    }
    case "/query/decode": {
        String qo = "{path:" + kv.getValue("query", kv.getValue("path", "")) + "}";
        Query q = CodecJSON.decodeString(new Query(), qo);
        writer.write(q.getPaths()[0]);
        break;
    }
    case "/v2/queries/finished.list": {
        JSONArray runningEntries = new JSONArray();
        for (QueryEntryInfo entryInfo : tracker.getCompleted()) {
            JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo);
            // TODO: replace this with some high level summary
            entryJSON.put("hostInfoSet", "");
            runningEntries.put(entryJSON);
        }
        writer.write(runningEntries.toString());
        break;
    }
    case "/v2/queries/running.list": {
        JSONArray runningEntries = new JSONArray();
        for (QueryEntryInfo entryInfo : tracker.getRunning()) {
            JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo);
            // TODO: replace this with some high level summary
            entryJSON.put("hostInfoSet", "");
            runningEntries.put(entryJSON);
        }
        writer.write(runningEntries.toString());
        break;
    }
    case "/v2/queries/workers": {
        JSONObject jsonObject = new JSONObject();
        for (WorkerData workerData : meshQueryMaster.worky().values()) {
            jsonObject.put(workerData.hostName, workerData.queryLeases.availablePermits());
        }
        writer.write(jsonObject.toString());
        break;
    }
    case "/v2/queries/list":
        JSONArray queries = new JSONArray();
        for (QueryEntryInfo entryInfo : tracker.getCompleted()) {
            JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo);
            entryJSON.put("state", 0);
            queries.put(entryJSON);
        }
        for (QueryEntryInfo entryInfo : tracker.getRunning()) {
            JSONObject entryJSON = CodecJSON.encodeJSON(entryInfo);
            entryJSON.put("state", 3);
            queries.put(entryJSON);
        }
        writer.write(queries.toString());
        break;
    case "/v2/job/list": {
        StringWriter swriter = new StringWriter();
        final JsonGenerator json = QueryServer.factory.createJsonGenerator(swriter);
        json.writeStartArray();
        for (IJob job : meshQueryMaster.keepy().getJobs()) {
            if (job.getQueryConfig() != null && job.getQueryConfig().getCanQuery()) {
                List<JobTask> tasks = job.getCopyOfTasks();
                String uuid = job.getId();
                json.writeStartObject();
                json.writeStringField("id", uuid);
                json.writeStringField("description", Optional.fromNullable(job.getDescription()).or(""));
                json.writeNumberField("state", job.getState().ordinal());
                json.writeStringField("creator", job.getCreator());
                json.writeNumberField("submitTime", Optional.fromNullable(job.getSubmitTime()).or(-1L));
                json.writeNumberField("startTime", Optional.fromNullable(job.getStartTime()).or(-1L));
                json.writeNumberField("endTime", Optional.fromNullable(job.getStartTime()).or(-1L));
                json.writeNumberField("replicas", Optional.fromNullable(job.getReplicas()).or(0));
                json.writeNumberField("backups", Optional.fromNullable(job.getBackups()).or(0));
                json.writeNumberField("nodes", tasks.size());
                json.writeEndObject();
            }
        }
        json.writeEndArray();
        json.close();
        writer.write(swriter.toString());
        break;
    }
    case "/v2/settings/git.properties": {
        StringWriter swriter = new StringWriter();
        final JsonGenerator json = QueryServer.factory.createJsonGenerator(swriter);
        Properties gitProperties = new Properties();
        json.writeStartObject();
        try {
            InputStream in = queryServer.getClass().getResourceAsStream("/git.properties");
            gitProperties.load(in);
            in.close();
            json.writeStringField("commitIdAbbrev", gitProperties.getProperty("git.commit.id.abbrev"));
            json.writeStringField("commitUserEmail", gitProperties.getProperty("git.commit.user.email"));
            json.writeStringField("commitMessageFull", gitProperties.getProperty("git.commit.message.full"));
            json.writeStringField("commitId", gitProperties.getProperty("git.commit.id"));
            json.writeStringField("commitUserName", gitProperties.getProperty("git.commit.user.name"));
            json.writeStringField("buildUserName", gitProperties.getProperty("git.build.user.name"));
            json.writeStringField("commitIdDescribe", gitProperties.getProperty("git.commit.id.describe"));
            json.writeStringField("buildUserEmail", gitProperties.getProperty("git.build.user.email"));
            json.writeStringField("branch", gitProperties.getProperty("git.branch"));
            json.writeStringField("commitTime", gitProperties.getProperty("git.commit.time"));
            json.writeStringField("buildTime", gitProperties.getProperty("git.build.time"));
        } catch (Exception ex) {
            log.warn("Error loading git.properties, possibly jar was not compiled with maven.");
        }
        json.writeEndObject();
        json.close();
        writer.write(swriter.toString());
        break;
    }
    default:
        // forward to static file server
        ctx.pipeline().addLast(staticFileHandler);
        request.retain();
        ctx.fireChannelRead(request);
        return; // don't do text response clean up
    }
    log.trace("response being sent {}", writer);
    ByteBuf textResponse = ByteBufUtil.encodeString(ctx.alloc(),
            CharBuffer.wrap(writer.getBuilder()), CharsetUtil.UTF_8);
    HttpContent content = new DefaultHttpContent(textResponse);
    response.headers().set(HttpHeaders.Names.CONTENT_LENGTH, textResponse.readableBytes());
    if (HttpHeaders.isKeepAlive(request)) {
        response.headers().set(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
    }
    ctx.write(response);
    ctx.write(content);
    ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
    log.trace("response pending");
    if (!HttpHeaders.isKeepAlive(request)) {
        log.trace("Setting close listener");
        lastContentFuture.addListener(ChannelFutureListener.CLOSE);
    }
}
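The default branch is a fallback-dispatch idiom: when no route matches, append a (shareable) handler and re-fire the already-read request so the new handler receives it. The retain() matters because SimpleChannelInboundHandler-style handlers release the message after channelRead0 returns. A hedged sketch of just that branch, with illustrative names:

import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;

public final class StaticFallback {
    static void forwardToStaticFiles(ChannelHandlerContext ctx, FullHttpRequest request,
                                     ChannelHandler staticFileHandler) {
        ctx.pipeline().addLast(staticFileHandler); // extend the pipeline past this handler
        request.retain();                          // keep the message alive past auto-release
        ctx.fireChannelRead(request);              // replay the request to the new handler
    }
}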
From source file: com.addthis.hydra.query.web.HttpStaticFileHandler.java
License: Apache License
@Override
public void channelRead0(ChannelHandlerContext ctx, FullHttpRequest request) throws Exception {
    if (!request.getDecoderResult().isSuccess()) {
        sendError(ctx, BAD_REQUEST);
        return;
    }
    // since we are using send file, we must remove the compression unit or it will donk out
    ChannelHandler compressor = ctx.pipeline().get("compressor");
    if (compressor != null) {
        ctx.pipeline().remove("compressor");
    }
    if (request.getMethod() != GET) {
        sendError(ctx, METHOD_NOT_ALLOWED);
        return;
    }
    QueryStringDecoder urlDecoder = new QueryStringDecoder(request.getUri());
    String target = urlDecoder.path();
    final String path = sanitizeUri(target);
    if (path == null) {
        sendError(ctx, FORBIDDEN);
        return;
    }
    Path file = Paths.get(webDir + path);
    log.trace("trying to serve static file {}", file);
    if (Files.isHidden(file) || Files.notExists(file)) {
        sendError(ctx, NOT_FOUND);
        return;
    }
    if (!Files.isRegularFile(file)) {
        sendError(ctx, FORBIDDEN);
        return;
    }
    log.trace("cache validation occuring for {}", file);
    // Cache Validation
    String ifModifiedSince = request.headers().get(IF_MODIFIED_SINCE);
    if (ifModifiedSince != null && !ifModifiedSince.isEmpty()) {
        SimpleDateFormat dateFormatter = new SimpleDateFormat(HttpUtils.HTTP_DATE_FORMAT, Locale.US);
        Date ifModifiedSinceDate = dateFormatter.parse(ifModifiedSince);
        // Only compare up to the second because the datetime format we send to the client
        // does not have milliseconds
        long ifModifiedSinceDateSeconds = ifModifiedSinceDate.getTime() / 1000;
        long fileLastModifiedSeconds = Files.getLastModifiedTime(file).toMillis() / 1000;
        if (ifModifiedSinceDateSeconds == fileLastModifiedSeconds) {
            sendNotModified(ctx);
            return;
        }
    }
    log.trace("sending {}", file);
    FileChannel fileChannel;
    try {
        fileChannel = FileChannel.open(file, StandardOpenOption.READ);
    } catch (IOException fnfe) {
        sendError(ctx, NOT_FOUND);
        return;
    }
    long fileLength = fileChannel.size();
    HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
    setContentLength(response, fileLength);
    setContentTypeHeader(response, file);
    try {
        setDateAndCacheHeaders(response, file);
    } catch (IOException ioex) {
        fileChannel.close();
        sendError(ctx, NOT_FOUND);
        return;
    }
    if (isKeepAlive(request)) {
        response.headers().set(CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
    }
    // Write the initial line and the header.
    ctx.write(response);
    // Write the content.
    ctx.write(new DefaultFileRegion(fileChannel, 0, fileLength));
    // Write the end marker
    ChannelFuture lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
    // Decide whether to close the connection or not.
    if (!isKeepAlive(request)) {
        // Close the connection when the whole content is written out.
        lastContentFuture.addListener(ChannelFutureListener.CLOSE);
    } else {
        ctx.pipeline().remove(this);
        if (compressor != null) {
            ctx.pipeline().addBefore("query", "compressor", compressor);
        }
    }
}
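The compressor juggling is a remove-and-restore idiom: zero-copy DefaultFileRegion bytes never pass through handlers that rewrite content, so a content compressor left in place would emit headers that do not match the body. The handler detaches it for the transfer and reattaches it afterwards with addBefore. A hedged sketch of just that bracket (the "query" and "compressor" names mirror the example and are assumptions about the surrounding pipeline):

import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelPipeline;

public final class CompressorBypass {
    static void withoutCompressor(ChannelPipeline pipeline, Runnable sendFile) {
        ChannelHandler compressor = pipeline.get("compressor");
        if (compressor != null) {
            pipeline.remove("compressor"); // a compressor cannot see FileRegion bytes
        }
        sendFile.run();                    // e.g. write a DefaultFileRegion and flush
        if (compressor != null) {
            // restore for later requests; assumes a handler named "query" exists
            pipeline.addBefore("query", "compressor", compressor);
        }
    }
}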
From source file: com.alibaba.dubbo.qos.server.handler.QosProcessHandler.java
License: Apache License
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
    if (in.readableBytes() < 1) {
        return;
    }
    // read one byte to guess protocol
    final int magic = in.getByte(in.readerIndex());
    ChannelPipeline p = ctx.pipeline();
    p.addLast(new LocalHostPermitHandler(acceptForeignIp));
    if (isHttp(magic)) {
        // no welcome output for http protocol
        if (welcomeFuture != null && welcomeFuture.isCancellable()) {
            welcomeFuture.cancel(false);
        }
        p.addLast(new HttpServerCodec());
        p.addLast(new HttpObjectAggregator(1048576));
        p.addLast(new HttpProcessHandler());
        p.remove(this);
    } else {
        p.addLast(new LineBasedFrameDecoder(2048));
        p.addLast(new StringDecoder(CharsetUtil.UTF_8));
        p.addLast(new StringEncoder(CharsetUtil.UTF_8));
        p.addLast(new IdleStateHandler(0, 0, 5 * 60));
        p.addLast(new TelnetProcessHandler());
        p.remove(this);
    }
}
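This is the classic port-unification pattern: peek at the first byte with getByte (which does not advance readerIndex), install the matching protocol stack, and remove the sniffer; when a ByteToMessageDecoder is removed, its buffered bytes are re-delivered to the handlers that replaced it. A stripped-down sketch with an illustrative (and deliberately crude) HTTP guess:

import java.util.List;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPipeline;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.LineBasedFrameDecoder;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpServerCodec;

public class ProtocolSniffer extends ByteToMessageDecoder {
    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) {
        if (in.readableBytes() < 1) {
            return; // wait for at least one byte
        }
        int magic = in.getByte(in.readerIndex()); // peek; readerIndex unchanged
        ChannelPipeline p = ctx.pipeline();
        if (magic >= 'A' && magic <= 'Z') {       // crude guess: HTTP methods start with A-Z
            p.addLast(new HttpServerCodec());
            p.addLast(new HttpObjectAggregator(1 << 20));
        } else {
            p.addLast(new LineBasedFrameDecoder(2048));
        }
        p.remove(this); // buffered bytes are replayed to the new handlers
    }
}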
From source file: com.barchart.netty.client.pipeline.CapabilitiesRequest.java
License: BSD License
@Override
public void channelActive(final ChannelHandlerContext ctx) throws Exception {
    ctx.writeAndFlush(new Capabilities() {
        @Override
        public Set<String> capabilities() {
            return Collections.emptySet();
        }

        @Override
        public Version version() {
            return null;
        }

        @Override
        public Version minVersion() {
            return null;
        }
    });
    ctx.fireChannelActive();
    ctx.pipeline().remove(this);
}
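A handshake-on-active handler: fire one message as soon as the channel comes up, propagate the event so later handlers still see it, then detach. A minimal sketch of the same shape (the String payload is illustrative and assumes a StringEncoder sits toward the head of the pipeline):

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;

public class HelloOnActive extends ChannelInboundHandlerAdapter {
    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        ctx.writeAndFlush("HELLO");   // one-shot greeting / capabilities request
        ctx.fireChannelActive();      // let later handlers see the event too
        ctx.pipeline().remove(this);  // this handler's job is done
    }
}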
From source file: com.barchart.netty.common.pipeline.MessageFlowHandler.java
License: BSD License
/**
 * Must be called by subclass when the flow completes. The simplest way to
 * do this is usually by using the OnComplete() state listener in your Flow.
 *
 * @see OnComplete
 */
protected void complete(final ChannelHandlerContext ctx) throws Exception {
    log.debug("Flow complete, flushing inbound message queue");
    // Notify downstream that connection is active
    if (blockActivate) {
        super.channelActive(ctx);
    }
    Object msg;
    for (;;) {
        msg = inboundQueue.poll();
        if (msg == null) {
            break;
        }
        ctx.fireChannelRead(msg);
    }
    ctx.fireChannelReadComplete();
    ctx.pipeline().remove(this);
}
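The surrounding class (not shown) buffers inbound messages until the flow finishes; complete() then drains the queue in order and removes the handler. A self-contained, generic sketch of that gate-and-drain pattern (all names are illustrative):

import java.util.ArrayDeque;
import java.util.Queue;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;

public class GatingHandler extends ChannelInboundHandlerAdapter {
    private final Queue<Object> inboundQueue = new ArrayDeque<>();
    private boolean open;

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        if (open) {
            ctx.fireChannelRead(msg); // gate already open: pass through
        } else {
            inboundQueue.add(msg);    // hold until complete() is called
        }
    }

    /** Call when the handshake/flow finishes to release queued messages. */
    public void complete(ChannelHandlerContext ctx) {
        open = true;
        Object msg;
        while ((msg = inboundQueue.poll()) != null) {
            ctx.fireChannelRead(msg); // replay in arrival order
        }
        ctx.fireChannelReadComplete();
        ctx.pipeline().remove(this);  // gating no longer needed
    }
}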