List of usage examples for the io.netty.channel.DefaultChannelProgressivePromise constructor
public DefaultChannelProgressivePromise(Channel channel, EventExecutor executor)
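Before the project examples, here is a minimal sketch of the constructor in isolation, following the pattern the examples below use when no real Channel is available: pass null for the channel and ImmediateEventExecutor.INSTANCE for the executor, then attach a progress listener. The class name ProgressivePromiseSketch and the listener bodies are illustrative only, not taken from any of the projects listed.

import io.netty.channel.ChannelProgressiveFuture;
import io.netty.channel.ChannelProgressiveFutureListener;
import io.netty.channel.ChannelProgressivePromise;
import io.netty.channel.DefaultChannelProgressivePromise;
import io.netty.util.concurrent.ImmediateEventExecutor;

public class ProgressivePromiseSketch {
    public static void main(String[] args) {
        // no Channel is available here, so pass null and use the immediate executor
        ChannelProgressivePromise promise =
                new DefaultChannelProgressivePromise(null, ImmediateEventExecutor.INSTANCE);

        // listeners are notified on the supplied executor as progress and completion are reported
        promise.addListener(new ChannelProgressiveFutureListener() {
            @Override
            public void operationProgressed(ChannelProgressiveFuture future, long progress, long total) {
                System.out.println("progress: " + progress + "/" + total);
            }

            @Override
            public void operationComplete(ChannelProgressiveFuture future) {
                System.out.println("complete, success=" + future.isSuccess());
            }
        });

        promise.setProgress(50, 100); // report partial progress
        promise.setSuccess();         // mark the operation finished
    }
}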
From source file:com.addthis.hydra.data.query.op.OpGroupBy.java
License:Apache License
/**
 * Generate new promise for the child operation.
 *
 * @param opPromise promise of the 'groupby' query operation
 * @return generated promise
 */
private ChannelProgressivePromise generateNewPromise(ChannelProgressivePromise opPromise) {
    final ChannelProgressivePromise result;
    if (opPromise.channel() == null) {
        result = new DefaultChannelProgressivePromise(null, ImmediateEventExecutor.INSTANCE);
    } else {
        result = opPromise.channel().newProgressivePromise();
    }
    result.addListener(errorForwarder);
    return result;
}
From source file:com.addthis.hydra.data.query.source.LiveQueryReference.java
License:Apache License
@Override
public VirtualFileInput getInput(Map<String, String> options) {
    try {
        // ideally the channel here would be some kind of meshy construct, but null should
        // be fine for now -- we never call await/sync etc in the worker
        final DataChannelToInputStream bridge = new DataChannelToInputStream(
                new DefaultChannelProgressivePromise(null, ImmediateEventExecutor.INSTANCE));
        if (options == null) {
            log.warn("Invalid request to getInput. Options cannot be null");
            return null;
        }
        SearchRunner.querySearchPool.execute(new LiveSearchRunner(options, dirString, bridge, queryEngine));
        return bridge;
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}
From source file:com.addthis.hydra.data.query.source.QueryReference.java
License:Apache License
/**
 * Submits the query to the query search pool as a SearchRunner and creates the bridge that will
 * hand the query response data to meshy.
 *
 * @param options
 * @return the response bridge (DataChannelToInputStream)
 */
@Override
public VirtualFileInput getInput(Map<String, String> options) {
    try {
        // ideally the channel here would be some kind of meshy construct, but null should
        // be fine for now -- we never call await/sync etc in the worker
        final DataChannelToInputStream bridge = new DataChannelToInputStream(
                new DefaultChannelProgressivePromise(null, ImmediateEventExecutor.INSTANCE));
        if (options == null) {
            log.warn("Invalid request to getInput. Options cannot be null");
            return null;
        }
        final String flag = options.get("flag");
        if (flag != null) {
            if (flag.equals("die")) {
                System.exit(1);
            } else if (flag.equals("DIE")) {
                Runtime.getRuntime().halt(1);
            }
        }
        SearchRunner.querySearchPool.execute(new SearchRunner(options, dirString, bridge));
        return bridge;
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
}
From source file:com.addthis.hydra.data.query.source.SearchRunner.java
License:Apache License
/**
 * Part 1 - SETUP
 * Initialize query run -- parse options, create Query object
 */
protected void setup() throws Exception {
    long startTime = System.currentTimeMillis();
    MeshQuerySource.queueTimes.update(creationTime - startTime, TimeUnit.MILLISECONDS);
    query = CodecJSON.decodeString(new Query(), options.get("query"));
    // set as soon as possible (and especially before creating op processor)
    query.queryPromise = bridge.queryPromise;
    // Parse the query and return a reference to the last QueryOpProcessor.
    ChannelProgressivePromise opPromise =
            new DefaultChannelProgressivePromise(null, ImmediateEventExecutor.INSTANCE);
    queryOpProcessor = query.newProcessor(bridge, opPromise);
}
From source file:com.addthis.hydra.query.loadbalance.HttpQueryCallHandler.java
License:Apache License
/**
 * special handler for query
 */
public static ChannelFuture handleQuery(ChannelHandler queryToQueryResultsEncoder, KVPairs kv,
        HttpRequest request, ChannelHandlerContext ctx, EventExecutor executor) throws Exception {
    String job = kv.getValue("job");
    String path = kv.getValue("path", kv.getValue("q", ""));
    Query query = new Query(job, new String[] { path },
            new String[] { kv.getValue("ops"), kv.getValue("rops") });
    query.setTraced(kv.getIntValue("trace", 0) == 1);
    query.setParameterIfNotYetSet("hosts", kv.getValue("hosts"));
    query.setParameterIfNotYetSet("gate", kv.getValue("gate"));
    query.setParameterIfNotYetSet("originalrequest", kv.getValue("originalrequest"));
    SocketAddress remoteIP = ctx.channel().remoteAddress();
    if (remoteIP instanceof InetSocketAddress) { // only log implementations with known methods
        query.setParameterIfNotYetSet("remoteip", ((InetSocketAddress) remoteIP).getAddress().getHostAddress());
    }
    query.setParameterIfNotYetSet("allocator", kv.getValue("allocator"));
    query.setParameterIfNotYetSet("allowPartial", kv.getValue("allowPartial"));
    String filename = kv.getValue("filename", "query");
    String format = kv.getValue("format", "json");
    String gdriveAccessToken = kv.getValue("accesstoken");
    int timeout = Math.min(kv.getIntValue("timeout", maxQueryTime), maxQueryTime);
    query.setParameterIfNotYetSet("timeout", timeout);
    query.setParameter("sender", kv.getValue("sender"));
    if (log.isDebugEnabled()) {
        log.debug(new StringMapHelper().put("type", "query.starting").put("query.path", query.getPaths()[0])
                .put("query.hosts", query.getParameter("hosts")).put("query.ops", query.getOps())
                .put("trace", query.isTraced()).put("sources", query.getParameter("sources"))
                .put("time", System.currentTimeMillis()).put("job.id", query.getJob())
                .put("query.id", query.uuid()).put("sender", query.getParameter("sender")).put("format", format)
                .put("filename", filename).put("originalrequest", query.getParameter("originalrequest"))
                .put("timeout", query.getParameter("timeout")).put("requestIP", query.getParameter("remoteip"))
                .put("allocator", query.getParameter("allocator"))
                .put("allowPartial", query.getParameter("allowPartial")).createKVPairs().toString());
    }
    // support legacy async query semantics
    query = LegacyHandler.handleQuery(query, kv, request, ctx);
    if (query == null) {
        return ctx.newSucceededFuture();
    }
    if (query.getJob() == null) {
        sendError(ctx, new HttpResponseStatus(500, "missing job"));
        return ctx.newSucceededFuture();
    }
    switch (format) {
    case "json":
        ctx.pipeline().addLast(executor, "format", new JsonBundleEncoder());
        break;
    case "html":
        ctx.pipeline().addLast(executor, "format", new HtmlBundleEncoder());
        break;
    case "gdrive":
        ctx.pipeline().addLast(executor, "stringer", stringer);
        ctx.pipeline().addLast(executor, "format", GoogleDriveBundleEncoder.create(filename, gdriveAccessToken));
        break;
    default:
        ctx.pipeline().addLast(executor, "format", DelimitedBundleEncoder.create(filename, format));
        break;
    }
    ctx.pipeline().addLast(executor, "mqm", queryToQueryResultsEncoder);
    return ctx.pipeline().write(query, new DefaultChannelProgressivePromise(ctx.channel(), executor));
}
From source file:com.addthis.hydra.query.util.QueryChannelUtil.java
License:Apache License
/** */
private static void runQuery(String[] args) throws Exception {
    HashMap<String, String> qparam = new HashMap<>();
    String sep = null;
    boolean quiet = false;
    boolean traced = false;
    int iter = 1;
    ArrayList<String> paths = new ArrayList<>(1);
    ArrayList<String> ops = new ArrayList<>(1);
    ArrayList<String> lops = new ArrayList<>(1);
    String job = null;
    String data = null;
    String out = null;
    for (int i = 0; i < args.length; i++) {
        String arg = args[i];
        int eqpos;
        if (arg.equals("help")) {
            System.out.println(
                    "job=[job] path=[path] ops=[ops] lops=[lops] data=[datadir] [iter=#] [quiet] [sep=separator] [out=file] [trace] [param=val]");
            return;
        }
        if (arg.equals("trace")) {
            traced = true;
        } else if (arg.equals("quiet")) {
            quiet = true;
        } else if (arg.equals("csv")) {
            sep = ",";
        } else if (arg.equals("tsv")) {
            sep = "\t";
        } else if (arg.startsWith("sep=")) {
            sep = arg.substring(4);
        } else if (arg.startsWith("iter=")) {
            iter = Integer.parseInt(arg.substring(5));
        } else if (arg.startsWith("lops=")) {
            lops.add(arg.substring(5));
        } else if (arg.startsWith("ops=")) {
            ops.add(arg.substring(4));
        } else if (arg.startsWith("job=")) {
            job = arg.substring(4);
        } else if (arg.startsWith("path=")) {
            paths.add(arg.substring(5));
        } else if (arg.startsWith("fpath=")) {
            paths.add(Bytes.toString(Files.read(new File(arg.substring(6)))).trim());
        } else if (arg.startsWith("data=")) {
            data = arg.substring(5);
        } else if (arg.startsWith("out=")) {
            out = arg.substring(4);
        } else if ((eqpos = arg.indexOf("=")) > 0) {
            String key = arg.substring(0, eqpos);
            String val = arg.substring(eqpos + 1);
            qparam.put(key, val);
        }
    }
    Query query = new Query(job, paths.toArray(new String[paths.size()]), ops.toArray(new String[ops.size()]));
    query.setTraced(traced);
    for (Entry<String, String> e : qparam.entrySet()) {
        query.setParameter(e.getKey(), e.getValue());
    }
    if (!quiet) {
        System.out.println(">>> query " + query);
    }
    QuerySource client;
    if (data != null) {
        final File dir = new File(data);
        client = new QueryEngineSource() {
            @Override
            public QueryEngine getEngineLease() {
                try {
                    return new QueryEngine(new ReadTree(dir));
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        };
    } else {
        throw new RuntimeException("no data directory specified");
    }
    while (iter-- > 0) {
        long start = System.currentTimeMillis();
        File tempDir = Files.createTempDir();
        BlockingNullConsumer consumer = new BlockingNullConsumer();
        QueryOpProcessor proc = new QueryOpProcessor.Builder(consumer, lops.toArray(new String[lops.size()]))
                .tempDir(tempDir).build();
        proc.appendOp(new BundleOutputWrapper(new PrintOp(sep, out),
                new DefaultChannelProgressivePromise(null, ImmediateEventExecutor.INSTANCE)));
        client.query(query, proc);
        consumer.waitComplete();
        Files.deleteDir(tempDir);
        if (!quiet) {
            System.out.println(
                    ">>> done " + proc + " in " + ((System.currentTimeMillis() - start) / 1000.0) + " sec");
        }
    }
    System.exit(0);
}
From source file:com.addthis.hydra.query.web.HttpQueryCallHandler.java
License:Apache License
/**
 * special handler for query
 */
public static ChannelFuture handleQuery(ChannelHandler queryToQueryResultsEncoder, KVPairs kv,
        HttpRequest request, ChannelHandlerContext ctx, EventExecutor executor) throws Exception {
    String job = kv.getValue("job");
    // support either job=id/dir or job=id&dir=dir for convenience (and don't punish doing both)
    String dir = kv.getValue("dir");
    if ((dir != null) && !job.endsWith(dir)) {
        String[] jobs = job.split(",");
        String[] dirs = dir.split(",");
        job = Arrays.stream(jobs).flatMap(subJob -> Arrays.stream(dirs).map(subDir -> subJob + '/' + subDir))
                .collect(Collectors.joining(","));
    }
    String path = kv.getValue("path", kv.getValue("q", ""));
    Query query = new Query(job, new String[] { path },
            new String[] { kv.getValue("ops"), kv.getValue("rops") });
    query.setTraced(kv.getIntValue("trace", 0) == 1);
    query.setParameterIfNotYetSet("hosts", kv.getValue("hosts"));
    query.setParameterIfNotYetSet("gate", kv.getValue("gate"));
    query.setParameterIfNotYetSet("originalrequest", kv.getValue("originalrequest"));
    SocketAddress remoteIP = ctx.channel().remoteAddress();
    if (remoteIP instanceof InetSocketAddress) { // only log implementations with known methods
        query.setParameterIfNotYetSet("remoteip", ((InetSocketAddress) remoteIP).getAddress().getHostAddress());
    }
    query.setParameterIfNotYetSet("allocator", kv.getValue("allocator"));
    query.setParameterIfNotYetSet("allowPartial", kv.getValue("allowPartial"));
    query.setParameterIfNotYetSet("tasks", kv.getValue("tasks"));
    String filename = kv.getValue("filename", "query");
    String format = kv.getValue("format", "json");
    String gdriveAccessToken = kv.getValue("accesstoken");
    int timeout = Math.min(kv.getIntValue("timeout", maxQueryTime), maxQueryTime);
    query.setParameterIfNotYetSet("timeout", timeout);
    query.setParameter("sender", kv.getValue("sender"));
    query.setParameter("injectSource", kv.getValue("injectSource"));
    if (log.isDebugEnabled()) {
        log.debug(new StringMapHelper().put("type", "query.starting").put("query.path", query.getPaths()[0])
                .put("query.hosts", query.getParameter("hosts")).put("query.ops", query.getOps())
                .put("trace", query.isTraced()).put("sources", query.getParameter("sources"))
                .put("time", System.currentTimeMillis()).put("job.id", query.getJob())
                .put("query.id", query.uuid()).put("sender", query.getParameter("sender")).put("format", format)
                .put("filename", filename).put("originalrequest", query.getParameter("originalrequest"))
                .put("timeout", query.getParameter("timeout")).put("requestIP", query.getParameter("remoteip"))
                .put("allocator", query.getParameter("allocator"))
                .put("allowPartial", query.getParameter("allowPartial"))
                .put("tasks", query.getParameter("tasks")).createKVPairs().toString());
    }
    // support legacy async query semantics
    query = LegacyHandler.handleQuery(query, kv, request, ctx);
    if (query == null) {
        return ctx.newSucceededFuture();
    }
    if (query.getJob() == null) {
        sendError(ctx, new HttpResponseStatus(500, "missing job"));
        return ctx.newSucceededFuture();
    }
    switch (format) {
    case "json":
        ctx.pipeline().addLast(executor, "format", new JsonBundleEncoder());
        break;
    case "html":
        ctx.pipeline().addLast(executor, "format", new HtmlBundleEncoder());
        break;
    case "gdrive":
        ctx.pipeline().addLast(executor, "stringer", stringer);
        ctx.pipeline().addLast(executor, "format", GoogleDriveBundleEncoder.create(filename, gdriveAccessToken));
        break;
    case "csv2":
    case "psv2":
    case "tsv2":
        ctx.pipeline().addLast(executor, "format", DelimitedEscapedBundleEncoder.create(filename, format));
        break;
    default:
        ctx.pipeline().addLast(executor, "format", DelimitedBundleEncoder.create(filename, format));
        break;
    }
    ctx.pipeline().addLast(executor, "mqm", queryToQueryResultsEncoder);
    return ctx.pipeline().write(query, new DefaultChannelProgressivePromise(ctx.channel(), executor));
}
From source file:com.addthis.hydra.task.output.tree.PathOutput.java
License:Apache License
public void exec(DataTree tree) {
    synchronized (this) {
        if (engine == null) {
            engine = new QueryEngine(tree);
        }
    }
    QueryOpProcessor rp = new QueryOpProcessor.Builder(output, opsString).memTip(maxmem).build();
    if (ops != null) {
        for (QueryOp op : ops) {
            rp.appendOp(op);
        }
    }
    rp.appendOp(new OutputOp(rp));
    try {
        output.open();
        engine.search(query, rp, new DefaultChannelProgressivePromise(null, ImmediateEventExecutor.INSTANCE));
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        output.sendComplete();
    }
}
From source file:io.reactivex.netty.contexts.NoOpChannelHandlerContext.java
License:Apache License
@Override
public ChannelProgressivePromise newProgressivePromise() {
    return new DefaultChannelProgressivePromise(channel, executor());
}
From source file:org.restcomm.media.network.netty.channel.AsyncNettyNetworkChannelTest.java
License:Open Source License
@SuppressWarnings("unchecked")
@Test
public void testBindFailure() {
    // given
    final SocketAddress localAddress = new InetSocketAddress("127.0.0.1", 2427);
    final ChannelFuture channelBindFuture = mock(ChannelFuture.class);
    final ChannelFuture channelCloseFuture = mock(ChannelFuture.class);
    final Channel channel = mock(Channel.class);
    final ChannelHandler channelHandler = mock(ChannelHandler.class);
    this.eventGroup = new NioEventLoopGroup();
    final Bootstrap bootstrap = new Bootstrap().group(eventGroup).handler(channelHandler)
            .channel(NioDatagramChannel.class);
    final NettyNetworkManager networkManager = new NettyNetworkManager(bootstrap);
    final NettyNetworkManager networkManagerSpy = spy(networkManager);
    final AsyncNettyNetworkChannel<Object> networkChannel = new AsyncNettyNetworkChannel<>(networkManagerSpy);
    final FutureCallback<Void> openCallback = mock(FutureCallback.class);
    final FutureCallback<Void> bindCallback = mock(FutureCallback.class);
    final Exception exception = new RuntimeException("Testing purposes!");

    when(channel.bind(localAddress)).thenReturn(channelBindFuture);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            final FutureCallback<Channel> callback = invocation.getArgumentAt(0, FutureCallback.class);
            callback.onSuccess(channel);
            return null;
        }
    }).when(networkManagerSpy).openChannel(any(FutureCallback.class));
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
            final ChannelFutureListener listener = invocation.getArgumentAt(0, ChannelFutureListener.class);
            final ChannelPromise promise = new DefaultChannelProgressivePromise(channel, mock(EventExecutor.class));
            promise.setFailure(exception);
            listener.operationComplete(promise);
            return null;
        }
    }).when(channelBindFuture).addListener(any(ChannelFutureListener.class));
    when(channel.close()).thenReturn(channelCloseFuture);

    // when - open
    networkChannel.open(openCallback);
    networkChannel.bind(localAddress, bindCallback);

    // then
    verify(bindCallback, timeout(100)).onFailure(exception);
    assertFalse(networkChannel.isOpen());
    assertFalse(networkChannel.isBound());
    assertFalse(networkChannel.isConnected());
}