List of usage examples for io.netty.util NetUtil SOMAXCONN
int SOMAXCONN
To view the source code for io.netty.util NetUtil SOMAXCONN, click the Source link.
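For quick orientation before the project examples below, here is a minimal, self-contained sketch (not taken from either source file) that uses NetUtil.SOMAXCONN as the accept backlog of a plain java.net.ServerSocket. The class name SomaxconnBacklogExample and port 8080 are illustrative only. On Linux, Netty typically derives this value from /proc/sys/net/core/somaxconn and falls back to a built-in default when the value cannot be read.

import java.io.IOException;
import java.net.ServerSocket;

import io.netty.util.NetUtil;

public class SomaxconnBacklogExample {
  public static void main(String[] args) throws IOException {
    // NetUtil.SOMAXCONN is the OS-reported limit for a listen socket's accept queue
    // (with a library default when the value cannot be determined).
    int backlog = NetUtil.SOMAXCONN;
    System.out.println("Using backlog: " + backlog);

    // Use it directly as the backlog of a plain server socket.
    try (ServerSocket server = new ServerSocket(8080, backlog)) {
      System.out.println("Listening on " + server.getLocalPort());
      // server.accept() ... handle connections here
    }
  }
}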
From source file: org.apache.hadoop.hive.llap.daemon.services.impl.SystemConfigurationServlet.java
License: Apache License
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
  JsonGenerator jg = null;
  PrintWriter writer = null;
  if (!HttpServer.isInstrumentationAccessAllowed(getServletContext(), request, response)) {
    response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
    return;
  }
  setResponseHeader(response);
  boolean refresh = Boolean.parseBoolean(request.getParameter("refresh"));
  try {
    writer = response.getWriter();
    jg = jsonFactory.createJsonGenerator(writer);
    jg.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
    jg.useDefaultPrettyPrinter();
    jg.writeStartObject();
    jg.writeObjectField("pid", LlapDaemonInfo.INSTANCE.getPID());
    jg.writeObjectField("os.name", System.getProperty("os.name"));
    if (Shell.WINDOWS) {
      jg.writeObjectField("net.core.somaxconn", NetUtil.SOMAXCONN);
    } else {
      String sysctlCmd = "sysctl -a";
      try {
        if (sysctlOutRef.get() == null || refresh) {
          LOG.info("Reading kernel configs via sysctl..");
          String sysctlOutput = Shell.execCommand(sysctlCmd.split("\\s+"));
          sysctlOutRef.set(sysctlOutput);
        }
      } catch (IOException e) {
        LOG.warn("Unable to execute '{}' command", sysctlCmd, e);
        sysctlOutRef.set(FAILED);
        // failures will not be retried (to avoid fork + exec running sysctl command)
        jg.writeObjectField("sysctl", FAILED);
        jg.writeObjectField("sysctl-failure-reason", e.getMessage());
      }
      if (sysctlOutRef.get() != null && !sysctlOutRef.get().equals(FAILED)) {
        String[] lines = sysctlOutRef.get().split("\\r?\\n");
        for (String line : lines) {
          int sepIdx = line.indexOf(SYSCTL_KV_SEPARATOR);
          String key = sepIdx == -1 ? line.trim() : line.substring(0, sepIdx).trim();
          String value = sepIdx == -1 ? null
              : line.substring(sepIdx + 1).trim().replaceAll("\t", " ");
          if (!key.isEmpty()) {
            jg.writeObjectField(key, value);
          }
        }
      }
      if (!Shell.MAC) {
        // Red Hat/CentOS: /sys/kernel/mm/redhat_transparent_hugepage/defrag
        // Ubuntu/Debian, OEL, SLES: /sys/kernel/mm/transparent_hugepage/defrag
        String thpFileName = "/sys/kernel/mm/transparent_hugepage/enabled";
        String thpFileStr = PrivilegedFileReader.read(thpFileName);
        if (thpFileStr == null) {
          LOG.warn("Unable to read contents of {}", thpFileName);
          thpFileName = "/sys/kernel/mm/redhat_transparent_hugepage/enabled";
          thpFileStr = PrivilegedFileReader.read(thpFileName);
        }
        if (thpFileStr != null) {
          // Format: "always madvise [never]"
          int strIdx = thpFileStr.indexOf('[');
          int endIdx = thpFileStr.indexOf(']');
          jg.writeObjectField(thpFileName, thpFileStr.substring(strIdx + 1, endIdx));
        } else {
          LOG.warn("Unable to read contents of {}", thpFileName);
        }
        String thpDefragFileName = "/sys/kernel/mm/transparent_hugepage/defrag";
        String thpDefragFileStr = PrivilegedFileReader.read(thpDefragFileName);
        if (thpDefragFileStr == null) {
          LOG.warn("Unable to read contents of {}", thpDefragFileName);
          thpDefragFileName = "/sys/kernel/mm/redhat_transparent_hugepage/defrag";
          thpDefragFileStr = PrivilegedFileReader.read(thpDefragFileName);
        }
        if (thpDefragFileStr != null) {
          // Format: "always madvise [never]"
          int strIdx = thpDefragFileStr.indexOf('[');
          int endIdx = thpDefragFileStr.indexOf(']');
          jg.writeObjectField(thpDefragFileName,
              thpDefragFileStr.substring(strIdx + 1, endIdx));
        } else {
          LOG.warn("Unable to read contents of {}", thpDefragFileName);
        }
      }
    }
    jg.writeEndObject();
    response.setStatus(HttpServletResponse.SC_OK);
  } catch (Exception e) {
    LOG.error("Caught exception while processing llap /system web service request", e);
    response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
  } finally {
    if (jg != null) {
      jg.close();
    }
    if (writer != null) {
      writer.close();
    }
  }
}
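In this servlet, NetUtil.SOMAXCONN is consulted only on Windows, where the `sysctl -a` fallback used on the other platforms is not available. On those platforms the sysctl output is cached in sysctlOutRef, and a failed invocation is recorded as FAILED so the fork/exec of sysctl is not retried on later requests.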
From source file: org.apache.hadoop.hive.llap.shufflehandler.ShuffleHandler.java
License: Apache License
public void start() throws Exception {
  ServerBootstrap bootstrap = new ServerBootstrap(selector);
  // Timer is shared across entire factory and must be released separately
  timer = new HashedWheelTimer();
  try {
    pipelineFact = new HttpPipelineFactory(conf, timer);
  } catch (Exception ex) {
    throw new RuntimeException(ex);
  }
  bootstrap.setPipelineFactory(pipelineFact);
  bootstrap.setOption("backlog", NetUtil.SOMAXCONN);
  port = conf.getInt(SHUFFLE_PORT_CONFIG_KEY, DEFAULT_SHUFFLE_PORT);
  Channel ch = bootstrap.bind(new InetSocketAddress(port));
  accepted.add(ch);
  port = ((InetSocketAddress) ch.getLocalAddress()).getPort();
  conf.set(SHUFFLE_PORT_CONFIG_KEY, Integer.toString(port));
  pipelineFact.SHUFFLE.setPort(port);
  if (dirWatcher != null) {
    dirWatcher.start();
  }
  LOG.info("LlapShuffleHandler" + " listening on port " + port + " (SOMAXCONN: "
      + bootstrap.getOption("backlog") + ")");
}
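The ShuffleHandler above sets the backlog through the older Netty 3-style ServerBootstrap API (setPipelineFactory, setOption("backlog", ...)). As a rough comparison only, the sketch below shows the same backlog configuration with the Netty 4.x bootstrap, where the option is the typed ChannelOption.SO_BACKLOG. The class name, the empty ChannelInitializer, and the ephemeral port 0 (mirroring the port read-back above) are placeholders, not part of the original handler.

import java.net.InetSocketAddress;

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.util.NetUtil;

public class Netty4BacklogExample {
  public static void main(String[] args) throws InterruptedException {
    EventLoopGroup bossGroup = new NioEventLoopGroup(1);
    EventLoopGroup workerGroup = new NioEventLoopGroup();
    try {
      ServerBootstrap bootstrap = new ServerBootstrap()
          .group(bossGroup, workerGroup)
          .channel(NioServerSocketChannel.class)
          // Netty 4 equivalent of setOption("backlog", NetUtil.SOMAXCONN)
          .option(ChannelOption.SO_BACKLOG, NetUtil.SOMAXCONN)
          .childHandler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel ch) {
              // pipeline handlers would be added here
            }
          });
      Channel ch = bootstrap.bind(new InetSocketAddress(0)).sync().channel();
      int boundPort = ((InetSocketAddress) ch.localAddress()).getPort();
      System.out.println("Listening on port " + boundPort
          + " (SO_BACKLOG=" + NetUtil.SOMAXCONN + ")");
      ch.close().sync();
    } finally {
      bossGroup.shutdownGracefully();
      workerGroup.shutdownGracefully();
    }
  }
}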