List of usage examples for org.apache.commons.cli CommandLine getOptionValues
public String[] getOptionValues(String opt)
public String[] getOptionValues(char opt)
From source file:org.apache.cocoon.Main.java
/** * The <code>main</code> method. * * @param args a <code>String[]</code> of arguments * @exception Exception if an error occurs *//*from w w w .j av a2 s . c o m*/ public static void main(String[] args) throws Exception { Main.setOptions(); CommandLine line = new PosixParser().parse(options, args); listener = new OutputStreamListener(System.out); CocoonBean cocoon = new CocoonBean(); cocoon.addListener(listener); if (line.hasOption(HELP_OPT)) { printUsage(); } else if (line.hasOption(VERSION_OPT)) { printVersion(); } else { String uriGroup = null; if (line.hasOption(URI_GROUP_NAME_OPT)) { uriGroup = line.getOptionValue(URI_GROUP_NAME_OPT); } String destDir = null; if (line.hasOption(XCONF_OPT)) { // destDir from command line overrides one in xconf file destDir = Main.processXConf(cocoon, line.getOptionValue(XCONF_OPT), destDir, uriGroup); } if (line.hasOption(DEST_DIR_OPT)) { destDir = line.getOptionValue(DEST_DIR_OPT); } if (line.hasOption(VERBOSE_OPT)) { cocoon.setVerbose(true); } if (line.hasOption(PRECOMPILE_ONLY_OPT)) { cocoon.setPrecompileOnly(true); } if (line.hasOption(WORK_DIR_OPT)) { String workDir = line.getOptionValue(WORK_DIR_OPT); if (workDir.length() == 0) { listener.messageGenerated( "Careful, you must specify a work dir when using the -w/--workDir argument"); System.exit(1); } else { cocoon.setWorkDir(line.getOptionValue(WORK_DIR_OPT)); } } if (line.hasOption(CONTEXT_DIR_OPT)) { String contextDir = line.getOptionValue(CONTEXT_DIR_OPT); if (contextDir.length() == 0) { listener.messageGenerated( "Careful, you must specify a configuration file when using the -c/--contextDir argument"); System.exit(1); } else { cocoon.setContextDir(contextDir); } } if (line.hasOption(CONFIG_FILE_OPT)) { cocoon.setConfigFile(line.getOptionValue(CONFIG_FILE_OPT)); } if (line.hasOption(LOG_KIT_OPT)) { cocoon.setLogKit(line.getOptionValue(LOG_KIT_OPT)); } if (line.hasOption(LOGGER_OPT)) { cocoon.setLogger(line.getOptionValue(LOGGER_OPT)); } if 
(line.hasOption(LOG_LEVEL_OPT)) { cocoon.setLogLevel(line.getOptionValue(LOG_LEVEL_OPT)); } if (line.hasOption(AGENT_OPT)) { cocoon.setAgentOptions(line.getOptionValue(AGENT_OPT)); } if (line.hasOption(ACCEPT_OPT)) { cocoon.setAcceptOptions(line.getOptionValue(ACCEPT_OPT)); } if (line.hasOption(DEFAULT_FILENAME_OPT)) { cocoon.setDefaultFilename(line.getOptionValue(DEFAULT_FILENAME_OPT)); } if (line.hasOption(BROKEN_LINK_FILE_OPT)) { listener.setReportFile(line.getOptionValue(BROKEN_LINK_FILE_OPT)); } if (line.hasOption(FOLLOW_LINKS_OPT)) { cocoon.setFollowLinks(BooleanUtils.toBoolean(line.getOptionValue(FOLLOW_LINKS_OPT))); } if (line.hasOption(CONFIRM_EXTENSIONS_OPT)) { cocoon.setConfirmExtensions( BooleanUtils.toBoolean(line.getOptionValue(CONFIRM_EXTENSIONS_OPT, "yes"))); } if (line.hasOption(LOAD_CLASS_OPT)) { cocoon.addLoadedClasses(Arrays.asList(line.getOptionValues(LOAD_CLASS_OPT))); } if (line.hasOption(URI_FILE_OPT)) { cocoon.addTargets(BeanConfigurator.processURIFile(line.getOptionValue(URI_FILE_OPT)), destDir); } cocoon.addTargets(line.getArgList(), destDir); listener.messageGenerated(CocoonBean.getProlog()); if (cocoon.getTargetCount() == 0 && cocoon.isPrecompileOnly()) { listener.messageGenerated("Please, specify at least one starting URI."); System.exit(1); } cocoon.initialize(); cocoon.process(); cocoon.dispose(); listener.complete(); int exitCode = (listener.isSuccessful() ? 0 : 1); System.exit(exitCode); } }
From source file:org.apache.eagle.common.config.ConfigOptionParser.java
protected Map<String, String> parseCommand(CommandLine cmd) throws ParseException { Map<String, String> result = new HashMap<>(); if (cmd.hasOption(CONFIG_OPT_FLAG)) { String[] values = cmd.getOptionValues(CONFIG_OPT_FLAG); for (String value : values) { int eqIndex = value.indexOf("="); if (eqIndex > 0 && eqIndex < value.length()) { String k = value.substring(0, eqIndex); String v = value.substring(eqIndex + 1, value.length()); if (result.containsKey(k)) { throw new ParseException("Duplicated " + CONFIG_OPT_FLAG + " " + value); } else { result.put(k, v);//from w ww. ja v a 2 s . c om } } else { throw new ParseException("Invalid format: -" + CONFIG_OPT_FLAG + " " + value + ", required: -" + CONFIG_OPT_FLAG + " key=value"); } } } return result; }
From source file:org.apache.easyant.core.EasyAntMain.java
/**
 * Process command line arguments. When ant is started from Launcher, launcher-only
 * arguments do not get passed through to this routine.
 *
 * <p>Mutates {@code easyAntConfiguration} in place; option order in this method
 * matters (e.g. -debug overrides -verbose overrides -quiet because later
 * assignments win). Sets {@code readyToRun = true} on normal completion;
 * returns early (without setting it) for -help, -version and -diagnostics.
 *
 * @param line the parsed command line
 * @since Ant 1.6
 */
private void processArgs(CommandLine line) {
    String searchForThis;
    PrintStream logTo = null;
    // -help / -version / -diagnostics short-circuit: print and return without
    // configuring a build run.
    if (line.hasOption("help")) {
        printUsage();
        return;
    }
    if (easyAntConfiguration.getMsgOutputLevel() >= Project.MSG_VERBOSE || line.hasOption("version")) {
        printVersion();
        if (line.hasOption("version")) {
            return;
        }
    }
    if (line.hasOption("showMemoryDetails")) {
        easyAntConfiguration.setShowMemoryDetails(true);
    }
    if (line.hasOption("diagnostics")) {
        Diagnostics.doReport(System.out, easyAntConfiguration.getMsgOutputLevel());
        return;
    }
    // Message output level: each flag overwrites the previous, so when several
    // are given the most verbose of (-quiet, -verbose, -debug) effectively wins.
    if (line.hasOption("quiet")) {
        easyAntConfiguration.setMsgOutputLevel(Project.MSG_WARN);
    }
    if (line.hasOption("verbose")) {
        easyAntConfiguration.setMsgOutputLevel(Project.MSG_VERBOSE);
    }
    if (line.hasOption("debug")) {
        easyAntConfiguration.setMsgOutputLevel(Project.MSG_DEBUG);
    }
    if (line.hasOption("noinput")) {
        easyAntConfiguration.setAllowInput(false);
    }
    // -logfile: open the target file now; the actual stream redirection happens
    // at the very end of this method, once parsing has succeeded.
    if (line.hasOption("logfile")) {
        try {
            File logFile = new File(line.getOptionValue("logfile"));
            logTo = new PrintStream(new FileOutputStream(logFile));
            isLogFileUsed = true;
        } catch (IOException ioe) {
            String msg = "Cannot write on the specified log file. "
                    + "Make sure the path exists and you have write " + "permissions.";
            throw new BuildException(msg);
        } catch (ArrayIndexOutOfBoundsException aioobe) {
            // NOTE(review): legacy catch kept from an older argv-indexing
            // implementation; getOptionValue does not appear to throw this.
            String msg = "You must specify a log file when " + "using the -log argument";
            throw new BuildException(msg);
        }
    }
    // Build module / build file locations (normalize '/' to the platform separator).
    if (line.hasOption("buildmodule")) {
        File buildModule = new File(line.getOptionValue("buildmodule").replace('/', File.separatorChar));
        easyAntConfiguration.setBuildModule(buildModule);
    }
    if (line.hasOption("buildfile")) {
        File buildFile = new File(line.getOptionValue("buildfile").replace('/', File.separatorChar));
        easyAntConfiguration.setBuildFile(buildFile);
    }
    if (line.hasOption("buildconf")) {
        easyAntConfiguration.getActiveBuildConfigurations().add(line.getOptionValue("buildconf"));
    }
    // EasyAnt configuration file: explicit -configfile wins, otherwise fall back
    // to the default file under EASYANT_HOME if it exists.
    File easyantConfFile = null;
    if (line.hasOption("configfile")) {
        easyantConfFile = new File(line.getOptionValue("configfile").replace('/', File.separatorChar));
    } else {
        // if no command line switch is specified check the default location
        File easyantHome = new File(
                System.getProperty(EasyAntMagicNames.EASYANT_HOME).replace('/', File.separatorChar));
        File defaultGlobalEasyantConfFile = new File(easyantHome,
                EasyAntConstants.DEFAULT_GLOBAL_EASYANT_CONF_FILE);
        if (defaultGlobalEasyantConfFile.exists()) {
            easyantConfFile = defaultGlobalEasyantConfFile;
        }
    }
    if (easyantConfFile != null) {
        try {
            easyAntConfiguration = EasyantConfigurationFactory.getInstance()
                    .createConfigurationFromFile(easyAntConfiguration, easyantConfFile.toURI().toURL());
        } catch (Exception e) {
            throw new BuildException(e);
        }
    }
    if (line.hasOption("listener")) {
        easyAntConfiguration.getListeners().add(line.getOptionValue("listener"));
    }
    // -Dkey=value user properties, collected wholesale.
    if (line.hasOption("D")) {
        easyAntConfiguration.getDefinedProps().putAll(line.getOptionProperties("D"));
    }
    // Logger and input handler may each be specified at most once.
    if (line.hasOption("logger")) {
        if (easyAntConfiguration.getLoggerClassname() != null) {
            throw new BuildException("Only one logger class may be specified.");
        }
        easyAntConfiguration.setLoggerClassname(line.getOptionValue("logger"));
    }
    if (line.hasOption("inputhandler")) {
        if (easyAntConfiguration.getInputHandlerClassname() != null) {
            throw new BuildException("Only one input handler class may " + "be specified.");
        }
        easyAntConfiguration.setInputHandlerClassname(line.getOptionValue("inputhandler"));
    }
    if (line.hasOption("emacs")) {
        easyAntConfiguration.setEmacsMode(true);
    }
    if (line.hasOption("projecthelp")) {
        // set the flag to display the targets and quit
        projectHelp = true;
    }
    if (line.hasOption("find")) {
        // eat up next arg if present, default to module.ivy
        if (line.getOptionValues("find").length > 0) {
            searchForThis = line.getOptionValue("find");
        } else {
            searchForThis = EasyAntConstants.DEFAULT_BUILD_MODULE;
        }
        easyAntConfiguration.setBuildModule(new File(searchForThis));
        easyAntConfiguration.setBuildModuleLookupEnabled(true);
    }
    if (line.hasOption("propertyfile")) {
        propertyFiles.add(line.getOptionValue("propertyfile"));
    }
    if (line.hasOption("keep-going")) {
        easyAntConfiguration.setKeepGoingMode(true);
    }
    if (line.hasOption("offline")) {
        easyAntConfiguration.setOffline(true);
    }
    // -nice: thread priority; Integer.decode also accepts hex/octal forms.
    if (line.hasOption("nice")) {
        easyAntConfiguration.setThreadPriority(Integer.decode(line.getOptionValue("nice")));
        if (easyAntConfiguration.getThreadPriority() < Thread.MIN_PRIORITY
                || easyAntConfiguration.getThreadPriority() > Thread.MAX_PRIORITY) {
            throw new BuildException("Niceness value is out of the range 1-10");
        }
    }
    if (line.hasOption("autoproxy")) {
        easyAntConfiguration.setProxy(true);
    }
    // Remaining bare arguments are build targets.
    if (!line.getArgList().isEmpty()) {
        for (Object o : line.getArgList()) {
            String target = (String) o;
            easyAntConfiguration.getTargets().add(target);
        }
    }
    // Load the property files specified by -propertyfile
    loadPropertyFiles();
    // Redirect stdout/stderr to the -logfile stream, now that parsing succeeded.
    if (logTo != null) {
        easyAntConfiguration.setOut(logTo);
        easyAntConfiguration.setErr(logTo);
        System.setOut(easyAntConfiguration.getOut());
        System.setErr(easyAntConfiguration.getErr());
    }
    readyToRun = true;
}
From source file:org.apache.flink.client.cli.ProgramOptions.java
/**
 * Reads the program-specific settings from a parsed command line: jar file,
 * entry-point class, program arguments, additional classpaths, parallelism,
 * logging/detached flags and savepoint restore settings.
 *
 * @param line the parsed command line
 * @throws CliArgsException if a classpath URL is malformed or the parallelism
 *         value is not a positive integer
 */
protected ProgramOptions(CommandLine line) throws CliArgsException {
    super(line);

    // Program arguments: the explicit arguments option wins, otherwise the
    // trailing (positional) arguments are used.
    String[] remaining;
    if (line.hasOption(ARGS_OPTION.getOpt())) {
        remaining = line.getOptionValues(ARGS_OPTION.getOpt());
    } else {
        remaining = line.getArgs();
    }

    // The jar may be given via its option or as the first positional argument;
    // in the latter case it is consumed from the argument list.
    if (line.hasOption(JAR_OPTION.getOpt())) {
        this.jarFilePath = line.getOptionValue(JAR_OPTION.getOpt());
    } else if (remaining.length > 0) {
        this.jarFilePath = remaining[0];
        remaining = Arrays.copyOfRange(remaining, 1, remaining.length);
    } else {
        this.jarFilePath = null;
    }
    this.programArgs = remaining;

    // Additional user classpaths; each value must be a well-formed URL.
    final List<URL> parsedClasspaths = new ArrayList<URL>();
    if (line.hasOption(CLASSPATH_OPTION.getOpt())) {
        for (String entry : line.getOptionValues(CLASSPATH_OPTION.getOpt())) {
            try {
                parsedClasspaths.add(new URL(entry));
            } catch (MalformedURLException e) {
                throw new CliArgsException("Bad syntax for classpath: " + entry);
            }
        }
    }
    this.classpaths = parsedClasspaths;

    if (line.hasOption(CLASS_OPTION.getOpt())) {
        this.entryPointClass = line.getOptionValue(CLASS_OPTION.getOpt());
    } else {
        this.entryPointClass = null;
    }

    // Parallelism must parse as a strictly positive integer.
    if (line.hasOption(PARALLELISM_OPTION.getOpt())) {
        final String parString = line.getOptionValue(PARALLELISM_OPTION.getOpt());
        try {
            final int parsed = Integer.parseInt(parString);
            if (parsed <= 0) {
                throw new NumberFormatException();
            }
            parallelism = parsed;
        } catch (NumberFormatException e) {
            throw new CliArgsException("The parallelism must be a positive number: " + parString);
        }
    } else {
        parallelism = ExecutionConfig.PARALLELISM_DEFAULT;
    }

    stdoutLogging = !line.hasOption(LOGGING_OPTION.getOpt());
    detachedMode = line.hasOption(DETACHED_OPTION.getOpt());

    // Savepoint restore: only configured when a savepoint path is present.
    if (line.hasOption(SAVEPOINT_PATH_OPTION.getOpt())) {
        final String savepointPath = line.getOptionValue(SAVEPOINT_PATH_OPTION.getOpt());
        final boolean allowNonRestoredState = line.hasOption(SAVEPOINT_ALLOW_NON_RESTORED_OPTION.getOpt());
        this.savepointSettings = SavepointRestoreSettings.forPath(savepointPath, allowNonRestoredState);
    } else {
        this.savepointSettings = SavepointRestoreSettings.none();
    }
}
From source file:org.apache.flink.client.FlinkYarnSessionCli.java
/** * Creates a new Yarn Client./*from w ww . ja v a 2s .c o m*/ * @param cmd the command line to parse options from * @return an instance of the client or null if there was an error */ public AbstractFlinkYarnClient createFlinkYarnClient(CommandLine cmd) { AbstractFlinkYarnClient flinkYarnClient = getFlinkYarnClient(); if (flinkYarnClient == null) { return null; } if (!cmd.hasOption(CONTAINER.getOpt())) { // number of containers is required option! LOG.error("Missing required argument " + CONTAINER.getOpt()); printUsage(); return null; } flinkYarnClient.setTaskManagerCount(Integer.valueOf(cmd.getOptionValue(CONTAINER.getOpt()))); // Jar Path Path localJarPath; if (cmd.hasOption(FLINK_JAR.getOpt())) { String userPath = cmd.getOptionValue(FLINK_JAR.getOpt()); if (!userPath.startsWith("file://")) { userPath = "file://" + userPath; } localJarPath = new Path(userPath); } else { LOG.info("No path for the flink jar passed. Using the location of " + flinkYarnClient.getClass() + " to locate the jar"); localJarPath = new Path("file://" + flinkYarnClient.getClass().getProtectionDomain().getCodeSource().getLocation().getPath()); } flinkYarnClient.setLocalJarPath(localJarPath); // Conf Path String confDirPath = CliFrontend.getConfigurationDirectoryFromEnv(); GlobalConfiguration.loadConfiguration(confDirPath); Configuration flinkConfiguration = GlobalConfiguration.getConfiguration(); flinkYarnClient.setFlinkConfigurationObject(flinkConfiguration); flinkYarnClient.setConfigurationDirectory(confDirPath); File confFile = new File(confDirPath + File.separator + CONFIG_FILE_NAME); if (!confFile.exists()) { LOG.error("Unable to locate configuration file in " + confFile); return null; } Path confPath = new Path(confFile.getAbsolutePath()); flinkYarnClient.setConfigurationFilePath(confPath); List<File> shipFiles = new ArrayList<>(); // path to directory to ship if (cmd.hasOption(SHIP_PATH.getOpt())) { String shipPath = cmd.getOptionValue(SHIP_PATH.getOpt()); File shipDir = new 
File(shipPath); if (shipDir.isDirectory()) { shipFiles = new ArrayList<>(Arrays.asList(shipDir.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return !(name.equals(".") || name.equals("..")); } }))); } else { LOG.warn("Ship directory is not a directory. Ignoring it."); } } //check if there is a logback or log4j file if (confDirPath.length() > 0) { File logback = new File(confDirPath + File.pathSeparator + CONFIG_FILE_LOGBACK_NAME); if (logback.exists()) { shipFiles.add(logback); flinkYarnClient.setFlinkLoggingConfigurationPath(new Path(logback.toURI())); } File log4j = new File(confDirPath + File.pathSeparator + CONFIG_FILE_LOG4J_NAME); if (log4j.exists()) { shipFiles.add(log4j); if (flinkYarnClient.getFlinkLoggingConfigurationPath() != null) { // this means there is already a logback configuration file --> fail LOG.warn("The configuration directory ('" + confDirPath + "') contains both LOG4J and " + "Logback configuration files. Please delete or rename one of them."); } // else flinkYarnClient.setFlinkLoggingConfigurationPath(new Path(log4j.toURI())); } } flinkYarnClient.setShipFiles(shipFiles); // queue if (cmd.hasOption(QUEUE.getOpt())) { flinkYarnClient.setQueue(cmd.getOptionValue(QUEUE.getOpt())); } // JobManager Memory if (cmd.hasOption(JM_MEMORY.getOpt())) { int jmMemory = Integer.valueOf(cmd.getOptionValue(JM_MEMORY.getOpt())); flinkYarnClient.setJobManagerMemory(jmMemory); } // Task Managers memory if (cmd.hasOption(TM_MEMORY.getOpt())) { int tmMemory = Integer.valueOf(cmd.getOptionValue(TM_MEMORY.getOpt())); flinkYarnClient.setTaskManagerMemory(tmMemory); } if (cmd.hasOption(SLOTS.getOpt())) { int slots = Integer.valueOf(cmd.getOptionValue(SLOTS.getOpt())); flinkYarnClient.setTaskManagerSlots(slots); } String[] dynamicProperties = null; if (cmd.hasOption(DYNAMIC_PROPERTIES.getOpt())) { dynamicProperties = cmd.getOptionValues(DYNAMIC_PROPERTIES.getOpt()); } String dynamicPropertiesEncoded = 
StringUtils.join(dynamicProperties, CliFrontend.YARN_DYNAMIC_PROPERTIES_SEPARATOR); flinkYarnClient.setDynamicPropertiesEncoded(dynamicPropertiesEncoded); if (cmd.hasOption(DETACHED.getOpt())) { this.detachedMode = true; flinkYarnClient.setDetachedMode(detachedMode); } if (cmd.hasOption(NAME.getOpt())) { flinkYarnClient.setName(cmd.getOptionValue(NAME.getOpt())); } else { // set the default application name, if none is specified if (defaultApplicationName != null) { flinkYarnClient.setName(defaultApplicationName); } } return flinkYarnClient; }
From source file:org.apache.flink.runtime.clusterframework.BootstrapTools.java
/**
 * Parse the dynamic properties (passed on the command line).
 *
 * <p>Each occurrence of the dynamic-properties option is either {@code key=value}
 * or a bare {@code key}; a bare key is stored with the value {@code "true"}.
 *
 * @param cmd the parsed command line
 * @return a new {@link Configuration} holding the parsed properties (empty if none given)
 */
public static Configuration parseDynamicProperties(CommandLine cmd) {
    final Configuration config = new Configuration();
    final String[] assignments = cmd.getOptionValues(DYNAMIC_PROPERTIES_OPT);
    if (assignments == null) {
        // option not present at all
        return config;
    }
    for (String assignment : assignments) {
        // limit 2 keeps any '=' characters inside the value intact
        final String[] parts = assignment.split("=", 2);
        final String propValue = (parts.length == 2) ? parts[1] : Boolean.TRUE.toString();
        config.setString(parts[0], propValue);
    }
    return config;
}
From source file:org.apache.flink.table.client.cli.CliOptionsParser.java
private static List<URL> checkUrls(CommandLine line, Option option) { if (line.hasOption(option.getOpt())) { final String[] urls = line.getOptionValues(option.getOpt()); return Arrays.stream(urls).distinct().map((url) -> { try { return Path.fromLocalFile(new File(url).getAbsoluteFile()).toUri().toURL(); } catch (Exception e) { throw new SqlClientException("Invalid path for option '" + option.getLongOpt() + "': " + url, e);// w w w . j a va 2 s . co m } }).collect(Collectors.toList()); } return null; }
From source file:org.apache.flink.yarn.cli.FlinkYarnCLI.java
/**
 * Builds a {@link YarnClusterDescriptorV2} from the parsed command line.
 *
 * <p>Resolves the Flink dist jar (explicit option or the jar this class was
 * loaded from), ship directory, queue, JobManager memory, encoded dynamic
 * properties, detached mode, application name and ZooKeeper namespace.
 *
 * @param defaultApplicationName application name to use (may be null)
 * @param cmd the parsed command line
 * @return the configured cluster descriptor
 */
public YarnClusterDescriptorV2 createDescriptor(String defaultApplicationName, CommandLine cmd) {
    YarnClusterDescriptorV2 yarnClusterDescriptor = new YarnClusterDescriptorV2();
    // Jar Path
    Path localJarPath;
    if (cmd.hasOption(FLINK_JAR.getOpt())) {
        String userPath = cmd.getOptionValue(FLINK_JAR.getOpt());
        if (!userPath.startsWith("file://")) {
            userPath = "file://" + userPath;
        }
        localJarPath = new Path(userPath);
    } else {
        // Fall back to the jar containing this descriptor class.
        LOG.info("No path for the flink jar passed. Using the location of " + yarnClusterDescriptor.getClass()
                + " to locate the jar");
        String encodedJarPath = yarnClusterDescriptor.getClass().getProtectionDomain().getCodeSource()
                .getLocation().getPath();
        try {
            // we have to decode the url encoded parts of the path
            String decodedPath = URLDecoder.decode(encodedJarPath, Charset.defaultCharset().name());
            localJarPath = new Path(new File(decodedPath).toURI());
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath
                    + " Please supply a path manually via the -" + FLINK_JAR.getOpt() + " option.");
        }
    }
    yarnClusterDescriptor.setLocalJarPath(localJarPath);
    List<File> shipFiles = new ArrayList<>();
    // path to directory to ship
    if (cmd.hasOption(SHIP_PATH.getOpt())) {
        String shipPath = cmd.getOptionValue(SHIP_PATH.getOpt());
        File shipDir = new File(shipPath);
        if (shipDir.isDirectory()) {
            shipFiles.add(shipDir);
        } else {
            LOG.warn("Ship directory is not a directory. Ignoring it.");
        }
    }
    yarnClusterDescriptor.addShipFiles(shipFiles);
    // queue
    if (cmd.hasOption(QUEUE.getOpt())) {
        yarnClusterDescriptor.setQueue(cmd.getOptionValue(QUEUE.getOpt()));
    }
    // JobManager Memory
    if (cmd.hasOption(JM_MEMORY.getOpt())) {
        int jmMemory = Integer.valueOf(cmd.getOptionValue(JM_MEMORY.getOpt()));
        yarnClusterDescriptor.setJobManagerMemory(jmMemory);
    }
    // Dynamic properties are joined into one encoded string.
    // NOTE(review): presumably StringUtils.join tolerates a null array when no
    // -D options were given — confirm against the StringUtils implementation used.
    String[] dynamicProperties = null;
    if (cmd.hasOption(DYNAMIC_PROPERTIES.getOpt())) {
        dynamicProperties = cmd.getOptionValues(DYNAMIC_PROPERTIES.getOpt());
    }
    String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR);
    yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded);
    if (cmd.hasOption(DETACHED.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
        // TODO: not support non detach mode now.
        //this.detachedMode = false;
    }
    // Whatever the flags above, the field value decides the mode for now.
    yarnClusterDescriptor.setDetachedMode(this.detachedMode);
    if (defaultApplicationName != null) {
        yarnClusterDescriptor.setName(defaultApplicationName);
    }
    if (cmd.hasOption(ZOOKEEPER_NAMESPACE.getOpt())) {
        String zookeeperNamespace = cmd.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt());
        yarnClusterDescriptor.setZookeeperNamespace(zookeeperNamespace);
    }
    return yarnClusterDescriptor;
}
From source file:org.apache.flink.yarn.cli.FlinkYarnSessionCli.java
/**
 * Builds an {@link AbstractYarnClusterDescriptor} from the parsed session
 * command line.
 *
 * <p>The container count option is mandatory. Also resolves the Flink dist jar,
 * ship directory, queue, JobManager/TaskManager memory, slots, dynamic
 * properties, detached mode, application name and ZooKeeper namespace, and
 * finally adjusts slots-per-TaskManager when the user requested an explicit
 * parallelism.
 *
 * @param defaultApplicationName application name to fall back to (may be null)
 * @param cmd the parsed command line
 * @return the configured cluster descriptor
 * @throws IllegalArgumentException if the required container-count option is missing
 */
public AbstractYarnClusterDescriptor createDescriptor(String defaultApplicationName, CommandLine cmd) {
    AbstractYarnClusterDescriptor yarnClusterDescriptor = getClusterDescriptor();
    if (!cmd.hasOption(CONTAINER.getOpt())) { // number of containers is required option!
        LOG.error("Missing required argument {}", CONTAINER.getOpt());
        printUsage();
        throw new IllegalArgumentException("Missing required argument " + CONTAINER.getOpt());
    }
    yarnClusterDescriptor.setTaskManagerCount(Integer.valueOf(cmd.getOptionValue(CONTAINER.getOpt())));
    // Jar Path
    Path localJarPath;
    if (cmd.hasOption(FLINK_JAR.getOpt())) {
        String userPath = cmd.getOptionValue(FLINK_JAR.getOpt());
        if (!userPath.startsWith("file://")) {
            userPath = "file://" + userPath;
        }
        localJarPath = new Path(userPath);
    } else {
        // Fall back to the jar containing the descriptor class.
        LOG.info("No path for the flink jar passed. Using the location of " + yarnClusterDescriptor.getClass()
                + " to locate the jar");
        String encodedJarPath = yarnClusterDescriptor.getClass().getProtectionDomain().getCodeSource()
                .getLocation().getPath();
        try {
            // we have to decode the url encoded parts of the path
            String decodedPath = URLDecoder.decode(encodedJarPath, Charset.defaultCharset().name());
            localJarPath = new Path(new File(decodedPath).toURI());
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath
                    + " Please supply a path manually via the -" + FLINK_JAR.getOpt() + " option.");
        }
    }
    yarnClusterDescriptor.setLocalJarPath(localJarPath);
    List<File> shipFiles = new ArrayList<>();
    // path to directory to ship
    if (cmd.hasOption(SHIP_PATH.getOpt())) {
        String shipPath = cmd.getOptionValue(SHIP_PATH.getOpt());
        File shipDir = new File(shipPath);
        if (shipDir.isDirectory()) {
            shipFiles.add(shipDir);
        } else {
            LOG.warn("Ship directory is not a directory. Ignoring it.");
        }
    }
    yarnClusterDescriptor.addShipFiles(shipFiles);
    // queue
    if (cmd.hasOption(QUEUE.getOpt())) {
        yarnClusterDescriptor.setQueue(cmd.getOptionValue(QUEUE.getOpt()));
    }
    // JobManager Memory
    if (cmd.hasOption(JM_MEMORY.getOpt())) {
        int jmMemory = Integer.valueOf(cmd.getOptionValue(JM_MEMORY.getOpt()));
        yarnClusterDescriptor.setJobManagerMemory(jmMemory);
    }
    // Task Managers memory
    if (cmd.hasOption(TM_MEMORY.getOpt())) {
        int tmMemory = Integer.valueOf(cmd.getOptionValue(TM_MEMORY.getOpt()));
        yarnClusterDescriptor.setTaskManagerMemory(tmMemory);
    }
    if (cmd.hasOption(SLOTS.getOpt())) {
        int slots = Integer.valueOf(cmd.getOptionValue(SLOTS.getOpt()));
        yarnClusterDescriptor.setTaskManagerSlots(slots);
    }
    // Dynamic properties are joined into one encoded string.
    String[] dynamicProperties = null;
    if (cmd.hasOption(DYNAMIC_PROPERTIES.getOpt())) {
        dynamicProperties = cmd.getOptionValues(DYNAMIC_PROPERTIES.getOpt());
    }
    String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR);
    yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded);
    // Either the session CLI's own detached flag or the frontend's enables detached mode.
    if (cmd.hasOption(DETACHED.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
        this.detachedMode = true;
        yarnClusterDescriptor.setDetachedMode(true);
    }
    if (cmd.hasOption(NAME.getOpt())) {
        yarnClusterDescriptor.setName(cmd.getOptionValue(NAME.getOpt()));
    } else {
        // set the default application name, if none is specified
        if (defaultApplicationName != null) {
            yarnClusterDescriptor.setName(defaultApplicationName);
        }
    }
    if (cmd.hasOption(ZOOKEEPER_NAMESPACE.getOpt())) {
        String zookeeperNamespace = cmd.getOptionValue(ZOOKEEPER_NAMESPACE.getOpt());
        yarnClusterDescriptor.setZookeeperNamespace(zookeeperNamespace);
    }
    // ----- Convenience -----
    // the number of slots available from YARN:
    int yarnTmSlots = yarnClusterDescriptor.getTaskManagerSlots();
    if (yarnTmSlots == -1) {
        // unspecified slots default to one per TaskManager
        yarnTmSlots = 1;
        yarnClusterDescriptor.setTaskManagerSlots(yarnTmSlots);
    }
    int maxSlots = yarnTmSlots * yarnClusterDescriptor.getTaskManagerCount();
    // -1 marks "no explicit parallelism requested".
    int userParallelism = Integer
            .valueOf(cmd.getOptionValue(CliFrontendParser.PARALLELISM_OPTION.getOpt(), "-1"));
    if (userParallelism != -1) {
        // Spread the requested parallelism evenly over the TaskManagers,
        // rounding up, and tell the user what was decided.
        int slotsPerTM = (int) Math
                .ceil((double) userParallelism / yarnClusterDescriptor.getTaskManagerCount());
        String message = "The YARN cluster has " + maxSlots + " slots available, "
                + "but the user requested a parallelism of " + userParallelism + " on YARN. "
                + "Each of the " + yarnClusterDescriptor.getTaskManagerCount() + " TaskManagers "
                + "will get " + slotsPerTM + " slots.";
        logAndSysout(message);
        yarnClusterDescriptor.setTaskManagerSlots(slotsPerTM);
    }
    return yarnClusterDescriptor;
}
From source file:org.apache.flink.yarn.Client.java
/**
 * Main entry point of the Flink-on-YARN client: parses the command line, locates the
 * Flink jar and configuration, validates requested resources against the cluster,
 * builds and submits the ApplicationMaster container, then polls the application
 * state and relays TaskManager counts / messages until the session ends.
 *
 * Side effects: mutates the fields {@code conf}, {@code yarnClient}, {@code slots},
 * {@code cmc}, {@code sessionFilesDir} and {@code yarnPropertiesFile}, writes a YARN
 * properties file, and calls {@code System.exit} on validation failures.
 *
 * @param args raw command-line arguments (Commons CLI POSIX style)
 * @throws Exception on I/O, YARN client, or interruption failures
 */
public void run(String[] args) throws Exception {
    // Kerberos-secured clusters are explicitly unsupported; fail fast.
    if (UserGroupInformation.isSecurityEnabled()) {
        throw new RuntimeException("Flink YARN client does not have security support right now."
                + "File a bug, we will fix it asap");
    }
    //Utils.logFilesInCurrentDirectory(LOG);
    //
    // Command Line Options
    //
    Options options = new Options();
    options.addOption(VERBOSE);
    options.addOption(FLINK_CONF_DIR);
    options.addOption(FLINK_JAR);
    options.addOption(JM_MEMORY);
    options.addOption(TM_MEMORY);
    options.addOption(TM_CORES);
    options.addOption(CONTAINER);
    options.addOption(GEN_CONF);
    options.addOption(QUEUE);
    options.addOption(QUERY);
    options.addOption(SHIP_PATH);
    options.addOption(SLOTS);
    options.addOption(DYNAMIC_PROPERTIES);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (MissingOptionException moe) {
        // A required option is missing: print the reason and the usage, then abort.
        System.out.println(moe.getMessage());
        printUsage();
        System.exit(1);
    }

    // Jar Path: either user-supplied via -j/FLINK_JAR, or derived from the
    // location of this Client class itself.
    Path localJarPath;
    if (cmd.hasOption(FLINK_JAR.getOpt())) {
        String userPath = cmd.getOptionValue(FLINK_JAR.getOpt());
        // Normalize a bare filesystem path into a file:// URI.
        if (!userPath.startsWith("file://")) {
            userPath = "file://" + userPath;
        }
        localJarPath = new Path(userPath);
    } else {
        localJarPath = new Path(
                "file://" + Client.class.getProtectionDomain().getCodeSource().getLocation().getPath());
    }

    // GEN_CONF mode: only emit a default configuration file and exit.
    if (cmd.hasOption(GEN_CONF.getOpt())) {
        LOG.info("Placing default configuration in current directory");
        File outFile = generateDefaultConf(localJarPath);
        LOG.info("File written to " + outFile.getAbsolutePath());
        System.exit(0);
    }

    // Conf Path: explicit conf dir, else a single .yaml in the current
    // directory, else generate a default configuration.
    Path confPath = null;
    String confDirPath = "";
    if (cmd.hasOption(FLINK_CONF_DIR.getOpt())) {
        confDirPath = cmd.getOptionValue(FLINK_CONF_DIR.getOpt()) + "/";
        File confFile = new File(confDirPath + CONFIG_FILE_NAME);
        if (!confFile.exists()) {
            LOG.error("Unable to locate configuration file in " + confFile);
            System.exit(1);
        }
        confPath = new Path(confFile.getAbsolutePath());
    } else {
        System.out.println("No configuration file has been specified");
        // no configuration path given.
        // -> see if there is one in the current directory
        File currDir = new File(".");
        File[] candidates = currDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(final File dir, final String name) {
                return name != null && name.endsWith(".yaml");
            }
        });
        if (candidates == null || candidates.length == 0) {
            System.out.println(
                    "No configuration file has been found in current directory.\n" + "Copying default.");
            File outFile = generateDefaultConf(localJarPath);
            confPath = new Path(outFile.toURI());
        } else {
            // More than one candidate is ambiguous; exactly one is used as-is.
            if (candidates.length > 1) {
                System.out.println("Multiple .yaml configuration files were found in the current directory\n"
                        + "Please specify one explicitly");
                System.exit(1);
            } else if (candidates.length == 1) {
                confPath = new Path(candidates[0].toURI());
            }
        }
    }

    // Files to be shipped to the YARN containers alongside the jar and config.
    List<File> shipFiles = new ArrayList<File>();
    // path to directory to ship
    if (cmd.hasOption(SHIP_PATH.getOpt())) {
        String shipPath = cmd.getOptionValue(SHIP_PATH.getOpt());
        File shipDir = new File(shipPath);
        if (shipDir.isDirectory()) {
            // Ship every entry except "." and ".." (note: subdirectories are
            // not recursed into here).
            shipFiles = new ArrayList<File>(Arrays.asList(shipDir.listFiles(new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return !(name.equals(".") || name.equals(".."));
                }
            })));
        } else {
            LOG.warn("Ship directory is not a directory!");
        }
    }

    // check if there is a logback or log4j file in the conf dir; if so, ship it
    // and later pass the matching JVM property to the ApplicationMaster.
    boolean hasLogback = false;
    boolean hasLog4j = false;
    if (confDirPath.length() > 0) {
        File logback = new File(confDirPath + "/logback.xml");
        if (logback.exists()) {
            shipFiles.add(logback);
            hasLogback = true;
        }
        File log4j = new File(confDirPath + "/log4j.properties");
        if (log4j.exists()) {
            shipFiles.add(log4j);
            hasLog4j = true;
        }
    }

    // queue (YARN scheduler queue; defaults to "default")
    String queue = "default";
    if (cmd.hasOption(QUEUE.getOpt())) {
        queue = cmd.getOptionValue(QUEUE.getOpt());
    }

    // JobManager Memory (MB), bounded below by MIN_JM_MEMORY
    int jmMemory = 512;
    if (cmd.hasOption(JM_MEMORY.getOpt())) {
        jmMemory = Integer.valueOf(cmd.getOptionValue(JM_MEMORY.getOpt()));
    }
    if (jmMemory < MIN_JM_MEMORY) {
        System.out.println("The JobManager memory is below the minimum required memory amount " + "of "
                + MIN_JM_MEMORY + " MB");
        System.exit(1);
    }

    // Task Managers memory (MB), bounded below by MIN_TM_MEMORY
    int tmMemory = 1024;
    if (cmd.hasOption(TM_MEMORY.getOpt())) {
        tmMemory = Integer.valueOf(cmd.getOptionValue(TM_MEMORY.getOpt()));
    }
    if (tmMemory < MIN_TM_MEMORY) {
        System.out.println("The TaskManager memory is below the minimum required memory amount " + "of "
                + MIN_TM_MEMORY + " MB");
        System.exit(1);
    }

    // Slots per TaskManager; note this assigns a class field, not a local —
    // it is read again below when writing the YARN properties file.
    if (cmd.hasOption(SLOTS.getOpt())) {
        slots = Integer.valueOf(cmd.getOptionValue(SLOTS.getOpt()));
    }

    // Dynamic -D style properties, joined into one encoded string for the AM env.
    String[] dynamicProperties = null;
    if (cmd.hasOption(DYNAMIC_PROPERTIES.getOpt())) {
        dynamicProperties = cmd.getOptionValues(DYNAMIC_PROPERTIES.getOpt());
    }
    String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties,
            CliFrontend.YARN_DYNAMIC_PROPERTIES_SEPARATOR);

    // Task Managers vcores
    int tmCores = 1;
    if (cmd.hasOption(TM_CORES.getOpt())) {
        tmCores = Integer.valueOf(cmd.getOptionValue(TM_CORES.getOpt()));
    }

    // Presumably loads the Flink config into GlobalConfiguration as a side
    // effect (return value is discarded) — TODO confirm against Utils.
    Utils.getFlinkConfiguration(confPath.toUri().getPath());
    int jmPort = GlobalConfiguration.getInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, 0);
    if (jmPort == 0) {
        LOG.warn("Unable to find job manager port in configuration!");
        jmPort = ConfigConstants.DEFAULT_JOB_MANAGER_IPC_PORT;
    }
    conf = Utils.initializeYarnConfiguration();

    // intialize HDFS
    LOG.info("Copy App Master jar from local filesystem and add to local environment");
    // Copy the application master jar to the filesystem
    // Create a local resource to point to the destination jar path
    final FileSystem fs = FileSystem.get(conf);

    // hard coded check for the GoogleHDFS client because its not overriding the getScheme() method.
    if (!fs.getClass().getSimpleName().equals("GoogleHadoopFileSystem") && fs.getScheme().startsWith("file")) {
        LOG.warn("The file system scheme is '" + fs.getScheme() + "'. This indicates that the "
                + "specified Hadoop configuration path is wrong and the sytem is using the default Hadoop configuration values."
                + "The Flink YARN client needs to store its files in a distributed file system");
    }

    // Create yarnClient (stored in a field; stopped on every error path below)
    yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();

    // Query cluster for metrics
    // NOTE(review): unlike GEN_CONF, the QUERY path does not exit afterwards —
    // the client continues with a normal submission. Confirm this is intended.
    if (cmd.hasOption(QUERY.getOpt())) {
        showClusterMetrics(yarnClient);
    }
    if (!cmd.hasOption(CONTAINER.getOpt())) {
        LOG.error("Missing required argument " + CONTAINER.getOpt());
        printUsage();
        yarnClient.stop();
        System.exit(1);
    }

    // TM Count (number of TaskManager containers to request)
    final int taskManagerCount = Integer.valueOf(cmd.getOptionValue(CONTAINER.getOpt()));

    System.out.println("Using values:");
    System.out.println("\tContainer Count = " + taskManagerCount);
    System.out.println("\tJar Path = " + localJarPath.toUri().getPath());
    System.out.println("\tConfiguration file = " + confPath.toUri().getPath());
    System.out.println("\tJobManager memory = " + jmMemory);
    System.out.println("\tTaskManager memory = " + tmMemory);
    System.out.println("\tTaskManager cores = " + tmCores);

    // Create application via yarnClient and validate the request against the
    // cluster's per-container maximum and currently free resources.
    YarnClientApplication app = yarnClient.createApplication();
    GetNewApplicationResponse appResponse = app.getNewApplicationResponse();
    Resource maxRes = appResponse.getMaximumResourceCapability();
    if (tmMemory > maxRes.getMemory() || tmCores > maxRes.getVirtualCores()) {
        // NOTE(review): message prints the *requested* tmCores after
        // "Maximum Cores:" instead of maxRes.getVirtualCores() — misleading.
        LOG.error("The cluster does not have the requested resources for the TaskManagers available!\n"
                + "Maximum Memory: " + maxRes.getMemory() + ", Maximum Cores: " + tmCores);
        yarnClient.stop();
        System.exit(1);
    }
    if (jmMemory > maxRes.getMemory()) {
        LOG.error("The cluster does not have the requested resources for the JobManager available!\n"
                + "Maximum Memory: " + maxRes.getMemory());
        yarnClient.stop();
        System.exit(1);
    }
    int totalMemoryRequired = jmMemory + tmMemory * taskManagerCount;
    ClusterResourceDescription freeClusterMem = getCurrentFreeClusterResources(yarnClient);
    if (freeClusterMem.totalFreeMemory < totalMemoryRequired) {
        LOG.error("This YARN session requires " + totalMemoryRequired + "MB of memory in the cluster. "
                + "There are currently only " + freeClusterMem.totalFreeMemory + "MB available.");
        yarnClient.stop();
        System.exit(1);
    }
    if (tmMemory > freeClusterMem.containerLimit) {
        LOG.error("The requested amount of memory for the TaskManagers (" + tmMemory + "MB) is more than "
                + "the largest possible YARN container: " + freeClusterMem.containerLimit);
        yarnClient.stop();
        System.exit(1);
    }
    if (jmMemory > freeClusterMem.containerLimit) {
        LOG.error("The requested amount of memory for the JobManager (" + jmMemory + "MB) is more than "
                + "the largest possible YARN container: " + freeClusterMem.containerLimit);
        yarnClient.stop();
        System.exit(1);
    }

    // respect custom JVM options in the YAML file
    final String javaOpts = GlobalConfiguration.getString(ConfigConstants.FLINK_JVM_OPTIONS, "");

    // Set up the container launch context for the application master
    ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);

    // Build the AM launch command: JVM heap sized from jmMemory, optional
    // logging properties, then the ApplicationMaster main class with
    // stdout/stderr redirected into the YARN log directory.
    String amCommand = "$JAVA_HOME/bin/java" + " -Xmx" + Utils.calculateHeapSize(jmMemory) + "M " + javaOpts;
    if (hasLogback || hasLog4j) {
        amCommand += " -Dlog.file=\"" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/jobmanager-main.log\"";
    }
    if (hasLogback) {
        amCommand += " -Dlogback.configurationFile=file:logback.xml";
    }
    if (hasLog4j) {
        amCommand += " -Dlog4j.configuration=file:log4j.properties";
    }
    amCommand += " " + ApplicationMaster.class.getName() + " " + " 1>"
            + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/jobmanager-stdout.log" + " 2>"
            + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/jobmanager-stderr.log";
    amContainer.setCommands(Collections.singletonList(amCommand));

    System.err.println("amCommand=" + amCommand);

    // Set-up ApplicationSubmissionContext for the application
    ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
    final ApplicationId appId = appContext.getApplicationId();
    /**
     * All network ports are offsetted by the application number
     * to avoid version port clashes when running multiple Flink sessions
     * in parallel
     */
    int appNumber = appId.getId();
    jmPort = Utils.offsetPort(jmPort, appNumber);

    // Setup jar for ApplicationMaster: upload the Flink jar and the config
    // file to the user's home directory on the cluster filesystem and
    // register them as local resources for the AM container.
    LocalResource appMasterJar = Records.newRecord(LocalResource.class);
    LocalResource flinkConf = Records.newRecord(LocalResource.class);
    Path remotePathJar = Utils.setupLocalResource(conf, fs, appId.toString(), localJarPath, appMasterJar,
            fs.getHomeDirectory());
    Path remotePathConf = Utils.setupLocalResource(conf, fs, appId.toString(), confPath, flinkConf,
            fs.getHomeDirectory());
    Map<String, LocalResource> localResources = new HashMap<String, LocalResource>(2);
    localResources.put("flink.jar", appMasterJar);
    localResources.put("flink-conf.yaml", flinkConf);

    // setup security tokens (code from apache storm)
    // paths[0..1] hold jar and conf; paths[3+i] the ship files.
    // NOTE(review): paths[2] is never assigned and stays null — confirm
    // Utils.setTokensFor tolerates that.
    final Path[] paths = new Path[3 + shipFiles.size()];
    StringBuffer envShipFileList = new StringBuffer();
    // upload ship files and collect their remote paths as a comma-separated
    // list for the AM environment.
    for (int i = 0; i < shipFiles.size(); i++) {
        File shipFile = shipFiles.get(i);
        LocalResource shipResources = Records.newRecord(LocalResource.class);
        Path shipLocalPath = new Path("file://" + shipFile.getAbsolutePath());
        paths[3 + i] = Utils.setupLocalResource(conf, fs, appId.toString(), shipLocalPath, shipResources,
                fs.getHomeDirectory());
        localResources.put(shipFile.getName(), shipResources);
        envShipFileList.append(paths[3 + i]);
        if (i + 1 < shipFiles.size()) {
            envShipFileList.append(',');
        }
    }
    paths[0] = remotePathJar;
    paths[1] = remotePathConf;
    sessionFilesDir = new Path(fs.getHomeDirectory(), ".flink/" + appId.toString() + "/");
    FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
    fs.setPermission(sessionFilesDir, permission); // set permission for path.
    Utils.setTokensFor(amContainer, paths, this.conf);

    amContainer.setLocalResources(localResources);
    fs.close();

    // RPC port for the client<->AM control channel, offset like jmPort above.
    int amRPCPort = GlobalConfiguration.getInteger(ConfigConstants.YARN_AM_PRC_PORT,
            ConfigConstants.DEFAULT_YARN_AM_RPC_PORT);
    amRPCPort = Utils.offsetPort(amRPCPort, appNumber);

    // Setup CLASSPATH for ApplicationMaster
    Map<String, String> appMasterEnv = new HashMap<String, String>();
    Utils.setupEnv(conf, appMasterEnv);
    // set configuration values the AM reads back from its environment
    appMasterEnv.put(Client.ENV_TM_COUNT, String.valueOf(taskManagerCount));
    appMasterEnv.put(Client.ENV_TM_CORES, String.valueOf(tmCores));
    appMasterEnv.put(Client.ENV_TM_MEMORY, String.valueOf(tmMemory));
    appMasterEnv.put(Client.FLINK_JAR_PATH, remotePathJar.toString());
    appMasterEnv.put(Client.ENV_APP_ID, appId.toString());
    appMasterEnv.put(Client.ENV_CLIENT_HOME_DIR, fs.getHomeDirectory().toString());
    appMasterEnv.put(Client.ENV_CLIENT_SHIP_FILES, envShipFileList.toString());
    appMasterEnv.put(Client.ENV_CLIENT_USERNAME, UserGroupInformation.getCurrentUser().getShortUserName());
    appMasterEnv.put(Client.ENV_AM_PRC_PORT, String.valueOf(amRPCPort));
    appMasterEnv.put(Client.ENV_SLOTS, String.valueOf(slots));
    appMasterEnv.put(Client.ENV_APP_NUMBER, String.valueOf(appNumber));
    if (dynamicPropertiesEncoded != null) {
        appMasterEnv.put(Client.ENV_DYNAMIC_PROPERTIES, dynamicPropertiesEncoded);
    }
    amContainer.setEnvironment(appMasterEnv);

    // Set up resource type requirements for ApplicationMaster
    Resource capability = Records.newRecord(Resource.class);
    capability.setMemory(jmMemory);
    capability.setVirtualCores(1);

    appContext.setApplicationName("Flink"); // application name
    appContext.setAMContainerSpec(amContainer);
    appContext.setResource(capability);
    appContext.setQueue(queue);

    // file that we write into the conf/ dir containing the jobManager address and the dop.
    yarnPropertiesFile = new File(confDirPath + CliFrontend.YARN_PROPERTIES_FILE);

    LOG.info("Submitting application master " + appId);
    yarnClient.submitApplication(appContext);

    // ---- Monitoring loop: poll the application state until it terminates. ----
    ApplicationReport appReport = yarnClient.getApplicationReport(appId);
    YarnApplicationState appState = appReport.getYarnApplicationState();
    boolean told = false; // true once the RUNNING transition has been handled
    char[] el = { '/', '|', '\\', '-' }; // spinner frames shown while waiting
    int i = 0;
    int numTaskmanagers = 0;
    int numMessages = 0; // count of AM messages already echoed to the user
    BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
    while (appState != YarnApplicationState.FINISHED && appState != YarnApplicationState.KILLED
            && appState != YarnApplicationState.FAILED) {
        if (!told && appState == YarnApplicationState.RUNNING) {
            // First time we see RUNNING: announce the JobManager location,
            // persist the connection info, and open the RPC control channel.
            System.err.println("Flink JobManager is now running on " + appReport.getHost() + ":" + jmPort);
            System.err.println("JobManager Web Interface: " + appReport.getTrackingUrl());
            // write jobmanager connect information
            Properties yarnProps = new Properties();
            yarnProps.setProperty(CliFrontend.YARN_PROPERTIES_JOBMANAGER_KEY,
                    appReport.getHost() + ":" + jmPort);
            if (slots != -1) {
                // default parallelism = slots per TM times number of TMs
                yarnProps.setProperty(CliFrontend.YARN_PROPERTIES_DOP,
                        Integer.toString(slots * taskManagerCount));
            }
            // add dynamic properties
            if (dynamicProperties != null) {
                yarnProps.setProperty(CliFrontend.YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING,
                        dynamicPropertiesEncoded);
            }
            OutputStream out = new FileOutputStream(yarnPropertiesFile);
            yarnProps.store(out, "Generated YARN properties file");
            out.close();
            yarnPropertiesFile.setReadable(true, false); // readable for all.
            // connect RPC service
            cmc = new ClientMasterControl(new InetSocketAddress(appReport.getHost(), amRPCPort));
            cmc.start();
            Runtime.getRuntime().addShutdownHook(new ClientShutdownHook());
            told = true;
        }
        if (!told) {
            // Not RUNNING yet: animate the spinner and poll again.
            System.err.print(el[i++] + "\r");
            if (i == el.length) {
                i = 0;
            }
            Thread.sleep(500); // wait for the application to switch to RUNNING
        } else {
            // RUNNING: report TaskManager count changes and relay AM messages.
            int newTmCount = cmc.getNumberOfTaskManagers();
            if (numTaskmanagers != newTmCount) {
                System.err.println("Number of connected TaskManagers changed to " + newTmCount + ". "
                        + "Slots available: " + cmc.getNumberOfAvailableSlots());
                numTaskmanagers = newTmCount;
            }
            // we also need to show new messages.
            if (cmc.getFailedStatus()) {
                System.err.println("The Application Master failed!\nMessages:\n");
                for (Message m : cmc.getMessages()) {
                    System.err.println("Message: " + m.getMessage());
                }
                System.err.println("Requesting Application Master shutdown");
                cmc.shutdownAM();
                cmc.close();
                System.err.println("Application Master closed.");
            }
            if (cmc.getMessages().size() != numMessages) {
                System.err.println("Received new message(s) from the Application Master");
                List<Message> msg = cmc.getMessages();
                while (msg.size() > numMessages) {
                    System.err.println("Message: " + msg.get(numMessages).getMessage());
                    numMessages++;
                }
            }
            // wait until CLIENT_POLLING_INTERVALL is over or the user entered something.
            long startTime = System.currentTimeMillis();
            while ((System.currentTimeMillis() - startTime) < CLIENT_POLLING_INTERVALL * 1000 && !in.ready()) {
                Thread.sleep(200);
            }
            if (in.ready()) {
                String command = in.readLine();
                evalCommand(command);
            }
        }
        appReport = yarnClient.getApplicationReport(appId);
        appState = appReport.getYarnApplicationState();
    }

    LOG.info("Application " + appId + " finished with" + " state " + appState + " and " + "final state "
            + appReport.getFinalApplicationStatus() + " at " + appReport.getFinishTime());
    if (appState == YarnApplicationState.FAILED || appState == YarnApplicationState.KILLED) {
        LOG.warn("Application failed. Diagnostics " + appReport.getDiagnostics());
        LOG.warn("If log aggregation is activated in the Hadoop cluster, we recommend to retreive "
                + "the full application log using this command:\n" + "\tyarn logs -applicationId "
                + appReport.getApplicationId() + "\n"
                + "(It sometimes takes a few seconds until the logs are aggregated)");
    }
}