Example usage for org.apache.commons.cli CommandLine getOptionValues

Introduction

This page collects example usages of org.apache.commons.cli CommandLine.getOptionValues, drawn from open-source projects such as Apache Hadoop and Apache Hive.

Prototype

public String[] getOptionValues(char opt) 
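
A String overload, public String[] getOptionValues(String opt), behaves the same way; most of the examples below use it.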

Document

Retrieves the array of values, if any, of an option.
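
As a minimal self-contained sketch (the -i/--input option is invented for illustration), the following shows the behavior the examples below rely on: getOptionValues gathers every value supplied for a repeated option, and returns null, not an empty array, when the option is absent.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class GetOptionValuesDemo {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        // a repeatable option: each -i occurrence contributes one value
        options.addOption("i", "input", true, "input path; may be repeated");

        CommandLineParser parser = new GnuParser();
        CommandLine cmd = parser.parse(options, new String[] { "-i", "a.txt", "-i", "b.txt" });

        String[] inputs = cmd.getOptionValues("i");  // ["a.txt", "b.txt"]
        String[] missing = cmd.getOptionValues("x"); // null: option never supplied
        System.out.println(inputs.length + " inputs, missing=" + missing);
    }
}

GnuParser matches the examples on this page; newer Commons CLI releases deprecate it in favor of DefaultParser.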

Usage

From source file: org.apache.hadoop.hive.metastore.tools.metatool.HiveMetaToolCommandLine.java

private void parseCommandLine(String[] args) throws ParseException {
    CommandLine cl = new GnuParser().parse(OPTIONS, args);

    listFSRoot = cl.hasOption(LIST_FS_ROOT.getOpt());
    jdoqlQuery = cl.getOptionValue(EXECUTE_JDOQL.getOpt());
    updateLocationParams = cl.getOptionValues(UPDATE_LOCATION.getOpt());
    dryRun = cl.hasOption(DRY_RUN.getOpt());
    serdePropKey = cl.getOptionValue(SERDE_PROP_KEY.getOpt());
    tablePropKey = cl.getOptionValue(TABLE_PROP_KEY.getOpt());
    help = cl.hasOption(HELP.getOpt());

    int commandCount = (isListFSRoot() ? 1 : 0) + (isExecuteJDOQL() ? 1 : 0) + (isUpdateLocation() ? 1 : 0);
    if (commandCount != 1) {
        throw new IllegalArgumentException(
                "exectly one of -listFSRoot, -executeJDOQL, -updateLocation must be set");
    }

    if (updateLocationParams != null && updateLocationParams.length != 2) {
        throw new IllegalArgumentException("HiveMetaTool:updateLocation takes in 2 arguments but was passed "
                + updateLocationParams.length + " arguments");
    }

    if ((dryRun || serdePropKey != null || tablePropKey != null) && !isUpdateLocation()) {
        throw new IllegalArgumentException(
                "-dryRun, -serdePropKey, -tablePropKey may be used only for the " + "-updateLocation command");
    }
}
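
Because getOptionValues returns null rather than an empty array when an option was not supplied, the null check on updateLocationParams above is required before its length can be read.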

From source file: org.apache.hadoop.mapreduce.MiniHadoopClusterManager.java

/**
 * Parses arguments and fills out the member variables.
 *
 * @param args
 *          Command-line arguments.
 * @return true on successful parse; false to indicate that the program should
 *         exit.
 */
private boolean parseArguments(String[] args) {
    Options options = makeOptions();
    CommandLine cli;
    try {
        CommandLineParser parser = new GnuParser();
        cli = parser.parse(options, args);
    } catch (ParseException e) {
        LOG.warn("options parsing failed:  " + e.getMessage());
        new HelpFormatter().printHelp("...", options);
        return false;
    }

    if (cli.hasOption("help")) {
        new HelpFormatter().printHelp("...", options);
        return false;
    }
    if (cli.getArgs().length > 0) {
        for (String arg : cli.getArgs()) {
            System.err.println("Unrecognized option: " + arg);
            new HelpFormatter().printHelp("...", options);
            return false;
        }
    }

    // MR
    noMR = cli.hasOption("nomr");
    numNodeManagers = intArgument(cli, "nodemanagers", 1);
    rmPort = intArgument(cli, "rmport", 0);
    jhsPort = intArgument(cli, "jhsport", 0);
    fs = cli.getOptionValue("namenode");

    // HDFS
    noDFS = cli.hasOption("nodfs");
    numDataNodes = intArgument(cli, "datanodes", 1);
    nnPort = intArgument(cli, "nnport", 0);
    dfsOpts = cli.hasOption("format") ? StartupOption.FORMAT : StartupOption.REGULAR;

    // Runner
    writeDetails = cli.getOptionValue("writeDetails");
    writeConfig = cli.getOptionValue("writeConfig");

    // General
    conf = new JobConf();
    updateConfiguration(conf, cli.getOptionValues("D"));

    return true;
}
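
The closing getOptionValues("D") call collects one value per -D property=value occurrence on the command line; updateConfiguration then applies those pairs to the JobConf.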

From source file: org.apache.hadoop.mpich.appmaster.AppMasterArgumentsParser.java

public static AppMasterArguments parse(String[] args) throws ParseException {
    CommandLine cliParser = new GnuParser().parse(OPTS, args);
    int np = Integer.parseInt(cliParser.getOptionValue("np"));
    String executable = cliParser.getOptionValue("exec");
    String wdir = cliParser.getOptionValue("wdir");
    int containerMem = Integer.parseInt(cliParser.getOptionValue("containerMem", "1024"));
    int containerCores = Integer.parseInt(cliParser.getOptionValue("containerCores", "1"));
    int mpjContainerPriority = Integer.parseInt(cliParser.getOptionValue("containerPriority", "0"));
    String ioServer = cliParser.getOptionValue("ioServer");
    int ioServerPort = Integer.parseInt(cliParser.getOptionValue("ioServerPort"));

    String[] appArgs = null;
    if (cliParser.hasOption("appArgs")) {
        appArgs = cliParser.getOptionValues("appArgs");
    }

    boolean debugYarn = false;
    if (cliParser.hasOption("debugYarn")) {
        debugYarn = true;
    }

    return new AppMasterArguments(np, executable, wdir, containerMem, containerCores, mpjContainerPriority,
            ioServer, ioServerPort, appArgs, debugYarn);
}
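
Note the two-argument form of getOptionValue used above: it returns the supplied default (for example "1024" for containerMem) when the option is absent, so the surrounding Integer.parseInt calls never see a null. getOptionValues has no such defaulting form, hence the hasOption guard around appArgs.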

From source file: org.apache.hadoop.mpich.client.ClientArgumentsParser.java

public static ClientArguments parse(String[] args) throws ParseException {
    CommandLine cliParser = new GnuParser().parse(OPTS, args);

    int np = Integer.parseInt(cliParser.getOptionValue("np"));

    String executable = cliParser.getOptionValue("exec");

    String workingDirectory = cliParser.getOptionValue("wdir");

    int amMem = Integer.parseInt(cliParser.getOptionValue("amMem", "2048"));

    int amCores = Integer.parseInt(cliParser.getOptionValue("amCores", "1"));

    int containerMem = Integer.parseInt(cliParser.getOptionValue("containerMem", "1024"));

    int containerCores = Integer.parseInt(cliParser.getOptionValue("containerCores", "1"));

    String yarnQueue = cliParser.getOptionValue("yarnQueue", "default");

    String appName = cliParser.getOptionValue("appName", "MPICH-YARN-Application");

    int amPriority = Integer.parseInt(cliParser.getOptionValue("amPriority", "0"));

    String containerPriority = cliParser.getOptionValue("mpjContainerPriority", "0");

    String hdfsFolder = cliParser.getOptionValue("hdfsFolder", "/tmp");

    String[] appArgs = null;
    if (cliParser.hasOption("appArgs")) {
        appArgs = cliParser.getOptionValues("appArgs");
    }

    boolean debugYarn = false;
    if (cliParser.hasOption("debugYarn")) {
        debugYarn = true;
    }

    return new ClientArguments(np, executable, workingDirectory, appArgs, amMem, amCores, containerMem,
            containerCores, yarnQueue, appName, amPriority, containerPriority, hdfsFolder, debugYarn);
}

From source file: org.apache.hadoop.realtime.client.DragonClient.java

public boolean init(String[] args) throws ParseException {
    Options opts = new Options();
    opts.addOption("appname", true, "Application Name. Default value - DistributedShell");
    opts.addOption("priority", true, "Application Priority. Default 0");
    opts.addOption("queue", true, "RM Queue in which this application is to be submitted");
    opts.addOption("user", true, "User to run the application as");
    opts.addOption("master_memory", true,
            "Amount of memory in MB to be requested to run the application master");
    opts.addOption("jar", true, "Jar file containing the application master");
    opts.addOption("child_jar", true, "Location of the Child Class JAR to be executed");
    opts.addOption("class", true, "Main class to  be run for the Application Master.");
    opts.addOption("child_class", true, "Java child class to be executed by the Container");
    opts.addOption("child_args", true, "Command line args for the child class");
    opts.addOption("child_env", true, "Environment for child class. Specified as env_key=env_val pairs");
    opts.addOption("child_class_priority", true, "Priority for the child class containers");
    opts.addOption("container_memory", true, "Amount of memory in MB to be requested to run the shell command");
    opts.addOption("num_containers", true, "No. of containers on which the shell command needs to be executed");
    opts.addOption("log_properties", true, "log4j.properties file");
    opts.addOption("debug", false, "Dump out debug information");
    opts.addOption("help", false, "Print usage");
    CommandLine cliParser = new GnuParser().parse(opts, args);
    if (args.length == 0) {
        printUsage(opts);
        System.err.println("No args specified for client to initialize");
        return false;
    }

    if (cliParser.hasOption("help")) {
        printUsage(opts);
        return false;
    }
    appName = cliParser.getOptionValue("appname", "DistributedStream");
    amPriority = Integer.parseInt(cliParser.getOptionValue("priority", "0"));
    amQueue = cliParser.getOptionValue("queue", "");
    amUser = cliParser.getOptionValue("user", "");
    amMemory = Integer.parseInt(cliParser.getOptionValue("master_memory", "10"));
    if (amMemory < 0) {
        System.err.println(
                "Invalid memory specified for application master, exiting." + " Specified memory=" + amMemory);
        return false;
    }

    if (!cliParser.hasOption("jar")) {
        System.err.println("No jar file specified for application master");
        return false;
    }
    appMasterJar = cliParser.getOptionValue("jar");
    appMasterMainClass = cliParser.getOptionValue("class", DragonApplicationMaster.class.getName());
    childJar = cliParser.getOptionValue("child_jar", appMasterJar);
    if (!cliParser.hasOption("child_class")) {
        System.err.println("No child_class specified to be executed by container");
        return false;
    }
    childClass = cliParser.getOptionValue("child_class");
    if (cliParser.hasOption("child_args")) {
        childArgs = cliParser.getOptionValue("child_args");
    }
    if (cliParser.hasOption("child_env")) {
        String envs[] = cliParser.getOptionValues("child_env");
        for (String env : envs) {
            env = env.trim();
            int index = env.indexOf('=');
            if (index == -1) {
                childEnv.put(env, "");
                continue;
            }
            String key = env.substring(0, index);
            String val = "";
            if (index < (env.length() - 1)) {
                val = env.substring(index + 1);
            }
            childEnv.put(key, val);
        }
    }
    childClassPriority = Integer.parseInt(cliParser.getOptionValue("child_class_priority", "0"));

    containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "10"));
    numContainers = Integer.parseInt(cliParser.getOptionValue("num_containers", "1"));

    if (containerMemory < 0 || numContainers < 1) {
        System.err.println("Invalid no. of containers or container memory specified, exiting."
                + " Specified containerMemory=" + containerMemory + ", numContainer=" + numContainers);
        return false;
    }
    log4jPropFile = cliParser.getOptionValue("log_properties", "");

    return true;
}

From source file: org.apache.hadoop.realtime.server.DragonApplicationMaster.java

/**
 * Parse command line options
 * 
 * @param args Command line args
 * @return Whether init successful and run should be invoked
 * @throws ParseException
 * @throws IOException
 */
public boolean init(String[] args) throws ParseException {
    Options opts = new Options();
    opts.addOption("child_class", true, "Java child class to be executed by the Container");
    opts.addOption("child_args", true, "Command line args for the child class");
    opts.addOption("child_env", true, "Environment for child class. Specified as env_key=env_val pairs");
    opts.addOption("priority", true, "Priority for the child class containers");
    opts.addOption("container_memory", true, "Amount of memory in MB to be requested to run the shell command");
    opts.addOption("num_containers", true, "No. of containers on which the shell command needs to be executed");
    opts.addOption("help", false, "Print usage");
    CommandLine cliParser = new GnuParser().parse(opts, args);

    if (args.length == 0) {
        printUsage(opts);
        throw new IllegalArgumentException("No args specified for application master to initialize");
    }

    Map<String, String> envs = System.getenv();
    appAttemptID = Records.newRecord(ApplicationAttemptId.class);
    if (!envs.containsKey(ApplicationConstants.AM_CONTAINER_ID_ENV)) {
        if (cliParser.hasOption("app_attempt_id")) {
            String appIdStr = cliParser.getOptionValue("app_attempt_id", "");
            appAttemptID = ConverterUtils.toApplicationAttemptId(appIdStr);
        } else {
            throw new IllegalArgumentException("Application Attempt Id not set in the environment");
        }
    } else {
        ContainerId containerId = ConverterUtils
                .toContainerId(envs.get(ApplicationConstants.AM_CONTAINER_ID_ENV));
        appAttemptID = containerId.getApplicationAttemptId();
    }
    LOG.info("Application master for app" + ", appId=" + appAttemptID.getApplicationId().getId()
            + ", clustertimestamp=" + appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId="
            + appAttemptID.getAttemptId());

    if (!cliParser.hasOption("child_class")) {
        System.err.println("No child_class specified to be executed by container");
        return false;
    }
    childClass = cliParser.getOptionValue("child_class");
    if (cliParser.hasOption("child_args")) {
        childArgs = cliParser.getOptionValue("child_args");
    }
    if (cliParser.hasOption("child_env")) {
        String childEnvs[] = cliParser.getOptionValues("child_env");
        for (String env : childEnvs) {
            env = env.trim();
            int index = env.indexOf('=');
            if (index == -1) {
                childEnv.put(env, "");
                continue;
            }
            String key = env.substring(0, index);
            String val = "";
            if (index < (env.length() - 1)) {
                val = env.substring(index + 1);
            }
            childEnv.put(key, val);
        }
    }
    if (envs.containsKey(DSConstants.DISTRIBUTED_CHILDCLASS_LOCATION)) {
        childJarPath = envs.get(DSConstants.DISTRIBUTED_CHILDCLASS_LOCATION);
    }
    if (envs.containsKey(DSConstants.DISTRIBUTED_CHILDCLASS_TIMESTAMP)) {
        childJarPathTimestamp = Long.valueOf(envs.get(DSConstants.DISTRIBUTED_CHILDCLASS_TIMESTAMP));
    }
    if (envs.containsKey(DSConstants.DISTRIBUTED_CHILDCLASS_LEN)) {
        childJarPathLen = Long.valueOf(envs.get(DSConstants.DISTRIBUTED_CHILDCLASS_LEN));
    }
    containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "10"));
    numTotalContainers = Integer.parseInt(cliParser.getOptionValue("num_containers", "1"));
    requestPriority = Integer.parseInt(cliParser.getOptionValue("priority", "0"));

    return true;
}

From source file: org.apache.hadoop.streaming.MapStreamJob.java

void parseArgv() {
    CommandLine cmdLine = null;
    try {
        cmdLine = parser.parse(allOptions, argv_);
    } catch (Exception oe) {
        LOG.error(oe.getMessage());
        exitUsage(argv_.length > 0 && "-info".equals(argv_[0]));
    }

    if (cmdLine != null) {
        verbose_ = cmdLine.hasOption("verbose");
        detailedUsage_ = cmdLine.hasOption("info");
        debug_ = cmdLine.hasOption("debug") ? debug_ + 1 : debug_;

        String[] values = cmdLine.getOptionValues("input");
        if (values != null && values.length > 0) {
            for (String input : values) {
                inputSpecs_.add(input);
            }
        }
        output_ = (String) cmdLine.getOptionValue("output");

        // mapred Sample : TEST/mapper.py/reducer.py
        values = cmdLine.getOptionValues("mapred");
        if (values != null && values.length > 0) {
            for (String mapred : values) {
                String[] buf = mapred.split("/");
                if (buf.length < 3) {
                    // malformed -mapred value: expected dir/mapper/reducer
                    continue;
                }
                dirKey_.add(buf[0]);
                mapCmd_.add(buf[1]);
                redCmd_.add(buf[2]);
            }
        }

        comCmd_ = (String) cmdLine.getOptionValue("combiner");

        values = cmdLine.getOptionValues("file");
        if (values != null && values.length > 0) {
            for (String file : values) {
                packageFiles_.add(file);
            }
            validate(packageFiles_);
        }

        String fsName = (String) cmdLine.getOptionValue("dfs");
        if (null != fsName) {
            LOG.warn("-dfs option is deprecated, please use -fs instead.");
            config_.set("fs.default.name", fsName);
        }

        additionalConfSpec_ = (String) cmdLine.getOptionValue("additionalconfspec");
        inputFormatSpec_ = (String) cmdLine.getOptionValue("inputformat");
        outputFormatSpec_ = (String) cmdLine.getOptionValue("outputformat");
        numReduceTasksSpec_ = (String) cmdLine.getOptionValue("numReduceTasks");
        partitionerSpec_ = (String) cmdLine.getOptionValue("partitioner");
        inReaderSpec_ = (String) cmdLine.getOptionValue("inputreader");
        mapDebugSpec_ = (String) cmdLine.getOptionValue("mapdebug");
        reduceDebugSpec_ = (String) cmdLine.getOptionValue("reducedebug");

        String[] car = cmdLine.getOptionValues("cacheArchive");
        if (null != car && car.length > 0) {
            LOG.warn("-cacheArchive option is deprecated, please use -archives instead.");
            for (String s : car) {
                cacheArchives = (cacheArchives == null) ? s : cacheArchives + "," + s;
            }
        }

        String[] caf = cmdLine.getOptionValues("cacheFile");
        if (null != caf && caf.length > 0) {
            LOG.warn("-cacheFile option is deprecated, please use -files instead.");
            for (String s : caf) {
                cacheFiles = (cacheFiles == null) ? s : cacheFiles + "," + s;
            }
        }

        String[] jobconf = cmdLine.getOptionValues("jobconf");
        if (null != jobconf && jobconf.length > 0) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            for (String s : jobconf) {
                String[] parts = s.split("=", 2);
                config_.set(parts[0], parts[1]);
            }
        }

        String[] cmd = cmdLine.getOptionValues("cmdenv");
        if (null != cmd && cmd.length > 0) {
            for (String s : cmd) {
                if (addTaskEnvironment_.length() > 0) {
                    addTaskEnvironment_ += " ";
                }
                addTaskEnvironment_ += s;
            }
        }
    } else {
        exitUsage(argv_.length > 0 && "-info".equals(argv_[0]));
    }
}

From source file: org.apache.hadoop.streaming.RedStreamJob.java

void parseArgv() {
    CommandLine cmdLine = null;
    try {
        cmdLine = parser.parse(allOptions, argv_);
    } catch (Exception oe) {
        LOG.error(dirKey + " informs\t" + oe.getMessage());
        exitUsage(argv_.length > 0 && "-info".equals(argv_[0]));
    }

    if (cmdLine != null) {
        verbose_ = cmdLine.hasOption("verbose");
        detailedUsage_ = cmdLine.hasOption("info");
        debug_ = cmdLine.hasOption("debug") ? debug_ + 1 : debug_;

        String[] values = cmdLine.getOptionValues("input");
        if (values != null && values.length > 0) {
            for (String input : values) {
                inputSpecs_.add(input);
            }
        }
        output_ = (String) cmdLine.getOptionValue("output");
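        // assumes -output was supplied; getOptionValue returns null otherwise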
        dirKey = output_.substring(output_.lastIndexOf("/") + 1);

        mapCmd_ = (String) cmdLine.getOptionValue("mapper");
        comCmd_ = (String) cmdLine.getOptionValue("combiner");
        redCmd_ = (String) cmdLine.getOptionValue("reducer");

        values = cmdLine.getOptionValues("file");
        if (values != null && values.length > 0) {
            for (String file : values) {
                packageFiles_.add(file);
            }
            validate(packageFiles_);
        }

        String fsName = (String) cmdLine.getOptionValue("dfs");
        if (null != fsName) {
            LOG.warn(dirKey + " warns\t" + "-dfs option is deprecated, please use -fs instead.");
            config_.set("fs.default.name", fsName);
        }

        additionalConfSpec_ = (String) cmdLine.getOptionValue("additionalconfspec");
        inputFormatSpec_ = (String) cmdLine.getOptionValue("inputformat");
        outputFormatSpec_ = (String) cmdLine.getOptionValue("outputformat");
        numReduceTasksSpec_ = (String) cmdLine.getOptionValue("numReduceTasks");
        partitionerSpec_ = (String) cmdLine.getOptionValue("partitioner");
        inReaderSpec_ = (String) cmdLine.getOptionValue("inputreader");
        mapDebugSpec_ = (String) cmdLine.getOptionValue("mapdebug");
        reduceDebugSpec_ = (String) cmdLine.getOptionValue("reducedebug");

        String[] car = cmdLine.getOptionValues("cacheArchive");
        if (null != car && car.length > 0) {
            LOG.warn(dirKey + " warns\t" + "-cacheArchive option is deprecated, please use -archives instead.");
            for (String s : car) {
                cacheArchives = (cacheArchives == null) ? s : cacheArchives + "," + s;
            }
        }

        String[] caf = cmdLine.getOptionValues("cacheFile");
        if (null != caf && caf.length > 0) {
            LOG.warn(dirKey + " warns\t" + "-cacheFile option is deprecated, please use -files instead.");
            for (String s : caf) {
                cacheFiles = (cacheFiles == null) ? s : cacheFiles + "," + s;
            }
        }

        String[] jobconf = cmdLine.getOptionValues("jobconf");
        if (null != jobconf && jobconf.length > 0) {
            LOG.warn(dirKey + " warns\t" + "-jobconf option is deprecated, please use -D instead.");
            for (String s : jobconf) {
                String[] parts = s.split("=", 2);
                config_.set(parts[0], parts[1]);
            }
        }

        String[] cmd = cmdLine.getOptionValues("cmdenv");
        if (null != cmd && cmd.length > 0) {
            for (String s : cmd) {
                if (addTaskEnvironment_.length() > 0) {
                    addTaskEnvironment_ += " ";
                }
                addTaskEnvironment_ += s;
            }
        }
    } else {
        exitUsage(argv_.length > 0 && "-info".equals(argv_[0]));
    }
}

From source file: org.apache.hadoop.streaming.StreamJob.java

void parseArgv() {
    CommandLine cmdLine = null;
    try {
        cmdLine = parser.parse(allOptions, argv_);
    } catch (Exception oe) {
        LOG.error(oe.getMessage());
        exitUsage(argv_.length > 0 && "-info".equals(argv_[0]));
    }

    if (cmdLine != null) {
        verbose_ = cmdLine.hasOption("verbose");
        detailedUsage_ = cmdLine.hasOption("info");
        debug_ = cmdLine.hasOption("debug") ? debug_ + 1 : debug_;

        String[] values = cmdLine.getOptionValues("input");
        if (values != null && values.length > 0) {
            for (String input : values) {
                inputSpecs_.add(input);
            }
        }
        output_ = (String) cmdLine.getOptionValue("output");

        mapCmd_ = (String) cmdLine.getOptionValue("mapper");
        comCmd_ = (String) cmdLine.getOptionValue("combiner");
        redCmd_ = (String) cmdLine.getOptionValue("reducer");

        values = cmdLine.getOptionValues("file");
        if (values != null && values.length > 0) {
            for (String file : values) {
                packageFiles_.add(file);
            }
            validate(packageFiles_);
        }

        String fsName = (String) cmdLine.getOptionValue("dfs");
        if (null != fsName) {
            LOG.warn("-dfs option is deprecated, please use -fs instead.");
            config_.set("fs.default.name", fsName);
        }

        additionalConfSpec_ = (String) cmdLine.getOptionValue("additionalconfspec");
        inputFormatSpec_ = (String) cmdLine.getOptionValue("inputformat");
        outputFormatSpec_ = (String) cmdLine.getOptionValue("outputformat");
        numReduceTasksSpec_ = (String) cmdLine.getOptionValue("numReduceTasks");
        partitionerSpec_ = (String) cmdLine.getOptionValue("partitioner");
        inReaderSpec_ = (String) cmdLine.getOptionValue("inputreader");
        mapDebugSpec_ = (String) cmdLine.getOptionValue("mapdebug");
        reduceDebugSpec_ = (String) cmdLine.getOptionValue("reducedebug");
        ioSpec_ = (String) cmdLine.getOptionValue("io");

        String[] car = cmdLine.getOptionValues("cacheArchive");
        if (null != car && car.length > 0) {
            LOG.warn("-cacheArchive option is deprecated, please use -archives instead.");
            for (String s : car) {
                cacheArchives = (cacheArchives == null) ? s : cacheArchives + "," + s;
            }
        }

        String[] caf = cmdLine.getOptionValues("cacheFile");
        if (null != caf && caf.length > 0) {
            LOG.warn("-cacheFile option is deprecated, please use -files instead.");
            for (String s : caf) {
                cacheFiles = (cacheFiles == null) ? s : cacheFiles + "," + s;
            }
        }

        String[] jobconf = cmdLine.getOptionValues("jobconf");
        if (null != jobconf && jobconf.length > 0) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            for (String s : jobconf) {
                String[] parts = s.split("=", 2);
                config_.set(parts[0], parts[1]);
            }
        }

        String[] cmd = cmdLine.getOptionValues("cmdenv");
        if (null != cmd && cmd.length > 0) {
            for (String s : cmd) {
                if (addTaskEnvironment_.length() > 0) {
                    addTaskEnvironment_ += " ";
                }
                addTaskEnvironment_ += s;
            }
        }
    } else {
        exitUsage(argv_.length > 0 && "-info".equals(argv_[0]));
    }
}

From source file: org.apache.hadoop.test.MiniHadoopClusterManager.java

/**
 * Parses arguments and fills out the member variables.
 *
 * @param args
 *          Command-line arguments.
 * @return true on successful parse; false to indicate that the program should
 *         exit.
 */
private boolean parseArguments(String[] args) {
    Options options = makeOptions();
    CommandLine cli;
    try {
        CommandLineParser parser = new GnuParser();
        cli = parser.parse(options, args);
    } catch (ParseException e) {
        LOG.warn("options parsing failed:  " + e.getMessage());
        new HelpFormatter().printHelp("...", options);
        return false;
    }

    if (cli.hasOption("help")) {
        new HelpFormatter().printHelp("...", options);
        return false;
    }
    if (cli.getArgs().length > 0) {
        for (String arg : cli.getArgs()) {
            System.err.println("Unrecognized option: " + arg);
            new HelpFormatter().printHelp("...", options);
            return false;
        }
    }

    // MR
    noMR = cli.hasOption("nomr");
    numTaskTrackers = intArgument(cli, "tasktrackers", 1);
    jtPort = intArgument(cli, "jtPort", 0);
    fs = cli.getOptionValue("namenode");

    // HDFS
    noDFS = cli.hasOption("nodfs");
    numDataNodes = intArgument(cli, "datanodes", 1);
    nnPort = intArgument(cli, "nnport", 0);
    dfsOpts = cli.hasOption("format") ? StartupOption.FORMAT : StartupOption.REGULAR;

    // Runner
    writeDetails = cli.getOptionValue("writeDetails");
    writeConfig = cli.getOptionValue("writeConfig");

    // General
    conf = new JobConf();
    updateConfiguration(conf, cli.getOptionValues("D"));

    return true;
}