Example usage for org.apache.commons.cli Options Options

Introduction

This page collects example usages of the org.apache.commons.cli Options class, specifically calls to its Options() constructor.

Prototype

Options()

Usage
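
Before the project examples, a minimal, self-contained sketch of the typical Options lifecycle: construct the container, register options, parse the arguments, then query the result. The class name OptionsPrimer and the option names below are illustrative only and are not taken from any of the source files that follow.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class OptionsPrimer {
    public static void main(String[] args) {
        // Construct an empty Options container.
        Options options = new Options();

        // Register the options the program understands.
        options.addOption("h", "help", false, "Print usage information");
        options.addOption("o", "output", true, "Path of the output file");

        // Parse the command line and query the result.
        CommandLineParser parser = new DefaultParser();
        try {
            CommandLine cmd = parser.parse(options, args);
            if (cmd.hasOption("h")) {
                new HelpFormatter().printHelp("OptionsPrimer [OPTIONS]", options);
                return;
            }
            if (cmd.hasOption("o")) {
                System.out.println("Writing output to " + cmd.getOptionValue("o"));
            }
        } catch (ParseException e) {
            System.err.println("Parsing failed.  Reason: " + e.getMessage());
        }
    }
}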

From source file:com.amertkara.multiplerunners.Application.java

public static void main(String[] args) {
    if (args.length == 0) {
        Options options = new Options();
        CommandLineParser parser = new DefaultParser();
        HelpFormatter formatter = new HelpFormatter();

        Option parserOpt = Option.builder("parse").desc("Parser").build();
        Option analyzerOpt = Option.builder("analyze").desc("Analyzer").build();

        options.addOption(analyzerOpt);
        options.addOption(parserOpt);

        formatter.printHelp("sample", options);
    } else {
        SpringApplication.run(Application.class, args);
    }
}

From source file:it.tizianofagni.sparkboost.BoostClassifierExe.java

public static void main(String[] args) {

    Options options = new Options();
    options.addOption("b", "binaryProblem", false,
            "Indicate if the input dataset contains a binary problem and not a multilabel one");
    options.addOption("z", "labels0based", false,
            "Indicate if the label IDs in the dataset to classifyLibSvmWithResults are already assigned in the range [0, numLabels-1] inclusive");
    options.addOption("l", "enableSparkLogging", false, "Enable logging messages of Spark");
    options.addOption("w", "windowsLocalModeFix", true,
            "Set the directory containing the winutils.exe command");
    options.addOption("p", "parallelismDegree", true,
            "Set the parallelism degree (default: number of available cores in the Spark runtime)");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    String[] remainingArgs = null;
    try {
        cmd = parser.parse(options, args);
        remainingArgs = cmd.getArgs();
        if (remainingArgs.length != 3)
            throw new ParseException("You need to specify all mandatory parameters");
    } catch (ParseException e) {
        System.out.println("Parsing failed.  Reason: " + e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(
                BoostClassifierExe.class.getSimpleName() + " [OPTIONS] <inputFile> <inputModel> <outputFile>",
                options);
        System.exit(-1);
    }

    boolean binaryProblem = false;
    if (cmd.hasOption("b"))
        binaryProblem = true;
    boolean labels0Based = false;
    if (cmd.hasOption("z"))
        labels0Based = true;
    boolean enablingSparkLogging = false;
    if (cmd.hasOption("l"))
        enablingSparkLogging = true;

    if (cmd.hasOption("w")) {
        System.setProperty("hadoop.home.dir", cmd.getOptionValue("w"));
    }

    String inputFile = remainingArgs[0];
    String inputModel = remainingArgs[1];
    String outputFile = remainingArgs[2];

    long startTime = System.currentTimeMillis();

    // Disable Spark logging.
    if (!enablingSparkLogging) {
        Logger.getLogger("org").setLevel(Level.OFF);
        Logger.getLogger("akka").setLevel(Level.OFF);
    }

    // Create and configure Spark context.
    SparkConf conf = new SparkConf().setAppName("Spark MPBoost classifier");
    JavaSparkContext sc = new JavaSparkContext(conf);

    // Load boosting classifier from disk.
    BoostClassifier classifier = DataUtils.loadModel(sc, inputModel);

    // Get the parallelism degree.
    int parallelismDegree = sc.defaultParallelism();
    if (cmd.hasOption("p")) {
        parallelismDegree = Integer.parseInt(cmd.getOptionValue("p"));
    }

    // Classify documents available on specified input file.
    classifier.classifyLibSvm(sc, inputFile, parallelismDegree, labels0Based, binaryProblem, outputFile);
    long endTime = System.currentTimeMillis();
    System.out.println("Execution time: " + (endTime - startTime) + " milliseconds.");
}
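
A side note on the parser classes: BasicParser (used above), PosixParser, and GnuParser are deprecated as of Commons CLI 1.3 in favor of DefaultParser. The following sketch, assuming Commons CLI 1.3 or later, shows the same parse-or-print-help pattern with the non-deprecated parser; the class and method names here are illustrative.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class DefaultParserSketch {
    public static CommandLine parseOrExit(Options options, String[] args, String usage) {
        try {
            // DefaultParser replaces the deprecated BasicParser/PosixParser/GnuParser.
            return new DefaultParser().parse(options, args);
        } catch (ParseException e) {
            System.out.println("Parsing failed.  Reason: " + e.getMessage());
            new HelpFormatter().printHelp(usage, options);
            System.exit(-1);
            return null; // unreachable, but required by the compiler
        }
    }
}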

From source file:com.discursive.jccook.cmdline.CliComplexExample.java

public static void main(String[] args) throws Exception {
    CommandLineParser parser = new BasicParser();

    Options options = new Options();
    options.addOption("h", "help", false, "Print this usage information");
    options.addOption("v", "verbose", false, "Print out VERBOSE debugging information");
    OptionGroup optionGroup = new OptionGroup();
    optionGroup.addOption(OptionBuilder.hasArg(true).create('f'));
    optionGroup.addOption(OptionBuilder.hasArg(true).create('m'));
    options.addOptionGroup(optionGroup);

    CommandLine commandLine = parser.parse(options, args);

    boolean verbose = false;
    String file = "";
    String mail = "";

    if (commandLine.hasOption('h')) {
        System.out.println("Help Message");
        System.exit(0);
    }

    if (commandLine.hasOption('v')) {
        verbose = true;
    }

    if (commandLine.hasOption('f')) {
        file = commandLine.getOptionValue('f');
    } else if (commandLine.hasOption('m')) {
        mail = commandLine.getOptionValue('m');
    }

    System.exit(0);
}
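
OptionBuilder, used above for the mutually exclusive -f/-m group, is likewise deprecated since Commons CLI 1.3. Below is a sketch of the equivalent group built with Option.builder, under the assumption of Commons CLI 1.3 or later; the class name OptionGroupSketch is illustrative.

import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;

public class OptionGroupSketch {
    public static Options buildOptions() {
        Options options = new Options();
        options.addOption("h", "help", false, "Print this usage information");
        options.addOption("v", "verbose", false, "Print out VERBOSE debugging information");

        // -f and -m are mutually exclusive; each takes a single argument.
        OptionGroup group = new OptionGroup();
        group.addOption(Option.builder("f").hasArg().build());
        group.addOption(Option.builder("m").hasArg().build());
        options.addOptionGroup(group);
        return options;
    }
}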

From source file:it.tizianofagni.sparkboost.MPBoostLearnerExe.java

public static void main(String[] args) {
    Options options = new Options();
    options.addOption("b", "binaryProblem", false,
            "Indicate if the input dataset contains a binary problem and not a multilabel one");
    options.addOption("z", "labels0based", false,
            "Indicate if the label IDs in the dataset to classifyLibSvmWithResults are already assigned in the range [0, numLabels-1] inclusive");
    options.addOption("l", "enableSparkLogging", false, "Enable logging messages of Spark");
    options.addOption("w", "windowsLocalModeFix", true,
            "Set the directory containing the winutils.exe command");
    options.addOption("dp", "documentPartitions", true, "The number of document partitions");
    options.addOption("fp", "featurePartitions", true, "The number of feature partitions");
    options.addOption("lp", "labelPartitions", true, "The number of label partitions");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    String[] remainingArgs = null;
    try {
        cmd = parser.parse(options, args);
        remainingArgs = cmd.getArgs();
        if (remainingArgs.length != 3)
            throw new ParseException("You need to specify all mandatory parameters");
    } catch (ParseException e) {
        System.out.println("Parsing failed.  Reason: " + e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(
                MPBoostLearnerExe.class.getSimpleName() + " [OPTIONS] <inputFile> <outputFile> <numIterations>",
                options);
        System.exit(-1);
    }

    boolean binaryProblem = false;
    if (cmd.hasOption("b"))
        binaryProblem = true;
    boolean labels0Based = false;
    if (cmd.hasOption("z"))
        labels0Based = true;
    boolean enablingSparkLogging = false;
    if (cmd.hasOption("l"))
        enablingSparkLogging = true;

    if (cmd.hasOption("w")) {
        System.setProperty("hadoop.home.dir", cmd.getOptionValue("w"));
    }

    String inputFile = remainingArgs[0];
    String outputFile = remainingArgs[1];
    int numIterations = Integer.parseInt(remainingArgs[2]);

    long startTime = System.currentTimeMillis();

    // Disable Spark logging.
    if (!enablingSparkLogging) {
        Logger.getLogger("org").setLevel(Level.OFF);
        Logger.getLogger("akka").setLevel(Level.OFF);
    }

    // Create and configure Spark context.
    SparkConf conf = new SparkConf().setAppName("Spark MPBoost learner");
    JavaSparkContext sc = new JavaSparkContext(conf);

    // Create and configure learner.
    MpBoostLearner learner = new MpBoostLearner(sc);
    learner.setNumIterations(numIterations);

    if (cmd.hasOption("dp")) {
        learner.setNumDocumentsPartitions(Integer.parseInt(cmd.getOptionValue("dp")));
    }
    if (cmd.hasOption("fp")) {
        learner.setNumFeaturesPartitions(Integer.parseInt(cmd.getOptionValue("fp")));
    }
    if (cmd.hasOption("lp")) {
        learner.setNumLabelsPartitions(Integer.parseInt(cmd.getOptionValue("lp")));
    }

    // Build classifier with MPBoost learner.
    BoostClassifier classifier = learner.buildModel(inputFile, labels0Based, binaryProblem);

    // Save classifier to disk.
    DataUtils.saveModel(sc, classifier, outputFile);

    long endTime = System.currentTimeMillis();
    System.out.println("Execution time: " + (endTime - startTime) + " milliseconds.");
}

From source file:de.akadvh.view.Main.java

/**
 * @param args
 */
public static void main(String[] args) {

    Options options = new Options();
    options.addOption("u", "user", true, "User name");
    options.addOption("p", "pass", true, "Password");
    options.addOption("c", "console", false, "Console mode");
    options.addOption("v", "verbose", false, "Verbose output");
    options.addOption("m", "modul", true, "Module");
    options.addOption("n", "noten", false, "Create grade overview");
    options.addOption("t", "termin", false, "Download schedule overview (registered modules)");
    options.addOption("version", false, "Version");
    options.addOption("h", "help", false, "Help");

    CommandLineParser parser = new PosixParser();
    try {
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption("help")) {

            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("java -jar akadvh.jar", options);
            System.exit(0);
        }

        if (cmd.hasOption("version")) {

            System.out.println("Akadvh Version: " + Akadvh.getVersion());
            System.exit(0);

        }

        if (cmd.hasOption("console")) {

            ConsoleView cv = new ConsoleView(cmd.getOptionValue("user"), cmd.getOptionValue("pass"),
                    cmd.getOptionValue("modul"), cmd.hasOption("noten"), cmd.hasOption("termin"),
                    cmd.hasOption("verbose"));

        } else {

            SwingView sv = new SwingView(cmd.getOptionValue("user"), cmd.getOptionValue("pass"));

        }

    } catch (UnrecognizedOptionException e1) {
        System.out.println(e1.getMessage());
        System.out.println("--help for help");
    } catch (ParseException e) {
        e.printStackTrace();
    }
}

From source file:boa.BoaMain.java

public static void main(final String[] args) throws IOException {
    final Options options = new Options();

    options.addOption("p", "parse", false, "parse and semantic check a Boa program (don't generate code)");
    options.addOption("c", "compile", false, "compile a Boa program");
    options.addOption("e", "execute", false, "execute a Boa program locally");
    options.addOption("g", "generate", false, "generate a Boa dataset");

    try {
        if (args.length == 0) {
            printHelp(options, null);
            return;
        } else {
            final CommandLine cl = new PosixParser().parse(options, new String[] { args[0] });
            final String[] tempargs = new String[args.length - 1];
            System.arraycopy(args, 1, tempargs, 0, args.length - 1);

            if (cl.hasOption("c")) {
                boa.compiler.BoaCompiler.main(tempargs);
            } else if (cl.hasOption("p")) {
                boa.compiler.BoaCompiler.parseOnly(tempargs);
            } else if (cl.hasOption("e")) {
                boa.evaluator.BoaEvaluator.main(tempargs);
            } else if (cl.hasOption("g")) {
                boa.datagen.BoaGenerator.main(tempargs);
            }
        }
    } catch (final org.apache.commons.cli.ParseException e) {
        printHelp(options, e.getMessage());
    }
}

From source file:it.tizianofagni.sparkboost.AdaBoostMHLearnerExe.java

public static void main(String[] args) {
    Options options = new Options();
    options.addOption("b", "binaryProblem", false,
            "Indicate if the input dataset contains a binary problem and not a multilabel one");
    options.addOption("z", "labels0based", false,
            "Indicate if the label IDs in the dataset to classifyLibSvmWithResults are already assigned in the range [0, numLabels-1] inclusive");
    options.addOption("l", "enableSparkLogging", false, "Enable logging messages of Spark");
    options.addOption("w", "windowsLocalModeFix", true,
            "Set the directory containing the winutils.exe command");
    options.addOption("dp", "documentPartitions", true, "The number of document partitions");
    options.addOption("fp", "featurePartitions", true, "The number of feature partitions");
    options.addOption("lp", "labelPartitions", true, "The number of label partitions");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    String[] remainingArgs = null;
    try {
        cmd = parser.parse(options, args);
        remainingArgs = cmd.getArgs();
        if (remainingArgs.length != 5)
            throw new ParseException("You need to specify all mandatory parameters");
    } catch (ParseException e) {
        System.out.println("Parsing failed.  Reason: " + e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(AdaBoostMHLearnerExe.class.getSimpleName()
                + " [OPTIONS] <inputFile> <outputFile> <numIterations> <sparkMaster> <parallelismDegree>",
                options);
        System.exit(-1);
    }

    boolean binaryProblem = false;
    if (cmd.hasOption("b"))
        binaryProblem = true;
    boolean labels0Based = false;
    if (cmd.hasOption("z"))
        labels0Based = true;
    boolean enablingSparkLogging = false;
    if (cmd.hasOption("l"))
        enablingSparkLogging = true;

    if (cmd.hasOption("w")) {
        System.setProperty("hadoop.home.dir", cmd.getOptionValue("w"));
    }

    String inputFile = remainingArgs[0];
    String outputFile = remainingArgs[1];
    int numIterations = Integer.parseInt(remainingArgs[2]);
    String sparkMaster = remainingArgs[3];
    int parallelismDegree = Integer.parseInt(remainingArgs[4]);

    long startTime = System.currentTimeMillis();

    // Disable Spark logging.
    if (!enablingSparkLogging) {
        Logger.getLogger("org").setLevel(Level.OFF);
        Logger.getLogger("akka").setLevel(Level.OFF);
    }

    // Create and configure Spark context.
    SparkConf conf = new SparkConf().setAppName("Spark AdaBoost.MH learner");
    JavaSparkContext sc = new JavaSparkContext(conf);

    // Create and configure learner.
    AdaBoostMHLearner learner = new AdaBoostMHLearner(sc);
    learner.setNumIterations(numIterations);

    if (cmd.hasOption("dp")) {
        learner.setNumDocumentsPartitions(Integer.parseInt(cmd.getOptionValue("dp")));
    }
    if (cmd.hasOption("fp")) {
        learner.setNumFeaturesPartitions(Integer.parseInt(cmd.getOptionValue("fp")));
    }
    if (cmd.hasOption("lp")) {
        learner.setNumLabelsPartitions(Integer.parseInt(cmd.getOptionValue("lp")));
    }

    // Build the classifier with the AdaBoost.MH learner.
    BoostClassifier classifier = learner.buildModel(inputFile, labels0Based, binaryProblem);

    // Save classifier to disk.
    DataUtils.saveModel(sc, classifier, outputFile);

    long endTime = System.currentTimeMillis();
    System.out.println("Execution time: " + (endTime - startTime) + " milliseconds.");
}

From source file:com.asual.lesscss.LessEngineCli.java

public static void main(String[] args) throws LessException, URISyntaxException {
    Options cmdOptions = new Options();
    cmdOptions.addOption(LessOptions.CHARSET_OPTION, true, "Input file charset encoding. Defaults to UTF-8.");
    cmdOptions.addOption(LessOptions.COMPRESS_OPTION, false, "Flag that enables compressed CSS output.");
    cmdOptions.addOption(LessOptions.CSS_OPTION, false, "Flag that enables compilation of .css files.");
    cmdOptions.addOption(LessOptions.LESS_OPTION, true, "Path to a custom less.js for Rhino version.");
    try {
        CommandLineParser cmdParser = new GnuParser();
        CommandLine cmdLine = cmdParser.parse(cmdOptions, args);
        LessOptions options = new LessOptions();
        if (cmdLine.hasOption(LessOptions.CHARSET_OPTION)) {
            options.setCharset(cmdLine.getOptionValue(LessOptions.CHARSET_OPTION));
        }
        if (cmdLine.hasOption(LessOptions.COMPRESS_OPTION)) {
            options.setCompress(true);
        }
        if (cmdLine.hasOption(LessOptions.CSS_OPTION)) {
            options.setCss(true);
        }
        if (cmdLine.hasOption(LessOptions.LESS_OPTION)) {
            options.setLess(new File(cmdLine.getOptionValue(LessOptions.LESS_OPTION)).toURI().toURL());
        }
        LessEngine engine = new LessEngine(options);
        if (System.in.available() != 0) {
            BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
            StringWriter sw = new StringWriter();
            char[] buffer = new char[1024];
            int n = 0;
            while (-1 != (n = in.read(buffer))) {
                sw.write(buffer, 0, n);
            }
            String src = sw.toString();
            if (!src.isEmpty()) {
                System.out.println(engine.compile(src, null, options.isCompress()));
                System.exit(0);
            }
        }
        String[] files = cmdLine.getArgs();
        if (files.length == 1) {
            System.out.println(engine.compile(new File(files[0]), options.isCompress()));
            System.exit(0);
        }
        if (files.length == 2) {
            engine.compile(new File(files[0]), new File(files[1]), options.isCompress());
            System.exit(0);
        }

    } catch (IOException ioe) {
        System.err.println("Error opening input file.");
    } catch (ParseException pe) {
        System.err.println("Error parsing arguments.");
    }
    String[] paths = LessEngine.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath()
            .split(File.separator);
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("java -jar " + paths[paths.length - 1] + " input [output] [options]", cmdOptions);
    System.exit(1);
}

From source file:com.genentech.chemistry.openEye.apps.SDFSubRMSD.java

public static void main(String... args) throws IOException { // create command line Options object
    Options options = new Options();
    Option opt = new Option("in", true, "input file oe-supported");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("out", true, "output file oe-supported");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("fragFile", true, "file with single 3d substructure query");
    opt.setRequired(false);
    options.addOption(opt);

    opt = new Option("isMDL", false,
            "if given, the fragFile is supposed to be an MDL query file; query features are supported.");
    opt.setRequired(false);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (cmd.hasOption("d")) {
        System.err.println("Start debugger and press return:");
        new BufferedReader(new InputStreamReader(System.in)).readLine();
    }

    String inFile = cmd.getOptionValue("in");
    String outFile = cmd.getOptionValue("out");
    String fragFile = cmd.getOptionValue("fragFile");

    // read fragment
    OESubSearch ss;
    oemolistream ifs = new oemolistream(fragFile);
    OEMolBase mol;
    if (!cmd.hasOption("isMDL")) {
        mol = new OEGraphMol();
        oechem.OEReadMolecule(ifs, mol);
        ss = new OESubSearch(mol, OEExprOpts.AtomicNumber, OEExprOpts.BondOrder);
    } else {
        int aromodel = OEIFlavor.Generic.OEAroModelOpenEye;
        int qflavor = ifs.GetFlavor(ifs.GetFormat());
        ifs.SetFlavor(ifs.GetFormat(), (qflavor | aromodel));
        int opts = OEMDLQueryOpts.Default | OEMDLQueryOpts.SuppressExplicitH;
        OEQMol qmol = new OEQMol();
        oechem.OEReadMDLQueryFile(ifs, qmol, opts);
        ss = new OESubSearch(qmol);
        mol = qmol;
    }

    double nSSatoms = mol.NumAtoms();
    double sssCoords[] = new double[mol.GetMaxAtomIdx() * 3];
    mol.GetCoords(sssCoords);
    mol.Clear();
    ifs.close();

    if (!ss.IsValid())
        throw new Error("Invalid query " + args[0]);

    ifs = new oemolistream(inFile);
    oemolostream ofs = new oemolostream(outFile);
    int count = 0;

    while (oechem.OEReadMolecule(ifs, mol)) {
        count++;
        double rmsd = Double.MAX_VALUE;
        double molCoords[] = new double[mol.GetMaxAtomIdx() * 3];
        mol.GetCoords(molCoords);

        for (OEMatchBase mb : ss.Match(mol, false)) {
            double r = 0;
            for (OEMatchPairAtom mp : mb.GetAtoms()) {
                OEAtomBase asss = mp.getPattern();
                // Coordinates are packed as x, y, z triplets per atom index.
                double sx = sssCoords[asss.GetIdx() * 3];
                double sy = sssCoords[asss.GetIdx() * 3 + 1];
                double sz = sssCoords[asss.GetIdx() * 3 + 2];

                OEAtomBase amol = mp.getTarget();
                double mx = molCoords[amol.GetIdx() * 3];
                double my = molCoords[amol.GetIdx() * 3 + 1];
                double mz = molCoords[amol.GetIdx() * 3 + 2];

                r += Math.sqrt((sx - mx) * (sx - mx) + (sy - my) * (sy - my) + (sz - mz) * (sz - mz));
            }
            r /= nSSatoms;
            rmsd = Math.min(rmsd, r);
        }

        if (rmsd != Double.MAX_VALUE)
            oechem.OESetSDData(mol, "SSSrmsd", String.format("%.3f", rmsd));

        oechem.OEWriteMolecule(ofs, mol);
        mol.Clear();
    }

    ifs.close();
    ofs.close();

    mol.delete();
    ss.delete();
}

From source file:com.google.cloud.trace.v1.TraceServiceSmokeTest.java

public static void main(String args[]) {
    Logger.getLogger("").setLevel(Level.WARNING);
    try {
        Options options = new Options();
        options.addOption("h", "help", false, "show usage");
        options.addOption(Option.builder().longOpt("project_id").desc("Project id").hasArg()
                .argName("PROJECT-ID").required(true).build());
        CommandLine cl = (new DefaultParser()).parse(options, args);
        if (cl.hasOption("help")) {
            HelpFormatter formater = new HelpFormatter();
            formater.printHelp("TraceServiceSmokeTest", options);
        }
        executeNoCatch(cl.getOptionValue("project_id"));
        System.out.println("OK");
    } catch (Exception e) {
        System.err.println("Failed with exception:");
        e.printStackTrace(System.err);
        System.exit(1);
    }
}