List of usage examples for org.apache.commons.cli CommandLine hasOption
public boolean hasOption(String opt)
public boolean hasOption(char opt)
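Before the full examples, here is a minimal, self-contained sketch of the usual pattern: test hasOption before reading a value with getOptionValue, and fall back to a default otherwise. The option names, defaults, and class name below are illustrative only and do not come from any of the source files listed on this page.

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class HasOptionSketch {
        public static void main(String[] args) throws ParseException {
            Options options = new Options();
            options.addOption("v", "verbose", false, "enable verbose output");
            options.addOption("p", "port", true, "port to listen on");

            CommandLineParser parser = new DefaultParser();
            CommandLine cmd = parser.parse(options, args);

            // hasOption accepts the short name, the long name, or the char form
            boolean verbose = cmd.hasOption("verbose"); // equivalent to cmd.hasOption('v')

            // guard getOptionValue with hasOption and fall back to a default
            int port = cmd.hasOption("p") ? Integer.parseInt(cmd.getOptionValue("p")) : 8080;

            System.out.println("verbose=" + verbose + ", port=" + port);
        }
    }

Note that DefaultParser (Commons CLI 1.3+) supersedes the GnuParser, BasicParser, and PosixParser classes that several of the older examples below still use.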
From source file:de.akquinet.dustjs.DustEngine.java
public static void main(String[] args) throws URISyntaxException {
    Options cmdOptions = new Options();
    cmdOptions.addOption(DustOptions.CHARSET_OPTION, true, "Input file charset encoding. Defaults to UTF-8.");
    cmdOptions.addOption(DustOptions.DUST_OPTION, true, "Path to a custom dust.js for Rhino version.");
    try {
        CommandLineParser cmdParser = new GnuParser();
        CommandLine cmdLine = cmdParser.parse(cmdOptions, args);
        DustOptions options = new DustOptions();
        if (cmdLine.hasOption(DustOptions.CHARSET_OPTION)) {
            options.setCharset(cmdLine.getOptionValue(DustOptions.CHARSET_OPTION));
        }
        if (cmdLine.hasOption(DustOptions.DUST_OPTION)) {
            options.setDust(new File(cmdLine.getOptionValue(DustOptions.DUST_OPTION)).toURI().toURL());
        }
        DustEngine engine = new DustEngine(options);
        String[] files = cmdLine.getArgs();
        String src = null;
        if (files == null || files.length == 0) {
            // no file arguments: read the source from stdin
            BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
            StringWriter sw = new StringWriter();
            char[] buffer = new char[1024];
            int n = 0;
            while (-1 != (n = in.read(buffer))) {
                sw.write(buffer, 0, n);
            }
            src = sw.toString();
        }
        if (src != null && !src.isEmpty()) {
            System.out.println(engine.compile(src, "test"));
            return;
        }
        if (files.length == 1) {
            System.out.println(engine.compile(new File(files[0])));
            return;
        }
        if (files.length == 2) {
            engine.compile(new File(files[0]), new File(files[1]));
            return;
        }
    } catch (IOException ioe) {
        System.err.println("Error opening input file.");
    } catch (ParseException pe) {
        System.err.println("Error parsing arguments.");
    }
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("java -jar dust-engine.jar input [output] [options]", cmdOptions);
    System.exit(1);
}
From source file:com.continuuity.loom.runtime.DummyProvisioner.java
public static void main(final String[] args) throws Exception {
    Options options = new Options();
    options.addOption("h", "host", true, "Loom server to connect to");
    options.addOption("p", "port", true, "Loom server port to connect to");
    options.addOption("c", "concurrency", true, "default concurrent threads");
    options.addOption("f", "failurePercent", true, "% of the time a provisioner should fail its task");
    options.addOption("o", "once", false, "whether or not only one task should be taken before exiting");
    options.addOption("d", "taskDuration", true, "number of milliseconds it should take to finish a task");
    options.addOption("s", "sleepMs", true, "number of milliseconds a thread will sleep before taking another task");
    options.addOption("n", "numTasks", true, "number of tasks to try and take from the queue. Default is infinite.");
    options.addOption("t", "tenant", true, "tenant id to use.");

    try {
        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);
        host = cmd.hasOption('h') ? cmd.getOptionValue('h') : "localhost";
        port = cmd.hasOption('p') ? Integer.valueOf(cmd.getOptionValue('p')) : 55054;
        concurrency = cmd.hasOption('c') ? Integer.valueOf(cmd.getOptionValue('c')) : 5;
        failurePercent = cmd.hasOption('f') ? Integer.valueOf(cmd.getOptionValue('f')) : 0;
        runOnce = cmd.hasOption('o');
        taskMs = cmd.hasOption('d') ? Long.valueOf(cmd.getOptionValue('d')) : 1000;
        sleepMs = cmd.hasOption('s') ? Long.valueOf(cmd.getOptionValue('s')) : 1000;
        numTasks = cmd.hasOption('n') ? Integer.valueOf(cmd.getOptionValue('n')) : -1;
        tenant = cmd.hasOption('t') ? cmd.getOptionValue('t') : "loom";
    } catch (ParseException e) {
        LOG.error("exception parsing input arguments.", e);
        return;
    }

    if (concurrency < 1) {
        LOG.error("invalid concurrency level {}.", concurrency);
        return;
    }

    if (runOnce) {
        new Provisioner("dummy-0", tenant, host, port, failurePercent, taskMs, sleepMs, 1).runOnce();
    } else {
        LOG.info(String.format(
                "running with %d threads, connecting to %s:%d using tenant %s, with a failure rate of "
                        + "%d percent, task time of %d ms, and sleep time of %d ms between fetches",
                concurrency, host, port, tenant, failurePercent, taskMs, sleepMs));
        pool = Executors.newFixedThreadPool(concurrency);
        try {
            int tasksPerProvisioner = numTasks >= 0 ? numTasks / concurrency : -1;
            int extra = numTasks < 0 ? 0 : numTasks % concurrency;
            pool.execute(new Provisioner("dummy-0", tenant, host, port, failurePercent, taskMs, sleepMs,
                    tasksPerProvisioner + extra));
            for (int i = 1; i < concurrency; i++) {
                pool.execute(new Provisioner("dummy-" + i, tenant, host, port, failurePercent, taskMs, sleepMs,
                        tasksPerProvisioner));
            }
        } catch (Exception e) {
            LOG.error("Caught exception, shutting down now.", e);
            pool.shutdownNow();
        }
        pool.shutdown();
    }
}
From source file:com.examples.cloud.speech.AsyncRecognizeClient.java
public static void main(String[] args) throws Exception {
    String audioFile = "";
    String host = "speech.googleapis.com";
    Integer port = 443;
    Integer sampling = 16000;

    CommandLineParser parser = new DefaultParser();
    Options options = new Options();
    options.addOption(OptionBuilder.withLongOpt("uri").withDescription("path to audio uri").hasArg()
            .withArgName("FILE_PATH").create());
    options.addOption(
            OptionBuilder.withLongOpt("host").withDescription("endpoint for api, e.g. speech.googleapis.com")
                    .hasArg().withArgName("ENDPOINT").create());
    options.addOption(OptionBuilder.withLongOpt("port").withDescription("SSL port, usually 443").hasArg()
            .withArgName("PORT").create());
    options.addOption(OptionBuilder.withLongOpt("sampling").withDescription("Sampling Rate, i.e. 16000")
            .hasArg().withArgName("RATE").create());

    try {
        CommandLine line = parser.parse(options, args);

        if (line.hasOption("uri")) {
            audioFile = line.getOptionValue("uri");
        } else {
            System.err.println("An Audio uri must be specified (e.g. file:///foo/baz.raw).");
            System.exit(1);
        }

        if (line.hasOption("host")) {
            host = line.getOptionValue("host");
        } else {
            System.err.println("An API endpoint must be specified (typically speech.googleapis.com).");
            System.exit(1);
        }

        if (line.hasOption("port")) {
            port = Integer.parseInt(line.getOptionValue("port"));
        } else {
            System.err.println("An SSL port must be specified (typically 443).");
            System.exit(1);
        }

        if (line.hasOption("sampling")) {
            sampling = Integer.parseInt(line.getOptionValue("sampling"));
        } else {
            System.err.println("An Audio sampling rate must be specified.");
            System.exit(1);
        }
    } catch (ParseException exp) {
        System.err.println("Unexpected exception: " + exp.getMessage());
        System.exit(1);
    }

    ManagedChannel channel = AsyncRecognizeClient.createChannel(host, port);
    AsyncRecognizeClient client = new AsyncRecognizeClient(channel, URI.create(audioFile), sampling);
    try {
        client.recognize();
    } finally {
        client.shutdown();
    }
}
From source file:eu.scape_project.spacip.Spacip.java
/**
 * Main entry point.
 *
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    // configuration properties
    pu = new PropertyUtil("/eu/scape_project/spacip/config.properties");
    // hadoop configuration
    Configuration hadoopConf = new Configuration();
    // Command line interface
    config = new CliConfig();
    CommandLineParser cmdParser = new PosixParser();
    GenericOptionsParser gop = new GenericOptionsParser(hadoopConf, args);
    CommandLine cmd = cmdParser.parse(Options.OPTIONS, gop.getRemainingArgs());
    if ((args.length == 0) || (cmd.hasOption(Options.HELP_OPT))) {
        Options.exit("Usage", 0);
    } else {
        Options.initOptions(cmd, config);
    }

    // cli parameter has priority over default configuration
    int cliParamNumPerInv = config.getNumItemsPerInvokation();
    int defaultNumPerInv = Integer.parseInt(pu.getProp("default.itemsperinvokation"));
    int numPerInv = (cliParamNumPerInv != 0) ? cliParamNumPerInv : defaultNumPerInv;

    // setting hadoop configuration parameters so that they can be used
    // during MapReduce
    hadoopConf.setInt("num_items_per_task", numPerInv);
    hadoopConf.set("output_file_suffix", pu.getProp("default.outputfilesuffix"));
    hadoopConf.set("scape_platform_invoke", pu.getProp("tomar.invoke.command"));
    hadoopConf.set("unpack_hdfs_path", pu.getProp("default.hdfsdir.unpacked"));
    hadoopConf.set("joboutput_hdfs_path", pu.getProp("default.hdfsdir.joboutput"));
    hadoopConf.set("tooloutput_hdfs_path", pu.getProp("default.hdfsdir.toolout"));
    hadoopConf.set("container_file_suffix", pu.getProp("containerfilesuffix"));
    hadoopConf.set("tomar_param_pattern", pu.getProp("tomar.param.pattern"));

    startHadoopJob(hadoopConf);
}
From source file:backtype.storm.command.gray_upgrade.java
public static void main(String[] args) throws Exception {
    if (args == null || args.length < 1) {
        System.out.println("Invalid parameter");
        usage();
        return;
    }
    String topologyName = args[0];
    String[] str2 = Arrays.copyOfRange(args, 1, args.length);
    CommandLineParser parser = new GnuParser();
    Options r = buildGeneralOptions(new Options());
    CommandLine commandLine = parser.parse(r, str2, true);

    int workerNum = 0;
    String component = null;
    List<String> workers = null;
    if (commandLine.hasOption("n")) {
        workerNum = Integer.valueOf(commandLine.getOptionValue("n"));
    }
    if (commandLine.hasOption("p")) {
        component = commandLine.getOptionValue("p");
    }
    if (commandLine.hasOption("w")) {
        String w = commandLine.getOptionValue("w");
        if (!StringUtils.isBlank(w)) {
            workers = Lists.newArrayList();
            String[] parts = w.split(",");
            for (String part : parts) {
                if (part.split(":").length == 2) {
                    workers.add(part.trim());
                }
            }
        }
    }
    upgradeTopology(topologyName, component, workers, workerNum);
}
From source file:com.alexoree.jenkins.Main.java
public static void main(String[] args) throws Exception {
    // create Options object
    Options options = new Options();
    options.addOption("t", false, "throttle the downloads, waits 5 seconds in between each d/l");

    // automatically generate the help statement
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("jenkins-sync", options);

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);
    boolean throttle = cmd.hasOption("t");

    String plugins = "https://updates.jenkins-ci.org/latest/";
    List<String> ps = new ArrayList<String>();
    Document doc = Jsoup.connect(plugins).get();
    for (Element file : doc.select("td a")) {
        //System.out.println(file.attr("href"));
        if (file.attr("href").endsWith(".hpi") || file.attr("href").endsWith(".war")) {
            ps.add(file.attr("href"));
        }
    }

    File root = new File(".");
    //https://updates.jenkins-ci.org/latest/AdaptivePlugin.hpi
    new File("./latest").mkdirs();

    //output zip file
    String zipFile = "jenkinsSync.zip";
    // create byte buffer
    byte[] buffer = new byte[1024];
    FileOutputStream fos = new FileOutputStream(zipFile);
    ZipOutputStream zos = new ZipOutputStream(fos);

    //download the plugins
    for (int i = 0; i < ps.size(); i++) {
        System.out.println("[" + i + "/" + ps.size() + "] downloading " + plugins + ps.get(i));
        String outputFile = download(root.getAbsolutePath() + "/latest/" + ps.get(i), plugins + ps.get(i));

        FileInputStream fis = new FileInputStream(outputFile);
        // begin writing a new ZIP entry, positions the stream to the start of the entry data
        zos.putNextEntry(new ZipEntry(outputFile.replace(root.getAbsolutePath(), "")
                .replace("updates.jenkins-ci.org/", "").replace("https:/", "")));
        int length;
        while ((length = fis.read(buffer)) > 0) {
            zos.write(buffer, 0, length);
        }
        zos.closeEntry();
        fis.close();
        if (throttle)
            Thread.sleep(WAIT);
        new File(root.getAbsolutePath() + "/latest/" + ps.get(i)).deleteOnExit();
    }

    //download the json metadata
    plugins = "https://updates.jenkins-ci.org/";
    ps = new ArrayList<String>();
    doc = Jsoup.connect(plugins).get();
    for (Element file : doc.select("td a")) {
        //System.out.println(file.attr("href"));
        if (file.attr("href").endsWith(".json")) {
            ps.add(file.attr("href"));
        }
    }
    for (int i = 0; i < ps.size(); i++) {
        download(root.getAbsolutePath() + "/" + ps.get(i), plugins + ps.get(i));

        FileInputStream fis = new FileInputStream(root.getAbsolutePath() + "/" + ps.get(i));
        // begin writing a new ZIP entry, positions the stream to the start of the entry data
        zos.putNextEntry(new ZipEntry(plugins + ps.get(i)));
        int length;
        while ((length = fis.read(buffer)) > 0) {
            zos.write(buffer, 0, length);
        }
        zos.closeEntry();
        fis.close();
        new File(root.getAbsolutePath() + "/" + ps.get(i)).deleteOnExit();
        if (throttle)
            Thread.sleep(WAIT);
    }

    // close the ZipOutputStream
    zos.close();
}
From source file:jlite.cli.JobMatch.java
public static void main(String[] args) {
    System.out.println(); // extra line
    CommandLineParser parser = new GnuParser();
    Options options = setupOptions();
    HelpFormatter helpFormatter = new HelpFormatter();
    helpFormatter.setSyntaxPrefix("Usage: ");
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
        if (line.hasOption("help")) {
            helpFormatter.printHelp(100, COMMAND, "\noptions:", options, "\n" + CLI.FOOTER, false);
            System.out.println(); // extra line
            System.exit(0);
        } else {
            if (line.hasOption("xml")) {
                System.out.println("<output>");
            }
            String[] remArgs = line.getArgs();
            if (remArgs.length == 1) {
                run(remArgs[0], line);
            } else if (remArgs.length == 0) {
                throw new MissingArgumentException("Missing required argument: <jdl_file>");
            } else {
                throw new UnrecognizedOptionException("Unrecognized extra arguments");
            }
        }
    } catch (ParseException e) {
        System.err.println(e.getMessage() + "\n");
        helpFormatter.printHelp(100, COMMAND, "\noptions:", options, "\n" + CLI.FOOTER, false);
        System.out.println(); // extra line
        System.exit(-1);
    } catch (Exception e) {
        if (line.hasOption("xml")) {
            System.out.println("<error>" + e.getMessage() + "</error>");
        } else {
            System.err.println(e.getMessage());
        }
    } finally {
        if (line.hasOption("xml")) {
            System.out.println("</output>");
        }
    }
    System.out.println(); // extra line
}
From source file:de.unirostock.sems.caro.CaRo.java
/**
 * The main method to be called by the command line.
 *
 * @param args
 *          the arguments
 */
public static void main(String[] args) {
    Options options = new Options();
    options.addOption(new Option("h", "help", false, "print the help message"));
    options.addOption(
            Option.builder().longOpt("roca").desc("convert a research object into a combine archive").build());
    options.addOption(
            Option.builder().longOpt("caro").desc("convert a combine archive into a research object").build());
    options.addOption(Option.builder("i").longOpt("in").required().argName("FILE").hasArg()
            .desc("source container to be converted").build());
    options.addOption(Option.builder("o").longOpt("out").required().argName("FILE").hasArg()
            .desc("target container to be created").build());

    CommandLineParser parser = new DefaultParser();
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
        if (line.hasOption("help")) {
            help(options, null);
            return;
        }
    } catch (ParseException e) {
        help(options, "Parsing of command line options failed. Reason: " + e.getMessage());
        return;
    }

    File in = new File(line.getOptionValue("in"));
    File out = new File(line.getOptionValue("out"));
    if (!in.exists()) {
        help(options, "file " + in + " does not exist");
        return;
    }
    if (out.exists()) {
        help(options, "file " + out + " already exists");
        return;
    }
    if (line.hasOption("caro") && line.hasOption("roca")) {
        help(options, "only one of --roca and --caro is allowed");
        return;
    }

    CaRoConverter conv = null;
    if (line.hasOption("caro"))
        conv = new CaToRo(in);
    else if (line.hasOption("roca"))
        conv = new RoToCa(in);
    else {
        help(options, "you need to either supply --roca or --caro");
        return;
    }

    conv.convertTo(out);

    if (conv.hasErrors())
        System.err.println("There were errors!");
    if (conv.hasWarnings())
        System.err.println("There were warnings!");

    List<CaRoNotification> notifications = conv.getNotifications();
    for (CaRoNotification note : notifications)
        System.out.println(note);
}
From source file:fr.cnrs.sharp.Main.java
public static void main(String args[]) {
    Options options = new Options();
    Option versionOpt = new Option("v", "version", false, "print the version information and exit");
    Option helpOpt = new Option("h", "help", false, "print the help");
    Option inProvFileOpt = OptionBuilder.withArgName("input_PROV_file_1> ... <input_PROV_file_n")
            .withLongOpt("input_PROV_files").withDescription("The list of PROV input files, in RDF Turtle.")
            .hasArgs().create("i");
    Option inRawFileOpt = OptionBuilder.withArgName("input_raw_file_1> ... <input_raw_file_n")
            .withLongOpt("input_raw_files")
            .withDescription(
                    "The list of raw files to be fingerprinted and possibly interlinked with owl:sameAs.")
            .hasArgs().create("ri");
    Option summaryOpt = OptionBuilder.withArgName("summary").withLongOpt("summary")
            .withDescription("Materialization of wasInfluencedBy relations.").create("s");
    options.addOption(inProvFileOpt);
    options.addOption(inRawFileOpt);
    options.addOption(versionOpt);
    options.addOption(helpOpt);
    options.addOption(summaryOpt);

    String header = "SharpTB is a tool to maturate provenance based on PROV inferences";
    String footer = "\nPlease report any issue to alban.gaignard@univ-nantes.fr";

    try {
        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);

        if (cmd.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("SharpTB", header, options, footer, true);
            System.exit(0);
        }
        if (cmd.hasOption("v")) {
            logger.info("SharpTB version 0.1.0");
            System.exit(0);
        }

        if (cmd.hasOption("ri")) {
            String[] inFiles = cmd.getOptionValues("ri");
            Model model = ModelFactory.createDefaultModel();
            for (String inFile : inFiles) {
                Path p = Paths.get(inFile);
                if (!p.toFile().isFile()) {
                    logger.error("Cannot find file " + inFile);
                    System.exit(1);
                } else {
                    // 1. fingerprint
                    try {
                        model.add(Interlinking.fingerprint(p));
                    } catch (IOException e) {
                        logger.error("Cannot fingerprint file " + inFile);
                    }
                }
            }
            // 2. genSameAs
            Model sameAs = Interlinking.generateSameAs(model);
            sameAs.write(System.out, "TTL");
        }

        if (cmd.hasOption("i")) {
            String[] inFiles = cmd.getOptionValues("i");
            Model data = ModelFactory.createDefaultModel();
            for (String inFile : inFiles) {
                Path p = Paths.get(inFile);
                if (!p.toFile().isFile()) {
                    logger.error("Cannot find file " + inFile);
                    System.exit(1);
                } else {
                    RDFDataMgr.read(data, inFile, Lang.TTL);
                }
            }
            Model res = Harmonization.harmonizeProv(data);
            try {
                Path pathInfProv = Files.createTempFile("PROV-inf-tgd-egd-", ".ttl");
                res.write(new FileWriter(pathInfProv.toFile()), "TTL");
                System.out.println("Harmonized PROV written to file " + pathInfProv.toString());

                // if the summary option is activated, then save the subgraph and generate a visualization
                if (cmd.hasOption("s")) {
                    String queryInfluence = "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> \n"
                            + "PREFIX prov: <http://www.w3.org/ns/prov#> \n"
                            + "CONSTRUCT { \n"
                            + "    ?x ?p ?y .\n"
                            + "    ?x rdfs:label ?lx .\n"
                            + "    ?y rdfs:label ?ly .\n"
                            + "} WHERE {\n"
                            + "    ?x ?p ?y .\n"
                            + "    FILTER (?p IN (prov:wasInfluencedBy)) .\n"
                            + "    ?x rdfs:label ?lx .\n"
                            + "    ?y rdfs:label ?ly .\n"
                            + "}";
                    Query query = QueryFactory.create(queryInfluence);
                    QueryExecution queryExec = QueryExecutionFactory.create(query, res);
                    Model summary = queryExec.execConstruct();
                    queryExec.close();
                    Util.writeHtmlViz(summary);
                }
            } catch (IOException ex) {
                logger.error("Impossible to write the harmonized provenance file.");
                System.exit(1);
            }
        } else {
            // logger.info("Please fill the -i input parameter.");
            // HelpFormatter formatter = new HelpFormatter();
            // formatter.printHelp("SharpTB", header, options, footer, true);
            // System.exit(0);
        }
    } catch (ParseException ex) {
        logger.error("Error while parsing command line arguments. Please check the following help:");
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("SharpToolBox", header, options, footer, true);
        System.exit(1);
    }
}
From source file:com.github.brandtg.switchboard.FileLogAggregator.java
/** Main. */
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption("h", "help", false, "Prints help message");
    opts.addOption("f", "file", true, "File to output aggregated logs to");
    opts.addOption("s", "separator", true, "Line separator in log");
    CommandLine cli = new GnuParser().parse(opts, args);

    if (cli.getArgs().length == 0 || cli.hasOption("help")) {
        new HelpFormatter().printHelp("usage: [opts] sourceHost:port ...", opts);
        System.exit(1);
    }

    // Parse sources
    Set<InetSocketAddress> sources = new HashSet<>();
    for (int i = 0; i < cli.getArgs().length; i++) {
        String[] tokens = cli.getArgs()[i].split(":");
        sources.add(new InetSocketAddress(tokens[0], Integer.valueOf(tokens[1])));
    }

    // Parse output stream
    OutputStream outputStream;
    if (cli.hasOption("file")) {
        outputStream = new FileOutputStream(cli.getOptionValue("file"));
    } else {
        outputStream = System.out;
    }

    // Separator
    String separator = cli.getOptionValue("separator", "\n");
    if (separator.length() != 1) {
        throw new IllegalArgumentException("Separator must only be 1 character");
    }

    final FileLogAggregator fileLogAggregator = new FileLogAggregator(sources, separator.charAt(0),
            outputStream);
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            try {
                fileLogAggregator.stop();
            } catch (Exception e) {
                LOG.error("Error when stopping log aggregator", e);
            }
        }
    });
    fileLogAggregator.start();
}