List of usage examples for org.apache.commons.cli CommandLine hasOption
public boolean hasOption(char opt) — also overloaded as public boolean hasOption(String opt); most of the examples below call the String overload.
From source file:benchmark.hbase.controller.TestLauncher.java
public static void main(final String[] args) throws Exception { // create the parser final CommandLineParser parser = new BasicParser(); // parse the command line arguments final CommandLine cmd = parser.parse(options, args); if (cmd.hasOption("u")) { displayHelp();/*from w ww . ja va 2s .c om*/ } final String connectionUrl = cmd.getOptionValue("connection-url"); final StoreType storeType = StoreType.fromName(cmd.getOptionValue("store-type")); final Optional<String> optionalNumReads = Optional.fromNullable(cmd.getOptionValue("num-reads")); final Optional<String> optionalNumWrites = Optional.fromNullable(cmd.getOptionValue("num-writes")); final int readConcurrancy = Integer.parseInt(cmd.getOptionValue("read-concurrancy")); final int writeConcurrancy = Integer.parseInt(cmd.getOptionValue("write-concurrancy")); int numReads = 0; int numWrites = 0; BenchmarkType benchmarkType = BenchmarkType.READ_ONLY; if (optionalNumReads.isPresent() && optionalNumWrites.isPresent()) { benchmarkType = BenchmarkType.READ_AND_WRITE; numReads = Integer.parseInt(optionalNumReads.get()); numWrites = Integer.parseInt(optionalNumWrites.get()); } else if (optionalNumReads.isPresent()) { benchmarkType = BenchmarkType.READ_ONLY; numReads = Integer.parseInt(optionalNumReads.get()); } else if (optionalNumWrites.isPresent()) { benchmarkType = BenchmarkType.WRITE_ONLY; numWrites = Integer.parseInt(optionalNumWrites.get()); } log.info("connectionUrl: {}", connectionUrl); log.info("storeType: {}", storeType); log.info("numReads: {}", numReads); log.info("numWrites: {}", numWrites); log.info("readConcurrancy: {}", readConcurrancy); log.info("writeConcurrancy: {}", writeConcurrancy); log.info("benchmarkType: {}", benchmarkType); TestLauncher.start(storeType, benchmarkType, numReads, numWrites, readConcurrancy, connectionUrl); System.exit(0); }
From source file:com.servioticy.dispatcher.DispatcherTopology.java
/** * @param args// w ww . jav a 2s . c o m * @throws InvalidTopologyException * @throws AlreadyAliveException * @throws InterruptedException */ public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException, InterruptedException, ParseException { Options options = new Options(); options.addOption( OptionBuilder.withArgName("file").hasArg().withDescription("Config file path.").create("f")); options.addOption(OptionBuilder.withArgName("topology").hasArg() .withDescription("Name of the topology in storm. If no name is given it will run in local mode.") .create("t")); options.addOption(OptionBuilder.withDescription("Enable debugging").create("d")); CommandLineParser parser = new GnuParser(); CommandLine cmd = parser.parse(options, args); String path = null; if (cmd.hasOption("f")) { path = cmd.getOptionValue("f"); } DispatcherContext dc = new DispatcherContext(); dc.loadConf(path); TopologyBuilder builder = new TopologyBuilder(); // TODO Auto-assign workers to the spout in function of the number of Kestrel IPs builder.setSpout("updates", new KestrelThriftSpout(Arrays.asList(dc.updatesAddresses), dc.updatesPort, dc.updatesQueue, new UpdateDescriptorScheme())); builder.setSpout("actions", new KestrelThriftSpout(Arrays.asList(dc.actionsAddresses), dc.actionsPort, dc.actionsQueue, new ActuationScheme())); builder.setBolt("prepare", new PrepareBolt(dc)).shuffleGrouping("updates"); builder.setBolt("actuationdispatcher", new ActuationDispatcherBolt(dc)).shuffleGrouping("actions"); builder.setBolt("subretriever", new SubscriptionRetrieveBolt(dc)).shuffleGrouping("prepare", "subscription"); builder.setBolt("externaldispatcher", new ExternalDispatcherBolt(dc)).fieldsGrouping("subretriever", "externalSub", new Fields("subid")); builder.setBolt("internaldispatcher", new InternalDispatcherBolt(dc)).fieldsGrouping("subretriever", "internalSub", new Fields("subid")); builder.setBolt("streamdispatcher", new StreamDispatcherBolt(dc)) 
.shuffleGrouping("subretriever", "streamSub").shuffleGrouping("prepare", "stream"); builder.setBolt("streamprocessor", new StreamProcessorBolt(dc)).shuffleGrouping("streamdispatcher", "default"); if (dc.benchmark) { builder.setBolt("benchmark", new BenchmarkBolt(dc)).shuffleGrouping("streamdispatcher", "benchmark") .shuffleGrouping("subretriever", "benchmark").shuffleGrouping("streamprocessor", "benchmark") .shuffleGrouping("prepare", "benchmark"); } Config conf = new Config(); conf.setDebug(cmd.hasOption("d")); if (cmd.hasOption("t")) { StormSubmitter.submitTopology(cmd.getOptionValue("t"), conf, builder.createTopology()); } else { conf.setMaxTaskParallelism(4); LocalCluster cluster = new LocalCluster(); cluster.submitTopology("dispatcher", conf, builder.createTopology()); } }
From source file:com.hortonworks.registries.storage.tool.shell.ShellMigrationInitializer.java
public static void main(String[] args) throws Exception { Options options = new Options(); options.addOption(Option.builder("s").numberOfArgs(1).longOpt(OPTION_SCRIPT_ROOT_PATH) .desc("Root directory of script path").build()); options.addOption(Option.builder("c").numberOfArgs(1).longOpt(OPTION_CONFIG_FILE_PATH) .desc("Config file path").build()); options.addOption(Option.builder().hasArg(false).longOpt(ShellMigrationOption.MIGRATE.toString()) .desc("Execute schema migration from last check point").build()); options.addOption(Option.builder().hasArg(false).longOpt(ShellMigrationOption.INFO.toString()) .desc("Show the status of the schema migration compared to the target database").build()); options.addOption(Option.builder().hasArg(false).longOpt(ShellMigrationOption.VALIDATE.toString()) .desc("Validate the target database changes with the migration scripts").build()); options.addOption(Option.builder().hasArg(false).longOpt(ShellMigrationOption.REPAIR.toString()).desc( "Repairs the SCRIPT_CHANGE_LOG by removing failed migrations and correcting checksum of existing migration script") .build());// w ww . 
j a v a 2 s .com CommandLineParser parser = new BasicParser(); CommandLine commandLine = parser.parse(options, args); if (!commandLine.hasOption(OPTION_SCRIPT_ROOT_PATH)) { usage(options); System.exit(1); } boolean isShellMigrationOptionSpecified = false; ShellMigrationOption shellMigrationOptionSpecified = null; for (ShellMigrationOption shellMigrationOption : ShellMigrationOption.values()) { if (commandLine.hasOption(shellMigrationOption.toString())) { if (isShellMigrationOptionSpecified) { System.out.println( "Only one operation can be execute at once, please select one of ',migrate', 'validate', 'info', 'repair'."); System.exit(1); } isShellMigrationOptionSpecified = true; shellMigrationOptionSpecified = shellMigrationOption; } } if (!isShellMigrationOptionSpecified) { System.out.println( "One of the option 'migrate', 'validate', 'info', 'repair' must be specified to execute."); System.exit(1); } String scriptRootPath = commandLine.getOptionValue(OPTION_SCRIPT_ROOT_PATH); String confFilePath = commandLine.getOptionValue(OPTION_CONFIG_FILE_PATH); StorageProviderConfiguration storageProperties; try { Map<String, Object> conf = Utils.readConfig(confFilePath); StorageProviderConfigurationReader confReader = new StorageProviderConfigurationReader(); storageProperties = confReader.readStorageConfig(conf); } catch (IOException e) { System.err.println("Error occurred while reading config file: " + confFilePath); System.exit(1); throw new IllegalStateException("Shouldn't reach here"); } ShellMigrationHelper schemaMigrationHelper = new ShellMigrationHelper( ShellFlywayFactory.get(storageProperties, scriptRootPath)); try { schemaMigrationHelper.execute(shellMigrationOptionSpecified); System.out.println(String.format("\"%s\" option successful", shellMigrationOptionSpecified.toString())); } catch (Exception e) { System.err.println(String.format("\"%s\" option failed : %s", shellMigrationOptionSpecified.toString(), e.getMessage())); System.exit(1); } }
From source file:com.threew.validacion.tarjetas.credito.App.java
/** * Apliacion principal o main de la librera. * @param args the command line arguments *///from w w w . j a v a 2s . c om public static void main(String[] args) { /// Preguntar por los argumentos // Probar con nmeros de tarjeta Options opciones = new Options(); /// Agregar opciones opciones.addOption("n", true, "el nmero de tarjeta a validar"); opciones.addOption("c", "el cdigo CVV/CVV2 de la tarjeta a validar"); /// Analizar la linea de comandos CommandLineParser parser = new DefaultParser(); try { /// Linea de comandos CommandLine cmd = parser.parse(opciones, args); /// Preguntar si a linea de comandos se usa if (cmd.hasOption("n") == true) { String numero = cmd.getOptionValue("n"); /// Validar validar(numero); } else { validar(args[1]); } } catch (ParseException ex) { Log.error(ex.toString()); } }
From source file:net.openhft.chronicle.queue.ChronicleReaderMain.java
public static void main(@NotNull String[] args) { final Options options = options(); final CommandLine commandLine = parseCommandLine(args, options); final Consumer<String> messageSink = commandLine.hasOption('l') ? s -> System.out.println(s.replaceAll("\n", "")) : System.out::println; final ChronicleReader chronicleReader = new ChronicleReader().withMessageSink(messageSink) .withBasePath(Paths.get(commandLine.getOptionValue('d'))); configureReader(chronicleReader, commandLine); chronicleReader.execute();// w ww. ja va 2 s .c om }
From source file:com.metadave.stow.Stow.java
public static void main(String args[]) { System.out.println("Stow: StringTemplate Object Wrapper"); System.out.println("(C) 2014 Dave Parfitt"); System.out.println("Stow uses the Apache 2 license"); CommandLineParser parser = new BasicParser(); Options options = new Options(); //options.addOption( "a", "all", false, "do not hide entries starting with ."); Option javaPackage = new Option("java_package", "package for generated classes"); javaPackage.setArgs(1);// w ww .ja va 2s.co m //javaPackage.setRequired(true); Option destDir = new Option("dest", "destination directory for generated .java files"); destDir.setArgs(1); Option stgFile = new Option("stg", "StringTemplate4 group file"); stgFile.setArgs(1); Option classPrefix = new Option("class_prefix", "Prefix to use for generated classes"); classPrefix.setArgs(1); //destDir.setRequired(true); options.addOption(javaPackage); options.addOption(destDir); options.addOption(stgFile); options.addOption(classPrefix); try { CommandLine line = parser.parse(options, args); if (line.hasOption("java_package") && line.hasOption("dest") && line.hasOption("stg")) { generateObjects(line.getOptionValue("stg"), line.getOptionValue("java_package"), line.hasOption("class_prefix") ? line.getOptionValue("class_prefix") : "", line.getOptionValue("dest")); } else { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("stow", options); } } catch (ParseException exp) { System.out.println("Error parsing stow command line:" + exp.getMessage()); } }
From source file:com.msd.gin.halyard.tools.HalyardUpdate.java
/**
 * Main of the HalyardUpdate
 * @param args String command line arguments
 * @throws Exception throws Exception in case of any problem
 */
public static void main(final String args[]) throws Exception {
    // Lazily create a default Hadoop configuration if none was injected.
    if (conf == null)
        conf = new Configuration();
    Options options = new Options();
    options.addOption(newOption("h", null, "Prints this help"));
    options.addOption(newOption("v", null, "Prints version"));
    options.addOption(newOption("s", "source_htable", "Source HBase table with Halyard RDF store"));
    options.addOption(
            newOption("q", "sparql_query", "SPARQL tuple or graph query executed to export the data"));
    try {
        CommandLine cmd = new PosixParser().parse(options, args);
        // No arguments at all, or explicit -h: just print help and stop.
        if (args.length == 0 || cmd.hasOption('h')) {
            printHelp(options);
            return;
        }
        if (cmd.hasOption('v')) {
            // Version is read from the Maven pom.properties baked into the jar;
            // falls back to "unknown" when the resource is absent.
            Properties p = new Properties();
            try (InputStream in = HalyardUpdate.class
                    .getResourceAsStream("/META-INF/maven/com.msd.gin.halyard/hbasesail/pom.properties")) {
                if (in != null)
                    p.load(in);
            }
            System.out.println("Halyard Update version " + p.getProperty("version", "unknown"));
            return;
        }
        // Reject any leftover positional arguments.
        if (!cmd.getArgList().isEmpty())
            throw new ParseException("Unknown arguments: " + cmd.getArgList().toString());
        // Both 's' and 'q' are mandatory...
        for (char c : "sq".toCharArray()) {
            if (!cmd.hasOption(c))
                throw new ParseException("Missing mandatory option: " + c);
        }
        // ...and single-valued.
        for (char c : "sq".toCharArray()) {
            String s[] = cmd.getOptionValues(c);
            if (s != null && s.length > 1)
                throw new ParseException("Multiple values for option: " + c);
        }
        // Open the HBase-backed SAIL repository and run the SPARQL update,
        // always shutting the repository down afterwards.
        SailRepository rep = new SailRepository(
                new HBaseSail(conf, cmd.getOptionValue('s'), false, 0, true, 0, null));
        rep.initialize();
        try {
            Update u = rep.getConnection().prepareUpdate(QueryLanguage.SPARQL, cmd.getOptionValue('q'));
            LOG.info("Update execution started");
            u.execute();
            LOG.info("Update finished");
        } finally {
            rep.shutDown();
        }
    } catch (Exception exp) {
        // Report the failure, show usage, then rethrow so the exit code is non-zero.
        System.out.println(exp.getMessage());
        printHelp(options);
        throw exp;
    }
}
From source file:eu.scape_project.tb.cipex_analyse.CipexCountAgreeOnMimeType.java
/** * Main entry point./* w w w . ja v a 2 s. com*/ * * @param args * @throws Exception */ public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); // Command line interface config = new CliConfig(); CommandLineParser cmdParser = new PosixParser(); GenericOptionsParser gop = new GenericOptionsParser(conf, args); CommandLine cmd = cmdParser.parse(Options.OPTIONS, gop.getRemainingArgs()); if ((args.length == 0) || (cmd.hasOption(Options.HELP_OPT))) { Options.exit("Usage", 0); } else { Options.initOptions(cmd, config); } startHadoopJob(conf); }
From source file:com.aerospike.utility.SetDelete.java
/**
 * Deletes every record in an Aerospike set: scans the set for record
 * digests (no bin data) and issues a delete per key, logging progress
 * every 25,000 deletions.
 *
 * @param args command line arguments
 * @throws ParseException if the command line cannot be parsed
 */
public static void main(String[] args) throws ParseException {
    Options options = new Options();
    options.addOption("h", "host", true, "Server hostname (default: localhost)");
    options.addOption("p", "port", true, "Server port (default: 3000)");
    options.addOption("n", "namespace", true, "Namespace (default: test)");
    options.addOption("s", "set", true, "Set to delete (default: test)");
    options.addOption("u", "usage", false, "Print usage.");

    CommandLineParser parser = new PosixParser();
    CommandLine cl = parser.parse(options, args, false);
    if (args.length == 0 || cl.hasOption("u")) {
        logUsage(options);
        return;
    }

    // NOTE(review): the help text says "default: localhost" but the effective
    // default is "127.0.0.1" — functionally the same host, kept as-is.
    String host = cl.getOptionValue("h", "127.0.0.1");
    String portString = cl.getOptionValue("p", "3000");
    int port = Integer.parseInt(portString);
    String set = cl.getOptionValue("s", "test");
    String namespace = cl.getOptionValue("n", "test");
    log.info("Host: " + host);
    log.info("Port: " + port);
    log.info("Name space: " + namespace);
    log.info("Set: " + set);
    // FIX: removed the unreachable `if (set == null)` guard — getOptionValue
    // with a default value can never return null.

    try {
        final AerospikeClient client = new AerospikeClient(host, port);
        ScanPolicy scanPolicy = new ScanPolicy();
        scanPolicy.includeBinData = false; // only digests are needed for deletion
        scanPolicy.concurrentNodes = true;
        scanPolicy.priority = Priority.HIGH;
        /*
         * Scan the entire set using scanAll(). This scans each node in the
         * cluster and returns each record digest to the callback object.
         */
        client.scanAll(scanPolicy, namespace, set, new ScanCallback() {
            public void scanCallback(Key key, Record record) throws AerospikeException {
                // For each digest returned, delete it using delete().
                if (client.delete(null, key))
                    count++;
                // After every 25,000 deletions, log the running count.
                if (count % 25000 == 0) {
                    log.info("Deleted " + count);
                }
            }
        }, new String[] {});
        log.info("Deleted " + count + " records from set " + set);
    } catch (AerospikeException e) {
        int resultCode = e.getResultCode();
        log.info(ResultCode.getResultString(resultCode));
        log.debug("Error details: ", e);
    }
}
From source file:com.mvdb.etl.actions.ScanDBChanges.java
public static void main(String[] args) throws IOException { String customerName = null;//from ww w. ja va 2s. co m String snapshotDir = null; final CommandLineParser cmdLinePosixParser = new PosixParser(); final Options posixOptions = constructPosixOptions(); CommandLine commandLine; try { commandLine = cmdLinePosixParser.parse(posixOptions, args); if (commandLine.hasOption("customer")) { customerName = commandLine.getOptionValue("customer"); } if (commandLine.hasOption("snapshotDir")) { snapshotDir = commandLine.getOptionValue("snapshotDir"); } } catch (ParseException parseException) // checked exception { System.err.println( "Encountered exception while parsing using PosixParser:\n" + parseException.getMessage()); } if (customerName == null) { System.err.println("Could not find customerName. Aborting..."); System.exit(1); } if (snapshotDir == null) { System.err.println("Could not find snapshotDir. Aborting..."); System.exit(1); } ApplicationContext context = Top.getContext(); final ConfigurationDAO configurationDAO = (ConfigurationDAO) context.getBean("configurationDAO"); final GenericDAO genericDAO = (GenericDAO) context.getBean("genericDAO"); File snapshotDirectory = getSnapshotDirectory(configurationDAO, customerName, snapshotDir); //write file schema-orders.dat in snapshotDirectory Metadata metadata = genericDAO.getMetadata("orders", snapshotDirectory); //writes files: header-orders.dat, data-orders.dat in snapshotDirectory genericDAO.scan2("orders", snapshotDirectory); }