Usage examples for org.apache.commons.cli.CommandLine#getOptionValues

public String[] getOptionValues(String opt)
public String[] getOptionValues(char opt)

Both overloads return the array of values supplied for the option, or null if the option was not given on the command line.
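Before the project-specific examples below, here is a minimal, self-contained sketch of the typical pattern: declare a repeatable option, parse, and null-check the array returned by getOptionValues before using it. The option name and argument values are illustrative, and DefaultParser assumes Commons CLI 1.3+ (the older GnuParser/BasicParser used in several examples below behave the same way for this call).

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class GetOptionValuesDemo {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        // -p may be given several times; each occurrence adds one value
        Option property = new Option("p", "property", true, "repeatable key=value pair");
        property.setArgs(Option.UNLIMITED_VALUES);
        options.addOption(property);

        CommandLine cmd = new DefaultParser().parse(options,
                new String[] { "-p", "host=localhost", "-p", "port=8080" });

        // getOptionValues returns null when the option is absent, so guard before indexing
        String[] values = cmd.getOptionValues("p");
        if (values != null) {
            for (String value : values) {
                System.out.println(value); // prints host=localhost, then port=8080
            }
        }
    }
}

The null check matters: indexing into the result without it is a common source of NullPointerExceptions with this API, which is why several of the examples below (e.g. parseMoveXOption) test both for null and for the expected length.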
From source file:bdsup2sub.cli.CommandLineParser.java
private void parseMoveXOption(CommandLine line) throws ParseException {
    if (line.hasOption(MOVE_X)) {
        if (line.getOptionValues(MOVE_X) == null || line.getOptionValues(MOVE_X).length < 1) {
            throw new ParseException("Missing argument for moving captions.");
        }
        String value = line.getOptionValues(MOVE_X)[0];
        if (value.equalsIgnoreCase("left")) {
            moveModeX = Optional.of(CaptionMoveModeX.LEFT);
        } else if (value.equalsIgnoreCase("center")) {
            moveModeX = Optional.of(CaptionMoveModeX.CENTER);
        } else if (value.equalsIgnoreCase("right")) {
            moveModeX = Optional.of(CaptionMoveModeX.RIGHT);
        } else {
            throw new ParseException("Invalid move mode: " + value);
        }
        if ((moveModeX.get() == CaptionMoveModeX.LEFT || moveModeX.get() == CaptionMoveModeX.RIGHT)
                && line.getOptionValues(MOVE_X).length > 1) {
            value = line.getOptionValues(MOVE_X)[1];
            moveXOffset = Optional.of(ToolBox.getInt(value));
            if (moveXOffset.get() < 0) {
                throw new ParseException("Invalid pixel offset: " + value);
            }
        }
    }
}
From source file:ca.gnewton.lusql.core.LuSqlMain.java
static public RunState handleOptions(LuSql lusql, final String[] args) throws ParseException {
    setupOptions();
    if (args.length == 0) {
        return RunState.ShowOptions;
    }
    RunState optionFlag = RunState.Work;

    //CommandLineParser parser = new PosixParser();
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
    } catch (Exception e) {
        e.printStackTrace();
        return RunState.ShowOptions;
    }

    //***********************************************
    // This must be set before other things...
    //***********************************************
    if (line.hasOption("I")) {
        lusql.setGlobalFieldIndexParameter(new LuceneFieldParameters(line.getOptionValue("I")));
    }
    //***********************************************

    String[] explain = line.getOptionValues("e");
    if (explain != null) {
        try {
            setExplainPlugins(explain);
        } catch (Throwable t) {
            t.printStackTrace();
        }
        return RunState.ExplainPlugin;
    }

    String[] sinkProps = line.getOptionValues(CLIDocSinkProperties);
    MultiValueProp sinkP = new MultiValueProp();
    if (sinkProps != null) {
        for (int i = 0; i < sinkProps.length; i++) {
            String parts[] = sinkProps[i].split("=");
            if (parts == null || parts.length != 2) {
                System.err.println("Error processing command line parameter:[-psi" + sinkProps[i]
                        + "] Missing equals sign?");
            }
            sinkP.put(parts[0], parts[1]);
        }
    }
    lusql.setSinkProperties(sinkP);

    MultiValueProp sourceP = new MultiValueProp();
    String[] sourceProps = line.getOptionValues(CLIDocSourceProperties);
    if (sourceProps != null) {
        for (int i = 0; i < sourceProps.length; i++) {
            String parts[] = sourceProps[i].split("=");
            if (parts == null || parts.length != 2) {
                System.err.println("Error processing command line parameter:[ -pso" + sourceProps[i]
                        + "] Missing equals sign?");
            }
            sourceP.put(parts[0], parts[1]);
        }
    }
    lusql.setSourceProperties(sourceP);

    /////////////////
    String[] filterProps = line.getOptionValues(CLIDocFilterProperties);
    if (filterProps != null) {
        Map<String, MultiValueProp> fp = new HashMap<String, MultiValueProp>();
        for (int i = 0; i < filterProps.length; i++) {
            String parts[] = filterProps[i].split(":");
            if (parts == null || parts.length != 2)
                throw new ParseException("Badly formed filter options: " + filterProps[i]);
            String n = parts[0];
            parts = parts[1].split("=");
            MultiValueProp p = null;
            if (fp.containsKey(n))
                p = fp.get(n);
            else {
                p = new MultiValueProp();
                fp.put(n, p);
            }
            p.setProperty(parts[0], parts[1]);
        }
        Iterator<String> it = fp.keySet().iterator();
        while (it.hasNext()) {
            String k = it.next();
            lusql.setFilterProperties(k, fp.get(k));
        }
    }

    if (line.hasOption("O")) {
        lusql.setOffset(Integer.parseInt(line.getOptionValue("O")));
    }
    if (line.hasOption("stdout") || line.hasOption("o")) {
        lusql.setSinkWriteToStdout(true);
    }
    if (line.hasOption("stdin")) {
        lusql.setSinkReadFromStdin(true);
    }

    // Global fields
    String[] gfs = line.getOptionValues("g");
    if (gfs != null)
        for (int i = 0; i < gfs.length; i++) {
            String nnnParts[] = gfs[i].split("\\|");
            String nnn = null;
            String theRest = gfs[i];
            LuceneFieldParameters lfp = lusql.getGlobalFieldIndexParameter();
            if (nnnParts.length == 2) { // -g "A:A:A|field=value"
                lfp = new LuceneFieldParameters(nnnParts[0]);
                theRest = nnnParts[1];
            } // else -g "field=value"
            String parts[] = theRest.split("=");
            if (parts == null || parts.length != 2) {
                System.err.println("Bad global field parameter (-g): " + gfs[i]);
                return RunState.ShowOptions;
            }
            lusql.addGlobalField(lfp, parts[0], parts[1]);
        }

    String[] sqs = line.getOptionValues("Q");
    String parameter;
    if (sqs != null)
        for (int i = 0; i < sqs.length; i++) {
            //System.err.println("****** " + sqs[i]);
            String[] parts = sqs[i].split("\\|"); // '|' must be escaped: split() takes a regex
            //if(parts.length == 3)
            parameter = parts[1];
            //else
            //parameter = DefaultFieldIndexGlobalParameter;
            lusql.addSubQuery(new SubQuery(sqs[i], parameter));
        }

    if (line.hasOption("M"))
        SubQuery.setKeyMeta(line.getOptionValue("M"));
    if (line.hasOption("zso"))
        lusql.setSourceCompression(true);
    if (line.hasOption("zsi"))
        lusql.setSinkCompression(true);
    if (line.hasOption("A")) {
        lusql.setAppendToLuceneIndex(true);
        lusql.setIndexCreate(false);
    }
    if (line.hasOption("a"))
        lusql.setAnalyzerName(line.getOptionValue("a"));
    if (line.hasOption("C"))
        lusql.setOutputChunk((Integer.parseInt(line.getOptionValue("C"))));
    if (line.hasOption("w"))
        lusql.setWorkPerThread((Integer.parseInt(line.getOptionValue("w"))));
    if (line.hasOption("S"))
        lusql.setQueueSize(Integer.parseInt(line.getOptionValue("S")));
    if (line.hasOption("V"))
        lusql.setLoadAverageLimit((Float.parseFloat(line.getOptionValue("V"))));
    if (line.hasOption("X")) {
        printArgs(args);
    }

    //REQUIRED
    if (line.hasOption("c")) {
        lusql.setDBUrl(line.getOptionValue("c"));
    }
    /*
    else {
        if(!line.hasOption("e"))
            return RunState.ShowOptions;
    }
    */
    if (line.hasOption("d")) {
        lusql.setDBDriverName(line.getOptionValue("d"));
        System.err.println("****************************");
    }

    String[] flds = line.getOptionValues(CLIDocFiltersClassName);
    if (flds != null) {
        List<String> filterNames = new ArrayList<String>();
        for (int i = 0; i < flds.length; i++) {
            filterNames.add(flds[i]);
        }
        lusql.setDocFilterNames(filterNames);
    }

    flds = line.getOptionValues("F");
    if (flds != null) {
        Set<String> fieldNames = new HashSet<String>();
        for (int i = 0; i < flds.length; i++) {
            fieldNames.add(flds[i]);
        }
        lusql.setFieldNames(fieldNames);
    }

    // -i fieldname=A:A:A
    flds = line.getOptionValues("i");
    if (flds != null) {
        for (int i = 0; i < flds.length; i++) {
            String[] parts = flds[i].split("=");
            if (parts == null || parts.length != 2) {
                System.err.println("Bad global field parameter (-i): " + flds[i]);
                System.err.println("");
                return RunState.ShowOptions;
            }
            lusql.addFieldIndexParameter(parts[0], parts[1]);
        }
    }

    if (line.hasOption("l"))
        lusql.setSinkLocationName(line.getOptionValue("l"));
    if (line.hasOption(CLIDocSourceClassName))
        lusql.setDocSourceClassName(line.getOptionValue(CLIDocSourceClassName));
    if (line.hasOption("P")) {
        lusql.setPrimaryKeyField(line.getOptionValue("P"));
        log.info("Setting primary key field: " + lusql.getPrimaryKeyField());
    }
    if (line.hasOption(CLIDocSinkClassName)) {
        lusql.setDocSinkClassName(line.getOptionValue(CLIDocSinkClassName));
    }

    String[] sind = line.getOptionValues("L");
    if (sind != null)
        for (int i = 0; i < sind.length; i++) {
            lusql.addSecondaryIndexName(sind[i]);
        }

    if (line.hasOption("J"))
        lusql.setMerge(false);
    if (line.hasOption("m"))
        lusql.setMySql(false);
    if (line.hasOption("N"))
        lusql.setNumThreads(new Integer(line.getOptionValue("N")).intValue());
    if (line.hasOption("K"))
        lusql.setDocPacketSize(new Integer(line.getOptionValue("K")).intValue());
    if (line.hasOption("n"))
        lusql.setMaxDocs(new Long(line.getOptionValue("n")).longValue());
    if (line.hasOption("p")) {
        lusql.setPropertiesFileName(line.getOptionValue("p"));
        try {
            MultiValueProp mvp = new MultiValueProp();
            Properties p = new Properties();
            p.load(new FileInputStream(lusql.getPropertiesFileName()));
            mvp.loadFromProperties(p);
            lusql.setProperties(mvp);
        } catch (IOException io) {
            io.printStackTrace();
            throw new ParseException("Problem with properties file: " + lusql.getPropertiesFileName());
        }
    }
    if (line.hasOption("q"))
        lusql.setQuery(line.getOptionValue("q"));
    if (line.hasOption("r"))
        lusql.setRAMBufferSizeMB(new Double(line.getOptionValue("r")).doubleValue());
    if (line.hasOption("s"))
        lusql.setStopWordFileName(line.getOptionValue("s"));
    if (line.hasOption("t"))
        lusql.setTest(true);
    if (line.hasOption("T"))
        lusql.setThreaded(false);
    if (line.hasOption("E"))
        lusql.setTransactionIsolation(Integer.parseInt(line.getOptionValue("E")));
    if (line.hasOption("v"))
        LuSql.setVerbose(true);
    if (line.hasOption("onlyMap"))
        lusql.setOnlyMap(true);

    flds = line.getOptionValues("map");
    if (flds != null) {
        for (int i = 0; i < flds.length; i++) {
            String[] parts = flds[i].split("=");
            fieldMap.put(parts[0], parts[1]);
        }
    }

    return optionFlag;
}
From source file:edu.cornell.med.icb.geo.tools.GDS2InsightfulMiner.java
private void proccess(final String[] args) {
    // create the Options
    final Options options = new Options();

    // help
    options.addOption("h", "help", false, "print this message");

    // input file name
    final Option inputOption = new Option("i", "input", true, "specify a GEO data set file (GDS file)");
    inputOption.setArgName("file");
    inputOption.setRequired(true);
    options.addOption(inputOption);

    // output file name
    final Option outputOption = new Option("o", "output", true, "specify the destination file");
    outputOption.setArgName("file");
    outputOption.setRequired(true);
    options.addOption(outputOption);

    // label values
    final Option labelOptions = new Option("l", "label", true, "specify a label to tag a set of columns");
    labelOptions.setArgName("double-value");
    labelOptions.setRequired(false);
    options.addOption(labelOptions);

    // group file names
    final Option groupOptions = new Option("g", "group", true,
            "specify a file that named columns associated to a label. Each -group option must match a -label option. Matching is done according to the order on the command line. Each line of the group file identifies a column in the GEO data set that is to be labeled according to the corresponding label");
    groupOptions.setArgName("file");
    groupOptions.setRequired(false);
    options.addOption(groupOptions);

    // default label value
    final Option defaultLabelOption = new Option("dl", "default-label", true,
            "Specify the label to use for columns that are not identified by -l -g pairs. Default value is zero.");
    defaultLabelOption.setArgName("double-value");
    defaultLabelOption.setRequired(false);
    options.addOption(defaultLabelOption);

    // parse the command line arguments
    CommandLine line = null;
    double defaultLabelValue = 0;
    try {
        // create the command line parser
        final CommandLineParser parser = new BasicParser();
        line = parser.parse(options, args, true);
        if ((line.hasOption("l") && !line.hasOption("g")) || (line.hasOption("g") && !line.hasOption("l"))) {
            System.err.println("Options -label and -group must be used together.");
            System.exit(10);
        }
        if (line.hasOption("l") && line.getOptionValues("l").length != line.getOptionValues("g").length) {
            System.err.println("The number of -label and -group options must match exactly.");
            System.exit(10);
        }
        if (line.hasOption("dl")) {
            defaultLabelValue = Double.parseDouble(line.getOptionValue("dl"));
        }
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        usage(options);
        System.exit(1);
    }

    // print help and exit
    if (line.hasOption("h")) {
        usage(options);
        System.exit(0);
    }

    try {
        final Map<Double, Set<String>> labels = readLabelGroups(line.getOptionValues("l"),
                line.getOptionValues("g")); // labels
        convert(line.getOptionValue("i"), line.getOptionValue("o"), labels, defaultLabelValue);
        System.exit(0);
    } catch (FileNotFoundException e) {
        System.err.println("Error opening file: \n");
        printGroups(line);
    } catch (IOException e) {
        System.err.println("An error occurred reading one of the group files:\n");
        printGroups(line);
    }
}
From source file:com.svds.genericconsumer.main.GenericConsumerGroup.java
/**
 * Helper for main for parsing command-line arguments and running
 * the ConsumerGroup
 *
 * @param cmd
 * @throws IOException
 * @throws ParseException
 */
private void doWork(CommandLine cmd) throws IOException, ParseException {
    LOG.info("HELLO from doWork");
    shutdownThread = new ShutdownThread(this);
    Runtime.getRuntime().addShutdownHook(shutdownThread);
    consumer = kafka.consumer.Consumer.createJavaConsumerConnector(
            createConsumerConfig(cmd.getOptionValue(ZOOKEEPER), cmd.getOptionValue(GROUPID)));
    run(cmd.getOptionValue(TOPICNAME), ((Number) cmd.getParsedOptionValue(THREADS)).intValue(),
            cmd.getOptionValue(CONSUMERCLASS),
            (cmd.getOptionValue(PARAMETERS) == null) ? new String[0] : cmd.getOptionValues(PARAMETERS));
}
From source file:com.google.enterprise.connector.importexport.DumpConnectors.java
@Override
public void run(CommandLine commandLine) throws Exception {
    // Must specify output filename.
    String[] args = commandLine.getArgs();

    initStandAloneContext(false);
    // Since we did not start the Context, we need to init TypeMap.
    getTypeMap().init();

    try {
        // If user asks for a list of available Connectors, print it and exit.
        if (commandLine.hasOption("list")) {
            listConnectors();
            return;
        }

        // Determine which connectors to export.
        Collection<String> connectors = null;
        String[] connectorNames = commandLine.getOptionValues('c');
        if (connectorNames != null) {
            connectors = ImmutableSortedSet.copyOf(connectorNames);
        }

        // Must specify output file.
        if (args.length != 1) {
            printUsage();
            return;
        }

        // Write the connector configurations out to the specified file.
        PrintWriter out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(args[0]), "UTF-8"));
        getExportConnectors().getConnectors(connectors).toXml(out, 0);
        out.close();
    } finally {
        shutdown();
    }
}
From source file:com.eviware.soapui.SoapUI.java
private static boolean processCommandLineArgs(CommandLine cmd, org.apache.commons.cli.Options options) {
    if (cmd.hasOption('w')) {
        workspaceName = cmd.getOptionValue('w');
    }

    if (cmd.hasOption('p')) {
        for (String projectNamePassword : cmd.getOptionValues('p')) {
            String[] nameAndPassword = projectNamePassword.split(":");
            projectOptions.put(nameAndPassword[0], nameAndPassword[1]);
        }
    }

    return true;
}
From source file:hws.core.JobClient.java
public void run(String[] args) throws Exception {
    //final String command = args[0];
    //final int n = Integer.valueOf(args[1]);
    //final Path jarPath = new Path(args[2]);
    Options options = new Options();
    /*options.addOption(OptionBuilder.withLongOpt("jar")
            .withDescription("Jar path")
            .hasArg()
            .withArgName("JarPath")
            .create());
    options.addOption(OptionBuilder.withLongOpt("scheduler")
            .withDescription("Scheduler class name")
            .hasArg()
            .withArgName("ClassName")
            .create());
    */
    options.addOption(OptionBuilder.withLongOpt("zk-servers")
            .withDescription("List of the ZooKeeper servers").hasArgs().withArgName("zkAddrs").create("zks"));
    //options.addOption("l", "list", false, "list modules");
    options.addOption(OptionBuilder.withLongOpt("load").withDescription("load new modules").hasArgs()
            .withArgName("XMLFiles").create());
    /*options.addOption(OptionBuilder.withLongOpt("remove")
            .withDescription("remove modules")
            .hasArgs()
            .withArgName("ModuleNames")
            .create("rm"));
    */
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);

    //Path jarPath = null;
    //String schedulerClassName = null;
    String[] xmlFileNames = null;
    //String[] moduleNames = null;
    String zksArgs = "";
    String[] zkServers = null;
    if (cmd.hasOption("zks")) {
        zksArgs = "-zks";
        zkServers = cmd.getOptionValues("zks");
        for (String zks : zkServers) {
            zksArgs += " " + zks;
        }
    }

    //Logger setup
    //FSDataOutputStream writer = FileSystem.get(conf).create(new Path("hdfs:///hws/apps/"+appIdStr+"/logs/jobClient.log"));
    //Logger.addOutputStream(writer);

    /*if(cmd.hasOption("l")){
        LOG.warn("Argument --list (-l) is not supported yet.");
    }
    if(cmd.hasOption("jar")){
        jarPath = new Path(cmd.getOptionValue("jar"));
    }
    if(cmd.hasOption("scheduler")){
        schedulerClassName = cmd.getOptionValue("scheduler");
    }*/
    if (cmd.hasOption("load")) {
        xmlFileNames = cmd.getOptionValues("load");
    }
    /*else if(cmd.hasOption("rm")){
        moduleNames = cmd.getOptionValues("rm");
    }*/

    //LOG.info("Jar-Path "+jarPath);
    if (xmlFileNames != null) {
        String paths = "";
        for (String path : xmlFileNames) {
            paths += path + "; ";
        }
        LOG.info("Load XMLs: " + paths);
    }
    /*if(moduleNames!=null){
        String modules = "";
        for(String module: moduleNames){
            modules += module+"; ";
        }
        LOG.info("remove: "+modules);
    }*/

    // Create yarnClient
    YarnConfiguration conf = new YarnConfiguration();
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();

    // Create application via yarnClient
    YarnClientApplication app = yarnClient.createApplication();

    System.out.println("LOG Path: " + ApplicationConstants.LOG_DIR_EXPANSION_VAR);

    // Set up the container launch context for the application master
    ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);

    ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
    ApplicationId appId = appContext.getApplicationId();

    ZkClient zk = new ZkClient(zkServers[0]); //TODO select a ZooKeeper server
    if (!zk.exists("/hadoop-watershed")) {
        zk.createPersistent("/hadoop-watershed", "");
    }
    zk.createPersistent("/hadoop-watershed/" + appId.toString(), "");

    FileSystem fs = FileSystem.get(conf);

    LOG.info("Collecting files to upload");
    fs.mkdirs(new Path("hdfs:///hws/apps/" + appId.toString()));
    fs.mkdirs(new Path("hdfs:///hws/apps/" + appId.toString() + "/logs"));

    ModulePipeline modulePipeline = ModulePipeline.fromXMLFiles(xmlFileNames);
    LOG.info("Uploading files to HDFS");
    for (String path : modulePipeline.files()) {
        uploadFile(fs, new File(path), appId);
    }
    LOG.info("Upload finished");

    String modulePipelineJson = Json.dumps(modulePipeline);
    String modulePipelineBase64 = Base64.encodeBase64String(StringUtils.getBytesUtf8(modulePipelineJson))
            .replaceAll("\\s", "");
    LOG.info("ModulePipeline: " + modulePipelineJson);
    //LOG.info("ModulePipeline: "+modulePipelineBase64);

    amContainer.setCommands(Collections.singletonList("$JAVA_HOME/bin/java" + " -Xmx256M"
            + " hws.core.JobMaster" + " -aid " + appId.toString() + " --load " + modulePipelineBase64 + " "
            + zksArgs + " 1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout" + " 2>"
            + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr"));

    // Setup jar for ApplicationMaster
    //LocalResource appMasterJar = Records.newRecord(LocalResource.class);
    //setupAppMasterJar(jarPath, appMasterJar);
    //amContainer.setLocalResources(Collections.singletonMap("hws.jar", appMasterJar));

    LOG.info("Listing files for YARN-Watershed");
    RemoteIterator<LocatedFileStatus> filesIterator = fs.listFiles(new Path("hdfs:///hws/bin/"), false);
    Map<String, LocalResource> resources = new HashMap<String, LocalResource>();
    LOG.info("Files setup as resource");
    while (filesIterator.hasNext()) {
        LocatedFileStatus fileStatus = filesIterator.next();
        // Setup jar for ApplicationMaster
        LocalResource containerJar = Records.newRecord(LocalResource.class);
        ContainerUtils.setupContainerJar(fs, fileStatus.getPath(), containerJar);
        resources.put(fileStatus.getPath().getName(), containerJar);
    }
    LOG.info("container resource setup");

    amContainer.setLocalResources(resources);

    fs.close(); //closing FileSystem interface

    // Setup CLASSPATH for ApplicationMaster
    Map<String, String> appMasterEnv = new HashMap<String, String>();
    ContainerUtils.setupContainerEnv(appMasterEnv, conf);
    amContainer.setEnvironment(appMasterEnv);

    // Set up resource type requirements for ApplicationMaster
    Resource capability = Records.newRecord(Resource.class);
    capability.setMemory(256);
    capability.setVirtualCores(1);

    // Finally, set-up ApplicationSubmissionContext for the application
    //ApplicationSubmissionContext appContext =
    //app.getApplicationSubmissionContext();
    appContext.setApplicationName("Hadoop-Watershed"); // application name
    appContext.setAMContainerSpec(amContainer);
    appContext.setResource(capability);
    appContext.setQueue("default"); // queue

    // Submit application
    LOG.info("Submitting application " + appId);
    yarnClient.submitApplication(appContext);

    LOG.info("Waiting for containers to finish");
    zk.waitUntilExists("/hadoop-watershed/" + appId.toString() + "/done", TimeUnit.MILLISECONDS, 250);
    ApplicationReport appReport = yarnClient.getApplicationReport(appId);
    YarnApplicationState appState = appReport.getYarnApplicationState();
    while (appState != YarnApplicationState.FINISHED && appState != YarnApplicationState.KILLED
            && appState != YarnApplicationState.FAILED) {
        Thread.sleep(100);
        appReport = yarnClient.getApplicationReport(appId);
        appState = appReport.getYarnApplicationState();
    }

    System.out.println("Application " + appId + " finished with" + " state " + appState + " at "
            + appReport.getFinishTime());

    System.out.println("deleting " + appId.toString() + " znode");
    zk.deleteRecursive("/hadoop-watershed/" + appId.toString()); //TODO remove app folder from ZooKeeper
}
From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.tools.Solve.java
@Override
public void run(CommandLine commandLine) throws IOException {
    // parse the algorithm parameters
    Properties properties = new Properties();

    if (commandLine.hasOption("properties")) {
        for (String property : commandLine.getOptionValues("properties")) {
            String[] tokens = property.split("=");

            if (tokens.length == 2) {
                properties.setProperty(tokens[0], tokens[1]);
            } else {
                throw new FrameworkException("malformed property argument");
            }
        }
    }

    if (commandLine.hasOption("epsilon")) {
        properties.setProperty("epsilon", commandLine.getOptionValue("epsilon"));
    }

    int maxEvaluations = Integer.parseInt(commandLine.getOptionValue("numberOfEvaluations"));

    // seed the pseudo-random number generator
    if (commandLine.hasOption("seed")) {
        PRNG.setSeed(Long.parseLong(commandLine.getOptionValue("seed")));
    }

    // parse the runtime frequency
    int runtimeFrequency = 100;

    if (commandLine.hasOption("runtimeFrequency")) {
        runtimeFrequency = Integer.parseInt(commandLine.getOptionValue("runtimeFrequency"));
    }

    // open the resources and begin processing
    Problem problem = null;
    Algorithm algorithm = null;
    ResultFileWriter writer = null;
    File file = new File(commandLine.getOptionValue("output"));

    try {
        if (commandLine.hasOption("problem")) {
            problem = ProblemFactory.getInstance().getProblem(commandLine.getOptionValue("problem"));
        } else {
            problem = createExternalProblem(commandLine);
        }

        if (commandLine.hasOption("test")) {
            runTests(problem, commandLine);
            return;
        }

        try {
            algorithm = AlgorithmFactory.getInstance().getAlgorithm(commandLine.getOptionValue("algorithm"),
                    properties, problem);

            // if the output file exists, delete first to avoid appending
            FileUtils.delete(file);

            try {
                writer = new ResultFileWriter(problem, file);
                algorithm = new RuntimeCollector(algorithm, runtimeFrequency, writer);

                while (!algorithm.isTerminated() && (algorithm.getNumberOfEvaluations() < maxEvaluations)) {
                    algorithm.step();
                }
            } finally {
                if (writer != null) {
                    writer.close();
                }
            }
        } finally {
            if (algorithm != null) {
                algorithm.terminate();
            }
        }
    } catch (ParseException e) {
        throw new IOException(e);
    } finally {
        if (problem != null) {
            problem.close();
        }
    }
}
From source file:bdsup2sub.cli.CommandLineParser.java
private void parseConvertFramerateOption(CommandLine line) throws ParseException {
    if (line.hasOption(CONVERT_FRAMERATE)) {
        convertFpsMode = true;
        if (line.getOptionValues(CONVERT_FRAMERATE).length != 2) {
            throw new ParseException("2 arguments needed for framerate conversion.");
        }
        String value = line.getOptionValues(CONVERT_FRAMERATE)[0];
        if (value.equalsIgnoreCase("auto")) {
            // keep undefined
        } else {
            sourceFrameRate = Optional.of(SubtitleUtils.getFps(value));
            if (sourceFrameRate.get() <= 0) {
                throw new ParseException("Invalid source framerate: " + value);
            }
        }
        value = line.getOptionValues(CONVERT_FRAMERATE)[1];
        targetFrameRate = Optional.of(SubtitleUtils.getFps(value));
        if (targetFrameRate.get() <= 0) {
            throw new ParseException("Invalid target framerate: " + value);
        }
    }
}
From source file:fr.inrialpes.exmo.align.cli.GroupAggreg.java
public void run(String[] args) throws Exception {
    try {
        CommandLine line = parseCommandLine(args);
        if (line == null)
            return; // --help

        // Here deal with command specific arguments
        if (line.hasOption('t'))
            threshold = Double.parseDouble(line.getOptionValue('t'));
        if (line.hasOption('T'))
            cutMethod = line.getOptionValue('T');
        if (line.hasOption('m'))
            aggMethod = line.getOptionValue('m');
        if (line.hasOption('l')) {
            listAlgo = line.getOptionValues('l');
            size = listAlgo.length;
        }
        if (line.hasOption('w'))
            srcDir = line.getOptionValue('w');
        if (line.hasOption('o'))
            dirName = line.getOptionValue('o');
    } catch (ParseException exp) {
        logger.error(exp.getMessage());
        usage();
        System.exit(-1);
    }

    // check that dirName exists and is writable
    File outDir = new File(dirName);
    if (!outDir.isDirectory() || !outDir.canWrite()) {
        logger.error("Directory {} must exist and be writable", dirName);
        throw new AlignmentException("Cannot output to " + dirName);
    }

    // Run it
    iterateDirectories();
}