List of usage examples for java.lang.Boolean.parseBoolean
public static boolean parseBoolean(String s)
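parseBoolean returns true only when the argument is non-null and equal to the string "true", ignoring case; any other input, including null, yields false, and no exception is thrown. A minimal standalone sketch of that behaviour (not taken from the examples below):

// Minimal demonstration of Boolean.parseBoolean semantics
public class ParseBooleanDemo {
    public static void main(String[] args) {
        System.out.println(Boolean.parseBoolean("true"));  // true
        System.out.println(Boolean.parseBoolean("TRUE"));  // true  (comparison ignores case)
        System.out.println(Boolean.parseBoolean("yes"));   // false (anything other than "true" is false)
        System.out.println(Boolean.parseBoolean("1"));     // false
        System.out.println(Boolean.parseBoolean(null));    // false (no exception is thrown)
    }
}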
From source file:com.khubla.jvmbasic.jvmbasicc.JVMBasic.java
/**
 * start here
 * <p>
 * -file src\test\resources\bas\easy\print.bas -verbose true
 * </p>
 */
public static void main(String[] args) {
    try {
        System.out.println("khubla.com jvmBASIC Compiler");
        /*
         * options
         */
        final Options options = new Options();
        Option oo = Option.builder().argName(OUTPUT_OPTION).longOpt(OUTPUT_OPTION).type(String.class).hasArg()
                .required(false).desc("target directory to output to").build();
        options.addOption(oo);
        oo = Option.builder().argName(FILE_OPTION).longOpt(FILE_OPTION).type(String.class).hasArg()
                .required(true).desc("file to compile").build();
        options.addOption(oo);
        oo = Option.builder().argName(VERBOSE_OPTION).longOpt(VERBOSE_OPTION).type(String.class).hasArg()
                .required(false).desc("verbose output").build();
        options.addOption(oo);
        /*
         * parse
         */
        final CommandLineParser parser = new DefaultParser();
        CommandLine cmd = null;
        try {
            cmd = parser.parse(options, args);
        } catch (final Exception e) {
            e.printStackTrace();
            final HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("posix", options);
            System.exit(0);
        }
        /*
         * verbose output?
         */
        final Boolean verbose = Boolean.parseBoolean(cmd.getOptionValue(VERBOSE_OPTION));
        /*
         * get the file
         */
        final String filename = cmd.getOptionValue(FILE_OPTION);
        final String outputDirectory = cmd.getOptionValue(OUTPUT_OPTION);
        if (null != filename) {
            /*
             * filename
             */
            final String basFileName = System.getProperty("user.dir") + "/" + filename;
            final File fl = new File(basFileName);
            if (true == fl.exists()) {
                /*
                 * show the filename
                 */
                System.out.println("Compiling: " + fl.getCanonicalFile());
                /*
                 * compiler
                 */
                final JVMBasicCompiler jvmBasicCompiler = new JVMBasicCompiler();
                /*
                 * compile
                 */
                jvmBasicCompiler.compileToClassfile(basFileName, null, outputDirectory, verbose, true, true);
            } else {
                throw new Exception("Unable to find: '" + basFileName + "'");
            }
        } else {
            throw new Exception("File was not supplied");
        }
    } catch (final Exception e) {
        e.printStackTrace();
    }
}
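In the example above, the value of the verbose option is handed straight to Boolean.parseBoolean: Commons CLI's getOptionValue returns null when the option is missing, and parseBoolean(null) is simply false, so the flag defaults to false without a null check. A reduced sketch of just that pattern (hypothetical "verbose" option name, assuming Apache Commons CLI on the classpath):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;

public class VerboseFlagSketch {
    public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.addOption("verbose", true, "verbose output"); // option takes a value, e.g. -verbose true
        CommandLine cmd = new DefaultParser().parse(options, args);
        // null (option absent) or any value other than "true" both parse to false
        boolean verbose = Boolean.parseBoolean(cmd.getOptionValue("verbose"));
        System.out.println("verbose = " + verbose);
    }
}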
From source file:com.adobe.aem.demomachine.Updates.java
public static void main(String[] args) {
    String rootFolder = null;

    // Command line options for this tool
    Options options = new Options();
    options.addOption("f", true, "Demo Machine root folder");
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("f")) {
            rootFolder = cmd.getOptionValue("f");
        }
    } catch (Exception e) {
        System.exit(-1);
    }

    Properties md5properties = new Properties();
    try {
        URL url = new URL(
                "https://raw.githubusercontent.com/Adobe-Marketing-Cloud/aem-demo-machine/master/conf/checksums.properties");
        InputStream in = url.openStream();
        Reader reader = new InputStreamReader(in, "UTF-8");
        md5properties.load(reader);
        reader.close();
    } catch (Exception e) {
        System.out.println("Error: Cannot connect to GitHub.com to check for updates");
        System.exit(-1);
    }

    System.out.println(AemDemoConstants.HR);
    int nbUpdateAvailable = 0;
    List<String[]> listPaths = Arrays.asList(AemDemoConstants.demoPaths);
    for (String[] path : listPaths) {
        if (path.length == 5) {
            logger.debug(path[1]);
            File pathFolder = new File(rootFolder + (path[1].length() > 0 ? (File.separator + path[1]) : ""));
            if (pathFolder.exists()) {
                String newMd5 = AemDemoUtils.calcMD5HashForDir(pathFolder, Boolean.parseBoolean(path[3]), false);
                logger.debug("MD5 is: " + newMd5);
                String oldMd5 = md5properties.getProperty("demo.md5." + path[0]);
                if (oldMd5 == null || oldMd5.length() == 0) {
                    logger.error("Cannot find MD5 for " + path[0]);
                    System.out.println(path[2] + " : Cannot find MD5 checksum");
                    continue;
                }
                if (newMd5.equals(oldMd5)) {
                    continue;
                } else {
                    System.out.println(path[2] + " : New update available"
                            + (path[0].equals("0") ? " (use 'git pull' to get the latest changes)" : ""));
                    nbUpdateAvailable++;
                }
            } else {
                System.out.println(path[2] + " : Not installed");
            }
        }
    }
    if (nbUpdateAvailable == 0) {
        System.out.println("Your AEM Demo Machine is up to date!");
    }
    System.out.println(AemDemoConstants.HR);
}
From source file:com.cloud.test.utils.TestClient.java
public static void main(String[] args) {
    String host = "http://localhost";
    String port = "8080";
    String testUrl = "/client/test";
    int numThreads = 1;

    try {
        // Parameters
        List<String> argsList = Arrays.asList(args);
        Iterator<String> iter = argsList.iterator();
        while (iter.hasNext()) {
            String arg = iter.next();
            // host
            if (arg.equals("-h")) {
                host = "http://" + iter.next();
            }
            if (arg.equals("-p")) {
                port = iter.next();
            }
            if (arg.equals("-t")) {
                numThreads = Integer.parseInt(iter.next());
            }
            if (arg.equals("-s")) {
                sleepTime = Long.parseLong(iter.next());
            }
            if (arg.equals("-c")) {
                cleanUp = Boolean.parseBoolean(iter.next());
                if (!cleanUp)
                    sleepTime = 0L; // no need to wait if we don't ever cleanup
            }
            if (arg.equals("-r")) {
                repeat = Boolean.parseBoolean(iter.next());
            }
            if (arg.equals("-u")) {
                numOfUsers = Integer.parseInt(iter.next());
            }
            if (arg.equals("-i")) {
                internet = Boolean.parseBoolean(iter.next());
            }
        }

        final String server = host + ":" + port + testUrl;
        s_logger.info("Starting test against server: " + server + " with " + numThreads + " thread(s)");
        if (cleanUp)
            s_logger.info("Clean up is enabled, each test will wait " + sleepTime + " ms before cleaning up");
        if (numOfUsers > 0) {
            s_logger.info("Pre-generating users for test of size : " + numOfUsers);
            users = new String[numOfUsers];
            Random ran = new Random();
            for (int i = 0; i < numOfUsers; i++) {
                users[i] = Math.abs(ran.nextInt()) + "-user";
            }
        }

        for (int i = 0; i < numThreads; i++) {
            new Thread(new Runnable() {
                public void run() {
                    do {
                        String username = null;
                        try {
                            long now = System.currentTimeMillis();
                            Random ran = new Random();
                            if (users != null) {
                                username = users[Math.abs(ran.nextInt()) % numOfUsers];
                            } else {
                                username = Math.abs(ran.nextInt()) + "-user";
                            }
                            NDC.push(username);

                            String url = server + "?email=" + username + "&password=" + username + "&command=deploy";
                            s_logger.info("Launching test for user: " + username + " with url: " + url);
                            HttpClient client = new HttpClient();
                            HttpMethod method = new GetMethod(url);
                            int responseCode = client.executeMethod(method);
                            boolean success = false;
                            String reason = null;
                            if (responseCode == 200) {
                                if (internet) {
                                    s_logger.info("Deploy successful...waiting 5 minutes before SSH tests");
                                    Thread.sleep(300000L); // Wait 5 minutes so the Linux VM can boot up.
                                    s_logger.info("Begin Linux SSH test");
                                    reason = sshTest(method.getResponseHeader("linuxIP").getValue());
                                    if (reason == null) {
                                        s_logger.info("Linux SSH test successful");
                                        s_logger.info("Begin Windows SSH test");
                                        reason = sshWinTest(method.getResponseHeader("windowsIP").getValue());
                                    }
                                }
                                if (reason == null) {
                                    if (internet) {
                                        s_logger.info("Windows SSH test successful");
                                    } else {
                                        s_logger.info("deploy test successful....now cleaning up");
                                        if (cleanUp) {
                                            s_logger.info("Waiting " + sleepTime + " ms before cleaning up vms");
                                            Thread.sleep(sleepTime);
                                        } else {
                                            success = true;
                                        }
                                    }

                                    if (users == null) {
                                        s_logger.info("Sending cleanup command");
                                        url = server + "?email=" + username + "&password=" + username + "&command=cleanup";
                                    } else {
                                        s_logger.info("Sending stop DomR / destroy VM command");
                                        url = server + "?email=" + username + "&password=" + username + "&command=stopDomR";
                                    }
                                    method = new GetMethod(url);
                                    responseCode = client.executeMethod(method);
                                    if (responseCode == 200) {
                                        success = true;
                                    } else {
                                        reason = method.getStatusText();
                                    }
                                } else {
                                    // Just stop but don't destroy the VMs/Routers
                                    s_logger.info("SSH test failed with reason '" + reason + "', stopping VMs");
                                    url = server + "?email=" + username + "&password=" + username + "&command=stop";
                                    responseCode = client.executeMethod(new GetMethod(url));
                                }
                            } else {
                                // Just stop but don't destroy the VMs/Routers
                                reason = method.getStatusText();
                                s_logger.info("Deploy test failed with reason '" + reason + "', stopping VMs");
                                url = server + "?email=" + username + "&password=" + username + "&command=stop";
                                client.executeMethod(new GetMethod(url));
                            }

                            if (success) {
                                s_logger.info("***** Completed test for user : " + username + " in "
                                        + ((System.currentTimeMillis() - now) / 1000L) + " seconds");
                            } else {
                                s_logger.info("##### FAILED test for user : " + username + " in "
                                        + ((System.currentTimeMillis() - now) / 1000L) + " seconds with reason : " + reason);
                            }
                        } catch (Exception e) {
                            s_logger.warn("Error in thread", e);
                            try {
                                HttpClient client = new HttpClient();
                                String url = server + "?email=" + username + "&password=" + username + "&command=stop";
                                client.executeMethod(new GetMethod(url));
                            } catch (Exception e1) {
                            }
                        } finally {
                            NDC.clear();
                        }
                    } while (repeat);
                }
            }).start();
        }
    } catch (Exception e) {
        s_logger.error(e);
    }
}
From source file:de.ee.hezel.PDFCompareMain.java
/**
 * Args[]
 * [1. path]
 * [2. path]
 * -output [true/false]
 * -visualise [output path]
 *
 * return value
 * 1 = not enough parameters
 * 2 =
 *
 * @param args
 * @return int
 */
public static void main(String[] args) {
    // not enough parameters
    if (args.length < 2) {
        System.out.println("usage: java -jar PDFCompare.jar "
                + "<path 1> <path 2> [-output <true/false>] [-visualise <path 3>] [-log <path 4>] [-compare <compare type>] [-prefix <pdf prefix>]"
                + newline + newline + "<path 1> = path with PDF documents from old version" + newline
                + "<path 2> = path with PDF documents from new version" + newline
                + "[output] = console output" + newline
                + "[visualise] = output folder for visualizing differences" + newline
                + "[log] = path for log files and difference images" + newline
                + "[compare type] = type of comparison <\"SIMPLE\" | \"STRUCTURAL\" | \"VISUAL\">" + newline
                + "[prefix] = compare only pdfs where the name starts with this prefix" + newline);
        return;
    }

    boolean output = false;
    File targetPath = null, logPath = null;
    int compareType = 1; // simple (Modes: SIMPLE/STRUCTURAL/VISUAL)
    String prefix = null;

    // read the incoming arguments
    for (int i = 2; i < args.length; i++) {
        if (args[i].equals("-output")) {
            output = Boolean.parseBoolean(args[++i]);
        } else if (args[i].equals("-visualise")) {
            targetPath = new File(args[++i]);
        } else if (args[i].equals("-log")) {
            logPath = new File(args[++i]);
        } else if (args[i].equals("-compare")) {
            String nextArg = args[++i];
            if (nextArg.equalsIgnoreCase("STRUCTURAL"))
                compareType = 2;
            else if (nextArg.equalsIgnoreCase("VISUAL"))
                compareType = 3;
        } else if ((args[i]).equals("-prefix")) {
            prefix = args[++i];
        }
    }

    // create or clear the output path
    checkOutputPath(targetPath);

    // configure log4j
    Properties props = getLog4jProperties(output, logPath);
    LogManager.resetConfiguration();
    PropertyConfigurator.configure(props);

    // check the input paths
    File path1 = new File(args[0]);
    File path2 = new File(args[1]);
    if (!path1.isDirectory() || !path2.isDirectory()) {
        if (!path1.isDirectory())
            log.error("[Path 1] does not exist");
        if (!path2.isDirectory())
            log.error("[Path 2] does not exist");
    }

    // compare the files in path 1 with the files in path 2 and
    // save the results in path 3 (if given)
    PDFComparator pdfComparer = new PDFComparator(logPath, compareType);
    boolean foundDifference = pdfComparer.run(path1, path2, targetPath, prefix);

    // exit parameter (interesting for jenkins)
    System.exit(foundDifference ? 1 : 0);
}
From source file:com.hazelcast.jet.benchmark.trademonitor.FlinkTradeMonitor.java
public static void main(String[] args) throws Exception {
    if (args.length != 13) {
        System.err.println("Usage:");
        System.err.println(" " + FlinkTradeMonitor.class.getSimpleName()
                + " <bootstrap.servers> <topic> <offset-reset> <maxLagMs> <windowSizeMs> <slideByMs> <outputPath> <checkpointInterval> <checkpointUri> <doAsyncSnapshot> <stateBackend> <kafkaParallelism> <windowParallelism>");
        System.err.println("<stateBackend> - fs | rocksDb");
        System.exit(1);
    }
    String brokerUri = args[0];
    String topic = args[1];
    String offsetReset = args[2];
    int lagMs = Integer.parseInt(args[3]);
    int windowSize = Integer.parseInt(args[4]);
    int slideBy = Integer.parseInt(args[5]);
    String outputPath = args[6];
    int checkpointInt = Integer.parseInt(args[7]);
    String checkpointUri = args[8];
    boolean doAsyncSnapshot = Boolean.parseBoolean(args[9]);
    String stateBackend = args[10];
    int kafkaParallelism = Integer.parseInt(args[11]);
    int windowParallelism = Integer.parseInt(args[12]);

    System.out.println("bootstrap.servers: " + brokerUri);
    System.out.println("topic: " + topic);
    System.out.println("offset-reset: " + offsetReset);
    System.out.println("lag: " + lagMs);
    System.out.println("windowSize: " + windowSize);
    System.out.println("slideBy: " + slideBy);
    System.out.println("outputPath: " + outputPath);
    System.out.println("checkpointInt: " + checkpointInt);
    System.out.println("checkpointUri: " + checkpointUri);
    System.out.println("doAsyncSnapshot: " + doAsyncSnapshot);
    System.out.println("stateBackend: " + stateBackend);
    System.out.println("kafkaParallelism: " + kafkaParallelism);
    System.out.println("windowParallelism: " + windowParallelism);

    // set up the execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    if (checkpointInt > 0) {
        env.enableCheckpointing(checkpointInt);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(checkpointInt);
    }
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 5000));
    if ("fs".equalsIgnoreCase(stateBackend)) {
        env.setStateBackend(new FsStateBackend(checkpointUri, doAsyncSnapshot));
    } else if ("rocksDb".equalsIgnoreCase(stateBackend)) {
        env.setStateBackend(new RocksDBStateBackend(checkpointUri));
    } else {
        System.err.println("Bad value for stateBackend: " + stateBackend);
        System.exit(1);
    }

    DeserializationSchema<Trade> schema = new AbstractDeserializationSchema<Trade>() {
        TradeDeserializer deserializer = new TradeDeserializer();

        @Override
        public Trade deserialize(byte[] message) throws IOException {
            return deserializer.deserialize(null, message);
        }
    };

    DataStreamSource<Trade> trades = env
            .addSource(new FlinkKafkaConsumer010<>(topic, schema, getKafkaProperties(brokerUri, offsetReset)))
            .setParallelism(kafkaParallelism);
    AssignerWithPeriodicWatermarks<Trade> timestampExtractor = new BoundedOutOfOrdernessTimestampExtractor<Trade>(
            Time.milliseconds(lagMs)) {
        @Override
        public long extractTimestamp(Trade element) {
            return element.getTime();
        }
    };

    WindowAssigner window = windowSize == slideBy
            ? TumblingEventTimeWindows.of(Time.milliseconds(windowSize))
            : SlidingEventTimeWindows.of(Time.milliseconds(windowSize), Time.milliseconds(slideBy));

    trades.assignTimestampsAndWatermarks(timestampExtractor).keyBy((Trade t) -> t.getTicker()).window(window)
            .aggregate(new AggregateFunction<Trade, MutableLong, Long>() {
                @Override
                public MutableLong createAccumulator() {
                    return new MutableLong();
                }

                @Override
                public MutableLong add(Trade value, MutableLong accumulator) {
                    accumulator.increment();
                    return accumulator;
                }

                @Override
                public MutableLong merge(MutableLong a, MutableLong b) {
                    a.setValue(Math.addExact(a.longValue(), b.longValue()));
                    return a;
                }

                @Override
                public Long getResult(MutableLong accumulator) {
                    return accumulator.longValue();
                }
            }, new WindowFunction<Long, Tuple5<String, String, Long, Long, Long>, String, TimeWindow>() {
                @Override
                public void apply(String key, TimeWindow window, Iterable<Long> input,
                        Collector<Tuple5<String, String, Long, Long, Long>> out) throws Exception {
                    long timeMs = System.currentTimeMillis();
                    long count = input.iterator().next();
                    long latencyMs = timeMs - window.getEnd() - lagMs;
                    out.collect(new Tuple5<>(Instant.ofEpochMilli(window.getEnd()).atZone(ZoneId.systemDefault())
                            .toLocalTime().toString(), key, count, timeMs, latencyMs));
                }
            }).setParallelism(windowParallelism).writeAsCsv(outputPath, WriteMode.OVERWRITE);

    env.execute("Trade Monitor Example");
}
From source file:hyperloglog.tools.HyperLogLogCLI.java
public static void main(String[] args) {
    Options options = new Options();
    addOptions(options);

    CommandLineParser parser = new BasicParser();
    CommandLine cli = null;
    long n = 0;
    long seed = 123;
    EncodingType enc = EncodingType.SPARSE;
    int p = 14;
    int hb = 64;
    boolean bitPack = true;
    boolean noBias = true;
    int unique = -1;
    String filePath = null;
    BufferedReader br = null;
    String outFile = null;
    String inFile = null;
    FileOutputStream fos = null;
    DataOutputStream out = null;
    FileInputStream fis = null;
    DataInputStream in = null;
    try {
        cli = parser.parse(options, args);

        if (!(cli.hasOption('n') || cli.hasOption('f') || cli.hasOption('d'))) {
            System.out.println("Example usage: hll -n 1000 " + "<OR> hll -f /tmp/input.txt "
                    + "<OR> hll -d -i /tmp/out.hll");
            usage(options);
            return;
        }

        if (cli.hasOption('n')) {
            n = Long.parseLong(cli.getOptionValue('n'));
        }

        if (cli.hasOption('e')) {
            String value = cli.getOptionValue('e');
            if (value.equals(EncodingType.DENSE.name())) {
                enc = EncodingType.DENSE;
            }
        }

        if (cli.hasOption('p')) {
            p = Integer.parseInt(cli.getOptionValue('p'));
            if (p < 4 || p > 16) {
                System.out.println("Warning! Out-of-range value specified for p. Using p=14.");
                p = 14;
            }
        }

        if (cli.hasOption('h')) {
            hb = Integer.parseInt(cli.getOptionValue('h'));
        }

        if (cli.hasOption('c')) {
            noBias = Boolean.parseBoolean(cli.getOptionValue('c'));
        }

        if (cli.hasOption('b')) {
            bitPack = Boolean.parseBoolean(cli.getOptionValue('b'));
        }

        if (cli.hasOption('f')) {
            filePath = cli.getOptionValue('f');
            br = new BufferedReader(new FileReader(new File(filePath)));
        }

        if (filePath != null && cli.hasOption('n')) {
            System.out.println("'-f' (input file) specified. Ignoring -n.");
        }

        if (cli.hasOption('s')) {
            if (cli.hasOption('o')) {
                outFile = cli.getOptionValue('o');
                fos = new FileOutputStream(new File(outFile));
                out = new DataOutputStream(fos);
            } else {
                System.err.println("Specify output file. Example usage: hll -s -o /tmp/out.hll");
                usage(options);
                return;
            }
        }

        if (cli.hasOption('d')) {
            if (cli.hasOption('i')) {
                inFile = cli.getOptionValue('i');
                fis = new FileInputStream(new File(inFile));
                in = new DataInputStream(fis);
            } else {
                System.err.println("Specify input file. Example usage: hll -d -i /tmp/in.hll");
                usage(options);
                return;
            }
        }

        // return after deserialization
        if (fis != null && in != null) {
            long start = System.currentTimeMillis();
            HyperLogLog deserializedHLL = HyperLogLogUtils.deserializeHLL(in);
            long end = System.currentTimeMillis();
            System.out.println(deserializedHLL.toString());
            System.out.println("Count after deserialization: " + deserializedHLL.count());
            System.out.println("Deserialization time: " + (end - start) + " ms");
            return;
        }

        // construct hll and serialize it if required
        HyperLogLog hll = HyperLogLog.builder().enableBitPacking(bitPack).enableNoBias(noBias).setEncoding(enc)
                .setNumHashBits(hb).setNumRegisterIndexBits(p).build();

        if (br != null) {
            Set<String> hashset = new HashSet<String>();
            String line;
            while ((line = br.readLine()) != null) {
                hll.addString(line);
                hashset.add(line);
            }
            n = hashset.size();
        } else {
            Random rand = new Random(seed);
            for (int i = 0; i < n; i++) {
                if (unique < 0) {
                    hll.addLong(rand.nextLong());
                } else {
                    int val = rand.nextInt(unique);
                    hll.addLong(val);
                }
            }
        }

        long estCount = hll.count();
        System.out.println("Actual count: " + n);
        System.out.println(hll.toString());
        System.out.println("Relative error: " + HyperLogLogUtils.getRelativeError(n, estCount) + "%");

        if (fos != null && out != null) {
            long start = System.currentTimeMillis();
            HyperLogLogUtils.serializeHLL(out, hll);
            long end = System.currentTimeMillis();
            System.out.println("Serialized hyperloglog to " + outFile);
            System.out.println("Serialized size: " + out.size() + " bytes");
            System.out.println("Serialization time: " + (end - start) + " ms");
            out.close();
        }
    } catch (ParseException e) {
        System.err.println("Invalid parameter.");
        usage(options);
    } catch (NumberFormatException e) {
        System.err.println("Invalid type for parameter.");
        usage(options);
    } catch (FileNotFoundException e) {
        System.err.println("Specified file not found.");
        usage(options);
    } catch (IOException e) {
        System.err.println("Exception occurred while reading file.");
        usage(options);
    }
}
From source file:com.joseflavio.iperoxo.IpeRoxo.java
/**
 * Entry point.
 */
public static void main(String[] args) {
    try {
        log.info(getMensagem(null, "Log.Inicio"));
        executarConfiguracaoGeral();
        executarConfiguracao(args);
        executarFonteDeDados();
        if (Boolean.parseBoolean(getPropriedade("IpeRoxo.FinalizarAposDataSource"))) {
            log.info(getMensagem(null, "Log.FinalizandoAposDataSource"));
            System.exit(0);
        }
        executarInicializacao();
        executarCopaiba();
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        System.exit(1);
    }
}
From source file:com.ict.dtube.example.operation.Consumer.java
public static void main(String[] args) throws InterruptedException, MQClientException {
    CommandLine commandLine = buildCommandline(args);
    if (commandLine != null) {
        String group = commandLine.getOptionValue('g');
        String topic = commandLine.getOptionValue('t');
        String subscription = commandLine.getOptionValue('s');
        final String returnFailedHalf = commandLine.getOptionValue('f');

        DtubePushConsumer consumer = new DtubePushConsumer(group);
        consumer.setInstanceName(Long.toString(System.currentTimeMillis()));
        consumer.subscribe(topic, subscription);

        consumer.registerMessageListener(new MessageListenerConcurrently() {
            AtomicLong consumeTimes = new AtomicLong(0);

            @Override
            public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
                    ConsumeConcurrentlyContext context) {
                long currentTimes = this.consumeTimes.incrementAndGet();
                System.out.printf("%-8d %s\n", currentTimes, msgs);
                if (Boolean.parseBoolean(returnFailedHalf)) {
                    if ((currentTimes % 2) == 0) {
                        return ConsumeConcurrentlyStatus.RECONSUME_LATER;
                    }
                }
                return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
            }
        });

        consumer.start();
        System.out.println("Consumer Started.");
    }
}
From source file:com.alibaba.rocketmq.example.operation.Consumer.java
public static void main(String[] args) throws InterruptedException, MQClientException {
    CommandLine commandLine = buildCommandline(args);
    if (commandLine != null) {
        String group = commandLine.getOptionValue('g');
        String topic = commandLine.getOptionValue('t');
        String subscription = commandLine.getOptionValue('s');
        final String returnFailedHalf = commandLine.getOptionValue('f');

        DefaultMQPushConsumer consumer = new DefaultMQPushConsumer(group);
        consumer.setInstanceName(Long.toString(System.currentTimeMillis()));
        consumer.subscribe(topic, subscription);

        consumer.registerMessageListener(new MessageListenerConcurrently() {
            AtomicLong consumeTimes = new AtomicLong(0);

            @Override
            public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
                    ConsumeConcurrentlyContext context) {
                long currentTimes = this.consumeTimes.incrementAndGet();
                System.out.printf("%-8d %s\n", currentTimes, msgs);
                if (Boolean.parseBoolean(returnFailedHalf)) {
                    if ((currentTimes % 2) == 0) {
                        return ConsumeConcurrentlyStatus.RECONSUME_LATER;
                    }
                }
                return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
            }
        });

        consumer.start();
        System.out.println("Consumer Started.");
    }
}
From source file:com.avego.oauth.migration.OauthDataMigrator.java
/**
 * This migrates spring security oauth 2 token data in m6 form to 1.0.5 release form
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    if (args.length < 3) {
        System.err.println("Usage <db_jdbc_url> <db_user> <db_pw>");
        System.err.println("Or <db_jdbc_url> <db_user> <db_pw>"
                + " <remove_refresh_tokens> <serialize_new_token_values> <oauth_access_token_table> <oauth_refresh_token_table>");
        System.exit(1);
    }

    Boolean removeRefreshTokens = Boolean.FALSE;
    if (args.length > 3) {
        removeRefreshTokens = Boolean.parseBoolean(args[3]);
    }

    Boolean serializeNewTokenValues = Boolean.FALSE;
    if (args.length > 4) {
        serializeNewTokenValues = Boolean.parseBoolean(args[4]);
    }

    Map<String, Object> params = new HashMap<String, Object>(3);
    params.put(JdbcOauthMigrationDao.JDBC_URL_KEY, args[0]);
    params.put(JdbcOauthMigrationDao.USER_KEY, args[1]);
    params.put(JdbcOauthMigrationDao.PASS_KEY, args[2]);
    params.put(REMOVE_REFRESH_TOKENS_PARAM, removeRefreshTokens);
    params.put(SERIALIZE_NEW_TOKEN_VALUES_PARAM, serializeNewTokenValues);

    if (args.length > 5) {
        params.put(JdbcOauthMigrationDao.ACCESS_TOKEN_TABLE, args[5]);
    }
    if (args.length > 6) {
        params.put(JdbcOauthMigrationDao.REFRESH_TOKEN_TABLE, args[6]);
    }

    OauthDataMigrator migrator = new OauthDataMigrator(params);
    migrator.migrateData();
}