Usage examples for java.lang.System.setProperty, collected from open-source projects
public static String setProperty(String key, String value)

Sets the system property indicated by key to value, and returns the previous value of the property, or null if it did not have one.
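Before the project examples below, a minimal self-contained sketch of the call's semantics; the property name demo.mode is made up for illustration.

    public class SetPropertyDemo {
        public static void main(String[] args) {
            // No previous value: setProperty returns null.
            String previous = System.setProperty("demo.mode", "test");
            System.out.println(previous);                              // null

            // Overwriting returns the old value.
            previous = System.setProperty("demo.mode", "prod");
            System.out.println(previous);                              // test

            // getProperty supports a default for absent keys.
            System.out.println(System.getProperty("demo.mode", "fallback")); // prod
        }
    }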
From source file: com.alibaba.rocketmq.tools.command.consumer.ConsumerStatusSubCommand.java

    public static void main(String[] args) {
        System.setProperty(MixAll.NAMESRV_ADDR_PROPERTY, "127.0.0.1:9876");
        MQAdminStartup.main(new String[] {
                new ConsumerStatusSubCommand().commandName(),
                "-g", "benchmark_consumer"
        });
    }
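RocketMQ's admin tooling resolves the name-server address from this system property when none is passed on the command line. A hedged sketch of the same idea with the constant spelled out, assuming MixAll.NAMESRV_ADDR_PROPERTY is "rocketmq.namesrv.addr" (verify against your RocketMQ version):

    // Assumption: MixAll.NAMESRV_ADDR_PROPERTY == "rocketmq.namesrv.addr".
    // Setting it before startup has the same effect as passing "-n 127.0.0.1:9876".
    System.setProperty("rocketmq.namesrv.addr", "127.0.0.1:9876");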
From source file: com.da.daum.DaumCafeLevelUpList.java

    public static void main(String[] args) {
        DaumCafeLevelUpList cfl = new DaumCafeLevelUpList();
        log.warn("Logging Works");
        // Route commons-logging to SimpleLog and enable wire-level HttpClient debugging.
        System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.SimpleLog");
        System.setProperty("org.apache.commons.logging.simplelog.showdatetime", "true");
        System.setProperty("org.apache.commons.logging.simplelog.log.httpclient.wire", "debug");
        System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.commons.httpclient", "debug");
        try {
            String nPage = "1";
            String p_author_id = "bluesman";
            String p_gnum = "";     // ga
            String p_host_url = ""; // ga
            if (args.length > 0) {
                nPage = args[0];
            }
            if (args.length > 1) {
                p_author_id = args[1];
            }
            if (args.length > 2) {
                STORY_DIR = args[2];
            }
            if (args.length > 3) {
                p_host_url = args[3];
                SO_URL = p_host_url;
            }
            cfl.executeURL(nPage, p_author_id, p_gnum);
        } catch (IOException | URISyntaxException e) {
            // ClientProtocolException is an IOException, so one catch covers both.
            e.printStackTrace();
        }
    }
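Commons-logging reads these properties when each Log is created, so they only affect loggers obtained afterwards; in the example above the class-level log predates the setProperty calls (it already printed "Logging Works"), while HttpClient's wire loggers are created later and do pick them up. A minimal sketch of the safe ordering, using the real SimpleLog property names:

    // Sketch: configure SimpleLog *before* any LogFactory.getLog(...) call.
    System.setProperty("org.apache.commons.logging.Log",
            "org.apache.commons.logging.impl.SimpleLog");
    System.setProperty("org.apache.commons.logging.simplelog.showdatetime", "true");
    // Per-category level: org.apache.commons.logging.simplelog.log.<category>
    System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.commons.httpclient", "debug");

    org.apache.commons.logging.Log log =
            org.apache.commons.logging.LogFactory.getLog("org.apache.commons.httpclient");
    log.debug("now visible at debug level");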
From source file: it.isislab.dmason.util.SystemManagement.Worker.thrower.DMasonWorker.java

    public static void main(String[] args) {
        RuntimeMXBean bean = ManagementFactory.getRuntimeMXBean();
        // Get the name of the running Java virtual machine. It returns
        // something like 6460@AURORA, where the value before the @ is the PID.
        String jvmName = bean.getName();
        // Used for log4j properties
        System.setProperty("logfile.name", "worker" + jvmName);
        // Used for log4j properties
        System.setProperty("steplog.name", "workerStep" + jvmName);

        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH_mm_ss_SS");
        Date date = new Date();
        // Use the formatted timestamp (the original discarded the format()
        // result and used the deprecated date.toLocaleString() instead).
        System.setProperty("timestamp", dateFormat.format(date));
        System.setProperty("paramsfile.name", "params");

        try {
            File logPath = new File("Logs/workers");
            if (logPath.exists())
                FileUtils.cleanDirectory(logPath);
        } catch (IOException e) {
            e.printStackTrace();
        }

        logger = Logger.getLogger(DMasonWorker.class.getCanonicalName());
        logger.debug("StartWorker " + version);

        autoStart = false;
        connect = false;
        ip = null;
        port = null;
        String topic = "";
        updated = false;
        isBatch = false;
        topicPrefix = "";

        if (args.length == 0) {
            // Force waiting for beacon (requires ActiveMQWrapper).
            autoStart = false;
            connect = true;
        } else if (args.length == 2) {
            // Launched with IP and port.
            ip = args[0];
            port = args[1];
            autoStart = true;
            connect = true;
        } else if (args.length == 4) {
            // Used by D-Mason to restart a worker after update, batch execution, or reset.
            autoStart = true;
            ip = args[0];
            port = args[1];
            topic = args[2];
            if (args[3].equals("update")) {
                updated = true;
            }
            if (args[3].equals("reset")) {
                updated = false;
                isBatch = false;
            }
            if (args[3].contains("Batch")) {
                updated = false;
                isBatch = true;
                topicPrefix = args[3];
            }
        } else {
            System.out.println("Usage: StartWorker IP PORT");
        }

        DMasonWorker worker = new DMasonWorker(ip, port, topic);
        boolean connected = worker.startConnection();
        if (connected) {
            logger.debug("CONNECTED:");
        } else {
            logger.info("CONNECTION FAILED:");
        }
        logger.debug("   IP     : " + worker.ipAddress.getIPaddress());
        logger.debug("   Port   : " + worker.ipAddress.getPort());
        logger.debug("   Prefix : " + DMasonWorker.topicPrefix);
        logger.debug("   Topic  : " + worker.myTopic);
    }
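Properties such as logfile.name only have an effect because log4j 1.x expands ${...} references against system properties when it parses its configuration, so they must be set before log4j initializes. A minimal sketch of the pattern; the appender name and file path are illustrative, not taken from D-Mason's actual configuration:

    // Must run before the first Logger is created, or log4j will have
    // already expanded its configuration.
    System.setProperty("logfile.name", "worker6460@AURORA");

    // log4j.properties can then reference the property, e.g.:
    //   log4j.appender.FILE=org.apache.log4j.FileAppender
    //   log4j.appender.FILE.File=Logs/workers/${logfile.name}.log
    Logger logger = Logger.getLogger("example");
    logger.info("written to Logs/workers/worker6460@AURORA.log");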
From source file: com.da.daum.DaumCafeBungImgList.java

    public static void main(String[] args) {
        DaumCafeBungImgList cfl = new DaumCafeBungImgList();
        log.warn("Logging Works");
        // Identical SimpleLog / HttpClient wire-debug setup as DaumCafeLevelUpList above.
        System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.SimpleLog");
        System.setProperty("org.apache.commons.logging.simplelog.showdatetime", "true");
        System.setProperty("org.apache.commons.logging.simplelog.log.httpclient.wire", "debug");
        System.setProperty("org.apache.commons.logging.simplelog.log.org.apache.commons.httpclient", "debug");
        // ... argument handling and the executeURL call are the same as in
        // DaumCafeLevelUpList.main above.
    }
From source file: com.finderbots.miner2.pinterest.PinterestCrawlAndMinerTool.java

    public static void main(String[] args) {
        Options options = new Options();
        CmdLineParser parser = new CmdLineParser(options);

        try {
            parser.parseArgument(args);
        } catch (CmdLineException e) {
            System.err.println(e.getMessage());
            printUsageAndExit(parser);
        }

        // Before we get too far along, see if the domain looks valid.
        String domain = options.getDomain();
        String urlsFile = options.getUrlsFile();
        if (domain != null) {
            validateDomain(domain, parser);
        } else if (urlsFile == null) {
            System.err.println(
                    "Either a target domain should be specified or a file with a list of urls needs to be provided");
            printUsageAndExit(parser);
        }

        if (domain != null && urlsFile != null) {
            System.out.println("Warning: Both domain and urls file list provided - using domain");
        }

        String outputDirName = options.getOutputDir();
        if (options.isDebugLogging()) {
            System.setProperty("bixo.root.level", "DEBUG");
        } else {
            System.setProperty("bixo.root.level", "INFO");
        }
        if (options.getLoggingAppender() != null) {
            // Set console vs. DRFA vs. something else.
            System.setProperty("bixo.appender", options.getLoggingAppender());
        }

        String logsDir = options.getLogsDir();
        if (!logsDir.endsWith("/")) {
            logsDir = logsDir + "/";
        }

        try {
            JobConf conf = new JobConf();
            Path outputPath = new Path(outputDirName);
            FileSystem fs = outputPath.getFileSystem(conf);

            // First check if the user wants to clean.
            if (options.isCleanOutputDir() && fs.exists(outputPath)) {
                fs.delete(outputPath, true);
            }

            // If the user is starting from scratch, set up the output
            // directory and create an initial urls subdir.
            if (!fs.exists(outputPath)) {
                fs.mkdirs(outputPath);

                // Create a "0-<timestamp>" sub-directory with just a /crawldb subdir.
                // In the /crawldb dir the input file will have a single URL for the target domain.
                Path curLoopDir = CrawlDirUtils.makeLoopDir(fs, outputPath, 0);
                String curLoopDirName = curLoopDir.getName();
                setLoopLoggerFile(logsDir + curLoopDirName, 0);
                Path crawlDbPath = new Path(curLoopDir, CrawlConfig.CRAWLDB_SUBDIR_NAME);

                if (domain != null) {
                    importOneDomain(domain, crawlDbPath, conf);
                } else {
                    importUrls(urlsFile, crawlDbPath);
                }
            }

            Path latestDirPath = CrawlDirUtils.findLatestLoopDir(fs, outputPath);
            if (latestDirPath == null) {
                System.err.println("No previous cycle output dirs exist in " + outputDirName);
                printUsageAndExit(parser);
            }

            Path crawlDbPath = new Path(latestDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);

            // Set up the start and end loop counts.
            int startLoop = CrawlDirUtils.extractLoopNumber(latestDirPath);
            int endLoop = startLoop + options.getNumLoops();

            // Set up the UserAgent for the fetcher.
            UserAgent userAgent = new UserAgent(options.getAgentName(), CrawlConfig.EMAIL_ADDRESS,
                    CrawlConfig.WEB_ADDRESS);

            // You also get to customize the FetcherPolicy.
            FetcherPolicy defaultPolicy;
            if (options.getCrawlDuration() != 0) {
                defaultPolicy = new AdaptiveFetcherPolicy(options.getEndCrawlTime(), options.getCrawlDelay());
            } else {
                defaultPolicy = new FetcherPolicy();
            }
            defaultPolicy.setMaxContentSize(CrawlConfig.MAX_CONTENT_SIZE);
            defaultPolicy.setRequestTimeout(10L * 1000L); // 10 seconds

            // COMPLETE for crawling a single site, EFFICIENT for many sites.
            if (options.getCrawlPolicy().equals(Options.IMPOLITE_CRAWL_POLICY)) {
                defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.IMPOLITE);
            } else if (options.getCrawlPolicy().equals(Options.EFFICIENT_CRAWL_POLICY)) {
                defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.EFFICIENT);
            } else if (options.getCrawlPolicy().equals(Options.COMPLETE_CRAWL_POLICY)) {
                defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.COMPLETE);
            }

            // It is a good idea to set up a crawl duration when running long crawls, as you
            // may end up in situations where the fetch slows down due to a 'long tail', and
            // by specifying a crawl duration you know exactly when the crawl will end.
            int crawlDurationInMinutes = options.getCrawlDuration();
            boolean hasEndTime = crawlDurationInMinutes != Options.NO_CRAWL_DURATION;
            long targetEndTime = hasEndTime
                    ? System.currentTimeMillis() + (crawlDurationInMinutes * CrawlConfig.MILLISECONDS_PER_MINUTE)
                    : FetcherPolicy.NO_CRAWL_END_TIME;

            // By setting up a url filter we only deal with urls that we want,
            // instead of all the urls that we extract.
            BaseUrlFilter urlFilter = null;
            List<String> patterns = null;
            String regexUrlFiltersFile = options.getRegexUrlFiltersFile();
            if (regexUrlFiltersFile != null) {
                patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlFiltersFile);
            } else {
                patterns = RegexUrlDatumFilter.getDefaultUrlFilterPatterns();
                if (domain != null) {
                    String domainPatterStr = "+(?i)^(http|https)://([a-z0-9]*\\.)*" + domain;
                    patterns.add(domainPatterStr);
                } else {
                    String protocolPatterStr = "+(?i)^(http|https)://*";
                    patterns.add(protocolPatterStr);
                    // Log.warn("Defaulting to basic url regex filtering (just suffix and protocol)");
                }
            }
            urlFilter = new RegexUrlDatumFilter(patterns.toArray(new String[patterns.size()]));

            // Get a list of patterns which tell the miner which URLs to include or exclude.
            patterns.clear();
            RegexUrlStringFilter urlsToMineFilter = null;
            String regexUrlsToMineFiltersFile = options.getRegexUrlToMineFile();
            AnalyzeHtml analyzer = null;
            if (regexUrlsToMineFiltersFile != null) {
                patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlsToMineFiltersFile);
                urlsToMineFilter = new RegexUrlStringFilter(patterns.toArray(new String[patterns.size()]));
                analyzer = new AnalyzeHtml(urlsToMineFilter);
            }

            // OK, now we're ready to start looping, since we've got our current settings.
            for (int curLoop = startLoop + 1; curLoop <= endLoop; curLoop++) {
                // Adjust target end time, if appropriate.
                if (hasEndTime) {
                    int remainingLoops = (endLoop - curLoop) + 1;
                    long now = System.currentTimeMillis();
                    long perLoopTime = (targetEndTime - now) / remainingLoops;
                    defaultPolicy.setCrawlEndTime(now + perLoopTime);
                }

                Path curLoopDirPath = CrawlDirUtils.makeLoopDir(fs, outputPath, curLoop);
                String curLoopDirName = curLoopDirPath.getName();
                setLoopLoggerFile(logsDir + curLoopDirName, curLoop);

                Flow flow = PinterestCrawlAndMinerWorkflow.createFlow(curLoopDirPath, crawlDbPath,
                        defaultPolicy, userAgent, urlFilter, analyzer, options);
                flow.complete();

                // Writing out .dot files is a good way to verify your flows.
                flow.writeDOT("valid-flow.dot");

                // Update crawlDbPath to point to the latest crawl db.
                crawlDbPath = new Path(curLoopDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);
            }
        } catch (PlannerException e) {
            e.writeDOT("failed-flow.dot");
            System.err.println("PlannerException: " + e.getMessage());
            e.printStackTrace(System.err);
            System.exit(-1);
        } catch (Throwable t) {
            System.err.println("Exception running tool: " + t.getMessage());
            t.printStackTrace(System.err);
            System.exit(-1);
        }
    }
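The bixo.root.level and bixo.appender properties follow the same log4j substitution pattern as earlier examples: the library's log4j configuration resolves them when logging initializes. A hedged sketch of how a consumer of such a property might resolve it with log4j 1.x; the INFO default mirrors the code above, not Bixo's documented behavior:

    // Resolve a root log level from a system property (log4j 1.x), falling
    // back to INFO when the property is unset or unparseable.
    org.apache.log4j.Level rootLevel = org.apache.log4j.Level
            .toLevel(System.getProperty("bixo.root.level"), org.apache.log4j.Level.INFO);
    org.apache.log4j.Logger.getRootLogger().setLevel(rootLevel);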
From source file: com.finderbots.miner2.tomatoes.RTCriticsCrawlAndMinerTool.java

    public static void main(String[] args) {
        // Argument parsing, the "bixo.root.level" / "bixo.appender" property
        // setup, output/crawldb directory handling, UserAgent and
        // FetcherPolicy configuration, and URL filtering are identical to
        // PinterestCrawlAndMinerTool.main above. The tool differs only in the
        // analyzer it builds and the workflow it runs:

        patterns.clear();
        RegexUrlStringFilter urlsToMineFilter = null;
        String regexUrlsToMineFiltersFile = options.getRegexUrlToMineFile();
        MineRTCriticsPreferences prefsAnalyzer = null;
        if (regexUrlsToMineFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlsToMineFiltersFile);
            urlsToMineFilter = new RegexUrlStringFilter(patterns.toArray(new String[patterns.size()]));
            prefsAnalyzer = new MineRTCriticsPreferences(urlsToMineFilter);
        }

        for (int curLoop = startLoop + 1; curLoop <= endLoop; curLoop++) {
            // ... per-loop end-time adjustment and loop-dir setup as above ...
            Flow flow = RTCriticsCrawlAndMinerWorkflow.createFlow(curLoopDirPath, crawlDbPath,
                    defaultPolicy, userAgent, urlFilter, prefsAnalyzer, options);
            flow.complete();
            flow.writeDOT("valid-flow.dot");
            crawlDbPath = new Path(curLoopDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);
        }
        // ... PlannerException / Throwable handling as above ...
    }
From source file: com.opengamma.engine.calcnode.CalculationNodeProcess.java

    /**
     * Starts a calculation node, retrieving configuration from the given URL.
     *
     * @param url the URL to use
     */
    public static void main(final String url) {
        s_logger.info("Using configuration URL {}", url);
        String configuration = getConfigurationXml(url);
        if (configuration == null) {
            for (int i = 0; i < CONFIGURATION_RETRY; i++) {
                s_logger.warn("Failed to retrieve configuration - retrying");
                sleep(1);
                configuration = getConfigurationXml(url);
                if (configuration != null) {
                    break;
                }
            }
            if (configuration == null) {
                s_logger.error("No response from {}", url);
                System.exit(1);
            }
        }

        // Create and start the Spring config.
        System.setProperty("opengamma.engine.calcnode.baseurl", getBaseUrl(url));
        setConnectionDefaults(url);
        if (startContext(configuration)) {
            s_logger.info("Calculation node started");
        } else {
            s_logger.error("Couldn't start calculation node");
            System.exit(1);
        }

        // Terminate if the configuration changes - the O/S will restart us.
        int retry = 0;
        do {
            sleep(CONFIGURATION_POLL_PERIOD);
            final String newConfiguration = getConfigurationXml(url);
            if (newConfiguration != null) {
                if (!configuration.equals(newConfiguration)) {
                    s_logger.info("Configuration at {} has changed", url);
                    System.exit(0);
                }
                retry = 0;
            } else {
                switch (++retry) {
                case 1:
                    s_logger.debug("No response from configuration at {}", url);
                    break;
                case 2:
                    s_logger.info("No response from configuration at {}", url);
                    break;
                case 3:
                    s_logger.warn("No response from configuration at {}", url);
                    break;
                case 4:
                    s_logger.error("No response from configuration at {}", url);
                    startGracefulShutdown();
                    // TODO: wait for the graceful shutdown to complete (i.e. node goes idle)
                    System.exit(0);
                    break;
                }
            }
            s_logger.info("Free memory = {}Mb, total memory = {}Mb",
                    (double) Runtime.getRuntime().freeMemory() / (1024d * 1024d),
                    (double) Runtime.getRuntime().totalMemory() / (1024d * 1024d));
        } while (true);
    }
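Setting opengamma.engine.calcnode.baseurl before the Spring context starts works because Spring's placeholder machinery can fall back to system properties. A minimal sketch of that mechanism; the bean and XML below are illustrative, not OpenGamma's actual configuration:

    // The property must be set before the ApplicationContext is created.
    System.setProperty("opengamma.engine.calcnode.baseurl", "http://config-host/calcnode/");

    // A Spring XML file can then resolve it, e.g.:
    //   <bean class="org.springframework.beans.factory.config.PropertyPlaceholderConfigurer"/>
    //   <bean id="node" class="...">
    //     <property name="baseUrl" value="${opengamma.engine.calcnode.baseurl}"/>
    //   </bean>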
From source file: com.intuit.tank.harness.APITestHarness.java

    /**
     * @param args
     */
    public static void main(String[] args) {
        // Set the TTL on DNS caching to a small value. Note this is a
        // *security* property, not a system property.
        try {
            java.security.Security.setProperty("networkaddress.cache.ttl", "0");
        } catch (Throwable e1) {
            LOG.warn(LogUtil.getLogMessage("Error setting dns timeout: " + e1.toString(), LogEventType.System));
        }
        try {
            System.setProperty("jsse.enableSNIExtension", "false");
        } catch (Throwable e1) {
            LOG.warn(LogUtil.getLogMessage("Error disabling SNI extension: " + e1.toString(),
                    LogEventType.System));
        }
        if (args.length < 1) {
            usage();
            return;
        }
        getInstance().initializeFromArgs(args);
    }
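This example is a useful reminder that Security.setProperty and System.setProperty write to different stores: networkaddress.cache.ttl is only honored as a security property, and an identically named system property is ignored by the default name-service cache (the legacy system-property alternative is sun.net.inetaddr.ttl). A small sketch of the distinction:

    // Security properties live in java.security.Security (backed by the JDK's
    // java.security file), not in the System properties table.
    java.security.Security.setProperty("networkaddress.cache.ttl", "0");
    System.out.println(java.security.Security.getProperty("networkaddress.cache.ttl")); // 0

    // This sets an unrelated *system* property; the DNS cache ignores it.
    System.setProperty("networkaddress.cache.ttl", "0");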
From source file: com.mirth.connect.cli.CommandLineInterface.java

    public static void main(String[] args) {
        System.setProperty("log4j.configuration", "log4j-cli.properties");
        new CommandLineInterface(args);
    }
From source file: com.microsoft.gittf.client.clc.Main.java

    public static void main(String[] args) {
        // Configure logging, using the standard TFS SDK logging.
        System.setProperty("teamexplorer.application", ProductInformation.getProductName()); //$NON-NLS-1$
        LoggingConfiguration.configure();

        final Log log = LogFactory.getLog(ProductInformation.getProductName());

        try {
            ArgumentCollection mainArguments = new ArgumentCollection();

            try {
                mainArguments = ArgumentParser.parse(args, ARGUMENTS,
                        ArgumentParserOptions.ALLOW_UNKNOWN_ARGUMENTS);
            } catch (ArgumentParserException e) {
                console.getErrorStream().println(e.getLocalizedMessage());
                console.getErrorStream().println(getUsage());
                System.exit(ExitCode.FAILURE);
            }

            if (mainArguments.contains("version")) //$NON-NLS-1$
            {
                console.getOutputStream().println(Messages.formatString("Main.ApplicationVersionFormat", //$NON-NLS-1$
                        ProductInformation.getProductName(), ProductInformation.getBuildNumber()));
                return;
            }

            /*
             * Special case "--help command" handling - convert to "help command".
             */
            if (mainArguments.contains("help") && mainArguments.contains("command")) //$NON-NLS-1$ //$NON-NLS-2$
            {
                HelpCommand helpCommand = new HelpCommand();
                helpCommand.setArguments(ArgumentParser.parse(new String[] {
                        ((FreeArgumentCollection) mainArguments.getArgument("command")).getValues()[0] //$NON-NLS-1$
                }, helpCommand.getPossibleArguments()));
                helpCommand.setConsole(console);
                helpCommand.run();
                return;
            } else if (mainArguments.contains("help") || !mainArguments.contains("command")) //$NON-NLS-1$ //$NON-NLS-2$
            {
                showHelp();
                return;
            }

            // Set the verbosity of the console from the arguments.
            if (mainArguments.contains("quiet")) //$NON-NLS-1$
            {
                console.setVerbosity(Verbosity.QUIET);
            } else if (mainArguments.contains("verbose")) //$NON-NLS-1$
            {
                console.setVerbosity(Verbosity.VERBOSE);
            }

            /*
             * Parse the free arguments into the command name and arguments to
             * pass to it. Add any unmatched arguments that were specified on
             * the command line before the argument. (E.g., for
             * "git-tf --bare clone", we parsed the "--bare" as an unmatched
             * argument to the main command. We instead want to add the
             * "--bare" as an argument to "clone".)
             */
            String[] fullCommand = ((FreeArgumentCollection) mainArguments.getArgument("command")).getValues(); //$NON-NLS-1$
            String[] additionalArguments = mainArguments.getUnknownArguments();

            String commandName = fullCommand[0];
            String[] commandArgs = new String[additionalArguments.length + (fullCommand.length - 1)];

            if (additionalArguments.length > 0) {
                System.arraycopy(additionalArguments, 0, commandArgs, 0, additionalArguments.length);
            }

            if (fullCommand.length > 1) {
                System.arraycopy(fullCommand, 1, commandArgs, mainArguments.getUnknownArguments().length,
                        fullCommand.length - 1);
            }

            // Locate the specified command by name.
            List<CommandDefinition> possibleCommands = new ArrayList<CommandDefinition>();

            for (CommandDefinition c : COMMANDS) {
                if (c.getName().equals(commandName)) {
                    possibleCommands.clear();
                    possibleCommands.add(c);
                    break;
                } else if (c.getName().startsWith(commandName)) {
                    possibleCommands.add(c);
                }
            }

            if (possibleCommands.size() == 0) {
                printError(Messages.formatString("Main.CommandNotFoundFormat", commandName, //$NON-NLS-1$
                        ProductInformation.getProductName()));
                System.exit(1);
            }

            if (possibleCommands.size() > 1) {
                printError(Messages.formatString("Main.AmbiguousCommandFormat", commandName, //$NON-NLS-1$
                        ProductInformation.getProductName()));
                for (CommandDefinition c : possibleCommands) {
                    printError(Messages.formatString("Main.AmbiguousCommandListFormat", c.getName()), false); //$NON-NLS-1$
                }
                System.exit(1);
            }

            // Instantiate the command.
            final CommandDefinition commandDefinition = possibleCommands.get(0);
            Command command = null;
            try {
                command = commandDefinition.getType().newInstance();
            } catch (Exception e) {
                printError(Messages.formatString("Main.CommandCreationFailedFormat", commandName)); //$NON-NLS-1$
                System.exit(1);
            }

            // Set the console.
            command.setConsole(console);

            // Parse the arguments.
            ArgumentCollection argumentCollection = null;
            try {
                argumentCollection = ArgumentParser.parse(commandArgs, command.getPossibleArguments());
            } catch (ArgumentParserException e) {
                Main.printError(e.getLocalizedMessage());
                Main.printError(getUsage(command));
                log.error("Could not parse arguments", e); //$NON-NLS-1$
                System.exit(1);
            }

            // Handle the --help argument directly.
            if (argumentCollection.contains("help")) //$NON-NLS-1$
            {
                command.showHelp();
                System.exit(0);
            }

            // Set the verbosity of the console from the arguments.
            if (argumentCollection.contains("quiet")) //$NON-NLS-1$
            {
                console.setVerbosity(Verbosity.QUIET);
            } else if (argumentCollection.contains("verbose")) //$NON-NLS-1$
            {
                console.setVerbosity(Verbosity.VERBOSE);
            }

            command.setArguments(argumentCollection);

            System.exit(command.run());
        } catch (Exception e) {
            printError(e.getLocalizedMessage());
            log.warn(MessageFormat.format("Error executing command: {0}", getCommandLine(args)), e); //$NON-NLS-1$
        }
    }