List of usage examples for java.lang.System.setProperty
public static String setProperty(String key, String value)
From source file:net.opentsdb.ConfigReader.java
public static void main(String[] args) { System.setProperty("capsule.cache.dir", "/tmp"); try {// w ww .j a va 2 s . c om final Config config = new Config(false); String[] cmdLineArgs = load(config, new String[] { "--port", "3847" }); System.out.println("CMD Line Args:" + Arrays.toString(cmdLineArgs)); System.out.println("TSDB Config:" + config.dumpConfiguration()); } catch (Exception ex) { ex.printStackTrace(System.err); throw new RuntimeException(ex); } }
From source file:ErrorHandlerTest.java
public static void main(String[] args) { // Tell AWT to invoke my Handler. System.setProperty("sun.awt.exception.handler", "ErrorHandler"); // Now create and show the GUI. new ErrorHandlerTest().setVisible(true); }
From source file:com.termmed.statistics.runner.Runner.java
/** * The main method.//from www. jav a 2 s . c o m * * @param args the arguments */ public static void main(String[] args) { logger = new ProcessLogger(); if (args.length == 0) { logger.logInfo("Error happened getting params. Params file doesn't exist"); System.exit(0); // }else{ // args=new String[]{"config/complete_nl-edition11320160930.xml"}; } File infoFolder = new File(I_Constants.PROCESS_INFO_FOLDER); if (!infoFolder.exists()) { infoFolder.mkdirs(); } OutputInfoFactory.get().setExecutionId(UUID.randomUUID().toString()); String msg; int posIni; long start = logger.startTime(); File file = new File(args[0]); Config configFile = getConfig(file); OutputInfoFactory.get().setConfig(configFile); System.setProperty("textdb.allow_full_path", "true"); Connection c; try { boolean clean = false; if (args.length >= 2) { for (int i = 1; i < args.length; i++) { logger.logInfo("Arg " + i + ": " + args[i]); if (args[i].toLowerCase().equals("clean")) { clean = true; } } } dataFolder = new File(I_Constants.REPO_FOLDER); if (!dataFolder.exists()) { dataFolder.mkdirs(); } changedDate = true; changedPreviousDate = true; getParams(file); checkDates(); /*******************************/ // changedDate=false; // changedPreviousDate=false; /********************************/ if (clean || changedDate || changedPreviousDate) { logger.logInfo("Removing old data"); removeDBFolder(); removeRepoFolder(); removeReducedFolder(); changedDate = true; changedPreviousDate = true; } Class.forName("org.hsqldb.jdbcDriver"); logger.logInfo("Connecting to DB. This task can take several minutes... 
wait please."); c = DriverManager.getConnection("jdbc:hsqldb:file:" + I_Constants.DB_FOLDER, "sa", "sa"); initFileProviders(file); // OutputInfoFactory.get().getStatisticProcess().setOutputFolder(I_Constants.STATS_OUTPUT_FOLDER); /*******************************/ // DbSetup dbs=new DbSetup(c); // dbs.recreatePath("org/ihtsdo/statistics/db/setup/storedprocedure"); // dbs=null; /*******************************/ ImportManager impor = new ImportManager(c, file, changedDate, changedPreviousDate); impor.execute(); impor = null; Processor proc = new Processor(c, file); proc.execute(); proc = null; msg = logger.endTime(start); posIni = msg.indexOf("ProcessingTime:") + 16; OutputInfoFactory.get().getStatisticProcess().setTimeTaken(msg.substring(posIni)); // OutputInfoFactory.get().getPatternProcess().setOutputFolder(I_Constants.PATTERN_OUTPUT_FOLDER); long startPattern = logger.startTime(); PatternExecutor pe = new PatternExecutor(file); pe.execute(); pe = null; msg = logger.endTime(startPattern); posIni = msg.indexOf("ProcessingTime:") + 16; OutputInfoFactory.get().getPatternProcess().setTimeTaken(msg.substring(posIni)); OutputInfoFactory.get().setStatus("Complete"); } catch (Exception e) { OutputInfoFactory.get().setStatus("Error: " + e.getMessage() + " - View log for details."); e.printStackTrace(); } msg = logger.endTime(start); posIni = msg.indexOf("ProcessingTime:") + 16; OutputInfoFactory.get().setTimeTaken(msg.substring(posIni)); try { saveInfo(); } catch (IOException e) { e.printStackTrace(); } }
From source file:it.polimi.tower4clouds.manager.server.MMServer.java
public static void main(String[] args) { PropertiesConfiguration releaseProperties = null; try {/*from ww w .j a v a2s . c o m*/ releaseProperties = new PropertiesConfiguration("release.properties"); } catch (org.apache.commons.configuration.ConfigurationException e) { logger.error("Internal error", e); System.exit(1); } APP_NAME = releaseProperties.getString("application.name"); APP_FILE_NAME = releaseProperties.getString("dist.file.name"); APP_VERSION = releaseProperties.getString("release.version"); try { ManagerConfig.init(args, APP_FILE_NAME); if (ManagerConfig.getInstance().isHelp()) { logger.info(ManagerConfig.usage); } else if (ManagerConfig.getInstance().isVersion()) { logger.info("Version: {}", APP_VERSION); } else { logger.info("{} {}", APP_NAME, APP_VERSION); logger.info("Current configuration:\n{}", ManagerConfig.getInstance().toString()); MonitoringManager manager = new MonitoringManager(ManagerConfig.getInstance()); System.setProperty("org.restlet.engine.loggerFacadeClass", "org.restlet.ext.slf4j.Slf4jLoggerFacade"); Component component = new Component(); Server server = new Server(Protocol.HTTP, ManagerConfig.getInstance().getMmPort()); Context context = new Context(); context.getParameters().add("maxThreads", "500"); // context.getParameters().add("maxTotalConnections", "500"); context.getParameters().add("maxQueued", "5000"); server.setContext(context); component.getServers().add(server); component.getClients().add(Protocol.CLAP); component.getDefaultHost().attach("", new MMServer(manager)); logger.info( "Starting Monitoring Manager server on port " + ManagerConfig.getInstance().getMmPort()); component.start(); } } catch (ConfigurationException e) { logger.error("Configuration problem: " + e.getMessage()); logger.error("Run \"" + APP_FILE_NAME + " --help\" for help"); System.exit(1); } catch (HttpException | IOException | ServerErrorException e) { logger.error("Connection problem: {}", e.getMessage()); System.exit(1); } catch (Exception e) { 
logger.error("Unknown error", e); System.exit(1); } }
From source file:bixo.tools.LengthenUrlsTool.java
/** * @param args - URL to fetch, or path to file of URLs *//*from www. j a v a 2 s . c o m*/ @SuppressWarnings("rawtypes") public static void main(String[] args) { try { String url = null; if (args.length == 0) { System.out.print("URL to lengthen: "); url = readInputLine(); if (url.length() == 0) { System.exit(0); } if (!url.startsWith("http://")) { url = "http://" + url; } } else if (args.length != 1) { System.out.print("A single URL or filename parameter is allowed"); System.exit(0); } else { url = args[0]; } String filename; if (!url.startsWith("http://")) { // It's a path to a file of URLs filename = url; } else { // We have a URL that we need to write to a temp file. File tempFile = File.createTempFile("LengthenUrlsTool", "txt"); filename = tempFile.getAbsolutePath(); FileWriter fw = new FileWriter(tempFile); IOUtils.write(url, fw); fw.close(); } System.setProperty("bixo.root.level", "TRACE"); // Uncomment this to see the wire log for HttpClient // System.setProperty("bixo.http.level", "DEBUG"); BaseFetcher fetcher = UrlLengthener.makeFetcher(10, ConfigUtils.BIXO_TOOL_AGENT); Pipe pipe = new Pipe("urls"); pipe = new Each(pipe, new UrlLengthener(fetcher)); pipe = new Each(pipe, new Debug()); BixoPlatform platform = new BixoPlatform(LengthenUrlsTool.class, Platform.Local); BasePath filePath = platform.makePath(filename); TextLine textLineLocalScheme = new TextLine(new Fields("url")); Tap sourceTap = platform.makeTap(textLineLocalScheme, filePath, SinkMode.KEEP); SinkTap sinkTap = new NullSinkTap(new Fields("url")); FlowConnector flowConnector = platform.makeFlowConnector(); Flow flow = flowConnector.connect(sourceTap, sinkTap, pipe); flow.complete(); } catch (Exception e) { System.err.println("Exception running tool: " + e.getMessage()); e.printStackTrace(System.err); System.exit(-1); } }
From source file:com.pivotal.gemfire.tools.pulse.testbed.driver.TomcatHelper.java
public static void main(String[] args) throws Exception { String host = InetAddress.getLocalHost().getHostAddress(); int port = 8080; String path = "/tushark1/code-checkout/tools/Pulse/trunk/build-artifacts/linux/dist/pulse-7.0.1.RC1.war"; String context = "/pulse"; System.setProperty("pulse.propMockDataUpdaterClass", "com.pivotal.gemfire.tools.pulse.testbed.PropMockDataUpdater"); Tomcat tomcat = TomcatHelper.startTomcat("localhost", port, context, path); Thread.sleep(30000);/* w w w .java 2 s.c o m*/ System.out.println("Sleep completed"); System.out.println("Exiting ...."); tomcat.stop(); tomcat.destroy(); }
From source file:it.tizianofagni.sparkboost.MPBoostLearnerExe.java
public static void main(String[] args) { Options options = new Options(); options.addOption("b", "binaryProblem", false, "Indicate if the input dataset contains a binary problem and not a multilabel one"); options.addOption("z", "labels0based", false, "Indicate if the labels IDs in the dataset to classifyLibSvmWithResults are already assigned in the range [0, numLabels-1] included"); options.addOption("l", "enableSparkLogging", false, "Enable logging messages of Spark"); options.addOption("w", "windowsLocalModeFix", true, "Set the directory containing the winutils.exe command"); options.addOption("dp", "documentPartitions", true, "The number of document partitions"); options.addOption("fp", "featurePartitions", true, "The number of feature partitions"); options.addOption("lp", "labelPartitions", true, "The number of label partitions"); CommandLineParser parser = new BasicParser(); CommandLine cmd = null;//from w ww . j ava2s.com String[] remainingArgs = null; try { cmd = parser.parse(options, args); remainingArgs = cmd.getArgs(); if (remainingArgs.length != 3) throw new ParseException("You need to specify all mandatory parameters"); } catch (ParseException e) { System.out.println("Parsing failed. Reason: " + e.getMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp( MPBoostLearnerExe.class.getSimpleName() + " [OPTIONS] <inputFile> <outputFile> <numIterations>", options); System.exit(-1); } boolean binaryProblem = false; if (cmd.hasOption("b")) binaryProblem = true; boolean labels0Based = false; if (cmd.hasOption("z")) labels0Based = true; boolean enablingSparkLogging = false; if (cmd.hasOption("l")) enablingSparkLogging = true; if (cmd.hasOption("w")) { System.setProperty("hadoop.home.dir", cmd.getOptionValue("w")); } String inputFile = remainingArgs[0]; String outputFile = remainingArgs[1]; int numIterations = Integer.parseInt(remainingArgs[2]); long startTime = System.currentTimeMillis(); // Disable Spark logging. 
if (!enablingSparkLogging) { Logger.getLogger("org").setLevel(Level.OFF); Logger.getLogger("akka").setLevel(Level.OFF); } // Create and configure Spark context. SparkConf conf = new SparkConf().setAppName("Spark MPBoost learner"); JavaSparkContext sc = new JavaSparkContext(conf); // Create and configure learner. MpBoostLearner learner = new MpBoostLearner(sc); learner.setNumIterations(numIterations); if (cmd.hasOption("dp")) { learner.setNumDocumentsPartitions(Integer.parseInt(cmd.getOptionValue("dp"))); } if (cmd.hasOption("fp")) { learner.setNumFeaturesPartitions(Integer.parseInt(cmd.getOptionValue("fp"))); } if (cmd.hasOption("lp")) { learner.setNumLabelsPartitions(Integer.parseInt(cmd.getOptionValue("lp"))); } // Build classifier with MPBoost learner. BoostClassifier classifier = learner.buildModel(inputFile, labels0Based, binaryProblem); // Save classifier to disk. DataUtils.saveModel(sc, classifier, outputFile); long endTime = System.currentTimeMillis(); System.out.println("Execution time: " + (endTime - startTime) + " milliseconds."); }
From source file:SIFT_Volume_Stitching.java
public static void main(String[] args) { // set the plugins.dir property to make the plugin appear in the Plugins menu Class<?> clazz = SIFT_Volume_Stitching.class; String url = clazz.getResource("/" + clazz.getName().replace('.', '/') + ".class").toString(); String pluginsDir = url.substring("file:".length(), url.length() - clazz.getName().length() - ".class".length()); System.setProperty("plugins.dir", pluginsDir); // start ImageJ new ImageJ(); // open the Clown sample ImagePlus image1 = IJ.openImage("https://www.creatis.insa-lyon.fr/~frindel/BackStack115.tif"); ImagePlus image2 = IJ.openImage("https://www.creatis.insa-lyon.fr/~frindel/FrontStack.tif"); image1.show();// ww w .j av a 2 s .co m image2.show(); // run the plugin IJ.runPlugIn(clazz.getName(), ""); }
From source file:com.pivotal.gemfire.tools.pulse.tests.TomcatHelper.java
public static void main(String[] args) throws Exception { String host = InetAddress.getLocalHost().getHostAddress(); int port = 9090; String path = "D:/springsource/springsourceWS/VMware-Pulse-cheetah-dev-jun13/build-artifacts/win/dist/pulse-7.0.1.war"; String context = "/pulse"; System.setProperty("pulse.propMockDataUpdaterClass", "com.pivotal.gemfire.tools.pulse.testbed.PropMockDataUpdater"); Tomcat tomcat = TomcatHelper.startTomcat("localhost", port, context, path); Thread.sleep(30000);/* www. j a va 2 s . c o m*/ System.out.println("Sleep completed"); System.out.println("Exiting ...."); tomcat.stop(); tomcat.destroy(); }
From source file:it.tizianofagni.sparkboost.AdaBoostMHLearnerExe.java
public static void main(String[] args) { Options options = new Options(); options.addOption("b", "binaryProblem", false, "Indicate if the input dataset contains a binary problem and not a multilabel one"); options.addOption("z", "labels0based", false, "Indicate if the labels IDs in the dataset to classifyLibSvmWithResults are already assigned in the range [0, numLabels-1] included"); options.addOption("l", "enableSparkLogging", false, "Enable logging messages of Spark"); options.addOption("w", "windowsLocalModeFix", true, "Set the directory containing the winutils.exe command"); options.addOption("dp", "documentPartitions", true, "The number of document partitions"); options.addOption("fp", "featurePartitions", true, "The number of feature partitions"); options.addOption("lp", "labelPartitions", true, "The number of label partitions"); CommandLineParser parser = new BasicParser(); CommandLine cmd = null;// w ww. j a v a 2s.c o m String[] remainingArgs = null; try { cmd = parser.parse(options, args); remainingArgs = cmd.getArgs(); if (remainingArgs.length != 5) throw new ParseException("You need to specify all mandatory parameters"); } catch (ParseException e) { System.out.println("Parsing failed. 
Reason: " + e.getMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(AdaBoostMHLearnerExe.class.getSimpleName() + " [OPTIONS] <inputFile> <outputFile> <numIterations> <sparkMaster> <parallelismDegree>", options); System.exit(-1); } boolean binaryProblem = false; if (cmd.hasOption("b")) binaryProblem = true; boolean labels0Based = false; if (cmd.hasOption("z")) labels0Based = true; boolean enablingSparkLogging = false; if (cmd.hasOption("l")) enablingSparkLogging = true; if (cmd.hasOption("w")) { System.setProperty("hadoop.home.dir", cmd.getOptionValue("w")); } String inputFile = remainingArgs[0]; String outputFile = remainingArgs[1]; int numIterations = Integer.parseInt(remainingArgs[2]); String sparkMaster = remainingArgs[3]; int parallelismDegree = Integer.parseInt(remainingArgs[4]); long startTime = System.currentTimeMillis(); // Disable Spark logging. if (!enablingSparkLogging) { Logger.getLogger("org").setLevel(Level.OFF); Logger.getLogger("akka").setLevel(Level.OFF); } // Create and configure Spark context. SparkConf conf = new SparkConf().setAppName("Spark AdaBoost.MH learner"); JavaSparkContext sc = new JavaSparkContext(conf); // Create and configure learner. AdaBoostMHLearner learner = new AdaBoostMHLearner(sc); learner.setNumIterations(numIterations); if (cmd.hasOption("dp")) { learner.setNumDocumentsPartitions(Integer.parseInt(cmd.getOptionValue("dp"))); } if (cmd.hasOption("fp")) { learner.setNumFeaturesPartitions(Integer.parseInt(cmd.getOptionValue("fp"))); } if (cmd.hasOption("lp")) { learner.setNumLabelsPartitions(Integer.parseInt(cmd.getOptionValue("lp"))); } // Build classifier with MPBoost learner. BoostClassifier classifier = learner.buildModel(inputFile, labels0Based, binaryProblem); // Save classifier to disk. DataUtils.saveModel(sc, classifier, outputFile); long endTime = System.currentTimeMillis(); System.out.println("Execution time: " + (endTime - startTime) + " milliseconds."); }