List of usage examples for java.util.Properties.setProperty
public synchronized Object setProperty(String key, String value)
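Before the project examples below, a minimal self-contained sketch of the method itself: setProperty stores a String value under a String key and returns the previous value for that key, or null if there was none. The file name and keys here are purely illustrative.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Properties;

public class SetPropertyDemo {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();

        // returns null because "db.host" had no previous value
        Object previous = props.setProperty("db.host", "localhost");
        System.out.println(previous);                        // null

        // overwriting the key returns the old value
        previous = props.setProperty("db.host", "example.org");
        System.out.println(previous);                        // localhost

        // persist and reload (file name is illustrative)
        try (FileOutputStream out = new FileOutputStream("demo.properties")) {
            props.store(out, "demo settings");
        }
        Properties reloaded = new Properties();
        try (FileInputStream in = new FileInputStream("demo.properties")) {
            reloaded.load(in);
        }
        System.out.println(reloaded.getProperty("db.host")); // example.org
    }
}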
From source file: ape.Main.java
public static void main(String[] args) { // Creating the Properties object for log4j Properties ppt = new Properties(); ppt.setProperty("log4j.rootLogger", "INFO, appender1"); ppt.setProperty("log4j.appender.appender1", "org.apache.log4j.DailyRollingFileAppender"); ppt.setProperty("log4j.appender.appender1.File", "/var/log/ape.log"); ppt.setProperty("log4j.appender.appender1.DatePattern", ".yyyy-MM-dd"); ppt.setProperty("log4j.appender.appender1.layout", "org.apache.log4j.PatternLayout"); // Configuring log4j to use the Properties object created above PropertyConfigurator.configure(ppt); // Log the current date and time logger.info("\n---------------------------------\nStarting time:"); logTime();/*w w w . j a v a2s .c om*/ // Initialize all of the Option objects for each command (these are used by the CLI parser) createOptions(); // There should be an array of strings passed in as an argument (even if it's empty) // If we get null, we exit if (args == null) { System.err .println("Invalid arguments. main(String[] args) method expected array of strings, got null."); logger.info("Invalid arguments. main(String[] args) method expected array of strings, got null"); printHelp(); return; } // If an empty array is passed in, print the help dialog and exit if (args.length == 0) { printHelp(); return; } // Use the CLI parser to attempt to parse the command into a series of Option objects try { System.out.println(Arrays.toString(args)); logger.info(Arrays.toString(args)); line = getCommand(args); //System.out.println(line.toString()); } catch (MissingArgumentException e) { System.out.println("Missing an argument. Check your syntax."); logger.info("Missing an argument."); logger.info("Dumping args array:"); for (int i = 0; i < args.length; i++) { logger.info(i + ": " + args[i]); } printHelp(); return; } catch (ParseException e) { System.out.println("Parsing error, see help dialog:"); logger.info("Parsing error, see help dialog."); printHelp(); return; } // Get the array of options that were parsed from the command line Option[] options = line.getOptions(); if (line.hasOption("v")) { MAX_OPTION_LENGTH = 3; } else { MAX_OPTION_LENGTH = 2; } if (options == null || options.length > MAX_OPTION_LENGTH || options.length < 1) { System.out.println("Too many options"); logger.info("Too many options"); printHelp(); return; } if (line.hasOption("v")) { VERBOSE = true; logger.info("Executing Ape verbosely."); System.out.println("Executing Ape verbosely"); } //Find which option is cmd, which is -local/-remote, order might be disturbed for (int k = 0; k < options.length; k++) { if (VERBOSE) { System.out.println(options[k]); logger.info(options[k]); } if (!options[k].getOpt().equals("v")) { if (options[k].getOpt() == "L" || options[k].getOpt() == "R") { modeN = k; } else cmdN = k; } } // If the version flag was in the command, print the version and exit if (line.hasOption("V")) { logger.info("Printing out current version: " + VERSION); System.out.println("ChaosMonkey version: " + VERSION); return; } if (line.hasOption('h') || options.length < 1 || modeN == cmdN || modeN == -1 || cmdN == -1) { if (cmdN == -1) { System.out.println("Failure commands were not specified."); logger.info("Failure commands were not specified."); } System.out.println("Exiting ..."); logger.info("Exiting ..."); printHelp(); return; } if (VERBOSE) { System.out.println("Mode is " + options[modeN].getLongOpt()); if (options[modeN].getOpt() == "R") { System.out.println("List of Hosts:"); for (int j = 0; j < line.getOptionValues("R").length; j++) 
{ System.out.println(line.getOptionValues("R")[j]); } } System.out.println("Command is " + options[cmdN].getLongOpt()); if (line.getOptionValues(options[cmdN].getOpt()) != null) { for (int l = 0; l < line.getOptionValues(options[cmdN].getOpt()).length; l++) System.out.println("Command Argument: " + line.getOptionValues(options[cmdN].getOpt())[l]); } } logger.info("Type of Event " + options[cmdN].getLongOpt()); // Remote command execution if (line.hasOption("R")) { //go to remote String[] passIn = line.getOptionValues("R"); logger.info("Executing a command remotely"); logger.info("hosts: "); for (int k = 0; k < passIn.length; k++) { logger.info(passIn[k]); } CommunicationInterface r = new PDSHCommunication(options[cmdN].getOpt(), line.getOptionValues(options[cmdN].getOpt()), passIn); try { // If the command executed successfully if (r.execute()) { logger.info("End time"); logTime(); System.out.println("Running Remote Command Succeeded"); } // If the command exited with an error else { System.out.println("Running remote command failed"); logger.info("Running remote command failed"); } } catch (IOException e) { e.printStackTrace(); return; } return; } // Local command execution else if (line.hasOption("L")) { logger.info("Running Locally"); ApeCommand ac = ApeCommand.getCommand(options[cmdN].getLongOpt()); if (ac == null) { System.out.println(options[cmdN].getLongOpt() + " is not a valid command."); System.out.println( "This can occur if a new command class is added but an entry is not added in the ApeCommand file."); System.out.println( "See src/main/resources/META-INF/services/ape.ApeCommand and ensure that the command's class is there."); logger.info(options[cmdN].getLongOpt() + " is not a valid command."); return; } try { String[] cmdArgs = line.getOptionValues(options[cmdN].getOpt()); if (ac.exec(cmdArgs)) { System.out.println("Running local command succeeded"); logger.info("End time"); logTime(); } else { System.out.println("Running local command failed"); logger.info("Running local command failed"); } return; } catch (ParseException e) { if (Main.VERBOSE) { System.out.println("VERBOSE: A parse exception was thrown. "); System.out.println( "VERBOSE: Interpreting this as an invalid number of arguments for a particular flag and printing the help dialog."); System.out.println("VERBOSE: Stack trace:"); e.printStackTrace(); logger.info("VERBOSE: A parse exception was thrown. "); logger.info( "VERBOSE: Interpreting this as an invalid number of arguments for a particular flag and printing the help dialog."); logger.info("VERBOSE: Stack trace:"); logger.info(e); } System.out.println("Invalid number of arguments."); logger.info("Invalid number of arguments"); printHelp(); } catch (IOException e) { System.out.println("Running local command failed"); logger.info("Running local command failed"); e.printStackTrace(); } } // If the local or remote flags were not used then print the help dialog else { printHelp(); } }
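The Properties-related core of the ape.Main example above, reduced to a short sketch: a log4j 1.x configuration is assembled in memory with setProperty and handed to PropertyConfigurator instead of being read from a log4j.properties file. The appender name and conversion pattern here are illustrative assumptions.

import java.util.Properties;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

public class ProgrammaticLog4jConfig {
    public static void main(String[] args) {
        Properties ppt = new Properties();
        ppt.setProperty("log4j.rootLogger", "INFO, appender1");
        ppt.setProperty("log4j.appender.appender1", "org.apache.log4j.ConsoleAppender");
        ppt.setProperty("log4j.appender.appender1.layout", "org.apache.log4j.PatternLayout");
        ppt.setProperty("log4j.appender.appender1.layout.ConversionPattern", "%d %-5p %c - %m%n");

        // apply the in-memory configuration, then log through it
        PropertyConfigurator.configure(ppt);
        Logger.getLogger(ProgrammaticLog4jConfig.class).info("configured from an in-memory Properties object");
    }
}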
From source file: glacierpipe.GlacierPipeMain.java
public static void main(String[] args) throws IOException, ParseException {
    CommandLineParser parser = new GnuParser();
    CommandLine cmd = parser.parse(OPTIONS, args);

    if (cmd.hasOption("help")) {
        try (PrintWriter writer = new PrintWriter(System.err)) {
            printHelp(writer);
        }
        System.exit(0);
    } else if (cmd.hasOption("upload")) {
        // Turn the CommandLine into Properties
        Properties cliProperties = new Properties();
        for (Iterator<?> i = cmd.iterator(); i.hasNext();) {
            Option o = (Option) i.next();
            String opt = o.getLongOpt();
            opt = opt != null ? opt : o.getOpt();
            String value = o.getValue();
            value = value != null ? value : "";
            cliProperties.setProperty(opt, value);
        }

        // Build up a configuration
        ConfigBuilder configBuilder = new ConfigBuilder();

        // Archive name
        List<?> archiveList = cmd.getArgList();
        if (archiveList.size() > 1) {
            throw new ParseException("Too many arguments");
        } else if (archiveList.isEmpty()) {
            throw new ParseException("No archive name provided");
        }
        configBuilder.setArchive(archiveList.get(0).toString());

        // All other arguments on the command line
        configBuilder.setFromProperties(cliProperties);

        // Load any config from the properties file
        Properties fileProperties = new Properties();
        try (InputStream in = new FileInputStream(configBuilder.propertiesFile)) {
            fileProperties.load(in);
        } catch (IOException e) {
            System.err.printf("Warning: unable to read properties file %s; %s%n",
                    configBuilder.propertiesFile, e);
        }
        configBuilder.setFromProperties(fileProperties);

        // ...
        Config config = new Config(configBuilder);
        IOBuffer buffer = new MemoryIOBuffer(config.partSize);
        AmazonGlacierClient client = new AmazonGlacierClient(
                new BasicAWSCredentials(config.accessKey, config.secretKey));
        client.setEndpoint(config.endpoint);

        // Actual upload
        try (InputStream in = new BufferedInputStream(System.in, 4096);
                PrintWriter writer = new PrintWriter(System.err);
                ObservableProperties configMonitor = config.reloadProperties
                        ? new ObservableProperties(config.propertiesFile)
                        : null;
                ProxyingThrottlingStrategy throttlingStrategy = new ProxyingThrottlingStrategy(config);) {
            TerminalGlacierPipeObserver observer = new TerminalGlacierPipeObserver(writer);
            if (configMonitor != null) {
                configMonitor.registerObserver(throttlingStrategy);
            }
            GlacierPipe pipe = new GlacierPipe(buffer, observer, config.maxRetries, throttlingStrategy);
            pipe.pipe(client, config.vault, config.archive, in);
        } catch (Exception e) {
            e.printStackTrace(System.err);
        }
        System.exit(0);
    } else {
        try (PrintWriter writer = new PrintWriter(System.err)) {
            writer.println("No action specified.");
            printHelp(writer);
        }
        System.exit(-1);
    }
}
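A reduced sketch of the conversion step used above: every parsed Commons CLI option is copied into a Properties object, preferring the long option name, so downstream configuration code only sees string key/value pairs. The option definitions are illustrative.

import java.util.Iterator;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class CliToProperties {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("e", "endpoint", true, "service endpoint"); // illustrative
        options.addOption("v", "vault", true, "vault name");          // illustrative

        CommandLine cmd = new GnuParser().parse(options, args);

        Properties cliProperties = new Properties();
        for (Iterator<?> i = cmd.iterator(); i.hasNext();) {
            Option o = (Option) i.next();
            String key = o.getLongOpt() != null ? o.getLongOpt() : o.getOpt();
            String value = o.getValue() != null ? o.getValue() : "";
            cliProperties.setProperty(key, value);
        }
        cliProperties.list(System.out);
    }
}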
From source file: com.qspin.qtaste.kernel.engine.TestEngine.java
public static void main(String[] args) { boolean executionResult = false; try {/*w w w. ja v a 2s . co m*/ // Log4j Configuration PropertyConfigurator.configure(StaticConfiguration.CONFIG_DIRECTORY + "/log4j.properties"); // log version information logger.info("QTaste kernel version: " + com.qspin.qtaste.kernel.Version.getInstance().getFullVersion()); logger.info("QTaste testAPI version: " + VersionControl.getInstance().getTestApiVersion("")); // handle optional config file name if ((args.length < 4) || (args.length > 10)) { showUsage(); } String testSuiteDir = null; String testbed = null; int numberLoops = 1; boolean loopsInHours = false; int i = 0; while (i < args.length) { if (args[i].equals("-testsuite") && (i + 1 < args.length)) { logger.info("Using " + args[i + 1] + " as test suite directory"); testSuiteDir = args[i + 1]; i += 2; } else if (args[i].equals("-testbed") && (i + 1 < args.length)) { logger.info("Using " + args[i + 1] + " as testbed configuration file"); testbed = args[i + 1]; i += 2; } else if (args[i].equals("-engine") && (i + 1 < args.length)) { logger.info("Using " + args[i + 1] + " as engine configuration file"); TestEngineConfiguration.setConfigFile(args[i + 1]); i += 2; } else if (args[i].equals("-loop")) { String message = "Running test suite in loop"; numberLoops = -1; if ((i + 1 < args.length)) { // more arguments, check if next argument is a loop argument if (args[i + 1].startsWith("-")) { i++; } else { String countOrHoursStr; if (args[i + 1].endsWith("h")) { loopsInHours = true; countOrHoursStr = args[i + 1].substring(0, args[i + 1].length() - 1); } else { loopsInHours = false; countOrHoursStr = args[i + 1]; } try { numberLoops = Integer.parseInt(countOrHoursStr); if (numberLoops <= 0) { throw new NumberFormatException(); } message += (loopsInHours ? " during " : " ") + numberLoops + " " + (loopsInHours ? "hour" : "time") + (numberLoops > 1 ? "s" : ""); i += 2; } catch (NumberFormatException e) { showUsage(); } } } else { // no more arguments i++; } logger.info(message); } else if (args[i].equals("-sutversion") && (i + 1 < args.length)) { logger.info("Using " + args[i + 1] + " as sutversion"); TestBedConfiguration.setSUTVersion(args[i + 1]); i += 2; } else { showUsage(); } } if (testSuiteDir == null || testbed == null) { showUsage(); } TestBedConfiguration.setConfigFile(testbed); // start the log4j server Log4jServer.getInstance().start(); // initialize Python interpreter Properties properties = new Properties(); properties.setProperty("python.home", StaticConfiguration.JYTHON_HOME); properties.setProperty("python.path", StaticConfiguration.JYTHON_LIB); PythonInterpreter.initialize(System.getProperties(), properties, new String[] { "" }); TestSuite testSuite = DirectoryTestSuite.createDirectoryTestSuite(testSuiteDir); testSuite.setExecutionLoops(numberLoops, loopsInHours); executionResult = execute(testSuite); } finally { shutdown(); } System.exit(executionResult ? 0 : 1); }
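The setProperty calls in the QTaste example serve one purpose: telling Jython where its runtime lives before any interpreter is created. A minimal sketch of just that step; the paths are placeholders for an actual Jython installation.

import java.util.Properties;
import org.python.util.PythonInterpreter;

public class JythonSetup {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("python.home", "/opt/jython");     // placeholder path
        props.setProperty("python.path", "/opt/jython/Lib"); // placeholder path

        // merge with the JVM system properties and initialize the runtime once
        PythonInterpreter.initialize(System.getProperties(), props, new String[] { "" });

        PythonInterpreter interp = new PythonInterpreter();
        interp.exec("print('hello from jython')");
    }
}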
From source file: com.adobe.aem.demomachine.Checksums.java
public static void main(String[] args) {
    String rootFolder = null;

    // Command line options for this tool
    Options options = new Options();
    options.addOption("f", true, "Demo Machine root folder");
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("f")) {
            rootFolder = cmd.getOptionValue("f");
        }
    } catch (Exception e) {
        System.exit(-1);
    }

    Properties md5properties = new Properties();
    List<String[]> listPaths = Arrays.asList(AemDemoConstants.demoPaths);
    for (String[] path : listPaths) {
        if (path.length == 5) {
            logger.debug(path[1]);
            File pathFolder = new File(rootFolder + (path[1].length() > 0 ? (File.separator + path[1]) : ""));
            if (pathFolder.exists()) {
                String md5 = AemDemoUtils.calcMD5HashForDir(pathFolder, Boolean.parseBoolean(path[3]), false);
                logger.debug("MD5 is: " + md5);
                md5properties.setProperty("demo.path." + path[0], path[1]);
                md5properties.setProperty("demo.md5." + path[0], md5);
            } else {
                logger.error("Folder cannot be found");
            }
        }
    }

    File md5 = new File(rootFolder + File.separator + "conf" + File.separator + "checksums.properties");
    try {
        @SuppressWarnings("serial")
        Properties tmpProperties = new Properties() {
            @Override
            public synchronized Enumeration<Object> keys() {
                return Collections.enumeration(new TreeSet<Object>(super.keySet()));
            }
        };
        tmpProperties.putAll(md5properties);
        tmpProperties.store(new FileOutputStream(md5), null);
    } catch (Exception e) {
        logger.error(e.getMessage());
    }
    System.out.println("MD5 checksums generated");
}
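The Checksums example relies on a small trick worth isolating: Properties.store writes entries in the order returned by keys(), so an anonymous subclass that enumerates a sorted TreeSet produces an alphabetically ordered, diff-friendly file. Note this depends on store() iterating via keys(), which holds for the classic implementations up to Java 8; newer JDKs may iterate entrySet() instead, in which case that method would need overriding as well. Keys, values, and the file name below are illustrative.

import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Properties;
import java.util.TreeSet;

public class SortedPropertiesStore {
    public static void main(String[] args) throws IOException {
        Properties md5properties = new Properties();
        md5properties.setProperty("demo.md5.2", "b1946ac92492d2347c6235b4d2611184"); // illustrative
        md5properties.setProperty("demo.md5.1", "5d41402abc4b2a76b9719d911017c592"); // illustrative
        md5properties.setProperty("demo.path.1", "dropins");                         // illustrative

        // Properties that enumerates its keys in sorted order, so store() writes a sorted file
        @SuppressWarnings("serial")
        Properties sorted = new Properties() {
            @Override
            public synchronized Enumeration<Object> keys() {
                return Collections.enumeration(new TreeSet<Object>(super.keySet()));
            }
        };
        sorted.putAll(md5properties);

        try (FileOutputStream out = new FileOutputStream("checksums.properties")) {
            sorted.store(out, null);
        }
    }
}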
From source file: fr.iphc.grid.jobmonitor.CeList.java
public static void main(String[] args) throws Exception {
    SessionFactory.createSession(true);
    CeList command = new CeList();
    CommandLine line = command.parse(args);
    Integer timeout = 0;
    String TableSql = "monce";
    // MySQLAccess sql = new MySQLAccess();
    if (line.getOptionValue(OPT_TIMEOUT) == null) {
        timeout = 15;
    } else {
        timeout = Integer.parseInt(line.getOptionValue(OPT_TIMEOUT));
    }
    timeout = timeout * 60; // convert to seconds
    Date start = new Date();
    String OutDir = line.getOptionValue(OPT_OUTDIR);
    if (OutDir == null) {
        OutDir = "/tmp/thread";
    }
    ArrayList<URL> CeList = null;
    if (line.getOptionValue(OPT_CEPATH) == null) {
        CeList = AvailableLdapCe();
        // for (URL k : CeList) {
        //     System.out.println(k);
        // }
    } else {
        CeList = AvailableCe(line.getOptionValue(OPT_CEPATH));
    }
    Boolean ret = initDirectory(new File(OutDir));
    if (!ret) {
        System.out.println("ERROR: " + OutDir + "STOP");
        System.exit(-1);
    }
    // check if we can connect to the grid
    // try {
    //     SessionFactory.createSession(true);
    // } catch (NoSuccessException e) {
    //     System.err.println("Could not connect to the grid at all (" + e.getMessage() + ")");
    //     System.err.println("Aborting");
    //     System.exit(0);
    // }
    SubmitterThread[] st = new SubmitterThread[CeList.size()];
    Iterator<URL> i = CeList.iterator();
    int index = 0;
    while (i.hasNext()) {
        URL serviceURL = i.next();
        // Do not create this inside the thread because of the options.
        Properties prop = new Properties();
        prop.setProperty("Executable", "/bin/hostname");
        // prop.setProperty("Executable", "touch /dev/null");
        JobDescription desc = createJobDescription(prop);
        desc.setAttribute(JobDescription.INTERACTIVE, "true");
        desc.setAttribute(JobDescription.EXECUTABLE, "/bin/hostname");
        // proxy = "/home/dsa/.globus/biomed.txt";
        // Context ctx = ContextFactory.createContext();
        // ctx.setAttribute(Context.TYPE, "VOMS");
        // ctx.setAttribute(Context.USERVO, "biomed");
        // ctx.setAttribute(Context.USERPROXY, proxy);
        // Session session = SessionFactory.createSession(false);
        // session.addContext(ctx);
        Session session = SessionFactory.createSession(true);
        st[index] = new SubmitterThread(serviceURL, session, desc, OutDir, timeout, start);
        st[index].setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
            @Override
            public void uncaughtException(Thread t, Throwable e) {
                // System.out.println("Error! An exception occurred in " + t.getName()
                //         + ". Cause: " + e.getMessage());
            }
        });
        st[index].start();
        // Thread.sleep(15 * 1000);
        // check if the output file already exists
        // System.out.println("Alive " + OutDir + "/" + serviceURL.getHost() + "_"
        //         + serviceURL.getPath().replaceAll("/cream-", "") + ".out");
        // while (!((new File(OutDir + "/" + serviceURL.getHost() + "_"
        //         + serviceURL.getPath().replaceAll("/cream-", "") + ".out").exists())
        //         || (new File(OutDir + "/" + serviceURL.getHost() + "_"
        //         + serviceURL.getPath().replaceAll("/cream-", "") + ".err").exists()))) {
        //     Thread.sleep(500);
        // }
        // System.out.println("Alive " + serviceURL.getHost() + "-" + index + " FILE EXIST");
        index++;
    }
    // System.out.println("BEGIN LOOP: Max " + index);
    long date_start = System.currentTimeMillis();
    // System.out.println("BEGIN START: " + date_start);
    Integer time_out = (timeout + 180) * 1000; // unit ms, value in minutes; +120 = delta relative to the threads
    Boolean Alive = true;
    // int nb = 0;
    long now = System.currentTimeMillis();
    do {
        now = System.currentTimeMillis();
        Alive = false;
        // nb = 0;
        for (int j = 0; j < index; j++) {
            if (st[j].isAlive()) {
                // System.out.println("Alive " + j);
                Alive = true;
                // nb++;
            }
        }
        // System.out.println(nb);
        Thread.sleep(10000);
    } while ((Alive) && ((now - date_start) < time_out));
    for (int j = 0; j < index; j++) {
        if (st[j].isAlive()) {
            st[j].Requeststop();
        }
    }
    BilanCe(OutDir, CeList, TableSql);
    jobManagerLdap jm = new jobManagerLdap();
    jm.updateLdapCe();
    System.out.println("END " + new Date());
    // do some processing...
    System.exit(0);
}
From source file: com.netflix.suro.SuroServer.java
public static void main(String[] args) throws IOException {
    final AtomicReference<Injector> injector = new AtomicReference<Injector>();

    try {
        // Parse the command line
        Options options = createOptions();
        final CommandLine line = new BasicParser().parse(options, args);

        // Load the properties file
        final Properties properties = new Properties();
        if (line.hasOption('p')) {
            properties.load(new FileInputStream(line.getOptionValue('p')));
        }

        // Bind all command line options to the properties with prefix "SuroServer."
        for (Option opt : line.getOptions()) {
            String name = opt.getOpt();
            String value = line.getOptionValue(name);
            String propName = PROP_PREFIX + opt.getArgName();
            if (propName.equals(DynamicPropertyRoutingMapConfigurator.ROUTING_MAP_PROPERTY)) {
                properties.setProperty(DynamicPropertyRoutingMapConfigurator.ROUTING_MAP_PROPERTY,
                        FileUtils.readFileToString(new File(value)));
            } else if (propName.equals(DynamicPropertySinkConfigurator.SINK_PROPERTY)) {
                properties.setProperty(DynamicPropertySinkConfigurator.SINK_PROPERTY,
                        FileUtils.readFileToString(new File(value)));
            } else if (propName.equals(DynamicPropertyInputConfigurator.INPUT_CONFIG_PROPERTY)) {
                properties.setProperty(DynamicPropertyInputConfigurator.INPUT_CONFIG_PROPERTY,
                        FileUtils.readFileToString(new File(value)));
            } else {
                properties.setProperty(propName, value);
            }
        }

        create(injector, properties);
        injector.get().getInstance(LifecycleManager.class).start();

        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                try {
                    Closeables.close(injector.get().getInstance(LifecycleManager.class), true);
                } catch (IOException e) {
                    // do nothing because Closeables.close will swallow IOException
                }
            }
        });

        waitForShutdown(getControlPort(properties));
    } catch (Throwable e) {
        System.err.println("SuroServer startup failed: " + e.getMessage());
        System.exit(-1);
    } finally {
        Closeables.close(injector.get().getInstance(LifecycleManager.class), true);
    }
}
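A reduced sketch of the binding loop above: most command-line options are copied into Properties verbatim, but for a few special keys the command-line value is a file path and the file's contents become the property value. The prefix, key name, and paths are illustrative, not Suro's real ones.

import java.io.File;
import java.io.IOException;
import java.util.Properties;
import org.apache.commons.io.FileUtils;

public class FileBackedProperties {
    private static final String PROP_PREFIX = "demo.";                             // illustrative
    private static final String ROUTING_MAP_PROPERTY = PROP_PREFIX + "routingmap"; // illustrative

    static void bind(Properties properties, String argName, String value) throws IOException {
        String propName = PROP_PREFIX + argName;
        if (propName.equals(ROUTING_MAP_PROPERTY)) {
            // file-backed option: store the file's contents, not the path
            properties.setProperty(propName, FileUtils.readFileToString(new File(value), "UTF-8"));
        } else {
            properties.setProperty(propName, value);
        }
    }

    public static void main(String[] args) throws IOException {
        Properties properties = new Properties();
        bind(properties, "port", "7101");
        // bind(properties, "routingmap", "/etc/demo/routingmap.json"); // path is illustrative
        properties.list(System.out);
    }
}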
From source file: com.virtualparadigm.packman.processor.JPackageManagerBU.java
public static void main(String[] args) { // StringTemplate templ = new StringTemplate("foo $fo$bo$r$ yo"); // templ.setAttribute("success", "foobar"); // templ.setAttribute("bo", "oba"); // System.out.println(templ.toString()); try {/* w w w .j a v a2 s. c om*/ // StringTemplateLoader stringTemplateLoader = new StringTemplateLoader(); String firstTemplate = "firstTemplate"; String content = "this should ${foobar} ${foo:bar.0.1}"; String updatedContent = "this should ${foobar} #{foo:bar.0.1}"; // String content = "this should ${foo:bar}"; // System.out.println(content.matches("\\$\\{.*\\:.*\\}")); // System.out.println(content.replaceAll("\\$\\{.*\\:.*\\}", "hahaha")); // System.out.println(content.replaceAll("(\\$\\{.*)(\\:)(.*\\})", "$1-$3")); // System.out.println(content.replaceAll("(\\$\\{.*)(\\:)(.*\\})", "$1-$3")); // System.out.println(content.replaceAll("(\\$)(\\{.*)(\\:)(.*\\})", "#$2$3$4")); System.out.println(updatedContent.replaceAll("(#)(\\{)([^\\}]*)(\\:)([^\\}]*)(\\})", "\\$$2$3$4$5$6")); System.out.println(content.replaceAll("(\\$)(\\{)(.*)(\\:)(.*)(\\})", "--$2$3$4$5$6--")); System.out.println(content.replaceAll("(\\$)(\\{\\w*\\:\\w*\\})", "#$2")); // stringTemplateLoader.putTemplate(firstTemplate, "this should ${foobar} ${foo:bar}"); // // freemarker.template.Configuration freeMarkerConfiguration = new freemarker.template.Configuration(); // freeMarkerConfiguration.setTemplateLoader(stringTemplateLoader); // Template template = freeMarkerConfiguration.getTemplate(firstTemplate); // Map<String, Object> valueMap = new HashMap<String, Object>(); // valueMap.put("foobar", "helloworld"); // // Writer out = new OutputStreamWriter(System.out); // template.process(valueMap, out); // out.flush(); // // freeMarkerConfiguration.clearTemplateCache(); } catch (Exception e) { e.printStackTrace(); } System.out.println(""); System.out.println(""); VelocityEngine velocityEngine = new VelocityEngine(); Properties vProps = new Properties(); // vProps.put("file.resource.loader.path", ""); vProps.setProperty("resource.loader", "string"); vProps.setProperty("string.resource.loader.class", "org.apache.velocity.runtime.resource.loader.StringResourceLoader"); velocityEngine.init(vProps); Template template = null; VelocityContext velocityContext = new VelocityContext(); velocityContext.put("bo", "oba"); velocityContext.put("foobar", "be replaced"); try { StringResourceRepository repository = StringResourceLoader.getRepository(); repository.putStringResource("template", FileUtils.readFileToString( new File("c:/dev/workbench/paradigm_workspace/jpackage-manager/template.xml"))); StringWriter writer = new StringWriter(); template = velocityEngine.getTemplate("template"); template.merge(velocityContext, writer); System.out.println(writer.toString()); } catch (Exception e) { e.printStackTrace(); } }
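The Velocity portion of the example above, distilled: setProperty registers the string resource loader before VelocityEngine.init, after which templates can be put into the StringResourceRepository and rendered by name. The template name and content are illustrative; the property keys match Velocity 1.x.

import java.io.StringWriter;
import java.util.Properties;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.resource.loader.StringResourceLoader;
import org.apache.velocity.runtime.resource.util.StringResourceRepository;

public class StringTemplateRendering {
    public static void main(String[] args) {
        VelocityEngine velocityEngine = new VelocityEngine();
        Properties vProps = new Properties();
        vProps.setProperty("resource.loader", "string");
        vProps.setProperty("string.resource.loader.class",
                "org.apache.velocity.runtime.resource.loader.StringResourceLoader");
        velocityEngine.init(vProps);

        StringResourceRepository repository = StringResourceLoader.getRepository();
        repository.putStringResource("greeting", "Hello, $name!"); // illustrative template

        VelocityContext context = new VelocityContext();
        context.put("name", "world");

        StringWriter writer = new StringWriter();
        velocityEngine.getTemplate("greeting").merge(context, writer);
        System.out.println(writer); // Hello, world!
    }
}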
From source file: esg.node.util.migrate.UserMigrationTool.java
public static void main(String[] args) { try {//w w w. j a va 2 s . c o m //Enter the connection URI information //setup source connection Properties props = new Properties(); if (args.length >= 4) { for (int i = 0; i < (args.length - 1); i++) { System.out.println(); if ("-U".equals(args[i])) { i++; System.out.print("user = "); if (args[i].startsWith("-")) { --i; continue; } props.setProperty("db.user", args[i]); System.out.print(args[i]); continue; } if ("-h".equals(args[i])) { i++; System.out.print("host = "); if (args[i].startsWith("-")) { --i; continue; } props.setProperty("db.host", args[i]); System.out.print(args[i]); continue; } if ("-p".equals(args[i])) { i++; System.out.print("port = "); if (args[i].startsWith("-")) { --i; continue; } props.setProperty("db.port", args[i]); System.out.print(args[i]); continue; } if ("-d".equals(args[i])) { i++; System.out.print("database = "); if (args[i].startsWith("-")) { --i; continue; } props.setProperty("db.database", args[i]); System.out.print(args[i]); continue; } } System.out.println(); } else { System.out.println("\nUsage:"); System.out.println( " java -jar esgf-security-user-migration-x.x.x.jar -U <username> -h <host> -p <port> -d <database>"); System.out.println(" (hit return and then enter your password)\n"); System.exit(1); } char password[] = null; try { password = PasswordField.getPassword(System.in, "Enter source database password: "); } catch (IOException ioe) { System.err.println("Ooops sumthin' ain't right with the input... :-("); System.exit(1); ioe.printStackTrace(); } if (password == null) { System.out.println("No password entered"); System.exit(1); } props.setProperty("db.password", String.valueOf(password)); System.out.println(); (new UserMigrationTool()).init(props).migrate(); } catch (Throwable t) { System.out.println(t.getMessage()); System.out.println( "\n Sorry, please check your database connection information again, was not able to migrate users :-(\n"); System.exit(1); } System.out.println("\ndone :-)\n"); System.out.println(" Thank you for migrating to the ESGF P2P Node"); System.out.println(" http://esgf.org\n"); }
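A compact sketch of the option handling above: the -U/-h/-p/-d flags become db.* properties, and the password is read separately so it never appears on the command line. Here java.io.Console stands in for the example's PasswordField helper; the key names follow the example, the rest is illustrative.

import java.io.Console;
import java.util.Properties;

public class DbConnectionProps {
    public static void main(String[] args) {
        Properties props = new Properties();
        for (int i = 0; i < args.length - 1; i++) {
            if ("-U".equals(args[i])) {
                props.setProperty("db.user", args[++i]);
            } else if ("-h".equals(args[i])) {
                props.setProperty("db.host", args[++i]);
            } else if ("-p".equals(args[i])) {
                props.setProperty("db.port", args[++i]);
            } else if ("-d".equals(args[i])) {
                props.setProperty("db.database", args[++i]);
            }
        }

        Console console = System.console();
        if (console == null) {
            System.err.println("No console available for password entry");
            System.exit(1);
        }
        char[] password = console.readPassword("Enter source database password: ");
        props.setProperty("db.password", String.valueOf(password));

        System.out.println("Collected keys: " + props.stringPropertyNames());
    }
}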
From source file: de.unileipzig.ub.indexer.App.java
public static void main(String[] args) throws IOException { // create Options object Options options = new Options(); options.addOption("h", "help", false, "display this help"); options.addOption("f", "filename", true, "name of the JSON file whose content should be indexed"); options.addOption("i", "index", true, "the name of the target index"); options.addOption("d", "doctype", true, "the name of the doctype (title, local, ...)"); options.addOption("t", "host", true, "elasticsearch hostname (default: 0.0.0.0)"); options.addOption("p", "port", true, "transport port (that's NOT the http port, default: 9300)"); options.addOption("c", "cluster", true, "cluster name (default: elasticsearch_mdma)"); options.addOption("b", "bulksize", true, "number of docs sent in one request (default: 3000)"); options.addOption("v", "verbose", false, "show processing speed while indexing"); options.addOption("s", "status", false, "only show status of index for file"); options.addOption("r", "repair", false, "attempt to repair recoverable inconsistencies on the go"); options.addOption("e", "debug", false, "set logging level to debug"); options.addOption("l", "logfile", true, "logfile - in not specified only log to stdout"); options.addOption("m", "memcached", true, "host and port of memcached (default: localhost:11211)"); options.addOption("z", "latest-flag-on", true, "enable latest flag according to field (within content, e.g. 001)"); options.addOption("a", "flat", false, "flat-mode: do not check for inconsistencies"); CommandLineParser parser = new PosixParser(); CommandLine cmd = null;// w ww . ja va 2s. c o m try { cmd = parser.parse(options, args); } catch (ParseException ex) { logger.error(ex); System.exit(1); } // setup logging Properties systemProperties = System.getProperties(); systemProperties.put("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.Log4JLogger"); System.setProperties(systemProperties); Logger.getLogger("net.spy.memcached").setLevel(Level.ERROR); Properties props = new Properties(); props.load(props.getClass().getResourceAsStream("/log4j.properties")); if (cmd.hasOption("debug")) { props.setProperty("log4j.logger.de.unileipzig", "DEBUG"); } if (cmd.hasOption("logfile")) { props.setProperty("log4j.rootLogger", "INFO, stdout, F"); props.setProperty("log4j.appender.F", "org.apache.log4j.FileAppender"); props.setProperty("log4j.appender.F.File", cmd.getOptionValue("logfile")); props.setProperty("log4j.appender.F.layout", "org.apache.log4j.PatternLayout"); props.setProperty("log4j.appender.F.layout.ConversionPattern", "%5p | %d | %F | %L | %m%n"); } PropertyConfigurator.configure(props); InetAddress addr = InetAddress.getLocalHost(); String memcachedHostAndPort = addr.getHostAddress() + ":11211"; if (cmd.hasOption("m")) { memcachedHostAndPort = cmd.getOptionValue("m"); } // setup caching try { if (memcachedClient == null) { memcachedClient = new MemcachedClient( new ConnectionFactoryBuilder().setFailureMode(FailureMode.Cancel).build(), AddrUtil.getAddresses("0.0.0.0:11211")); try { // give client and server 500ms Thread.sleep(300); } catch (InterruptedException ex) { } Collection availableServers = memcachedClient.getAvailableServers(); logger.info(availableServers); if (availableServers.size() == 0) { logger.info("no memcached servers found"); memcachedClient.shutdown(); memcachedClient = null; } else { logger.info(availableServers.size() + " memcached server(s) detected, fine."); } } } catch (IOException ex) { logger.warn("couldn't create a connection, bailing out: " + 
ex.getMessage()); } // process options if (cmd.hasOption("h")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("indexer", options, true); quit(0); } boolean verbose = false; if (cmd.hasOption("verbose")) { verbose = true; } // ES options String[] hosts = new String[] { "0.0.0.0" }; int port = 9300; String clusterName = "elasticsearch_mdma"; int bulkSize = 3000; if (cmd.hasOption("host")) { hosts = cmd.getOptionValues("host"); } if (cmd.hasOption("port")) { port = Integer.parseInt(cmd.getOptionValue("port")); } if (cmd.hasOption("cluster")) { clusterName = cmd.getOptionValue("cluster"); } if (cmd.hasOption("bulksize")) { bulkSize = Integer.parseInt(cmd.getOptionValue("bulksize")); if (bulkSize < 1 || bulkSize > 100000) { logger.error("bulksize must be between 1 and 100,000"); quit(1); } } // ES Client final Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", "elasticsearch_mdma") .build(); final TransportClient client = new TransportClient(settings); for (String host : hosts) { client.addTransportAddress(new InetSocketTransportAddress(host, port)); } if (cmd.hasOption("filename") && cmd.hasOption("index") && cmd.hasOption("doctype")) { final String filename = cmd.getOptionValue("filename"); final File _file = new File(filename); if (_file.length() == 0) { logger.info(_file.getAbsolutePath() + " is empty, skipping"); quit(0); // file is empty } // for flat mode: leave a stampfile beside the json to // indicate previous successful processing File directory = new File(filename).getParentFile(); File stampfile = new File(directory, DigestUtils.shaHex(filename) + ".indexed"); long start = System.currentTimeMillis(); long lineCount = 0; final String indexName = cmd.getOptionValue("index"); final String docType = cmd.getOptionValue("doctype"); BulkRequestBuilder bulkRequest = client.prepareBulk(); try { if (cmd.hasOption("flat")) { // flat mode // ......... 
if (stampfile.exists()) { logger.info("SKIPPING, since it seems this file has already " + "been imported (found: " + stampfile.getAbsolutePath() + ")"); quit(0); } } else { final String srcSHA1 = extractSrcSHA1(filename); logger.debug(filename + " srcsha1: " + srcSHA1); long docsInIndex = getIndexedRecordCount(client, indexName, srcSHA1); logger.debug(filename + " indexed: " + docsInIndex); long docsInFile = getLineCount(filename); logger.debug(filename + " lines: " + docsInFile); // in non-flat-mode, indexing would take care // of inconsistencies if (docsInIndex == docsInFile) { logger.info("UP-TO DATE: " + filename + " (" + docsInIndex + ", " + srcSHA1 + ")"); client.close(); quit(0); } if (docsInIndex > 0) { logger.warn("INCONSISTENCY DETECTED: " + filename + ": indexed:" + docsInIndex + " lines:" + docsInFile); if (!cmd.hasOption("r")) { logger.warn( "Please re-run indexer with --repair flag or delete residues first with: $ curl -XDELETE " + hosts[0] + ":9200/" + indexName + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'"); client.close(); quit(1); } else { logger.info("Attempting to clear residues..."); // attempt to repair once DeleteByQueryResponse dbqr = client.prepareDeleteByQuery(indexName) .setQuery(termQuery("meta.srcsha1", srcSHA1)).execute().actionGet(); Iterator<IndexDeleteByQueryResponse> it = dbqr.iterator(); long deletions = 0; while (it.hasNext()) { IndexDeleteByQueryResponse response = it.next(); deletions += 1; } logger.info("Deleted residues of " + filename); logger.info("Refreshing [" + indexName + "]"); RefreshResponse refreshResponse = client.admin().indices() .refresh(new RefreshRequest(indexName)).actionGet(); long indexedAfterDelete = getIndexedRecordCount(client, indexName, srcSHA1); logger.info(indexedAfterDelete + " docs remained"); if (indexedAfterDelete > 0) { logger.warn("Not all residues cleaned. Try to fix this manually: $ curl -XDELETE " + hosts[0] + ":9200/" + indexName + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'"); quit(1); } else { logger.info("Residues are gone. Now trying to reindex: " + filename); } } } } logger.info("INDEXING-REQUIRED: " + filename); if (cmd.hasOption("status")) { quit(0); } HashSet idsInBatch = new HashSet(); String idField = null; if (cmd.hasOption("z")) { idField = cmd.getOptionValue("z"); } final FileReader fr = new FileReader(filename); final BufferedReader br = new BufferedReader(fr); String line; // one line is one document while ((line = br.readLine()) != null) { // "Latest-Flag" machine // This gets obsolete with a "flat" index if (cmd.hasOption("z")) { // flag that indicates, whether the document // about to be indexed will be the latest boolean willBeLatest = true; // check if there is a previous (lower meta.timestamp) document with // the same identifier (whatever that may be - queried under "content") final String contentIdentifier = getContentIdentifier(line, idField); idsInBatch.add(contentIdentifier); // assumed in meta.timestamp final Long timestamp = Long.parseLong(getTimestamp(line)); logger.debug("Checking whether record is latest (line: " + lineCount + ")"); logger.debug(contentIdentifier + ", " + timestamp); // get all docs, which match the contentIdentifier // by filter, which doesn't score final TermFilterBuilder idFilter = new TermFilterBuilder("content." 
+ idField, contentIdentifier); final TermFilterBuilder kindFilter = new TermFilterBuilder("meta.kind", docType); final AndFilterBuilder afb = new AndFilterBuilder(); afb.add(idFilter).add(kindFilter); final FilteredQueryBuilder fb = filteredQuery(matchAllQuery(), afb); final SearchResponse searchResponse = client.prepareSearch(indexName) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(fb).setFrom(0) .setSize(1200) // 3 years and 105 days assuming daily updates at the most .setExplain(false).execute().actionGet(); final SearchHits searchHits = searchResponse.getHits(); logger.debug("docs with this id in the index: " + searchHits.getTotalHits()); for (final SearchHit hit : searchHits.getHits()) { final String docId = hit.id(); final Map<String, Object> source = hit.sourceAsMap(); final Map meta = (Map) source.get("meta"); final Long docTimestamp = Long.parseLong(meta.get("timestamp").toString()); // if the indexed doc timestamp is lower the the current one, // remove any latest flag if (timestamp >= docTimestamp) { source.remove("latest"); final ObjectMapper mapper = new ObjectMapper(); // put the updated doc back // IndexResponse response = client.prepareIndex(indexName, docType).setCreate(false).setId(docId) .setSource(mapper.writeValueAsBytes(source)) .execute(new ActionListener<IndexResponse>() { public void onResponse(IndexResponse rspns) { logger.debug("Removed latest flag from " + contentIdentifier + ", " + docTimestamp + ", " + hit.id() + " since (" + timestamp + " > " + docTimestamp + ")"); } public void onFailure(Throwable thrwbl) { logger.error("Could not remove flag from " + hit.id() + ", " + contentIdentifier); } }); // .execute() //.actionGet(); } else { logger.debug("Doc " + hit.id() + " is newer (" + docTimestamp + ")"); willBeLatest = false; } } if (willBeLatest) { line = setLatestFlag(line); logger.info("Setting latest flag on " + contentIdentifier + ", " + timestamp); } // end of latest-flag machine // beware - this will be correct as long as there // are no dups within one bulk! } bulkRequest.add(client.prepareIndex(indexName, docType).setSource(line)); lineCount++; logger.debug("Added line " + lineCount + " to BULK"); logger.debug(line); if (lineCount % bulkSize == 0) { if (idsInBatch.size() != bulkSize && cmd.hasOption("z")) { logger.error( "This batch has duplications in the ID. That's not bad for the index, just makes the latest flag fuzzy"); logger.error( "Bulk size was: " + bulkSize + ", but " + idsInBatch.size() + " IDs (only)"); } idsInBatch.clear(); logger.debug("Issuing BULK request"); final long actionCount = bulkRequest.numberOfActions(); final BulkResponse bulkResponse = bulkRequest.execute().actionGet(); final long tookInMillis = bulkResponse.getTookInMillis(); if (bulkResponse.hasFailures()) { logger.fatal("FAILED, bulk not indexed. 
exiting now."); Iterator<BulkItemResponse> it = bulkResponse.iterator(); while (it.hasNext()) { BulkItemResponse bir = it.next(); if (bir.isFailed()) { Failure failure = bir.getFailure(); logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage() + ", type: " + failure.getType() + ", index: " + failure.getIndex()); } } quit(1); } else { if (verbose) { final double elapsed = System.currentTimeMillis() - start; final double speed = (lineCount / elapsed * 1000); logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)"); } } bulkRequest = client.prepareBulk(); } } // handle the remaining items final long actionCount = bulkRequest.numberOfActions(); if (actionCount > 0) { final BulkResponse bulkResponse = bulkRequest.execute().actionGet(); final long tookInMillis = bulkResponse.getTookInMillis(); if (bulkResponse.hasFailures()) { logger.fatal("FAILED, bulk not indexed. exiting now."); Iterator<BulkItemResponse> it = bulkResponse.iterator(); while (it.hasNext()) { BulkItemResponse bir = it.next(); if (bir.isFailed()) { Failure failure = bir.getFailure(); logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage() + ", type: " + failure.getType() + ", index: " + failure.getIndex()); } } quit(1); } else { // trigger update now RefreshResponse refreshResponse = client.admin().indices() .refresh(new RefreshRequest(indexName)).actionGet(); if (verbose) { final double elapsed = System.currentTimeMillis() - start; final double speed = (lineCount / elapsed * 1000); logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)"); } } } br.close(); client.close(); final double elapsed = (System.currentTimeMillis() - start) / 1000; final double speed = (lineCount / elapsed); logger.info("indexing (" + filename + ") " + lineCount + " docs took " + elapsed + "s (speed: " + String.format("%.2f", speed) + "r/s)"); if (cmd.hasOption("flat")) { try { FileUtils.touch(stampfile); } catch (IOException ioe) { logger.warn(".indexed files not created. Will reindex everything everytime."); } } } catch (IOException e) { client.close(); logger.error(e); quit(1); } finally { client.close(); } } quit(0); }
From source file: com.mgmtp.jfunk.core.JFunk.java
/**
 * Starts jFunk.
 *
 * <pre>
 * -threadcount=<count>   Optional   Number of threads to be used. Allows for parallel
 *                                   execution of test scripts.
 * -parallel              Optional   Allows a single script to be executed in parallel
 *                                   depending on the number of threads specified. The
 *                                   argument is ignored if multiple scripts are specified.
 * <script parameters>    Optional   Similar to Java system properties they can be provided
 *                                   as key-value-pairs preceded by -S, e.g. -Skey=value.
 *                                   These parameters are then available in the script as
 *                                   Groovy variables.
 * <script(s)>            Required   At least one test script must be specified.
 *
 * Example:
 * java -cp <jFunkClasspath> com.mgmtp.jfunk.core.JFunk -Skey=value -threadcount=4 -parallel mytest.script
 * </pre>
 *
 * @param args
 *            The program arguments.
 */
public static void main(final String[] args) {
    SLF4JBridgeHandler.install();

    boolean exitWithError = true;
    StopWatch stopWatch = new StopWatch();

    try {
        RESULT_LOG.info("jFunk started");
        stopWatch.start();

        int threadCount = 1;
        boolean parallel = false;
        Properties scriptProperties = new Properties();
        List<File> scripts = Lists.newArrayList();

        for (String arg : args) {
            if (arg.startsWith("-threadcount")) {
                String[] split = arg.split("=");
                Preconditions.checkArgument(split.length == 2,
                        "The number of threads must be specified as follows: -threadcount=<value>");
                threadCount = Integer.parseInt(split[1]);
                RESULT_LOG.info("Using " + threadCount + (threadCount == 1 ? " thread" : " threads"));
            } else if (arg.startsWith("-S")) {
                arg = arg.substring(2);
                String[] split = arg.split("=");
                Preconditions.checkArgument(split.length == 2,
                        "Script parameters must be given in the form -S<name>=<value>");
                scriptProperties.setProperty(split[0], normalizeScriptParameterValue(split[1]));
                RESULT_LOG.info("Using script parameter " + split[0] + " with value " + split[1]);
            } else if (arg.equals("-parallel")) {
                parallel = true;
                RESULT_LOG.info("Using parallel mode");
            } else {
                scripts.add(new File(arg));
            }
        }

        if (scripts.isEmpty()) {
            scripts.addAll(requestScriptsViaGui());
            if (scripts.isEmpty()) {
                RESULT_LOG.info("Execution finished (took " + stopWatch + " H:mm:ss.SSS)");
                System.exit(0);
            }
        }

        String propsFileName = System.getProperty("jfunk.props.file", "jfunk.properties");
        Module module = ModulesLoader.loadModulesFromProperties(new JFunkDefaultModule(), propsFileName);
        Injector injector = Guice.createInjector(module);
        JFunkFactory factory = injector.getInstance(JFunkFactory.class);
        JFunkBase jFunk = factory.create(threadCount, parallel, scripts, scriptProperties);
        jFunk.execute();

        exitWithError = false;
    } catch (JFunkExecutionException ex) {
        // no logging necessary
    } catch (Exception ex) {
        Logger.getLogger(JFunk.class).error("jFunk terminated unexpectedly.", ex);
    } finally {
        stopWatch.stop();
        RESULT_LOG.info("Execution finished (took " + stopWatch + " H:mm:ss.SSS)");
    }

    System.exit(exitWithError ? -1 : 0);
}
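The argument handling above reduces to one small Properties pattern: arguments of the form -S<name>=<value> are split on '=' and collected into the Properties object later handed to the scripts. A minimal sketch of just that step; splitting with a limit of 2 is a small liberty so values may themselves contain '='.

import java.util.Properties;

public class ScriptParameterParser {
    public static void main(String[] args) {
        Properties scriptProperties = new Properties();
        for (String arg : args) {
            if (arg.startsWith("-S")) {
                // split only on the first '=' so values may contain '=' themselves
                String[] split = arg.substring(2).split("=", 2);
                if (split.length != 2) {
                    throw new IllegalArgumentException(
                            "Script parameters must be given in the form -S<name>=<value>: " + arg);
                }
                scriptProperties.setProperty(split[0], split[1]);
            }
        }
        scriptProperties.list(System.out);
    }
}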