List of usage examples for java.util.Iterator.next()
E next();
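In the Iterator contract, next() returns the next element and advances the iteration, throwing NoSuchElementException when no element remains. Before the collected examples, here is a minimal self-contained sketch of the usual hasNext()/next() idiom; the class and variable names are illustrative only and are not taken from the source files below:

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class IteratorNextExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alice", "bob", "carol");
        Iterator<String> it = names.iterator();
        // next() returns the current element and moves the cursor forward;
        // guard every call with hasNext() to avoid NoSuchElementException.
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}

Each of the examples that follow applies this same guard-then-advance pattern, whether iterating over collections, command-line arguments, XML nodes, or file lines.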
From source file:gsn.utils.GSNMonitor.java
public static void main(String[] args) {
    String configFileName;
    if (args.length >= 2) {
        configFileName = args[0];
        System.out.println("Using config file: " + configFileName);
        for (int i = 1; i < args.length; i++) {
            System.out.println("Adding e-mail: " + args[i]);
            listOfMails.add(args[i]);
        }
    } else {
        System.out.println("Usage java -jar VSMonitor.jar <config_file> <list_of_mails>");
        System.out.println("e.g. java -jar VSMonitor.jar conf/monitoring.cfg user@gmail.com admin@gmail.com");
        return;
    }
    initFromFile(configFileName);
    // for each monitored GSN server
    Iterator iter = listOfGSNSessions.iterator();
    while (iter.hasNext()) {
        try {
            readStatus((GSNSessionAddress) iter.next());
        } catch (Exception e) {
            logger.error("Exception: " + e.getMessage());
            logger.error("StackTrace:\n" + getStackTrace(e));
        }
    }
    checkUpdateTimes();
    // Generate Report
    report.append("\n[ERROR]\n" + errorsBuffer).append("\n[WARNING]\n" + warningsBuffer)
            .append("\n[INFO]\n" + infosBuffer);
    if ((nSensorsLate > 0) || (nHostsDown > 0)) {
        summary.append("WARNING: ");
        if (nHostsDown > 0)
            summary.append(nHostsDown + " host(s) down. ");
        if (nSensorsLate > 0)
            summary.append(nSensorsLate + " sensor(s) not updated. ");
        // Send e-mail only if there are errors
        try {
            sendMail();
        } catch (EmailException e) {
            logger.error("Cannot send e-mail. " + e.getMessage());
            logger.error("StackTrace:\n" + getStackTrace(e));
        }
    }
    // Showing report
    System.out.println(summary);
    System.out.println(report);
    System.exit(status);
}
From source file:com.cloud.test.utils.TestClient.java
public static void main(String[] args) {
    String host = "http://localhost";
    String port = "8080";
    String testUrl = "/client/test";
    int numThreads = 1;
    try {
        // Parameters
        List<String> argsList = Arrays.asList(args);
        Iterator<String> iter = argsList.iterator();
        while (iter.hasNext()) {
            String arg = iter.next();
            // host
            if (arg.equals("-h")) {
                host = "http://" + iter.next();
            }
            if (arg.equals("-p")) {
                port = iter.next();
            }
            if (arg.equals("-t")) {
                numThreads = Integer.parseInt(iter.next());
            }
            if (arg.equals("-s")) {
                sleepTime = Long.parseLong(iter.next());
            }
            if (arg.equals("-c")) {
                cleanUp = Boolean.parseBoolean(iter.next());
                if (!cleanUp)
                    sleepTime = 0L; // no need to wait if we don't ever cleanup
            }
            if (arg.equals("-r")) {
                repeat = Boolean.parseBoolean(iter.next());
            }
            if (arg.equals("-u")) {
                numOfUsers = Integer.parseInt(iter.next());
            }
            if (arg.equals("-i")) {
                internet = Boolean.parseBoolean(iter.next());
            }
        }
        final String server = host + ":" + port + testUrl;
        s_logger.info("Starting test against server: " + server + " with " + numThreads + " thread(s)");
        if (cleanUp)
            s_logger.info("Clean up is enabled, each test will wait " + sleepTime + " ms before cleaning up");
        if (numOfUsers > 0) {
            s_logger.info("Pre-generating users for test of size : " + numOfUsers);
            users = new String[numOfUsers];
            Random ran = new Random();
            for (int i = 0; i < numOfUsers; i++) {
                users[i] = Math.abs(ran.nextInt()) + "-user";
            }
        }
        for (int i = 0; i < numThreads; i++) {
            new Thread(new Runnable() {
                public void run() {
                    do {
                        String username = null;
                        try {
                            long now = System.currentTimeMillis();
                            Random ran = new Random();
                            if (users != null) {
                                username = users[Math.abs(ran.nextInt()) % numOfUsers];
                            } else {
                                username = Math.abs(ran.nextInt()) + "-user";
                            }
                            NDC.push(username);
                            String url = server + "?email=" + username + "&password=" + username + "&command=deploy";
                            s_logger.info("Launching test for user: " + username + " with url: " + url);
                            HttpClient client = new HttpClient();
                            HttpMethod method = new GetMethod(url);
                            int responseCode = client.executeMethod(method);
                            boolean success = false;
                            String reason = null;
                            if (responseCode == 200) {
                                if (internet) {
                                    s_logger.info("Deploy successful...waiting 5 minute before SSH tests");
                                    Thread.sleep(300000L); // Wait 5 minutes so the Linux VM can boot up.
                                    s_logger.info("Begin Linux SSH test");
                                    reason = sshTest(method.getResponseHeader("linuxIP").getValue());
                                    if (reason == null) {
                                        s_logger.info("Linux SSH test successful");
                                        s_logger.info("Begin Windows SSH test");
                                        reason = sshWinTest(method.getResponseHeader("windowsIP").getValue());
                                    }
                                }
                                if (reason == null) {
                                    if (internet) {
                                        s_logger.info("Windows SSH test successful");
                                    } else {
                                        s_logger.info("deploy test successful....now cleaning up");
                                        if (cleanUp) {
                                            s_logger.info("Waiting " + sleepTime + " ms before cleaning up vms");
                                            Thread.sleep(sleepTime);
                                        } else {
                                            success = true;
                                        }
                                    }
                                    if (users == null) {
                                        s_logger.info("Sending cleanup command");
                                        url = server + "?email=" + username + "&password=" + username + "&command=cleanup";
                                    } else {
                                        s_logger.info("Sending stop DomR / destroy VM command");
                                        url = server + "?email=" + username + "&password=" + username + "&command=stopDomR";
                                    }
                                    method = new GetMethod(url);
                                    responseCode = client.executeMethod(method);
                                    if (responseCode == 200) {
                                        success = true;
                                    } else {
                                        reason = method.getStatusText();
                                    }
                                } else {
                                    // Just stop but don't destroy the VMs/Routers
                                    s_logger.info("SSH test failed with reason '" + reason + "', stopping VMs");
                                    url = server + "?email=" + username + "&password=" + username + "&command=stop";
                                    responseCode = client.executeMethod(new GetMethod(url));
                                }
                            } else {
                                // Just stop but don't destroy the VMs/Routers
                                reason = method.getStatusText();
                                s_logger.info("Deploy test failed with reason '" + reason + "', stopping VMs");
                                url = server + "?email=" + username + "&password=" + username + "&command=stop";
                                client.executeMethod(new GetMethod(url));
                            }
                            if (success) {
                                s_logger.info("***** Completed test for user : " + username + " in "
                                        + ((System.currentTimeMillis() - now) / 1000L) + " seconds");
                            } else {
                                s_logger.info("##### FAILED test for user : " + username + " in "
                                        + ((System.currentTimeMillis() - now) / 1000L)
                                        + " seconds with reason : " + reason);
                            }
                        } catch (Exception e) {
                            s_logger.warn("Error in thread", e);
                            try {
                                HttpClient client = new HttpClient();
                                String url = server + "?email=" + username + "&password=" + username + "&command=stop";
                                client.executeMethod(new GetMethod(url));
                            } catch (Exception e1) {
                            }
                        } finally {
                            NDC.clear();
                        }
                    } while (repeat);
                }
            }).start();
        }
    } catch (Exception e) {
        s_logger.error(e);
    }
}
From source file:esiptestbed.mudrod.ontology.process.LocalOntology.java
public static void main(String[] args) throws Exception {
    // boolean options
    Option helpOpt = new Option("h", "help", false, "show this help message");
    // argument options
    Option ontDirOpt = Option.builder(ONT_DIR).required(true).numberOfArgs(1).hasArg(true)
            .desc("A directory containing .owl files.").argName(ONT_DIR).build();
    // create the options
    Options options = new Options();
    options.addOption(helpOpt);
    options.addOption(ontDirOpt);
    String ontDir;
    CommandLineParser parser = new DefaultParser();
    try {
        CommandLine line = parser.parse(options, args);
        if (line.hasOption(ONT_DIR)) {
            ontDir = line.getOptionValue(ONT_DIR).replace("\\", "/");
        } else {
            ontDir = LocalOntology.class.getClassLoader().getResource("ontology").getFile();
        }
        if (!ontDir.endsWith("/")) {
            ontDir += "/";
        }
    } catch (Exception e) {
        LOG.error("Error whilst processing main method of LocalOntology.", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("LocalOntology: 'ontDir' argument is mandatory. ", options, true);
        return;
    }
    File fileDir = new File(ontDir);
    // Fail if the input is not a directory.
    if (fileDir.isDirectory()) {
        List<String> owlFiles = new ArrayList<>();
        for (File owlFile : fileDir.listFiles()) {
            owlFiles.add(owlFile.toString());
        }
        MudrodEngine mEngine = new MudrodEngine();
        Properties props = mEngine.loadConfig();
        Ontology ontology = new OntologyFactory(props).getOntology();
        // convert to correct input for ontology loading.
        String[] owlArray = new String[owlFiles.size()];
        owlArray = owlFiles.toArray(owlArray);
        ontology.load(owlArray);
        String[] terms = new String[] { "Glacier ice" };
        // Demonstrate that we can do basic ontology hierarchy navigation and log output.
        for (Iterator<OntClass> i = getParser().rootClasses(getModel()); i.hasNext();) {
            // print Ontology Class Hierarchy
            OntClass c = i.next();
            renderHierarchy(System.out, c, new LinkedList<>(), 0);
            for (Iterator<OntClass> subClass = c.listSubClasses(true); subClass.hasNext();) {
                OntClass sub = subClass.next();
                // This means that the search term is present as an OntClass
                if (terms[0].equalsIgnoreCase(sub.getLabel(null))) {
                    // Add the search term(s) above to the term cache.
                    for (int j = 0; j < terms.length; j++) {
                        addSearchTerm(terms[j], sub);
                    }
                    // Query the ontology and return subclasses of the search term(s)
                    for (int k = 0; k < terms.length; k++) {
                        Iterator<String> iter = ontology.subclasses(terms[k]);
                        while (iter.hasNext()) {
                            LOG.info("Subclasses >> " + iter.next());
                        }
                    }
                    // print any synonymic relationships to demonstrate that we can
                    // undertake synonym-based query expansion
                    for (int l = 0; l < terms.length; l++) {
                        Iterator<String> iter = ontology.synonyms(terms[l]);
                        while (iter.hasNext()) {
                            LOG.info("Synonym >> " + iter.next());
                        }
                    }
                }
            }
        }
        mEngine.end();
    }
}
From source file:gov.nasa.jpl.mudrod.ontology.process.LocalOntology.java
public static void main(String[] args) throws Exception {
    // boolean options
    Option helpOpt = new Option("h", "help", false, "show this help message");
    // argument options
    Option ontDirOpt = OptionBuilder.hasArg(true).withArgName(ONT_DIR)
            .withDescription("A directory containing .owl files.").isRequired(false).create();
    // create the options
    Options options = new Options();
    options.addOption(helpOpt);
    options.addOption(ontDirOpt);
    String ontDir;
    CommandLineParser parser = new GnuParser();
    try {
        CommandLine line = parser.parse(options, args);
        if (line.hasOption(ONT_DIR)) {
            ontDir = line.getOptionValue(ONT_DIR).replace("\\", "/");
        } else {
            ontDir = LocalOntology.class.getClassLoader().getResource("ontology").getFile();
        }
        if (!ontDir.endsWith("/")) {
            ontDir += "/";
        }
    } catch (Exception e) {
        LOG.error("Error whilst processing main method of LocalOntology.", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("LocalOntology: 'ontDir' argument is mandatory. ", options, true);
        return;
    }
    File fileDir = new File(ontDir);
    // Fail if the input is not a directory.
    if (fileDir.isDirectory()) {
        List<String> owlFiles = new ArrayList<>();
        for (File owlFile : fileDir.listFiles()) {
            owlFiles.add(owlFile.toString());
        }
        MudrodEngine mEngine = new MudrodEngine();
        Properties props = mEngine.loadConfig();
        Ontology ontology = new OntologyFactory(props).getOntology();
        // convert to correct input for ontology loading.
        String[] owlArray = new String[owlFiles.size()];
        owlArray = owlFiles.toArray(owlArray);
        ontology.load(owlArray);
        String[] terms = new String[] { "Glacier ice" };
        // Demonstrate that we can do basic ontology hierarchy navigation and log output.
        for (Iterator<OntClass> i = getParser().rootClasses(getModel()); i.hasNext();) {
            // print Ontology Class Hierarchy
            OntClass c = i.next();
            renderHierarchy(System.out, c, new LinkedList<>(), 0);
            for (Iterator<OntClass> subClass = c.listSubClasses(true); subClass.hasNext();) {
                OntClass sub = subClass.next();
                // This means that the search term is present as an OntClass
                if (terms[0].equalsIgnoreCase(sub.getLabel(null))) {
                    // Add the search term(s) above to the term cache.
                    for (int j = 0; j < terms.length; j++) {
                        addSearchTerm(terms[j], sub);
                    }
                    // Query the ontology and return subclasses of the search term(s)
                    for (int k = 0; k < terms.length; k++) {
                        Iterator<String> iter = ontology.subclasses(terms[k]);
                        while (iter.hasNext()) {
                            LOG.info("Subclasses >> " + iter.next());
                        }
                    }
                    // print any synonymic relationships to demonstrate that we can
                    // undertake synonym-based query expansion
                    for (int l = 0; l < terms.length; l++) {
                        Iterator<String> iter = ontology.synonyms(terms[l]);
                        while (iter.hasNext()) {
                            LOG.info("Synonym >> " + iter.next());
                        }
                    }
                }
            }
        }
        mEngine.end();
    }
}
From source file:application.ReviewDocumentIndexer.java
/**
 * @param args
 */
@SuppressWarnings("deprecation")
public static void main(String[] args) {
    // Parse command line arguments. Exit program if provided arguments are insufficient
    ReviewDocumentIndexer indexer = new ReviewDocumentIndexer(args);
    if (indexer == null)
        return;
    // Open a new index
    IndexWriter index = null;
    try {
        index = new IndexWriter(new SimpleFSDirectory(new File(Paths.luceneIndex)),
                new ReviewTextAnalyzer(indexer), indexer.new_index ? true : false, MaxFieldLength.UNLIMITED);
        if (indexer.pause_every > 2) {
            index.setMaxBufferedDocs(indexer.pause_every);
        }
        index.setMaxMergeDocs(Config.maxMergeDocs);
        index.setMergeFactor(Config.mergeFactor);
    } catch (CorruptIndexException e) {
        AppLogger.error.log(Level.SEVERE,
                "Lucene detected an inconsistency upon opening the index located at " + Paths.luceneIndex);
        throw new RuntimeException("Exiting application", e);
    } catch (LockObtainFailedException e) {
        AppLogger.error.log(Level.SEVERE,
                "Index located at " + Paths.luceneIndex + " is already open by another Lucene process");
        throw new RuntimeException("Exiting application", e);
    } catch (IOException e) {
        AppLogger.error.log(Level.SEVERE, "Could not access location " + Paths.luceneIndex);
        throw new RuntimeException("Exiting application", e);
    }
    // Load a number of reviews from database
    NumberFormat docIdFormat = TokenListsCollector.defaultDocIdFormat();
    try {
        DatabaseReviewCollection reviews = new DatabaseReviewCollection(indexer.pause_every);
        reviews.setLimits(indexer.min_reviewid, indexer.stop_after);
        int indexed_counter = 0;
        while (reviews.hasNextSegment()) {
            System.out.print(Calendar.getInstance().getTime().toGMTString());
            System.out.print(" Loading from DB... ");
            reviews.loadNextSegment();
            Iterator<Review> reviewsIterator = reviews.getIterator();
            System.out.print(" Indexing... ");
            while (reviewsIterator.hasNext()) {
                DatabaseReview dbr = (DatabaseReview) reviewsIterator.next();
                int dbr_id = dbr.getReviewid();
                int dbr_rating = dbr.getRating();
                try {
                    indexer.theReviewId.set(dbr_id);
                    indexer.theStats.setCurrent(dbr_id, dbr_rating);
                    index.addDocument(dbr.getDocumentForIndexing());
                    indexed_counter++;
                    // Also, keep track of the rating and length of this review
                    indexer.theStats.storeCurrent();
                } catch (CorruptIndexException e) {
                    AppLogger.error.log(Level.SEVERE,
                            "Lucene detected an inconsistency upon saving review #"
                                    + Integer.toString(dbr.getReviewid()) + " to the index located at "
                                    + Paths.luceneIndex);
                    return;
                } catch (IOException e) {
                    AppLogger.error.log(Level.WARNING,
                            "Review #" + Integer.toString(dbr.getReviewid()) + " could not be indexed");
                }
            }
            // Backup everything
            System.out.print("Indexed " + indexed_counter + " reviews total. ");
            if (indexer.pause_every > 0) {
                System.out.print("Saving tokenlists... ");
                indexer.theTokenLists.writeNextFile(docIdFormat);
                System.out.print("Saving state... ");
                try {
                    index.commit();
                    indexer.saveState();
                } catch (CorruptIndexException e) {
                    AppLogger.error.log(Level.SEVERE, "Committing index changes failed on review #"
                            + indexer.theReviewId.get() + " due to CorruptIndexException");
                    return;
                } catch (IOException e) {
                    AppLogger.error.log(Level.WARNING, "Committing index changes failed on review #"
                            + indexer.theReviewId.get() + " due to IOException");
                }
            }
            System.out.print("DONE\n");
            reviews.reset();
        }
    } catch (SQLException e) {
        AppLogger.error.log(Level.SEVERE,
                "An exception occurred while trying to access the database.\n" + e.getMessage());
        return;
    }
    try {
        index.close();
        indexer.backupIndex();
    } catch (CorruptIndexException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    System.err.println("Indexing successfully completed!");
    return;
}
From source file:ValidateLicenseHeaders.java
/**
 * ValidateLicenseHeaders jboss-src-root
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    if (args.length == 0 || args[0].startsWith("-h")) {
        log.info("Usage: ValidateLicenseHeaders [-addheader] jboss-src-root");
        System.exit(1);
    }
    int rootArg = 0;
    if (args.length == 2) {
        if (args[0].startsWith("-add"))
            addDefaultHeader = true;
        else {
            log.severe("Unknown argument: " + args[0]);
            log.info("Usage: ValidateLicenseHeaders [-addheader] jboss-src-root");
            System.exit(1);
        }
        rootArg = 1;
    }
    File jbossSrcRoot = new File(args[rootArg]);
    if (jbossSrcRoot.exists() == false) {
        log.info("Src root does not exist, check " + jbossSrcRoot.getAbsolutePath());
        System.exit(1);
    }
    URL u = Thread.currentThread().getContextClassLoader()
            .getResource("META-INF/services/javax.xml.parsers.DocumentBuilderFactory");
    System.err.println(u);
    // Load the valid copyright statements for the licenses
    File licenseInfo = new File(jbossSrcRoot, "varia/src/etc/license-info.xml");
    if (licenseInfo.exists() == false) {
        log.severe("Failed to find the varia/src/etc/license-info.xml under the src root");
        System.exit(1);
    }
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = factory.newDocumentBuilder();
    Document doc = db.parse(licenseInfo);
    NodeList licenses = doc.getElementsByTagName("license");
    for (int i = 0; i < licenses.getLength(); i++) {
        Element license = (Element) licenses.item(i);
        String key = license.getAttribute("id");
        ArrayList headers = new ArrayList();
        licenseHeaders.put(key, headers);
        NodeList copyrights = license.getElementsByTagName("terms-header");
        for (int j = 0; j < copyrights.getLength(); j++) {
            Element copyright = (Element) copyrights.item(j);
            copyright.normalize();
            String id = copyright.getAttribute("id");
            // The id will be blank if there is no id attribute
            if (id.length() == 0)
                continue;
            String text = getElementContent(copyright);
            if (text == null)
                continue;
            // Replace all duplicate whitespace and '*' with a single space
            text = text.replaceAll("[\\s*]+", " ");
            if (text.length() == 1)
                continue;
            text = text.toLowerCase().trim();
            // Replace any copyright date0-date1,date2 with copyright ...
            text = text.replaceAll(COPYRIGHT_REGEX, "...");
            LicenseHeader lh = new LicenseHeader(id, text);
            headers.add(lh);
        }
    }
    log.fine(licenseHeaders.toString());
    File[] files = jbossSrcRoot.listFiles(dotJavaFilter);
    log.info("Root files count: " + files.length);
    processSourceFiles(files, 0);
    log.info("Processed " + totalCount);
    log.info("Updated jboss headers: " + jbossCount);
    // Files with no headers details
    log.info("Files with no headers: " + noheaders.size());
    FileWriter fw = new FileWriter("NoHeaders.txt");
    for (Iterator iter = noheaders.iterator(); iter.hasNext();) {
        File f = (File) iter.next();
        fw.write(f.getAbsolutePath());
        fw.write('\n');
    }
    fw.close();
    // Files with unknown headers details
    log.info("Files with invalid headers: " + invalidheaders.size());
    fw = new FileWriter("InvalidHeaders.txt");
    for (Iterator iter = invalidheaders.iterator(); iter.hasNext();) {
        File f = (File) iter.next();
        fw.write(f.getAbsolutePath());
        fw.write('\n');
    }
    fw.close();
    // License usage summary
    log.info("Creating HeadersSummary.txt");
    fw = new FileWriter("HeadersSummary.txt");
    for (Iterator iter = licenseHeaders.entrySet().iterator(); iter.hasNext();) {
        Map.Entry entry = (Map.Entry) iter.next();
        String key = (String) entry.getKey();
        fw.write("+++ License type=" + key);
        fw.write('\n');
        List list = (List) entry.getValue();
        Iterator jiter = list.iterator();
        while (jiter.hasNext()) {
            LicenseHeader lh = (LicenseHeader) jiter.next();
            fw.write('\t');
            fw.write(lh.id);
            fw.write(", count=");
            fw.write("" + lh.count);
            fw.write('\n');
        }
    }
    fw.close();
}
From source file:ClassFileUtilities.java
/**
 * Program that computes the dependencies between the Batik jars.
 * <p>
 * Run this from the main Batik distribution directory, after building
 * the jars. For every jar file in the batik-xxx/ build directory,
 * it will determine which other jar files it directly depends on.
 * The output is lines of the form:
 * </p>
 * <pre> <i>number</i>,<i>from</i>,<i>to</i></pre>
 * <p>
 * which means that the <i>from</i> jar has <i>number</i> class files
 * that depend on class files in the <i>to</i> jar.
 * </p>
 */
public static void main(String[] args) {
    boolean showFiles = false;
    if (args.length == 1 && args[0].equals("-f")) {
        showFiles = true;
    } else if (args.length != 0) {
        System.err.println("usage: ClassFileUtilities [-f]");
        System.err.println();
        System.err.println(" -f list files that cause each jar file dependency");
        System.exit(1);
    }
    File cwd = new File(".");
    File buildDir = null;
    String[] cwdFiles = cwd.list();
    for (int i = 0; i < cwdFiles.length; i++) {
        if (cwdFiles[i].startsWith("batik-")) {
            buildDir = new File(cwdFiles[i]);
            if (!buildDir.isDirectory()) {
                buildDir = null;
            } else {
                break;
            }
        }
    }
    if (buildDir == null || !buildDir.isDirectory()) {
        System.out.println("Directory 'batik-xxx' not found in current directory!");
        return;
    }
    try {
        Map cs = new HashMap();
        Map js = new HashMap();
        collectJars(buildDir, js, cs);
        Set classpath = new HashSet();
        Iterator i = js.values().iterator();
        while (i.hasNext()) {
            classpath.add(((Jar) i.next()).jarFile);
        }
        i = cs.values().iterator();
        while (i.hasNext()) {
            ClassFile fromFile = (ClassFile) i.next();
            // System.out.println(fromFile.name);
            Set result = getClassDependencies(fromFile.getInputStream(), classpath, false);
            Iterator j = result.iterator();
            while (j.hasNext()) {
                ClassFile toFile = (ClassFile) cs.get(j.next());
                if (fromFile != toFile && toFile != null) {
                    fromFile.deps.add(toFile);
                }
            }
        }
        i = cs.values().iterator();
        while (i.hasNext()) {
            ClassFile fromFile = (ClassFile) i.next();
            Iterator j = fromFile.deps.iterator();
            while (j.hasNext()) {
                ClassFile toFile = (ClassFile) j.next();
                Jar fromJar = fromFile.jar;
                Jar toJar = toFile.jar;
                if (fromFile.name.equals(toFile.name) || toJar == fromJar
                        || fromJar.files.contains(toFile.name)) {
                    continue;
                }
                Integer n = (Integer) fromJar.deps.get(toJar);
                if (n == null) {
                    fromJar.deps.put(toJar, new Integer(1));
                } else {
                    fromJar.deps.put(toJar, new Integer(n.intValue() + 1));
                }
            }
        }
        List triples = new ArrayList(10);
        i = js.values().iterator();
        while (i.hasNext()) {
            Jar fromJar = (Jar) i.next();
            Iterator j = fromJar.deps.keySet().iterator();
            while (j.hasNext()) {
                Jar toJar = (Jar) j.next();
                Triple t = new Triple();
                t.from = fromJar;
                t.to = toJar;
                t.count = ((Integer) fromJar.deps.get(toJar)).intValue();
                triples.add(t);
            }
        }
        Collections.sort(triples);
        i = triples.iterator();
        while (i.hasNext()) {
            Triple t = (Triple) i.next();
            System.out.println(t.count + "," + t.from.name + "," + t.to.name);
            if (showFiles) {
                Iterator j = t.from.files.iterator();
                while (j.hasNext()) {
                    ClassFile fromFile = (ClassFile) j.next();
                    Iterator k = fromFile.deps.iterator();
                    while (k.hasNext()) {
                        ClassFile toFile = (ClassFile) k.next();
                        if (toFile.jar == t.to && !t.from.files.contains(toFile.name)) {
                            System.out.println("\t" + fromFile.name + " --> " + toFile.name);
                        }
                    }
                }
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:fr.iphc.grid.jobmanager.JobManager.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    JobManager command = new JobManager();
    CommandLine line = command.parse(args);
    ArrayList<File> JdlList = new ArrayList<File>();
    Global.getOutputexecutor = Executors.newFixedThreadPool(10);
    Initialize init = new Initialize();
    String SetupFile = "setup_vigrid.xml";
    if (line.hasOption(OPT_SETUP)) {
        SetupFile = line.getOptionValue(OPT_SETUP);
    }
    if ((new File(SetupFile).isFile())) {
        init.GlobalSetup(SetupFile);
    }
    // Init Job
    if (line.hasOption(OPT_JOB)) {
        File file = new File(line.getOptionValue(OPT_JOB));
        if ((file.isFile())) {
            JdlList.add(file);
        } else {
            System.err.println("The file " + file + " doesn't exist");
            System.exit(-1);
        }
    } else {
        File file = new File(line.getOptionValue(OPT_FILEJOB));
        if ((file.isFile())) {
            JdlList = init.InitJdl(file);
        } else {
            System.err.println("The file " + file + " doesn't exist");
            System.exit(-1);
        }
    }
    if (line.hasOption(OPT_WAIT)) {
        Global.TIMEOUTWAIT = Integer.parseInt(line.getOptionValue(OPT_WAIT));
    }
    if (line.hasOption(OPT_RUN)) {
        Global.TIMEOUTRUN = Integer.parseInt(line.getOptionValue(OPT_RUN));
    }
    if (line.hasOption(OPT_END)) {
        Global.TIMEOUTEND = Integer.parseInt(line.getOptionValue(OPT_END));
    }
    if (line.hasOption(OPT_LOGDISPLAY)) {
        Global.SEUILDISPLAYLOG = Float.parseFloat(line.getOptionValue(OPT_LOGDISPLAY));
    }
    init.InitJob(JdlList);
    // Init Url Ce
    if (line.hasOption(OPT_QUEUE)) {
        Global.file = new File(line.getOptionValue(OPT_QUEUE));
    }
    if (line.hasOption(OPT_BAD)) {
        Global.BadCe = new File(line.getOptionValue(OPT_BAD));
    }
    if (line.hasOption(OPT_OPTIMIZETIMEOUTRUN)) {
        Global.OPTTIMEOUTRUN = false;
    }
    if (line.hasOption(OPT_CWD)) {
        File theDir = new File(line.getOptionValue(OPT_CWD));
        if (!theDir.exists()) {
            if (!theDir.mkdirs()) {
                System.err.println("Working directory create failed: " + line.getOptionValue(OPT_CWD));
                System.exit(-1);
            }
        }
        Global.Cwd = line.getOptionValue(OPT_CWD);
    } else {
        Global.Cwd = System.getProperty("user.dir");
    }
    if (!(new File(Global.Cwd)).canWrite()) {
        System.err.println(" Write permission denied : " + Global.Cwd);
        System.exit(-1);
    }
    System.out.println("Current working directory : " + Global.Cwd);
    Date start = new Date();
    init.PrintGlobalSetup();
    init.InitUrl(Global.file);
    init.InitSosCe();
    init.rmLoadFailed(Global.Cwd + "/loadFailed.txt");
    System.out.println("CE: " + Global.ListUrl.size() + " Nb JOB: " + Global.ListJob.size() + " " + new Date());
    if (Global.ListJob.size() < 6) { // to obtain a ratio of 0.8
        Global.OPTTIMEOUTRUN = false;
    }
    // check if we can connect to the grid
    try {
        SessionFactory.createSession(true);
    } catch (NoSuccessException e) {
        System.err.println("Could not connect to the grid at all (" + e.getMessage() + ")");
        System.err.println("Aborting");
        System.exit(0);
    }
    // Launch Thread Job
    JobThread st = new JobThread(Global.ListJob, Global.ListUrl);
    st.start();
    LoggingThread logst = new LoggingThread(Global.ListJob, Global.ListUrl, Global.SEUILDISPLAYLOG);
    logst.start();
    // create Thread Hook to intercept kill and CTRL+C
    Thread hook = new Thread() {
        public void run() {
            try {
                for (Jdl job : Global.ListJob) {
                    if (job.getJobId() != null) {
                        JobThread.jobCancel(job.getJobId());
                    }
                }
            } catch (Exception e) {
                System.err.println("Thread Hook:\n" + e.getMessage());
            }
            // give it a chance to display final job state
            try {
                sleep(5000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    };
    Runtime.getRuntime().addShutdownHook(hook);
    // Integer timer = 180 * 60 * 1000;
    Date now = new Date();
    // Boolean Fin = false;
    while ((!Global.END) && ((now.getTime() - start.getTime()) < Global.TIMEOUTEND * 60 * 1000)) {
        // TOEND in minutes
        now = new Date();
        // int mb = 1024*1024;
        // Getting the runtime reference from system
        // Runtime runtime = Runtime.getRuntime();
        // System.out.println("##### Heap utilization statistics [MB] #####");
        // Print used memory
        // System.out.println("Used Memory:" + (runtime.totalMemory() - runtime.freeMemory()) / mb);
        // Print free memory
        // System.out.println("Free Memory:" + runtime.freeMemory() / mb);
        // Print total available memory
        // System.out.println("Total Memory:" + runtime.totalMemory() / mb);
        // Print maximum available memory
        // System.out.println("Max Memory:" + runtime.maxMemory() / mb);
        // System.out.println("NB: " + nb_end);
        // if ((float) (runtime.totalMemory() - runtime.freeMemory()) / (float) runtime.maxMemory() > (float) 0.3) {
        //     System.out.println("GC: " + (float) (runtime.totalMemory() - runtime.freeMemory()) / runtime.maxMemory());
        //     System.gc();
        // };
        sleep(15 * 1000); // in ms
        // System.gc();
        // Fin = true;
        // for (Jdl job : Global.ListJob) {
        //     if (job.getJob() != null) {
        //         System.out.println("JOB: " + job.getId() + "\t" + job.getStatus());
        //         if (job.getStatus().compareTo("END") == 0) {
        //             ((JobImpl) job.getJob()).postStagingAndCleanup();
        //             System.out.println("END JOB: " + job.getId());
        //             job.setStatus("END");
        //         }
        //         if (job.getStatus().compareTo("END") != 0) {
        //             Fin = false;
        //         }
        //         System.out.println("JOB: " + job.getId() + "\t" + job.getStatus() + "\t" + job.getFail() + "\t" + job.getNodeCe());
        //     }
        // }
        // while ((Global.END == 0) && ((new Date().getTime() - start.getTime()) < timer)) {
    }
    // Boolean end_load = false;
    // while (!end_load) {
    //     end_load = true;
    //     for (Jdl job : Global.ListJob) {
    //         if (job.getStatus().equals("LOAD")) {
    //             end_load = false;
    //         }
    //     }
    // }
    System.out.println("END JOB: " + now);
    st.halt();
    logst.halt();
    Iterator<Url> k = Global.ListUrl.iterator();
    while (k.hasNext()) {
        Url url = k.next();
        System.out.println("URL: " + url.getUrl());
    }
    Iterator<Jdl> m = Global.ListJob.iterator();
    while (m.hasNext()) {
        Jdl job = m.next();
        System.out.println("JOB: " + job.getId() + "\t" + job.getFail() + "\t" + job.getStatus() + "\t"
                + job.getNodeCe());
    }
    System.out.println(start + " " + new Date());
    System.exit(0);
}
From source file:com.moss.veracity.core.Veracity.java
public static void main(String[] args) throws Exception {
    File log4jConfigFile = new File("log4j.xml");
    if (log4jConfigFile.exists()) {
        DOMConfigurator.configureAndWatch(log4jConfigFile.getAbsolutePath(), 1000);
    }
    final Log log = LogFactory.getLog(Veracity.class);
    File homeDir = new File(System.getProperty("user.home"));
    File currentDir = new File(System.getProperty("user.dir"));
    List<File> configLocations = new LinkedList<File>();
    configLocations.addAll(Arrays.asList(new File[] { new File("/etc/veracity.config"),
            new File(homeDir, ".veracity.config"), new File(currentDir, "config.xml") }));
    String customConfigFileProperty = System.getProperty("veracity.configFile");
    if (customConfigFileProperty != null) {
        configLocations.clear();
        configLocations.add(new File(customConfigFileProperty));
    }
    File configFile = null;
    Iterator<File> i = configLocations.iterator();
    while ((configFile == null || !configFile.exists()) && i.hasNext()) {
        configFile = i.next();
    }
    LaunchParameters parameters;
    if (!configFile.exists()) {
        if (log.isDebugEnabled()) {
            log.debug("Creating default config file at " + configFile.getAbsolutePath());
        }
        parameters = new LaunchParameters();
        parameters.save(configFile);
    } else {
        if (log.isDebugEnabled()) {
            log.debug("Loading parameters from config file at " + configFile.getAbsolutePath());
        }
        parameters = LaunchParameters.load(configFile);
    }
    parameters.readSystemProperties();
    new Veracity(parameters);
}
From source file:com.github.xbn.examples.regexutil.non_xbn.MatchEachWordInEveryLine.java
public static final void main(String[] as_1RqdTxtFilePath) {
    Iterator<String> lineItr = null;
    try {
        lineItr = FileUtils.lineIterator(new File(as_1RqdTxtFilePath[0])); // Throws npx if null
    } catch (IOException iox) {
        throw new RuntimeException("Attempting to open \"" + as_1RqdTxtFilePath[0] + "\"", iox);
    } catch (RuntimeException rx) {
        throw new RuntimeException("One required parameter: The path to the text file.", rx);
    }
    // Dummy search string (""), so it can be reused (reset)
    Matcher mWord = Pattern.compile("\\b\\w+\\b").matcher("");
    while (lineItr.hasNext()) {
        String sLine = lineItr.next();
        mWord.reset(sLine);
        while (mWord.find()) {
            System.out.println(mWord.group());
        }
    }
}