List of usage examples for java.util.logging Level SEVERE
Level SEVERE
To view the source code for java.util.logging Level SEVERE.
Click Source Link
From source file:edu.usu.sdl.wso2client.SampleWSRegistryClient.java
public static void main(String[] args) throws Exception { Registry registry = initialize(); try {//from ww w . j a v a2s . c o m //load component List<ComponentAll> components; try (InputStream in = new FileInputStream("C:\\temp\\components.json")) { components = StringProcessor.defaultObjectMapper().readValue(in, new TypeReference<List<ComponentAll>>() { }); } catch (IOException ex) { throw ex; } ComponentAll componentAll = components.get(0); Resource resource = registry.newResource(); resource.setContent(componentAll.getComponent().getDescription()); resource.setDescription("Storefront Component"); resource.setMediaType("application/openstorefront"); resource.setUUID(componentAll.getComponent().getComponentId()); try { Map fieldMap = BeanUtils.describe(componentAll.getComponent()); fieldMap.keySet().stream().forEach((key) -> { if ("description".equals(key) == false) { resource.setProperty(Component.class.getSimpleName() + "_" + key, "" + fieldMap.get(key)); //System.out.println("key = " + Component.class.getSimpleName() + "_" + key); //System.out.println("Value = " + fieldMap.get(key)); } }); } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ex) { Logger.getLogger(StringProcessor.class.getName()).log(Level.SEVERE, null, ex); } String resourcePath = "/storefront/components/" + componentAll.getComponent().getComponentId(); registry.put(resourcePath, resource); // System.out.println("A resource added to: " + resourcePath); // registry.rateResource(resourcePath, 4); // // System.out.println("Resource rated with 4 stars!"); // Comment comment = new Comment(); // comment.setText("Testing Connection"); // registry.addComment(resourcePath, comment); // System.out.println("Comment added to resource"); // // Resource getResource = registry.get("/abc2"); // System.out.println("Resource retrived"); // System.out.println("Printing retrieved resource content: " // + new String((byte[]) getResource.getContent())); // Resource resource = 
registry.newResource(); // resource.setContent("Hello Out there!"); // // String resourcePath = "/abc3"; // registry.put(resourcePath, resource); // // System.out.println("A resource added to: " + resourcePath); // // registry.rateResource(resourcePath, 4); // // System.out.println("Resource rated with 4 stars!"); // Comment comment = new Comment(); // comment.setText("Testing Connection"); // registry.addComment(resourcePath, comment); // System.out.println("Comment added to resource"); // // Resource getResource = registry.get("/abc3"); // System.out.println("Resource retrived"); // System.out.println("Printing retrieved resource content: " // + new String((byte[]) getResource.getContent())); } finally { //Close the session ((WSRegistryServiceClient) registry).logut(); } System.exit(0); }
From source file:edu.ehu.galan.lite.Example.java
public static void main(String[] args) { //initizalize ehcache system System.setProperty("net.sf.ehcache.enableShutdownHook", "true"); if (CacheManager.getCacheManager("ehcacheLitet.xml") == null) { CacheManager.create("ehcacheLitet.xml"); }/*from w ww . jav a 2 s . c o m*/ cache = CacheManager.getInstance().getCache("LiteCache"); //load the corpus to process Corpus corpus = new Corpus("en"); //we spedify the directory and the database mapping (wikipedia in this case) corpus.loadCorpus("testCorpus", Document.SourceType.wikipedia); //will read the document using Illinois NLP utilities PlainTextDocumentReaderLBJEn parser = new PlainTextDocumentReaderLBJEn(); AlgorithmRunner runner = new AlgorithmRunner(); String resources = System.getProperty("user.dir") + "/resources/"; //algorithms initializacion CValueAlgortithm cvalue = new CValueAlgortithm(); cvalue.addNewProcessingFilter(new AdjPrepNounFilter()); TFIDFAlgorithm tf = new TFIDFAlgorithm(new CaseStemmer(CaseStemmer.CaseType.lowercase), "en"); ShallowParsingGrammarAlgortithm sha = new ShallowParsingGrammarAlgortithm( System.getProperty("user.dir") + "/resources/lite/" + "grammars/Cg2EnGrammar.grammar", "cg3/"); KPMinerAlgorithm kp = new KPMinerAlgorithm(); RakeAlgorithm ex = new RakeAlgorithm(); ex.loadStopWordsList("resources/lite/stopWordLists/RakeStopLists/SmartStopListEn"); ex.loadPunctStopWord("resources/lite/stopWordLists/RakeStopLists/RakePunctDefaultStopList"); //algorithm submitting to execute them in parallel runner.submitAlgorithm(kp); runner.submitAlgorithm(cvalue); runner.submitAlgorithm(tf); runner.submitAlgorithm(ex); runner.submitAlgorithm(sha); //load stop list List<String> standardStop = null; try { standardStop = Files.readAllLines(Paths.get(resources + "lite/stopWordLists/standardStopList"), StandardCharsets.UTF_8); } catch (IOException e1x) { Logger.getLogger(Example.class.getName()).log(Level.SEVERE, null, e1x); } //initialize Wikiminer helper (class that interacts with Wikiminer services) 
WikiminnerHelper helper = WikiminnerHelper.getInstance(resources); helper.setLanguage("en"); //we may operate in local mode (using Wikiminer as API instead of interacting via REST api // helper.setLocalMode(false,"/home/angel/nfs/wikiminer/configs/wikipedia"); WikiMinerMap wikimapping = new WikiMinerMap(resources, helper); CValueWikiDisambiguator disambiguator = new CValueWikiDisambiguator(resources, helper); CValueWikiRelationship relate = new CValueWikiRelationship(resources, helper); WikipediaData data = new WikipediaData(resources, helper); helper.openConnection(); //process all the documents in the corpus while (!corpus.getDocQueue().isEmpty()) { Document doc = corpus.getDocQueue().poll(); doc.setSource(Document.SourceType.wikipedia); parser.readSource(doc.getPath()); doc.setSentenceList(parser.getSentenceList()); doc.setTokenList(parser.getTokenizedSentenceList()); System.out.println(doc.getName()); runner.runAlgorihms(doc, resources); doc.applyGlobalStopWordList(standardStop); doc.mapThreshold(1.9f, new String[] { "CValue" }); doc.mapThreshold(0.00034554f, new String[] { "TFIDF" }); doc.removeAndMixTerms(); //map document wikimapping.mapCorpus(doc); disambiguator.disambiguateTopics(doc); //we may disambiguate topics that do not disambiguated correctly DuplicateRemoval.disambiguationRemoval(doc); DuplicateRemoval.topicDuplicateRemoval(doc); //obtain the wiki links,labels, etc data.processDocument(doc); //measure domain relatedness relate.relate(doc); //save the results Document.saveJsonToDir("", doc); } //close wikiminer connection and caches helper.closeConnection(); cache.dispose(); CacheManager.getInstance().shutdown(); System.exit(0); }
From source file:com.relecotech.bbb.api.APIGenerator.java
public static void main(String[] args) { try {//from ww w . j a v a2 s.co m XmlParser.runAPI(new APIGenerator().createAPI("create", "attendeePW=ap&meetingID=random-9736617&moderatorPW=mp&name=random-9736617&record=false&voiceBridge=79380&welcome=%3Cbr%3EWelcome+to+%3Cb%3E%25%25CONFNAME%25%25%3C%2Fb%3E%21")); //join moderator // XmlParser.runAPI(new APIGenerator().createAPI("join", "fullName=User+6361063&meetingID=random-9736617&password=mp")); // XmlParser.runAPI(new APIGenerator().createAPI("join", "fullName=User+6361063&meetingID=random-9736617&password=ap")); // XmlParser.runAPI(new APIGenerator().createAPI("isMeetingRunning", "meetingID=random-9736617")); // XmlParser.runAPI(new APIGenerator().createAPI("getMeetingInfo", "meetingID=random-9736617&password=mp")); // XmlParser.runAPI(new APIGenerator().createAPI("end", "meetingID=random-9736617&password=mp")); //XmlParser.runAPI(new APIGenerator().createAPI("getMeetings", "")); // XmlParser.runAPI(new APIGenerator().createAPI("getDefaultConfigXML", "")); // XmlParser.runAPI(new APIGenerator().createAPI("getRecordings", "meetingID=random-9736617")); // XmlParser.runAPI(new APIGenerator().createAPI("publishRecordings", "publish=false&recordID=random-9736617")); // XmlParser.runAPI(new APIGenerator().createAPI("deleteRecordings", "recordID=random-9736617")); //join from mobile (as moderator // XmlParser.runAPI(new APIGenerator().createAPI("join", "fullName=User+6361063&meetingID=random-9736617&password=mp")); //join from mobile (as attendee) // XmlParser.runAPI(new APIGenerator().createAPI("join", "fullName=User+6361063&meetingID=random-9736617&password=ap")); } catch (TransformerException ex) { Logger.getLogger(APIGenerator.class.getName()).log(Level.SEVERE, null, ex); } catch (Exception ex) { Logger.getLogger(APIGenerator.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:fr.ericlab.mabed.app.Main.java
public static void main(String[] args) throws IOException {
    // Force US locale so numeric formatting in the generated reports is stable.
    Locale.setDefault(Locale.US);
    Configuration configuration = new Configuration();
    Corpus corpus = new Corpus(configuration);
    System.out.println("MABED: Mention-Anomaly-Based Event Detection");
    // No args or "-help": print usage pointer and exit.
    if (args.length == 0 || args[0].equals("-help")) {
        System.out.println("For more information on how to run MABED, see the README.txt file");
    } else {
        if (args[0].equals("-run")) {
            try {
                // Choose between the parallel and centralized implementations
                // based on the configured thread count.
                if (configuration.numberOfThreads > 1) {
                    System.out.println("Running the parallelized implementation with "
                            + configuration.numberOfThreads + " threads (this computer has "
                            + Runtime.getRuntime().availableProcessors() + " available threads)");
                } else {
                    System.out.println("Running the centralized implementation");
                }
                corpus.loadCorpus(configuration.numberOfThreads > 1);
                // "output" accumulates the same text that is printed to the
                // console so it can be written to output/MABED.log at the end.
                String output = "MABED: Mention-Anomaly-Based Event Detection\n" + corpus.output + "\n";
                System.out.println("-------------------------\n" + Util.getDate()
                        + " MABED is running\n-------------------------");
                output += "-------------------------\n" + Util.getDate()
                        + " MABED is running\n-------------------------\n";
                System.out.println(Util.getDate() + " Reading parameters:\n - k = " + configuration.k
                        + ", p = " + configuration.p + ", theta = " + configuration.theta + ", sigma = "
                        + configuration.sigma);
                MABED mabed = new MABED();
                if (configuration.numberOfThreads > 1) {
                    output += mabed.applyParallelized(corpus, configuration);
                } else {
                    output += mabed.applyCentralized(corpus, configuration);
                }
                System.out.println(
                        "--------------------\n" + Util.getDate() + " MABED ended\n--------------------");
                output += "--------------------\n" + Util.getDate() + " MABED ended\n--------------------\n";
                // Write the LaTeX event report and the run log under ./output.
                File outputDir = new File("output");
                if (!outputDir.isDirectory()) {
                    outputDir.mkdir();
                }
                File textFile = new File("output/MABED.tex");
                FileUtils.writeStringToFile(textFile, mabed.events.toLatex(corpus), false);
                textFile = new File("output/MABED.log");
                FileUtils.writeStringToFile(textFile, output, false);
                mabed.events.printLatex(corpus);
            } catch (InterruptedException ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else {
            System.out.println("Unknown option '" + args[0]
                    + "'\nType 'java -jar MABED.jar -help' for more information on how to run MABED");
        }
    }
}
From source file:com.cfets.door.yarn.jboss.JBossClient.java
/** * @param args//from ww w . j av a2 s .co m * Command line arguments */ public static void main(String[] args) { boolean result = false; try { JBossClient client = new JBossClient(); LOG.info("Initializing JBossClient"); try { boolean doRun = client.init(args); if (!doRun) { System.exit(0); } } catch (IllegalArgumentException e) { System.err.println(e.getLocalizedMessage()); client.printUsage(); System.exit(-1); } result = client.run(); } catch (Throwable t) { LOG.log(Level.SEVERE, "Error running JBoss Client", t); System.exit(1); } if (result) { LOG.info("Application completed successfully"); System.exit(0); } LOG.log(Level.SEVERE, "Application failed to complete successfully"); System.exit(2); }
From source file:di.uniba.it.tee2.wiki.Wikidump2Text.java
/** * @param args the command line arguments *//*from ww w. j a v a 2s .c om*/ public static void main(String[] args) { try { CommandLine cmd = cmdParser.parse(options, args); if (cmd.hasOption("l") && cmd.hasOption("d") && cmd.hasOption("o")) { encoding = cmd.getOptionValue("e", "UTF-8"); int counter = 0; try { BufferedWriter writer = new BufferedWriter(new OutputStreamWriter( new GZIPOutputStream(new FileOutputStream(cmd.getOptionValue("o"))), "UTF-8")); WikipediaDumpIterator it = new WikipediaDumpIterator(new File(cmd.getOptionValue("d")), encoding); PageCleaner cleaner = PageCleanerWrapper.getInstance(cmd.getOptionValue("l")); while (it.hasNext()) { WikiPage wikiPage = it.next(); ParsedPage parsedPage = wikiPage.getParsedPage(); if (parsedPage != null) { String title = wikiPage.getTitle(); if (!title.matches(notValidTitle)) { if (parsedPage.getText() != null) { writer.append(cleaner.clean(parsedPage.getText())); writer.newLine(); writer.newLine(); counter++; if (counter % 10000 == 0) { System.out.println(counter); writer.flush(); } } } } } writer.flush(); writer.close(); } catch (Exception ex) { Logger.getLogger(Wikidump2Text.class.getName()).log(Level.SEVERE, null, ex); } System.out.println("Indexed pages: " + counter); } else { HelpFormatter helpFormatter = new HelpFormatter(); helpFormatter.printHelp("Wikipedia dump to text", options, true); } } catch (ParseException ex) { Logger.getLogger(Wikidump2Text.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:languages.TabFile.java
public static void main(String[] args) { if (args[0].equals("optimize")) { Scanner sc = new Scanner(System.in); String targetPath;/*from w w w . j a v a 2 s.c om*/ String originPath; System.out.println("Please enter the path of the original *.tab-files:"); originPath = sc.nextLine(); System.out.println( "Please enter the path where you wish to save the optimized *.tab-files (Directories will be created, existing files with same filenames will be overwritten):"); targetPath = sc.nextLine(); sc.close(); File folder = new File(originPath); File[] listOfFiles = folder.listFiles(); assert listOfFiles != null; for (File file : listOfFiles) { if (!file.getName().equals("LICENSE")) { TabFile origin; try { String originFileName = file.getAbsolutePath(); System.out.print("Reading file '" + originFileName + "'..."); origin = new TabFile(originFileName); System.out.println("Done!"); System.out.print("Optimizing file..."); TabFile res = TabFile.optimizeDictionaries(origin, 2, true); System.out.println("Done!"); String targetFileName = targetPath + File.separator + file.getName(); System.out.println("Saving new file as '" + targetFileName + "'..."); res.save(targetFileName); System.out.println("Done!"); } catch (IOException e) { FOKLogger.log(TabFile.class.getName(), Level.SEVERE, "An error occurred", e); } } } } else if (args[0].equals("merge")) { System.err.println( "Merging dictionaries is not supported anymore. Please checkout commit 1a6fa16 to merge dictionaries."); } }
From source file:gr.forth.ics.isl.preprocessfilter1.controller.Controller.java
public static void main(String[] args) throws XPathExpressionException, ParserConfigurationException, SAXException, IOException, PreprocessFilterException, org.apache.commons.cli.ParseException { PropertyReader prop = new PropertyReader(); //The following block of code is executed if there are arguments from the command line if (args.length > 0) { try {// ww w .ja v a 2 s . co m //The values of the arguments are handled as Option instances Options options = new Options(); CommandLineParser PARSER = new PosixParser(); Option inputFile = new Option("inputFile", true, "input xml file"); Option outputFile = new Option("outputFile", true, "output xml file"); Option parentNode = new Option("parentNode", true, "output xml file"); Option delimeter = new Option("delimeter", true, "output xml file"); Option newParentNode = new Option("newParentNode", true, "output xml file"); Option intermediateNodes = new Option("intermediateNodes", true, "output xml file"); Option intermediateNode = new Option("intermediateNode", true, "output xml file"); options.addOption(inputFile).addOption(outputFile).addOption(parentNode).addOption(newParentNode) .addOption(intermediateNode).addOption(intermediateNodes).addOption(delimeter); CommandLine cli = PARSER.parse(options, args); String inputFileArg = cli.getOptionValue("inputFile"); String outputFileArg = cli.getOptionValue("outputFile"); String parentNodeArg = cli.getOptionValue("parentNode"); String newParentNodeArg = cli.getOptionValue("newParentNode"); String intermediateNodeArg = cli.getOptionValue("intermediateNode"); String intermediateNodesArg = cli.getOptionValue("intermediateNodes"); String delimeterArg = cli.getOptionValue("delimeter"); PreprocessFilterUtilities process = new PreprocessFilterUtilities(); //System.out.println("INPUT:"+inputFileArg); //System.out.println("OUTPUT:"+outputFileArg); //System.out.println("PARENT NODE:"+parentNodeArg); //System.out.println("NEW PARENT NODE:"+newParentNodeArg); 
//System.out.println("INTERMEDIATE NODE:"+intermediateNodeArg); //System.out.println("INTERMEDIATE NODES:"+intermediateNodesArg); //System.out.println("DELIMETER:"+delimeterArg); //The filter's code is executed with the command line arguments as parameters if (process.createOutputFile(inputFileArg, outputFileArg, parentNodeArg, newParentNodeArg, intermediateNodeArg, intermediateNodesArg, delimeterArg)) { System.out.println("Succesfull PreProcessing!!!"); } } catch (PreprocessFilterException ex) { Logger.getLogger(Controller.class.getName()).log(Level.SEVERE, null, ex); throw new PreprocessFilterException("PreProcess Filter Exception:", ex); } } //If there are no command line arguments then the .config file is being used. else { try { String inputFilePathProp = prop.getProperty(inputFilePath); String outputFilePathProp = prop.getProperty(outputFilePath); String parentNodeProp = prop.getProperty(parentNode); String delimeterProp = prop.getProperty(delimeter); String newParentNodeProp = prop.getProperty(newParentNode); String intermediateNodesProp = prop.getProperty(intermediateNodes); String intermediateNodeProp = prop.getProperty(intermediateNode); PreprocessFilterUtilities process = new PreprocessFilterUtilities(); //The filter's code is executed with the .config file's resources as parameters if (process.createOutputFile(inputFilePathProp, outputFilePathProp, parentNodeProp, newParentNodeProp, intermediateNodeProp, intermediateNodesProp, delimeterProp)) { System.out.println("Succesfull PreProcessing!!!"); } } catch (PreprocessFilterException ex) { Logger.getLogger(Controller.class.getName()).log(Level.SEVERE, null, ex); throw new PreprocessFilterException("PreProcess Filter Exception:", ex); } } }
From source file:di.uniba.it.tri.aan.AAN2file.java
/** * Convert ACL dataset in a single file for each paper with year reference * aan_dir output_dir//from w w w . j a va 2 s .c om * * @param args the command line arguments */ public static void main(String[] args) { try { if (args.length > 1) { AAN2file ann = new AAN2file(); ann.build(args[0], args[1]); } else { throw new Exception("Illegal arguments"); } } catch (Exception ex) { Logger.getLogger(AAN2file.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:gr.demokritos.iit.demos.Demo.java
public static void main(String[] args) { try {/*from w w w.ja v a 2s .c o m*/ Options options = new Options(); options.addOption("h", HELP, false, "show help."); options.addOption("i", INPUT, true, "The file containing JSON " + " representations of tweets or SAG posts - 1 per line" + " default file looked for is " + DEFAULT_INFILE); options.addOption("o", OUTPUT, true, "Where to write the output " + " default file looked for is " + DEFAULT_OUTFILE); options.addOption("p", PROCESS, true, "Type of processing to do " + " ner for Named Entity Recognition re for Relation Extraction" + " default is NER"); options.addOption("s", SAG, false, "Whether to process as SAG posts" + " default is off - if passed means process as SAG posts"); CommandLineParser parser = new BasicParser(); CommandLine cmd = parser.parse(options, args); // DEFAULTS String filename = DEFAULT_INFILE; String outfilename = DEFAULT_OUTFILE; String process = NER; boolean isSAG = false; if (cmd.hasOption(HELP)) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("NER + RE extraction module", options); System.exit(0); } if (cmd.hasOption(INPUT)) { filename = cmd.getOptionValue(INPUT); } if (cmd.hasOption(OUTPUT)) { outfilename = cmd.getOptionValue(OUTPUT); } if (cmd.hasOption(SAG)) { isSAG = true; } if (cmd.hasOption(PROCESS)) { process = cmd.getOptionValue(PROCESS); } System.out.println(); System.out.println("Reading from file: " + filename); System.out.println("Process type: " + process); System.out.println("Processing SAG: " + isSAG); System.out.println("Writing to file: " + outfilename); System.out.println(); List<String> jsoni = new ArrayList(); Scanner in = new Scanner(new FileReader(filename)); while (in.hasNextLine()) { String json = in.nextLine(); jsoni.add(json); } PrintWriter writer = new PrintWriter(outfilename, "UTF-8"); System.out.println("Read " + jsoni.size() + " lines from " + filename); if (process.equalsIgnoreCase(RE)) { System.out.println("Running Relation Extraction"); 
System.out.println(); String json = API.RE(jsoni, isSAG); System.out.println(json); writer.print(json); } else { System.out.println("Running Named Entity Recognition"); System.out.println(); jsoni = API.NER(jsoni, isSAG); /* for(String json: jsoni){ NamedEntityList nel = NamedEntityList.fromJSON(json); nel.prettyPrint(); } */ for (String json : jsoni) { System.out.println(json); writer.print(json); } } writer.close(); } catch (ParseException | UnsupportedEncodingException | FileNotFoundException ex) { Logger.getLogger(Demo.class.getName()).log(Level.SEVERE, null, ex); } }