List of usage examples for java.io.PrintWriter.close()
public void close()
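Closes the stream and releases any system resources associated with it. Note that PrintWriter never throws IOException from its print methods; errors must be queried with checkError(). Before the examples below, here is a minimal sketch of the typical pattern (the class name and the file name "output.txt" are placeholders for this sketch, not part of any example source file):

import java.io.IOException;
import java.io.PrintWriter;

public class PrintWriterCloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources calls close() automatically, even if an exception is thrown
        try (PrintWriter out = new PrintWriter("output.txt", "UTF-8")) {
            out.println("hello");
        }

        // With an explicit close(), checkError() reports whether any write or flush failed,
        // since PrintWriter swallows IOExceptions internally.
        PrintWriter out = new PrintWriter("output.txt", "UTF-8");
        out.println("world");
        out.close();
        if (out.checkError()) {
            System.err.println("write to output.txt failed");
        }
    }
}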
From source file:com.endava.webfundamentals.Main.java
public static void main(String[] args) throws JsonParseException, JsonMappingException, IOException {
    HttpClient httpClient = HttpClientBuilder.create().build();
    HttpGet httpRequest = new HttpGet("http://petstore.swagger.wordnik.com/api/api-docs");
    HttpResponse httpResponse = httpClient.execute(httpRequest);

    ObjectMapper objectMapper = new ObjectMapper();
    PetStore petStore = objectMapper.readValue(httpResponse.getEntity().getContent(), PetStore.class);

    PrintWriter out = new PrintWriter("PetStore.html");
    out.println("<html>");
    out.println("<header>");
    out.println(petStore.getInfo().getTitle());
    out.println("</header>");
    out.println("<body>");
    out.println("Api Version " + petStore.getApiVersion());
    out.println("Swagger Version " + petStore.getSwaggerVersion());
    out.println("<p>");
    out.println(petStore.getInfo().getDescription());
    out.println("</p>");
    out.println("<p>");
    out.println(petStore.getInfo().getContact());
    out.println("</p>");
    out.println(petStore.getInfo().getLicense());
    out.println(petStore.getInfo().getLicenseUrl());
    out.println("<p>");
    out.println(petStore.getInfo().getTermsOfServiceUrl());
    out.println("</p>");
    out.println("</body>");
    out.println("</html>");
    out.close();
}
From source file:gobblin.compaction.CompactionRunner.java
public static void main(String[] args) throws ConfigurationException, IOException, SQLException {
    if (args.length != 1) {
        LOG.info("Proper usage: java -jar compaction.jar <global-config-file>\n" + "or\n"
                + "hadoop jar compaction.jar <global-config-file>\n" + "or\n"
                + "yarn jar compaction.jar <global-config-file>\n");
        System.exit(1);
    }

    Configuration globalConfig = new PropertiesConfiguration(args[0]);
    properties = ConfigurationConverter.getProperties(globalConfig);

    File compactionConfigDir = new File(properties.getProperty(COMPACTION_CONFIG_DIR));
    File[] listOfFiles = compactionConfigDir.listFiles();
    if (listOfFiles == null || listOfFiles.length == 0) {
        System.err.println("No compaction configuration files found under " + compactionConfigDir);
        System.exit(1);
    }

    int numOfJobs = 0;
    for (File file : listOfFiles) {
        if (file.isFile() && !file.getName().startsWith(".")) {
            numOfJobs++;
        }
    }
    LOG.info("Found " + numOfJobs + " compaction tasks.");

    PrintWriter pw = new PrintWriter(new OutputStreamWriter(
            new FileOutputStream(properties.getProperty(TIMING_FILE, TIMING_FILE_DEFAULT)),
            Charset.forName("UTF-8")));

    for (File file : listOfFiles) {
        if (file.isFile() && !file.getName().startsWith(".")) {
            Configuration jobConfig = new PropertiesConfiguration(file.getAbsolutePath());
            jobProperties = ConfigurationConverter.getProperties(jobConfig);
            long startTime = System.nanoTime();
            compact();
            long endTime = System.nanoTime();
            long elapsedTime = endTime - startTime;
            double seconds = TimeUnit.NANOSECONDS.toSeconds(elapsedTime);
            pw.printf("%s: %f%n", file.getAbsolutePath(), seconds);
        }
    }

    pw.close();
}
From source file:de.citec.sc.matoll.process.Matoll_CreateMax.java
public static void main(String[] args) throws IOException, ParserConfigurationException, SAXException,
        InstantiationException, IllegalAccessException, ClassNotFoundException, Exception {
    String directory;
    String gold_standard_lexicon;
    String output_lexicon;
    String configFile;
    Language language;
    String output;

    Stopwords stopwords = new Stopwords();

    HashMap<String, Double> maxima;
    maxima = new HashMap<String, Double>();

    if (args.length < 3) {
        System.out.print("Usage: Matoll --mode=train/test <DIRECTORY> <CONFIG>\n");
        return;
    }

    // Classifier classifier;

    directory = args[1];
    configFile = args[2];

    final Config config = new Config();
    config.loadFromFile(configFile);

    gold_standard_lexicon = config.getGoldStandardLexicon();
    String model_file = config.getModel();
    output_lexicon = config.getOutputLexicon();
    output = config.getOutput();
    language = config.getLanguage();

    LexiconLoader loader = new LexiconLoader();
    Lexicon gold = loader.loadFromFile(gold_standard_lexicon);

    Set<String> uris = new HashSet<>();
    // Map<Integer,String> sentence_list = new HashMap<>();
    Map<Integer, Set<Integer>> mapping_words_sentences = new HashMap<>();

    // consider only properties
    for (LexicalEntry entry : gold.getEntries()) {
        try {
            for (Sense sense : entry.getSenseBehaviours().keySet()) {
                String tmp_uri = sense.getReference().getURI().replace("http://dbpedia.org/ontology/", "");
                if (!Character.isUpperCase(tmp_uri.charAt(0))) {
                    uris.add(sense.getReference().getURI());
                }
            }
        } catch (Exception e) {
        }
    }

    ModelPreprocessor preprocessor = new ModelPreprocessor(language);
    preprocessor.setCoreferenceResolution(false);
    Set<String> dep = new HashSet<>();
    dep.add("prep");
    dep.add("appos");
    dep.add("nn");
    dep.add("dobj");
    dep.add("pobj");
    dep.add("num");
    preprocessor.setDEP(dep);

    List<File> list_files = new ArrayList<>();
    if (config.getFiles().isEmpty()) {
        File folder = new File(directory);
        File[] files = folder.listFiles();
        for (File file : files) {
            if (file.toString().contains(".ttl"))
                list_files.add(file);
        }
    } else {
        list_files.addAll(config.getFiles());
    }
    System.out.println(list_files.size());

    int sentence_counter = 0;
    Map<String, Set<Integer>> bag_words_uri = new HashMap<>();
    Map<String, Integer> mapping_word_id = new HashMap<>();

    for (File file : list_files) {
        Model model = RDFDataMgr.loadModel(file.toString());
        for (Model sentence : getSentences(model)) {
            String reference = getReference(sentence);
            reference = reference.replace("http://dbpedia/", "http://dbpedia.org/");
            if (uris.contains(reference)) {
                sentence_counter += 1;
                Set<Integer> words_ids = getBagOfWords(sentence, stopwords, mapping_word_id);
                // TODO: add sentence preprocessing
                String obj = getObject(sentence);
                String subj = getSubject(sentence);
                preprocessor.preprocess(sentence, subj, obj, language);
                // TODO: also return marker if object or subject of property
                // (in SPARQL this has to be optional of course)
                String parsed_sentence = getParsedSentence(sentence);

                try (FileWriter fw = new FileWriter("mapping_sentences_to_ids_goldstandard.tsv", true);
                        BufferedWriter bw = new BufferedWriter(fw);
                        PrintWriter out = new PrintWriter(bw)) {
                    out.println(sentence_counter + "\t" + parsed_sentence);
                } catch (IOException e) {
                    e.printStackTrace();
                }

                for (Integer word_id : words_ids) {
                    if (mapping_words_sentences.containsKey(word_id)) {
                        Set<Integer> tmp_set = mapping_words_sentences.get(word_id);
                        tmp_set.add(sentence_counter);
                        mapping_words_sentences.put(word_id, tmp_set);
                    } else {
                        Set<Integer> tmp_set = new HashSet<>();
                        tmp_set.add(sentence_counter);
                        mapping_words_sentences.put(word_id, tmp_set);
                    }
                }

                if (bag_words_uri.containsKey(reference)) {
                    Set<Integer> tmp = bag_words_uri.get(reference);
                    for (Integer w : words_ids) {
                        tmp.add(w);
                    }
                    bag_words_uri.put(reference, tmp);
                } else {
                    Set<Integer> tmp = new HashSet<>();
                    for (Integer w : words_ids) {
                        tmp.add(w);
                    }
                    bag_words_uri.put(reference, tmp);
                }
            }
        }
        model.close();
    }

    PrintWriter writer = new PrintWriter("bag_of_words_only_goldstandard.tsv");
    StringBuilder string_builder = new StringBuilder();
    for (String r : bag_words_uri.keySet()) {
        string_builder.append(r);
        for (Integer i : bag_words_uri.get(r)) {
            string_builder.append("\t");
            string_builder.append(i);
        }
        string_builder.append("\n");
    }
    writer.write(string_builder.toString());
    writer.close();

    writer = new PrintWriter("mapping_words_to_sentenceids_goldstandard.tsv");
    string_builder = new StringBuilder();
    for (Integer w : mapping_words_sentences.keySet()) {
        string_builder.append(w);
        for (int i : mapping_words_sentences.get(w)) {
            string_builder.append("\t");
            string_builder.append(i);
        }
        string_builder.append("\n");
    }
    writer.write(string_builder.toString());
    writer.close();
}
From source file:com.aestel.chemistry.openEye.fp.DistMatrix.java
public static void main(String... args) throws IOException {
    long start = System.currentTimeMillis();

    // create command line Options object
    Options options = new Options();
    Option opt = new Option("i", true, "input file [.tsv from FingerPrinter]");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("o", true, "output file [.tsv]");
    opt.setRequired(true);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();
    if (args.length != 0)
        exitWithHelp(options);

    String file = cmd.getOptionValue("i");
    BufferedReader in = new BufferedReader(new FileReader(file));

    file = cmd.getOptionValue("o");
    PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file)));

    ArrayList<Fingerprint> fps = new ArrayList<Fingerprint>();
    ArrayList<String> ids = new ArrayList<String>();

    String line;
    while ((line = in.readLine()) != null) {
        String[] parts = line.split("\t");
        if (parts.length == 3) {
            ids.add(parts[0]);
            fps.add(new ByteFingerprint(parts[2]));
        }
    }
    in.close();

    out.print("ID");
    for (int i = 0; i < ids.size(); i++) {
        out.print('\t');
        out.print(ids.get(i));
    }
    out.println();

    for (int i = 0; i < ids.size(); i++) {
        out.print(ids.get(i));
        Fingerprint fp1 = fps.get(i);
        for (int j = 0; j <= i; j++) {
            out.printf("\t%.4g", fp1.tanimoto(fps.get(j)));
        }
        out.println();
    }
    out.close();

    System.err.printf("Done %d fingerprints in %.2gsec\n", fps.size(),
            (System.currentTimeMillis() - start) / 1000D);
}
From source file:com.kse.bigdata.main.Driver.java
public static void main(String[] args) throws Exception {
    /**********************************************************************************
     * Merge the source files into one.
     * Should change the directories of each file before executing the program.
     **********************************************************************************/
    // String inputFileDirectory = "/media/bk/??/BigData_Term_Project/Debug";
    // String resultFileDirectory = "/media/bk/??/BigData_Term_Project/debug.csv";
    // File resultFile = new File(resultFileDirectory);
    // if(!resultFile.exists())
    //     new SourceFileMerger(inputFileDirectory, resultFileDirectory).mergeFiles();

    /**********************************************************************************
     * Hadoop Operation.
     * Before starting, check the length of the sequence we want to predict.
     **********************************************************************************/
    Configuration conf = new Configuration();

    // Enable MapReduce intermediate compression as Snappy
    conf.setBoolean("mapred.compress.map.output", true);
    conf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");

    // Enable Profiling
    // conf.setBoolean("mapred.task.profile", true);

    String testPath = null;
    String inputPath = null;
    String outputPath = null;
    int sampleSize = 1;
    ArrayList<String> results = new ArrayList<String>();

    for (int index = 0; index < args.length; index++) {
        /*
         * Mandatory commands
         */
        // Extract input path string from command line.
        if (args[index].equals("-in"))
            inputPath = args[index + 1];

        // Extract output path string from command line.
        if (args[index].equals("-out"))
            outputPath = args[index + 1];

        // Extract test data path string from command line.
        if (args[index].equals("-test"))
            testPath = args[index + 1];

        /*
         * Optional commands
         */
        // Extract the number of neighbors.
        if (args[index].equals("-nn"))
            conf.setInt(Reduce.NUMBER_OF_NEAREAST_NEIGHBOR, Integer.parseInt(args[index + 1]));

        // Whether the job uses normalization or not.
        if (args[index].equals("-norm"))
            conf.setBoolean(Map.NORMALIZATION, true);

        // Extract the number of samples to test.
        if (args[index].equals("-s"))
            sampleSize = Integer.valueOf(args[index + 1]);

        // Whether the job uses mean or median [default: mean].
        if (args[index].equals("-med"))
            conf.setBoolean(Reduce.MEDIAN, true);
    }

    String outputFileName = "part-r-00000";

    SequenceSampler sampler = new SequenceSampler(testPath, sampleSize);
    LinkedList<Sequence> testSequences = sampler.getRandomSample();

    // Test Sequence
    // String testSeqString = "13.591-13.674-13.778-13.892-13.958-14.049-14.153-14.185-14.169-14.092-13.905-13.702-13.438-13.187-13.0-12.914-12.868-12.766-12.62-12.433-12.279-12.142-12.063-12.025-100";
    // Sequence testSeq = new Sequence(testSeqString);
    // LinkedList<Sequence> testSequences = new LinkedList<>();
    // testSequences.add(testSeq);

    for (Sequence seq : testSequences) {
        /* ******************** Hadoop Launch *********************** */
        System.out.println(seq.getTailString());

        conf.set(Map.INPUT_SEQUENCE, seq.toString());

        Job job = new Job(conf);
        job.setJarByClass(Driver.class);
        job.setJobName("term-project-driver");

        job.setMapperClass(Map.class);
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(Text.class);

        // Should think of another way to implement the combiner class;
        // the current implementation is not helpful to the job.
        // job.setCombinerClass(Combiner.class);

        // Set the number of reduce tasks to 1 to keep the 100 nearest neighbors in a sorted set.
        job.setNumReduceTasks(1);
        job.setReducerClass(Reduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        job.waitForCompletion(true);

        /*
         * If the job finishes, get its result and store it in the results list.
         */
        try {
            FileSystem hdfs = FileSystem.get(new Configuration());
            BufferedReader fileReader = new BufferedReader(
                    new InputStreamReader(hdfs.open(new Path(outputPath + "/" + outputFileName))));

            String line;
            while ((line = fileReader.readLine()) != null) {
                results.add(seq.getSeqString() + " " + line);
            }

            fileReader.close();

            hdfs.delete(new Path(outputPath), true);
            hdfs.close();
        } catch (IOException e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /*
     * If all jobs finish, store their results in the output/result.csv file.
     */
    String finalOutputPath = "output/result.csv";
    try {
        FileSystem hdfs = FileSystem.get(new Configuration());
        Path file = new Path(finalOutputPath);
        if (hdfs.exists(file)) {
            hdfs.delete(file, true);
        }

        OutputStream os = hdfs.create(file);
        PrintWriter printWriter = new PrintWriter(new OutputStreamWriter(os, "UTF-8"));

        // CSV file header
        printWriter.println("Actual,Predicted,MER,MAE");
        printWriter.flush();

        for (String result : results) {
            String[] tokens = result.split("\\s+");
            printWriter.println(tokens[0] + "," + tokens[1] + "," + tokens[2] + "," + tokens[3]);
            printWriter.flush();
        }
        printWriter.close();

        hdfs.close();
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
    }
}
From source file:gr.demokritos.iit.demos.Demo.java
public static void main(String[] args) {
    try {
        Options options = new Options();
        options.addOption("h", HELP, false, "show help.");
        options.addOption("i", INPUT, true, "The file containing JSON "
                + " representations of tweets or SAG posts - 1 per line"
                + " default file looked for is " + DEFAULT_INFILE);
        options.addOption("o", OUTPUT, true, "Where to write the output "
                + " default file looked for is " + DEFAULT_OUTFILE);
        options.addOption("p", PROCESS, true, "Type of processing to do "
                + " ner for Named Entity Recognition re for Relation Extraction"
                + " default is NER");
        options.addOption("s", SAG, false, "Whether to process as SAG posts"
                + " default is off - if passed means process as SAG posts");

        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);

        // DEFAULTS
        String filename = DEFAULT_INFILE;
        String outfilename = DEFAULT_OUTFILE;
        String process = NER;
        boolean isSAG = false;

        if (cmd.hasOption(HELP)) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("NER + RE extraction module", options);
            System.exit(0);
        }
        if (cmd.hasOption(INPUT)) {
            filename = cmd.getOptionValue(INPUT);
        }
        if (cmd.hasOption(OUTPUT)) {
            outfilename = cmd.getOptionValue(OUTPUT);
        }
        if (cmd.hasOption(SAG)) {
            isSAG = true;
        }
        if (cmd.hasOption(PROCESS)) {
            process = cmd.getOptionValue(PROCESS);
        }

        System.out.println();
        System.out.println("Reading from file: " + filename);
        System.out.println("Process type: " + process);
        System.out.println("Processing SAG: " + isSAG);
        System.out.println("Writing to file: " + outfilename);
        System.out.println();

        List<String> jsoni = new ArrayList();
        Scanner in = new Scanner(new FileReader(filename));
        while (in.hasNextLine()) {
            String json = in.nextLine();
            jsoni.add(json);
        }
        PrintWriter writer = new PrintWriter(outfilename, "UTF-8");
        System.out.println("Read " + jsoni.size() + " lines from " + filename);

        if (process.equalsIgnoreCase(RE)) {
            System.out.println("Running Relation Extraction");
            System.out.println();
            String json = API.RE(jsoni, isSAG);
            System.out.println(json);
            writer.print(json);
        } else {
            System.out.println("Running Named Entity Recognition");
            System.out.println();
            jsoni = API.NER(jsoni, isSAG);
            /*
            for (String json : jsoni) {
                NamedEntityList nel = NamedEntityList.fromJSON(json);
                nel.prettyPrint();
            }
            */
            for (String json : jsoni) {
                System.out.println(json);
                writer.print(json);
            }
        }
        writer.close();
    } catch (ParseException | UnsupportedEncodingException | FileNotFoundException ex) {
        Logger.getLogger(Demo.class.getName()).log(Level.SEVERE, null, ex);
    }
}
From source file:SendMail.java
public static void main(String[] args) {
    try {
        // If the user specified a mailhost, tell the system about it.
        if (args.length >= 1)
            System.getProperties().put("mail.host", args[0]);

        // A Reader stream to read from the console
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));

        // Ask the user for the from, to, and subject lines
        System.out.print("From: ");
        String from = in.readLine();
        System.out.print("To: ");
        String to = in.readLine();
        System.out.print("Subject: ");
        String subject = in.readLine();

        // Establish a network connection for sending mail
        URL u = new URL("mailto:" + to);       // Create a mailto: URL
        URLConnection c = u.openConnection();  // Create its URLConnection
        c.setDoInput(false);                   // Specify no input from it
        c.setDoOutput(true);                   // Specify we'll do output
        System.out.println("Connecting...");   // Tell the user
        System.out.flush();                    // Tell them right now
        c.connect();                           // Connect to mail host
        PrintWriter out =                      // Get output stream to host
                new PrintWriter(new OutputStreamWriter(c.getOutputStream()));

        // We're talking to the SMTP server now.
        // Write out mail headers. Don't let users fake the From address
        out.print("From: \"" + from + "\" <" + System.getProperty("user.name") + "@"
                + InetAddress.getLocalHost().getHostName() + ">\r\n");
        out.print("To: " + to + "\r\n");
        out.print("Subject: " + subject + "\r\n");
        out.print("\r\n"); // blank line to end the list of headers

        // Now ask the user to enter the body of the message
        System.out.println("Enter the message. " + "End with a '.' on a line by itself.");

        // Read message line by line and send it out.
        String line;
        for (;;) {
            line = in.readLine();
            if ((line == null) || line.equals("."))
                break;
            out.print(line + "\r\n");
        }

        // Close (and flush) the stream to terminate the message
        out.close();

        // Tell the user it was successfully sent.
        System.out.println("Message sent.");
    } catch (Exception e) {
        // Handle any exceptions, print error message.
        System.err.println(e);
        System.err.println("Usage: java SendMail [<mailhost>]");
    }
}
From source file:flashcrawler.FlashCrawler.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws FileNotFoundException {
    Scanner scn = new Scanner(new File("input.txt"));
    ArrayList<String> ins = new ArrayList();
    while (scn.hasNextLine()) {
        String input = scn.nextLine();
        ins.add(input);
    }

    // Append error messages to error-log.txt
    PrintWriter writer = new PrintWriter(new FileOutputStream(new File("error-log.txt"), true));

    for (String name : ins) {
        File offlinePath = new File("/games/" + name + ".swf");
        String onlinePath = "http://wsh.gamib.com/x/" + name + "/" + name + ".swf";
        System.out.println("Downloading " + onlinePath + " into " + offlinePath);

        URL url = null;
        try {
            System.out.println("...");
            url = new URL(onlinePath);
        } catch (MalformedURLException ex) {
            System.out.println("Failed to create url object");
            writer.println("Error when creating url: " + onlinePath + "\tname");
        }
        try {
            System.out.println("...");
            FileUtils.copyURLToFile(url, offlinePath);
            System.out.println("Success.");
        } catch (IOException ex) {
            System.out.println("Error when downloading game: " + offlinePath);
            writer.println("Error when downloading game: " + offlinePath);
        }
    }
    writer.close();
    System.out.println("Process complete!");
}
From source file:edu.usc.ee599.CommunityStats.java
public static void main(String[] args) throws Exception {
    File dir = new File("results5");
    PrintWriter writer = new PrintWriter(new FileWriter("results5_stats.txt"));

    File[] files = dir.listFiles();
    DescriptiveStatistics statistics1 = new DescriptiveStatistics();
    DescriptiveStatistics statistics2 = new DescriptiveStatistics();

    for (File file : files) {
        BufferedReader reader = new BufferedReader(new FileReader(file));
        String line1 = reader.readLine();
        String line2 = reader.readLine();

        int balanced = Integer.parseInt(line1.split(",")[1]);
        int unbalanced = Integer.parseInt(line2.split(",")[1]);

        double bp = (double) balanced / (double) (balanced + unbalanced);
        double up = (double) unbalanced / (double) (balanced + unbalanced);

        statistics1.addValue(bp);
        statistics2.addValue(up);
    }

    writer.println("AVG Balanced %: " + statistics1.getMean());
    writer.println("AVG Unbalanced %: " + statistics2.getMean());
    writer.println("STD Balanced %: " + statistics1.getStandardDeviation());
    writer.println("STD Unbalanced %: " + statistics2.getStandardDeviation());

    writer.flush();
    writer.close();
}
From source file:Who.java
public static void main(String[] v) {
    Socket s = null;
    PrintWriter out = null;
    BufferedReader in = null;
    try {
        // Connect to port 79 (the standard finger port) on the host.
        String hostname = "www.java2s.com";
        s = new Socket(hostname, 79);

        // Set up the streams
        out = new PrintWriter(new OutputStreamWriter(s.getOutputStream()));
        in = new BufferedReader(new InputStreamReader(s.getInputStream()));

        // Send a blank line to the finger server, telling it that we want
        // a listing of everyone logged on instead of information about an
        // individual user.
        out.print("\n");
        out.flush(); // Send it out

        // Now read the server's response.
        // The server should send lines terminated with \n or \r.
        String line;
        while ((line = in.readLine()) != null) {
            System.out.println(line);
        }
        System.out.println("Who's Logged On: " + hostname);
    } catch (IOException e) {
        System.out.println("Who's Logged On: Error");
    }
    // Close the streams!
    finally {
        try {
            in.close();
            out.close();
            s.close();
        } catch (Exception e) {
        }
    }
}
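Several of the examples above wrap a PrintWriter around another stream (a socket output stream, a FileOutputStream, or an HDFS stream) and then call close() only on the PrintWriter. That single call is enough: close() flushes the writer and closes the wrapped stream as well. A minimal sketch of that behavior, using a hypothetical in-memory byte-array sink in place of a socket or file (the class name is a placeholder):

import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;

public class CloseChainExample {
    public static void main(String[] args) {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        PrintWriter out = new PrintWriter(
                new OutputStreamWriter(sink, StandardCharsets.UTF_8));
        out.print("finger\r\n");
        // close() flushes the PrintWriter and closes the wrapped OutputStreamWriter,
        // which in turn closes the underlying stream, so no separate flush() is needed.
        out.close();
        System.out.println(sink.toString());
    }
}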