List of usage examples for the java.io.BufferedWriter constructor
public BufferedWriter(Writer out)
From source file:eu.socialSensor.diverseImages2014.datasetCreation.MakeRelevanceDataset.java
public void writeFile(String filename, String[] individualNorms, String normalizeLevel1, String normalizeFinal, boolean sparse) throws Exception { BufferedWriter out = new BufferedWriter(new FileWriter(new File("datasets/relevance/" + filename))); // writing the header of the arff out.write(generateArffHeader());// ww w . ja v a 2 s . c o m // writing the actual data ArrayList<MEDI2014Location> locs = dataStore.getLocationList(); // int locIndex = 0; for (MEDI2014Location loc : locs) { System.out.println(loc.getLocationName()); ArrayList<MEDI2014ImageBase> ims = loc.getImageList(); for (MEDI2014ImageBase im : ims) { System.out.println( "Location: " + loc.getQueryId() + "-" + loc.getLocationName() + " Image: " + im.getId()); // the wikiIndex parameter is not used in this case out.write(imageToARFF(im, loc, individualNorms, normalizeLevel1, normalizeFinal, sparse, 0)); } ims = loc.getImageListWiki(); int wikiIndex = 0; for (MEDI2014ImageBase im : ims) { try { out.write(imageToARFF(im, loc, individualNorms, normalizeLevel1, normalizeFinal, sparse, wikiIndex)); } catch (Exception e) { System.err.println("Image: " + im.getImageFilename()); e.printStackTrace(); System.exit(1); } wikiIndex++; } // locIndex++; } out.close(); }
From source file:org.ivan.service.ExcelExporter.java
/**
 * Exports the given persons as a tab-separated text file (Excel-importable).
 *
 * <p>The first line contains the headers joined by tabs; each following line
 * contains one person's fields (id, name, gender, address id, street, city,
 * country) joined by tabs.
 *
 * @param objects  persons to export, one row per person
 * @param headers  column headers for the first row
 * @param fileName path of the file to create
 * @return the file that was written
 * @throws IOException if the file cannot be created or written
 */
public File createExcelExport(List<Person> objects, List<String> headers, String fileName)
        throws IOException {
    File xlsFile = new File(fileName);
    // try-with-resources closes the writer even when a write throws
    // (the original leaked the FileWriter in that case)
    try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(xlsFile))) {
        for (int i = 0; i < headers.size(); i++) {
            String cup = headers.get(i);
            // tab-separate all but the last header
            if (i < headers.size() - 1) {
                cup += "\t";
            }
            bufferedWriter.write(cup);
        }
        bufferedWriter.newLine();
        for (Person obj : objects) {
            bufferedWriter.write(obj.getPersonId() + "\t");
            bufferedWriter.write(obj.getPersonName() + "\t");
            bufferedWriter.write(obj.getGender() + "\t");
            bufferedWriter.write(obj.getAddress().getAddressId() + "\t");
            bufferedWriter.write(obj.getAddress().getStreet() + "\t");
            bufferedWriter.write(obj.getAddress().getCity() + "\t");
            bufferedWriter.write(obj.getAddress().getCountry() + "\n");
        }
    }
    return xlsFile;
}
From source file:com.predic8.membrane.annot.generator.Parsers.java
public void writeParserDefinitior(Model m) throws IOException { for (MainInfo main : m.getMains()) { List<Element> sources = new ArrayList<Element>(); sources.addAll(main.getInterceptorElements()); sources.add(main.getElement());/* w ww. j av a 2 s . co m*/ try { FileObject o = processingEnv.getFiler().createSourceFile( main.getAnnotation().outputPackage() + ".NamespaceHandlerAutoGenerated", sources.toArray(new Element[0])); BufferedWriter bw = new BufferedWriter(o.openWriter()); try { bw.write("/* Copyright 2012,2013 predic8 GmbH, www.predic8.com\r\n" + "\r\n" + " Licensed under the Apache License, Version 2.0 (the \"License\");\r\n" + " you may not use this file except in compliance with the License.\r\n" + " You may obtain a copy of the License at\r\n" + "\r\n" + " http://www.apache.org/licenses/LICENSE-2.0\r\n" + "\r\n" + " Unless required by applicable law or agreed to in writing, software\r\n" + " distributed under the License is distributed on an \"AS IS\" BASIS,\r\n" + " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n" + " See the License for the specific language governing permissions and\r\n" + " limitations under the License. 
*/\r\n" + "\r\n" + "package " + main.getAnnotation().outputPackage() + ";\r\n" + "\r\n" + "/**\r\n" + " * Automatically generated by " + Parsers.class.getName() + ".\r\n" + " */\r\n" + "public class NamespaceHandlerAutoGenerated {\r\n" + "\r\n" + " public static void registerBeanDefinitionParsers(NamespaceHandler nh) {\r\n"); for (ElementInfo i : main.getIis()) { if (i.getAnnotation().topLevel()) { bw.write(" nh.registerGlobalBeanDefinitionParser(\"" + i.getAnnotation().name() + "\", new " + i.getParserClassSimpleName() + "());\r\n"); } else { for (ChildElementDeclarationInfo cedi : i.getUsedBy()) { for (ChildElementInfo cei : cedi.getUsedBy()) { TypeElement element = cei.getEi().getElement(); String clazz = AnnotUtils.getRuntimeClassName(element); bw.write(" nh.registerLocalBeanDefinitionParser(\"" + clazz + "\", \"" + i.getAnnotation().name() + "\", new " + i.getParserClassSimpleName() + "());\r\n"); } } } } bw.write(" }\r\n" + "}\r\n" + ""); } finally { bw.close(); } } catch (FilerException e) { if (e.getMessage().contains("Source file already created")) return; throw e; } } }
From source file:matrix.TextUrlMatrix.java
public void textUrlMatrix() throws UnsupportedEncodingException, FileNotFoundException, IOException, ParseException { double a = 0.7; CosSim cossim = new CosSim(); JSONParser jParser = new JSONParser(); BufferedReader in = new BufferedReader(new InputStreamReader( new FileInputStream("/Users/nSabri/Desktop/tweetMatris/userTweets.json"), "ISO-8859-9")); JSONArray jArray = (JSONArray) jParser.parse(in); BufferedReader in2 = new BufferedReader(new InputStreamReader( new FileInputStream("/Users/nSabri/Desktop/tweetMatris/userTweetsUrls.json"), "ISO-8859-9")); JSONArray jArray2 = (JSONArray) jParser.parse(in2); File fout = new File("/Users/nSabri/Desktop/textUrlMatris.csv"); FileOutputStream fos = new FileOutputStream(fout); BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fos)); for (int i = 0; i < 100; i++) { for (int j = 0; j < 100; j++) { JSONObject tweet1text = (JSONObject) jArray.get(i); JSONObject tweet2text = (JSONObject) jArray.get(j); JSONObject tweet1url = (JSONObject) jArray2.get(i); JSONObject tweet2url = (JSONObject) jArray2.get(j); String tweetText1 = tweet1text.get("tweets").toString(); String tweetText2 = tweet2text.get("tweets").toString(); String tweetUrl1 = tweet1url.get("title").toString() + tweet1url.get("meta").toString(); String tweetUrl2 = tweet2url.get("title").toString() + tweet1url.get("meta").toString(); double CosSimValueText = cossim.Cosine_Similarity_Score(tweetText1, tweetText2); double CosSimValueUrl = cossim.Cosine_Similarity_Score(tweetUrl1, tweetUrl2); double TextUrlSimValue = (a * CosSimValueText) + ((1 - a) * CosSimValueUrl); TextUrlSimValue = Double.parseDouble(new DecimalFormat("##.###").format(TextUrlSimValue)); bw.write(Double.toString(TextUrlSimValue) + ", "); }//from ww w .j a va 2s. co m bw.newLine(); } bw.close(); }
From source file:com.wesley.urban_cuts.services.barber_services.Write_to_file.java
/**
 * Appends one log entry — the three data fields and the current timestamp —
 * to "Urban Cuts Log.txt" in UTF-8.
 *
 * <p>NOTE(review): the file is opened without the append flag, so each call
 * overwrites the previous log content — confirm whether appending was intended
 * (FileOutputStream("...", true)).
 *
 * @param data1 first field of the log line
 * @param data2 second field of the log line
 * @param data3 third field of the log line
 */
public void write_to_file(String data1, String data2, String data3) {
    Date d = new Date();
    // try-with-resources closes the writer safely; the original's manual
    // close-in-finally could NPE if the writer was never constructed and
    // silently swallowed close failures.
    try (Writer writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream("Urban Cuts Log" + ".txt"), "utf-8"))) {
        writer.append("");
        writer.append(data1 + " " + data2 + " " + data3 + " " + d);
    } catch (IOException ex) {
        System.out.println("couldn't write to file");
    }
}
From source file:com.thinkit.operationsys.util.FileUtil.java
public static void copyFile(String src, String dest) { System.out.println("copy"); File f1 = new File(src); File f2 = new File(dest); //int b=0;// w ww . j ava 2s. c om String line = ""; try { FileReader reader = new FileReader(f1); FileWriter writer = new FileWriter(f2); BufferedReader br = new BufferedReader(reader); BufferedWriter bw = new BufferedWriter(writer); while ((line = br.readLine()) != null) { System.out.println(line); bw.write(line); bw.newLine(); bw.flush(); } reader.close(); writer.close(); } catch (Exception e) { e.printStackTrace(); } }
From source file:com.creactiviti.piper.plugin.ffmpeg.Vstitch.java
/**
 * Concatenates the video chunks listed in the task into a single output file.
 *
 * <p>Writes an ffmpeg concat list ("file '...'" per chunk) to a temporary
 * file, runs ffmpeg in concat/copy mode against it, and always deletes the
 * temporary file afterwards.
 *
 * @param aTask task carrying the "chunks" list and the required "output" path
 * @return always {@code null}
 * @throws Exception if the list file cannot be written or ffmpeg fails
 */
@Override
public Object handle(Task aTask) throws Exception {
    final List<String> chunks = aTask.getList("chunks", String.class);
    final File listFile = File.createTempFile("_chunks", ".txt");
    try {
        // write the concat manifest; the writer is closed before ffmpeg runs
        try (Writer manifest = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(listFile, true), "UTF-8"))) {
            for (String chunk : chunks) {
                manifest.append(String.format("file '%s'", chunk)).append("\n");
            }
        }
        final SimpleTaskExecution concatTask = SimpleTaskExecution.create();
        final List<String> ffmpegOptions = Arrays.asList("-y", "-f", "concat", "-safe", "0", "-i",
                listFile.getAbsolutePath(), "-c", "copy", aTask.getRequiredString("output"));
        concatTask.set("options", ffmpegOptions);
        ffmpeg.handle(concatTask);
    } finally {
        // best-effort cleanup of the temporary manifest
        FileUtils.deleteQuietly(listFile);
    }
    return null;
}
From source file:net.estinet.gFeatures.Feature.gHub.config.gHubConfig.java
/**
 * Creates the gHub configuration directory/files and, if missing, seeds
 * spawn.txt with a default spawn location: the world name "world" followed by
 * five "0" lines (x, y, z, yaw, pitch — presumably; TODO confirm against the
 * reader of spawn.txt).
 */
public void setup() {
    Config c = new Config();
    c.createDirectory("plugins/gFeatures/gHub", "Created gHub directory!");
    c.createFile("plugins/gFeatures/gHub/Config.yml", "Created gHub config!");
    File f = new File("plugins/gFeatures/gHub/spawn.txt");
    if (!f.exists()) {
        c.createFile("plugins/gFeatures/gHub/spawn.txt", "Created spawn location!");
        // try-with-resources closes the writer even when a write throws
        // (the original leaked it in that case)
        try (BufferedWriter output = new BufferedWriter(new FileWriter(f, true))) {
            output.write("world");
            // five zero lines, newline-separated, no trailing newline —
            // byte-identical to the original's repeated write/newLine pairs
            for (int i = 0; i < 5; i++) {
                output.newLine();
                output.write("0");
            }
        } catch (Exception e1) {
            Bukkit.getLogger().info("UHOH");
            e1.printStackTrace();
        }
    }
}
From source file:de.j4velin.mapsmeasure.Util.java
/**
 * Writes the given trace of points to the given file in CSV format,
 * separated by ";"
 *
 * @param f
 *            the file to write to
 * @param trace
 *            the trace to write
 * @throws IOException
 *             if the file cannot be created or written
 */
static void saveToFile(final File f, final Stack<LatLng> trace) throws IOException {
    // createNewFile() is unnecessary — new FileWriter(f) creates the file —
    // but kept for byte-compatible behavior with existing callers/tests
    if (!f.exists())
        f.createNewFile();
    // try-with-resources closes the writer even when an append throws
    // (the original leaked it in that case)
    try (BufferedWriter out = new BufferedWriter(new FileWriter(f))) {
        LatLng current;
        for (int i = 0; i < trace.size(); i++) {
            current = trace.get(i);
            // NOTE(review): the field separator written here is ";" while the
            // javadoc on the class says CSV — confirm the reader expects ';'
            out.append(current.latitude + ";" + current.longitude + "\n");
        }
    }
}
From source file:it.tizianofagni.sparkboost.DataUtils.java
/**
 * Write a text file on Hadoop file system by using standard Hadoop API.
 *
 * @param outputPath The file to be written.
 * @param content    The content to put in the file.
 * @throws RuntimeException wrapping any I/O failure (the original contract)
 */
public static void saveHadoopTextFile(String outputPath, String content) {
    try {
        Configuration configuration = new Configuration();
        Path file = new Path(outputPath);
        Path parentFile = file.getParent();
        // try-with-resources closes both the writer and the filesystem even
        // when a write throws (the original leaked both in that case)
        try (FileSystem hdfs = FileSystem.get(file.toUri(), configuration)) {
            if (parentFile != null)
                hdfs.mkdirs(parentFile);
            // create(file, true) overwrites any existing file at outputPath
            try (BufferedWriter br = new BufferedWriter(
                    new OutputStreamWriter(hdfs.create(file, true), "UTF-8"))) {
                br.write(content);
            }
        }
    } catch (Exception e) {
        throw new RuntimeException("Writing Hadoop text file", e);
    }
}