List of usage examples for the java.io.BufferedWriter.newLine() method
public void newLine() throws IOException
From source file:dataviewer.DataViewer.java
private void writeConfig() { try {//from w ww . j a va2 s . c o m if ((new File(config)).exists()) { (new File(config)).delete(); } BufferedWriter out = new BufferedWriter(new FileWriter(config)); out.write("directory = " + cur_path); out.newLine(); out.write("delimiter = " + String.valueOf(delimiter)); out.newLine(); out.write("transpose = " + ((transpose) ? "true" : "false")); out.newLine(); out.write("export = " + ((export) ? "true" : "false")); out.newLine(); out.write("header = " + ((header) ? "true" : "false")); out.newLine(); out.write("N = " + String.valueOf(N)); out.close(); } catch (Exception e) { txt_count.setText(e.getMessage()); } }
From source file:gate.util.reporting.PRTimeReporter.java
/** * Prints benchmark report in text format. * * @param reportContainer/* w w w . j a v a2s .com*/ * An Object of type LinkedHashMap<String, Object> containing the * processing elements (with time in milliseconds) in hierarchical * structure. * @param outputFile * An object of type File representing the output report file. * @param suppressZeroTimeEntries * Indicate whether or not to show 0 millisecond entries. */ @SuppressWarnings("unchecked") private void printToText(Object reportContainer, File outputFile, boolean suppressZeroTimeEntries) { LinkedHashMap<String, Object> globalStore = (LinkedHashMap<String, Object>) reportContainer; prettyPrint(globalStore, "\t", suppressZeroTimeEntries); BufferedWriter out = null; try { out = new BufferedWriter(new FileWriter(outputFile)); for (String line : printLines) { out.write(line); out.newLine(); } } catch (IOException e) { e.printStackTrace(); } finally { try { if (out != null) { out.close(); } } catch (IOException e) { e.printStackTrace(); } } }
From source file:me.ryanhamshire.PopulationDensity.DataStore.java
public void savePlayerData(OfflinePlayer player, PlayerData data) { //save that data in memory this.playerNameToPlayerDataMap.put(player.getUniqueId().toString(), data); BufferedWriter outStream = null; try {// www. j a v a 2s .c o m //open the player's file File playerFile = new File(playerDataFolderPath + File.separator + player.getUniqueId().toString()); playerFile.createNewFile(); outStream = new BufferedWriter(new FileWriter(playerFile)); //first line is home region coordinates outStream.write(data.homeRegion.toString()); outStream.newLine(); //second line is last disconnection date, //note use of the ROOT locale to avoid problems related to regional settings on the server being updated DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.FULL, DateFormat.FULL, Locale.ROOT); outStream.write(dateFormat.format(data.lastDisconnect)); outStream.newLine(); //third line is login priority outStream.write(String.valueOf(data.loginPriority)); outStream.newLine(); } //if any problem, log it catch (Exception e) { PopulationDensity.AddLogEntry("PopulationDensity: Unexpected exception saving data for player \"" + player.getName() + "\": " + e.getMessage()); } try { //close the file if (outStream != null) outStream.close(); } catch (IOException exception) { } }
From source file:Evaluator.PerQueryRelDocs.java
public void storeCosineSimilarity(String fileName, int startQrelNo, int endQrelNo, String vectorFolderLocation) throws IOException, ParseException { FileWriter fw = new FileWriter(new File(fileName), true); BufferedWriter bw = new BufferedWriter(fw); bw.newLine(); VectorExtractor ve = new VectorExtractor(); for (int qid = startQrelNo; qid <= endQrelNo; qid++) { Integer qidValue = qid;// w w w . ja va 2 s.c o m PerQueryRelDocs perqd = perQueryRels.get(qidValue.toString()); HashMap<String, ArrayList<Double>> h = ve.extractVector(vectorFolderLocation + qid + ".txt"); perqd.precomputeCosineSim(h); bw.write("#" + qidValue.toString()); bw.newLine(); Iterator it = perqd.perQuerydocCosineSim.keySet().iterator(); while (it.hasNext()) { String docPair = (String) it.next(); String st[] = docPair.split("#"); bw.write(st[0] + " " + st[1] + " " + perqd.perQuerydocCosineSim.get(docPair)); bw.newLine(); } } bw.close(); }
From source file:geva.Operator.Operations.StatisticsCollectionOperation.java
/**
 * Print the StatCatcher to file: writes "&lt;fileName&gt;&lt;timestamp&gt;.dat" with the
 * column header followed by one whitespace-separated row per generation
 * (best, mean, used genes, elapsed ms, invalids, variance, length, depth).
 */
public void printStats() {
    // NOTE(review): fileName is permanently re-assigned here, so calling this
    // method twice compounds timestamps in the name — preserved as-is to keep
    // behavior identical.
    this.fileName = fileName + System.currentTimeMillis();
    // try-with-resources closes the underlying FileWriter too; the original
    // leaked both streams when any write threw
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(fileName + ".dat"))) {
        ArrayList<Double> m, b, aUG, aV, aL, aD;
        ArrayList<Long> t;
        ArrayList<Integer> inv;
        b = stats.getBestFitness();
        m = stats.getMeanFitness();
        aUG = stats.getMeanUsedGenes();
        t = stats.getTime();
        inv = stats.getInvalids();
        aV = stats.getVarFitness();
        aL = stats.getAveLength();
        aD = stats.getMeanDerivationTreeDepth();
        Iterator<Double> ib = b.iterator();
        Iterator<Double> im = m.iterator();
        Iterator<Double> iAV = aV.iterator();
        Iterator<Double> iAUG = aUG.iterator();
        Iterator<Double> iAD = aD.iterator();
        Iterator<Long> iT = t.iterator();
        Iterator<Integer> iInv = inv.iterator();
        Iterator<Double> iAL = aL.iterator();
        Long start = iT.next();
        Long diff, stop;
        bw.write(StatisticsCollectionOperation.OUTPUT_COLUMNS);
        bw.newLine();
        while (ib.hasNext() && im.hasNext() && iAUG.hasNext() && iT.hasNext() && iInv.hasNext()
                && iAV.hasNext() && iAL.hasNext() && iAD.hasNext()) {
            stop = iT.next();
            diff = stop - start; // elapsed time between successive generations
            start = stop;
            bw.write(ib.next() + " " + im.next() + " " + iAUG.next() + " " + diff + " " + iInv.next() + " "
                    + iAV.next() + " " + iAL.next() + " " + iAD.next());
            bw.newLine();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:matrix.TextUrlMatrix.java
public void textUrlMatrix() throws UnsupportedEncodingException, FileNotFoundException, IOException, ParseException { double a = 0.7; CosSim cossim = new CosSim(); JSONParser jParser = new JSONParser(); BufferedReader in = new BufferedReader(new InputStreamReader( new FileInputStream("/Users/nSabri/Desktop/tweetMatris/userTweets.json"), "ISO-8859-9")); JSONArray jArray = (JSONArray) jParser.parse(in); BufferedReader in2 = new BufferedReader(new InputStreamReader( new FileInputStream("/Users/nSabri/Desktop/tweetMatris/userTweetsUrls.json"), "ISO-8859-9")); JSONArray jArray2 = (JSONArray) jParser.parse(in2); File fout = new File("/Users/nSabri/Desktop/textUrlMatris.csv"); FileOutputStream fos = new FileOutputStream(fout); BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fos)); for (int i = 0; i < 100; i++) { for (int j = 0; j < 100; j++) { JSONObject tweet1text = (JSONObject) jArray.get(i); JSONObject tweet2text = (JSONObject) jArray.get(j); JSONObject tweet1url = (JSONObject) jArray2.get(i); JSONObject tweet2url = (JSONObject) jArray2.get(j); String tweetText1 = tweet1text.get("tweets").toString(); String tweetText2 = tweet2text.get("tweets").toString(); String tweetUrl1 = tweet1url.get("title").toString() + tweet1url.get("meta").toString(); String tweetUrl2 = tweet2url.get("title").toString() + tweet1url.get("meta").toString(); double CosSimValueText = cossim.Cosine_Similarity_Score(tweetText1, tweetText2); double CosSimValueUrl = cossim.Cosine_Similarity_Score(tweetUrl1, tweetUrl2); double TextUrlSimValue = (a * CosSimValueText) + ((1 - a) * CosSimValueUrl); TextUrlSimValue = Double.parseDouble(new DecimalFormat("##.###").format(TextUrlSimValue)); bw.write(Double.toString(TextUrlSimValue) + ", "); }//from www . j a v a 2 s .co m bw.newLine(); } bw.close(); }
From source file:org.apache.playframework.generator.mybatisplus.AutoGenerator.java
// NOTE(review): MyBatis-Plus entity source generator, collapsed onto very long
// lines by the scraper and full of mojibake comments ("?"). It emits, in strict
// order: package line, imports (conditional on column types and config),
// @TableName, the class declaration (optionally extending a configured base
// entity), serialVersionUID, one annotated field per column (@TableId for id
// columns, @TableField for underscore columns when underline mapping is off),
// column constants, and getters/setters (builder-style setters when
// config.isBuliderModel() — sic — is true). The emission order and the
// bw.newLine() placement are load-bearing for the generated file's layout, so
// the code is intentionally left byte-identical; only this note is added.
// NOTE(review): bw is opened with a platform-default-charset OutputStreamWriter
// and is closed only on the success path — presumably acceptable for a one-shot
// code generator, but verify before reuse.
/** * ?//from w w w. j a v a2s . co m * * @param columns * @param types * @param comments * @throws IOException */ protected void buildEntityBean(List<String> columns, List<String> types, List<String> comments, String tableComment, Map<String, IdInfo> idMap, String table, String beanName) throws IOException { File beanFile = new File(PATH_ENTITY, beanName + ".java"); BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(beanFile))); bw.write("package " + config.getEntityPackage() + ";"); bw.newLine(); bw.newLine(); bw.write("import java.io.Serializable;"); bw.newLine(); if (isDate(types)) { bw.write("import java.util.Date;"); bw.newLine(); } if (isDecimal(types)) { bw.write("import java.math.BigDecimal;"); bw.newLine(); } bw.newLine(); if (config.getIdType() != IdType.ID_WORKER) { bw.write("import com.baomidou.mybatisplus.annotations.IdType;"); bw.newLine(); } bw.newLine(); /* * ? BaseEntity */ if (null != config.getConfigBaseEntity() && null != config.getConfigBaseEntity().getPackageName() && !config.getEntityPackage().equals(config.getConfigBaseEntity().getPackageName())) { bw.write("import " + config.getConfigBaseEntity().getPackageName() + ";"); bw.newLine(); } bw.write("import com.baomidou.mybatisplus.annotations.TableField;"); bw.newLine(); if (null == config.getConfigBaseEntity()) { bw.write("import com.baomidou.mybatisplus.annotations.TableId;"); bw.newLine(); } if (table.contains("_") || config.isResultMap()) { bw.write("import com.baomidou.mybatisplus.annotations.TableName;"); bw.newLine(); } bw = buildClassComment(bw, tableComment); bw.newLine(); /* ? */ if (table.contains("_") || config.isResultMap()) { bw.write("@TableName(value=\"" + table + "\""); if (config.isResultMap()) { bw.write(",resultMap=\"BaseResultMap\""); } bw.write(")"); bw.newLine(); } /** * ???? 
BaseEntity */ if (null != config.getConfigBaseEntity()) { bw.write("public class " + beanName + " extends " + config.getConfigBaseEntity().getClassName() + " {"); } else { bw.write("public class " + beanName + " implements Serializable {"); } bw.newLine(); bw.newLine(); bw.write("\t@TableField(exist = false)"); bw.newLine(); bw.write("\tprivate static final long serialVersionUID = 1L;"); bw.newLine(); int size = columns.size(); for (int i = 0; i < size; i++) { bw.newLine(); bw.write("\t/** " + comments.get(i) + " */"); bw.newLine(); /* * ID <br> isLine ?? */ String column = columns.get(i); String field = processField(column); boolean isLine = column.contains("_"); IdInfo idInfo = idMap.get(column); if (idInfo != null) { // @TableId(value = "test_id", type = IdType.AUTO) bw.write("\t@TableId"); String idType = toIdType(); if (isLine) { if (config.isDbColumnUnderline()) { // if (null != idType) { bw.write("("); bw.write(idType); bw.write(")"); } } else { bw.write("(value = \"" + column + "\""); if (null != idType) { bw.write(", "); bw.write(idType); } bw.write(")"); } } else if (null != idType) { bw.write("("); bw.write(idType); bw.write(")"); } bw.newLine(); } else if (isLine && !config.isDbColumnUnderline()) { // @TableField(value = "test_type", exist = false) bw.write("\t@TableField(value = \"" + column + "\")"); bw.newLine(); } bw.write("\tprivate " + processType(types.get(i)) + " " + field + ";"); bw.newLine(); } /* * ?? */ this.buildEntityBeanColumnConstant(columns, types, comments, bw, size); bw.newLine(); /* * ?get set */ for (int i = 0; i < size; i++) { String _tempType = processType(types.get(i)); String _tempField = processField(columns.get(i)); String _field = _tempField.substring(0, 1).toUpperCase() + _tempField.substring(1); bw.newLine(); bw.write("\tpublic " + _tempType + " get" + _field + "() {"); bw.newLine(); bw.write("\t\treturn this." + _tempField + ";"); bw.newLine(); bw.write("\t}"); bw.newLine(); bw.newLine(); /* ? 
*/ if (config.isBuliderModel()) { bw.write("\tpublic " + beanName + " set" + _field + "(" + _tempType + " " + _tempField + ") {"); bw.newLine(); bw.write("\t\tthis." + _tempField + " = " + _tempField + ";"); bw.newLine(); bw.write("\t\treturn this;"); } else { bw.write("\tpublic void set" + _field + "(" + _tempType + " " + _tempField + ") {"); bw.newLine(); bw.write("\t\tthis." + _tempField + " = " + _tempField + ";"); } bw.newLine(); bw.write("\t}"); bw.newLine(); } bw.newLine(); bw.write("}"); bw.newLine(); bw.flush(); bw.close(); }
From source file:edu.illinois.cs.cogcomp.wikifier.wiki.importing.WikipediaRedirectExtractor.java
public String run(File inputFile, File outputFile) throws Exception { int invalidCount = 0; long t0 = System.currentTimeMillis(); InputStream fis = new FileInputStream(inputFile); if (inputFile.getName().endsWith(".bz2")) fis = new BZip2InputStream(fis, false); BufferedReader dumpReader = new BufferedReader(new InputStreamReader(fis, "utf-8")); BufferedWriter redirectWriter = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(outputFile), "utf-8")); String titleIdFile = outputFile + "-title-id.txt"; BufferedWriter titleIdWriter = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(titleIdFile), "utf-8")); int count = 0; String title = null;//from w w w .j a va 2 s.c o m String line = null; while ((line = dumpReader.readLine()) != null) { if (line.startsWith(titlePattern)) { title = cleanupTitle(line); continue; } if (line.startsWith(redirectPattern)) { String[] splits = StringUtils.substringsBetween(line, "<redirect title=\"", "\" />"); if (splits == null || splits.length != 1) { invalidCount++; continue; } String redirectedTitle = splits[0]; redirectedTitle = cleanupTitle(redirectedTitle); if (isValidAlias(title, redirectedTitle)) { redirectWriter.write(title + "\t" + redirectedTitle); redirectWriter.newLine(); count++; } else { invalidCount++; System.out.println("Discarded redirect from " + title + " to " + redirectedTitle); } if (count % 100000 == 0) System.out.println("Processed " + (count + invalidCount) + " titles "); } if (SAVE_COMPLETE_TITLE_LIST && line.startsWith(idPattern)) { String[] splits = StringUtils.substringsBetween(line, "<id>", "</id>"); if (splits == null || splits.length != 1) { invalidCount++; continue; } titleIdWriter.write(splits[0] + '\t' + title); titleIdWriter.newLine(); } } dumpReader.close(); fis.close(); redirectWriter.close(); titleIdWriter.close(); System.out.println("---- Wikipedia redirect extraction done ----"); long t1 = System.currentTimeMillis(); // IOUtil.save( map ); System.out.println("Discarded " + 
invalidCount + " redirects to wikipedia meta articles."); System.out.println("Extracted " + count + " redirects."); System.out.println("Saved output: " + outputFile.getAbsolutePath()); System.out.println("Done in " + ((t1 - t0) / 1000) + " sec."); return titleIdFile; }
From source file:org.corpus_tools.pepper.connectors.impl.MavenAccessor.java
/** * writes the old, freshly installed and forbidden dependencies to the * blacklist file./*from w ww .j av a2 s. c o m*/ */ private void write2Blacklist() { File blacklistFile = new File(BLACKLIST_PATH); if (!blacklistFile.exists()) { if (!blacklistFile.getParentFile().exists() && !blacklistFile.getParentFile().mkdirs()) { logger.warn("Cannot create folder {}. ", blacklistFile.getParentFile()); } try { if (!blacklistFile.createNewFile()) { logger.warn("Cannot create file {}. ", blacklistFile); } } catch (IOException e) { logger.warn("Could not write blacklist file."); } } try { PrintWriter fW = new PrintWriter(blacklistFile); BufferedWriter bW = new BufferedWriter(fW); for (String s : forbiddenFruits) { bW.write(s); bW.newLine(); } bW.close(); fW.close(); } catch (IOException e) { logger.warn("Could not write blacklist file."); } }
From source file:gdsc.smlm.ij.plugins.SpotAnalysis.java
/**
 * Appends one line of text to the traces file, terminated by the platform
 * line separator that BufferedWriter.newLine() emits.
 *
 * @param tracesFile open writer for the traces output
 * @param line       text to append, without a line terminator
 * @throws IOException if the underlying writer fails
 */
private void writeLine(BufferedWriter tracesFile, String line) throws IOException {
    // write the full payload, then the separator
    tracesFile.write(line, 0, line.length());
    tracesFile.newLine();
}