Usage examples for java.io.BufferedWriter#newLine()
public void newLine() throws IOException
From source file:com.mgmtp.jfunk.common.util.ExtendedProperties.java
/** * Writes the properties to the specified writer, including defaults. * /*from w w w.j a va2 s. com*/ * @param writer * The writer * @param comments * The header comment written to the writer * @param sorted * If {@code true}, the properties are written sorted by key * @param process * If {@code true}, place holders are resolved */ public void store(final Writer writer, final String comments, final boolean sorted, final boolean process) throws IOException { BufferedWriter bw = writer instanceof BufferedWriter ? (BufferedWriter) writer : new BufferedWriter(writer); if (comments != null) { for (Scanner scanner = new Scanner(comments); scanner.hasNextLine();) { bw.write("#"); bw.write(scanner.nextLine()); bw.newLine(); } } bw.write("#" + new Date()); bw.newLine(); Set<String> keys = keySet(); if (sorted) { keys = Sets.newTreeSet(keys); } for (String key : keys) { /* * No need to escape embedded and trailing spaces for value, hence pass false to flag. */ bw.write(saveConvert(key, true) + "=" + saveConvert(get(key, process), false)); bw.newLine(); } bw.flush(); }
From source file:com.egt.ejb.toolkit.ToolKitSessionBean.java
private void copyfile(File sourceFile, File targetFile, Map replacements) { if (sourceFile.getName().endsWith(".")) { } else if (sourceFile.getName().endsWith(".gif")) { } else if (sourceFile.getName().endsWith(".jpg")) { } else {/* w w w . ja va 2 s . c om*/ // Bitacora.trace(this.getClass(), "copyfile-source", sourceFile.getAbsolutePath(), sourceFile.getName()); // Bitacora.trace(this.getClass(), "copyfile-target", targetFile.getAbsolutePath(), targetFile.getName()); try { FileOutputStream fos; try (FileInputStream fis = new FileInputStream(sourceFile)) { InputStreamReader isr = new InputStreamReader(fis); BufferedReader br = new BufferedReader(isr); fos = new FileOutputStream(targetFile); OutputStreamWriter osw = new OutputStreamWriter(fos); BufferedWriter bw = new BufferedWriter(osw); int escapeRule = getEscapeRule(sourceFile.getName()); String line; while ((line = br.readLine()) != null) { line = replaceStrings(line, replacements, escapeRule); bw.write(line); bw.newLine(); bw.flush(); } } fos.close(); } catch (FileNotFoundException ex) { // System.out.println(ThrowableUtils.getString(ex)); Bitacora.logFatal(ThrowableUtils.getString(ex)); } catch (IOException ex) { // System.out.println(ThrowableUtils.getString(ex)); Bitacora.logFatal(ThrowableUtils.getString(ex)); } } }
From source file:Evaluator.PerQueryRelDocs.java
public void storeRunMeanAp(String fileName) throws IOException { FileWriter fw = new FileWriter(new File(fileName)); BufferedWriter bw = new BufferedWriter(fw); Iterator it = runApMap.keySet().iterator(); while (it.hasNext()) { String run = (String) it.next(); bw.write(run + " " + runApMap.get(run)); bw.newLine(); }// www. ja va 2 s.com bw.close(); }
From source file:Evaluator.PerQueryRelDocs.java
public void storeApValues(String fileName) throws IOException { FileWriter fw = new FileWriter(new File(fileName)); BufferedWriter bw = new BufferedWriter(fw); Iterator it = qidApMap.keySet().iterator(); while (it.hasNext()) { String qid = (String) it.next(); bw.write(qid + " " + qidApMap.get(qid)); bw.newLine(); }//from w w w. ja v a 2s . c o m bw.close(); }
From source file:analytics.storage.store2csv.java
@Override public void appendRepositoryData(String repoName, int noRecords, float avgFSize, float storageReq, float informativeness, String schema) throws IOException { // TODO Auto-generated method stub String sFileName = "Federation" + "_GeneralInfo" + ".csv"; ConfigureLogger conf = new ConfigureLogger(); Properties props = new Properties(); try {/*from ww w . j a va 2 s . com*/ props.load(new FileInputStream("configure.properties")); } catch (FileNotFoundException e1) { // TODO Auto-generated catch block e1.printStackTrace(); System.exit(-1); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); System.exit(-1); } ; File anls = new File(props.getProperty(AnalyticsConstants.resultsPath) + "Analysis_Results"); if (!anls.exists()) anls.mkdir(); File dir = new File(anls, "Federation"); if (!dir.exists()) dir.mkdir(); File file = new File(dir, sFileName); if (!file.exists()) file.createNewFile(); Logger logger = conf.getLogger("generalInfo", anls + File.separator + "repoGeneralInfo.log"); StringBuffer logString = new StringBuffer(); FileWriter writer; BufferedWriter bw = null; try { writer = new FileWriter(file, true); bw = new BufferedWriter(writer); if (!isAppendData()) { setAppend(true); // create header bw.append("Repository Name"); bw.append(","); bw.append("Number of records"); bw.append(","); bw.append("Average file size(bytes)"); bw.append(","); bw.append("Approximate Storage requirements(bytes)"); bw.append(","); bw.append("AVG informativeness(bits)"); bw.append(","); bw.append("Metadata schema namespace"); bw.newLine(); bw.append(repoName); logString.append(repoName); bw.append(","); bw.append(String.valueOf(noRecords)); logString.append(" " + String.valueOf(noRecords)); bw.append(","); bw.append(String.valueOf(avgFSize)); logString.append(" " + String.valueOf(avgFSize)); bw.append(","); bw.append(String.valueOf(storageReq)); logString.append(" " + String.valueOf(storageReq)); bw.append(","); 
bw.append(String.valueOf(informativeness)); logString.append(" " + String.valueOf(informativeness)); bw.append(","); bw.append(schema); logString.append(" " + String.valueOf(schema)); bw.newLine(); bw.close(); } else { // insert data bw.append(repoName); logString.append(repoName); bw.append(","); bw.append(String.valueOf(noRecords)); logString.append(" " + String.valueOf(noRecords)); bw.append(","); bw.append(String.valueOf(avgFSize)); logString.append(" " + String.valueOf(avgFSize)); bw.append(","); bw.append(String.valueOf(storageReq)); logString.append(" " + String.valueOf(storageReq)); bw.append(","); bw.append(String.valueOf(informativeness)); logString.append(" " + String.valueOf(informativeness)); bw.append(","); bw.append(schema); logString.append(" " + String.valueOf(schema)); bw.newLine(); bw.close(); } logger.info(logString.toString()); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { try { if (bw != null) bw.close(); } catch (IOException ex) { ex.printStackTrace(); } } }
From source file:gov.nih.nci.ncicb.tcga.dcc.qclive.common.util.DownloadNCBITraceXML.java
public int getPagecount() { String countquery = "http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?&cmd=retrieve&val=PROJECT_NAME=\"TCGA\"%20and%20LOAD_DATE>=\"" + latestLoaddate + "\"&dopt=xml_info&dispmax=1&page=0"; BufferedReader br = null;//from w w w. ja v a 2 s .c o m BufferedWriter bw = null; try { downloaddir = downloaddir + File.separator + latestLoaddate; URL url = new URL(countquery); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); getLogger().logToLogger(Level.INFO, "DownloadNCBITraceXML getPagecount site url: " + url); is = connection.getInputStream(); //noinspection IOResourceOpenedButNotSafelyClosed br = new BufferedReader(new InputStreamReader(is)); String line; if (!(new File(downloaddir)).exists()) { boolean newdir = new File(downloaddir).mkdir(); getLogger().logToLogger(Level.INFO, "DownloadNCBITraceXML getPagecount create newdir: " + newdir); } fw = new FileWriter(downloaddir + File.separator + "NCBIXMLDownload" + ".out"); //noinspection IOResourceOpenedButNotSafelyClosed bw = new BufferedWriter(fw); while ((line = br.readLine()) != null) { if (line.contains("Search result") || line.contains("found") || line.contains("items")) { bw.write("old line" + line); bw.newLine(); String valueline = line.replaceAll("\\D", ""); bw.write("valueline = " + valueline); bw.newLine(); if (valueline.length() > 0) { querysize = new Integer(valueline).intValue(); getLogger().logToLogger(Level.INFO, "DownloadNCBITraceXML getPagecount querysize: " + querysize); pagecount = Math.round(querysize / pagesize) + 1; } } } bw.write("latestLoaddate = " + latestLoaddate); bw.newLine(); bw.write("querysize = " + querysize); bw.newLine(); is.close(); } catch (IOException e) { getLogger().logToLogger(Level.ERROR, "DownloadNCBITraceXML getPagecount Exception " + e.toString()); } finally { IOUtils.closeQuietly(br); IOUtils.closeQuietly(bw); } return pagecount; }
From source file:io.hops.experiments.stats.TransactionStatsCumulative.java
private void writeStats(BufferedWriter writer, String transaction, String[] columns, File depthDir, int key) throws IOException { writer.write(key + " "); for (String cache : RESOLVING_CACHES) { File cacheDir = new File(depthDir, cache); Map<String, DescriptiveStatistics> stats = null; Map<String, DescriptiveStatistics> statsResolving = null; if (cacheDir.exists()) { stats = TransactionStatsAggregator.aggregate(new File(cacheDir, HOPSSTATS), transaction); }//from w w w.ja v a 2 s . c o m if (isResolvingCache(cache)) { statsResolving = TransactionStatsAggregator.aggregate(new File(cacheDir, RESOLVING_CACHE_STATS), "GET"); } if (stats != null) { for (String col : columns) { DescriptiveStatistics st = stats.get(col); writer.write(st.getMin() + " " + st.getMean() + " " + st.getMax() + " "); } } if (statsResolving != null) { for (String col : RESOLVING_CACHE_COLUMNS) { DescriptiveStatistics st = statsResolving.get(col); writer.write(st.getMin() + " " + st.getMean() + " " + st.getMax() + " "); } } } writer.newLine(); }
From source file:gtu._work.ui.SqlCreaterUI.java
private void firstRowMakeInsertSqlBtn(ActionEvent evt) { try {//from ww w . j a v a2s .c o m String tableName = Validate.notBlank(tableNameText.getText(), "??"); File srcFile = JCommonUtil.filePathCheck(excelFilePathText2.getText(), "?", "xlsx"); File saveFile = JCommonUtil._jFileChooser_selectFileOnly_saveFile(); if (saveFile == null) { JCommonUtil._jOptionPane_showMessageDialog_error("?"); return; } BufferedWriter writer = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(saveFile), "utf8")); BufferedInputStream bis = new BufferedInputStream(new FileInputStream(srcFile)); XSSFWorkbook xssfWorkbook = new XSSFWorkbook(bis); Sheet sheet = xssfWorkbook.getSheetAt(0); LinkedHashMap<String, String> valueMap = new LinkedHashMap<String, String>(); for (int ii = 0; ii < sheet.getRow(0).getLastCellNum(); ii++) { valueMap.put(formatCellType(sheet.getRow(0).getCell(ii)), ""); } for (int j = 0; j < sheet.getPhysicalNumberOfRows(); j++) { Row row = sheet.getRow(j); LinkedHashMap<String, String> valueMap2 = (LinkedHashMap<String, String>) valueMap.clone(); int ii = 0; for (String key : valueMap2.keySet()) { valueMap2.put(key, formatCellType(row.getCell(ii))); ii++; } appendLog("" + valueMap2); String insertSql = this.fetchInsertSQL(tableName, valueMap2); appendLog("" + insertSql); writer.write(insertSql); writer.newLine(); } bis.close(); writer.flush(); writer.close(); JCommonUtil._jOptionPane_showMessageDialog_info("? : \n" + saveFile); } catch (Exception ex) { JCommonUtil.handleException(ex); } }
From source file:net.grinder.SingleConsole.java
/**
 * Appends one line to the per-name report file, lazily opening a writer on first
 * use and caching it in {@code fileWriterMap} for subsequent calls. The writer is
 * flushed after every line so data survives an abrupt shutdown.
 *
 * @param name  report file name, resolved relative to {@code reportPath}
 * @param value the line to append
 */
private void writeReportData(String name, String value) {
    try {
        BufferedWriter out = fileWriterMap.get(name);
        if (out == null) {
            // First write for this report: open in append mode and cache the writer.
            File reportFile = new File(this.reportPath, name);
            out = new BufferedWriter(new FileWriter(reportFile, true));
            fileWriterMap.put(name, out);
        }
        out.write(value);
        out.newLine();
        out.flush();
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        throw processException(e);
    }
}
From source file:org.apache.hadoop.fs.JHLogAnalyzer.java
/** * Result is combined from all reduce output files and is written to * RESULT_FILE in the format//from www. j a v a2 s .c om * column 1: */ private static void analyzeResult(FileSystem fs, int testType, long execTime, Path resFileName) throws IOException { LOG.info("Analyzing results ..."); DataOutputStream out = null; BufferedWriter writer = null; try { out = new DataOutputStream(fs.create(resFileName)); writer = new BufferedWriter(new OutputStreamWriter(out)); writer.write("SERIES\tPERIOD\tTYPE\tSLOT_HOUR\n"); FileStatus[] reduceFiles = fs.listStatus(OUTPUT_DIR); assert reduceFiles.length == JHLAPartitioner.NUM_REDUCERS; for (int i = 0; i < JHLAPartitioner.NUM_REDUCERS; i++) { DataInputStream in = null; BufferedReader lines = null; try { in = fs.open(reduceFiles[i].getPath()); lines = new BufferedReader(new InputStreamReader(in)); String line; while ((line = lines.readLine()) != null) { StringTokenizer tokens = new StringTokenizer(line, "\t*"); String attr = tokens.nextToken(); String dateTime = tokens.nextToken(); String taskType = tokens.nextToken(); double val = Long.parseLong(tokens.nextToken()) / (double) DEFAULT_TIME_INTERVAL_MSEC; writer.write(attr.substring(2)); // skip the stat type "l:" writer.write("\t"); writer.write(dateTime); writer.write("\t"); writer.write(taskType); writer.write("\t"); writer.write(String.valueOf((float) val)); writer.newLine(); } } finally { if (lines != null) lines.close(); if (in != null) in.close(); } } } finally { if (writer != null) writer.close(); if (out != null) out.close(); } LOG.info("Analyzing results ... done."); }