Usage examples for java.io.BufferedWriter.flush()
public void flush() throws IOException
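Before the longer, project-specific examples below, here is a minimal self-contained sketch of what flush() does: it forces any characters still sitting in the BufferedWriter's internal buffer out to the underlying writer. The file name "example.txt" is illustrative only.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class FlushExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes (and therefore flushes) the writer automatically
        try (BufferedWriter out = new BufferedWriter(new FileWriter("example.txt"))) {
            out.write("partial output");
            // flush() pushes the buffered characters to example.txt immediately,
            // e.g. so another process tailing the file can see them before close()
            out.flush();
            out.write(" - more output written later");
        } // close() flushes any remaining buffered characters
    }
}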
From source file:com.healthmarketscience.jackcess.ExportUtil.java
/**
 * Copy a table in this database into a new delimited text file.
 *
 * @param cursor Cursor to export
 * @param out    Writer to export to
 * @param header If <code>true</code> the first line contains the column names
 * @param delim  The column delimiter, <code>null</code> for default (comma)
 * @param quote  The quote character
 * @param filter valid export filter
 *
 * @see Builder
 */
public static void exportWriter(Cursor cursor, BufferedWriter out, boolean header, String delim,
        char quote, ExportFilter filter) throws IOException {
    String delimiter = (delim == null) ? DEFAULT_DELIMITER : delim;

    // create pattern which will indicate whether or not a value needs to be
    // quoted (contains delimiter, quote character, or newline)
    Pattern needsQuotePattern = Pattern.compile(
            "(?:" + Pattern.quote(delimiter) + ")|(?:" +
            Pattern.quote("" + quote) + ")|(?:[\n\r])");

    List<Column> origCols = cursor.getTable().getColumns();
    List<Column> columns = new ArrayList<Column>(origCols);
    columns = filter.filterColumns(columns);

    Collection<String> columnNames = null;
    if (!origCols.equals(columns)) {
        // columns have been filtered
        columnNames = new HashSet<String>();
        for (Column c : columns) {
            columnNames.add(c.getName());
        }
    }

    // print the header row (if desired)
    if (header) {
        for (Iterator<Column> iter = columns.iterator(); iter.hasNext();) {
            writeValue(out, iter.next().getName(), quote, needsQuotePattern);
            if (iter.hasNext()) {
                out.write(delimiter);
            }
        }
        out.newLine();
    }

    // print the data rows
    Map<String, Object> row;
    Object[] unfilteredRowData = new Object[columns.size()];
    while ((row = cursor.getNextRow(columnNames)) != null) {
        // fill raw row data in array
        for (int i = 0; i < columns.size(); i++) {
            unfilteredRowData[i] = columns.get(i).getRowValue(row);
        }

        // apply filter
        Object[] rowData = filter.filterRow(unfilteredRowData);
        if (rowData == null) {
            continue;
        }

        // print row
        for (int i = 0; i < columns.size(); i++) {
            Object obj = rowData[i];
            if (obj != null) {
                String value = null;
                if (obj instanceof byte[]) {
                    value = ByteUtil.toHexString((byte[]) obj);
                } else {
                    value = String.valueOf(rowData[i]);
                }
                writeValue(out, value, quote, needsQuotePattern);
            }
            if (i < columns.size() - 1) {
                out.write(delimiter);
            }
        }
        out.newLine();
    }
    out.flush();
}
From source file:ac.core.ConfigLoader.java
/**
 * extractConfigFile(...) verifies that the external config file exists; if not,
 * it tries to extract the config file from the jar. If neither is available, an
 * exception is thrown to notify the user of the missing config.
 *
 * @param filename location of the config file
 * @throws Exception
 */
private void extractConfigFile(String filename) throws Exception {
    // create a reference to the location of the configuration file
    File cf = new File(filename);

    // if the file does not exist, try to extract it from the jar resource
    if (!cf.exists()) {
        // notify the user we are extracting the stored app.config
        System.out.println("extracting config file: " + filename);

        // create directories
        cf.getParentFile().mkdirs();

        // open the input stream from the jar resource
        BufferedReader configIFile = new BufferedReader(new InputStreamReader(
                getClass().getClassLoader().getResourceAsStream(filename.replace("\\", "/"))));

        // declare storage for the output file
        BufferedWriter configOFile = null;

        // if the input file is valid, extract the data
        if (configIFile != null) {
            try {
                // open the output file
                configOFile = new BufferedWriter(new FileWriter(cf));

                // declare storage for the data from the input stream
                String line;

                // loop the config file, read each line until no more data
                while ((line = configIFile.readLine()) != null) {
                    // write the data to the output file and insert the new
                    // line terminator after each line
                    configOFile.write(line + GlobalStack.LINESEPARATOR);

                    // yield processing to other threads
                    Thread.yield();
                }

                // notify user of the status of the config file
                System.out.println("config file extracted successfully");
            } catch (Exception ex) {
                // if exception during processing, return it to the user
                throw new Exception(getClass().toString() + "//" + ex.getMessage());
            } finally {
                // close the input file to prevent resource leaks
                try {
                    configIFile.close();
                } catch (Exception exi) {
                }

                // close the output file to prevent resource leaks
                if (configOFile != null) {
                    try {
                        configOFile.flush();
                    } catch (Exception exi) {
                    }
                    try {
                        configOFile.close();
                    } catch (Exception exi) {
                    }
                }
            }
        }
    } else {
        // config file already existed, notify user we are using it
        System.out.println("using config file: " + filename);
    }
}
From source file:gr.auth.ee.lcs.evaluators.FileLogger.java
/**
 * A FileLogger constructor to set the directory in which the metrics are stored
 * for all FileLoggers and copy a backup of the src directory for debugging purposes.
 *
 * @author alexandros filotheou
 */
public FileLogger(final AbstractLearningClassifierSystem lcs) {
    file = null;
    actualEvaluator = null;

    final Calendar cal = Calendar.getInstance();
    final SimpleDateFormat sdf = new SimpleDateFormat("yyyy.MM.dd 'at' kk.mm.ss");
    String timestamp = sdf.format(cal.getTime());

    // make directory hookedMetrics/{simpleDateFormat}
    File dir = new File("hookedMetrics/" + timestamp);
    System.out.println(dir.toString());
    if (!dir.exists()) {
        dir.mkdirs();
    }
    storeDirectory = "hookedMetrics/" + timestamp;

    dir = new File(storeDirectory + "/evals");
    if (!dir.exists()) {
        dir.mkdirs();
    }

    // set the name of the directory in which the metrics will be stored
    if (lcs.hookedMetricsFileDirectory == null)
        lcs.setHookedMetricsFileDirectory(storeDirectory);

    try {
        // keep a copy of the defaultLcs.properties configuration
        File getConfigurationFile = new File(storeDirectory, "defaultLcs.properties");
        if (!getConfigurationFile.exists()) {
            FileInputStream in = new FileInputStream("defaultLcs.properties");
            FileOutputStream out = new FileOutputStream(storeDirectory + "/defaultLcs.properties");
            byte[] buf = new byte[1024];
            int len;
            while ((len = in.read(buf)) > 0) {
                out.write(buf, 0, len);
            }
            in.close();
            out.close();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    // copy the /src directory into storeDirectory
    String sourceDir = "src";
    File srcDir = new File(sourceDir);
    String destinationDir = storeDirectory + "/src";
    File destDir = new File(destinationDir);
    if (!destDir.exists())
        destDir.mkdir();
    try {
        FileUtils.copyDirectory(srcDir, destDir);
    } catch (Exception e) {
        e.printStackTrace();
    }

    try {
        // record fitness mode, deletion mode and whether # participate in the
        // correct sets in the file essentialSettings.txt
        final FileWriter fstream = new FileWriter(storeDirectory + "/essentialSettings.txt", true);
        final BufferedWriter buffer = new BufferedWriter(fstream);

        int fitness_mode = (int) SettingsLoader.getNumericSetting("FITNESS_MODE", 0);
        int deletion_mode = (int) SettingsLoader.getNumericSetting("DELETION_MODE", 0);
        boolean wildCardsParticipateInCorrectSets = String
                .valueOf(SettingsLoader.getStringSetting("wildCardsParticipateInCorrectSets", "true"))
                .equals("true");
        boolean initializePopulation = String
                .valueOf(SettingsLoader.getStringSetting("initializePopulation", "true")).equals("true");

        buffer.write("fitness mode: " + fitness_mode + System.getProperty("line.separator")
                + "deletion mode: " + deletion_mode + System.getProperty("line.separator")
                + "# in correct sets :" + wildCardsParticipateInCorrectSets
                + System.getProperty("line.separator")
                + (wildCardsParticipateInCorrectSets
                        ? "balance correct sets: "
                                + String.valueOf(SettingsLoader
                                        .getStringSetting("balanceCorrectSets", "true").equals("true"))
                                + (String.valueOf(SettingsLoader
                                        .getStringSetting("balanceCorrectSets", "true").equals("true")
                                                ? ", with ratio: " + SettingsLoader
                                                        .getNumericSetting("wildCardParticipationRatio", 0)
                                                : ""))
                        : "" + System.getProperty("line.separator")
                                + (initializePopulation
                                        ? "population initialized via clustering: " + initializePopulation
                                        : "")
                                + System.getProperty("line.separator")));
        buffer.flush();
        buffer.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:edu.uci.ics.jung.io.GraphMLWriter.java
/**
 * @param graph
 * @param w
 * @throws IOException
 */
public void save(Hypergraph<V, E> graph, Writer w) throws IOException {
    BufferedWriter bw = new BufferedWriter(w);

    // write out boilerplate header
    bw.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
    bw.write("<graphml xmlns=\"http://graphml.graphdrawing.org/xmlns/graphml\"\n"
            + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n");
    bw.write("xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns/graphml\">\n");

    // write out data specifiers, including defaults
    for (String key : graph_data.keySet())
        writeKeySpecification(key, "graph", graph_data.get(key), bw);
    for (String key : vertex_data.keySet())
        writeKeySpecification(key, "node", vertex_data.get(key), bw);
    for (String key : edge_data.keySet())
        writeKeySpecification(key, "edge", edge_data.get(key), bw);

    // write out graph-level information
    // set edge default direction
    bw.write("<graph edgedefault=\"");
    directed = !(graph instanceof UndirectedGraph);
    if (directed)
        bw.write("directed\">\n");
    else
        bw.write("undirected\">\n");

    // write graph description, if any
    String desc = graph_desc.transform(graph);
    if (desc != null)
        bw.write("<desc>" + desc + "</desc>\n");

    // write graph data out if any
    for (String key : graph_data.keySet()) {
        Transformer<Hypergraph<V, E>, ?> t = graph_data.get(key).transformer;
        Object value = t.transform(graph);
        if (value != null)
            bw.write(format("data", "key", key, value.toString()) + "\n");
    }

    // write vertex information
    writeVertexData(graph, bw);

    // write edge information
    writeEdgeData(graph, bw);

    // close graph
    bw.write("</graph>\n");
    bw.write("</graphml>\n");
    bw.flush();
    bw.close();
}
From source file:com.zimbra.cs.util.ProxyConfOverride.java
public static void writeContentToFile(String content, String filePath) throws ServiceException {
    try {
        BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
        bw.write(content);
        bw.flush();
        bw.close();
    } catch (IOException e) {
        throw ServiceException.FAILURE("Cannot write the content (" + content + ") to " + filePath, e);
    }
}
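A note on this pattern: BufferedWriter.close() already performs a final flush, so the explicit flush() before close() is redundant on the success path; its main value is making the intent obvious. A minimal sketch of the same method using try-with-resources (reusing the ServiceException.FAILURE call from the example above, not a definitive rewrite of the Zimbra code):

public static void writeContentToFile(String content, String filePath) throws ServiceException {
    // try-with-resources guarantees the writer is closed (and flushed) even if write() throws
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(filePath))) {
        bw.write(content);
        bw.flush(); // optional here: close() flushes any remaining buffered characters
    } catch (IOException e) {
        throw ServiceException.FAILURE("Cannot write the content (" + content + ") to " + filePath, e);
    }
}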
From source file:com.baomidou.mybatisplus.generator.AutoGenerator.java
/**
 * Build the service implementation class.
 *
 * @param beanName
 * @param serviceImplName
 * @param serviceName
 * @param mapperName
 * @throws IOException
 */
protected void buildServiceImpl(String beanName, String serviceImplName, String serviceName,
        String mapperName) throws IOException {
    File serviceFile = new File(PATH_SERVICE_IMPL, serviceImplName + ".java");
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(serviceFile), "utf-8"));
    bw.write("package " + config.getServiceImplPackage() + ";");
    bw.newLine();
    bw.newLine();
    bw.write("import org.springframework.stereotype.Service;");
    bw.newLine();
    bw.newLine();
    bw.write("import " + config.getMapperPackage() + "." + mapperName + ";");
    bw.newLine();
    bw.write("import " + config.getEntityPackage() + "." + beanName + ";");
    bw.newLine();
    bw.write("import " + config.getServicePackage() + "." + serviceName + ";");
    bw.newLine();
    String superServiceImpl = config.getSuperServiceImpl();
    bw.write("import " + superServiceImpl + ";");
    bw.newLine();
    bw = buildClassComment(bw, beanName + " ???");
    bw.newLine();
    bw.write("@Service");
    bw.newLine();
    superServiceImpl = superServiceImpl.substring(superServiceImpl.lastIndexOf(".") + 1);
    bw.write("public class " + serviceImplName + " extends " + superServiceImpl + "<" + mapperName + ", "
            + beanName + "> implements " + serviceName + " {");
    bw.newLine();
    bw.newLine();
    // ----------serviceEnd----------
    bw.newLine();
    bw.write("}");
    bw.flush();
    bw.close();
}
From source file:net.pms.formats.v2.SubtitleUtils.java
/**
 * Shift timing of subtitles in SSA/ASS or SRT format and convert the charset to UTF-8 if necessary.
 *
 * @param inputSubtitles Subtitles file in SSA/ASS or SRT format
 * @param timeShift Time stamp value
 * @return Converted subtitles file
 * @throws IOException
 */
public static DLNAMediaSubtitle shiftSubtitlesTimingWithUtfConversion(final DLNAMediaSubtitle inputSubtitles,
        double timeShift) throws IOException {
    if (inputSubtitles == null) {
        throw new NullPointerException("inputSubtitles should not be null.");
    }
    if (!inputSubtitles.isExternal()) {
        throw new IllegalArgumentException("inputSubtitles should be external.");
    }
    if (isBlank(inputSubtitles.getExternalFile().getName())) {
        throw new IllegalArgumentException("inputSubtitles' external file should not have blank name.");
    }
    if (inputSubtitles.getType() == null) {
        throw new NullPointerException("inputSubtitles.getType() should not be null.");
    }
    if (!isSupportsTimeShifting(inputSubtitles.getType())) {
        throw new IllegalArgumentException(
                "inputSubtitles.getType() " + inputSubtitles.getType() + " is not supported.");
    }

    final File convertedSubtitlesFile = new File(configuration.getTempFolder(),
            getBaseName(inputSubtitles.getExternalFile().getName()) + System.currentTimeMillis() + ".tmp");
    FileUtils.forceDeleteOnExit(convertedSubtitlesFile);

    BufferedReader input;
    final boolean isSubtitlesCodepageForcedInConfigurationAndSupportedByJVM =
            isNotBlank(configuration.getSubtitlesCodepage())
                    && Charset.isSupported(configuration.getSubtitlesCodepage());
    final boolean isSubtitlesCodepageAutoDetectedAndSupportedByJVM =
            isNotBlank(inputSubtitles.getExternalFileCharacterSet())
                    && Charset.isSupported(inputSubtitles.getExternalFileCharacterSet());
    if (isSubtitlesCodepageForcedInConfigurationAndSupportedByJVM) {
        input = new BufferedReader(new InputStreamReader(new FileInputStream(inputSubtitles.getExternalFile()),
                Charset.forName(configuration.getSubtitlesCodepage())));
    } else if (isSubtitlesCodepageAutoDetectedAndSupportedByJVM) {
        input = new BufferedReader(new InputStreamReader(new FileInputStream(inputSubtitles.getExternalFile()),
                Charset.forName(inputSubtitles.getExternalFileCharacterSet())));
    } else {
        input = new BufferedReader(new InputStreamReader(new FileInputStream(inputSubtitles.getExternalFile())));
    }

    final BufferedWriter output = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(convertedSubtitlesFile), Charset.forName("UTF-8")));

    String line;
    double startTime;
    double endTime;
    try {
        if (SubtitleType.ASS.equals(inputSubtitles.getType())) {
            while ((line = input.readLine()) != null) {
                if (startsWith(line, "Dialogue:")) {
                    String[] timings = splitPreserveAllTokens(line, ",");
                    if (timings.length >= 3 && isNotBlank(timings[1]) && isNotBlank(timings[2])) {
                        startTime = convertSubtitleTimingStringToTime(timings[1]);
                        endTime = convertSubtitleTimingStringToTime(timings[2]);
                        if (startTime >= timeShift) {
                            timings[1] = convertTimeToSubtitleTimingString(startTime - timeShift,
                                    TimingFormat.ASS_TIMING);
                            timings[2] = convertTimeToSubtitleTimingString(endTime - timeShift,
                                    TimingFormat.ASS_TIMING);
                            output.write(join(timings, ",") + "\n");
                        } else {
                            continue;
                        }
                    } else {
                        output.write(line + "\n");
                    }
                } else {
                    output.write(line + "\n");
                }
            }
        } else if (SubtitleType.SUBRIP.equals(inputSubtitles.getType())) {
            int n = 1;
            while ((line = input.readLine()) != null) {
                if (contains(line, "-->")) {
                    startTime = convertSubtitleTimingStringToTime(line.substring(0, line.indexOf("-->") - 1));
                    endTime = convertSubtitleTimingStringToTime(line.substring(line.indexOf("-->") + 4));
                    if (startTime >= timeShift) {
                        output.write("" + (n++) + "\n");
                        output.write(convertTimeToSubtitleTimingString(startTime - timeShift,
                                TimingFormat.SRT_TIMING));
                        output.write(" --> ");
                        output.write(convertTimeToSubtitleTimingString(endTime - timeShift,
                                TimingFormat.SRT_TIMING) + "\n");
                        while (isNotBlank(line = input.readLine())) {
                            // Read all following subs lines
                            output.write(line + "\n");
                        }
                        output.write("" + "\n");
                    }
                }
            }
        }
    } finally {
        if (output != null) {
            output.flush();
            output.close();
        }
        if (input != null) {
            input.close();
        }
    }

    final DLNAMediaSubtitle convertedSubtitles = new DLNAMediaSubtitle();
    convertedSubtitles.setExternalFile(convertedSubtitlesFile);
    convertedSubtitles.setType(inputSubtitles.getType());
    convertedSubtitles.setLang(inputSubtitles.getLang());
    convertedSubtitles.setFlavor(inputSubtitles.getFlavor());
    convertedSubtitles.setId(inputSubtitles.getId());
    return convertedSubtitles;
}
From source file:com.alibaba.rocketmq.filtersrv.filter.DynaCode.java
private String[] uploadSrcFile() throws Exception {
    List<String> srcFileAbsolutePaths = new ArrayList<String>(codeStrs.size());
    for (String code : codeStrs) {
        if (StringUtils.isNotBlank(code)) {
            String packageName = getPackageName(code);
            String className = getClassName(code);
            if (StringUtils.isNotBlank(className)) {
                File srcFile = null;
                BufferedWriter bufferWriter = null;
                try {
                    if (StringUtils.isBlank(packageName)) {
                        File pathFile = new File(sourcePath);
                        if (!pathFile.exists()) {
                            if (!pathFile.mkdirs()) {
                                throw new RuntimeException("create PathFile Error!");
                            }
                        }
                        srcFile = new File(sourcePath + FILE_SP + className + ".java");
                    } else {
                        String srcPath = StringUtils.replace(packageName, ".", FILE_SP);
                        File pathFile = new File(sourcePath + FILE_SP + srcPath);
                        if (!pathFile.exists()) {
                            if (!pathFile.mkdirs()) {
                                throw new RuntimeException("create PathFile Error!");
                            }
                        }
                        srcFile = new File(pathFile.getAbsolutePath() + FILE_SP + className + ".java");
                    }

                    synchronized (loadClass) {
                        loadClass.put(getFullClassName(code), null);
                    }

                    if (null != srcFile) {
                        logger.warn("Dyna Create Java Source File:---->" + srcFile.getAbsolutePath());
                        srcFileAbsolutePaths.add(srcFile.getAbsolutePath());
                        srcFile.deleteOnExit();
                    }

                    OutputStreamWriter outputStreamWriter = new OutputStreamWriter(
                            new FileOutputStream(srcFile), encoding);
                    bufferWriter = new BufferedWriter(outputStreamWriter);
                    for (String lineCode : code.split(LINE_SP)) {
                        bufferWriter.write(lineCode);
                        bufferWriter.newLine();
                    }
                    bufferWriter.flush();
                } finally {
                    if (null != bufferWriter) {
                        bufferWriter.close();
                    }
                }
            }
        }
    }
    return srcFileAbsolutePaths.toArray(new String[srcFileAbsolutePaths.size()]);
}
From source file:ffx.utilities.BlockAverager.java
/**
 * Constructor grabs all histograms from the file and loads them into data
 * structures. TODO: figure out how to disregard histogram-bin combos that
 * aren't (currently) changing per time.
 *
 * @param filename
 * @param testMode
 * @param grepCmd
 * @param psPerHisto
 * @param blockSizeStep
 * @param maxBlockSize
 * @throws java.io.IOException
 */
public BlockAverager(String filename, boolean testMode, Optional<String> grepCmd,
        Optional<Double> psPerHisto, Optional<Integer> blockSizeStep, Optional<Integer> maxBlockSize)
        throws IOException {
    this.TEST = testMode;
    this.psPerHisto = (psPerHisto.isPresent()) ? psPerHisto.get() : 1.0;
    this.blockSizeStep = (blockSizeStep.isPresent()) ? blockSizeStep.get() : 100;
    int linesPerHistogram = (System.getProperty("ba-lph") == null) ? 201
            : Integer.parseInt(System.getProperty("ba-lph"));

    if (TEST) {
        logger.info(" Testing Mode ");
        linesPerHistogram = 1;
    }

    File parallelInFile = new File(filename);
    int nThreads = ParallelTeam.getDefaultThreadCount();
    parallelTeam = new ParallelTeam(nThreads);
    numThreads = parallelTeam.getThreadCount();
    BlockRegion parallelBlock = new BlockRegion(parallelInFile);
    try {
        parallelTeam.execute(parallelBlock);
    } catch (Exception ex) {
        Logger.getLogger(BlockAverager.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Step 1: Find histograms and create a stream.
    Scanner scan = null;
    File outFile = null;
    if (preGrep != null) {
        File file = new File(preGrep);
        BufferedReader br = new BufferedReader(new FileReader(file));
        scan = new Scanner(br);
    } else {
        outFile = new File(filename + "-ba.tmp");
        if (outFile.exists()) {
            logger.info(format(" Previous temp file exists: %s", outFile.getName()));
            if (!outFile.canWrite()) {
                logger.severe(format("Lacked write permissions to temp file."));
            }
            System.out.print(format(" Delete it? (Y/N) "));
            Scanner kb = new Scanner(System.in);
            if (kb.nextLine().toUpperCase().startsWith("Y")) {
                outFile.delete();
                logger.info("");
            } else {
                logger.severe("Aborted by user.");
            }
        }

        // Manually accomplish a 'grep -A 201 Bins filename'.
        File inFile = new File(filename);
        BufferedReader br = new BufferedReader(new FileReader(inFile));
        scan = new Scanner(br);
        BufferedWriter bw = new BufferedWriter(new FileWriter(outFile));
        logger.info(" Parsing logfile... ");
        int numFound = 0;
        while (scan.hasNextLine()) {
            String line = scan.nextLine();
            if (TEST) {
                // No headers in test data.
                if (++numFound % 100 == 0) {
                    logger.info(format(" Parsed %d histograms.", numFound));
                }
                bw.write(line);
                bw.newLine();
                continue;
            }
            if (line.contains("Lambda Bins")) {
                if (++numFound % 100 == 0) {
                    logger.info(format(" Parsed %d histograms.", numFound));
                }
                bw.write(line);
                bw.newLine();
                for (int i = 0; i < linesPerHistogram; i++) {
                    if (!scan.hasNextLine() && i < linesPerHistogram) {
                        logger.warning(format("Found incomplete histogram: %d, %s", numFound, line));
                    }
                    bw.write(scan.nextLine());
                    bw.newLine();
                }
            }
        }
        bw.flush();
        scan = new Scanner(outFile);
    }

    // Parse stream into data structures.
    List<Bin> binList = new ArrayList<>();
    Histogram histo = null;
    while (scan.hasNextLine()) {
        String line = scan.nextLine();
        String[] tokens = line.split("\\s+");
        // Catch grep flotsam.
        if (tokens[0].startsWith("--")) {
            continue;
        }
        // Header line signals time for a new histogram.
        if (line.contains("Lambda Bins") || TEST) {
            if (histo != null) {
                histoList.add(histo);
            }
            histo = new Histogram(++histoIndexer);
            if (histoIndexer % 100 == 0) {
                if (psPerHisto.isPresent()) {
                    logger.info(format(" BlockAverager loaded %d histograms (simTime %.2f ps).",
                            histoIndexer, histoIndexer * this.psPerHisto));
                } else {
                    logger.info(format(" BlockAverager loaded %d histograms.", histoIndexer));
                }
            }
            if (TEST) {
                // No headers in test data.
                histo.bins.add(new Bin(tokens));
            }
            continue;
        }
        histo.bins.add(new Bin(tokens));
    }
    histoList.add(histo);
    Collections.sort(histoList);
    logger.info(format(""));

    numObs = histoList.size();
    this.maxBlockSize = (maxBlockSize.isPresent()) ? maxBlockSize.get() : numObs;

    // Validate.
    for (int i = 1; i < histoList.size(); i++) {
        if (histoList.get(i).index != histoList.get(i - 1).index + 1
                || histoList.get(i).bins.size() != histoList.get(i - 1).bins.size()) {
            logger.warning(format("Improper indexing or bin size mismatch. i,i-1,binsi,binsi-1: %d %d %d %d",
                    histoList.get(i).index, histoList.get(i - 1).index,
                    histoList.get(i).bins.size(), histoList.get(i - 1).bins.size()));
            throw new ArithmeticException();
        }
    }

    if (outFile != null && outFile.exists()) {
        outFile.delete();
    }
    numBins = histoList.get(0).bins.size();
    this.describe();
}
From source file:edu.isi.pfindr.learn.util.PairsFileIO.java
public void generatePairsFromTwoDifferentFilesWithClass(String inputFilePath1, String inputFilePath2,
        String outputFilePath) {
    List<String> phenotypeList1 = new ArrayList<String>();
    List<String> phenotypeList2 = new ArrayList<String>();
    try {
        phenotypeList1 = FileUtils.readLines(new File(inputFilePath1));
        phenotypeList2 = FileUtils.readLines(new File(inputFilePath2));
    } catch (IOException ioe) {
        ioe.printStackTrace();
    }

    String[] phenotype1, phenotype2;
    StringBuffer outputBuffer = new StringBuffer();
    BufferedWriter bw = null;
    try {
        bw = new BufferedWriter(new FileWriter(outputFilePath));
        int count = 0;
        for (int i = 0; i < phenotypeList1.size(); i++) {
            phenotype1 = phenotypeList1.get(i).split("\t");
            for (int j = 0; j < phenotypeList2.size(); j++) {
                count++;
                phenotype2 = phenotypeList2.get(j).split("\t");
                System.out.println("i " + i + "j " + j + " " + phenotype1[0] + " " + phenotype2[0]);
                if (phenotype1[1].equals(phenotype2[1])) { // if the classes are the same
                    outputBuffer.append(String.format("%s\t%s\t%d", phenotype1[0], phenotype2[0], 1))
                            .append("\n");
                } else {
                    outputBuffer.append(String.format("%s\t%s\t%d", phenotype1[0], phenotype2[0], 0))
                            .append("\n");
                }
                bw.append(outputBuffer.toString());
                outputBuffer.setLength(0);
            }
        }
        bw.flush();
        System.out.println("The count is: " + count);
    } catch (IOException io) {
        try {
            if (bw != null)
                bw.close();
            io.printStackTrace();
        } catch (IOException e) {
            System.out.println("Problem occured while closing output stream " + bw);
            e.printStackTrace();
        }
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        try {
            if (bw != null)
                bw.close();
        } catch (IOException e) {
            System.out.println("Problem occured while closing output stream " + bw);
            e.printStackTrace();
        }
    }
}