List of usage examples for java.io.BufferedWriter.newLine()
public void newLine() throws IOException
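newLine() writes the platform's line separator (the value of the line.separator system property) rather than a hard-coded '\n', so output files use the correct line endings on both Windows and Unix. Before the project examples below, here is a minimal self-contained sketch of the usual write-then-newLine pattern; the file name example.txt is only a placeholder:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class NewLineExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources flushes and closes the writer automatically
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("example.txt"))) {
            writer.write("first line");
            writer.newLine(); // platform-specific line separator
            writer.write("second line");
            writer.newLine();
        }
    }
}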
From source file:fastcall.FastCallSNP.java
private void callSNPByChromosome(int currentChr, String referenceFileS, String vcfDirS) {
    int chrIndex = Arrays.binarySearch(chroms, currentChr);
    String chrSeq = genomeFa.getSeq(chrIndex).toUpperCase();
    int regionStart = 1;
    int regionEnd = chrSeq.length();
    this.performPileup(currentChr, regionStart, regionEnd, referenceFileS);
    String outfileS = "chr" + FStringUtils.getNDigitNumber(3, currentChr) + ".VCF.txt";
    outfileS = new File(vcfDirS, outfileS).getAbsolutePath();
    int[][] binBound = this.creatBins(currentChr, binSize, regionStart, regionEnd);
    try {
        HashMap<String, BufferedReader> bamPathPileupReaderMap = this.getBamPathPileupReaderMap();
        ConcurrentHashMap<BufferedReader, List<String>> readerRemainderMap = this
                .getReaderRemainderMap(bamPathPileupReaderMap);
        BufferedWriter bw = IoUtils.getTextWriter(outfileS);
        bw.write(this.getAnnotation(referenceFileS));
        bw.write(this.getVCFHeader());
        bw.newLine();
        for (int i = 0; i < binBound.length; i++) {
            long startTimePoint = System.nanoTime();
            int binStart = binBound[i][0];
            int binEnd = binBound[i][1];
            ConcurrentHashMap<String, List<List<String>>> bamPileupResultMap = this.getBamPileupResultMap(
                    currentChr, binStart, binEnd, bamPathPileupReaderMap, readerRemainderMap);
            StringBuilder[][] baseSb = this.getPopulateBaseBuilder(binStart, binEnd);
            int[][] depth = this.getPopulatedDepthArray(binStart, binEnd);
            this.fillDepthAndBase(bamPileupResultMap, baseSb, depth, binStart);
            String[][] base = this.getBaseMatrix(baseSb);
            ArrayList<Integer> positionList = this.getPositionList(binStart, binEnd);
            ConcurrentHashMap<Integer, String> posVCFMap = new ConcurrentHashMap(
                    (int) ((binEnd - binStart + 1) * 1.5));
            this.calculateVCF(posVCFMap, positionList, currentChr, binStart, chrSeq, depth, base);
            for (int j = 0; j < positionList.size(); j++) {
                String vcfStr = posVCFMap.get(positionList.get(j));
                if (vcfStr == null)
                    continue;
                bw.write(vcfStr);
                bw.newLine();
            }
            StringBuilder sb = new StringBuilder();
            sb.append("Bin from ").append(binStart).append(" to ").append(binEnd).append(" is finished. Took ")
                    .append(Benchmark.getTimeSpanSeconds(startTimePoint)).append(" seconds. Memory used: ")
                    .append(Benchmark.getUsedMemoryGb()).append(" Gb");
            System.out.println(sb.toString());
        }
        bw.flush();
        bw.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    System.out.println(
            "Chromosome " + String.valueOf(currentChr) + " is finished. File written to " + outfileS + "\n");
}
From source file:com.emc.ecs.sync.storage.CasStorageTest.java
@Test
public void testVerify() throws Exception {
    FPPool sourcePool = new FPPool(connectString1);
    FPPool destPool = new FPPool(connectString2);

    // create random data (capture summary for comparison)
    StringWriter sourceSummary = new StringWriter();
    List<String> clipIds = createTestClips(sourcePool, 10240, 250, sourceSummary);

    try {
        // write clip file
        File clipFile = File.createTempFile("clip", "lst");
        clipFile.deleteOnExit();
        BufferedWriter writer = new BufferedWriter(new FileWriter(clipFile));
        for (String clipId : clipIds) {
            writer.write(clipId);
            writer.newLine();
        }
        writer.close();

        // test sync with verify
        EcsSync sync = createEcsSync(connectString1, connectString2, CAS_THREADS, true);
        sync.getSyncConfig().getOptions().setSourceListFile(clipFile.getAbsolutePath());
        sync.getSyncConfig().getOptions().setVerify(true);
        run(sync);
        Assert.assertEquals(0, sync.getStats().getObjectsFailed());

        // test verify only
        sync = createEcsSync(connectString1, connectString2, CAS_THREADS, true);
        sync.getSyncConfig().getOptions().setSourceListFile(clipFile.getAbsolutePath());
        sync.getSyncConfig().getOptions().setVerifyOnly(true);
        run(sync);
        Assert.assertEquals(0, sync.getStats().getObjectsFailed());

        // delete clips from both
        delete(sourcePool, clipIds);
        delete(destPool, clipIds);

        // create new clips (ECS has a problem reading previously deleted and recreated clip IDs)
        clipIds = createTestClips(sourcePool, 10240, 250, sourceSummary);
        writer = new BufferedWriter(new FileWriter(clipFile));
        for (String clipId : clipIds) {
            writer.write(clipId);
            writer.newLine();
        }
        writer.close();

        // test sync+verify with failures
        sync = createEcsSync(connectString1, connectString2, CAS_THREADS, true);
        sync.getSyncConfig().getOptions().setSourceListFile(clipFile.getAbsolutePath());
        ByteAlteringFilter.ByteAlteringConfig filter = new ByteAlteringFilter.ByteAlteringConfig();
        sync.getSyncConfig().setFilters(Collections.singletonList(filter));
        sync.getSyncConfig().getOptions().setRetryAttempts(0); // retries will circumvent this test
        sync.getSyncConfig().getOptions().setVerify(true);
        run(sync);
        Assert.assertTrue(filter.getModifiedObjects() > 0);
        Assert.assertEquals(filter.getModifiedObjects(), sync.getStats().getObjectsFailed());
    } finally {
        // delete clips from both
        delete(sourcePool, clipIds);
        delete(destPool, clipIds);
        try {
            sourcePool.Close();
        } catch (Throwable t) {
            log.warn("failed to close source pool", t);
        }
        try {
            destPool.Close();
        } catch (Throwable t) {
            log.warn("failed to close dest pool", t);
        }
    }
}
From source file:gdsc.smlm.ij.plugins.pcpalm.PCPALMClusters.java
/**
 * Saves the histogram to the selected file. Updates the filename property of the histogram object.
 *
 * @param histogramData
 * @param filename
 */
private boolean saveHistogram(HistogramData histogramData, String filename) {
    if (filename == null)
        return false;
    float[][] hist = histogramData.histogram;
    filename = Utils.replaceExtension(filename, "tsv");
    BufferedWriter output = null;
    try {
        output = new BufferedWriter(new FileWriter(filename));
        if (histogramData.isCalibrated()) {
            output.write(String.format("Frames %d", histogramData.frames));
            output.newLine();
            output.write(String.format("Area %f", histogramData.area));
            output.newLine();
            output.write(String.format("Units %s", histogramData.units));
            output.newLine();
        }
        output.write("Size\tFrequency");
        output.newLine();
        for (int i = 0; i < hist[0].length; i++) {
            output.write(String.format("%d\t%s", (int) hist[0][i], Utils.rounded(hist[1][i])));
            output.newLine();
        }
        histogramData.filename = filename;
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        IJ.log("Failed to save histogram to file: " + filename);
    } finally {
        if (output != null) {
            try {
                output.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
    return false;
}
From source file:ml.shifu.shifu.core.processor.StatsModelProcessor.java
/**
 * Compute correlation value according to correlation statistics from correlation MR job.
 *
 * @param corrMap
 *            CorrelationWritable map read from MR job output file
 * @throws IOException
 *             any IOException to write correlation value to csv file.
 */
private void computeCorrValue(SortedMap<Integer, CorrelationWritable> corrMap) throws IOException {
    boolean hasCandidates = CommonUtils.hasCandidateColumns(this.columnConfigList);
    String localCorrelationCsv = super.pathFinder.getLocalCorrelationCsvPath();
    ShifuFileUtils.createFileIfNotExists(localCorrelationCsv, SourceType.LOCAL);
    BufferedWriter writer = null;
    Map<Integer, double[]> finalCorrMap = new HashMap<>();
    try {
        writer = ShifuFileUtils.getWriter(localCorrelationCsv, SourceType.LOCAL);
        writer.write(getColumnIndexes());
        writer.newLine();
        writer.write(getColumnNames());
        writer.newLine();
        for (Entry<Integer, CorrelationWritable> entry : corrMap.entrySet()) {
            ColumnConfig xColumnConfig = this.columnConfigList.get(entry.getKey());
            if (xColumnConfig.getColumnFlag() == ColumnFlag.Meta
                    || (hasCandidates && !ColumnFlag.Candidate.equals(xColumnConfig.getColumnFlag()))) {
                continue;
            }
            CorrelationWritable xCw = entry.getValue();
            double[] corrArray = new double[this.columnConfigList.size()];
            for (int i = 0; i < corrArray.length; i++) {
                ColumnConfig yColumnConfig = this.columnConfigList.get(i);
                if (yColumnConfig.getColumnFlag() == ColumnFlag.Meta) {
                    continue;
                }
                if (entry.getKey() > i) {
                    double[] reverseDoubleArray = finalCorrMap.get(i);
                    if (reverseDoubleArray != null) {
                        corrArray[i] = reverseDoubleArray[entry.getKey()];
                    } else {
                        corrArray[i] = 0d;
                    }
                    // not compute all, only up-right matrix are computed, such case, just get [i, j] from [j, i]
                    continue;
                }
                double numerator = xCw.getAdjustCount()[i] * xCw.getXySum()[i]
                        - xCw.getAdjustSumX()[i] * xCw.getAdjustSumY()[i];
                double denominator1 = Math.sqrt(xCw.getAdjustCount()[i] * xCw.getXxSum()[i]
                        - xCw.getAdjustSumX()[i] * xCw.getAdjustSumX()[i]);
                double denominator2 = Math.sqrt(xCw.getAdjustCount()[i] * xCw.getYySum()[i]
                        - xCw.getAdjustSumY()[i] * xCw.getAdjustSumY()[i]);
                if (Double.compare(denominator1, Double.valueOf(0d)) == 0
                        || Double.compare(denominator2, Double.valueOf(0d)) == 0) {
                    corrArray[i] = 0d;
                } else {
                    corrArray[i] = numerator / (denominator1 * denominator2);
                }
                // if(corrArray[i] > 1.0005d || (entry.getKey() == 54 && i == 2124)) {
                if (corrArray[i] > 1.0005d) {
                    log.warn("Correlation value for columns {} {} > 1, below is debug info.", entry.getKey(), i);
                    log.warn("DEBUG: corr {}, value > 1d, numerator " + numerator + " denominator1 " + denominator1
                            + " denominator2 " + denominator2 + " {}, {}",
                            numerator / (denominator1 * denominator2), entry.getKey(), i);
                    log.warn("DEBUG: xCw.getAdjustCount()[i] * xCw.getXySum()[i] - xCw.getAdjustSumX()[i] * xCw.getAdjustSumY()[i] : {} * {} - {} * {} ",
                            xCw.getAdjustCount()[i], xCw.getXySum()[i], xCw.getAdjustSumX()[i],
                            xCw.getAdjustSumY()[i]);
                    log.warn("DEBUG: xCw.getAdjustCount()[i] * xCw.getXxSum()[i] - xCw.getAdjustSumX()[i] * xCw.getAdjustSumX()[i] : {} * {} - {} * {} ",
                            xCw.getAdjustCount()[i], xCw.getXxSum()[i], xCw.getAdjustSumX()[i],
                            xCw.getAdjustSumX()[i]);
                    log.warn("DEBUG: xCw.getAdjustCount()[i] * xCw.getYySum()[i] - xCw.getAdjustSumY()[i] * xCw.getAdjustSumY()[i] : {} * {} - {} * {} ",
                            xCw.getAdjustCount()[i], xCw.getYySum()[i], xCw.getAdjustSumY()[i],
                            xCw.getAdjustSumY()[i]);
                }
            }
            // put to current map
            finalCorrMap.put(entry.getKey(), corrArray);
            // write to csv
            String corrStr = Arrays.toString(corrArray);
            String adjustCorrStr = corrStr.substring(1, corrStr.length() - 1);
            writer.write(entry.getKey() + "," + this.columnConfigList.get(entry.getKey()).getColumnName() + ","
                    + adjustCorrStr);
            writer.newLine();
        }
    } finally {
        IOUtils.closeQuietly(writer);
    }
}
From source file:com.emc.ecs.sync.storage.CasStorageTest.java
private void testSyncClipList(int numClips, int maxBlobSize) throws Exception {
    FPPool sourcePool = new FPPool(connectString1);
    FPPool destPool = new FPPool(connectString2);

    // create random data (capture summary for comparison)
    StringWriter sourceSummary = new StringWriter();
    List<String> clipIds = createTestClips(sourcePool, maxBlobSize, numClips, sourceSummary);

    try {
        // write clip file
        File clipFile = File.createTempFile("clip", "lst");
        clipFile.deleteOnExit();
        BufferedWriter writer = new BufferedWriter(new FileWriter(clipFile));
        for (String clipId : clipIds) {
            log.debug("created {}", clipId);
            writer.write(clipId);
            writer.newLine();
        }
        writer.close();

        EcsSync sync = createEcsSync(connectString1, connectString2, CAS_THREADS, true);
        sync.getSyncConfig().getOptions().setSourceListFile(clipFile.getAbsolutePath());
        run(sync);

        Assert.assertEquals(0, sync.getStats().getObjectsFailed());
        Assert.assertEquals(numClips, sync.getStats().getObjectsComplete());

        String destSummary = summarize(destPool, clipIds);
        Assert.assertEquals("query summaries different", sourceSummary.toString(), destSummary);
    } finally {
        delete(sourcePool, clipIds);
        delete(destPool, clipIds);
        try {
            sourcePool.Close();
        } catch (Throwable t) {
            log.warn("failed to close source pool", t);
        }
        try {
            destPool.Close();
        } catch (Throwable t) {
            log.warn("failed to close dest pool", t);
        }
    }
}
From source file:org.matsim.pt.counts.obsolete.PtCountSimComparisonKMLWriter.java
/**
 * Creates the CountsErrorGraph for all the data
 *
 * @param kmlFilename
 *            the filename of the kml file
 * @param visible
 *            true if initially visible
 * @return the ScreenOverlay Feature
 */
private ScreenOverlayType createBiasErrorGraph(PtCountsType type, String kmlFilename) {
    int index = kmlFilename.lastIndexOf(System.getProperty("file.separator"));
    if (index == -1) {
        index = kmlFilename.lastIndexOf('/');
    }
    String outdir;
    if (index == -1) {
        outdir = "";
    } else {
        outdir = kmlFilename.substring(0, index) + System.getProperty("file.separator");
    }
    // ------------------------------------------------------------------------------
    List<CountSimComparison> countComparisonFilter;
    switch (type) {
    case Boarding:
        countComparisonFilter = this.boardCountComparisonFilter.getCountsForHour(null);
        break;
    case Alighting:
        countComparisonFilter = this.alightCountComparisonFilter.getCountsForHour(null);
        break;
    default:
        countComparisonFilter = this.occupancyCountComparisonFilter.getCountsForHour(null);
    }
    PtBiasErrorGraph pbeg = new PtBiasErrorGraph(countComparisonFilter, this.iter, null,
            "error graph - " + type.name());
    pbeg.createChart(0);
    double[] meanError = pbeg.getMeanRelError();
    double[] meanBias = pbeg.getMeanAbsBias();
    String file = outdir + "biasErrorGraphData" + type.name() + ".txt";
    log.info("writing chart data to " + new File(file).getAbsolutePath());
    try {
        BufferedWriter bwriter = IOUtils.getBufferedWriter(file);
        StringBuilder buffer = new StringBuilder(100);
        buffer.append("hour \t mean relative error \t mean absolute bias");
        bwriter.write(buffer.toString());
        bwriter.newLine();
        for (int i = 0; i < meanError.length; i++) {
            buffer.delete(0, buffer.length());
            buffer.append(i + 1);
            buffer.append('\t');
            buffer.append(meanError[i]);
            buffer.append('\t');
            buffer.append(meanBias[i]);
            bwriter.write(buffer.toString());
            bwriter.newLine();
        }
        bwriter.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    String chartFilename = "errorGraphErrorBias" + type.name() + ".png";
    try {
        writeChartToKmz(chartFilename, pbeg.getChart());
        return createOverlayBottomRight(chartFilename, "Error Graph [Error/Bias]");
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}
From source file:ml.shifu.shifu.util.CommonUtils.java
public static void writeFeatureImportance(String fiPath, Map<Integer, MutablePair<String, Double>> importances)
        throws IOException {
    ShifuFileUtils.createFileIfNotExists(fiPath, SourceType.LOCAL);
    BufferedWriter writer = null;
    log.info("Writing feature importances to file {}", fiPath);
    try {
        writer = ShifuFileUtils.getWriter(fiPath, SourceType.LOCAL);
        writer.write("column_id\t\tcolumn_name\t\timportance");
        writer.newLine();
        for (Map.Entry<Integer, MutablePair<String, Double>> entry : importances.entrySet()) {
            String content = entry.getKey() + "\t\t" + entry.getValue().getKey() + "\t\t"
                    + entry.getValue().getValue();
            writer.write(content);
            writer.newLine();
        }
        writer.flush();
    } finally {
        IOUtils.closeQuietly(writer);
    }
}
From source file:com.baomidou.mybatisplus.generator.AutoGenerator.java
/**
 * Build the mapper XML file.
 *
 * @param columns
 * @param types
 * @param comments
 * @throws IOException
 */
protected void buildMapperXml(List<String> columns, List<String> types, List<String> comments,
        Map<String, IdInfo> idMap, String mapperName, String mapperXMLName) throws IOException {
    File mapperXmlFile = new File(PATH_XML, mapperXMLName + ".xml");
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(mapperXmlFile)));
    bw.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
    bw.newLine();
    bw.write("<!DOCTYPE mapper PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\" \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">");
    bw.newLine();
    bw.write("<mapper namespace=\"" + config.getMapperPackage() + "." + mapperName + "\">");
    bw.newLine();
    bw.newLine();
    /*
     * build the SQL fragments for this SqlMapper
     */
    buildSQL(bw, idMap, columns);
    bw.write("</mapper>");
    bw.flush();
    bw.close();
}
From source file:io.siddhi.extension.io.file.FileSourceLineModeTestCase.java
@Test
public void siddhiIoFileTest16() throws InterruptedException, CannotRestoreSiddhiAppStateException {
    log.info("test SiddhiIoFile [mode=line] Test 16");

    String streams = "" +
            "@App:name('TestSiddhiApp')" +
            "@source(type='file', mode='line'," +
            "file.uri='file:/" + dirUri + "/line/xml/xml_line.txt', " +
            "tailing='true', " +
            "@map(type='xml'))" +
            "define stream FooStream (symbol string, price float, volume long); " +
            "define stream BarStream (symbol string, price float, volume long); ";

    String query = "" +
            "from FooStream " +
            "select * " +
            "insert into BarStream; ";

    SiddhiManager siddhiManager = new SiddhiManager();
    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);
    SiddhiAppRuntime siddhiAppRuntime2 = siddhiManager.createSiddhiAppRuntime(streams + query);

    siddhiAppRuntime.addCallback("BarStream", new StreamCallback() {
        @Override
        public void receive(Event[] events) {
            EventPrinter.print(events);
            int n = count.incrementAndGet();
            for (Event event : events) {
                switch (n) {
                case 1:
                    AssertJUnit.assertEquals(10000L, event.getData(2));
                    break;
                case 2:
                    AssertJUnit.assertEquals(10001L, event.getData(2));
                    break;
                case 3:
                    AssertJUnit.assertEquals(10002L, event.getData(2));
                    break;
                case 4:
                    AssertJUnit.assertEquals(10003L, event.getData(2));
                    break;
                case 5:
                    AssertJUnit.assertEquals(10004L, event.getData(2));
                    break;
                default:
                    AssertJUnit.fail();
                }
            }
        }
    });

    siddhiAppRuntime2.addCallback("BarStream", new StreamCallback() {
        @Override
        public void receive(Event[] events) {
            EventPrinter.print(events);
            int n = count.incrementAndGet();
            for (Event event : events) {
                switch (n) {
                case 6:
                    AssertJUnit.assertEquals(1000L, event.getData(2));
                    break;
                case 7:
                    AssertJUnit.assertEquals(2000L, event.getData(2));
                    break;
                default:
                    AssertJUnit.fail();
                }
            }
        }
    });

    siddhiAppRuntime.start();
    SiddhiTestHelper.waitForEvents(waitTime, 5, count, timeout);
    byte[] snapshot = siddhiAppRuntime.snapshot();
    siddhiAppRuntime.shutdown();

    File file = new File(dirUri + "/line/xml/xml_line.txt");
    try {
        StringBuilder sb = new StringBuilder();
        sb.append("<events>").append("<event>").append("<symbol>").append("GOOGLE").append("</symbol>")
                .append("<price>").append("100").append("</price>").append("<volume>").append("1000")
                .append("</volume>").append("</event>").append("</events>\n");
        sb.append("<events>").append("<event>").append("<symbol>").append("YAHOO").append("</symbol>")
                .append("<price>").append("200").append("</price>").append("<volume>").append("2000")
                .append("</volume>").append("</event>").append("</events>");
        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file, true));
        bufferedWriter.write(sb.toString());
        bufferedWriter.newLine();
        bufferedWriter.flush();
        bufferedWriter.close();
    } catch (IOException e) {
        log.error(e.getMessage());
    }

    siddhiAppRuntime2.restore(snapshot);
    siddhiAppRuntime2.start();
    SiddhiTestHelper.waitForEvents(waitTime, 7, count, timeout);

    //assert event count
    AssertJUnit.assertEquals("Number of events", 7, count.get());
    siddhiAppRuntime.shutdown();
}
From source file:io.siddhi.extension.io.file.FileSourceLineModeTestCase.java
@Test
public void siddhiIoFileTest3() throws InterruptedException {
    log.info("test SiddhiIoFile [mode=line] Test 3");

    String streams = "" +
            "@App:name('TestSiddhiApp')" +
            "@source(type='file', mode='line'," +
            "dir.uri='file:/" + dirUri + "/line/xml', " +
            "tailing='true', " +
            "@map(type='xml'))" +
            "define stream FooStream (symbol string, price float, volume long); " +
            "define stream BarStream (symbol string, price float, volume long); ";

    String query = "" +
            "from FooStream " +
            "select * " +
            "insert into BarStream; ";

    SiddhiManager siddhiManager = new SiddhiManager();
    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(streams + query);

    siddhiAppRuntime.addCallback("BarStream", new StreamCallback() {
        @Override
        public void receive(Event[] events) {
            EventPrinter.print(events);
            int n = count.incrementAndGet();
            for (Event event : events) {
                switch (n) {
                case 1:
                    AssertJUnit.assertEquals(10000L, event.getData(2));
                    break;
                case 2:
                    AssertJUnit.assertEquals(10001L, event.getData(2));
                    break;
                case 3:
                    AssertJUnit.assertEquals(10002L, event.getData(2));
                    break;
                case 4:
                    AssertJUnit.assertEquals(10003L, event.getData(2));
                    break;
                case 5:
                    AssertJUnit.assertEquals(10004L, event.getData(2));
                    break;
                case 6:
                    AssertJUnit.assertEquals(1000L, event.getData(2));
                    break;
                case 7:
                    AssertJUnit.assertEquals(2000L, event.getData(2));
                    break;
                default:
                    AssertJUnit.fail("More events received than expected.");
                }
            }
        }
    });

    Thread t1 = new Thread(new Runnable() {
        public void run() {
            siddhiAppRuntime.start();
        }
    });
    t1.start();
    SiddhiTestHelper.waitForEvents(waitTime, 5, count, timeout);

    Thread t2 = new Thread(new Runnable() {
        @Override
        public void run() {
            File file = new File(dirUri + "/line/xml/xml_line (3rd copy).txt");
            try {
                StringBuilder sb = new StringBuilder();
                sb.append("<events>").append("<event>").append("<symbol>").append("GOOGLE").append("</symbol>")
                        .append("<price>").append("100").append("</price>").append("<volume>").append("1000")
                        .append("</volume>").append("</event>").append("</events>\n");
                sb.append("<events>").append("<event>").append("<symbol>").append("YAHOO").append("</symbol>")
                        .append("<price>").append("200").append("</price>").append("<volume>").append("2000")
                        .append("</volume>").append("</event>").append("</events>");
                BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file, true));
                bufferedWriter.write(sb.toString());
                bufferedWriter.newLine();
                bufferedWriter.flush();
                bufferedWriter.close();
            } catch (IOException e) {
                log.error(e.getMessage());
            }
        }
    });
    t2.start();
    SiddhiTestHelper.waitForEvents(waitTime, 7, count, timeout);

    //assert event count
    AssertJUnit.assertEquals("Number of events", 7, count.get());
    siddhiAppRuntime.shutdown();
}
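The two Siddhi tests above append new events to a file that is being tailed by the file source; stripped down to the BufferedWriter part, the pattern is simply an append-mode FileWriter plus newLine() and flush(). A minimal sketch of that pattern, with a placeholder path and payload (not taken from the test code itself):

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class AppendLineExample {
    public static void main(String[] args) throws IOException {
        // the second FileWriter argument 'true' opens the file in append mode
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("events.txt", true))) {
            writer.write("<events><event><symbol>GOOGLE</symbol></event></events>");
            writer.newLine();
            writer.flush(); // make the appended line visible to a tailing reader right away
        }
    }
}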