List of usage examples for java.io BufferedWriter newLine
public void newLine() throws IOException
From source file:gate.util.reporting.DocTimeReporter.java
/** * Provides the functionality to separate out pipeline specific benchmark * entries in separate temporary benchmark files in a temporary folder in the * current working directory.//from w ww. j ava2 s. c om * * @param benchmarkFile * An object of type File representing the input benchmark file. * @param report * A file handle to the report file to be written. * @throws BenchmarkReportFileAccessException * if any error occurs while accessing the input benchmark file or * while splitting it. * @throws BenchmarkReportExecutionException * if the given input benchmark file is modified while generating * the report. */ private void splitBenchmarkFile(File benchmarkFile, File report) throws BenchmarkReportFileAccessException, BenchmarkReportInputFileFormatException { File dir = temporaryDirectory; // Folder already exists; then delete all files in the temporary folder if (dir.isDirectory()) { File files[] = dir.listFiles(); for (int count = 0; count < files.length; count++) { if (!files[count].delete()) { throw new BenchmarkReportFileAccessException( "Could not delete files in the folder \"" + temporaryDirectory + "\""); } } } else if (!dir.mkdir()) { throw new BenchmarkReportFileAccessException( "Could not create temporary folder \"" + temporaryDirectory + "\""); } // delete report2 from the filesystem if (getPrintMedia().equalsIgnoreCase(MEDIA_TEXT)) { deleteFile(new File(report.getAbsolutePath() + ".txt")); } else if (getPrintMedia().equalsIgnoreCase(MEDIA_HTML)) { deleteFile(new File(report.getAbsolutePath() + ".html")); } RandomAccessFile in = null; BufferedWriter out = null; try { String logEntry = ""; long fromPos = 0; // File benchmarkFileName; if (getLogicalStart() != null) { fromPos = tail(benchmarkFile, FILE_CHUNK_SIZE); } in = new RandomAccessFile(benchmarkFile, "r"); if (getLogicalStart() != null) { in.seek(fromPos); } ArrayList<String> startTokens = new ArrayList<String>(); String lastStart = ""; Pattern pattern = Pattern.compile("(\\d+) (\\d+) (.*) (.*) 
\\{(.*)\\}"); Matcher matcher = null; File benchmarkFileName = null; while ((logEntry = in.readLine()) != null) { matcher = pattern.matcher(logEntry); String startToken = ""; if (logEntry.matches(".*START.*")) { String[] splittedStartEntry = logEntry.split("\\s"); if (splittedStartEntry.length > 2) { startToken = splittedStartEntry[2]; } else { throw new BenchmarkReportInputFileFormatException(getBenchmarkFile() + " is invalid."); } if (startToken.endsWith("Start")) { continue; } if (!startTokens.contains(startToken)) { // create a new file for the new pipeline startTokens.add(startToken); benchmarkFileName = new File(temporaryDirectory, startToken + "_benchmark.txt"); if (!benchmarkFileName.createNewFile()) { throw new BenchmarkReportFileAccessException("Could not create \"" + startToken + "_benchmark.txt" + "\" in directory named \"" + temporaryDirectory + "\""); } out = new BufferedWriter(new FileWriter(benchmarkFileName)); out.write(logEntry); out.newLine(); } } // if a valid benchmark entry then write it to the pipeline specific // file if (matcher != null && matcher.matches() && (validateLogEntry(matcher.group(3), startTokens) || logEntry.matches(".*documentLoaded.*"))) { startToken = matcher.group(3).split("\\.")[0]; if (!(lastStart.equals(startToken))) { if (out != null) { out.close(); } benchmarkFileName = new File(temporaryDirectory, startToken + "_benchmark.txt"); out = new BufferedWriter(new FileWriter(benchmarkFileName, true)); } if (out != null) { out.write(logEntry); out.newLine(); } lastStart = startToken; } } } catch (IOException e) { e.printStackTrace(); } finally { try { if (in != null) { in.close(); } if (out != null) { out.close(); } } catch (IOException e) { e.printStackTrace(); } } }
From source file:json_cmp.Comparer.java
public static void main(String[] args) { System.out.println("Testing Begin"); try {/*w w w . ja va 2s . c om*/ String accessLogFolder = "/Users/herizhao/workspace/accessLog/"; // String yqlFileName = "json_cmp/test1.log"; // String yqlpFileName = "json_cmp/test2.log"; String yqlFileName = "tempLog/0812_yql.res"; String yqlpFileName = "tempLog/0812_yqlp.res"; ReadResults input1 = new ReadResults(accessLogFolder + yqlFileName); ReadResults input2 = new ReadResults(accessLogFolder + yqlpFileName); Integer diffNum = 0; Integer errorCount = 0; Integer totalIDNum1 = 0; Integer totalIDNum2 = 0; Integer equalIDwithDuplicate = 0; Integer beacons = 0; Integer lineNum = 0; Integer tempCount = 0; HashMap<String, IDclass> IDarray = new HashMap<String, IDclass>(); FileOutputStream fos = new FileOutputStream( "/Users/herizhao/workspace/accessLog/json_cmp/cmp_result.txt"); OutputStreamWriter osw = new OutputStreamWriter(fos); BufferedWriter bw = new BufferedWriter(osw); FileOutputStream consoleStream = new FileOutputStream( "/Users/herizhao/workspace/accessLog/json_cmp/console"); OutputStreamWriter consoleOSW = new OutputStreamWriter(consoleStream); BufferedWriter console = new BufferedWriter(consoleOSW); while (true) { input1.ReadNextLine(); if (input1.line == null) break; input2.ReadNextLine(); if (input2.line == null) break; while (input1.line.equals("")) { lineNum++; input1.ReadNextLine(); input2.ReadNextLine(); } if (input2.line == null) break; if (input1.line == null) break; lineNum++; System.out.println("lineNum = " + lineNum); String str1 = input1.line; String str2 = input2.line; ObjectMapper mapper1 = new ObjectMapper(); ObjectMapper mapper2 = new ObjectMapper(); JsonNode root1 = mapper1.readTree(str1); JsonNode root2 = mapper2.readTree(str2); JsonNode mediaNode1 = root1.path("query").path("results").path("mediaObj"); JsonNode mediaNode2 = root2.path("query").path("results").path("mediaObj"); if (mediaNode2.isMissingNode() && !mediaNode1.isMissingNode()) tempCount += 
mediaNode1.size(); //For yqlp if (mediaNode2.isArray()) { totalIDNum2 += mediaNode2.size(); for (int i = 0; i < mediaNode2.size(); i++) { ObjectNode mediaObj = (ObjectNode) mediaNode2.get(i); mediaObj.put("yvap", ""); JsonNode streamsNode = mediaObj.path("streams"); //streams if (streamsNode.isArray()) { for (int j = 0; j < streamsNode.size(); j++) { ObjectNode streamsObj = (ObjectNode) streamsNode.get(j); changeStreamsPath(streamsObj); ChangedHost(streamsObj); //if(streamsObj.path("h264_profile").isMissingNode()) streamsObj.put("h264_profile", ""); if (streamsObj.path("is_primary").isMissingNode()) streamsObj.put("is_primary", false); } } //meta if (!mediaObj.path("meta").isMissingNode()) { ObjectNode metaObj = (ObjectNode) mediaObj.path("meta"); changeMetaThumbnail(metaObj); if (metaObj.path("show_name").isMissingNode()) metaObj.put("show_name", ""); if (metaObj.path("event_start").isMissingNode()) metaObj.put("event_start", ""); if (metaObj.path("event_stop").isMissingNode()) metaObj.put("event_stop", ""); //if(metaObj.path("credits").path("label").isMissingNode()) ((ObjectNode) metaObj.path("credits")).put("label", ""); } //Metrics -> plidl & isrc changeMetrics(mediaObj); } } //For yql if (mediaNode1.isArray()) { totalIDNum1 += mediaNode1.size(); for (int i = 0; i < mediaNode1.size(); i++) { JsonNode mediaObj = mediaNode1.get(i); ((ObjectNode) mediaObj).put("yvap", ""); //Meta //System.out.println("meta: "); if (!mediaObj.path("meta").isMissingNode()) { ObjectNode metaObj = (ObjectNode) mediaObj.path("meta"); changeMetaThumbnail(metaObj); metaObj.put("event_start", ""); metaObj.put("event_stop", ""); FloatingtoInt(metaObj, "duration"); if (metaObj.path("show_name").isMissingNode()) metaObj.put("show_name", ""); //System.out.println("thub_dem: "); if (!metaObj.path("thumbnail_dimensions").isMissingNode()) { ObjectNode thub_demObj = (ObjectNode) metaObj.path("thumbnail_dimensions"); FloatingtoInt(thub_demObj, "height"); FloatingtoInt(thub_demObj, "width"); } 
((ObjectNode) metaObj.path("credits")).put("label", ""); } //Visualseek //System.out.println("visualseek: "); if (!mediaObj.path("visualseek").isMissingNode()) { ObjectNode visualseekObj = (ObjectNode) mediaObj.path("visualseek"); FloatingtoInt(visualseekObj, "frequency"); FloatingtoInt(visualseekObj, "width"); FloatingtoInt(visualseekObj, "height"); //visualseek -> images, float to int JsonNode imagesNode = visualseekObj.path("images"); if (imagesNode.isArray()) { for (int j = 0; j < imagesNode.size(); j++) { ObjectNode imageObj = (ObjectNode) imagesNode.get(j); FloatingtoInt(imageObj, "start_index"); FloatingtoInt(imageObj, "count"); } } } //Streams //System.out.println("streams: "); JsonNode streamsNode = mediaObj.path("streams"); if (streamsNode.isArray()) { for (int j = 0; j < streamsNode.size(); j++) { ObjectNode streamsObj = (ObjectNode) streamsNode.get(j); FloatingtoInt(streamsObj, "height"); FloatingtoInt(streamsObj, "bitrate"); FloatingtoInt(streamsObj, "duration"); FloatingtoInt(streamsObj, "width"); changeStreamsPath(streamsObj); ChangedHost(streamsObj); // if(streamsObj.path("h264_profile").isMissingNode()) streamsObj.put("h264_profile", ""); if (streamsObj.path("is_primary").isMissingNode()) streamsObj.put("is_primary", false); } } //Metrics -> plidl & isrc changeMetrics(mediaObj); } } //Compare if (mediaNode2.isArray() && mediaNode1.isArray()) { for (int i = 0; i < mediaNode2.size() && i < mediaNode1.size(); i++) { JsonNode mediaObj1 = mediaNode1.get(i); JsonNode mediaObj2 = mediaNode2.get(i); if (!mediaObj1.equals(mediaObj2)) { if (!mediaObj1.path("id").toString().equals(mediaObj2.path("id").toString())) { errorCount++; } else { Integer IFdiffStreams = 0; Integer IFdiffMeta = 0; Integer IFdiffvisualseek = 0; Integer IFdiffMetrics = 0; Integer IFdifflicense = 0; Integer IFdiffclosedcaptions = 0; String statusCode = ""; MetaClass tempMeta = new MetaClass(); if (!mediaObj1.path("status").equals(mediaObj2.path("status"))) { JsonNode statusNode1 = 
mediaObj1.path("status"); JsonNode statusNode2 = mediaObj2.path("status"); if (statusNode2.path("code").toString().equals("\"100\"") || (statusNode1.path("code").toString().equals("\"400\"") && statusNode1.path("code").toString().equals("\"404\"")) || (statusNode1.path("code").toString().equals("\"200\"") && statusNode1.path("code").toString().equals("\"200\"")) || (statusNode1.path("code").toString().equals("\"200\"") && statusNode1.path("code").toString().equals("\"403\""))) statusCode = ""; else statusCode = "yql code: " + mediaObj1.path("status").toString() + " yqlp code:" + mediaObj2.path("status").toString(); } else {//Status code is 100 if (!mediaObj1.path("streams").equals(mediaObj2.path("streams"))) IFdiffStreams = 1; if (!tempMeta.CompareMeta(mediaObj1.path("meta"), mediaObj2.path("meta"), lineNum)) IFdiffMeta = 1; if (!mediaObj1.path("visualseek").equals(mediaObj2.path("visualseek"))) IFdiffvisualseek = 1; if (!mediaObj1.path("metrics").equals(mediaObj2.path("metrics"))) { IFdiffMetrics = 1; JsonNode metrics1 = mediaObj1.path("metrics"); JsonNode metrics2 = mediaObj2.path("metrics"); if (!metrics1.path("beacons").equals(metrics2.path("beacons"))) beacons++; } if (!mediaObj1.path("license").equals(mediaObj2.path("license"))) IFdifflicense = 1; if (!mediaObj1.path("closedcaptions").equals(mediaObj2.path("closedcaptions"))) IFdiffclosedcaptions = 1; } if (IFdiffStreams + IFdiffMeta + IFdiffvisualseek + IFdiffMetrics + IFdifflicense + IFdiffclosedcaptions != 0 || !statusCode.equals("")) { String ID_str = mediaObj1.path("id").toString(); if (!IDarray.containsKey(ID_str)) { IDclass temp_IDclass = new IDclass(ID_str); temp_IDclass.addNum(IFdiffStreams, IFdiffMeta, IFdiffvisualseek, IFdiffMetrics, IFdifflicense, IFdiffclosedcaptions, lineNum); if (!statusCode.equals("")) temp_IDclass.statusCode = statusCode; IDarray.put(ID_str, temp_IDclass); } else { IDarray.get(ID_str).addNum(IFdiffStreams, IFdiffMeta, IFdiffvisualseek, IFdiffMetrics, IFdifflicense, 
IFdiffclosedcaptions, lineNum); if (!statusCode.equals("")) IDarray.get(ID_str).statusCode = statusCode; } IDarray.get(ID_str).stream.CompareStream(IFdiffStreams, mediaObj1.path("streams"), mediaObj2.path("streams"), lineNum); if (!IDarray.get(ID_str).metaDone) { IDarray.get(ID_str).meta = tempMeta; IDarray.get(ID_str).metaDone = true; } } else equalIDwithDuplicate++; } } else equalIDwithDuplicate++; } } bw.flush(); console.flush(); } //while System.out.println("done"); bw.write("Different ID" + " " + "num "); bw.write(PrintStreamsTitle()); bw.write(PrintMetaTitle()); bw.write(PrintTitle()); bw.newLine(); Iterator<String> iter = IDarray.keySet().iterator(); while (iter.hasNext()) { String key = iter.next(); bw.write(key + " "); bw.write(IDarray.get(key).num.toString() + " "); bw.write(IDarray.get(key).stream.print()); bw.write(IDarray.get(key).meta.print()); bw.write(IDarray.get(key).print()); bw.newLine(); //System.out.println(key); } //System.out.println("different log num = " + diffNum); //System.out.println("same log num = " + sameLogNum); System.out.println("Different ID size = " + IDarray.size()); // System.out.println("streamEqual = " + streamEqual); // System.out.println("metaEqual = " + metaEqual); // System.out.println("metricsEqual = " + metricsEqual); // System.out.println("visualseekEqual = " + visualseekEqual); // System.out.println("licenseEqual = " + licenseEqual); // System.out.println("closedcaptionsEqualEqual = " + closedcaptionsEqual); System.out.println(tempCount); System.out.println("beacons = " + beacons); System.out.println("equalIDwithDuplicate = " + equalIDwithDuplicate); System.out.println("Total ID num yql (including duplicates) = " + totalIDNum1); System.out.println("Total ID num yqpl (including duplicates) = " + totalIDNum2); System.out.println("Error " + errorCount); bw.close(); console.close(); } catch (IOException e) { } }
From source file:edu.du.penrose.systems.fedoraApp.util.MetsBatchFileSplitter.java
/**
 * Split a file with multiple METS sections into separate files each containing
 * a single METS record. The input file must contain one or more multiple
 * &lt;mets&gt;&lt;/mets&gt; elements (sections). if true a OBJID must exist in
 * every &lt;mets&gt; element, this will become the file name. Otherwise a
 * unique file name is generated. Return true if the &lt;batch update="true"&gt;
 * is set in the batch file other wise return false (defaults to new file being
 * ingested).
 *
 * NOTE(review): end-of-file is detected by deliberately letting readLine()
 * return null and catching the resulting NullPointerException below - an
 * anti-pattern, but load-bearing here; do not "fix" without restructuring the
 * whole read loop.
 *
 * @see edu.du.penrose.systems.fedoraApp.batchIngest.bus.BatchIngestThreadManager#setBatchSetStatus(String, String)
 * @param threadStatus this object receives status updates while splitting the file.
 * @param inFile file to split
 * @param metsNewDirectory directory containing METS for new objects.
 * @param metsUpdatesDirectory directory containing METS for existing objects.
 * @param nameFileFromOBJID if true a OBJID must exist in every &lt;mets&gt; element,
 *        this will become the file name. Otherwise a unique file name is generated
 * @deprecated
 * @throws Exception on any IO error.
 */
static public void splitMetsBatchFile_version_1(BatchIngestOptions ingestOptions, ThreadStatusMsg threadStatus,
        File inFile, String metsNewDirectory, String metsUpdatesDirectory, boolean nameFileFromOBJID)
        throws FatalException {
    String metsDirectory = null; // will get set to either the new directory or the updates directory.
    FileInputStream batchFileInputStream;
    try {
        batchFileInputStream = new FileInputStream(inFile);
    } catch (FileNotFoundException e) {
        throw new FatalException(e.getMessage());
    }
    DataInputStream batchFileDataInputStream = new DataInputStream(batchFileInputStream);
    BufferedReader batchFileBufferedReader = new BufferedReader(
            new InputStreamReader(batchFileDataInputStream));
    File outFile = null;
    FileOutputStream metsFileOutputStream = null;
    BufferedWriter metsBufferedWriter = null;
    String oneLine = null;
    // Default XML declaration, overridden if the batch file supplies its own.
    String documentType = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
    int fileCount = 0;
    try {
        while (batchFileBufferedReader.ready()) {
            threadStatus.setStatus("Spliting XML file #: " + fileCount);
            oneLine = batchFileBufferedReader.readLine();
            if (oneLine.contains("<?xml version")) {
                documentType = oneLine;
            }
            // The <batch> element decides whether METS records are routed to the
            // "new" or the "updates" directory (update="true" in " or ' quotes).
            if (oneLine.contains("<batch")) {
                if (oneLine.contains(FedoraAppConstants.BATCH_FILE_UPDATE_MARKER + "=" + QUOTE + "true" + QUOTE)
                        || oneLine.contains(
                                FedoraAppConstants.BATCH_FILE_UPDATE_MARKER + "=" + APOST + "true" + APOST)) {
                    ingestOptions.setBatchIsUpdates(true);
                    metsDirectory = metsUpdatesDirectory;
                } else {
                    ingestOptions.setBatchIsUpdates(false);
                    metsDirectory = metsNewDirectory;
                }
            }
            if (oneLine.contains("<mets:mets")) {
                // The opening <mets:mets ...> tag may span several physical lines;
                // keep appending until the closing '>' is seen.
                boolean haveEntireMetsLine = false;
                while (!haveEntireMetsLine) {
                    StringBuffer tempBuffer = new StringBuffer(oneLine);
                    String moreOfMetsLine = null;
                    if (!oneLine.contains(">")) {
                        moreOfMetsLine = batchFileBufferedReader.readLine();
                        tempBuffer.append(moreOfMetsLine);
                        if (moreOfMetsLine.contains(">")) {
                            haveEntireMetsLine = true;
                            oneLine = tempBuffer.toString();
                        } else {
                            oneLine = tempBuffer.toString();
                        }
                    } else {
                        haveEntireMetsLine = true;
                    }
                }
                // Name the output file for a single Mets element and its contents.
                if (nameFileFromOBJID) {
                    String objID = MetsBatchFileSplitter.getObjID(oneLine);
                    outFile = new File(metsDirectory + objID + ".xml");
                    logger.info("outputSplitFile METS file: " + metsDirectory + objID + ".xml");
                    if (outFile.exists()) {
                        String errorMsg = "OBJID already exists:" + outFile.getName();
                        System.out.println(errorMsg);
                        logger.error(errorMsg);
                        throw new FatalException(errorMsg);
                    }
                } else {
                    outFile = new File(metsDirectory
                            + edu.du.penrose.systems.util.FileUtil.getDateTimeMilliSecondEnsureUnique()
                            + ".xml");
                }
                logger.info("outputSplitFile METS file: " + outFile.toString() + "\n\n");
                metsFileOutputStream = new FileOutputStream(outFile);
                metsBufferedWriter = new BufferedWriter(new OutputStreamWriter(metsFileOutputStream, "UTF-8"));
                metsBufferedWriter.write(documentType);
                metsBufferedWriter.newLine();
                // This is a version 1 batch file, so write a default version 2 command line for the new ingester
                metsBufferedWriter.write(FedoraAppConstants.VERSION_ONE_COMMAND_LINE);
                metsBufferedWriter.newLine();
                while (!oneLine.contains("</mets:mets")) { // null pointer on premature end of file.
                    metsBufferedWriter.write(oneLine);
                    metsBufferedWriter.newLine();
                    oneLine = batchFileBufferedReader.readLine();
                }
                metsBufferedWriter.write(oneLine);
                metsBufferedWriter.newLine();
                metsBufferedWriter.close();
                fileCount++;
            }
        } // while
    } catch (NullPointerException e) {
        // Intentional: readLine() returned null mid-record (see NOTE above).
        String errorMsg = "Unable to split files, Permature end of file: Corrupt:" + inFile.toString() + " ?";
        throw new FatalException(errorMsg);
    } catch (Exception e) {
        String errorMsg = "Unable to split files: " + e.getMessage();
        logger.fatal(errorMsg);
        throw new FatalException(errorMsg);
    } finally {
        try {
            if (batchFileBufferedReader != null) {
                batchFileBufferedReader.close();
            }
            if (metsBufferedWriter != null) {
                metsBufferedWriter.close();
            }
        } catch (IOException e) {
            throw new FatalException(e.getMessage());
        }
    }
}
From source file:it.infn.ct.nuclemd.Nuclemd.java
public String RemoveCarriageReturn(String InputFileName, String OutputFileName) { // Remove the carriage return char from a named file. FileInputStream fis;/*from ww w.j a v a 2 s . co m*/ try { fis = new FileInputStream(InputFileName); BufferedReader in = new BufferedReader(new InputStreamReader(fis)); File fout = new File(OutputFileName); FileOutputStream fos = new FileOutputStream(fout); BufferedWriter out = new BufferedWriter(new OutputStreamWriter(fos)); // The pattern matches control characters Pattern p = Pattern.compile("\r"); Matcher m = p.matcher(""); String aLine = null; try { while ((aLine = in.readLine()) != null) { m.reset(aLine); //Replaces control characters with an empty string. String result = m.replaceAll(""); out.write(result); out.newLine(); } out.close(); } catch (IOException ex) { Logger.getLogger(Nuclemd.class.getName()).log(Level.SEVERE, null, ex); } } catch (FileNotFoundException ex) { Logger.getLogger(Nuclemd.class.getName()).log(Level.SEVERE, null, ex); } log.info("\n- Writing the user's stripped file: [ " + OutputFileName.toString() + " ] to disk"); return OutputFileName; }
From source file:org.apache.ctakes.ytex.kernel.IntrinsicInfoContentEvaluatorImpl.java
/**
 * recursively compute the number of leaves. fill in the icInfoMap as we go
 * along.
 *
 * Results are memoised per node in {@code leafCache} via SoftReferences so the
 * cache can be reclaimed under memory pressure. When {@code w} is non-null the
 * leaf set of each counted concept is also written out, one tab-separated line
 * per concept.
 *
 * @param concept
 *            concept for which we should get the leaves
 * @param leafCache
 *            cache of concept's leaves, indexed by node index
 * @param icInfoMap
 *            to be updated with leaf counts, keyed by concept id
 * @param cg
 *            concept graph used to resolve node indices back to concept ids
 * @param w
 *            optional writer for dumping leaf sets; may be null
 * @param visitedNodes
 *            list of nodes that have already been visited - we don't need to
 *            revisit them when getting the leaves; may be null
 * @return the set of leaf node indices under {@code concept}
 * @throws IOException if writing to {@code w} fails
 */
private HashSet<Integer> getLeaves(ConcRel concept, SoftReference<HashSet<Integer>>[] leafCache,
        Map<String, IntrinsicICInfo> icInfoMap, ConceptGraph cg, BufferedWriter w,
        HashSet<Integer> visitedNodes) throws IOException {
    // look in cache
    SoftReference<HashSet<Integer>> refLeaves = leafCache[concept.getNodeIndex()];
    if (refLeaves != null && refLeaves.get() != null) {
        return refLeaves.get();
    }
    // not in cache - compute recursively.
    // NOTE: the cache entry is installed BEFORE the set is populated; recursive
    // calls therefore see the (still-filling) set for this node.
    HashSet<Integer> leaves = new HashSet<Integer>();
    leafCache[concept.getNodeIndex()] = new SoftReference<HashSet<Integer>>(leaves);
    if (concept.isLeaf()) {
        // for leaves, just add the concept id
        leaves.add(concept.getNodeIndex());
    } else {
        IntrinsicICInfo icInfo = icInfoMap.get(concept.getConceptID());
        // have we already computed the leaf count for this node?
        // if yes, then we can ignore previously visited nodes
        // if no, then compute it now and revisit previously visited nodes
        // if we have to
        boolean needLeaves = (icInfo != null && icInfo.getLeafCount() == 0);
        HashSet<Integer> visitedNodesLocal = visitedNodes;
        if (needLeaves || visitedNodesLocal == null) {
            // allocate a set to keep track of nodes we've already visited
            // so that we don't revisit them. if we have already computed
            // this node's leaf count then we reuse whatever the caller gave
            // us if non null, else allocate a new one.
            // if we haven't already computed this node's leaf count,
            // allocate a new set to avoid duplications in the traversal for
            // this node
            visitedNodesLocal = new HashSet<Integer>();
        }
        // for inner nodes, recurse
        for (ConcRel child : concept.getChildren()) {
            // if we've already visited a node, then don't bother adding
            // that node's leaves - we already have them
            if (!visitedNodesLocal.contains(child.getNodeIndex())) {
                leaves.addAll(getLeaves(child, leafCache, icInfoMap, cg, w, visitedNodesLocal));
            }
        }
        // add this node to the set of visited nodes so we know not to
        // revisit. This is only of importance if the caller gave us
        // a non-empty set.
        if (visitedNodes != null && visitedNodes != visitedNodesLocal) {
            visitedNodes.add(concept.getNodeIndex());
            visitedNodes.addAll(visitedNodesLocal);
        }
        // update the leaf count if we haven't done so already
        if (needLeaves) {
            icInfo.setLeafCount(leaves.size());
            // output leaves if desired
            if (w != null) {
                w.write(concept.getConceptID());
                w.write("\t");
                w.write(Integer.toString(leaves.size()));
                w.write("\t");
                Iterator<Integer> iter = leaves.iterator();
                while (iter.hasNext()) {
                    w.write(cg.getConceptList().get(iter.next()).getConceptID());
                    w.write(" ");
                }
                w.newLine();
            }
        }
    }
    return leaves;
}
From source file:stainingestimation.StainingEstimation.java
/** * Opens a dialog to let the user choose a file in which the table of the staining estimation parameters is saved. *///from w w w . ja v a2 s.c o m private void saveTable() { if (previewOriginal != null && jXTable1.getRowCount() > 0) { String sep = manager.getColumnSeparator(); String currentDir = manager.getCurrentDir(); File file; List<String> exts = new ArrayList<>(1); exts.add("csv"); List<String> descrs = new ArrayList<>(1); descrs.add("Semicolon separated file"); String filename = null; if (manager.getVisibleTMAspot() != null) { filename = Misc.FilePathStringtoFilenameWOExtension(manager.getVisibleTMAspot().getName()) + ".csv"; } file = FileChooser.chooseSavingFile(this, currentDir, filename, exts, descrs); if (file != null) { setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); // Save csv try { BufferedWriter bfw = new BufferedWriter(new FileWriter(file)); for (int i = 0; i < jXTable1.getModel().getColumnCount(); i++) { bfw.write("\"" + jXTable1.getModel().getColumnName(i) + "\""); if (i < jXTable1.getModel().getColumnCount() - 1) bfw.write(sep); } for (int i = 0; i < jXTable1.getRowCount(); i++) { bfw.newLine(); for (int j = 0; j < jXTable1.getModel().getColumnCount(); j++) { Object o = jXTable1.getModel().getValueAt(i, j); if (((DefaultTableModel) jXTable1.getModel()).getColumnClass(j) == Double.class) { bfw.write((Double.toString((Double) jXTable1.getModel().getValueAt(i, j)))); } else if (((DefaultTableModel) jXTable1.getModel()) .getColumnClass(j) == Integer.class) { bfw.write((Integer.toString((Integer) jXTable1.getModel().getValueAt(i, j)))); } else if (((DefaultTableModel) jXTable1.getModel()) .getColumnClass(j) == String.class) { bfw.write("\"" + (String) (jXTable1.getModel().getValueAt(i, j)) + "\""); } else { bfw.write(jXTable1.getModel().getValueAt(i, j).toString()); } if (j < jXTable1.getModel().getColumnCount() - 1) bfw.write(sep); } } bfw.close(); } catch (Exception e) { 
Logger.getLogger(StainingEstimation.class.getName()).log(Level.SEVERE, null, e); JOptionPane.showMessageDialog(this, "Could not save file " + file.getName() + "\n(Maybe it is in use?)", "Error writing file", JOptionPane.ERROR_MESSAGE); } manager.setCurrentDir(file.getAbsolutePath()); setCursor(Cursor.getDefaultCursor()); } } }
From source file:Install.java
public void processFile(String sourceFileStr, String destFileStr, Properties p_properties) throws IOException { File sourceFile = new File(sourceFileStr); File destFile = new File(destFileStr); System.out.print("\nProcessing " + sourceFile.getName() + "..."); fireActionEvent(sourceFile.getName()); destFile.getParentFile().mkdirs();// www .j av a 2s . c om destFile.createNewFile(); try { InputStream inputstream = getResource(sourceFileStr); BufferedReader in = new BufferedReader(new InputStreamReader(inputstream)); BufferedWriter out = new BufferedWriter(new FileWriter(destFile)); String str, newstr; while ((str = in.readLine()) != null) { if (str.startsWith("#")) // It's a comment { newstr = str; } else { newstr = str; // deal with the case of "log4j.proterties.template" if (str.indexOf("%%Jboss_JNDI_prefix%%") != -1) // has match { newstr = replace(str, "%%Jboss_JNDI_prefix%%", "topic/"); } // deal with the case of "log4j.proterties.template" else if (str.indexOf("%%ldap_user_password%%") != -1) // has // match { newstr = replace(str, "%%ldap_user_password%%", encodeMD5(p_properties.getProperty("ldap_password"))); } else if (str.indexOf("%%super_admin_password%%") != -1) // has // match { newstr = replace(str, "%%super_admin_password%%", encodeMD5(p_properties.getProperty("system4_admin_password"))); } else { // Iterate over the array to see if the string matches // *any* of the install keys for (Enumeration<?> e = p_properties.propertyNames(); e.hasMoreElements();) { String key = (String) e.nextElement(); String pattern = "%%" + key + "%%"; Object replaceObj = p_properties.get(key); String replace = replaceObj.toString(); if (str.indexOf(pattern) == -1) // no match { continue; } newstr = replace(str, pattern, replacePathSlash(replace)); str = newstr; } } } out.write(newstr); out.newLine(); } in.close(); out.close(); } catch (IOException e) { System.out.println("Error processing file."); throw e; } System.out.println("done."); }
From source file:gdsc.smlm.ij.plugins.CreateData.java
/** * Save the fluorophores to a text file//from w w w . j av a2 s.c o m * * @param fluorophores */ private void saveFluorophores(List<? extends FluorophoreSequenceModel> fluorophores) { if (!settings.saveFluorophores || fluorophores == null) return; String[] path = Utils.decodePath(settings.fluorophoresFilename); OpenDialog chooser = new OpenDialog("Fluorophores_File", path[0], path[1]); if (chooser.getFileName() != null) { settings.fluorophoresFilename = chooser.getDirectory() + chooser.getFileName(); settings.fluorophoresFilename = Utils.replaceExtension(settings.fluorophoresFilename, "xls"); BufferedWriter output = null; try { output = new BufferedWriter(new FileWriter(settings.fluorophoresFilename)); output.write(createResultsFileHeader()); output.write("#Id\tn-Blinks\tStart\tStop\t..."); output.newLine(); for (int id = 1; id <= fluorophores.size(); id++) { FluorophoreSequenceModel f = fluorophores.get(id - 1); StringBuffer sb = new StringBuffer(); sb.append(f.getId()).append("\t"); sb.append(f.getNumberOfBlinks()).append("\t"); for (double[] burst : f.getBurstSequence()) { sb.append(Utils.rounded(burst[0], 3)).append("\t").append(Utils.rounded(burst[1], 3)) .append("\t"); } output.write(sb.toString()); output.newLine(); } } catch (Exception e) { // Q. Add better handling of errors? e.printStackTrace(); IJ.log("Failed to save fluorophores to file: " + settings.fluorophoresFilename); } finally { if (output != null) { try { output.close(); } catch (IOException e) { e.printStackTrace(); } } } } }
From source file:gdsc.smlm.ij.plugins.CreateData.java
/** * Save the localisations to a text file * /*from w ww . ja v a2s .c o m*/ * @param localisations */ private void saveLocalisations(List<LocalisationModel> localisations) { if (!settings.saveLocalisations) return; sortLocalisationsByTime(localisations); // Collections.sort(localisations, new Comparator<LocalisationModel>(){ // // public int compare(LocalisationModel o1, LocalisationModel o2) // { // int cellx1 = (int)(o1.getX() / settings.cellSize); // int cellx2 = (int)(o2.getX() / settings.cellSize); // int result = cellx2 - cellx1; // if (result != 0) // return result; // int celly1 = (int)(o1.getY() / settings.cellSize); // int celly2 = (int)(o2.getY() / settings.cellSize); // result = celly2 - celly1; // if (result != 0) // return result; // return (o1.getZ() == o2.getZ()) ? 0 : (o1.getZ() == 0) ? -1 : 1; // }}); String[] path = Utils.decodePath(settings.localisationsFilename); OpenDialog chooser = new OpenDialog("Localisations_File", path[0], path[1]); if (chooser.getFileName() != null) { settings.localisationsFilename = chooser.getDirectory() + chooser.getFileName(); settings.localisationsFilename = Utils.replaceExtension(settings.localisationsFilename, "xls"); BufferedWriter output = null; try { output = new BufferedWriter(new FileWriter(settings.localisationsFilename)); output.write(createResultsFileHeader()); output.write("#T\tId\tX\tY\tZ\tIntensity"); output.newLine(); for (LocalisationModel l : localisations) { StringBuffer sb = new StringBuffer(); sb.append(l.getTime()).append("\t"); sb.append(l.getId()).append("\t"); sb.append(IJ.d2s(l.getX(), 6)).append("\t"); sb.append(IJ.d2s(l.getY(), 6)).append("\t"); sb.append(IJ.d2s(l.getZ(), 6)).append("\t"); sb.append(l.getIntensity()); output.write(sb.toString()); output.newLine(); } } catch (Exception e) { // Q. Add better handling of errors? 
e.printStackTrace(); IJ.log("Failed to save localisations to file: " + settings.localisationsFilename); } finally { if (output != null) { try { output.close(); } catch (IOException e) { e.printStackTrace(); } } } } }
From source file:com.jbombardier.reports.OldReportGenerator.java
/**
 * Writes the per-second chunked statistics for every transaction to
 * {@code persecond.results.csv} in {@code reportsDir}. One header row, then
 * one row per (transaction, one-second chunk) with count, aggregate stats and
 * percentiles, using ",\t" as the field separator.
 *
 * @param reportsDir             directory the CSV is written into
 * @param chunker                provides the time-ordered per-second chunks
 * @param totalsTransactionNames transactions to report on
 */
private void writePerSecondResults(File reportsDir, Chunker chunker, Set<String> totalsTransactionNames) {
    // try-with-resources: the original leaked the writer when an IOException
    // was thrown before the final close().
    try (BufferedWriter writer = new BufferedWriter(
            new FileWriter(new File(reportsDir, "persecond.results.csv")))) {
        DateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss.SSS");
        // Header columns, in output order (each followed by the separator,
        // matching the original row format).
        String[] headers = { "transaction", "time", "count", "total", "mean", "median", "mode", "stddev",
                "min", "max", "10th", "20th", "30th", "40th", "50th", "60th", "70th", "80th", "90th",
                "95th", "99th" };
        for (String header : headers) {
            writer.write(header);
            writer.write(",\t");
        }
        // BUG FIX: the original never terminated the header row, so the first
        // data row was fused onto the header line.
        writer.newLine();
        for (String transaction : totalsTransactionNames) {
            List<Chunk> timeOrderedResults = chunker.getTimeOrderedResults(transaction);
            for (Chunk chunk : timeOrderedResults) {
                writer.write(transaction);
                writer.write(",\t");
                writer.write(format.format(new Date(chunk.getChunkStart())));
                writer.write(",\t");
                Statistics statistics = chunk.getStatistics();
                // Numeric fields in column order; the last field carries no
                // trailing separator (as in the original).
                double[] values = { statistics.getCount(), statistics.calculateSum(),
                        statistics.calculateMean(), statistics.calculateMedian(),
                        statistics.calculateMode(), statistics.calculateStandardDeviationFast(),
                        statistics.calculateMinimum(), statistics.calculateMaximum(),
                        statistics.calculatePercentile(10), statistics.calculatePercentile(20),
                        statistics.calculatePercentile(30), statistics.calculatePercentile(40),
                        statistics.calculatePercentile(50), statistics.calculatePercentile(60),
                        statistics.calculatePercentile(70), statistics.calculatePercentile(80),
                        statistics.calculatePercentile(90), statistics.calculatePercentile(95),
                        statistics.calculatePercentile(99) };
                for (int i = 0; i < values.length; i++) {
                    writer.write(Double.toString(values[i]));
                    if (i < values.length - 1) {
                        writer.write(",\t");
                    }
                }
                writer.newLine();
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}