List of usage examples for java.io.PrintStream.close()
public void close()
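PrintStream.close() flushes the stream and then closes the underlying OutputStream, so closing the outermost stream in a wrapper chain is enough. Since Java 7, PrintStream is AutoCloseable (via Closeable), so try-with-resources can replace the explicit close() calls seen in most of the examples below. A minimal sketch of both idioms; the file name "demo.txt" and the printed text are placeholders, not taken from any example:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;

public class PrintStreamCloseDemo {
    public static void main(String[] args) throws IOException {
        // Explicit close in a finally block (the pre-Java-7 idiom used below).
        // close() flushes and also closes the wrapped FileOutputStream.
        PrintStream out = new PrintStream(new FileOutputStream("demo.txt"), false, "UTF-8");
        try {
            out.println("hello");
        } finally {
            out.close();
        }

        // try-with-resources closes the stream automatically.
        try (PrintStream out2 = new PrintStream(new FileOutputStream("demo.txt"), false, "UTF-8")) {
            out2.println("hello again");
            // print/println never throw IOException; failed writes are
            // reported through checkError() instead.
            if (out2.checkError()) {
                System.err.println("write to demo.txt failed");
            }
        }
    }
}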
From source file:com.fluidops.iwb.deepzoom.CXMLServlet.java
/**
 * We handle different types of requests:
 * 1) queries: these produce results according to the CXML standard, making reference to a collection.xml file
 * 2) collection.xml files that contain the images for the query results
 * 3) the jpg images themselves
 */
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    String requestURI = req.getRequestURI();
    initialize();

    // this is the request for the image tiles
    if (requestURI.endsWith("jpg") && requestURI.contains("dzimages")) {
        // The format of such a request is as follows: /pivot/dzimages/09/1113490945_files/6/0_0.jpg
        String filestring = requestURI.substring(req.getRequestURI().lastIndexOf("dzimages"));
        String filename = filestring.substring(filestring.indexOf("/") + 1, filestring.indexOf("_"));
        String substring = requestURI.substring(req.getRequestURI().lastIndexOf("files"));
        int zoomLevel = Integer
                .parseInt(substring.substring(substring.indexOf("/") + 1, substring.lastIndexOf("/")));
        try {
            DeepZoomCollection.handleTileRequest(filename, zoomLevel, resp);
        } catch (Exception e) {
            logger.trace("Exception while loading images: " + e.getMessage(), e);
            // TODO: for now, problems when loading images are ignored; only the exception on the console is avoided
        }
        return;
    }

    // this is the request for tiles of a dynamic collection
    if (requestURI.endsWith("jpg")) {
        // The format of such a request is as follows: /wikipedia/collection3_files/8/0_0.jpg
        // wikipedia identifies the name of the global collection (currently we assume there is only one dynamic collection)
        // collection3 identifies the query that has generated a query result, which is a subset of the global collection
        // The 8 is the zoom level (how deep we have zoomed in)
        // 0_0 identifies the position of the tile (horizontal and vertical offset)
        int queryNumber = Integer
                .parseInt(requestURI.substring(requestURI.indexOf("collection") + 10, requestURI.indexOf("_")));
        String substring = requestURI.substring(req.getRequestURI().lastIndexOf("files"));
        int zoomLevel = Integer
                .parseInt(substring.substring(substring.indexOf("/") + 1, substring.lastIndexOf("/")));
        int x_Offset = Integer
                .parseInt(substring.substring(substring.lastIndexOf("/") + 1, substring.lastIndexOf("_")));
        int y_Offset = Integer
                .parseInt(substring.substring(substring.lastIndexOf("_") + 1, substring.lastIndexOf(".")));
        Vector<String> imageVector = getImagesFromCacheFile("imageCache", queryNumber);
        try {
            DeepZoomCollection.handleTileRequest(imageVector, zoomLevel, x_Offset, y_Offset, resp);
        } catch (Exception e) {
            logger.trace("Exception while loading images: " + e.getMessage(), e);
            // TODO: for now, problems when loading images are ignored; only the exception on the console is avoided
        }
        return;
    }

    PrintStream out = new PrintStream(resp.getOutputStream(), false, "UTF-8");
    if (requestURI.endsWith(".xml")) {
        resp.setContentType("text/xml");
        int collectionNumber = Integer.parseInt(
                requestURI.substring(requestURI.indexOf("collection") + 10, requestURI.indexOf(".xml")));
        getFromCacheFile("collectionCache", collectionNumber, out);
        out.flush();
        out.close();
        return;
    }

    String q = PivotControl.decodeQuery(req.getParameter("q"));
    q = StringEscapeUtils.unescapeHtml(q);
    String uriParm = req.getParameter("uri");
    Repository repository = Global.repository;
    URI uri = null;
    if (uriParm != null)
        uri = ValueFactoryImpl.getInstance().createURI(uriParm);

    if (q != null) {
        int maxEntities = 1000;
        try {
            maxEntities = Integer.parseInt(req.getParameter("maxEntities"));
        } catch (NumberFormatException e) {
            logger.debug("wrong number format in parameter 'maxEntities'");
        }
        int maxFacets = 0;
        try {
            maxFacets = Integer.parseInt(req.getParameter("maxFacets"));
        } catch (NumberFormatException e) {
            logger.debug("wrong number format in parameter 'maxFacets'");
        }
        int hash = hash(q + maxEntities + maxFacets);
        String res = null;
        validateCache();
        res = getFromCacheFile("resultCache", hash, out);
        if (res != null) {
            logger.trace("Result loaded from cache...");
            out.close();
        } else {
            handleQuery(q, uri, repository, out, req, maxEntities, maxFacets);
            out.close();
        }
        return;
    }
}
From source file:hudson.scm.CvsTagsParamDefinition.java
@Exported
public ListBoxModel getSymbolicNames() {
    ListBoxModel model = new ListBoxModel();
    CvsChangeSet changeSet = null;

    RlogCommand statusCommand = new RlogCommand();
    statusCommand.setHeaderOnly(true);
    statusCommand.setModule(moduleName);
    statusCommand.setRecursive(true);

    try {
        final File tempRlogSpill = File.createTempFile("cvs", "status");
        final DeferredFileOutputStream outputStream = new DeferredFileOutputStream(100 * 1024, tempRlogSpill);
        final PrintStream logStream = new PrintStream(outputStream, true,
                getCvsDescriptor().getChangelogEncoding());

        final OutputStream errorOutputStream = new OutputStream() {
            final StringBuffer buffer = new StringBuffer();

            @Override
            public void write(int b) throws IOException {
                if ((int) ("\n".getBytes()[0]) == b) {
                    flush();
                } else {
                    buffer.append(new String(new byte[] { (byte) b }));
                }
            }

            @Override
            public void flush() throws IOException {
                logger.info(buffer.toString());
                buffer.delete(0, buffer.length());
                super.flush();
            }

            @Override
            public void close() throws IOException {
                flush();
                super.close();
            }
        };
        final PrintStream errorPrintStream = new PrintStream(errorOutputStream);

        Client cvsClient = getCvsClient(cvsRoot, passwordRequired, password);
        cvsClient.getEventManager().addCVSListener(new BasicListener(logStream, errorPrintStream));
        cvsClient.executeCommand(statusCommand, getGlobalOptions(cvsRoot));

        logStream.close();
        errorPrintStream.flush();
        errorPrintStream.close();

        CvsLog parser = new CvsLog() {
            @Override
            public Reader read() throws IOException {
                if (outputStream.isInMemory())
                    return new InputStreamReader(new ByteArrayInputStream(outputStream.getData()),
                            getCvsDescriptor().getChangelogEncoding());
                else
                    return new InputStreamReader(new FileInputStream(outputStream.getFile()),
                            getCvsDescriptor().getChangelogEncoding());
            }

            @Override
            public void dispose() {
                tempRlogSpill.delete();
            }
        };

        changeSet = parser.mapCvsLog(cvsRoot, new CvsRepositoryLocation.HeadRepositoryLocation());
    } catch (IOException ex) {
        model.add(new ListBoxModel.Option("Could not load symbolic names - " + ex.getLocalizedMessage()));
        return model;
    } catch (CommandAbortedException ex) {
        model.add(new ListBoxModel.Option("Could not load symbolic names - " + ex.getLocalizedMessage()));
        return model;
    } catch (CommandException ex) {
        model.add(new ListBoxModel.Option("Could not load symbolic names - " + ex.getLocalizedMessage()));
        return model;
    } catch (AuthenticationException ex) {
        model.add(new ListBoxModel.Option("Could not load symbolic names - " + ex.getLocalizedMessage()));
        return model;
    }

    model.add(new ListBoxModel.Option("Head", "HEAD"));
    for (String branchName : changeSet.getBranchNames()) {
        model.add(new ListBoxModel.Option(branchName + " (Branch)", branchName));
    }
    for (String tagName : changeSet.getTagNames()) {
        model.add(new ListBoxModel.Option(tagName + " (Tag)", tagName));
    }
    return model;
}
From source file:edu.msu.cme.rdp.kmer.cli.KmerCoverage.java
public void printCovereage(OutputStream coverage_out, OutputStream abundance_out) throws IOException {
    adjustCount();

    // print out the weighted kmer coverage
    // we found mean coverage matched the previous biological observation
    PrintStream coverage_outStream = new PrintStream(coverage_out);
    coverage_outStream.println("#total reads: " + totalReads.intValue());
    coverage_outStream.println("#use mean_cov to adjust the contig abundance, not median_cov ");
    coverage_outStream.println("#seqid\tmean_cov\tmedian_cov\ttotal_pos\tcovered_pos\tcovered_ratio");

    for (Contig contig : contigMap.values()) {
        ArrayList<Double> counts = new ArrayList<Double>();
        int coveredPos = 0;
        for (int pos = 0; pos < contig.coverage.length; pos++) {
            if (contig.coverage[pos] > 0) {
                coveredPos++;
            }
            counts.add(contig.coverage[pos]);
        }
        if (coveredPos > 0) {
            coverage_outStream.println(contig.name + "\t" + String.format(dformat, StdevCal.calMean(counts))
                    + "\t" + String.format(dformat, (StdevCal.calMedian(counts))) + "\t" + counts.size()
                    + "\t" + coveredPos + "\t"
                    + String.format(dformat, (double) coveredPos / (double) contig.coverage.length));
        } else { // no coverage
            coverage_outStream.println(
                    contig.name + "\t" + 0 + "\t" + 0 + "\t" + contig.coverage.length + "\t" + 0 + "\t" + 0);
        }
    }
    coverage_outStream.close();

    // print kmer abundance
    HashMap<Integer, Integer> abundanceCountMap = new HashMap<Integer, Integer>(); // the frequency of the kmer abundance
    PrintStream abundance_outStream = new PrintStream(abundance_out);
    // need to merge the counts from forward and reverse together.
    HashSet<Kmer> kmerSet = new HashSet<Kmer>();
    kmerSet.addAll(kmerMaps[0].keySet());
    for (Kmer kmer : kmerSet) {
        AtomicInteger abundance = kmerMaps[0].get(kmer).count;
        String reverseKmerStr = IUBUtilities.reverseComplement(kmer.decodeLong(kmer.getLongKmers()));
        Kmer reverseKmer = (new NuclKmerGenerator(reverseKmerStr, this.kmerSize)).next();
        KmerAbund kmerAbund = kmerMaps[1].get(reverseKmer);
        if (kmerAbund != null) {
            abundance.addAndGet(kmerAbund.count.get());
        }
        Integer count = abundanceCountMap.get(abundance.get());
        if (count == null) {
            abundanceCountMap.put(abundance.get(), 1);
        } else {
            abundanceCountMap.put(abundance.get(), count + 1);
        }
    }
    abundance_outStream.println("kmer_abundance\tfrequency");
    for (Integer abundance : abundanceCountMap.keySet()) {
        abundance_outStream.println(abundance + "\t" + abundanceCountMap.get(abundance));
    }
    abundance_outStream.close();
}
From source file:cn.edu.henu.rjxy.lms.controller.TeaController.java
@RequestMapping("teacher/homework_submit") public @ResponseBody String homework_submit(HttpServletRequest request, @RequestParam("file") MultipartFile file) throws FileNotFoundException, IOException { int length = 0; String textWork = request.getParameter("arr1");//? String time = request.getParameter("time");//?? String miaoshu = request.getParameter("miaoshu"); String coursename = request.getParameter("courseName"); String term = request.getParameter("term"); String tec_name = TeacherDao.getTeacherBySn(getCurrentUsername()).getTeacherName(); String sn = getCurrentUsername(); String collage = TeacherDao.getTeacherBySn(getCurrentUsername()).getTeacherCollege(); // ? ?? ?? String ff = getFileFolder(request) + "homework/" + term + "/" + collage + "/" + sn + "/" + tec_name + "/" + coursename + "/"; file(ff);//?? length = haveFile(ff);// ww w. j a v a 2 s. c o m ff = ff + (length + 1) + "/"; file(ff); //?html OutputStreamWriter pw = null;//? pw = new OutputStreamWriter(new FileOutputStream(new File(ff + File.separator + "textWork.html")), "GBK"); pw.write(textWork); pw.close(); //?? PrintStream ps = null; ps = new PrintStream(new FileOutputStream(new File(ff + File.separator + "Workall.txt"))); ps.printf(miaoshu);//??: ps.println(); ; ps.println(time);//??: ps.close(); // if (!file.isEmpty()) { try { InputStream in; try (FileOutputStream os = new FileOutputStream(ff + "/" + file.getOriginalFilename())) { in = file.getInputStream(); int b = 0; while ((b = in.read()) != -1) { os.write(b); } os.flush(); } in.close(); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } } return "1"; }
From source file:org.apache.pdfbox.text.TestTextStripper.java
/**
 * Validate text extraction on a single file.
 *
 * @param inFile The PDF file to validate
 * @param outDir The directory to store the output in
 * @param bLogResult Whether to log the extracted text
 * @param bSort Whether or not the extracted text is sorted
 * @throws Exception when there is an exception
 */
public void doTestFile(File inFile, File outDir, boolean bLogResult, boolean bSort) throws Exception {
    if (bSort) {
        log.info("Preparing to parse " + inFile.getName() + " for sorted test");
    } else {
        log.info("Preparing to parse " + inFile.getName() + " for standard test");
    }

    if (!outDir.exists()) {
        if (!outDir.mkdirs()) {
            throw (new Exception("Error creating " + outDir.getAbsolutePath() + " directory"));
        }
    }

    //System.out.println("  " + inFile + (bSort ? " (sorted)" : ""));
    PDDocument document = PDDocument.load(inFile);
    try {
        File outFile;
        File diffFile;
        File expectedFile;

        if (bSort) {
            outFile = new File(outDir, inFile.getName() + "-sorted.txt");
            diffFile = new File(outDir, inFile.getName() + "-sorted-diff.txt");
            expectedFile = new File(inFile.getParentFile(), inFile.getName() + "-sorted.txt");
        } else {
            outFile = new File(outDir, inFile.getName() + ".txt");
            diffFile = new File(outDir, inFile.getName() + "-diff.txt");
            expectedFile = new File(inFile.getParentFile(), inFile.getName() + ".txt");
        }

        // delete possible leftover
        diffFile.delete();

        OutputStream os = new FileOutputStream(outFile);
        try {
            // write the UTF-8 byte order mark
            os.write(0xEF);
            os.write(0xBB);
            os.write(0xBF);

            Writer writer = new BufferedWriter(new OutputStreamWriter(os, ENCODING));
            try {
                // allows for sorted tests
                stripper.setSortByPosition(bSort);
                stripper.writeText(document, writer);
            } finally {
                // close the written file before reading it again
                writer.close();
            }
        } finally {
            os.close();
        }

        if (bLogResult) {
            log.info("Text for " + inFile.getName() + ":");
            log.info(stripper.getText(document));
        }

        if (!expectedFile.exists()) {
            this.bFail = true;
            log.error("FAILURE: Input verification file: " + expectedFile.getAbsolutePath()
                    + " did not exist");
            return;
        }

        boolean localFail = false;

        LineNumberReader expectedReader = new LineNumberReader(
                new InputStreamReader(new FileInputStream(expectedFile), ENCODING));
        LineNumberReader actualReader = new LineNumberReader(
                new InputStreamReader(new FileInputStream(outFile), ENCODING));

        while (true) {
            String expectedLine = expectedReader.readLine();
            while (expectedLine != null && expectedLine.trim().length() == 0) {
                expectedLine = expectedReader.readLine();
            }
            String actualLine = actualReader.readLine();
            while (actualLine != null && actualLine.trim().length() == 0) {
                actualLine = actualReader.readLine();
            }
            if (!stringsEqual(expectedLine, actualLine)) {
                this.bFail = true;
                localFail = true;
                log.error("FAILURE: Line mismatch for file " + inFile.getName() + " (sort = " + bSort + ")"
                        + " at expected line: " + expectedReader.getLineNumber() + " at actual line: "
                        + actualReader.getLineNumber() + "\nexpected line was: \"" + expectedLine + "\""
                        + "\nactual line was: \"" + actualLine + "\"" + "\n");
                // lets report all lines, even though this might produce some verbose logging
                //break;
            }
            if (expectedLine == null || actualLine == null) {
                break;
            }
        }
        expectedReader.close();
        actualReader.close();

        if (!localFail) {
            outFile.delete();
        } else {
            // https://code.google.com/p/java-diff-utils/wiki/SampleUsage
            List<String> original = fileToLines(expectedFile);
            List<String> revised = fileToLines(outFile);

            // Compute diff. Get the Patch object. Patch is the container for computed deltas.
            Patch patch = DiffUtils.diff(original, revised);

            PrintStream diffPS = new PrintStream(diffFile, ENCODING);
            for (Object delta : patch.getDeltas()) {
                if (delta instanceof ChangeDelta) {
                    ChangeDelta cdelta = (ChangeDelta) delta;
                    diffPS.println("Org: " + cdelta.getOriginal());
                    diffPS.println("New: " + cdelta.getRevised());
                    diffPS.println();
                } else if (delta instanceof DeleteDelta) {
                    DeleteDelta ddelta = (DeleteDelta) delta;
                    diffPS.println("Org: " + ddelta.getOriginal());
                    diffPS.println("New: " + ddelta.getRevised());
                    diffPS.println();
                } else if (delta instanceof InsertDelta) {
                    InsertDelta idelta = (InsertDelta) delta;
                    diffPS.println("Org: " + idelta.getOriginal());
                    diffPS.println("New: " + idelta.getRevised());
                    diffPS.println();
                } else {
                    diffPS.println(delta);
                }
            }
            diffPS.close();
        }
    } finally {
        document.close();
    }
}
From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.sensitivity.SobolAnalysis.java
@Override
public void run(CommandLine commandLine) throws Exception {
    PrintStream output = null;

    // setup the parameters
    parameterFile = new ParameterFile(new File(commandLine.getOptionValue("parameterFile")));
    index = Integer.parseInt(commandLine.getOptionValue("metric"));
    P = parameterFile.size();

    if (commandLine.hasOption("resamples")) {
        resamples = Integer.parseInt(commandLine.getOptionValue("resamples"));
    }

    // load and validate the model output file
    File input = new File(commandLine.getOptionValue("input"));
    N = validate(input);
    load(input);

    try {
        // setup the output stream
        if (commandLine.hasOption("output")) {
            output = new PrintStream(new File(commandLine.getOptionValue("output")));
        } else {
            output = System.out;
        }

        // perform the Sobol analysis and display the results
        if (commandLine.hasOption("simple")) {
            displaySimple(output);
        } else {
            display(output);
        }
    } finally {
        // only close streams we opened ourselves, never System.out
        if ((output != null) && (output != System.out)) {
            output.close();
        }
    }
}
From source file:org.apache.hadoop.hbase.PerformanceEvaluationDoubleTable.java
private Path writeInputFile(final Configuration c) throws IOException {
    FileSystem fs = FileSystem.get(c);
    if (!fs.exists(PERF_EVAL_DIR)) {
        fs.mkdirs(PERF_EVAL_DIR);
    }
    SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss");
    Path subdir = new Path(PERF_EVAL_DIR, formatter.format(new Date()));
    fs.mkdirs(subdir);
    Path inputFile = new Path(subdir, "input.txt");
    PrintStream out = new PrintStream(fs.create(inputFile));

    // Make input random.
    Map<Integer, String> m = new TreeMap<Integer, String>();
    Hash h = MurmurHash.getInstance();
    int perClientRows = (this.R / this.N);
    try {
        for (int i = 0; i < 10; i++) {
            for (int j = 0; j < N; j++) {
                String s = "startRow=" + ((j * perClientRows) + (i * (perClientRows / 10)))
                        + ", perClientRunRows=" + (perClientRows / 10) + ", totalRows=" + this.R
                        + ", clients=" + this.N;
                int hash = h.hash(Bytes.toBytes(s));
                m.put(hash, s);
            }
        }
        for (Map.Entry<Integer, String> e : m.entrySet()) {
            out.println(e.getValue());
        }
    } finally {
        out.close();
    }
    return subdir;
}
From source file:cn.edu.henu.rjxy.lms.controller.TeaController.java
@RequestMapping("teacher/xgwork") public @ResponseBody String xgwork(HttpServletRequest request) throws IOException { String textWork = request.getParameter("arred");//? String time = request.getParameter("time");//?? String miaoshu = request.getParameter("miaoshu"); String starttime = request.getParameter("starttime"); String coursename = request.getParameter("courseName"); String term = request.getParameter("term"); String id = request.getParameter("id"); String tec_name = TeacherDao.getTeacherBySn(getCurrentUsername()).getTeacherName(); String collage = TeacherDao.getTeacherBySn(getCurrentUsername()).getTeacherCollege(); String sn = getCurrentUsername(); // ? ?? ?? String ff = getFileFolder(request) + "homework/" + term + "/" + collage + "/" + sn + "/" + tec_name + "/" + coursename + "/" + id + "/"; //?html/* www . j a v a 2 s . c o m*/ OutputStreamWriter pw = null; pw = new OutputStreamWriter(new FileOutputStream(new File(ff + File.separator + "textWork.html")), "GBK"); pw.write(textWork); pw.close(); //?? PrintStream ps = null; ps = new PrintStream(new FileOutputStream(new File(ff + File.separator + "Workall.txt"))); ps.printf(miaoshu);//??: ps.println(); ; ps.println(time);//??: ps.println(starttime);//??: ps.close(); return "1"; }
From source file:cn.edu.henu.rjxy.lms.controller.TeaController.java
@RequestMapping("teacher/work") public @ResponseBody String work(HttpServletRequest request) throws IOException { int length = 0; String textWork = request.getParameter("arred");//? String time = request.getParameter("time");//?? String starttime = request.getParameter("onetime");//?? String miaoshu = request.getParameter("miaoshu"); String coursename = request.getParameter("courseName"); String term = request.getParameter("term"); String tec_name = TeacherDao.getTeacherBySn(getCurrentUsername()).getTeacherName(); String collage = TeacherDao.getTeacherBySn(getCurrentUsername()).getTeacherCollege(); String sn = getCurrentUsername(); // ? ?? ?? String ff = getFileFolder(request) + "homework/" + term + "/" + collage + "/" + sn + "/" + tec_name + "/" + coursename + "/"; file(ff);//?? length = haveFile(ff);/* w w w . j a va2s . c o m*/ ff = ff + (length + 1) + "/"; file(ff); //?html OutputStreamWriter pw = null; pw = new OutputStreamWriter(new FileOutputStream(new File(ff + File.separator + "textWork.html")), "GBK"); pw.write(textWork); pw.close(); //?? PrintStream ps = null; ps = new PrintStream(new FileOutputStream(new File(ff + File.separator + "Workall.txt"))); ps.printf(miaoshu);//??: ps.println(); ; ps.println(time);//??: ps.println(starttime);//?? ps.close(); return "1"; }
From source file:azkaban.jobtype.ReportalTeradataRunner.java
private void outputQueryResult(ResultSet result, OutputStream outputStream) throws SQLException {
    final PrintStream outFile = new PrintStream(outputStream);
    final String delim = ",";
    boolean isHeaderPending = true;

    if (result != null) {
        while (result.next()) {
            int numColumns = result.getMetaData().getColumnCount();
            StringBuilder dataString = new StringBuilder();

            if (isHeaderPending) {
                StringBuilder headerString = new StringBuilder();
                for (int j = 1; j <= numColumns; j++) {
                    String colName = formatValue(result.getMetaData().getColumnName(j));
                    if (j > 1) {
                        headerString.append(delim).append(colName);
                    } else {
                        headerString.append(colName);
                    }
                }
                isHeaderPending = false;
                outFile.println(headerString.toString());
            }

            for (int j = 1; j <= numColumns; j++) {
                String colVal = result.getString(j);
                if (colVal == null) {
                    colVal = "\"null\"";
                } else {
                    colVal = formatValue(colVal);
                }
                if (j > 1) {
                    dataString.append(delim).append(colVal);
                } else {
                    dataString.append(colVal);
                }
            }
            outFile.println(dataString.toString());
        }
    }
    outFile.close();
}