List of usage examples for java.io.Writer.flush()
public abstract void flush() throws IOException;
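Before the real-world examples below, here is a minimal, self-contained sketch of the common pattern they all follow: write through a (buffered) Writer, call flush() to push buffered characters down to the underlying stream, then close it. The file name and content here are illustrative only, not taken from any of the source files below.

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

public class WriterFlushExample {
    public static void main(String[] args) throws IOException {
        // "example.txt" is just a placeholder target file
        try (Writer out = new BufferedWriter(new OutputStreamWriter(
                new FileOutputStream("example.txt"), StandardCharsets.UTF_8))) {
            out.write("hello");
            out.flush(); // force buffered characters through to the file now
        } // close() flushes any remaining output as well
    }
}

Calling flush() matters whenever another consumer reads the underlying stream before the Writer is closed, as several of the examples below demonstrate.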
From source file:com.norconex.jef4.suite.JobSuite.java
private void writeJobSuiteIndex(JobSuiteStatusSnapshot statusTree) throws IOException {
    Writer out = null;
    try {
        out = new OutputStreamWriter(
                new FileOutputStream(getSuiteIndexFile()), CharEncoding.UTF_8);
        out.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
        out.write("<suite-index>");

        //--- Log Manager ---
        out.flush();
        getLogManager().saveToXML(out);

        //--- JobStatusSerializer ---
        out.flush();
        getJobStatusStore().saveToXML(out);

        //--- Job Status ---
        writeJobId(out, statusTree, statusTree.getRoot());

        out.write("</suite-index>");
        out.flush();
    } finally {
        IOUtils.closeQuietly(out);
    }
}
From source file:com.adobe.acs.commons.reports.internal.ReportCSVExportServlet.java
private void updateCSV(Resource config, SlingHttpServletRequest request,
        List<ReportCellCSVExporter> exporters, Csv csv, Writer writer) throws ReportException {
    QueryReportExecutor executor = request.adaptTo(QueryReportExecutor.class);
    executor.setConfiguration(config);
    log.debug("Retrieved executor {}", executor);
    ResultsPage queryResult = executor.getAllResults();
    List<? extends Object> results = queryResult.getResults();
    log.debug("Retrieved {} results", results.size());
    for (Object result : results) {
        List<String> row = new ArrayList<String>();
        try {
            for (ReportCellCSVExporter exporter : exporters) {
                row.add(exporter.getValue(result));
            }
            csv.writeRow(row.toArray(new String[row.size()]));
            writer.flush();
        } catch (Exception e) {
            log.warn("Exception writing row: " + row, e);
        }
    }
    log.debug("Results written successfully");
}
From source file:com.netxforge.oss2.config.DiscoveryConfigFactory.java
/**
 * <p>saveXml</p>
 *
 * @param xml a {@link java.lang.String} object.
 * @throws java.io.IOException if any.
 */
protected void saveXml(final String xml) throws IOException {
    if (xml != null) {
        getWriteLock().lock();
        try {
            final Writer fileWriter = new OutputStreamWriter(
                    new FileOutputStream(
                            ConfigFileConstants.getFile(ConfigFileConstants.DISCOVERY_CONFIG_FILE_NAME)),
                    "UTF-8");
            fileWriter.write(xml);
            fileWriter.flush();
            fileWriter.close();
        } finally {
            getWriteLock().unlock();
        }
    }
}
From source file:de.mpg.mpdl.inge.pubman.web.multipleimport.ImportProcess.java
private String createTaskItemXml() {
    try {
        String fwUrl = PropertyReader.getFrameworkUrl();
        HttpClient client = new HttpClient();
        ProxyHelper.setProxy(client, fwUrl);
        StringBuilder sb = new StringBuilder(ResourceUtil.getResourceAsString(
                "multipleImport/ImportTaskTemplate.xml", ImportProcess.class.getClassLoader()));
        replace("$01", escape(this.escidocContext.getObjectId()), sb);
        replace("$02", escape(PropertyReader.getProperty("escidoc.import.task.content-model")), sb);
        replace("$03", escape("Import Task Item for import " + name + " "), sb);

        // Upload original data
        PutMethod method = new PutMethod(fwUrl + "/st/staging-file");
        method.setRequestHeader("Content-Type", this.format.toString());
        method.setRequestHeader("Cookie", "escidocCookie=" + this.user.getHandle());
        InputStream is = new FileInputStream(this.formatProcessor.getSourceFile());
        method.setRequestEntity(new InputStreamRequestEntity(is));
        client.executeMethod(method);
        is.close();
        String response = method.getResponseBodyAsString();
        URL originalDataUrl = xmlTransforming.transformUploadResponseToFileURL(response);
        replace("$04", escape(this.name), sb);
        replace("$05", escape(this.fileName), sb);
        replace("$06", escape(originalDataUrl.toExternalForm()), sb);
        replace("$07", escape(log.getStoredId() + ""), sb);
        replace("$08", escape(this.format.toString()), sb);
        replace("$09", escape(String.valueOf(this.formatProcessor.getLength())), sb);

        // Upload and create task item xml
        File tempLogXml = File.createTempFile("multipleImportLogXml", "xml");
        Writer fw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(tempLogXml), "UTF-8"));
        log.toXML(fw);
        fw.flush();
        fw.close();
        PutMethod method2 = new PutMethod(fwUrl + "/st/staging-file");
        method2.setRequestHeader("Content-Type", "text/xml");
        method2.setRequestHeader("Cookie", "escidocCookie=" + this.user.getHandle());
        is = new FileInputStream(tempLogXml);
        method2.setRequestEntity(new InputStreamRequestEntity(is));
        client.executeMethod(method2);
        is.close();
        response = method2.getResponseBodyAsString();
        URL logXmlUrl = xmlTransforming.transformUploadResponseToFileURL(response);
        replace("$10", escape(this.name), sb);
        replace("$11", "importlog.xml", sb);
        replace("$12", escape(logXmlUrl.toExternalForm()), sb);
        replace("$13", escape(log.getStoredId() + ""), sb);
        replace("$14", escape(String.valueOf(tempLogXml.length())), sb);
        tempLogXml.delete();

        /*
         * String taskItemXml = ResourceUtil.getResourceAsString("multipleImport/ImportTaskTemplate.xml");
         * taskItemXml = taskItemXml.replace("$1", escape(this.escidocContext.getObjectId()));
         * taskItemXml = taskItemXml.replace("$2",
         *     escape(PropertyReader.getProperty("escidoc.import.task.content-model")));
         * taskItemXml = taskItemXml.replace("$4", escape(this.name));
         * taskItemXml = taskItemXml.replace("$5", escape(this.fileName));
         * taskItemXml = taskItemXml.replace("$6", escape(this.formatProcessor.getDataAsBase64()));
         * taskItemXml = taskItemXml.replace("$7", escape(log.getStoredId() + ""));
         * taskItemXml = taskItemXml.replace("$8", escape(this.format.toString()));
         * taskItemXml = taskItemXml.replace("$9", escape(this.formatProcessor.getLength() + ""));
         */

        log.finishItem();
        log.close();
        return sb.toString();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
From source file:org.wso2.carbon.ml.rest.api.ModelApiV20.java
/**
 * Predict using a file and return predictions as a CSV.
 *
 * @param modelId      Unique id of the model
 * @param dataFormat   Data format of the file (CSV or TSV)
 * @param columnHeader Whether the file contains the column header as the first row (YES or NO)
 * @param inputStream  Input stream generated from the file used for predictions
 * @param percentile   a threshold value used to identify cluster boundaries
 * @param skipDecoding whether the decoding should not be done (true or false)
 * @return A file as a {@link StreamingOutput}
 */
@POST
@Path("/predictionStreams")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@Consumes(MediaType.MULTIPART_FORM_DATA)
public Response streamingPredict(@Multipart("modelId") long modelId,
        @Multipart("dataFormat") String dataFormat, @Multipart("columnHeader") String columnHeader,
        @Multipart("file") InputStream inputStream, @QueryParam("percentile") double percentile,
        @QueryParam("skipDecoding") boolean skipDecoding) {
    PrivilegedCarbonContext carbonContext = PrivilegedCarbonContext.getThreadLocalCarbonContext();
    int tenantId = carbonContext.getTenantId();
    String userName = carbonContext.getUsername();
    try {
        // validate input parameters
        // if it is a file upload, check whether the file is sent
        if (inputStream == null || inputStream.available() == 0) {
            String msg = String.format(
                    "No file found to predict with model [id] %s of tenant [id] %s and [user] %s .",
                    modelId, tenantId, userName);
            logger.error(msg);
            return Response.status(Response.Status.BAD_REQUEST).entity(new MLErrorBean(msg))
                    .type(MediaType.APPLICATION_JSON).build();
        }
        final String predictions = mlModelHandler.streamingPredict(tenantId, userName, modelId, dataFormat,
                columnHeader, inputStream, percentile, skipDecoding);
        StreamingOutput stream = new StreamingOutput() {
            @Override
            public void write(OutputStream outputStream) throws IOException {
                Writer writer = new BufferedWriter(
                        new OutputStreamWriter(outputStream, StandardCharsets.UTF_8));
                writer.write(predictions);
                writer.flush();
                writer.close();
            }
        };
        return Response.ok(stream)
                .header("Content-disposition",
                        "attachment; filename=Predictions_" + modelId + "_" + MLUtils.getDate() + MLConstants.CSV)
                .build();
    } catch (IOException e) {
        String msg = MLUtils.getErrorMsg(String.format(
                "Error occurred while reading the file for model [id] %s of tenant [id] %s and [user] %s.",
                modelId, tenantId, userName), e);
        logger.error(msg, e);
        return Response.status(Response.Status.BAD_REQUEST).entity(new MLErrorBean(e.getMessage()))
                .type(MediaType.APPLICATION_JSON).build();
    } catch (MLModelHandlerException e) {
        String msg = MLUtils.getErrorMsg(String.format(
                "Error occurred while predicting from model [id] %s of tenant [id] %s and [user] %s.",
                modelId, tenantId, userName), e);
        logger.error(msg, e);
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(new MLErrorBean(e.getMessage()))
                .type(MediaType.APPLICATION_JSON).build();
    }
}
From source file:de.mpg.escidoc.pubman.multipleimport.ImportProcess.java
private String createTaskItemXml() {
    try {
        String fwUrl = de.mpg.escidoc.services.framework.ServiceLocator.getFrameworkUrl();
        HttpClient client = new HttpClient();
        ProxyHelper.setProxy(client, fwUrl);
        StringBuilder sb = new StringBuilder(ResourceUtil.getResourceAsString(
                "multipleImport/ImportTaskTemplate.xml", ImportProcess.class.getClassLoader()));
        replace("$01", escape(this.escidocContext.getObjectId()), sb);
        replace("$02", escape(PropertyReader.getProperty("escidoc.import.task.content-model")), sb);
        replace("$03", escape("Import Task Item for import " + name + " "), sb);

        // Upload original data
        PutMethod method = new PutMethod(fwUrl + "/st/staging-file");
        method.setRequestHeader("Content-Type", this.format.toString());
        method.setRequestHeader("Cookie", "escidocCookie=" + this.user.getHandle());
        InputStream is = new FileInputStream(this.formatProcessor.getSourceFile());
        method.setRequestEntity(new InputStreamRequestEntity(is));
        client.executeMethod(method);
        is.close();
        String response = method.getResponseBodyAsString();
        URL originalDataUrl = xmlTransforming.transformUploadResponseToFileURL(response);
        replace("$04", escape(this.name), sb);
        replace("$05", escape(this.fileName), sb);
        replace("$06", escape(originalDataUrl.toExternalForm()), sb);
        replace("$07", escape(log.getStoredId() + ""), sb);
        replace("$08", escape(this.format.toString()), sb);
        replace("$09", escape(String.valueOf(this.formatProcessor.getLength())), sb);

        // Upload and create task item xml
        File tempLogXml = File.createTempFile("multipleImportLogXml", "xml");
        Writer fw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(tempLogXml), "UTF-8"));
        log.toXML(fw);
        fw.flush();
        fw.close();
        PutMethod method2 = new PutMethod(fwUrl + "/st/staging-file");
        method2.setRequestHeader("Content-Type", "text/xml");
        method2.setRequestHeader("Cookie", "escidocCookie=" + this.user.getHandle());
        is = new FileInputStream(tempLogXml);
        method2.setRequestEntity(new InputStreamRequestEntity(is));
        client.executeMethod(method2);
        is.close();
        response = method2.getResponseBodyAsString();
        URL logXmlUrl = xmlTransforming.transformUploadResponseToFileURL(response);
        replace("$10", escape(this.name), sb);
        replace("$11", "importlog.xml", sb);
        replace("$12", escape(logXmlUrl.toExternalForm()), sb);
        replace("$13", escape(log.getStoredId() + ""), sb);
        replace("$14", escape(String.valueOf(tempLogXml.length())), sb);
        tempLogXml.delete();

        /*
         * String taskItemXml = ResourceUtil.getResourceAsString("multipleImport/ImportTaskTemplate.xml");
         * taskItemXml = taskItemXml.replace("$1", escape(this.escidocContext.getObjectId()));
         * taskItemXml = taskItemXml.replace("$2",
         *     escape(PropertyReader.getProperty("escidoc.import.task.content-model")));
         * taskItemXml = taskItemXml.replace("$4", escape(this.name));
         * taskItemXml = taskItemXml.replace("$5", escape(this.fileName));
         * taskItemXml = taskItemXml.replace("$6", escape(this.formatProcessor.getDataAsBase64()));
         * taskItemXml = taskItemXml.replace("$7", escape(log.getStoredId() + ""));
         * taskItemXml = taskItemXml.replace("$8", escape(this.format.toString()));
         * taskItemXml = taskItemXml.replace("$9", escape(this.formatProcessor.getLength() + ""));
         */

        log.finishItem();
        log.close();
        return sb.toString();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
From source file:br.msf.maven.compressor.processor.JavaScriptCompressor.java
@Override
protected CharSequence proccessMinify(final CharSequence originalContent, final CompressorSettings settings)
        throws Exception {
    Reader reader = null;
    Writer writer = null;
    try {
        reader = new CharSequenceReader(originalContent);
        com.yahoo.platform.yui.compressor.JavaScriptCompressor compressor =
                new com.yahoo.platform.yui.compressor.JavaScriptCompressor(
                        reader, new MavenErrorReporter(settings.getLog(), settings.isShowJsWarnings()));
        final StringBuilder out = new StringBuilder(originalContent.length());
        writer = new StringBuilderWriter(out);
        try {
            compressor.compress(writer, LINE_BREAK_POS, false, settings.isVerbose(), true, false);
        } catch (Exception e) {
            throw new YUICompressorException(
                    "An Error has occurred while compressing javascript. Probably there is a bad practice on the source...",
                    e);
        }
        writer.flush();
        return out;
    } finally {
        IOUtils.closeQuietly(reader);
        IOUtils.closeQuietly(writer);
    }
}
From source file:hermes.impl.DefaultXMLHelper.java
public void saveContent(MessageSet messages, Writer writer) throws Exception {
    JAXBContext jc = JAXBContext.newInstance("hermes.xml");
    Marshaller m = jc.createMarshaller();
    m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
    m.marshal(new JAXBElement<MessageSet>(new QName("", "content"), MessageSet.class, messages), writer);
    writer.flush();
}
From source file:edu.ku.brc.specify.utilapps.SiteGen.java
public void doAlphaIndexPage(final Class<?> clazz, final String fieldName) {
    StringBuilder sb = new StringBuilder();
    Connection connection = null;
    Statement stmt = null;
    ResultSet rs = null;
    try {
        connection = DBConnection.getInstance().createConnection();
        stmt = connection.createStatement();
        rs = stmt.executeQuery("select " + clazz.getSimpleName() + "ID, " + fieldName + " from "
                + clazz.getSimpleName().toLowerCase() + " order by " + fieldName + " asc");
        char currChar = '_';
        while (rs.next()) {
            String name = rs.getString(2);
            int id = rs.getInt(1);
            if (StringUtils.isEmpty(name)) {
                name = rs.getString(1);
            }
            //list.add(new SorterInfoStruct(name, rs.getInt(1)));
            if (currChar != name.charAt(0)) {
                currChar = name.charAt(0);
                sb.append("<br/>" + currChar + "<br/>\n");
            }
            sb.append("<a href=\"" + clazz.getSimpleName() + id + ".html\">" + name + "</a><br/>\n");
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        try {
            if (rs != null) {
                rs.close();
            }
            if (stmt != null) {
                stmt.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    Writer oFile = null;
    try {
        oFile = new BufferedWriter(new FileWriter(new File("site/" + clazz.getSimpleName() + ".html")));
        String content = template;
        content = StringUtils.replace(content, "<!-- Title -->", clazz.getSimpleName());
        content = StringUtils.replace(content, "<!-- Content -->", sb.toString());
        oFile.write(content);
        oFile.flush();
        oFile.close();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    log.info("Done");
}
From source file:dk.ange.octave.io.DataWriteFunctor.java
@Override
public void doWrites(final Writer writer) {
    try {
        // Enter octave in "read data from input mode"
        log.trace("write: 'load(\"-text\", \"-\")' to start read data from input mode");
        writer.write("load(\"-text\", \"-\")\n");
        // Push the data into octave
        for (final Map.Entry<String, OctaveObject> entry : octaveTypes.entrySet()) {
            final String name = entry.getKey();
            final OctaveObject value = entry.getValue();
            if (log.isTraceEnabled()) {
                log.trace("write: variable '" + name + "', value=<<<" + value + ">>>");
            }
            OctaveIO.write(writer, name, value);
        }
        // Exit octave from read data mode
        log.trace("write: '# name:' to exit octave from read data mode");
        writer.write("# name: \n");
        writer.flush();
    } catch (final IOException e) {
        // Will happen when we write to a dead octave process
        final String message = "Unexpected IOException";
        log.debug(message, e);
        throw new OctaveIOException(message, e);
    }
}