List of usage examples for the java.io.OutputStreamWriter constructor
public OutputStreamWriter(OutputStream out, CharsetEncoder enc)
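Most of the project examples below actually use the OutputStreamWriter(OutputStream, String) or OutputStreamWriter(OutputStream, Charset) overloads. As a minimal, self-contained sketch of the CharsetEncoder form named above (the class name and the "example.txt" output path are placeholders, not taken from any project below):

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.StandardCharsets;

public class OutputStreamWriterEncoderExample {
    public static void main(String[] args) throws IOException {
        // A freshly created encoder reports (rather than silently replaces)
        // malformed or unmappable characters, unlike the String/Charset overloads.
        CharsetEncoder enc = StandardCharsets.UTF_8.newEncoder();
        // "example.txt" is an illustrative output path.
        try (Writer out = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream("example.txt"), enc))) {
            out.write("hello, OutputStreamWriter");
        }
    }
}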
From source file:com.textocat.textokit.commons.wfstore.DefaultWordformStorePrinter.java
private void run() throws Exception {
    // deserialize
    DefaultWordformStore<?> ws = (DefaultWordformStore<?>) deserialize(
            toBufferedInputStream(openInputStream(serFile)));
    // print
    PrintWriter out;
    boolean closeOut;
    if (outFile == null) {
        out = new PrintWriter(System.out, true);
        closeOut = false;
    } else {
        OutputStream os = openOutputStream(outFile);
        out = new PrintWriter(new BufferedWriter(new OutputStreamWriter(os, "utf-8")), true);
        closeOut = true;
    }
    try {
        for (Map.Entry<String, ?> e : ws.strKeyMap.entrySet()) {
            out.print(escapeTabs(e.getKey()));
            out.print('\t');
            out.print(e.getValue());
            out.println();
        }
    } finally {
        if (closeOut) {
            closeQuietly(out);
        }
    }
}
From source file:eionet.util.VocabularyCSVOutputHelper.java
/**
 * Writes CSV to an output stream.
 *
 * @param out
 *            output stream
 * @param uriPrefix
 *            URI prefix for the element identifiers
 * @param folderContextRoot
 *            parent vocabulary folder root for related identifiers
 * @param concepts
 *            list of vocabulary concepts
 * @param attributesMeta
 *            list of field names for the CSV header row
 * @throws IOException
 *             if an I/O error occurs
 */
public static void writeCSV(OutputStream out, String uriPrefix, String folderContextRoot,
        List<VocabularyConcept> concepts, List<Triple<String, String, Integer>> attributesMeta)
        throws IOException {
    OutputStreamWriter osw = new OutputStreamWriter(out, "UTF-8");
    addBOM(out);
    List<String> toBeAddedToHeader = new ArrayList<String>();
    for (Triple<String, String, Integer> row : attributesMeta) {
        String lang = "";
        if (StringUtils.isNotEmpty(row.getCentral())) {
            lang = "@" + row.getCentral();
        }
        int numOfElements = row.getRight();
        for (int i = 0; i < numOfElements; i++) {
            toBeAddedToHeader.add(row.getLeft() + lang);
        }
    }
    String[] entries = new String[CONCEPT_ENTRIES_COUNT + toBeAddedToHeader.size()];
    addFixedEntryHeaders(entries);
    for (int i = 0; i < toBeAddedToHeader.size(); i++) {
        entries[i + CONCEPT_ENTRIES_COUNT] = toBeAddedToHeader.get(i);
    }
    CSVWriter writer = new CSVWriter(osw, ',');
    writer.writeNext(entries);
    DateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd");
    for (VocabularyConcept c : concepts) {
        int elemPos = 0;
        String value = "";
        List<DataElement> attributeElems = null;
        // add fixed entries
        entries = new String[CONCEPT_ENTRIES_COUNT + toBeAddedToHeader.size()];
        entries[URI_INDEX] = uriPrefix + c.getIdentifier();
        entries[LABEL_INDEX] = c.getLabel();
        entries[DEFINITION_INDEX] = c.getDefinition();
        entries[NOTATION_INDEX] = c.getNotation();
        entries[STATUS_INDEX] = c.getStatus().getLabel();
        entries[ACCEPTED_DATE_INDEX] = c.getAcceptedDate() != null ? dateFormatter.format(c.getAcceptedDate()) : "";
        // add extra fields
        for (Triple<String, String, Integer> row : attributesMeta) {
            String elemName = row.getLeft();
            attributeElems = VocabularyOutputHelper.getDataElementValuesByNameAndLang(elemName, row.getCentral(),
                    c.getElementAttributes());
            int sizeOfAttributeElems = 0;
            if (attributeElems != null) {
                sizeOfAttributeElems = attributeElems.size();
                for (int j = 0; j < sizeOfAttributeElems; j++) {
                    DataElement e = attributeElems.get(j);
                    if (e.isRelationalElement()) {
                        value = e.getRelatedConceptUri();
                    } else if (StringUtils.isNotEmpty(e.getRelatedConceptIdentifier())
                            && StringUtils.isNotEmpty(e.getDatatype())
                            && e.getDatatype().equalsIgnoreCase("reference")) {
                        value = folderContextRoot + e.getRelatedConceptIdentifier();
                    } else {
                        value = e.getAttributeValue();
                    }
                    // value = "\"" + value + "\"";
                    entries[CONCEPT_ENTRIES_COUNT + elemPos + j] = value;
                }
            }
            int maximumNumberOfElements = row.getRight();
            // add missing columns
            for (int j = sizeOfAttributeElems; j < maximumNumberOfElements; j++) {
                entries[CONCEPT_ENTRIES_COUNT + elemPos + j] = null;
            }
            elemPos += maximumNumberOfElements;
        }
        writer.writeNext(entries);
    }
    writer.close();
    osw.close();
}
From source file:edu.gmu.isa681.ctn.EncodedChannel.java
public EncodedChannel(String name, InputStream in, int inboundCapacity, OutputStream out, int outboundCapacity)
        throws UnsupportedEncodingException {
    this.name = name;
    this.sin = new Scanner(new BufferedReader(new InputStreamReader(in, "UTF-8")));
    this.sout = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
    startHandlers(inboundCapacity, outboundCapacity);
}
From source file:net.link.util.servlet.BufferedServletResponseWrapper.java
@Override
public PrintWriter getWriter() throws IOException {
    if (null == writer) {
        OutputStreamWriter outputStreamWriter =
                new OutputStreamWriter(bufferedServletOutputStream, getCharacterEncoding());
        writer = new PrintWriter(outputStreamWriter);
    }
    return writer;
}
From source file:com.iisigroup.cap.report.AbstractReportHtmlService.java
@Override
public ByteArrayOutputStream generateReport(Request request) throws CapException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Writer writer = null;
    OutputStreamWriter wr = null;
    try {
        Template t = getFmConfg().getConfiguration().getTemplate(getReportDefinition() + REPORT_SUFFIX);
        Map<String, Object> reportData = execute(request);
        wr = new OutputStreamWriter(out,
                getSysConfig().getProperty(ReportParamEnum.defaultEncoding.toString(), DEFAULT_ENCORDING));
        writer = new BufferedWriter(wr);
        t.process(reportData, writer);
    } catch (Exception e) {
        if (e.getCause() != null) {
            throw new CapException(e.getCause(), e.getClass());
        } else {
            throw new CapException(e, e.getClass());
        }
    } finally {
        IOUtils.closeQuietly(wr);
        IOUtils.closeQuietly(writer);
        IOUtils.closeQuietly(out);
    }
    return out;
}
From source file:info.novatec.testit.livingdoc.report.FileReportGenerator.java
@Override
public void closeReport(Report report) throws IOException {
    Writer out = null;
    try {
        File reportFile = new File(reportsDirectory, outputNameOf(report));
        reportFile.getParentFile().mkdirs();
        out = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(reportFile.getAbsolutePath()), "UTF-8"));
        report.printTo(out);
        out.flush();
    } finally {
        IOUtils.closeQuietly(out);
    }
}
From source file:com.github.jarscanner.XmlGenerator.java
public void generate() {
    Properties p = new Properties();
    p.setProperty("resource.loader", "string");
    p.setProperty("resource.loader.class", "org.apache.velocity.runtime.resource.loader.StringResourceLoader");
    Velocity.init(p);
    Template template = getTemplate("com/github/jarscanner/jar-data.vm");
    VelocityContext context = new VelocityContext();
    context.put("duplicatesImpl", duplicatesImpl);
    context.put("duplicatesBridges", duplicatesBridges);
    try {
        Writer writer = new OutputStreamWriter(new FileOutputStream(outXml), "utf-8");
        template.merge(context, writer);
        writer.flush();
        writer.close();
    } catch (IOException e) {
        LOG.error(e, e.getMessage());
    }
}
From source file:clientserver.ServerThread.java
final void initClientData() {
    try {
        OutputStreamWriter os = new OutputStreamWriter(socket.getOutputStream(), StandardCharsets.UTF_8);
        JSONObject jWriteobj = new JSONObject();
        jWriteobj.put("name1", "sp_on");
        jWriteobj.put("value1", 245);
        jWriteobj.put("name2", "sp_off");
        jWriteobj.put("value2", 45);
        jWriteobj.put("name3", "mc_on");
        jWriteobj.put("value3", 3455);
        jWriteobj.put("name4", "mc_off");
        jWriteobj.put("value4", 2045);
        os.write(jWriteobj.toString());
        os.flush();
    } catch (IOException e) {
        System.out.println("Write socket closing" + e.getMessage());
    }
}
From source file:gridool.util.csv.CsvWriter.java
public CsvWriter(@CheckForNull File file, @CheckForNull String encoding, boolean append) {
    if (file == null) {
        throw new IllegalArgumentException();
    }
    if (encoding == null) {
        throw new IllegalArgumentException();
    }
    try {
        FileOutputStream out = new FileOutputStream(file, append);
        OutputStreamWriter osw = new OutputStreamWriter(out, encoding);
        this.writer = new FastBufferedWriter(osw, 16384);
    } catch (IOException e) {
        throw new IllegalStateException("failed to write to file: " + file.getAbsolutePath(), e);
    }
}
From source file:ivory.core.tokenize.Tokenizer.java
@SuppressWarnings("static-access") public static void main(String[] args) { Options options = new Options(); options.addOption(OptionBuilder.withArgName("full path to model file or directory").hasArg() .withDescription("model file").create("model")); options.addOption(OptionBuilder.withArgName("full path to input file").hasArg() .withDescription("input file").isRequired().create("input")); options.addOption(OptionBuilder.withArgName("full path to output file").hasArg() .withDescription("output file").isRequired().create("output")); options.addOption(OptionBuilder.withArgName("en | zh | de | fr | ar | tr | es").hasArg() .withDescription("2-character language code").isRequired().create("lang")); options.addOption(OptionBuilder.withArgName("path to stopwords list").hasArg() .withDescription("one stopword per line").create("stopword")); options.addOption(OptionBuilder.withArgName("path to stemmed stopwords list").hasArg() .withDescription("one stemmed stopword per line").create("stemmed_stopword")); options.addOption(OptionBuilder.withArgName("true|false").hasArg().withDescription("turn on/off stemming") .create("stem")); options.addOption(OptionBuilder.withDescription("Hadoop option to load external jars") .withArgName("jar packages").hasArg().create("libjars")); CommandLine cmdline;/*from ww w .j a va 2 s . co m*/ CommandLineParser parser = new GnuParser(); try { String stopwordList = null, stemmedStopwordList = null, modelFile = null; boolean isStem = true; cmdline = parser.parse(options, args); if (cmdline.hasOption("stopword")) { stopwordList = cmdline.getOptionValue("stopword"); } if (cmdline.hasOption("stemmed_stopword")) { stemmedStopwordList = cmdline.getOptionValue("stemmed_stopword"); } if (cmdline.hasOption("stem")) { isStem = Boolean.parseBoolean(cmdline.getOptionValue("stem")); } if (cmdline.hasOption("model")) { modelFile = cmdline.getOptionValue("model"); } ivory.core.tokenize.Tokenizer tokenizer = TokenizerFactory.createTokenizer( cmdline.getOptionValue("lang"), modelFile, isStem, stopwordList, stemmedStopwordList, null); BufferedWriter out = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(cmdline.getOptionValue("output")), "UTF8")); BufferedReader in = new BufferedReader( new InputStreamReader(new FileInputStream(cmdline.getOptionValue("input")), "UTF8")); String line = null; while ((line = in.readLine()) != null) { String[] tokens = tokenizer.processContent(line); String s = ""; for (String token : tokens) { s += token + " "; } out.write(s.trim() + "\n"); } in.close(); out.close(); } catch (Exception exp) { System.out.println(exp); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("Tokenizer", options); System.exit(-1); } }