Usage examples for the org.apache.commons.csv.CSVPrinter constructor
public CSVPrinter(final Appendable out, final CSVFormat format) throws IOException
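Before the project-specific examples, here is a minimal self-contained sketch of the constructor (the data values are illustrative, not taken from any project below). CSVPrinter implements Closeable, so try-with-resources works:

import java.io.IOException;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class CsvPrinterBasics {
    public static void main(String[] args) throws IOException {
        // Any Appendable can be the target: StringBuilder, Writer, PrintStream, ...
        StringBuilder out = new StringBuilder();
        try (CSVPrinter printer = new CSVPrinter(out, CSVFormat.DEFAULT)) {
            printer.printRecord("id", "name"); // one call per record; quoting is handled by the format
            printer.printRecord(1, "alice");
        }
        System.out.print(out); // id,name CRLF 1,alice CRLF (DEFAULT uses CRLF record separators)
    }
}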
From source file:org.apache.jackrabbit.oak.plugins.tika.CSVFileBinaryResourceProviderTest.java
@Test
public void testGetBinaries() throws Exception {
    StringBuilder sb = new StringBuilder();
    CSVPrinter p = new CSVPrinter(sb, CSVFileBinaryResourceProvider.FORMAT);
    // BLOB_ID, LENGTH, JCR_MIMETYPE, JCR_ENCODING, JCR_PATH
    p.printRecord("a", 123, "text/plain", null, "/a");
    p.printRecord("a2", 123, "text/plain", null, "/a/c");
    p.printRecord("b", null, "text/plain", null, "/b");
    p.printRecord(null, null, "text/plain", null, "/c");

    File dataFile = temporaryFolder.newFile();
    Files.write(sb, dataFile, Charsets.UTF_8);

    CSVFileBinaryResourceProvider provider = new CSVFileBinaryResourceProvider(dataFile, new MemoryBlobStore());

    Map<String, BinaryResource> binaries = provider.getBinaries("/").uniqueIndex(BinarySourceMapper.BY_BLOBID);
    assertEquals(3, binaries.size());
    assertEquals("a", binaries.get("a").getBlobId());
    assertEquals("/a", binaries.get("a").getPath());

    binaries = provider.getBinaries("/a").uniqueIndex(BinarySourceMapper.BY_BLOBID);
    assertEquals(1, binaries.size());

    provider.close();
}
From source file:org.apache.jackrabbit.oak.plugins.tika.CSVFileGenerator.java
public void generate(FluentIterable<BinaryResource> binaries) throws IOException {
    Closer closer = Closer.create();
    int count = 0;
    try {
        CSVPrinter printer = new CSVPrinter(
                Files.newWriter(outFile, Charsets.UTF_8),
                CSVFileBinaryResourceProvider.FORMAT);
        closer.register(printer);
        for (BinaryResource br : binaries) {
            count++;
            printer.printRecord(
                    br.getBlobId(),
                    br.getByteSource().size(),
                    br.getMimeType(),
                    br.getEncoding(),
                    br.getPath());
        }
        printer.flush();
        log.info("Generated csv output at {} with {} entries", outFile.getAbsolutePath(), count);
    } finally {
        closer.close();
    }
}
From source file:org.apache.nifi.csv.WriteCSVResult.java
public WriteCSVResult(final CSVFormat csvFormat, final RecordSchema recordSchema,
        final SchemaAccessWriter schemaWriter, final OutputStream out, final String dateFormat,
        final String timeFormat, final String timestampFormat, final boolean includeHeaderLine,
        final String charSet) throws IOException {
    super(out);
    this.recordSchema = recordSchema;
    this.schemaWriter = schemaWriter;
    this.dateFormat = dateFormat;
    this.timeFormat = timeFormat;
    this.timestampFormat = timestampFormat;
    this.includeHeaderLine = includeHeaderLine;

    final CSVFormat formatWithHeader = csvFormat.withSkipHeaderRecord(true);
    final OutputStreamWriter streamWriter = new OutputStreamWriter(out, charSet);
    printer = new CSVPrinter(streamWriter, formatWithHeader);

    fieldValues = new Object[recordSchema.getFieldCount()];
}
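The withSkipHeaderRecord(true) call above matters because, when a format has a header set and skipping is disabled, the CSVPrinter constructor prints the header row itself; NiFi suppresses that so it can control header emission on its own. A minimal sketch of the two behaviors (the header names are illustrative):

import java.io.IOException;
import java.io.StringWriter;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class HeaderSkipDemo {
    public static void main(String[] args) throws IOException {
        // Header set, skipping disabled: the constructor emits the header row.
        StringWriter a = new StringWriter();
        new CSVPrinter(a, CSVFormat.DEFAULT.withHeader("id", "name")).close();
        System.out.print(a); // id,name

        // withSkipHeaderRecord(true): nothing is emitted automatically;
        // the caller decides when (and whether) to print a header.
        StringWriter b = new StringWriter();
        new CSVPrinter(b, CSVFormat.DEFAULT.withHeader("id", "name").withSkipHeaderRecord(true)).close();
        System.out.print(b); // (empty)
    }
}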
From source file:org.apache.nifi.processors.ParseCSV.ParseCSV.java
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final Charset charset = Charset.defaultCharset();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final Map<String, String> attributes = new LinkedHashMap<>();
    final String format = context.getProperty(FORMAT).getValue();
    final boolean create_attributes = Boolean.parseBoolean(context.getProperty(CREATE_ATTRIBUTES).getValue());
    final char delimiter = context.getProperty(DELIMITER).getValue().charAt(0);
    final boolean with_header = Boolean.parseBoolean(context.getProperty(WITH_HEADER).getValue());
    final String output_format = context.getProperty(OUTPUT_FORMAT).getValue();
    final String custom_header = context.getProperty(CUSTOM_HEADER).getValue();
    final String column_mask = context.getProperty(COLUMN_MASK).getValue();
    final String column_encrypt = context.getProperty(COLUMN_ENCRYPT).getValue();
    final String column_tokenize = context.getProperty(COLUMN_TOKENIZE).getValue();
    final String tokenize_unique_identifier = context.getProperty(TOKENIZE_UNQIUE_IDENTIFIER).getValue();
    final String tokenized_ouput = context.getProperty(TOKENIZED_OUTPUT).getValue();
    final String encryptionKey = "Bar12345Bar12345";
    final String static_schema = context.getProperty(STATIC_SCHEMA).getValue();

    // holder for the tokenized flow file created inside the write callback
    final org.apache.nifi.util.ObjectHolder<FlowFile> holder = new org.apache.nifi.util.ObjectHolder<>(null);

    flowFile = session.write(flowFile, new StreamCallback() {
        @Override
        public void process(InputStream inputStream, OutputStream outputStream) throws IOException {
            CSVFormat csvFormat = buildFormat(format, delimiter, with_header, custom_header);
            CSVParser csvParser = new CSVParser(new InputStreamReader(inputStream, charset), csvFormat);
            CSVPrinter csvPrinter = new CSVPrinter(new OutputStreamWriter(outputStream, charset), csvFormat);
            String[] headerArray;
            ArrayList<String> columnMaskList = new ArrayList<>();
            ArrayList<String> columnEncryptList = new ArrayList<>();
            ArrayList<String> columnTokenizeList = new ArrayList<>();
            List<String> maskValueHolder = new LinkedList<>();
            FlowFile tokenized = session.create();

            // print header if needed
            if (custom_header != null && output_format.equals("CSV") && static_schema == null) {
                csvPrinter.printRecord(custom_header);
                headerArray = custom_header.split(",");
            } else if (static_schema != null && custom_header == null) {
                csvPrinter.printRecord(static_schema.replace("\"", ""));
                headerArray = static_schema.split(",");
            } else {
                headerArray = csvParser.getHeaderMap().keySet().toArray(new String[0]);
                csvPrinter.printRecord((Object[]) headerArray);
            }

            if (column_mask != null) {
                columnMaskList = new ArrayList<>(Arrays.asList(column_mask.replace("\"", "").split(",")));
            }
            if (column_encrypt != null) {
                columnEncryptList = new ArrayList<>(Arrays.asList(column_encrypt.split(",")));
            }
            if (column_tokenize != null) {
                columnTokenizeList = new ArrayList<>(Arrays.asList(column_tokenize.split(",")));
            }

            // loop through records and print
            for (final CSVRecord record : csvParser) {
                // generate one attribute per field and record number, if requested
                if (create_attributes) {
                    for (int i = 0; i < headerArray.length; i++) {
                        attributes.put(headerArray[i] + "." + record.getRecordNumber(), record.get(i));
                    }
                }

                // check masked / encrypted columns
                if (column_mask != null || column_encrypt != null) {
                    // loop through the header array and match the user-requested mask columns
                    for (int i = 0; i < headerArray.length; i++) {
                        if (columnMaskList.contains(headerArray[i])) {
                            // set mask
                            maskValueHolder.add(mask(record.get(i)));

                            // construct tokenization row for external DB store
                            if (columnTokenizeList.contains(headerArray[i])) {
                                final String tokenizedRow = tokenizationOut(tokenized_ouput, headerArray[i],
                                        tokenize_unique_identifier, mask(record.get(i)), record.get(i),
                                        Long.toString(record.getRecordNumber()));
                                tokenized = session.append(tokenized, new OutputStreamCallback() {
                                    @Override
                                    public void process(OutputStream outputStream) throws IOException {
                                        outputStream.write(tokenizedRow.getBytes());
                                    }
                                });
                            }
                        } else if (columnEncryptList.contains(headerArray[i])) {
                            // encrypt
                            maskValueHolder.add(new String(Encrypt(record.get(i), encryptionKey), "UTF-8"));
                        } else {
                            // no mask
                            maskValueHolder.add(record.get(i));
                        }
                    }
                    csvPrinter.printRecord(maskValueHolder);
                    // clear mask column holder
                    maskValueHolder.clear();
                } else {
                    // no masking or encryption required, print record
                    switch (output_format) {
                    case "CSV": {
                        // assumes static_schema is set when CSV output is selected
                        List<String> items = Arrays.asList(static_schema.split(","));
                        String lastColumn = items.get(items.size() - 1);
                        StringBuilder row = new StringBuilder();
                        for (String item : items) {
                            row.append(record.get(item));
                            if (!item.equals(lastColumn)) {
                                row.append(',');
                            }
                        }
                        csvPrinter.printRecord(row.toString().replaceAll("^\"|\"$", ""));
                        break;
                    }
                    case "JSON":
                        String json = new ObjectMapper().writer().withDefaultPrettyPrinter()
                                .writeValueAsString(record.toMap()) + "\n";
                        if (json.length() > 0) {
                            outputStream.write(json.getBytes());
                        }
                        break;
                    case "XML":
                        outputStream.write(new XmlMapper().writeValueAsString(record.toMap()).getBytes());
                        break;
                    }
                }
            }
            csvPrinter.flush();
            csvPrinter.close();
            holder.set(tokenized);
        }
    });

    flowFile = session.putAllAttributes(flowFile, attributes);
    session.transfer(flowFile, RELATIONSHIP_SUCCESS);
    session.transfer(holder.get(), RELATIONSHIP_TOKENIZED);
}
From source file:org.apache.phoenix.pherf.result.impl.CSVFileResultHandler.java
@Override
protected void open(String header) throws IOException {
    // Check if already open so that we only create one writer
    if (csvPrinter != null) {
        return;
    }
    csvPrinter = new CSVPrinter(new PrintWriter(resultFileName), CSVFormat.DEFAULT);
    Object[] records = header.split(PherfConstants.RESULT_FILE_DELIMETER);
    csvPrinter.printRecord(records);
    isClosed = false;
}
From source file:org.apache.solr.response.CSVResponseWriter.java
Note: this example drives a CSVPrinter through the legacy CSVStrategy API (a pre-1.0 form of commons-csv bundled with Solr), not the CSVFormat API used elsewhere on this page.

public void writeResponse() throws IOException {
    SolrParams params = req.getParams();

    strategy = new CSVStrategy(',', '"', CSVStrategy.COMMENTS_DISABLED, CSVStrategy.ESCAPE_DISABLED,
            false, false, false, true);
    CSVStrategy strat = strategy;

    String sep = params.get(CSV_SEPARATOR);
    if (sep != null) {
        if (sep.length() != 1)
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid separator:'" + sep + "'");
        strat.setDelimiter(sep.charAt(0));
    }

    String nl = params.get(CSV_NEWLINE);
    if (nl != null) {
        if (nl.length() == 0)
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid newline:'" + nl + "'");
        strat.setPrinterNewline(nl);
    }

    String encapsulator = params.get(CSV_ENCAPSULATOR);
    String escape = params.get(CSV_ESCAPE);
    if (encapsulator != null) {
        if (encapsulator.length() != 1)
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "Invalid encapsulator:'" + encapsulator + "'");
        strat.setEncapsulator(encapsulator.charAt(0));
    }

    if (escape != null) {
        if (escape.length() != 1)
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid escape:'" + escape + "'");
        strat.setEscape(escape.charAt(0));
        if (encapsulator == null) {
            strat.setEncapsulator(CSVStrategy.ENCAPSULATOR_DISABLED);
        }
    }

    if (strat.getEscape() == '\\') {
        // If the escape is the standard backslash, then also enable unicode escapes
        // (harmless, since 'u' would not otherwise be escaped).
        strat.setUnicodeEscapeInterpretation(true);
    }
    printer = new CSVPrinter(writer, strategy);

    CSVStrategy mvStrategy = new CSVStrategy(strategy.getDelimiter(), CSVStrategy.ENCAPSULATOR_DISABLED,
            CSVStrategy.COMMENTS_DISABLED, '\\', false, false, false, false);
    strat = mvStrategy;

    sep = params.get(MV_SEPARATOR);
    if (sep != null) {
        if (sep.length() != 1)
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid mv separator:'" + sep + "'");
        strat.setDelimiter(sep.charAt(0));
    }

    encapsulator = params.get(MV_ENCAPSULATOR);
    escape = params.get(MV_ESCAPE);
    if (encapsulator != null) {
        if (encapsulator.length() != 1)
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "Invalid mv encapsulator:'" + encapsulator + "'");
        strat.setEncapsulator(encapsulator.charAt(0));
        if (escape == null) {
            strat.setEscape(CSVStrategy.ESCAPE_DISABLED);
        }
    }

    escape = params.get(MV_ESCAPE);
    if (escape != null) {
        if (escape.length() != 1)
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid mv escape:'" + escape + "'");
        strat.setEscape(escape.charAt(0));
        // encapsulator will already be disabled if it wasn't specified
    }

    returnScore = returnFields != null && returnFields.contains("score");
    boolean needListOfFields = returnFields == null || returnFields.size() == 0
            || (returnFields.size() == 1 && returnScore) || returnFields.contains("*");
    Collection<String> fields = returnFields;
    Object responseObj = rsp.getValues().get("response");

    if (needListOfFields) {
        if (responseObj instanceof SolrDocumentList) {
            // get the list of fields from the SolrDocumentList
            fields = new LinkedHashSet<String>();
            for (SolrDocument sdoc : (SolrDocumentList) responseObj) {
                fields.addAll(sdoc.getFieldNames());
            }
        } else {
            // get the list of fields from the index
            fields = req.getSearcher().getFieldNames();
        }
        if (returnScore) {
            fields.add("score");
        } else {
            fields.remove("score");
        }
    }

    CSVSharedBufPrinter csvPrinterMV = new CSVSharedBufPrinter(mvWriter, mvStrategy);

    for (String field : fields) {
        if (field.equals("score")) {
            CSVField csvField = new CSVField();
            csvField.name = "score";
            csvFields.put("score", csvField);
            continue;
        }

        SchemaField sf = schema.getFieldOrNull(field);
        if (sf == null) {
            FieldType ft = new StrField();
            sf = new SchemaField(field, ft);
        }

        // if we got the list of fields from the index, only list stored fields
        if (returnFields == null && !sf.stored()) {
            continue;
        }

        // check for per-field overrides
        sep = params.get("f." + field + '.' + CSV_SEPARATOR);
        encapsulator = params.get("f." + field + '.' + CSV_ENCAPSULATOR);
        escape = params.get("f." + field + '.' + CSV_ESCAPE);

        CSVSharedBufPrinter csvPrinter = csvPrinterMV;
        if (sep != null || encapsulator != null || escape != null) {
            // create a new strategy + printer if there were any per-field overrides
            strat = (CSVStrategy) mvStrategy.clone();
            if (sep != null) {
                if (sep.length() != 1)
                    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                            "Invalid mv separator:'" + sep + "'");
                strat.setDelimiter(sep.charAt(0));
            }
            if (encapsulator != null) {
                if (encapsulator.length() != 1)
                    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                            "Invalid mv encapsulator:'" + encapsulator + "'");
                strat.setEncapsulator(encapsulator.charAt(0));
                if (escape == null) {
                    strat.setEscape(CSVStrategy.ESCAPE_DISABLED);
                }
            }
            if (escape != null) {
                if (escape.length() != 1)
                    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                            "Invalid mv escape:'" + escape + "'");
                strat.setEscape(escape.charAt(0));
                if (encapsulator == null) {
                    strat.setEncapsulator(CSVStrategy.ENCAPSULATOR_DISABLED);
                }
            }
            csvPrinter = new CSVSharedBufPrinter(mvWriter, strat);
        }

        CSVField csvField = new CSVField();
        csvField.name = field;
        csvField.sf = sf;
        csvField.mvPrinter = csvPrinter;
        csvFields.put(field, csvField);
    }

    NullValue = params.get(CSV_NULL, "");

    if (params.getBool(CSV_HEADER, true)) {
        for (CSVField csvField : csvFields.values()) {
            printer.print(csvField.name);
        }
        printer.println();
    }

    if (responseObj instanceof DocList) {
        writeDocList(null, (DocList) responseObj, null, null);
    } else if (responseObj instanceof SolrDocumentList) {
        writeSolrDocumentList(null, (SolrDocumentList) responseObj, null, null);
    }
}
From source file:org.apache.storm.sql.runtime.serde.csv.CsvSerializer.java
@Override
public ByteBuffer write(List<Object> data, ByteBuffer buffer) {
    try {
        StringWriter writer = new StringWriter();
        CSVPrinter printer = new CSVPrinter(writer, CSVFormat.RFC4180);
        for (Object o : data) {
            printer.print(o);
        }
        // since we are using a StringWriter, there is no need to close it
        return ByteBuffer.wrap(writer.getBuffer().toString().getBytes(StandardCharsets.UTF_8));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
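This serializer leans on print(), which emits one field at a time and never writes a record separator, so the resulting line is header-less and unterminated by design. A minimal sketch of the field-level calls, using println() to end a record explicitly (the values are illustrative):

import java.io.IOException;
import java.io.StringWriter;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class FieldLevelPrinting {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        CSVPrinter printer = new CSVPrinter(out, CSVFormat.RFC4180);
        printer.print("first");  // field 1 of the current record
        printer.print("second"); // field 2; the delimiter is inserted automatically
        printer.println();       // terminates the record with the record separator
        printer.print("third");  // starts a new record
        printer.close();
        System.out.print(out);   // first,second CRLF third
    }
}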
From source file:org.bedework.eventreg.bus.CSVOutputter.java
@Override
public String next() {
    if (!regit.hasNext()) {
        return null;
    }

    final List<Object> flds = new ArrayList<>();
    final Registration reg = regit.next();
    final StringBuilder out = new StringBuilder();

    try {
        final CSVPrinter csv = new CSVPrinter(out, CSVFormat.EXCEL);

        flds.add(reg.getEvSummary());
        flds.add(reg.getEvDate());
        flds.add(reg.getEvTime());
        flds.add(reg.getEvLocation());
        flds.add(reg.getRegistrationId());
        flds.add(reg.getAuthid());
        flds.add(reg.getTicketsRequested());
        flds.add(reg.getNumTickets());
        flds.add(reg.getType());
        flds.add(reg.getComment());
        flds.add(reg.getCreated());
        flds.add(reg.getLastmod());

        if (form == null) {
            csv.printRecord(flds.toArray());
            csv.flush();
            csv.close();
            return out.toString();
        }

        final FormFields ff = new FormFields(form.getFields());

        try {
            final Map vals = reg.restoreFormValues();
            for (final FieldDef fd : ff) {
                final Object val = vals.get(fd.getName());
                if (val == null) {
                    flds.add("");
                } else {
                    flds.add(val);
                }
            }
        } catch (final Throwable t) {
            out.append("Exception restoring form values");
        }

        csv.printRecord(flds.toArray());
        csv.flush();
        csv.close();
    } catch (final Throwable t) {
        return "Exception " + t.getLocalizedMessage();
    }

    return out.toString();
}
From source file:org.cast.cwm.admin.CSVDownload.java
/**
 * Creates a new resource response based on the request attributes.
 *
 * @param attributes current request attributes from client
 * @return resource response for answering request
 */
@Override
protected ResourceResponse newResourceResponse(Attributes attributes) {
    ResourceResponse rr = new ResourceResponse();
    rr.disableCaching();
    rr.setFileName("log.csv");
    rr.setContentDisposition(ContentDisposition.ATTACHMENT);
    rr.setContentType("text/csv");

    if (rr.dataNeedsToBeWritten(attributes)) {
        rr.setWriteCallback(new WriteCallback() {
            @Override
            public void writeData(Attributes attributes) {
                Response response = attributes.getResponse();
                try {
                    CSVPrinter writer = new CSVPrinter(
                            new OutputStreamWriter(response.getOutputStream(), "UTF-8"), CSVFormat.EXCEL);

                    // Write header row
                    for (IDataColumn<E> col : columns) {
                        writer.print(col.getHeaderString());
                    }
                    writer.println();

                    // Write documentation row, if requested
                    if (includeDocumentationRow) {
                        for (IDataColumn<E> col : columns) {
                            if (col instanceof IDocumentedColumn
                                    && ((IDocumentedColumn) col).getDocumentationModel() != null) {
                                writer.print(((IDocumentedColumn) col).getDocumentationModel().getObject());
                            } else {
                                writer.print("");
                            }
                        }
                        writer.println();
                    }

                    // Write data rows
                    Iterator<? extends E> it = iteratorProvider.getIterator();
                    while (it.hasNext()) {
                        E e = it.next();
                        for (IDataColumn<E> col : columns) {
                            String columnValue = col.getItemString(new Model<E>(e));
                            if (columnValue == null) {
                                log.warn("Got a null value for {} of item {}", col.getHeaderString(), e);
                                columnValue = "null";
                            }
                            // Clean up text -- a CSV field cannot contain raw newlines
                            writer.print(columnValue.replaceAll("[\r\n]", " "));
                        }
                        writer.println();
                    }
                    writer.close();
                } catch (UnsupportedEncodingException e) {
                    throw new StringValueConversionException("UTF-8 translation not supported?!", e);
                } catch (IOException e) {
                    throw new WicketRuntimeException("Couldn't write to resource", e);
                }
            }
        });
    }
    return rr;
}
From source file:org.chanthing.csvtool.CSVTrans.java
public static void main(String[] args) throws IOException {
    CSVParser reader = null;
    CSVPrinter writer = null;
    CSVXform xFormer = null;

    if (args.length < 2) {
        System.out.println("Usage: java CSVTrans <src_csv_file> <dest_csv_file>");
        return;
    }

    try {
        reader = new CSVParser(new FileReader(args[0]), srcFormat);
        writer = new CSVPrinter(new FileWriter(args[1]), destFormat);
        xFormer = new CSVXform(destHeaders.size(), xforms);

        writer.printRecord(destHeaders); // write out headers to destination file

        // For each record in the input file
        for (CSVRecord record : reader) {
            List<String> destRecord = xFormer.xform(record);
            writer.printRecord(destRecord);
        }
    } finally {
        if (reader != null) {
            reader.close();
        }
        if (writer != null) {
            writer.close();
        }
    }
}
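Since CSVParser and CSVPrinter both implement Closeable, the null-guarded finally block above can be replaced with try-with-resources. A sketch of the same parse-transform-print loop in that style (srcFormat, destFormat, destHeaders, xforms, and CSVXform are this program's own definitions, assumed to be in scope):

try (CSVParser reader = new CSVParser(new FileReader(args[0]), srcFormat);
        CSVPrinter writer = new CSVPrinter(new FileWriter(args[1]), destFormat)) {
    CSVXform xFormer = new CSVXform(destHeaders.size(), xforms);
    writer.printRecord(destHeaders); // write out headers to destination file
    for (CSVRecord record : reader) {
        writer.printRecord(xFormer.xform(record)); // one transformed record per input record
    }
}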