List of usage examples for org.apache.commons.csv CSVPrinter printRecord
public void printRecord(final Object... values) throws IOException
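Before the per-project examples below, a minimal self-contained sketch of the varargs overload. The file name output.csv and the sample values are illustrative only and are not taken from any of the sources listed here.

import java.io.FileWriter;
import java.io.IOException;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class PrintRecordExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes the printer and the underlying writer
        try (CSVPrinter printer = new CSVPrinter(new FileWriter("output.csv"), CSVFormat.DEFAULT)) {
            printer.printRecord("id", "name", "score"); // each argument becomes one field of the record
            printer.printRecord(1, "first row", 3.14);  // non-String values are written via their String form
        }
    }
}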
From source file:com.ibm.watson.developer_cloud.professor_languo.ingestion.indexing.StackExchangeThreadSerializer.java
/**
 * Extract the major fields from the duplicate StackExchangeQuestion and save those key fields in
 * the TSV file.
 *
 * @param csvPrinter - the CSVPrinter to write the record with
 * @param dupThread - the duplicate StackExchange thread to be serialized to the TSV file
 * @param originThreadId - post id of the StackExchange thread representing the other duplicate threads
 * @param serFileName - path (relative to the csvDir folder) of the binary serialization file
 * @throws IngestionException
 */
public static void serializeDupThreadToTsvFile(CSVPrinter csvPrinter, StackExchangeThread dupThread,
        int originThreadId, String serFileName) throws IngestionException {
    // CSVPrinter csvPrinter = getCsvPrinter(tsvDir);
    List<String> record = new ArrayList<String>(
            Arrays.asList(Integer.toString(dupThread.getId()), dupThread.getQuestion().getTitle(),
                    dupThread.getQuestion().getBody(), Integer.toString(originThreadId), serFileName,
                    dupThread.getConcatenatedTagsText()));
    try {
        csvPrinter.printRecord(record);
    } catch (IOException e) {
        throw new IngestionException(e);
    }
}
From source file:com.ibm.watson.developer_cloud.natural_language_classifier.v1.util.TrainingDataUtils.java
/**
 * Converts a training data argument list to a CSV representation.
 *
 * @param data the training data
 * @return the string with the CSV representation for the training data
 */
public static String toCSV(final TrainingData... data) {
    Validate.notEmpty(data, "data cannot be null or empty");
    StringWriter stringWriter = new StringWriter();
    try {
        CSVPrinter printer = new CSVPrinter(stringWriter, CSVFormat.EXCEL);
        for (TrainingData trainingData : data) {
            if (trainingData.getText() == null || trainingData.getClasses() == null
                    || trainingData.getClasses().isEmpty()) {
                log.log(Level.WARNING, trainingData + " couldn't be converted to a CSV record");
            } else {
                List<String> record = new ArrayList<String>();
                record.add(trainingData.getText());
                record.addAll(trainingData.getClasses());
                printer.printRecord(record.toArray());
            }
        }
        printer.close();
    } catch (IOException e) {
        log.log(Level.SEVERE, "Error creating the CSV", e);
    }
    return stringWriter.toString();
}
From source file:javalibs.CSVExtractor.java
/**
 * Write a single CSV record to a CSV file.
 *
 * @param path The path to save the CSV file
 * @param rec The record to be written
 * @param headers Headers for the CSV. If this value is null there will be no
 *                headers added to the CSV
 */
public static void writeCSVRecord(String path, CSVRecord rec, String[] headers) {
    BufferedWriter bw = null;
    CSVPrinter printer = null;
    try {
        bw = Files.newBufferedWriter(Paths.get(path));
        if (headers != null)
            printer = new CSVPrinter(bw, CSVFormat.DEFAULT.withHeader(headers));
        else
            printer = new CSVPrinter(bw, CSVFormat.DEFAULT);
    } catch (IOException e) {
        TSL.get().exception(e);
    }
    TSL.get().require(bw != null, "BufferedWriter cannot be null");
    TSL.get().require(printer != null, "CSVPrinter cannot be null");
    try {
        printer.printRecord(rec);
        printer.flush();
    } catch (IOException e) {
        TSL.get().exception(e);
    }
}
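A more compact variant of the same idea, shown only as a sketch (the helper name writeCSVRecordCompact is hypothetical and the imports match the example above): when the format is built with CSVFormat.DEFAULT.withHeader(headers), CSVPrinter emits the header row as soon as it is constructed, and try-with-resources guarantees the writer is flushed and closed.

// Sketch only: hypothetical helper, not part of javalibs.CSVExtractor
public static void writeCSVRecordCompact(String path, CSVRecord rec, String[] headers) throws IOException {
    CSVFormat format = (headers != null)
            ? CSVFormat.DEFAULT.withHeader(headers) // header row is printed when the printer is created
            : CSVFormat.DEFAULT;
    try (BufferedWriter bw = Files.newBufferedWriter(Paths.get(path));
            CSVPrinter printer = new CSVPrinter(bw, format)) {
        printer.printRecord(rec); // CSVRecord is Iterable<String>, so its fields become the record
    }
}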
From source file:cz.lbenda.dataman.db.ExportTableData.java
/**
 * Write rows to a CSV file.
 *
 * @param sqlQueryRows rows
 * @param writer where the data are written
 */
public static void writeSqlQueryRowsToCSV(SQLQueryRows sqlQueryRows, Writer writer) throws IOException {
    CSVFormat csvFileFormat = CSVFormat.DEFAULT.withRecordSeparator(Constants.CSV_NEW_LINE_SEPARATOR);
    CSVPrinter csvFilePrinter = new CSVPrinter(writer, csvFileFormat);
    csvFilePrinter.printRecord(
            sqlQueryRows.getMetaData().getColumns().stream().map(ColumnDesc::getName).toArray());
    for (RowDesc row : sqlQueryRows.getRows()) {
        csvFilePrinter.printRecord(
                sqlQueryRows.getMetaData().getColumns().stream().map(row::getColumnValueStr).toArray());
    }
}
From source file:de.tudarmstadt.ukp.experiments.argumentation.sequence.evaluation.GenerateCrossDomainCVReport.java
/**
 * Merges id2outcome files from cross-domain sub-folders and creates a new folder
 * with overall results.
 *
 * @param folder folder
 * @throws java.io.IOException
 */
public static void aggregateDomainResults(File folder, String subDirPrefix, final String taskFolderSubText,
        String outputFolderName) throws IOException {
    // list all sub-folders
    File[] folders = folder.listFiles(new FileFilter() {
        @Override
        public boolean accept(File pathname) {
            return pathname.isDirectory() && pathname.getName().contains(taskFolderSubText);
        }
    });
    if (folders.length == 0) {
        throw new IllegalArgumentException("No sub-folders 'SVMHMMTestTask*' found in " + folder);
    }
    // write to a file
    File outFolder = new File(folder, outputFolderName);
    File output = new File(outFolder, subDirPrefix);
    output.mkdirs();
    File outCsv = new File(output, TOKEN_LEVEL_PREDICTIONS_CSV);
    CSVPrinter csvPrinter = new CSVPrinter(new FileWriter(outCsv), SVMHMMUtils.CSV_FORMAT);
    csvPrinter.printComment(SVMHMMUtils.CSV_COMMENT);
    ConfusionMatrix cm = new ConfusionMatrix();
    for (File domain : folders) {
        File tokenLevelPredictionsCsv = new File(domain, subDirPrefix + "/" + TOKEN_LEVEL_PREDICTIONS_CSV);
        if (!tokenLevelPredictionsCsv.exists()) {
            throw new IllegalArgumentException(
                    "Cannot locate tokenLevelPredictions.csv: " + tokenLevelPredictionsCsv);
        }
        CSVParser csvParser = new CSVParser(new FileReader(tokenLevelPredictionsCsv),
                CSVFormat.DEFAULT.withCommentMarker('#'));
        for (CSVRecord csvRecord : csvParser) {
            // copy record
            csvPrinter.printRecord(csvRecord);
            // update confusion matrix
            cm.increaseValue(csvRecord.get(0), csvRecord.get(1));
        }
    }
    // write the confusion matrix to a file
    FileUtils.writeStringToFile(new File(outFolder, "confusionMatrix.txt"),
            cm.toString() + "\n" + cm.printNiceResults() + "\n" + cm.printLabelPrecRecFm() + "\n"
                    + cm.printClassDistributionGold());
    // flush and close the output csv
    IOUtils.closeQuietly(csvPrinter);
}
From source file:com.chargebee.Application.MappingHeaders.java
private void print(Collection list, CSVPrinter printer) throws IOException {
    printer.printRecord(list);
}
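As a point of comparison, a sketch with made-up field values (not taken from com.chargebee.Application.MappingHeaders): printRecord also accepts any Iterable, so a Collection can be passed directly and each element becomes one field of a single record.

// Sketch: hypothetical call site
StringWriter out = new StringWriter();
CSVPrinter printer = new CSVPrinter(out, CSVFormat.DEFAULT);
printer.printRecord(Arrays.asList("order_id", "customer", "amount")); // one record, three fields
printer.close();
// out.toString() -> "order_id,customer,amount\r\n"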
From source file:com.ibm.watson.app.common.tagEvent.TagRecord.java
public String asCsv() {
    prepareForFormat();
    StringBuilder sb = new StringBuilder();
    try {
        CSVPrinter printer = new CSVPrinter(sb, CSVFormat.DEFAULT);
        printer.printRecord(this);
        printer.close();
    } catch (IOException e) {
        // ignored: appending to a StringBuilder is not expected to throw
    }
    return sb.toString();
}
From source file:com.leadscope.commanda.maps.CSVLineMap.java
@Override
public String apply(List<String> element) {
    StringWriter sw = new StringWriter();
    try {
        CSVPrinter printer = new CSVPrinter(sw, noBreakFormat);
        printer.printRecord(element);
        printer.close();
        return sw.toString();
    } catch (RuntimeException re) {
        throw re;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
From source file:io.github.hebra.elasticsearch.beat.meterbeat.service.CSVFileOutputService.java
@Override
public synchronized void send(BeatOutput output) {
    if (isEnable()) {
        try (PrintWriter writer = new PrintWriter(new FileOutputStream(outputPath.toFile(), true))) {
            CSVFormat csvFileFormat = CSVFormat.DEFAULT.withHeader().withDelimiter(delimiter).withQuote(quote)
                    .withQuoteMode(QuoteMode.NON_NUMERIC).withSkipHeaderRecord(true);
            CSVPrinter csvFilePrinter = new CSVPrinter(writer, csvFileFormat);
            csvFilePrinter.printRecord(output.asIterable());
            csvFilePrinter.close();
        } catch (IOException ioEx) {
            log.error(ioEx.getMessage());
        }
    }
}
From source file:com.hys.enterprise.dominoes.reporting.ReportBuilder.java
/**
 * Writes a record to the CSV file.
 *
 * @param record
 * @throws IOException
 */
public void writeRecord(ReportRecord record) throws IOException {
    fileWriter = new FileWriter(file, true);
    CSVPrinter csvFilePrinter;
    csvFilePrinter = new CSVPrinter(fileWriter, csvFileFormat);
    // write the header only for an empty file; opening the FileWriter above already
    // creates the file, so it is the length check that actually decides
    if (!file.exists() || file.length() == 0) {
        csvFilePrinter.printRecord(record.getHeader());
    }
    csvFilePrinter.printRecord(record.getData());
    if (fileWriter != null) {
        fileWriter.flush();
        fileWriter.close();
    }
    csvFilePrinter.close();
}
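To close the list, a hedged alternative sketch of the same append-with-header idea (the method name writeRecordCompact is hypothetical; the fields file and csvFileFormat are reused from the example above). Checking for an empty file before opening the writer avoids the file-creation side effect noted in the comment, and try-with-resources handles flushing and closing.

// Sketch only: not part of com.hys.enterprise.dominoes.reporting.ReportBuilder
public void writeRecordCompact(ReportRecord record) throws IOException {
    boolean writeHeader = !file.exists() || file.length() == 0; // decide before the writer creates the file
    try (FileWriter out = new FileWriter(file, true);
            CSVPrinter printer = new CSVPrinter(out, csvFileFormat)) {
        if (writeHeader) {
            printer.printRecord(record.getHeader());
        }
        printer.printRecord(record.getData());
    }
}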