Example usage for org.apache.commons.csv CSVFormat DEFAULT

List of usage examples for org.apache.commons.csv CSVFormat DEFAULT

Introduction

On this page you can find example usage of org.apache.commons.csv CSVFormat.DEFAULT.

Prototype

CSVFormat DEFAULT

Document

Standard comma-separated format, as defined by RFC 4180 but allowing empty lines.
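
A minimal, self-contained sketch of how CSVFormat.DEFAULT is commonly used for both printing and parsing is shown below; the class name CsvFormatDefaultSketch and the sample values are illustrative assumptions, not taken from the usage examples that follow.

import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;

// Illustrative sketch only; the class name and sample data are hypothetical.
public class CsvFormatDefaultSketch {
    public static void main(String[] args) throws Exception {
        // Write two records with the default format (comma delimiter, double-quote quoting, CRLF record separator).
        StringWriter out = new StringWriter();
        try (CSVPrinter printer = new CSVPrinter(out, CSVFormat.DEFAULT)) {
            printer.printRecord("id", "name");
            printer.printRecord(1, "Doe, \"John\"");
        }

        // Parse the CSV text back; quoting and escaping are undone automatically.
        try (Reader in = new StringReader(out.toString()); CSVParser parser = CSVFormat.DEFAULT.parse(in)) {
            for (CSVRecord record : parser) {
                System.out.println(record.get(0) + " -> " + record.get(1));
            }
        }
    }
}

Most of the examples below start from this same constant and derive a customized format via the with* methods, for example withDelimiter, withQuote, withEscape, withCommentMarker and withHeader.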

Usage

From source file:net.sourceforge.ganttproject.io.GanttCSVExport.java

/**
 * Save the project as CSV on a stream
 *
 * @throws IOException
 */
public void save(OutputStream stream) throws IOException {
    OutputStreamWriter writer = new OutputStreamWriter(stream);
    CSVFormat format = CSVFormat.DEFAULT.withEscape('\\');
    if (csvOptions.sSeparatedChar.length() == 1) {
        format = format.withDelimiter(csvOptions.sSeparatedChar.charAt(0));
    }
    if (csvOptions.sSeparatedTextChar.length() == 1) {
        format = format.withEncapsulator(csvOptions.sSeparatedTextChar.charAt(0));
    }

    CSVPrinter csvPrinter = new CSVPrinter(writer, format);

    if (csvOptions.bFixedSize) {
        // TODO The CSV library we use is lacking support for fixed size
        getMaxSize();
    }

    writeTasks(csvPrinter);

    if (myProject.getHumanResourceManager().getResources().size() > 0) {
        csvPrinter.println();
        csvPrinter.println();
        writeResources(csvPrinter);
    }
    writer.flush();
    writer.close();
}

From source file:co.cask.hydrator.transforms.CSVParser2.java

@Override
public void initialize(TransformContext context) throws Exception {
    super.initialize(context);

    String csvFormatString = config.format.toLowerCase();
    switch (csvFormatString) {
    case "default":
        csvFormat = CSVFormat.DEFAULT;
        break;

    case "excel":
        csvFormat = CSVFormat.EXCEL;
        break;

    case "mysql":
        csvFormat = CSVFormat.MYSQL;
        break;

    case "rfc4180":
        csvFormat = CSVFormat.RFC4180;
        break;

    case "tdf":
        csvFormat = CSVFormat.TDF;
        break;

    default:
        throw new IllegalArgumentException(
                "Format '" + config.format + "' specified is not one of the allowed formats. Allowed formats are "
                        + "DEFAULT, EXCEL, MYSQL, RFC4180 and TDF.");
    }

    if (config.field == null || config.field.isEmpty()) {
        throw new IllegalArgumentException("Field for applying transformation is not specified.");
    }

    try {
        outSchema = Schema.parseJson(config.schema);
        fields = outSchema.getFields();
    } catch (IOException e) {
        throw new IllegalArgumentException("Format of schema specified is invalid. Please check the format.");
    }
}

From source file:fi.vm.kapa.identification.adapter.service.AdapterPropertyMapper.java

@PostConstruct
public void initAdapterUtils() {
    try {
        /* The session attribute map has the following syntax:
         * [External-IdP-attribute-key];[SP-attribute-key]
         */
        this.sessionAttributeMap.putAll(getMapFromFromCsvFile(adapterMapFile,
                CSVFormat.DEFAULT.withDelimiter(';').withCommentMarker('#')));

        // Tupas properties contains each bank specific values and attributes
        this.tupasProperties.putAll(getMapFromFromCsvFile(tupasPropertiesFile,
                CSVFormat.DEFAULT.withDelimiter(';').withCommentMarker('#')));
        tupasFormTemplate = new String(Files.readAllBytes(Paths.get(tupasRequestFormTemplate)), "UTF-8");
    } catch (Exception e) {
        logger.error("Error initializing session parser", e);
    }
}

From source file:com.bjond.Main.java

/**
*  Given an input stream _in_ to an audit log, the unobfuscated log will be streamed to _out_.
*
* @param in
* @param out
*
* @throws IOException
* @throws SQLException
*/
public static void process(final InputStream in, final OutputStream out) throws IOException, SQLException {
    log.info("Execution begins...");

    // Generate the POSTGRESQL URL from system environment variables.
    POSTGRESQL_URL = String.format("jdbc:postgresql://%s:%s/%s", OPENSHIFT_POSTGRESQL_DB_HOST,
            OPENSHIFT_POSTGRESQL_DB_PORT, OPENSHIFT_APP_NAME);

    try (final Connection db = DriverManager.getConnection(POSTGRESQL_URL, OPENSHIFT_POSTGRESQL_DB_USERNAME,
            OPENSHIFT_POSTGRESQL_DB_PASSWORD)) {

        final PrintStream outPrintStream = new PrintStream(out, true, "UTF-8");
        final Reader inReader = new InputStreamReader(in, "UTF-8");
        final Iterable<CSVRecord> records = CSVFormat.DEFAULT.withQuote('\'').parse(inReader);

        log.info("PostgreSQL DB connectiion valid: {}", db.isValid(1000));

        records.forEach(record -> {
            record.iterator().forEachRemaining(e -> {
                try {
                    if (!e.isEmpty()) {
                        final String[] tuple = keyValueSplitter(e);
                        outPrintStream.printf("%s='%s',", tuple[0], resolve(db, tuple[0], tuple[1]));
                    }
                } catch (final Exception exception) {
                    log.error("unexpected error on " + e, exception);
                }
            });

            outPrintStream.printf("%n"); // EOL
        });
    }

    log.info("Execution ends...");
}

From source file:io.ecarf.core.cloud.task.processor.reason.phase2.DoReasonTask8IntTest.java

@Test
@Ignore
public void testCsvParser() throws FileNotFoundException, IOException {

    String filename = "/var/folders/3h/0whnrhjn1ddfb5p9pq_c6_mh0000gn/T//ecarf-evm-1_1456690870927_QueryResults_0";
    int rows = 0;

    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(new GZIPInputStream(new FileInputStream(filename), Constants.GZIP_BUF_SIZE)),
            Constants.GZIP_BUF_SIZE);) {

        Iterable<CSVRecord> records = CSVFormat.DEFAULT.withHeader().withSkipHeaderRecord().parse(reader);

        for (CSVRecord record : records) {

            ETriple instanceTriple = ETriple.fromCSV(record.values());
            rows++;
        }
    }

    assertEquals(8091263, rows);

}

From source file:de.tudarmstadt.ukp.dkpro.tc.svmhmm.report.SVMHMMBatchCrossValidationReport.java

protected void aggregateResults(String testTaskCSVFile, String outputPrefix) throws Exception {
    StorageService storageService = getContext().getStorageService();

    // aggregate rows from all CSVs from all folds
    List<List<String>> allOutcomes = new ArrayList<>();

    List<TaskContextMetadata> testTasks = collectTestTasks();

    // we need test tasks!
    if (testTasks.isEmpty()) {
        throw new IllegalStateException("No test tasks found. Make sure you properly "
                + "define the test task in getTestTaskClass() (currently: " + getTestTaskClass().getName() + ")");
    }

    // iterate over all sub tasks
    for (TaskContextMetadata subContext : testTasks) {
        // locate CSV file with outcomes (gold, predicted, token, etc.)
        File csvFile = storageService.getStorageFolder(subContext.getId(),
                Constants.TEST_TASK_OUTPUT_KEY + File.separator + testTaskCSVFile);

        // load the CSV
        CSVParser csvParser = new CSVParser(new FileReader(csvFile), CSVFormat.DEFAULT.withCommentMarker('#'));

        // and add all the rows
        for (CSVRecord csvRecord : csvParser) {
            // row for particular instance
            List<String> row = new ArrayList<>();
            for (String value : csvRecord) {
                row.add(value);
            }
            allOutcomes.add(row);
        }

        IOUtils.closeQuietly(csvParser);
    }

    // store aggregated outcomes again to CSV
    File evaluationFile = new File(getContext().getStorageLocation(Constants.TEST_TASK_OUTPUT_KEY,
            StorageService.AccessMode.READWRITE), testTaskCSVFile);
    log.debug("Evaluation file: " + evaluationFile.getAbsolutePath());

    CSVPrinter csvPrinter = new CSVPrinter(new FileWriter(evaluationFile), SVMHMMUtils.CSV_FORMAT);
    csvPrinter.printComment(SVMHMMUtils.CSV_COMMENT);
    csvPrinter.printRecords(allOutcomes);
    IOUtils.closeQuietly(csvPrinter);

    // compute confusion matrix
    ConfusionMatrix cm = new ConfusionMatrix();

    for (List<String> singleInstanceOutcomeRow : allOutcomes) {
        // first item is the gold label
        String gold = singleInstanceOutcomeRow.get(0);
        // second item is the predicted label
        String predicted = singleInstanceOutcomeRow.get(1);

        cm.increaseValue(gold, predicted);
    }

    // and write all reports
    SVMHMMUtils.writeOutputResults(getContext(), cm, outputPrefix);

    // and print detailed results
    log.info(outputPrefix + "; " + cm.printNiceResults());
    log.info(outputPrefix + "; " + cm.printLabelPrecRecFm());
}

From source file:biz.webgate.dominoext.poi.component.kernel.CSVProcessor.java

public ByteArrayOutputStream generateCSV(UICSV csvDef, FacesContext context) throws IOException, POIException {
    ByteArrayOutputStream csvBAOS = new ByteArrayOutputStream();
    OutputStreamWriter csvWriter = new OutputStreamWriter(csvBAOS);
    CSVPrinter csvPrinter = new CSVPrinter(csvWriter, CSVFormat.DEFAULT);

    List<CSVColumn> lstColumns = csvDef.getColumns();
    Collections.sort(lstColumns, new Comparator<CSVColumn>() {

        public int compare(CSVColumn o1, CSVColumn o2) {
            Integer p1 = Integer.valueOf(o1.getPosition());
            Integer p2 = Integer.valueOf(o2.getPosition());
            return p1.compareTo(p2);
        }

    });
    if (csvDef.isIncludeHeader()) {
        for (CSVColumn cl : lstColumns) {
            csvPrinter.print(cl.getTitle());
        }
        csvPrinter.println();
    }

    // Fetch and process the data source.
    if (csvDef.getDataSource() != null) {
        EmbeddedDataSourceExportProcessor.getInstance().process(lstColumns, csvDef, csvPrinter, context);
    } else {
        XPagesDataSourceExportProcessor.getInstance().process(lstColumns, csvDef, csvPrinter, context);
    }

    csvPrinter.flush();
    return csvBAOS;
}

From source file:com.x460dot10.b.registrar.StartupManager.java

/**
 * Imports data/mockstudents.dat into <code>University.students</code>
 *
 * @return             Indicates whether the import of students was successful
 * @throws IOException
 */
public boolean importStudents() throws IOException {
    Boolean importStudentsSuccessful = true;
    File file = new File("data/mockstudents.dat");
    FileReader reader = null;
    ArrayList<MockStudent> fileStudents = new ArrayList<MockStudent>();
    Object nextStudent;
    try {
        reader = new FileReader(file);
        CSVFormat format = CSVFormat.DEFAULT;
        List<CSVRecord> records = new CSVParser(reader, format).getRecords();

        for (CSVRecord record : records) {
            String idAsString = record.get(0);
            Integer id = Integer.parseInt(idAsString);
            String first = record.get(1);
            String last = record.get(2);
            String dob = record.get(3);
            nextStudent = MockStudent.getStaticInstance(id, first, last, dob).clone();
            fileStudents.add((MockStudent) nextStudent);
        }
        uni.students.addAll(fileStudents);

    } catch (Exception ex) {
        // TODO send error message to a log file
        System.err.println("Error: " + ex.getMessage());
        importStudentsSuccessful = false;
    } finally {
        if (reader != null)
            reader.close();
    }
    return importStudentsSuccessful;
}

From source file:com.streamsets.pipeline.lib.parser.delimited.DelimitedDataParserFactory.java

private DataParser createParser(String id, OverrunReader reader, long offset) throws DataParserException {
    Utils.checkState(reader.getPos() == 0,
            Utils.formatL("reader must be in position '0', it is at '{}'", reader.getPos()));
    CSVFormat csvFormat = getSettings().getMode(CsvMode.class).getFormat();
    if (getSettings().getMode(CsvMode.class) == CsvMode.CUSTOM) {
        csvFormat = CSVFormat.DEFAULT
                .withDelimiter((char) getSettings().getConfig(DelimitedDataConstants.DELIMITER_CONFIG))
                .withEscape((char) getSettings().getConfig(DelimitedDataConstants.ESCAPE_CONFIG))
                .withQuote((char) getSettings().getConfig(DelimitedDataConstants.QUOTE_CONFIG));
    }
    try {
        return new DelimitedCharDataParser(getSettings().getContext(), id, reader, offset,
                (Integer) getSettings().getConfig(DelimitedDataConstants.SKIP_START_LINES), csvFormat,
                getSettings().getMode(CsvHeader.class), getSettings().getMaxRecordLen(),
                getSettings().getMode(CsvRecordType.class));
    } catch (IOException ex) {
        throw new DataParserException(Errors.DELIMITED_PARSER_00, id, offset, ex.toString(), ex);
    }
}

From source file:de.tudarmstadt.ukp.experiments.argumentation.sequence.significance.SignificanceMain.java

/**
 * Prints the table to a string as CSV
 *
 * @param <T>   value type
 * @param table table
 * @return the table rendered as CSV
 * @throws IOException
 */
public static <T> String tableToCsv(Table<String, String, Boolean> table) throws IOException {
    StringWriter sw = new StringWriter();
    CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);

    List<String> firstRow = new ArrayList<>();
    firstRow.add(" ");
    firstRow.addAll(table.columnKeySet());
    printer.printRecord(firstRow);

    for (String rowKey : table.rowKeySet()) {
        printer.print(rowKey);
        for (String columnKey : table.columnKeySet()) {
            printer.print(table.get(rowKey, columnKey));
        }
        printer.println();
    }

    printer.close();

    return sw.toString();
}