Example usage for org.apache.commons.csv CSVFormat withHeader

Introduction

This page lists example usages of org.apache.commons.csv CSVFormat.withHeader.

Prototype

public CSVFormat withHeader(final String... header)
public CSVFormat withHeader(final ResultSetMetaData metaData) throws SQLException

Document

Sets the header of the format.
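
Before the project examples below, here is a minimal sketch of the most common overload, withHeader(String...): it assigns column names to the format so parsed records can be read by name. The class name, the file name people.csv, and the column names are illustrative assumptions, not taken from any of the examples.

import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class WithHeaderParseExample {
    public static void main(String[] args) throws IOException {
        // Assign column names to the format; records can then be accessed by name.
        // The input is assumed to have no header line of its own; if it does,
        // add .withSkipHeaderRecord(true) so that line is not returned as data.
        CSVFormat format = CSVFormat.DEFAULT.withHeader("id", "name", "email");
        try (Reader in = new FileReader("people.csv"); // hypothetical input file
                CSVParser parser = new CSVParser(in, format)) {
            for (CSVRecord record : parser) {
                System.out.println(record.get("id") + " -> " + record.get("name"));
            }
        }
    }
}

If the input's first line already holds the column names, the no-argument withHeader() (or withFirstRecordAsHeader()) reads them from the file instead, as the CSVRecordReader and CSVKeyValueCacheService examples below do.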

Usage

From source file:cz.pichlik.goodsentiment.common.CSVFormats.java

public static CSVFormat format(String... header) {
    CSVFormat csvFormat = CSVFormat.newFormat(',').withRecordSeparator("\r\n").withQuote('"')
            .withNullString("");
    return (header.length > 0) ? csvFormat.withHeader(header) : csvFormat;
}

From source file:com.datascience.hadoop.CsvOutputFormat.java

/**
 * Creates a CSV format from a Hadoop configuration.
 */
private static CSVFormat createFormat(Configuration conf) {
    CSVFormat format = CSVFormat
            .newFormat(conf.get(CSV_WRITER_DELIMITER, DEFAULT_CSV_WRITER_DELIMITER).charAt(0))
            .withSkipHeaderRecord(conf.getBoolean(CSV_WRITER_SKIP_HEADER, DEFAULT_CSV_WRITER_SKIP_HEADER))
            .withRecordSeparator(conf.get(CSV_WRITER_RECORD_SEPARATOR, DEFAULT_CSV_WRITER_RECORD_SEPARATOR))
            .withIgnoreEmptyLines(
                    conf.getBoolean(CSV_WRITER_IGNORE_EMPTY_LINES, DEFAULT_CSV_WRITER_IGNORE_EMPTY_LINES))
            .withIgnoreSurroundingSpaces(conf.getBoolean(CSV_WRITER_IGNORE_SURROUNDING_SPACES,
                    DEFAULT_CSV_WRITER_IGNORE_SURROUNDING_SPACES))
            .withNullString(conf.get(CSV_WRITER_NULL_STRING, DEFAULT_CSV_WRITER_NULL_STRING));

    String[] header = conf.getStrings(CSV_WRITER_COLUMNS);
    if (header != null && header.length > 0)
        format = format.withHeader(header);

    String escape = conf.get(CSV_WRITER_ESCAPE_CHARACTER, DEFAULT_CSV_WRITER_ESCAPE_CHARACTER);
    if (escape != null)
        format = format.withEscape(escape.charAt(0));

    String quote = conf.get(CSV_WRITER_QUOTE_CHARACTER, DEFAULT_CSV_WRITER_QUOTE_CHARACTER);
    if (quote != null)
        format = format.withQuote(quote.charAt(0));

    String quoteMode = conf.get(CSV_WRITER_QUOTE_MODE, DEFAULT_CSV_WRITER_QUOTE_MODE);
    if (quoteMode != null)
        format = format.withQuoteMode(QuoteMode.valueOf(quoteMode));
    return format;
}

From source file:com.datascience.hadoop.CsvInputFormat.java

/**
 * Creates a CSV format from a Hadoop configuration.
 */
private static CSVFormat createFormat(Configuration conf) {
    CSVFormat format = CSVFormat
            .newFormat(conf.get(CSV_READER_DELIMITER, DEFAULT_CSV_READER_DELIMITER).charAt(0))
            .withSkipHeaderRecord(conf.getBoolean(CSV_READER_SKIP_HEADER, DEFAULT_CSV_READER_SKIP_HEADER))
            .withRecordSeparator(conf.get(CSV_READER_RECORD_SEPARATOR, DEFAULT_CSV_READER_RECORD_SEPARATOR))
            .withIgnoreEmptyLines(
                    conf.getBoolean(CSV_READER_IGNORE_EMPTY_LINES, DEFAULT_CSV_READER_IGNORE_EMPTY_LINES))
            .withIgnoreSurroundingSpaces(conf.getBoolean(CSV_READER_IGNORE_SURROUNDING_SPACES,
                    DEFAULT_CSV_READER_IGNORE_SURROUNDING_SPACES))
            .withNullString(conf.get(CSV_READER_NULL_STRING, DEFAULT_CSV_READER_NULL_STRING));

    String[] header = conf.getStrings(CSV_READER_COLUMNS);
    if (header != null && header.length > 0)
        format = format.withHeader(header);

    String escape = conf.get(CSV_READER_ESCAPE_CHARACTER, DEFAULT_CSV_READER_ESCAPE_CHARACTER);
    if (escape != null)
        format = format.withEscape(escape.charAt(0));

    String quote = conf.get(CSV_READER_QUOTE_CHARACTER, DEFAULT_CSV_READER_QUOTE_CHARACTER);
    if (quote != null)
        format = format.withQuote(quote.charAt(0));

    String quoteMode = conf.get(CSV_READER_QUOTE_MODE, DEFAULT_CSV_READER_QUOTE_MODE);
    if (quoteMode != null)
        format = format.withQuoteMode(QuoteMode.valueOf(quoteMode));
    return format;
}

From source file:jp.co.cyberagent.parquet.msgpack.CSVHeaderMap.java

public CSVFormat injectHeaderFormat(CSVFormat format) {
    String[] names = new String[headers.length];
    int i = 0;
    for (Entry header : headers) {
        names[i] = header.name;
        i += 1;
    }
    return format.withHeader(names);
}

From source file:com.streamsets.pipeline.lib.generator.delimited.DelimitedCharDataGenerator.java

public DelimitedCharDataGenerator(Writer writer, CSVFormat format, CsvHeader header, String headerKey,
        String valueKey, String replaceNewLines) throws IOException {
    // withHeader((String[]) null) clears any header names already set on the
    // format, so the CSVPrinter below will not write a header record itself.
    format = format.withHeader((String[]) null);
    this.format = format;
    this.headerKey = headerKey;
    this.valueKey = valueKey;
    printer = new CSVPrinter(writer, format);
    this.header = header;
    firstRecord = true;
    this.replaceNewLines = replaceNewLines;
}

From source file:com.linkedin.pinot.core.data.readers.CSVRecordReader.java

private CSVFormat getFormat() {
    CSVFormat format = getFormatFromConfig().withDelimiter(getDelimiterFromConfig());
    String[] header = getHeaderFromConfig();

    if (header != null) {
        format = format.withHeader(header);
    } else {
        // No header configured: the no-argument withHeader() tells the parser to
        // read the column names from the first record of the input.
        format = format.withHeader();
    }

    return format;
}

From source file:com.streamsets.pipeline.lib.parser.delimited.DelimitedCharDataParser.java

public DelimitedCharDataParser(Stage.Context context, String readerId, OverrunReader reader, long readerOffset,
        int skipStartLines, CSVFormat format, CsvHeader header, int maxObjectLen, CsvRecordType recordType)
        throws IOException {
    this.context = context;
    this.readerId = readerId;
    this.recordType = recordType;
    switch (header) {
    case WITH_HEADER:
        format = format.withHeader((String[]) null).withSkipHeaderRecord(true);
        break;
    case IGNORE_HEADER:
        format = format.withHeader((String[]) null).withSkipHeaderRecord(true);
        break;
    case NO_HEADER:
        format = format.withHeader((String[]) null).withSkipHeaderRecord(false);
        break;
    default:
        throw new RuntimeException(Utils.format("Unknown header error: {}", header));
    }
    parser = new OverrunCsvParser(reader, format, readerOffset, skipStartLines, maxObjectLen);
    String[] hs = parser.getHeaders();
    if (header != CsvHeader.IGNORE_HEADER && hs != null) {
        headers = new ArrayList<>();
        for (String h : hs) {
            headers.add(Field.create(h));
        }
    }
}

From source file:biz.ganttproject.impex.csv.GanttCSVOpen.java

private CSVFormat createCSVFormat(List<String> headers) {
    CSVFormat format = CSVFormat.DEFAULT.withIgnoreEmptyLines(false).withIgnoreSurroundingSpaces(true);
    if (myCsvOptions != null) {
        format = format.withDelimiter(myCsvOptions.sSeparatedChar.charAt(0))
                .withQuote(myCsvOptions.sSeparatedTextChar.charAt(0));
    }
    if (headers != null) {
        format = format.withHeader(headers.toArray(new String[0]));
    }
    return format;
}

From source file:com.hurence.logisland.service.cache.CSVKeyValueCacheService.java

@Override
// @OnEnabled
public void init(ControllerServiceInitializationContext context) throws InitializationException {
    super.init(context);
    try {

        if (context.getPropertyValue(DATABASE_FILE_URI).isSet()) {
            dbUri = context.getPropertyValue(DATABASE_FILE_URI).asString();
        }

        if (context.getPropertyValue(DATABASE_FILE_PATH).isSet()) {
            dbPath = context.getPropertyValue(DATABASE_FILE_PATH).asString();
        }

        if ((dbUri == null) && (dbPath == null)) {
            throw new Exception(
                    "You must declare " + DATABASE_FILE_URI.getName() + " or " + DATABASE_FILE_PATH.getName());
        }

        InputStream is = null;
        if (dbUri != null) {
            logger.info("opening csv database from hdfs : " + dbUri);
            is = initFromUri(dbUri);
        }

        if (dbPath != null) {
            logger.info("opening csv database from local fs : " + dbPath);
            is = initFromPath(context, dbPath);
        }

        if (is == null) {
            throw new InitializationException("Something went wrong while initializing csv db from "
                    + DATABASE_FILE_URI.getName() + " or " + DATABASE_FILE_PATH.getName());
        }

        // final Reader reader = new InputStreamReader(is);
        CSVFormat format = CSVFormat.DEFAULT;
        if (context.getPropertyValue(CSV_FORMAT).asString().equals(CSV_EXCEL.getValue())) {
            format = CSVFormat.EXCEL;
        } else if (context.getPropertyValue(CSV_FORMAT).asString().equals(CSV_EXCEL_FR.getValue())) {
            format = CSVFormat.EXCEL.withDelimiter(';');
        } else if (context.getPropertyValue(CSV_FORMAT).asString().equals(CSV_MYSQL.getValue())) {
            format = CSVFormat.MYSQL;
        } else if (context.getPropertyValue(CSV_FORMAT).asString().equals(CSV_RFC4180.getValue())) {
            format = CSVFormat.RFC4180;
        } else if (context.getPropertyValue(CSV_FORMAT).asString().equals(CSV_TDF.getValue())) {
            format = CSVFormat.TDF;
        }

        if (context.getPropertyValue(CSV_HEADER).isSet()) {
            String[] columnNames = context.getPropertyValue(CSV_HEADER).asString().split(",");
            for (String name : columnNames) {
                headers.get().put(name, "string");
            }
            format = format.withHeader(columnNames);
        } else if (context.getPropertyValue(FIRST_LINE_HEADER).isSet()) {
            format = format.withFirstRecordAsHeader();
        } else {
            throw new InitializationException("unable to get headers from somewhere");
        }

        Charset charset = Charset.forName("UTF-8");
        if (context.getPropertyValue(ENCODING_CHARSET).isSet()) {
            String encoding = context.getPropertyValue(ENCODING_CHARSET).asString();
            charset = Charset.forName(encoding);
        }

        rowKey = context.getPropertyValue(ROW_KEY).asString();
        CSVParser parser = CSVParser.parse(is, charset, format); //new CSVParser(reader, format);

        /*
         * CSVParser parser = null;
         *
         * if (context.getPropertyValue(ENCODING_CHARSET).isSet()) {
         *     String encoding = context.getPropertyValue(ENCODING_CHARSET).asString();
         *     parser = CSVParser.parse(reader, Charset.forName(encoding), format);
         * } else {
         *     parser = CSVParser.parse(reader, format);
         * }
         */
        long count = 0;
        try {
            final Set<String> columnNames = parser.getHeaderMap().keySet();
            for (final CSVRecord record : parser) {

                Record logislandRecord = new StandardRecord();
                for (final String column : columnNames) {
                    logislandRecord.setStringField(column, record.get(column));
                }

                set(logislandRecord.getField(rowKey).asString(), logislandRecord);
                count++;
            }
        } finally {
            logger.info("successfully loaded " + count + " records from CSV file");

            parser.close();
            is.close();
        }

    } catch (Exception e) {
        getLogger().error("Could not load database file: {}", new Object[] { e.getMessage() });
        throw new InitializationException(e);
    }
}

From source file:org.apache.camel.dataformat.csv.CsvMarshaller.java

/**
 * Creates a new instance.
 *
 * @param format     CSV format
 * @param dataFormat Camel CSV data format
 * @return New instance
 */
public static CsvMarshaller create(CSVFormat format, CsvDataFormat dataFormat) {
    // If we don't want the header record, clear it
    if (format.getSkipHeaderRecord()) {
        format = format.withHeader((String[]) null);
    }

    String[] fixedColumns = dataFormat.getHeader();
    if (fixedColumns != null && fixedColumns.length > 0) {
        return new FixedColumnsMarshaller(format, fixedColumns);
    }
    return new DynamicColumnsMarshaller(format);
}