List of usage examples for `org.apache.commons.csv.CSVFormat#withHeader`
public CSVFormat withHeader(final ResultSetMetaData metaData) throws SQLException
From source file:org.apache.nifi.csv.CSVRecordReader.java
public CSVRecordReader(final InputStream in, final ComponentLog logger, final RecordSchema schema, final CSVFormat csvFormat, final boolean hasHeader, final boolean ignoreHeader, final String dateFormat, final String timeFormat, final String timestampFormat, final String encoding) throws IOException { this.schema = schema; final DateFormat df = dateFormat == null ? null : DataTypeUtils.getDateFormat(dateFormat); final DateFormat tf = timeFormat == null ? null : DataTypeUtils.getDateFormat(timeFormat); final DateFormat tsf = timestampFormat == null ? null : DataTypeUtils.getDateFormat(timestampFormat); LAZY_DATE_FORMAT = () -> df;/*from ww w . j ava 2 s .c o m*/ LAZY_TIME_FORMAT = () -> tf; LAZY_TIMESTAMP_FORMAT = () -> tsf; final Reader reader = new InputStreamReader(new BOMInputStream(in), encoding); CSVFormat withHeader; if (hasHeader) { withHeader = csvFormat.withSkipHeaderRecord(); if (ignoreHeader) { withHeader = withHeader.withHeader(schema.getFieldNames().toArray(new String[0])); } } else { withHeader = csvFormat.withHeader(schema.getFieldNames().toArray(new String[0])); } csvParser = new CSVParser(reader, withHeader); }
From source file:org.apache.nifi.processors.ParseCSV.ParseCSV.java
private CSVFormat buildFormat(String format, char delimiter, Boolean with_header, String custom_header) { CSVFormat csvFormat = null; // set pre built format if (format.equals("DEFAULT")) { csvFormat = CSVFormat.DEFAULT;/*from w w w. j a va 2 s.com*/ } else if (format.equals("EXCEL")) { csvFormat = CSVFormat.EXCEL; } if (with_header & custom_header != null) { csvFormat = csvFormat.withSkipHeaderRecord(true); csvFormat = csvFormat.withHeader(custom_header); } else if (with_header & custom_header == null) { csvFormat = csvFormat.withHeader(); } if (delimiter > 0) { csvFormat = csvFormat.withDelimiter(delimiter); } return csvFormat; }
From source file:org.apache.phoenix.util.CSVCommonsLoader.java
/** * default settings/*from www .ja v a2 s . c o m*/ * delimiter = ',' * quoteChar = '"', * escape = null * recordSeparator = CRLF, CR, or LF * ignore empty lines allows the last data line to have a recordSeparator * * @return CSVFormat based on constructor settings. */ private CSVFormat buildFormat() { CSVFormat format = CSVFormat.DEFAULT.withIgnoreEmptyLines(true) .withDelimiter(asControlCharacter(fieldDelimiter)).withQuote(asControlCharacter(quoteCharacter)); if (escapeCharacter != null) { format = format.withEscape(asControlCharacter(escapeCharacter)); } switch (headerSource) { case FROM_TABLE: // obtain headers from table, so format should not expect a header. break; case IN_LINE: // an empty string array triggers csv loader to grab the first line as the header format = format.withHeader(new String[0]); break; case SUPPLIED_BY_USER: // a populated string array supplied by the user format = format.withHeader(columns.toArray(new String[columns.size()])); break; default: throw new RuntimeException("Header source was unable to be inferred."); } return format; }
From source file:org.gephi.io.importer.plugin.file.spreadsheet.SpreadsheetUtils.java
public static CSVParser configureCSVParser(File file, Character fieldSeparator, Charset charset, boolean withFirstRecordAsHeader) throws IOException { if (fieldSeparator == null) { fieldSeparator = ','; }/* w w w . ja va 2s. c o m*/ CSVFormat csvFormat = CSVFormat.DEFAULT.withDelimiter(fieldSeparator).withEscape('\\') .withIgnoreEmptyLines(true).withNullString("").withIgnoreSurroundingSpaces(true).withTrim(true); if (withFirstRecordAsHeader) { csvFormat = csvFormat.withFirstRecordAsHeader().withAllowMissingColumnNames(false) .withIgnoreHeaderCase(false); } else { csvFormat = csvFormat.withHeader((String[]) null).withSkipHeaderRecord(false); } boolean hasBOM = false; try (FileInputStream is = new FileInputStream(file)) { CharsetToolkit charsetToolkit = new CharsetToolkit(is); hasBOM = charsetToolkit.hasUTF8Bom() || charsetToolkit.hasUTF16BEBom() || charsetToolkit.hasUTF16LEBom(); } catch (IOException e) { //NOOP } FileInputStream fileInputStream = new FileInputStream(file); InputStreamReader is = new InputStreamReader(fileInputStream, charset); if (hasBOM) { try { is.read(); } catch (IOException e) { // should never happen, as a file with no content // but with a BOM has at least one char } } return new CSVParser(is, csvFormat); }
From source file:org.thingsboard.server.service.install.cql.CassandraDbHelper.java
public static Path dumpCfIfExists(KeyspaceMetadata ks, Session session, String cfName, String[] columns, String[] defaultValues, String dumpPrefix, boolean printHeader) throws Exception { if (ks.getTable(cfName) != null) { Path dumpFile = Files.createTempFile(dumpPrefix, null); Files.deleteIfExists(dumpFile); CSVFormat csvFormat = CSV_DUMP_FORMAT; if (printHeader) { csvFormat = csvFormat.withHeader(columns); }//from w w w . j av a2s. co m try (CSVPrinter csvPrinter = new CSVPrinter(Files.newBufferedWriter(dumpFile), csvFormat)) { Statement stmt = new SimpleStatement("SELECT * FROM " + cfName); stmt.setFetchSize(1000); ResultSet rs = session.execute(stmt); Iterator<Row> iter = rs.iterator(); while (iter.hasNext()) { Row row = iter.next(); if (row != null) { dumpRow(row, columns, defaultValues, csvPrinter); } } } return dumpFile; } else { return null; } }
From source file:org.thingsboard.server.service.install.sql.SqlDbHelper.java
/**
 * Dumps the given SQL table to a temporary CSV file, if the table exists.
 *
 * @param conn          open JDBC connection
 * @param tableName     table to dump
 * @param columns       column names to dump, in order
 * @param defaultValues fallback values substituted by {@code dumpRow} for missing columns
 * @param dumpPrefix    prefix for the temporary file name
 * @param printHeader   whether to emit {@code columns} as a CSV header row
 * @return path of the dump file, or null when the table does not exist
 * @throws Exception if the query or file I/O fails
 */
public static Path dumpTableIfExists(Connection conn, String tableName, String[] columns, String[] defaultValues, String dumpPrefix, boolean printHeader) throws Exception {
    // Guard clause: nothing to dump when the table is absent.
    if (!tableExists(conn, tableName)) {
        return null;
    }
    Path dumpFile = Files.createTempFile(dumpPrefix, null);
    Files.deleteIfExists(dumpFile);
    CSVFormat csvFormat = printHeader ? CSV_DUMP_FORMAT.withHeader(columns) : CSV_DUMP_FORMAT;
    try (CSVPrinter csvPrinter = new CSVPrinter(Files.newBufferedWriter(dumpFile), csvFormat);
         PreparedStatement stmt = conn.prepareStatement("SELECT * FROM " + tableName);
         ResultSet resultSet = stmt.executeQuery()) {
        // Map upper-cased column names to their 1-based JDBC indices for dumpRow.
        ResultSetMetaData metaData = resultSet.getMetaData();
        Map<String, Integer> columnIndexMap = new HashMap<>();
        for (int i = 1; i <= metaData.getColumnCount(); i++) {
            columnIndexMap.put(metaData.getColumnName(i).toUpperCase(), i);
        }
        while (resultSet.next()) {
            dumpRow(resultSet, columnIndexMap, columns, defaultValues, csvPrinter);
        }
    }
    return dumpFile;
}