List of usage examples for org.apache.commons.csv CSVParser close
@Override public void close() throws IOException
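A minimal, self-contained sketch of this close() call before the longer examples below (the file name data.csv and its id/name columns are assumptions for illustration, not taken from the sources listed here). Because CSVParser implements Closeable, a try-with-resources block invokes close() automatically, even if an exception is thrown while iterating over records:

import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvParserCloseExample {
    public static void main(String[] args) throws Exception {
        Reader reader = Files.newBufferedReader(Paths.get("data.csv"), StandardCharsets.UTF_8);
        // try-with-resources calls CSVParser.close(), which also closes the wrapped reader
        try (CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader())) {
            for (CSVRecord record : parser) {
                System.out.println(record.get("id") + " -> " + record.get("name"));
            }
        }
    }
}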
From source file: phonedirectory.PhoneDirectoryModel.java
public void csvToDatabase(String csv, String column[]) {
    try {
        Connection con = DriverManager.getConnection(url, user, password);
        stmt1 = con.createStatement();
        int i;
        CSVParser parser = new CSVParser(new FileReader(csv), format);
        for (CSVRecord record : parser) {
            // first two CSV columns hold the person's name and address
            stmt = con.prepareStatement("insert into person(name,address) values(?,?)");
            stmt.setString(1, record.get(0));
            stmt.setString(2, record.get(1));
            stmt.executeUpdate();
            stmt.close();
            // look up the person_id of the row just inserted (last row in the table)
            rs = stmt1.executeQuery("select person_id from person");
            rs.last();
            int person_id = rs.getInt(1);
            // remaining CSV columns are phone numbers, stored one row per number
            stmt = con.prepareStatement("insert into phone values(?,?,?)");
            for (i = 2; i < record.size(); i++) {
                stmt.setString(1, record.get(i));
                stmt.setString(2, column[i]);
                stmt.setInt(3, person_id);
                stmt.executeUpdate();
            }
            stmt.close();
        }
        parser.close();
        con.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file: us.parr.animl.data.DataTable.java
public static DataTable loadCSV(String fileName, String formatType, VariableType[] colTypesOverride,
                                String[] colNamesOverride, boolean hasHeaderRow) {
    try {
        // use apache commons io + csv to load but convert to list of String[]
        // byte-order markers are handled if present at start of file.
        FileInputStream fis = new FileInputStream(fileName);
        final Reader reader = new InputStreamReader(new BOMInputStream(fis), "UTF-8");
        CSVFormat format;
        if (formatType == null) {
            format = hasHeaderRow ? CSVFormat.RFC4180.withHeader() : CSVFormat.RFC4180;
        } else {
            switch (formatType.toLowerCase()) {
            case "tsv":
                format = hasHeaderRow ? CSVFormat.TDF.withHeader() : CSVFormat.TDF;
                break;
            case "mysql":
                format = hasHeaderRow ? CSVFormat.MYSQL.withHeader() : CSVFormat.MYSQL;
                break;
            case "excel":
                format = hasHeaderRow ? CSVFormat.EXCEL.withHeader() : CSVFormat.EXCEL;
                break;
            case "rfc4180":
            default:
                format = hasHeaderRow ? CSVFormat.RFC4180.withHeader() : CSVFormat.RFC4180;
                break;
            }
        }
        final CSVParser parser = new CSVParser(reader, format);
        List<String[]> rows = new ArrayList<>();
        int numHeaderNames = parser.getHeaderMap().size();
        try {
            for (final CSVRecord record : parser) {
                String[] row = new String[record.size()];
                for (int j = 0; j < record.size(); j++) {
                    row[j] = record.get(j);
                }
                rows.add(row);
            }
        } finally {
            parser.close();
            reader.close();
        }
        VariableType[] actualTypes = computeColTypes(rows, numHeaderNames);
        Set<String> colNameSet = parser.getHeaderMap().keySet();
        String[] colNames = colNameSet.toArray(new String[colNameSet.size()]);
        if (colNamesOverride != null) {
            colNames = colNamesOverride;
        }
        if (colTypesOverride != null) {
            actualTypes = colTypesOverride;
        }
        return fromStrings(rows, actualTypes, colNames, false);
    } catch (Exception e) {
        throw new IllegalArgumentException("Can't open and/or read " + fileName, e);
    }
}
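A hypothetical call to this loader (the file name measurements.tsv and the wrapping class are placeholders, not part of the original source): passing null for both override arrays lets loadCSV infer column types from the data and take column names from the header row.

import us.parr.animl.data.DataTable;

public class LoadTsvExample {
    public static void main(String[] args) {
        // "tsv" selects the tab-delimited CSVFormat.TDF branch; true marks the first row as a header
        DataTable table = DataTable.loadCSV("measurements.tsv", "tsv", null, null, true);
        System.out.println("loaded table: " + table);
    }
}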