Example usage for org.apache.commons.csv CSVParser close

Introduction

This page collects usage examples for org.apache.commons.csv.CSVParser.close().

Prototype

@Override
public void close() throws IOException 

Source Link

Document

Closes resources.
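
A minimal sketch (not taken from the projects below) showing the two common ways to release the parser: an explicit close() in a finally block, as in the usage examples that follow, and try-with-resources, which works because CSVParser implements java.io.Closeable.

import java.io.IOException;
import java.io.StringReader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvParserCloseSketch {
    public static void main(String[] args) throws IOException {
        // Explicit close in a finally block, mirroring the usage examples below.
        CSVParser parser = null;
        try {
            parser = new CSVParser(new StringReader("a,b\n1,2\n"), CSVFormat.DEFAULT);
            for (CSVRecord record : parser) {
                System.out.println(record.get(0) + " / " + record.get(1));
            }
        } finally {
            if (parser != null) {
                parser.close(); // releases the underlying reader
            }
        }

        // CSVParser implements Closeable, so try-with-resources closes it automatically.
        try (CSVParser p = CSVParser.parse("a,b\n3,4\n", CSVFormat.DEFAULT)) {
            for (CSVRecord record : p) {
                System.out.println(record.get(0) + " / " + record.get(1));
            }
        }
    }
}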

Usage

From source file:org.apache.phoenix.end2end.CSVCommonsLoaderIT.java

@Test
public void testCSVCommonsUpsertBadEncapsulatedControlChars() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + ENCAPSULATED_CHARS_TABLE
                + "(MYKEY VARCHAR NOT NULL PRIMARY KEY, MYVALUE VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, ENCAPSULATED_CHARS_TABLE,
                Collections.<String>emptyList(), true);
        try {
            csvUtil.upsert(new StringReader(CSV_VALUES_BAD_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER));
            fail();
        } catch (RuntimeException e) {
            assertTrue(e.getMessage(),
                    e.getMessage().contains("invalid char between encapsulated token and delimiter"));
        }
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
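
The "invalid char between encapsulated token and delimiter" message asserted above is produced by Commons CSV itself when a character follows a closing quote before the next delimiter or end of line. A hypothetical minimal reproduction directly against CSVParser (independent of Phoenix, assuming the usual Commons CSV imports):

// A stray character after the closing quote of an encapsulated token is rejected.
try (CSVParser p = CSVParser.parse("\"a\"x,b\n", CSVFormat.DEFAULT)) {
    p.getRecords(); // throws IOException: "... invalid char between encapsulated token and delimiter"
} catch (IOException e) {
    System.out.println(e.getMessage());
}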

From source file:org.apache.phoenix.end2end.CSVCommonsLoaderIT.java

@Test
public void testCSVCommonsUpsert_WithArray() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS ARRAY_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, VALARRAY INTEGER ARRAY);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "ARRAY_TABLE", ImmutableList.<String>of(), true,
                ',', '"', null, "!");
        csvUtil.upsert(new StringReader("ID,VALARRAY\n" + "1,2!3!4\n"));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement("SELECT ID, VALARRAY FROM ARRAY_TABLE");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        assertEquals(PArrayDataType.instantiatePhoenixArray(PInteger.INSTANCE, new Integer[] { 2, 3, 4 }),
                phoenixResultSet.getArray(2));
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}

From source file:org.apache.phoenix.end2end.CSVCommonsLoaderIT.java

@Test
public void testCSVCommonsUpsert_WithTimestamp() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS TS_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, TS TIMESTAMP);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "TS_TABLE", ImmutableList.<String>of(), true, ',',
                '"', null, "!");
        csvUtil.upsert(new StringReader("ID,TS\n" + "1,1970-01-01 00:00:10\n" + "2,1970-01-01 00:00:10.123\n"));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement("SELECT ID, TS FROM TS_TABLE ORDER BY ID");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        assertEquals(10000L, phoenixResultSet.getTimestamp(2).getTime());
        assertTrue(phoenixResultSet.next());
        assertEquals(2L, phoenixResultSet.getLong(1));
        assertEquals(10123L, phoenixResultSet.getTimestamp(2).getTime());
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}

From source file:org.apache.phoenix.end2end.CSVCommonsLoaderTest.java

@Test
public void testTDVCommonsUpsert() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {

        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);

        // Upsert TDV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE, Collections.<String>emptyList(),
                true, Arrays.asList("\t", "0", "0"));
        csvUtil.upsert(new StringReader(STOCK_TDV_VALUES_WITH_HEADER));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement("SELECT SYMBOL, COMPANY FROM " + STOCK_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(STOCK_TDV_VALUES_WITH_HEADER), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}

From source file:org.apache.phoenix.end2end.CSVCommonsLoaderTest.java

@Test
public void testCSVUpsertWithCustomDelimiters() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE, Arrays.<String>asList(STOCK_COLUMNS),
                true, Arrays.asList("1", "2", "3"));
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_DELIMITER));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement("SELECT SYMBOL, COMPANY FROM " + STOCK_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(STOCK_CSV_VALUES_WITH_DELIMITER), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}

From source file:org.apache.phoenix.end2end.CSVCommonsLoaderTest.java

@Test
public void testAllDatatypes() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + DATATYPE_TABLE
                + " (CKEY VARCHAR NOT NULL PRIMARY KEY,"
                + "  CVARCHAR VARCHAR, CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn, new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, DATATYPE_TABLE, Collections.<String>emptyList(),
                true);
        csvUtil.upsert(new StringReader(DATATYPES_CSV_VALUES));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn.prepareStatement(
                "SELECT CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
                        + DATATYPE_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES), csvUtil.getFormat());

        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            int size = record.size();
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getObject(i + 1).toString().toUpperCase());
                i++;
                // stop before CTIME and CDATE, which are checked separately below
                if (i >= size - 2)
                    break;
            }
            // special case for matching date, time values
            assertEquals(DateUtil.parseTime(record.get(8)), phoenixResultSet.getTime("CTIME"));
            assertEquals(DateUtil.parseDate(record.get(9)), phoenixResultSet.getDate("CDATE"));
        }

        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}

From source file:org.apache.phoenix.pherf.result.impl.CSVFileResultHandler.java

public synchronized List<Result> read() throws IOException {
    CSVParser parser = null;
    util.ensureBaseResultDirExists();
    try {
        File file = new File(resultFileName);
        parser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT);
        List<CSVRecord> records = parser.getRecords();
        List<Result> results = new ArrayList<>();
        String header = null;
        for (CSVRecord record : records) {

            // First record is the CSV Header
            if (record.getRecordNumber() == 1) {
                header = record.toString();
                continue;
            }
            List<ResultValue> resultValues = new ArrayList<>();
            for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) {
                resultValues.add(new ResultValue(val));
            }
            Result result = new Result(resultFileDetails, header, resultValues);
            results.add(result);
        }
        return results;
    } finally {
        if (parser != null) {
            parser.close();
        }
    }
}

From source file:org.apache.phoenix.pherf.util.GoogleChartGenerator.java

/**
 * Reads the aggregate file and converts it to a DataNode.
 * @param label
 * @throws Exception
 */
private void read(String label) throws Exception {
    String resultFileName = resultDir + PherfConstants.PATH_SEPARATOR + PherfConstants.RESULT_PREFIX + label
            + ResultFileDetails.CSV_AGGREGATE_PERFORMANCE.getExtension();

    FileReader in = new FileReader(resultFileName);
    final CSVParser parser = new CSVParser(in, CSVFormat.DEFAULT.withHeader());

    for (CSVRecord record : parser) {
        String group = record.get("QUERY_GROUP");
        String query = record.get("QUERY");
        String explain = record.get("EXPLAIN_PLAN");
        String tenantId = record.get("TENANT_ID");
        long avgTime = Long.parseLong(record.get("AVG_TIME_MS"));
        long minTime = Long.parseLong(record.get("AVG_MIN_TIME_MS"));
        long numRuns = Long.parseLong(record.get("RUN_COUNT"));
        long rowCount = Long.parseLong(record.get("RESULT_ROW_COUNT"));
        Node node = new Node(minTime, avgTime, numRuns, explain, query, tenantId, label, rowCount);

        if (datanodes.containsKey(group)) {
            datanodes.get(group).getDataSet().put(label, node);
        } else {
            datanodes.put(group, new DataNode(label, node));
        }
    }
    parser.close();
}

From source file:org.apache.phoenix.util.CSVCommonsLoader.java

/**
 * Data is batched up according to the connection's batch size.
 * Each column's PDataType is read from the table metadata and used to
 * convert column values to the correct type before upsert.
 *
 * The format is determined by the supplied csvParser.
 *
 * @param csvParser
 *            CSVParser instance
 * @throws Exception
 */
public void upsert(CSVParser csvParser) throws Exception {
    List<ColumnInfo> columnInfoList = buildColumnInfoList(csvParser);

    boolean wasAutoCommit = conn.getAutoCommit();
    try {
        conn.setAutoCommit(false);
        long start = System.currentTimeMillis();
        CsvUpsertListener upsertListener = new CsvUpsertListener(conn, conn.getMutateBatchSize(), isStrict);
        CsvUpsertExecutor csvUpsertExecutor = CsvUpsertExecutor.create(conn, tableName, columnInfoList,
                upsertListener, arrayElementSeparator);

        csvUpsertExecutor.execute(csvParser);
        csvUpsertExecutor.close();

        conn.commit();
        double elapsedDuration = ((System.currentTimeMillis() - start) / 1000.0);
        System.out.println("CSV Upsert complete. " + upsertListener.getTotalUpsertCount() + " rows upserted");
        System.out.println("Time: " + elapsedDuration + " sec(s)\n");

    } finally {

        // release reader resources.
        if (csvParser != null) {
            csvParser.close();
        }
        if (wasAutoCommit) {
            conn.setAutoCommit(true);
        }
    }
}
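
A hypothetical caller sketch for the method above (to be placed in a method that throws Exception), assuming an open PhoenixConnection conn, an existing table named EXAMPLE_TABLE whose columns match the CSV header, and the constructor and getFormat() calls shown in the tests earlier on this page:

// Hypothetical usage of upsert(CSVParser); the connection, table name and data are assumptions.
CSVCommonsLoader loader = new CSVCommonsLoader(conn, "EXAMPLE_TABLE",
        Collections.<String>emptyList(), true);
CSVParser csvParser = new CSVParser(new StringReader("MYKEY,MYVALUE\nk1,v1\n"),
        loader.getFormat());
loader.upsert(csvParser); // upsert() closes the parser in its finally block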

From source file:org.apache.ranger.unixusersync.process.FileSourceUserGroupBuilder.java

public Map<String, List<String>> readTextFile(File textFile) throws Exception {

    Map<String, List<String>> ret = new HashMap<String, List<String>>();

    String delimiter = config.getUserSyncFileSourceDelimiter();

    CSVFormat csvFormat = CSVFormat.newFormat(delimiter.charAt(0));

    CSVParser csvParser = new CSVParser(new BufferedReader(new FileReader(textFile)), csvFormat);

    List<CSVRecord> csvRecordList = csvParser.getRecords();

    if (csvRecordList != null) {
        for (CSVRecord csvRecord : csvRecordList) {
            List<String> groups = new ArrayList<String>();
            String user = csvRecord.get(0);

            user = user.replaceAll("^\"|\"$", "");

            int i = csvRecord.size();

            for (int j = 1; j < i; j++) {
                String group = csvRecord.get(j);
                if (group != null && !group.isEmpty()) {
                    group = group.replaceAll("^\"|\"$", "");
                    groups.add(group);
                }
            }
            ret.put(user, groups);
        }
    }

    csvParser.close();

    return ret;
}