Example usage for java.sql ResultSetMetaData getColumnName

Introduction

This page collects example usages of java.sql.ResultSetMetaData.getColumnName drawn from real open-source projects.

Prototype

String getColumnName(int column) throws SQLException;

Document

Get the designated column's name.
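
Before the project examples, here is a minimal, self-contained sketch of the typical call pattern. The in-memory H2 URL and the literal query are illustrative assumptions only and are not taken from any example below; any JDBC connection behaves the same way.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnNameDemo {
    public static void main(String[] args) throws SQLException {
        // Placeholder in-memory H2 URL and query; requires the H2 driver on the classpath.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT 1 AS id, 'a' AS name")) {
            ResultSetMetaData rsmd = rs.getMetaData();
            // JDBC column indices are 1-based: iterate from 1 to getColumnCount().
            for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                System.out.println(rsmd.getColumnName(i));
            }
        }
    }
}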

Usage

From source file:org.apache.sqoop.connector.jdbc.GenericJdbcExecutor.java

public String[] getQueryColumns(String query) {
    try (Statement statement = createStatement(); ResultSet rs = statement.executeQuery(query)) {
        ResultSetMetaData rsmd = rs.getMetaData();
        int count = rsmd.getColumnCount();
        String[] columns = new String[count];
        for (int i = 0; i < count; i++) {
            columns[i] = rsmd.getColumnName(i + 1);
        }

        return columns;

    } catch (SQLException e) {
        logSQLException(e);
        throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0003, e);
    }
}
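
Note the i + 1 in the loop body: getColumnName takes a 1-based column index, while the Java array is 0-based. The same pattern recurs in several of the examples below.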

From source file:org.apache.empire.db.codegen.CodeGenParser.java

/**
 * Adds a DBColumn object to the given DBTable. The DBColumn is created from
 * the given ResultSet.
 */
private DBTableColumn addColumn(DBTable t, ResultSet rs) throws SQLException {
    String name = rs.getString("COLUMN_NAME");
    DataType empireType = getEmpireDataType(rs.getInt("DATA_TYPE"));

    double colSize = rs.getInt("COLUMN_SIZE");
    if (empireType == DataType.DECIMAL || empireType == DataType.FLOAT) { // decimal digits
        int decimalDig = rs.getInt("DECIMAL_DIGITS");
        if (decimalDig > 0) { // parse
            try {
                int intSize = rs.getInt("COLUMN_SIZE");
                colSize = Double.parseDouble(String.valueOf(intSize) + '.' + decimalDig);
            } catch (Exception e) {
                log.error("Failed to parse decimal digits for column " + name);
            }
        }
        // make integer?
        if (colSize < 1.0d) { // Turn into an integer
            empireType = DataType.INTEGER;
        }
    }

    // mandatory field?
    boolean required = false;
    String defaultValue = rs.getString("COLUMN_DEF");
    if (rs.getString("IS_NULLABLE").equalsIgnoreCase("NO"))
        required = true;

    // The following is a hack for MySQL which currently gets sent a string "CURRENT_TIMESTAMP" from the Empire-db driver for MySQL.
    // This will avoid the driver problem because CURRENT_TIMESTAMP in the db will just do the current datetime.
    // Essentially, Empire-db needs the concept of default values of one type that get mapped to another.
    // In this case, MySQL "CURRENT_TIMESTAMP" for Types.TIMESTAMP needs to emit from the Empire-db driver the null value and not "CURRENT_TIMESTAMP".
    if (rs.getInt("DATA_TYPE") == Types.TIMESTAMP && defaultValue != null
            && defaultValue.equals("CURRENT_TIMESTAMP")) {
        required = false; // It is in fact not required even though MySQL schema is required because it has a default value. Generally, should Empire-db emit (required && defaultValue != null) to truly determine if a column is required?
        defaultValue = null; // If null (and required per schema?) MySQL will apply internal default value.
    }

    // AUTOINC indicator is not in java.sql.Types but rather meta data from DatabaseMetaData.getColumns()
    // getEmpireDataType() above is not enough to support AUTOINC as it will only return DataType.INTEGER
    DataType originalType = empireType;
    ResultSetMetaData metaData = rs.getMetaData();
    int colCount = metaData.getColumnCount();
    String colName;
    for (int i = 1; i <= colCount; i++) {
        colName = metaData.getColumnName(i);
        // MySQL matches on IS_AUTOINCREMENT column.
        // SQL Server matches on TYPE_NAME column with identity somewhere in the string value.
        if ((colName.equalsIgnoreCase("IS_AUTOINCREMENT") && rs.getString(i).equalsIgnoreCase("YES"))
                || (colName.equals("TYPE_NAME") && rs.getString(i).matches(".*(?i:identity).*"))) {
            empireType = DataType.AUTOINC;

        }
    }

    // Moved here from the return statement below so we can add
    // some AUTOINC meta data to the column to be used by
    // the ParserUtil and ultimately the template.
    log.info("\tCOLUMN:\t" + name + " (" + empireType + ")");
    DBTableColumn col = t.addColumn(name, empireType, colSize, required, defaultValue);

    // We still need to know the base data type for this AUTOINC
    // because the Record g/setters need to know this, right?
    // So, let's add it as meta data every time the column is AUTOINC
    // and reference it in the template.
    if (empireType.equals(DataType.AUTOINC))
        col.setAttribute("AutoIncDataType", originalType);
    return col;

}
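
This example also shows a cross-vendor use of getColumnName: because the columns returned by DatabaseMetaData.getColumns() differ between drivers, the code scans the metadata for an IS_AUTOINCREMENT column (MySQL) or a TYPE_NAME containing "identity" (SQL Server) instead of reading a fixed column.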

From source file:org.apache.tika.eval.ComparerBatchTest.java

private Map<String, String> getRow(String table, String where) throws Exception {
    String sql = getSql("*", table, where);
    Map<String, String> results = new HashMap<String, String>();
    Statement st = null;

    try {
        st = conn.createStatement();
        ResultSet rs = st.executeQuery(sql);
        ResultSetMetaData m = rs.getMetaData();
        int rows = 0;
        while (rs.next()) {
            if (rows > 0) {
                throw new RuntimeException("returned more than one row!");
            }
            for (int i = 1; i <= m.getColumnCount(); i++) {
                results.put(m.getColumnName(i), rs.getString(i));
            }
            rows++;
        }
    } finally {
        if (st != null) {
            st.close();
        }
    }
    return results;

}
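
Only the Statement is closed in the finally block; per the JDBC contract, closing a Statement also closes its current ResultSet.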

From source file:at.alladin.rmbt.statisticServer.export.ExportResource.java

@Get
public Representation request(final String entity) {
    //Before doing anything => check if a cached file already exists and is new enough
    String property = System.getProperty("java.io.tmpdir");

    final String filename_zip;
    final String filename_csv;

    //allow filtering by month/year
    int year = -1;
    int month = -1;
    int hours = -1;
    boolean hoursExport = false;
    boolean dateExport = false;

    if (getRequest().getAttributes().containsKey("hours")) { // export by hours
        try {
            hours = Integer.parseInt(getRequest().getAttributes().get("hours").toString());
        } catch (NumberFormatException ex) {
            //Nothing -> just fall back
        }
        if (hours <= 7 * 24 && hours >= 1) { //limit to 1 week (avoid DoS)
            hoursExport = true;
        }
    } else if (getRequest().getAttributes().containsKey("year")) { // export by month/year
        try {
            year = Integer.parseInt(getRequest().getAttributes().get("year").toString());
            month = Integer.parseInt(getRequest().getAttributes().get("month").toString());
        } catch (NumberFormatException ex) {
            //Nothing -> just fall back
        }
        if (year < 2099 && month > 0 && month <= 12 && year > 2000) {
            dateExport = true;
        }
    }

    if (hoursExport) {
        filename_zip = FILENAME_ZIP_HOURS.replace("%HOURS%", String.format("%03d", hours));
        filename_csv = FILENAME_CSV_HOURS.replace("%HOURS%", String.format("%03d", hours));
        cacheThresholdMs = 5 * 60 * 1000; //5 minutes
    } else if (dateExport) {
        filename_zip = FILENAME_ZIP.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%",
                String.format("%02d", month));
        filename_csv = FILENAME_CSV.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%",
                String.format("%02d", month));
        cacheThresholdMs = 23 * 60 * 60 * 1000; //23 hours
    } else {
        filename_zip = FILENAME_ZIP_CURRENT;
        filename_csv = FILENAME_CSV_CURRENT;
        cacheThresholdMs = 3 * 60 * 60 * 1000; //3 hours
    }

    final File cachedFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv));
    final File generatingFile = new File(
            property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp");
    if (cachedFile.exists()) {

        //check if file has been recently created OR a file is currently being created
        if (((cachedFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())
                || (generatingFile.exists()
                        && (generatingFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())) {

            //if so, return the cached file instead of a cost-intensive new one
            final OutputRepresentation result = new OutputRepresentation(
                    zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) {

                @Override
                public void write(OutputStream out) throws IOException {
                    InputStream is = new FileInputStream(cachedFile);
                    IOUtils.copy(is, out);
                    out.close();
                }

            };
            if (zip) {
                final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT);
                disposition.setFilename(filename_zip);
                result.setDisposition(disposition);
            }
            return result;

        }
    }

    final String timeClause;

    if (dateExport)
        timeClause = " AND (EXTRACT (month FROM t.time AT TIME ZONE 'UTC') = " + month
                + ") AND (EXTRACT (year FROM t.time AT TIME ZONE 'UTC') = " + year + ") ";
    else if (hoursExport)
        timeClause = " AND time > now() - interval '" + hours + " hours' ";
    else
        timeClause = " AND time > current_date - interval '31 days' ";

    final String sql = "SELECT" + " ('P' || t.open_uuid) open_uuid,"
            + " ('O' || t.open_test_uuid) open_test_uuid,"
            + " to_char(t.time AT TIME ZONE 'UTC', 'YYYY-MM-DD HH24:MI:SS') time_utc,"
            + " nt.group_name cat_technology," + " nt.name network_type,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN"
            + " t.geo_lat" + " WHEN (t.geo_accuracy < ?) THEN" + " ROUND(t.geo_lat*1111)/1111" + " ELSE null"
            + " END) lat,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN"
            + " t.geo_long" + " WHEN (t.geo_accuracy < ?) THEN" + " ROUND(t.geo_long*741)/741 " + " ELSE null"
            + " END) long," + " (CASE WHEN ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN"
            + " 'rastered'" + //make raster transparent
            " ELSE t.geo_provider" + " END) loc_src,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') "
            + " THEN round(t.geo_accuracy::float * 10)/10 "
            + " WHEN (t.geo_accuracy < 100) AND ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN 100"
            + // limit accuracy to 100m
            " WHEN (t.geo_accuracy < ?) THEN round(t.geo_accuracy::float * 10)/10"
            + " ELSE null END) loc_accuracy, "
            + " (CASE WHEN (t.zip_code < 1000 OR t.zip_code > 9999) THEN null ELSE t.zip_code END) zip_code,"
            + " t.gkz gkz," + " t.country_location country_location," + " t.speed_download download_kbit,"
            + " t.speed_upload upload_kbit," + " round(t.ping_median::float / 100000)/10 ping_ms,"
            + " t.lte_rsrp," + " t.lte_rsrq," + " ts.name server_name," + " duration test_duration,"
            + " num_threads," + " t.plattform platform," + " COALESCE(adm.fullname, t.model) model,"
            + " client_software_version client_version," + " network_operator network_mcc_mnc,"
            + " network_operator_name network_name," + " network_sim_operator sim_mcc_mnc," + " nat_type,"
            + " public_ip_asn asn," + " client_public_ip_anonymized ip_anonym,"
            + " (ndt.s2cspd*1000)::int ndt_download_kbit," + " (ndt.c2sspd*1000)::int ndt_upload_kbit,"
            + " COALESCE(t.implausible, false) implausible," + " t.signal_strength" + " FROM test t"
            + " LEFT JOIN network_type nt ON nt.uid=t.network_type"
            + " LEFT JOIN device_map adm ON adm.codename=t.model"
            + " LEFT JOIN test_server ts ON ts.uid=t.server_id" + " LEFT JOIN test_ndt ndt ON t.uid=ndt.test_id"
            + " WHERE " + " t.deleted = false" + timeClause + " AND status = 'FINISHED'" + " ORDER BY t.uid";

    final String[] columns;
    final List<String[]> data = new ArrayList<>();
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        ps = conn.prepareStatement(sql);

        //insert filter for accuracy
        double accuracy = Double.parseDouble(settings.getString("RMBT_GEO_ACCURACY_DETAIL_LIMIT"));
        ps.setDouble(1, accuracy);
        ps.setDouble(2, accuracy);
        ps.setDouble(3, accuracy);
        ps.setDouble(4, accuracy);
        ps.setDouble(5, accuracy);
        ps.setDouble(6, accuracy);

        if (!ps.execute())
            return null;
        rs = ps.getResultSet();

        final ResultSetMetaData meta = rs.getMetaData();
        final int colCnt = meta.getColumnCount();
        columns = new String[colCnt];
        for (int i = 0; i < colCnt; i++)
            columns[i] = meta.getColumnName(i + 1);

        while (rs.next()) {
            final String[] line = new String[colCnt];

            for (int i = 0; i < colCnt; i++) {
                final Object obj = rs.getObject(i + 1);
                line[i] = obj == null ? null : obj.toString();
            }

            data.add(line);
        }
    } catch (final SQLException e) {
        e.printStackTrace();
        return null;
    } finally {
        try {
            if (rs != null)
                rs.close();
            if (ps != null)
                ps.close();
        } catch (final SQLException e) {
            e.printStackTrace();
        }
    }

    final OutputRepresentation result = new OutputRepresentation(
            zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) {
        @Override
        public void write(OutputStream out) throws IOException {
            //cache in file => create a temporary file first (to
            // handle errors while fulfilling a request)
            String property = System.getProperty("java.io.tmpdir");
            final File cachedFile = new File(
                    property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp");
            OutputStream outf = new FileOutputStream(cachedFile);

            if (zip) {
                final ZipOutputStream zos = new ZipOutputStream(outf);
                final ZipEntry zeLicense = new ZipEntry("LIZENZ.txt");
                zos.putNextEntry(zeLicense);
                final InputStream licenseIS = getClass().getResourceAsStream("DATA_LICENSE.txt");
                IOUtils.copy(licenseIS, zos);
                licenseIS.close();

                final ZipEntry zeCsv = new ZipEntry(filename_csv);
                zos.putNextEntry(zeCsv);
                outf = zos;
            }

            final OutputStreamWriter osw = new OutputStreamWriter(outf);
            final CSVPrinter csvPrinter = new CSVPrinter(osw, csvFormat);

            for (final String c : columns)
                csvPrinter.print(c);
            csvPrinter.println();

            for (final String[] line : data) {
                for (final String f : line)
                    csvPrinter.print(f);
                csvPrinter.println();
            }
            csvPrinter.flush();

            if (zip)
                outf.close();

            //if we reach this code, the data is now cached in a temporary tmp-file
            //so, rename the file for "production use"
            //concurrency issues should be solved by the operating system
            File newCacheFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv));
            Files.move(cachedFile.toPath(), newCacheFile.toPath(), StandardCopyOption.ATOMIC_MOVE,
                    StandardCopyOption.REPLACE_EXISTING);

            FileInputStream fis = new FileInputStream(newCacheFile);
            IOUtils.copy(fis, out);
            fis.close();
            out.close();
        }
    };
    if (zip) {
        final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT);
        disposition.setFilename(filename_zip);
        result.setDisposition(disposition);
    }

    return result;
}
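
In this export, getColumnName is used once, to build the header row of the generated CSV from the export query's result columns (the SELECT-list aliases such as open_uuid and download_kbit).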

From source file:com.cws.esolutions.security.dao.reference.impl.SecurityReferenceDAOImpl.java

/**
 * @see com.cws.esolutions.security.dao.reference.interfaces.ISecurityReferenceDAO#obtainSecurityQuestionList()
 */
public synchronized List<String> obtainSecurityQuestionList() throws SQLException {
    final String methodName = ISecurityReferenceDAO.CNAME + "#obtainSecurityQuestionList() throws SQLException";

    if (DEBUG) {
        DEBUGGER.debug(methodName);
    }

    Connection sqlConn = null;
    ResultSet resultSet = null;
    CallableStatement stmt = null;
    List<String> questionList = null;

    try {
        sqlConn = dataSource.getConnection();

        if (sqlConn.isClosed()) {
            throw new SQLException("Unable to obtain application datasource connection");
        }

        sqlConn.setAutoCommit(true);
        stmt = sqlConn.prepareCall("{CALL retrieve_user_questions()}");

        if (DEBUG) {
            DEBUGGER.debug("CallableStatement: {}", stmt);
        }

        if (stmt.execute()) {
            resultSet = stmt.getResultSet();
            resultSet.last();
            int iRowCount = resultSet.getRow();

            if (iRowCount == 0) {
                throw new SQLException("No security questions are currently configured.");
            }

            resultSet.first();
            ResultSetMetaData resultData = resultSet.getMetaData();

            int iColumns = resultData.getColumnCount();

            questionList = new ArrayList<String>();

            for (int x = 1; x <= iColumns; x++) {
                if (DEBUG) {
                    DEBUGGER.debug("resultSet.getObject: {}", resultSet.getObject(resultData.getColumnName(x)));
                }

                // check if column is null
                resultSet.getObject(resultData.getColumnName(x));

                // if the column was null, insert n/a, otherwise, insert the column's contents
                questionList.add((String) (resultSet.wasNull() ? "N/A"
                        : resultSet.getObject(resultData.getColumnName(x))));
            }
        }
    } catch (SQLException sqx) {
        throw new SQLException(sqx.getMessage(), sqx);
    } finally {
        if (resultSet != null) {
            resultSet.close();
        }

        if (stmt != null) {
            stmt.close();
        }

        if ((sqlConn != null) && (!(sqlConn.isClosed()))) {
            sqlConn.close();
        }
    }

    return questionList;
}
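
Instead of reading values by index, this DAO resolves each column's name via getColumnName and then fetches the value by name, so the subsequent wasNull() check can substitute "N/A" for SQL NULLs.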

From source file:com.netspective.axiom.sql.ResultSetUtils.java

/**
 * Create a text array that contains the headings of the columns in the given result set.
 *
 * @param resultSet                  The result set that we want to create column headers for
 * @param preferColumnLabelForHeader True to prefer the column label, falling back to the column name when no label is available; false to always use the column name
 *
 * @return The headings
 */
public String[] getColumnHeadings(ResultSet resultSet, boolean preferColumnLabelForHeader) throws SQLException {
    ResultSetMetaData rsmd = resultSet.getMetaData();
    int columnsCount = rsmd.getColumnCount();

    String[] header = new String[columnsCount];
    if (preferColumnLabelForHeader) {
        for (int i = 1; i <= columnsCount; i++) {
            String label = rsmd.getColumnLabel(i);
            if (label != null && label.length() > 0)
                header[i - 1] = label;
            else
                header[i - 1] = rsmd.getColumnName(i);
        }
    } else {
        for (int i = 1; i <= columnsCount; i++)
            header[i - 1] = rsmd.getColumnName(i);
    }

    return header;
}
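
The distinction the flag controls: getColumnLabel reports the title suggested with a SQL AS clause, while getColumnName reports the underlying column name. With preferColumnLabelForHeader set, the label wins whenever the driver supplies a non-empty one. (The original loops used i < columnsCount, which skipped the last column; they are corrected to i <= columnsCount above.)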

From source file:org.apache.nifi.processors.standard.util.TestJdbcCommon.java

@Test
public void testCreateSchemaTypes() throws SQLException, IllegalArgumentException, IllegalAccessException {
    final Set<Integer> fieldsToIgnore = new HashSet<>();
    fieldsToIgnore.add(Types.NULL);
    fieldsToIgnore.add(Types.OTHER);

    final Field[] fieldTypes = Types.class.getFields();
    for (final Field field : fieldTypes) {
        final Object fieldObject = field.get(null);
        final int type = (int) fieldObject;

        // skip the types we deliberately ignore (the original compared against
        // Types.NULL, which made the check always true and skipped every field)
        if (fieldsToIgnore.contains(type)) {
            continue;
        }

        final ResultSetMetaData metadata = mock(ResultSetMetaData.class);
        when(metadata.getColumnCount()).thenReturn(1);
        when(metadata.getColumnType(1)).thenReturn(type);
        when(metadata.getColumnName(1)).thenReturn(field.getName());
        when(metadata.getTableName(1)).thenReturn("table");

        final ResultSet rs = mock(ResultSet.class);
        when(rs.getMetaData()).thenReturn(metadata);

        try {
            JdbcCommon.createSchema(rs);
        } catch (final IllegalArgumentException | SQLException sqle) {
            sqle.printStackTrace();
            Assert.fail("Failed when using type " + field.getName());
        }
    }
}
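
Because ResultSetMetaData and ResultSet are interfaces, Mockito can stub getColumnCount, getColumnType and getColumnName directly; the test can therefore drive the schema mapping through every java.sql.Types constant without opening a database connection.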

From source file:com.kylinolap.query.test.KylinTestBase.java

protected int output(ResultSet resultSet, boolean needDisplay) throws SQLException {
    int count = 0;
    ResultSetMetaData metaData = resultSet.getMetaData();
    int columnCount = metaData.getColumnCount();
    StringBuilder sb = new StringBuilder("\n");
    if (needDisplay) {
        for (int i = 1; i <= columnCount; i++) {
            sb.append(metaData.getColumnName(i));
            sb.append("-");
            sb.append(metaData.getTableName(i));
            sb.append("-");
            sb.append(metaData.getColumnTypeName(i));
            if (i < columnCount) {
                sb.append("\t");
            } else {
                sb.append("\n");
            }
        }
    }

    while (resultSet.next()) {
        if (needDisplay) {
            for (int i = 1; i <= columnCount; i++) {
                sb.append(resultSet.getString(i));
                if (i < columnCount) {
                    sb.append("\t");
                } else {
                    sb.append("\n");
                }
            }
        }
        count++;
    }
    printInfo(sb.toString());
    return count;
}

From source file:org.apache.nifi.processors.standard.util.TestJdbcCommon.java

@Test
public void testConvertToAvroStreamForShort() throws SQLException, IOException {
    final ResultSetMetaData metadata = mock(ResultSetMetaData.class);
    when(metadata.getColumnCount()).thenReturn(1);
    when(metadata.getColumnType(1)).thenReturn(Types.TINYINT);
    when(metadata.getColumnName(1)).thenReturn("t_int");
    when(metadata.getTableName(1)).thenReturn("table");

    final ResultSet rs = mock(ResultSet.class);
    when(rs.getMetaData()).thenReturn(metadata);

    final AtomicInteger counter = new AtomicInteger(1);
    Mockito.doAnswer(new Answer<Boolean>() {
        @Override
        public Boolean answer(InvocationOnMock invocation) throws Throwable {
            return counter.getAndDecrement() > 0;
        }
    }).when(rs).next();

    final short s = 25;
    when(rs.getObject(Mockito.anyInt())).thenReturn(s);

    final ByteArrayOutputStream baos = new ByteArrayOutputStream();

    JdbcCommon.convertToAvroStream(rs, baos, false);

    final byte[] serializedBytes = baos.toByteArray();

    final InputStream instream = new ByteArrayInputStream(serializedBytes);

    final DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
    try (final DataFileStream<GenericRecord> dataFileReader = new DataFileStream<>(instream, datumReader)) {
        GenericRecord record = null;
        while (dataFileReader.hasNext()) {
            record = dataFileReader.next(record);
            assertEquals(Short.toString(s), record.get("t_int").toString());
        }
    }
}

From source file:org.apache.nifi.processors.standard.util.TestJdbcCommon.java

@Test
public void testConvertToAvroStreamForBigDecimal() throws SQLException, IOException {
    final ResultSetMetaData metadata = mock(ResultSetMetaData.class);
    when(metadata.getColumnCount()).thenReturn(1);
    when(metadata.getColumnType(1)).thenReturn(Types.NUMERIC);
    when(metadata.getColumnName(1)).thenReturn("The.Chairman");
    when(metadata.getTableName(1)).thenReturn("1the::table");

    final ResultSet rs = mock(ResultSet.class);
    when(rs.getMetaData()).thenReturn(metadata);

    final AtomicInteger counter = new AtomicInteger(1);
    Mockito.doAnswer(new Answer<Boolean>() {
        @Override
        public Boolean answer(InvocationOnMock invocation) throws Throwable {
            return counter.getAndDecrement() > 0;
        }
    }).when(rs).next();

    final BigDecimal bigDecimal = new BigDecimal(38D);
    when(rs.getObject(Mockito.anyInt())).thenReturn(bigDecimal);

    final ByteArrayOutputStream baos = new ByteArrayOutputStream();

    JdbcCommon.convertToAvroStream(rs, baos, true);

    final byte[] serializedBytes = baos.toByteArray();

    final InputStream instream = new ByteArrayInputStream(serializedBytes);

    final DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
    try (final DataFileStream<GenericRecord> dataFileReader = new DataFileStream<>(instream, datumReader)) {
        GenericRecord record = null;
        while (dataFileReader.hasNext()) {
            record = dataFileReader.next(record);
            assertEquals("_1the__table", record.getSchema().getName());
            assertEquals(bigDecimal.toString(), record.get("The_Chairman").toString());
        }
    }
}
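
Both Avro tests rely on the mocked getColumnName becoming the Avro field name. The assertions in this second test show the normalization convertToAvroStream applies when names are not valid Avro identifiers: the column "The.Chairman" becomes the field "The_Chairman", and the table "1the::table" becomes the schema name "_1the__table".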