Example usage for java.sql ResultSetMetaData getColumnType

Introduction

On this page you can find usage examples for java.sql ResultSetMetaData getColumnType.

Prototype

int getColumnType(int column) throws SQLException;

Document

Retrieves the designated column's SQL type.
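
Before the full examples, here is a minimal, self-contained sketch (not drawn from the sources below) of the typical pattern: run a query, obtain the ResultSetMetaData, and read each column's java.sql.Types code with getColumnType. The in-memory H2 URL and the "item" table are assumptions chosen purely for illustration; any JDBC data source works the same way, provided its driver is on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

public class GetColumnTypeExample {
    public static void main(String[] args) throws SQLException {
        // Assumption: an H2 driver is available; replace the URL with your own data source.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement st = con.createStatement()) {

            st.execute("CREATE TABLE item (id INT, name VARCHAR(32), price DECIMAL(10,2))");

            // "WHERE 1 = 0" returns no rows but still exposes the column metadata.
            try (ResultSet rs = st.executeQuery("SELECT * FROM item WHERE 1 = 0")) {
                ResultSetMetaData meta = rs.getMetaData();
                for (int i = 1; i <= meta.getColumnCount(); i++) {
                    int type = meta.getColumnType(i); // a java.sql.Types constant
                    boolean numeric = type == Types.INTEGER || type == Types.BIGINT
                            || type == Types.DECIMAL || type == Types.NUMERIC;
                    System.out.println(meta.getColumnName(i) + " -> " + type
                            + (numeric ? " (numeric)" : ""));
                }
            }
        }
    }
}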

Usage

From source file:org.apache.sqoop.connector.hbase.HbaseFromInitializer.java

private void configurePartitionProperties(MutableContext context, LinkConfiguration linkConfig,
        FromJobConfiguration jobConf) throws SQLException {
    // Assertions that should be valid (verified via validator)
    assert (jobConf.fromJobConfig.tableName != null && jobConf.fromJobConfig.sql == null)
            || (jobConf.fromJobConfig.tableName == null && jobConf.fromJobConfig.sql != null);
    assert (jobConf.fromJobConfig.boundaryQuery == null && jobConf.incrementalRead.checkColumn == null)
            || (jobConf.fromJobConfig.boundaryQuery != null && jobConf.incrementalRead.checkColumn == null)
            || (jobConf.fromJobConfig.boundaryQuery == null && jobConf.incrementalRead.checkColumn != null);

    // We have a few if/else conditions based on the import type
    boolean tableImport = jobConf.fromJobConfig.tableName != null;
    boolean incrementalImport = jobConf.incrementalRead.checkColumn != null;

    // For generating queries
    StringBuilder sb = new StringBuilder();

    // Partition column name
    String partitionColumnName = jobConf.fromJobConfig.partitionColumn;
    // If it's not specified, we can use the primary key of the given table (if it's a table-based import)
    if (StringUtils.isBlank(partitionColumnName) && tableImport) {
        partitionColumnName = executor.getPrimaryKey(jobConf.fromJobConfig.tableName);
    }
    // If we don't have a partition column name, we will error out
    if (partitionColumnName != null) {
        context.setString(HbaseConnectorConstants.CONNECTOR_JDBC_PARTITION_COLUMNNAME, partitionColumnName);
    } else {
        throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0005);
    }
    LOG.info("Using partition column: " + partitionColumnName);

    // From fragment for subsequent queries
    String fromFragment;
    if (tableImport) {
        String tableName = jobConf.fromJobConfig.tableName;
        String schemaName = jobConf.fromJobConfig.schemaName;

        fromFragment = executor.delimitIdentifier(tableName);
        if (schemaName != null) {
            fromFragment = executor.delimitIdentifier(schemaName) + "." + fromFragment;
        }
    } else {
        sb.setLength(0);
        sb.append("(");
        sb.append(jobConf.fromJobConfig.sql.replace(HbaseConnectorConstants.SQL_CONDITIONS_TOKEN, "1 = 1"));
        sb.append(") ");
        sb.append(HbaseConnectorConstants.SUBQUERY_ALIAS);
        fromFragment = sb.toString();
    }

    // If this is incremental, then we need to get the new maximal value and persist it as a constant
    String incrementalMaxValue = null;
    if (incrementalImport) {
        sb.setLength(0);
        sb.append("SELECT ");
        sb.append("MAX(").append(jobConf.incrementalRead.checkColumn).append(") ");
        sb.append("FROM ");
        sb.append(fromFragment);

        String incrementalNewMaxValueQuery = sb.toString();
        LOG.info("Incremental new max value query:  " + incrementalNewMaxValueQuery);

        ResultSet rs = null;
        try {
            rs = executor.executeQuery(incrementalNewMaxValueQuery);

            if (!rs.next()) {
                throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0022);
            }

            incrementalMaxValue = rs.getString(1);
            context.setString(HbaseConnectorConstants.CONNECTOR_JDBC_LAST_INCREMENTAL_VALUE,
                    incrementalMaxValue);
            LOG.info("New maximal value for incremental import is " + incrementalMaxValue);
        } finally {
            if (rs != null) {
                rs.close();
            }
        }
    }

    // Retrieving min and max values for partition column
    String minMaxQuery = jobConf.fromJobConfig.boundaryQuery;
    if (minMaxQuery == null) {
        sb.setLength(0);
        sb.append("SELECT ");
        sb.append("MIN(").append(partitionColumnName).append("), ");
        sb.append("MAX(").append(partitionColumnName).append(") ");
        sb.append("FROM ").append(fromFragment).append(" ");

        if (incrementalImport) {
            sb.append("WHERE ");
            sb.append(jobConf.incrementalRead.checkColumn).append(" > ?");
            sb.append(" AND ");
            sb.append(jobConf.incrementalRead.checkColumn).append(" <= ?");
        }

        minMaxQuery = sb.toString();
    }
    LOG.info("Using min/max query: " + minMaxQuery);

    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        ps = executor.createStatement(minMaxQuery);
        if (incrementalImport) {
            ps.setString(1, jobConf.incrementalRead.lastValue);
            ps.setString(2, incrementalMaxValue);
        }

        rs = ps.executeQuery();
        if (!rs.next()) {
            throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0006);
        }

        // Boundaries for the job
        String min = rs.getString(1);
        String max = rs.getString(2);

        // Type of the partition column
        ResultSetMetaData rsmd = rs.getMetaData();
        if (rsmd.getColumnCount() != 2) {
            throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0006);
        }
        int columnType = rsmd.getColumnType(1);

        LOG.info("Boundaries for the job: min=" + min + ", max=" + max + ", columnType=" + columnType);

        context.setInteger(HbaseConnectorConstants.CONNECTOR_JDBC_PARTITION_COLUMNTYPE, columnType);
        context.setString(HbaseConnectorConstants.CONNECTOR_JDBC_PARTITION_MINVALUE, min);
        context.setString(HbaseConnectorConstants.CONNECTOR_JDBC_PARTITION_MAXVALUE, max);
    } finally {
        if (ps != null) {
            ps.close();
        }
        if (rs != null) {
            rs.close();
        }
    }
}

From source file:org.apache.hadoop.chukwa.database.TestDatabaseIostat.java

public void testChukwaFramework() {
    try {
        // Test Chukwa Agent Controller and Agent Communication
        ChukwaAgentController cli = new ChukwaAgentController("localhost", agentPort);
        String[] source = new File(System.getenv("CHUKWA_DATA_DIR") + File.separator + "log")
                .list(new FilenameFilter() {
                    public boolean accept(File dir, String name) {
                        return name.endsWith(".log");
                    }
                });

        for (String fname : source) {
            StringBuilder fullPath = new StringBuilder();
            fullPath.append(System.getenv("CHUKWA_DATA_DIR"));
            fullPath.append(File.separator);
            fullPath.append("log");
            fullPath.append(File.separator);
            fullPath.append(fname);
            String recordType = fname.substring(0, fname.indexOf("."));
            String adaptorId = cli.add(
                    "org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8NewLineEscaped",
                    recordType, "0 " + fullPath.toString(), 0);
            assertNotNull(adaptorId);
            Thread.sleep(2000);
        }
        cli.removeAll();
        Thread.sleep(30000);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.toString());
    }

    // Test Data Sink files written by Collector    
    Path demuxDir = new Path(dataSink + "/*");
    FileSystem fs;
    try {
        fs = dfs.getFileSystem();
        FileStatus[] events = fs.globStatus(demuxDir);
        log.info("Number of data sink files written:" + events.length);
        assertTrue(events.length != 0);
    } catch (IOException e) {
        e.printStackTrace();
        fail("File System Error.");
    }

    // Test Demux    
    log.info("Testing demux");
    try {
        //ChukwaConfiguration conf = new ChukwaConfiguration();
        System.setProperty("hadoop.log.dir", System.getProperty("test.build.data", "/tmp"));

        String[] sortArgs = { DEMUX_INPUT_PATH.toString(), DEMUX_OUTPUT_PATH.toString() };
        //      JobConf job = mr.createJobConf();
        JobConf job = new JobConf(new ChukwaConfiguration(), Demux.class);
        job.addResource(System.getenv("CHUKWA_CONF_DIR") + File.separator + "chukwa-demux-conf.xml");
        job.setJobName("Chukwa-Demux_" + day.format(new Date()));
        job.setInputFormat(SequenceFileInputFormat.class);
        job.setMapperClass(Demux.MapClass.class);
        job.setPartitionerClass(ChukwaRecordPartitioner.class);
        job.setReducerClass(Demux.ReduceClass.class);

        job.setOutputKeyClass(ChukwaRecordKey.class);
        job.setOutputValueClass(ChukwaRecord.class);
        job.setOutputFormat(ChukwaRecordOutputFormat.class);
        job.setJobPriority(JobPriority.VERY_HIGH);
        job.setNumMapTasks(2);
        job.setNumReduceTasks(1);
        Path input = new Path(
                fileSys.getUri().toString() + File.separator + dataSink + File.separator + "*.done");
        FileInputFormat.setInputPaths(job, input);
        FileOutputFormat.setOutputPath(job, DEMUX_OUTPUT_PATH);
        String[] jars = new File(System.getenv("CHUKWA_HOME")).list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.endsWith(".jar");
            }
        });
        job.setJar(System.getenv("CHUKWA_HOME") + File.separator + jars[0]);
        //assertEquals(ToolRunner.run(job, new Demux(), sortArgs), 0);
        JobClient.runJob(job);
    } catch (Exception e) {
        fail(e.toString());
    }

    // Test DataLoader
    try {
        fs = dfs.getFileSystem();
        Path outputPath = new Path(DEMUX_OUTPUT_PATH.toString() + File.separator + "/*/*/*.evt");
        FileStatus[] demuxOutput = fs.globStatus(outputPath);
        log.info("Number of chukwa records files written:" + demuxOutput.length);
        assertTrue(demuxOutput.length != 0);
        for (FileStatus fname : demuxOutput) {
            MetricDataLoader mdl = new MetricDataLoader(conf, fs, fname.getPath().toUri().toString());
            mdl.call();
        }
    } catch (IOException e) {
        e.printStackTrace();
        fail("Metric Data Loader Error.");
    }

    // Verify Data
    DatabaseWriter db = null;
    try {
        db = new DatabaseWriter(cluster);
        Macro mp = new Macro(current, current, "select * from [system_metrics]");
        String query = mp.toString();
        ResultSet rs = db.query(query);
        ResultSetMetaData rmeta = rs.getMetaData();
        int size = rmeta.getColumnCount();
        while (rs.next()) {
            for (int i = 1; i <= size; i++) {
                int columnType = rmeta.getColumnType(i);
                if (columnType == java.sql.Types.BIGINT || columnType == java.sql.Types.INTEGER) {
                    long testValue = rs.getLong(i);
                    assertTrue(testValue < 1000000000L);
                } else if (columnType == java.sql.Types.FLOAT || columnType == java.sql.Types.DOUBLE) {
                    double testValue = rs.getDouble(i);
                    assertTrue(testValue < 1000000000L);
                }
            }
        }
    } catch (Throwable ex) {
        fail("Data verification failed.");
    } finally {
        if (db != null) {
            db.close();
        }
    }

}

From source file:org.xsystem.sql2.dml.DmlCommand.java

Map<String, Object> rowAsMap(ResultSet rs) throws SQLException {
    Map<String, Object> row = new LinkedHashMap<>();

    Connection con = rs.getStatement().getConnection();
    AbstactNativeHelper nativeHelper = nativeHelperFactory(con);
    ResultSetMetaData metaData = rs.getMetaData();
    int cnt = metaData.getColumnCount();
    for (int i = 1; i <= cnt; i++) {
        String cn = metaData.getColumnName(i);
        int jdbcType = metaData.getColumnType(i);
        String colTypeName = metaData.getColumnTypeName(i);
        Object value = rs.getObject(i);
        if (rs.wasNull()) {
            value = null;
        }
        if (upperTag != null) {
            if (upperTag) {
                cn = cn.toUpperCase();
            } else {
                cn = cn.toLowerCase();
            }
        }
        value = getValue(value, jdbcType, colTypeName, con, nativeHelper);
        row.put(cn, value);
    }

    return row;
}

From source file:org.jumpmind.vaadin.ui.common.CommonUiUtils.java

@SuppressWarnings("unchecked")
public static Grid putResultsInGrid(final ResultSet rs, List<Integer> pkcolumns, int maxResultSize,
        final boolean showRowNumbers, String... excludeValues) throws SQLException {

    final Grid grid = new Grid();
    grid.setImmediate(true);
    grid.setSelectionMode(SelectionMode.MULTI);
    grid.setColumnReorderingAllowed(true);
    grid.setData(new HashMap<Object, List<Object>>());

    final ResultSetMetaData meta = rs.getMetaData();
    int columnCount = meta.getColumnCount();
    grid.addColumn("#", Integer.class).setHeaderCaption("#").setHidable(true);
    Set<String> columnNames = new HashSet<String>();
    Set<Integer> skipColumnIndexes = new HashSet<Integer>();
    int[] types = new int[columnCount];
    for (int i = 1; i <= columnCount; i++) {
        String realColumnName = meta.getColumnName(i);
        String columnName = realColumnName;
        if (!Arrays.asList(excludeValues).contains(columnName)) {

            int index = 1;
            while (columnNames.contains(columnName)) {
                columnName = realColumnName + "_" + index++;
            }
            columnNames.add(columnName);

            Class<?> typeClass = Object.class;
            int type = meta.getColumnType(i);
            types[i - 1] = type;
            switch (type) {
            case Types.FLOAT:
            case Types.DOUBLE:
            case Types.NUMERIC:
            case Types.REAL:
            case Types.DECIMAL:
                typeClass = BigDecimal.class;
                break;
            case Types.TINYINT:
            case Types.SMALLINT:
            case Types.BIGINT:
            case Types.INTEGER:
                typeClass = Long.class;
                break;
            case Types.VARCHAR:
            case Types.CHAR:
            case Types.NVARCHAR:
            case Types.NCHAR:
            case Types.CLOB:
                typeClass = String.class;
            default:
                break;
            }
            Column column = grid.addColumn(columnName, typeClass).setHeaderCaption(columnName).setHidable(true);
            if (typeClass.equals(Long.class)) {
                column.setConverter(new StringToLongConverter() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public String convertToPresentation(Long value, Class<? extends String> targetType,
                            Locale locale) throws com.vaadin.data.util.converter.Converter.ConversionException {
                        if (value == null) {
                            return NULL_TEXT;
                        } else {
                            return value.toString();
                        }
                    }
                });
            } else if (typeClass.equals(BigDecimal.class)) {
                column.setConverter(new StringToBigDecimalConverter() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public String convertToPresentation(BigDecimal value, Class<? extends String> targetType,
                            Locale locale) throws com.vaadin.data.util.converter.Converter.ConversionException {
                        if (value == null) {
                            return NULL_TEXT;
                        } else {
                            return value.toString();
                        }
                    }
                });
            } else {
                column.setConverter(new Converter<String, Object>() {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public Object convertToModel(String value, Class<? extends Object> targetType,
                            Locale locale) throws com.vaadin.data.util.converter.Converter.ConversionException {
                        return null;
                    }

                    @Override
                    public String convertToPresentation(Object value, Class<? extends String> targetType,
                            Locale locale) throws com.vaadin.data.util.converter.Converter.ConversionException {
                        if (value == null) {
                            return NULL_TEXT;
                        } else {
                            return value.toString();
                        }
                    }

                    @Override
                    public Class<Object> getModelType() {
                        return Object.class;
                    }

                    @Override
                    public Class<String> getPresentationType() {
                        return String.class;
                    }

                });
            }
        } else {
            skipColumnIndexes.add(i - 1);
        }

    }
    int rowNumber = 1;
    while (rs.next() && rowNumber <= maxResultSize) {
        Object[] row = new Object[columnNames.size() + 1];
        row[0] = new Integer(rowNumber);
        int rowIndex = 1;
        for (int i = 0; i < columnCount; i++) {
            if (!skipColumnIndexes.contains(i)) {
                Object o = getObject(rs, i + 1);
                int type = types[i];
                switch (type) {
                case Types.FLOAT:
                case Types.DOUBLE:
                case Types.REAL:
                case Types.NUMERIC:
                case Types.DECIMAL:
                    if (o != null && !(o instanceof BigDecimal)) {
                        o = new BigDecimal(castToNumber(o.toString()));
                    }
                    break;
                case Types.TINYINT:
                case Types.SMALLINT:
                case Types.BIGINT:
                case Types.INTEGER:
                    if (o != null && !(o instanceof Long)) {
                        o = new Long(castToNumber(o.toString()));
                    }
                    break;
                default:
                    break;
                }
                List<Object> primaryKeys = new ArrayList<Object>();
                for (Integer pkcolumn : pkcolumns) {
                    primaryKeys.add(getObject(rs, pkcolumn + 1));
                }
                ((HashMap<Object, List<Object>>) grid.getData()).put(o, primaryKeys);
                row[rowIndex] = o;
                rowIndex++;
            }
        }
        grid.addRow(row);
        rowNumber++;
    }

    if (rowNumber < 100) {
        grid.getColumn("#").setWidth(75);
    } else if (rowNumber < 1000) {
        grid.getColumn("#").setWidth(95);
    } else {
        grid.getColumn("#").setWidth(115);
    }

    if (!showRowNumbers) {
        grid.getColumn("#").setHidden(true);
    } else {
        grid.setFrozenColumnCount(1);
    }

    return grid;
}

From source file:com.streamsets.pipeline.stage.executor.jdbc.TestJdbcQueryExecutor.java

/**
 * Validate structure of the result set (column names and types).
 */
public void assertResultSetStructure(ResultSet rs, Pair<String, Integer>... columns) throws Exception {
    ResultSetMetaData metaData = rs.getMetaData();
    Assert.assertEquals(Utils.format("Unexpected number of columns"), columns.length,
            metaData.getColumnCount());
    int i = 1;
    for (Pair<String, Integer> column : columns) {
        Assert.assertEquals(Utils.format("Unexpected name for column {}", i), column.getLeft(),
                metaData.getColumnName(i));
        Assert.assertEquals(Utils.format("Unexpected type for column {}", i), (int) column.getRight(),
                metaData.getColumnType(i));
        i++;
    }
}

From source file:org.apache.hadoop.chukwa.database.TestDatabaseWebJson.java

protected void verifyTableData(String table) {
    Calendar startCalendar = new GregorianCalendar();
    // startCalendar.add(Calendar.HOUR_OF_DAY,-1);
    startCalendar.add(Calendar.MINUTE, -30);
    long startTime = startCalendar.getTime().getTime();

    Calendar endCalendar = new GregorianCalendar();
    // endCalendar.add(Calendar.HOUR_OF_DAY,1);
    long endTime = endCalendar.getTime().getTime();

    String url = data_url + "?table=" + table + "&start=" + startTime + "&end=" + endTime;
    System.out.println(url);

    HttpClient client = new HttpClient();
    GetMethod method = new GetMethod(url);

    try {

        /*
         * 1. get the json result for the specified table
         */
        int statusCode = client.executeMethod(method);
        if (statusCode != HttpStatus.SC_OK) {
            System.out.println("Http Error: " + method.getStatusLine());
        }
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(method.getResponseBodyAsStream(), method.getResponseCharSet()));
        String json_str = "";
        String str;
        while ((str = reader.readLine()) != null) {
            json_str += str;
        }

        /*
         * 2. convert the json string to individual field and compare it 
         * with the database
         */

        String cluster = "demo";
        DatabaseWriter db = new DatabaseWriter(cluster);

        JSONArray json_array = new JSONArray(json_str);
        for (int i = 0; i < json_array.length(); i++) {
            JSONObject row_obj = json_array.getJSONObject(i);

            // get the database row

            String queryString = getDatabaseQuery(table, row_obj);
            Macro m = new Macro(startTime, endTime, queryString);
            ResultSet rs = db.query(m.toString());
            // move to the first record
            rs.next();
            ResultSetMetaData md = rs.getMetaData();
            Iterator names = row_obj.keys();
            while (names.hasNext()) {
                String name = (String) names.next();
                String jsonValue = (String) row_obj.get(name);
                String dbValue = rs.getString(name);
                int dbCol = rs.findColumn(name);
                int dbType = md.getColumnType(dbCol);
                if (dbType == 93) {
                    // timestamp
                    dbValue = Long.toString(rs.getTimestamp(name).getTime());
                }
                // System.out.println("compare "+name+":"+dbType+":"+dbValue+":"+jsonValue);
                assertEquals(dbValue, jsonValue);
            }
        }

        db.close();
    } catch (SQLException e) {
        System.out.println("Exception: " + e.toString() + ":" + e.getMessage());
        System.out.println("Exception: " + e.toString() + ":" + e.getSQLState());
        System.out.println("Exception: " + e.toString() + ":" + e.getErrorCode());
        fail("SQL Error:" + ExceptionUtil.getStackTrace(e));
    } catch (Exception eOther) {
        System.out.println("Other Exception: " + eOther.toString());
        eOther.printStackTrace();
        fail("Error:" + ExceptionUtil.getStackTrace(eOther));
    } finally {
    }
}

From source file:com.streamsets.pipeline.stage.processor.parser.sql.SqlParserProcessor.java

private void resolveSchema(SchemaAndTable schemaAndTable) throws StageException {
    Map<String, Integer> columns = new HashMap<>();
    String schema = schemaAndTable.getSchema();
    String table = schemaAndTable.getTable();
    try (Statement s = connection.createStatement()) {
        ResultSetMetaData md = s
                .executeQuery(Utils.format("SELECT * FROM {}{} WHERE 1 = 0",
                        StringUtils.isNotEmpty(schema) ? "\"" + schema + "\"." : "", "\"" + table + "\""))
                .getMetaData();
        int colCount = md.getColumnCount();
        for (int i = 1; i <= colCount; i++) {
            int colType = md.getColumnType(i);
            String colName = md.getColumnName(i);
            if (!configBean.caseSensitive) {
                colName = colName.toUpperCase();
            }
            if (colType == Types.DATE || colType == Types.TIME || colType == Types.TIMESTAMP) {
                dateTimeColumns.computeIfAbsent(schemaAndTable, k -> new HashMap<>());
                dateTimeColumns.get(schemaAndTable).put(colName, md.getColumnTypeName(i));
            }

            if (colType == Types.DECIMAL || colType == Types.NUMERIC) {
                decimalColumns.computeIfAbsent(schemaAndTable, k -> new HashMap<>()).put(colName,
                        new PrecisionAndScale(md.getPrecision(i), md.getScale(i)));
            }
            columns.put(md.getColumnName(i), md.getColumnType(i));
        }
        tableSchemas.put(schemaAndTable, columns);
    } catch (SQLException ex) {
        throw new StageException(JDBC_00, configBean.hikariConfigBean.connectionString);
    }
}

From source file:eu.stratosphere.api.java.io.jdbc.JDBCInputFormat.java

private void extractTypes(OUT tuple) throws SQLException, IOException {
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    columnTypes = new int[resultSetMetaData.getColumnCount()];
    if (tuple.getArity() != columnTypes.length) {
        close();
        throw new IOException("Tuple size does not match columncount");
    }
    for (int pos = 0; pos < columnTypes.length; pos++) {
        columnTypes[pos] = resultSetMetaData.getColumnType(pos + 1);
    }
}

From source file:org.apache.syncope.core.persistence.jpa.content.XMLContentExporter.java

private void doExportTable(final TransformerHandler handler, final String dbSchema, final Connection conn,
        final String tableName, final String whereClause) throws SQLException, SAXException {

    LOG.debug("Export table {}", tableName);

    AttributesImpl attrs = new AttributesImpl();

    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        StringBuilder orderBy = new StringBuilder();

        DatabaseMetaData meta = conn.getMetaData();

        // ------------------------------------
        // retrieve foreign keys (linked to the same table) to perform an ordered select
        ResultSet pkeyRS = null;
        try {
            pkeyRS = meta.getImportedKeys(conn.getCatalog(), dbSchema, tableName);
            while (pkeyRS.next()) {
                if (tableName.equals(pkeyRS.getString("PKTABLE_NAME"))) {
                    String columnName = pkeyRS.getString("FKCOLUMN_NAME");
                    if (columnName != null) {
                        if (orderBy.length() > 0) {
                            orderBy.append(",");
                        }

                        orderBy.append(columnName);
                    }
                }
            }
        } finally {
            if (pkeyRS != null) {
                try {
                    pkeyRS.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
        }

        // retrieve primary keys to perform an ordered select
        try {
            pkeyRS = meta.getPrimaryKeys(null, null, tableName);
            while (pkeyRS.next()) {
                String columnName = pkeyRS.getString("COLUMN_NAME");
                if (columnName != null) {
                    if (orderBy.length() > 0) {
                        orderBy.append(",");
                    }

                    orderBy.append(columnName);
                }
            }
        } finally {
            if (pkeyRS != null) {
                try {
                    pkeyRS.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
        }

        // ------------------------------------
        StringBuilder query = new StringBuilder();
        query.append("SELECT * FROM ").append(tableName).append(" a");
        if (StringUtils.isNotBlank(whereClause)) {
            query.append(" WHERE ").append(whereClause);
        }
        if (orderBy.length() > 0) {
            query.append(" ORDER BY ").append(orderBy);
        }
        stmt = conn.prepareStatement(query.toString());

        rs = stmt.executeQuery();
        while (rs.next()) {
            attrs.clear();

            final ResultSetMetaData rsMeta = rs.getMetaData();
            for (int i = 0; i < rsMeta.getColumnCount(); i++) {
                final String columnName = rsMeta.getColumnName(i + 1);
                final Integer columnType = rsMeta.getColumnType(i + 1);

                // Retrieve value taking care of binary values.
                String value = getValues(rs, columnName, columnType);
                if (value != null && (!COLUMNS_TO_BE_NULLIFIED.containsKey(tableName)
                        || !COLUMNS_TO_BE_NULLIFIED.get(tableName).contains(columnName))) {

                    attrs.addAttribute("", "", columnName, "CDATA", value);
                }
            }

            handler.startElement("", "", tableName, attrs);
            handler.endElement("", "", tableName);

            LOG.debug("Add record {}", attrs);
        }
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
    }
}

From source file:org.apache.sqoop.connector.jdbc.GenericJdbcFromInitializer.java

@Override
public Schema getSchema(InitializerContext context, LinkConfiguration linkConfig,
        FromJobConfiguration fromJobConfig) {
    configureJdbcProperties(context.getContext(), linkConfig, fromJobConfig);

    String schemaName = fromJobConfig.fromJobConfig.tableName;
    if (schemaName == null) {
        schemaName = "Query";
    } else if (fromJobConfig.fromJobConfig.schemaName != null) {
        schemaName = fromJobConfig.fromJobConfig.schemaName + "." + schemaName;
    }

    Schema schema = new Schema(schemaName);
    ResultSet rs = null;
    ResultSetMetaData rsmt = null;
    try {
        rs = executor.executeQuery(context.getString(GenericJdbcConnectorConstants.CONNECTOR_JDBC_FROM_DATA_SQL)
                .replace(GenericJdbcConnectorConstants.SQL_CONDITIONS_TOKEN, "1 = 0"));

        rsmt = rs.getMetaData();
        for (int i = 1; i <= rsmt.getColumnCount(); i++) {
            Column column = SqlTypesUtils.sqlTypeToAbstractType(rsmt.getColumnType(i));

            String columnName = rsmt.getColumnName(i);
            if (columnName == null || columnName.equals("")) {
                columnName = rsmt.getColumnLabel(i);
                if (null == columnName) {
                    columnName = "Column " + i;
                }
            }

            column.setName(columnName);
            schema.addColumn(column);
        }

        return schema;
    } catch (SQLException e) {
        throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0016, e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.info("Ignoring exception while closing ResultSet", e);
            }
        }
        if (executor != null) {
            executor.close();
        }
    }
}