Example usage for java.sql ResultSetMetaData getColumnCount

List of usage examples for java.sql ResultSetMetaData getColumnCount

Introduction

On this page you can find example usage for java.sql ResultSetMetaData getColumnCount.

Prototype

int getColumnCount() throws SQLException;

Document

Returns the number of columns in this ResultSet object.
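
Before the real-world examples below, here is a minimal, self-contained sketch of the typical pattern: getColumnCount bounds a 1-based loop over the columns of a result set. The in-memory H2 connection URL and the query are illustrative assumptions, not taken from the examples, and the sketch assumes the H2 driver is on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnCountExample {
    public static void main(String[] args) throws SQLException {
        // The connection URL and query are placeholders; substitute your own.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = con.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT 1 AS a, 'x' AS b")) {
            ResultSetMetaData md = rs.getMetaData();
            // JDBC column indices are 1-based: iterate from 1 to getColumnCount(), inclusive.
            for (int i = 1; i <= md.getColumnCount(); i++) {
                System.out.println(i + ": " + md.getColumnLabel(i));
            }
        }
    }
}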

Usage

From source file:db.migration.V023__UpdateOrganisationToimipisteKoodi.java

public void migrate(JdbcTemplate jdbcTemplate) throws Exception {
    LOG.info("migrate()...");

    // Get all organisations
    List<Map> resultSet = jdbcTemplate.query("SELECT * FROM organisaatio o", new RowMapper<Map>() {
        @Override
        public Map mapRow(ResultSet rs, int rowNum) throws SQLException {
            Map r = new HashMap<String, Object>();

            ResultSetMetaData metadata = rs.getMetaData();
            for (int i = 1; i <= metadata.getColumnCount(); i++) {
                String cname = metadata.getColumnName(i);
                int ctype = metadata.getColumnType(i);

                switch (ctype) {
                case Types.VARCHAR:
                    r.put(cname, rs.getString(cname));
                    break;

                default:
                    break;
                }
            }

            LOG.debug("  read from db : org = {}", r);

            _organisations.put((String) r.get("oid"), r);
            return r;
        }
    });

    // Generate and update initial values for toimipistekoodis
    for (Map org : resultSet) {
        if (isToimipiste(org, jdbcTemplate)) {
            String tpKoodi = calculateToimipisteKoodi(org, jdbcTemplate);
            updateToimipisteKoodi(org, tpKoodi, jdbcTemplate);
        }
    }

    LOG.info("  Processed {} organisations, updated {} Opetuspistes", _organisations.size(), _numUpdated);

    LOG.info("migrate()... done.");
}
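
Note the loop bounds in mapRow: JDBC column indices are 1-based, so the loop runs from 1 to getColumnCount() inclusive. Spring's built-in ColumnMapRowMapper implements essentially the same column-copying pattern and can often replace a hand-written mapper like this one.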

From source file:com.simplymeasured.prognosticator.ThreadedQueryRunnable.java

@Override
public void run() {
    NamedParameterJdbcTemplate template = new NamedParameterJdbcTemplate(dataSource);

    try {
        template.query(query, parameters, new RowCallbackHandler() {
            @Override
            public void processRow(ResultSet resultSet) throws SQLException {
                try {
                    Map<String, Object> result = Maps.newHashMap();

                    final ResultSetMetaData metadata = resultSet.getMetaData();

                    for (int i = 1; i <= metadata.getColumnCount(); i++) {
                        String columnTypeName = metadata.getColumnTypeName(i);

                        final Object value;

                        if ("array".equalsIgnoreCase(columnTypeName)) {
                            String stringValue = resultSet.getString(i);

                            if (stringValue != null) {
                                value = objectMapper.readValue(stringValue, List.class);
                            } else {
                                value = null;
                            }
                        } else if ("map".equalsIgnoreCase(columnTypeName)
                                || "struct".equalsIgnoreCase(columnTypeName)) {
                            String stringValue = resultSet.getString(i);

                            if (stringValue != null) {
                                value = objectMapper.readValue(stringValue, Map.class);
                            } else {
                                value = null;
                            }
                        } else {
                            value = resultSet.getObject(i);
                        }

                        result.put(metadata.getColumnName(i), value);
                    }

                    resultQueue.put(result);
                } catch (SQLException se) {
                    LOG.warn("Database error!", se);
                    throw new RuntimeException("Database error!", se);
                } catch (InterruptedException ie) {
                    LOG.warn("Query killed!", ie);
                    throw new RuntimeException("Query killed!", ie);
                } catch (Exception ex) {
                    LOG.warn("Unable to parse row!", ex);
                    throw new RuntimeException("Unable to parse row!", ex);
                }
            }
        });

        resultQueue.put(Collections.<String, Object>emptyMap());
    } catch (DataAccessException dae) {
        try {
            resultQueue.put(Collections.<String, Object>emptyMap());
        } catch (InterruptedException ie) {
            LOG.warn("Queue is dead!", ie);
        }

        LOG.warn("Unable to execute query - attempting to clean up", dae);
    } catch (InterruptedException ie) {
        LOG.warn("Queue is dead!", ie);
    }
}
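
Here the empty map put on resultQueue serves as an end-of-stream sentinel: the consumer draining the queue stops when it sees it, and the same sentinel is offered on the failure path so the consumer is never left blocked.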

From source file:com.aw.core.util.QTTbBnMapperBasicRowProcessor.java

private void initForBean(ResultSet rs) throws SQLException {
    if (!beanMapper.isMetatadaBuilt()) {
        ResultSetMetaData metaData = rs.getMetaData();
        List colNames = new ArrayList();
        for (int i = 0; i < metaData.getColumnCount(); i++) {
            String colName = metaData.getColumnName(i + 1);
            colNames.add(colName);
        }

        beanMapper.buildMetadata(colNames);
        dataRowProvider = new BeanDataRowProvider(rs);
    }
}

From source file:io.lightlink.oracle.AbstractOracleType.java

protected Map<String, Object> getMapFromStruct(OracleConnection con, Struct struct) throws SQLException {

    Object[] attributes = struct.getAttributes();
    HashMap<String, Object> res = new HashMap<String, Object>();

    StructDescriptor structType = safeCreateStructureDescriptor(struct.getSQLTypeName(), con);
    ResultSetMetaData structMetaData = structType.getMetaData();

    for (int col = 1; col <= structMetaData.getColumnCount(); col++) {
        String columnName = structMetaData.getColumnName(col);
        res.put(columnName, attributes[col - 1]);
    }
    return res;
}

From source file:org.schedoscope.metascope.util.HiveQueryExecutor.java

@Transactional
public HiveQueryResult executeQuery(String databaseName, String tableName, String fields,
        Set<MetascopeField> parameters, Map<String, String> params) {
    List<List<String>> rows = new ArrayList<List<String>>();

    HiveServerConnection hiveConn = new HiveServerConnection(config);

    hiveConn.connect();

    if (hiveConn.getConnection() == null) {
        return new HiveQueryResult("Could not connect to HiveServer2");
    }

    String where = "";
    List<String> values = new ArrayList<String>();
    if (params != null) {
        for (Entry<String, String> param : params.entrySet()) {
            if (param.getKey().equals("fqdn") || param.getKey().equals("_csrf")) {
                continue;
            }
            if (!param.getValue().isEmpty()) {
                boolean parameterExists = false;
                for (MetascopeField parameter : parameters) {
                    if (parameter.getFieldName().equals(param.getKey())) {
                        parameterExists = true;
                    }
                }
                if (!parameterExists) {
                    hiveConn.close();
                    return new HiveQueryResult("Query not allowed");
                }

                if (!where.isEmpty()) {
                    where += " AND ";
                }
                where += param.getKey() + "=?";
                values.add(param.getValue());
            }
        }
    }

    String sql = " SELECT " + fields;
    for (MetascopeField parameter : parameters) {
        sql += "," + parameter.getFieldName();
    }
    sql += " FROM " + databaseName + "." + tableName;
    sql += where.isEmpty() ? "" : " WHERE " + where;
    sql += " LIMIT 10";

    List<String> header = new ArrayList<String>();
    try {
        PreparedStatement pstmt = hiveConn.getConnection().prepareStatement(sql);
        for (int i = 1; i <= values.size(); i++) {
            pstmt.setString(i, values.get(i - 1));
        }
        ResultSet rs = pstmt.executeQuery();
        ResultSetMetaData rsmd = rs.getMetaData();

        for (int i = 1; i <= rsmd.getColumnCount(); i++) {
            header.add(rsmd.getColumnName(i));
        }

        while (rs.next()) {
            List<String> row = new ArrayList<String>();
            for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                Object val = rs.getObject(i);
                String strVal = (val == null ? null : val.toString());
                row.add(strVal);
            }
            rows.add(row);
        }
    } catch (SQLException e) {
        LOG.error("Could not execute query", e);
        hiveConn.close();
        return new HiveQueryResult(e.getMessage());
    }

    hiveConn.close();
    return new HiveQueryResult(header, rows);
}

From source file:com.openddal.test.BaseTestCase.java

public static void printResultSet(ResultSet rs) {
    try {
        if (rs != null) {
            ResultSetMetaData md = rs.getMetaData();
            int cols = md.getColumnCount();
            StringBuffer sb = new StringBuffer();
            for (int i = 0; i < cols; i++) {
                sb.append(md.getColumnName(i + 1) + " ");
            }
            sb.append('\n');
            for (int i = 0; i < cols; i++) {
                sb.append("-------------------");
            }
            sb.append('\n');
            while (rs.next()) {
                for (int i = 0; i < cols; i++) {
                    sb.append(rs.getString(i + 1) + " ");
                }
                sb.append('\n');
            }
            System.out.println(sb.toString());
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
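
One design note on this printer: getColumnName reports the underlying column name, while getColumnLabel additionally honors SQL AS aliases and is generally the better choice for display output, as the ESGAccessLogServiceImpl example below illustrates.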

From source file:net.certifi.audittablegen.PostgresqlDMR.java

/**
 * Get a List of ColumnDef objects for all columns of the given table
 * in the targeted database/schema. Postgres-specific code replaces the
 * 'serial' data type with integer, because the column in the audit table
 * must be of type integer and not serial. Since this data is interpreted
 * by ChangeSourceFactory, which should be database independent, the
 * translation needs to be in the DMR.
 *
 * @param tableName the name of the table whose columns are fetched
 * @return ArrayList of ColumnDef objects, or an empty list if none are found.
 */
@Override
public List getColumns(String tableName) {

    //getDataTypes will initialize the map if it isn't already loaded
    Map<String, DataTypeDef> dtds = getDataTypes();

    List columns = new ArrayList<>();

    try {
        Connection conn = dataSource.getConnection();
        DatabaseMetaData dmd = conn.getMetaData();
        ResultSet rs = dmd.getColumns(null, verifiedSchema, tableName, null);

        //load all of the metadata in the result set into a map for each column

        ResultSetMetaData rsmd = rs.getMetaData();
        int metaDataColumnCount = rsmd.getColumnCount();
        if (!rs.isBeforeFirst()) {
            throw new RuntimeException(
                    "No results for DatabaseMetaData.getColumns(" + verifiedSchema + "." + tableName + ")");
        }
        while (rs.next()) {
            ColumnDef columnDef = new ColumnDef();
            Map columnMetaData = new CaseInsensitiveMap();
            for (int i = 1; i <= metaDataColumnCount; i++) {
                columnMetaData.put(rsmd.getColumnName(i), rs.getString(i));
            }
            columnDef.setName(rs.getString("COLUMN_NAME"));

            String type_name = rs.getString("TYPE_NAME");
            if (type_name.equalsIgnoreCase("serial")) {
                columnDef.setTypeName("int4");
            } else {
                columnDef.setTypeName(type_name);
            }
            columnDef.setSqlType(rs.getInt("DATA_TYPE"));
            columnDef.setSize(rs.getInt("COLUMN_SIZE"));
            columnDef.setDecimalSize(rs.getInt("DECIMAL_DIGITS"));
            columnDef.setSourceMeta(columnMetaData);

            if (dtds.containsKey(columnDef.getTypeName())) {
                columnDef.setDataTypeDef(dtds.get(columnDef.getTypeName()));
            } else {
                throw new RuntimeException(
                        "Missing DATA_TYPE definition for data type " + columnDef.getTypeName());
            }
            columns.add(columnDef);
        }

    } catch (SQLException e) {
        throw Throwables.propagate(e);
    }

    return columns;

}
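
In this example getColumnCount is applied not to an application query but to the metadata result set returned by DatabaseMetaData.getColumns, so the inner loop captures every metadata field (COLUMN_NAME, TYPE_NAME, DATA_TYPE, and so on) that the driver reports for each column.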

From source file:org.schedoscope.metascope.service.MetascopeDataDistributionService.java

@Async("background")
public void calculateDistribution(MetascopeTable table) {
    runningJobs.put(table.getFqdn(), true);

    String sql = DataDistributionSqlUtil.buildSql(table);

    HiveServerConnection hiveConn = new HiveServerConnection(config);

    hiveConn.connect();

    if (hiveConn.getConnection() == null) {
        runningJobs.put(table.getFqdn(), false);
        return;
    }

    try {
        Statement stmt = hiveConn.getConnection().createStatement();
        ResultSet rs = stmt.executeQuery(sql);

        ResultSetMetaData rsmd = rs.getMetaData();

        List<String> columnNames = new ArrayList<>();
        for (int i = 1; i <= rsmd.getColumnCount(); i++) {
            columnNames.add(rsmd.getColumnName(i));
        }

        long ts = System.currentTimeMillis();
        if (rs.next()) {
            for (String columnName : columnNames) {
                MetascopeDataDistribution mdd = new MetascopeDataDistribution();
                mdd.setId(table.getFqdn() + "." + columnName);
                mdd.setFqdn(table.getFqdn());
                mdd.setMetric(columnName);
                mdd.setValue(rs.getString(columnName));
                metascopeDataDistributionRepository.save(mdd);
            }
        }
    } catch (SQLException e) {
        hiveConn.close();
        runningJobs.put(table.getFqdn(), false);
        LOG.error("Could not execute hive query", e);
    }

    hiveConn.close();

    runningJobs.put(table.getFqdn(), false);
}

From source file:com.streamsets.pipeline.lib.jdbc.multithread.CDCJdbcRunnable.java

@Override
public void generateSchemaChanges(BatchContext batchContext) throws SQLException {
    Map<String, Integer> source = new HashMap<>();
    ResultSet rs = tableReadContext.getMoreResultSet();
    String schemaName = "";
    String tableName = "";
    String captureInstanceName = "";

    if (rs != null && rs.next()) {
        ResultSetMetaData data = rs.getMetaData();

        for (int i = 1; i <= data.getColumnCount(); i++) {
            String label = data.getColumnLabel(i);
            if (label.equals(MSQueryUtil.CDC_SOURCE_SCHEMA_NAME)) {
                schemaName = rs.getString(label);
            } else if (label.equals(MSQueryUtil.CDC_SOURCE_TABLE_NAME)) {
                tableName = rs.getString(label);
            } else if (label.equals(MSQueryUtil.CDC_CAPTURE_INSTANCE_NAME)) {
                captureInstanceName = rs.getString(label);
            } else {
                int type = data.getColumnType(i);
                source.put(label, type);
            }
        }

        boolean schemaChanges = getDiff(captureInstanceName, source,
                tableRuntimeContext.getSourceTableContext().getColumnToType());

        if (schemaChanges) {
            JdbcEvents.SCHEMA_CHANGE.create(context, batchContext).with("source-table-schema-name", schemaName)
                    .with("source-table-name", tableName).with("capture-instance-name", captureInstanceName)
                    .createAndSend();
            context.processBatch(batchContext);
        }
    }
}

From source file:esg.gateway.service.ESGAccessLogServiceImpl.java

/**
 * Initializes the service by setting up the database connection and result handling.
 */
public void init() {
    Properties props = new Properties();
    props.setProperty("db.protocol", "jdbc:postgresql:");
    props.setProperty("db.host", "localhost");
    props.setProperty("db.port", "5432");
    props.setProperty("db.database", "esgcet");
    props.setProperty("db.user", "dbsuper");
    props.setProperty("db.password", "changeme");
    try {
        props.putAll(new ESGFProperties());
    } catch (IOException ex) {
        log.error(ex);
    }

    queryRunner = new QueryRunner(DatabaseResource.init(props.getProperty("db.driver", "org.postgresql.Driver"))
            .setupDataSource(props).getDataSource());

    resultSetHandler = new ResultSetHandler<List<String[]>>() {
        public List<String[]> handle(ResultSet rs) throws SQLException {
            ArrayList<String[]> results = new ArrayList<String[]>();
            String[] record = null;
            assert (null != results);

            ResultSetMetaData meta = rs.getMetaData();
            int cols = meta.getColumnCount();
            log.trace("Number of fields: " + cols);

            log.trace("adding column data...");
            record = new String[cols];
            for (int i = 0; i < cols; i++) {
                try {
                    record[i] = meta.getColumnLabel(i + 1) + "|" + meta.getColumnType(i + 1);
                } catch (SQLException e) {
                    log.error(e);
                }
            }
            results.add(record);

            for (int i = 0; rs.next(); i++) {
                log.trace("Looking at record " + (i + 1));
                record = new String[cols];
                for (int j = 0; j < cols; j++) {
                    record[j] = rs.getString(j + 1);
                    log.trace("gathering result record column " + (j + 1) + " -> " + record[j]);
                }
                log.trace("adding record ");
                results.add(record);
                record = null; //gc courtesy
            }
            return results;
        }
    };
    log.trace("initialization complete");
}