Example usage for java.sql ResultSet getMetaData

Introduction

This page collects example usages of java.sql.ResultSet.getMetaData from open-source projects.

Prototype

ResultSetMetaData getMetaData() throws SQLException;

Document

Retrieves the number, types and properties of this ResultSet object's columns.
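
A minimal, self-contained sketch of the call (the in-memory H2 URL and the query are illustrative assumptions, not taken from the examples below):

import java.sql.*;

public class MetaDataDemo {
    public static void main(String[] args) throws SQLException {
        // Hypothetical connection; any JDBC driver on the classpath will do.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement stmt = con.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT 1 AS id, 'a' AS name")) {
            ResultSetMetaData md = rs.getMetaData();
            // JDBC column indexes are 1-based.
            for (int i = 1; i <= md.getColumnCount(); i++) {
                System.out.printf("%s (%s)%n", md.getColumnName(i), md.getColumnTypeName(i));
            }
        }
    }
}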

Usage

From source file:cz.lbenda.dataman.db.DbStructureFactory.java

private void generateStructureForeignKeys(Map<String, CatalogDesc> catalogs, DatabaseMetaData dmd)
        throws SQLException {
    SQLDialect di = dbConfig.getJdbcConfiguration().getDialect();
    StatusHelper.getInstance().progressNextStep(this, STEP_READ_FOREIGN_KEYS,
            catalogs.values().stream().mapToInt(
                    cat -> cat.getSchemas().stream().mapToInt(schema -> schema.getTables().size()).sum())
                    .sum());
    for (CatalogDesc ch : catalogs.values()) {
        for (SchemaDesc schema : ch.getSchemas()) {
            for (TableDesc td : schema.getTables()) {
                StatusHelper.getInstance().progress(this);
                ResultSet rsEx = dmd.getExportedKeys(ch.getName(), schema.getName(), td.getName());
                writeColumnNames("generateStructureForeignKeys", rsEx.getMetaData());
                while (rsEx.next()) {
                    String slaveCatalogName = rsEx.getString(di.fkSlaveTableCatalog());
                    String slaveSchemaName = rsEx.getString(di.fkSlaveTableSchema());
                    String slaveTableName = rsEx.getString(di.fkSlaveTableName());
                    TableDesc slaveTD = catalogs.get(slaveCatalogName).getSchema(slaveSchemaName)
                            .getTable(slaveTableName);
                    //noinspection ConstantConditions
                    ForeignKey fk = new ForeignKey(rsEx.getString(di.fkName()), td,
                            td.getColumn(rsEx.getString(di.fkMasterColumnName())), slaveTD,
                            slaveTD.getColumn(rsEx.getString(di.fkSlaveColumnName())),
                            rsEx.getString(di.fkUpdateRule()), rsEx.getString(di.fkDeleteRule()));
                    td.addForeignKey(fk);
                    slaveTD.addForeignKey(fk);
                }
            }
        }
    }
}

From source file:com.arcane.dao.Impl.PatternDaoImpl.java

@Override
public List<Pattern> getAllPatternList() {
    //select all patterns from specified category
    LOG.info("Selecting all patterns from specific category");
    JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
    List<String> patternNameList = getPatternNames();
    String sql = "";
    List<Pattern> listPattern = new ArrayList<Pattern>();
    for (String pattern : patternNameList) {
        sql = "SELECT * from " + pattern;
        List<Pattern> templistPattern = jdbcTemplate.query(sql, new RowMapper<Pattern>() {

            @Override
            public Pattern mapRow(ResultSet rs, int rowNumber) throws SQLException {
                Pattern pattern1 = new Pattern();
                pattern1.setId(rs.getInt("id"));
                pattern1.setStock(rs.getString("stock"));
                pattern1.setTimeStamp(rs.getString("breakPoint"));
                pattern1.setName(rs.getMetaData().getTableName(1));
                return pattern1;
            }

        });
        listPattern.addAll(templistPattern);
    }
    Collections.sort(listPattern, comparator);
    return listPattern;
}

From source file:com.streamsets.pipeline.stage.origin.jdbc.JdbcSource.java

private Record processRow(ResultSet resultSet, long rowCount) throws SQLException, StageException {
    Source.Context context = getContext();
    ResultSetMetaData md = resultSet.getMetaData();
    int numColumns = md.getColumnCount();

    LinkedHashMap<String, Field> fields = jdbcUtil.resultSetToFields(resultSet, commonSourceConfigBean,
            errorRecordHandler, unknownTypeAction, null);

    if (fields.size() != numColumns) {
        errorRecordHandler.onError(JdbcErrors.JDBC_35, fields.size(), numColumns);
        return null; // Don't output this record.
    }

    final String recordContext = StringUtils.substring(query.replaceAll("[\n\r]", ""), 0, 100) + "::rowCount:"
            + rowCount + (StringUtils.isEmpty(offsetColumn) ? "" : ":" + resultSet.getString(offsetColumn));
    Record record = context.createRecord(recordContext);
    if (jdbcRecordType == JdbcRecordType.LIST_MAP) {
        record.set(Field.createListMap(fields));
    } else if (jdbcRecordType == JdbcRecordType.MAP) {
        record.set(Field.create(fields));
    } else {
        // type is LIST
        List<Field> row = new ArrayList<>();
        for (Map.Entry<String, Field> fieldInfo : fields.entrySet()) {
            Map<String, Field> cell = new HashMap<>();
            cell.put("header", Field.create(fieldInfo.getKey()));
            cell.put("value", fieldInfo.getValue());
            row.add(Field.create(cell));
        }
        record.set(Field.create(row));
    }
    if (createJDBCNsHeaders) {
        jdbcUtil.setColumnSpecificHeaders(record, Collections.<String>emptySet(), md, jdbcNsHeaderPrefix);
    }
    // We will add cdc operation type to record header even if createJDBCNsHeaders is false
    // we currently support CDC on only MS SQL.
    if (hikariConfigBean.getConnectionString().startsWith("jdbc:sqlserver")) {
        MSOperationCode.addOperationCodeToRecordHeader(record);
    }

    return record;
}

From source file:CSVWriter.java

/**
 * Writes the entire ResultSet to a CSV file.
 *
 * The caller is responsible for closing the ResultSet.
 *
 * @param rs the ResultSet to write
 * @param includeColumnNames true if you want column names in the output, false otherwise
 *
 */
public void writeAll(java.sql.ResultSet rs, boolean includeColumnNames) throws SQLException, IOException {

    ResultSetMetaData metadata = rs.getMetaData();

    if (includeColumnNames) {
        writeColumnNames(metadata);
    }

    int columnCount = metadata.getColumnCount();

    while (rs.next()) {
        String[] nextLine = new String[columnCount];

        for (int i = 0; i < columnCount; i++) {
            nextLine[i] = getColumnValue(rs, metadata.getColumnType(i + 1), i + 1);
        }

        writeNext(nextLine);
    }
}
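
A sketch of how writeAll might be driven, assuming CSVWriter wraps a java.io.Writer (the constructor and close() shown here are assumptions about the surrounding class, not part of the excerpt):

    try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo");
         Statement stmt = con.createStatement();
         ResultSet rs = stmt.executeQuery("SELECT * FROM users")) {
        CSVWriter writer = new CSVWriter(new FileWriter("users.csv"));
        writer.writeAll(rs, true); // true: emit a header row first
        writer.close();
    }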

From source file:com.erbjuder.logger.server.rest.util.ResultSetConverter.java

private JSONArray toJSONArray(ResultSet rs, JSONArray json) throws Exception {

    try {

        // we will need the column names, so save the table meta-data (e.g., column names) up front.
        java.sql.ResultSetMetaData rsmd = rs.getMetaData();
        //loop through the ResultSet
        while (rs.next()) {

            //figure out how many columns there are
            int numColumns = rsmd.getColumnCount();

            //each row in the ResultSet will be converted to a JSON Object
            JSONObject obj = new JSONObject();

            // loop through all the columns and place them into the JSON Object
            for (int i = 1; i < numColumns + 1; i++) {
                String column_name = rsmd.getColumnName(i);
                if (rsmd.getColumnType(i) == java.sql.Types.ARRAY) {
                    obj.put(column_name, rs.getArray(column_name).toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.BIGINT) {
                    obj.put(column_name, rs.getBigDecimal(column_name).toBigInteger().toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.BOOLEAN) {
                    obj.put(column_name, ((Boolean) rs.getBoolean(column_name)).toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.BLOB) {
                    obj.put(column_name, rs.getBlob(column_name));

                } else if (rsmd.getColumnType(i) == java.sql.Types.DOUBLE) {
                    obj.put(column_name, ((Double) rs.getDouble(column_name)).toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.FLOAT) {
                    obj.put(column_name, ((Float) rs.getFloat(column_name)).toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.INTEGER) {
                    obj.put(column_name, ((Integer) rs.getInt(column_name)).toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.NVARCHAR) {
                    obj.put(column_name, rs.getNString(column_name));

                } else if (rsmd.getColumnType(i) == java.sql.Types.VARCHAR) {
                    obj.put(column_name, rs.getString(column_name));

                } else if (rsmd.getColumnType(i) == java.sql.Types.TINYINT) {
                    obj.put(column_name, ((Integer) rs.getInt(column_name)).toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.SMALLINT) {
                    obj.put(column_name, ((Integer) rs.getInt(column_name)).toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.DATE) {
                    obj.put(column_name, rs.getDate(column_name).toString());

                } else if (rsmd.getColumnType(i) == java.sql.Types.TIME) {
                    obj.put(column_name, TimeStampUtils.dateTimeToString(rs.getTime(column_name)));

                } else if (rsmd.getColumnType(i) == java.sql.Types.TIMESTAMP) {
                    obj.put(column_name, TimeStampUtils.timeStampToString(rs.getTimestamp(column_name)));

                } else if (rsmd.getColumnType(i) == java.sql.Types.NUMERIC) {
                    obj.put(column_name, rs.getBigDecimal(column_name).toString());

                } else {
                    obj.put(column_name, rs.getObject(column_name));

                }
            } //end foreach
            json.add(obj);
        } //end while
    } catch (Exception e) {
        e.printStackTrace();
    }
    return json; //return JSON array
}
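
The long if/else chain above dispatches on the JDBC type code once per column; a switch over rsmd.getColumnType(i) expresses the same dispatch more compactly and lets related types share a branch. A minimal sketch of the idea (not from the original source):

    switch (rsmd.getColumnType(i)) {
        case java.sql.Types.TINYINT:
        case java.sql.Types.SMALLINT:
        case java.sql.Types.INTEGER:
            obj.put(column_name, String.valueOf(rs.getInt(column_name)));
            break;
        case java.sql.Types.VARCHAR:
        case java.sql.Types.NVARCHAR:
            obj.put(column_name, rs.getString(column_name));
            break;
        default:
            obj.put(column_name, rs.getObject(column_name));
    }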

From source file:fr.jetoile.hadoopunit.integrationtest.IntegrationBootstrapTest.java

@Test
@Ignore
public void hiveServer2ShouldStart() throws InterruptedException, ClassNotFoundException, SQLException {

    //        assertThat(Utils.available("127.0.0.1", 20103)).isFalse();

    // Load the Hive JDBC driver
    LOGGER.info("HIVE: Loading the Hive JDBC Driver");
    Class.forName("org.apache.hive.jdbc.HiveDriver");

    //
    // Create an ORC table and describe it
    //
    // Get the connection
    Connection con = DriverManager
            .getConnection(
                    "jdbc:hive2://" + configuration.getString(HadoopUnitConfig.HIVE_SERVER2_HOSTNAME_KEY) + ":"
                            + configuration.getInt(HadoopUnitConfig.HIVE_SERVER2_PORT_KEY) + "/"
                            + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY),
                    "user", "pass");

    // Create the DB
    Statement stmt;
    try {
        String createDbDdl = "CREATE DATABASE IF NOT EXISTS "
                + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY);
        stmt = con.createStatement();
        LOGGER.info("HIVE: Running Create Database Statement: {}", createDbDdl);
        stmt.execute(createDbDdl);
    } catch (Exception e) {
        e.printStackTrace();
    }

    // Drop the table in case it still exists
    String dropDdl = "DROP TABLE " + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY);
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Drop Table Statement: {}", dropDdl);
    stmt.execute(dropDdl);

    // Create the ORC table
    String createDdl = "CREATE TABLE IF NOT EXISTS "
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY) + " (id INT, msg STRING) "
            + "PARTITIONED BY (dt STRING) " + "CLUSTERED BY (id) INTO 16 BUCKETS "
            + "STORED AS ORC tblproperties(\"orc.compress\"=\"NONE\")";
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Create Table Statement: {}", createDdl);
    stmt.execute(createDdl);

    // Issue a describe on the new table and display the output
    LOGGER.info("HIVE: Validating Table was Created: ");
    ResultSet resultSet = stmt.executeQuery(
            "DESCRIBE FORMATTED " + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY));
    int count = 0;
    while (resultSet.next()) {
        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
        for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            System.out.print(resultSet.getString(i));
        }
        System.out.println();
        count++;
    }
    assertEquals(33, count);

    // Drop the table
    dropDdl = "DROP TABLE " + configuration.getString(HadoopUnitConfig.HIVE_TEST_DATABASE_NAME_KEY) + "."
            + configuration.getString(HadoopUnitConfig.HIVE_TEST_TABLE_NAME_KEY);
    stmt = con.createStatement();
    LOGGER.info("HIVE: Running Drop Table Statement: {}", dropDdl);
    stmt.execute(dropDdl);
}

From source file:jp.primecloud.auto.tool.management.db.SQLExecuter.java

public List<Map<String, Object>> showColumns(String sql) throws SQLException, Exception {
    Connection con = null;
    Statement stmt = null;
    ResultSet rs = null;
    log.info("[" + sql + "] ???");
    List<Map<String, Object>> results = new ArrayList<Map<String, Object>>();
    try {
        con = dbConnector.getConnection();
        stmt = con.createStatement();
        rs = stmt.executeQuery(sql);
        ResultSetMetaData rsMetaData = rs.getMetaData();

        int size = rsMetaData.getColumnCount();
        while (rs.next()) {
            Map<String, Object> result = new HashMap<String, Object>();
            for (int i = 1; i <= size; i++) {
                result.put(parseColumnName(rsMetaData.getColumnName(i)), rs.getObject(i));
            }
            results.add(result);
        }
        log.info("[" + sql + "] ????");
    } catch (SQLException e) {
        log.error(e.getMessage(), e);

        throw new SQLException(e);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new Exception(e);
    } finally {
        try {
            dbConnector.closeConnection(con, stmt, rs);
        } catch (Exception e) {
            e.printStackTrace();
        }

    }
    return results;
}

From source file:com.insframework.common.spring.jdbc.mapper.BeanPropertyRowMapper.java

/**
 * Extract the values for all columns in the current row.
 * <p>Utilizes public setters and result set metadata.
 * @see java.sql.ResultSetMetaData
 */
@Override
public T mapRow(ResultSet rs, int rowNumber) throws SQLException {
    Assert.state(this.mappedClass != null, "Mapped class was not specified");
    T mappedObject = BeanUtils.instantiate(this.mappedClass);
    BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(mappedObject);
    initBeanWrapper(bw);

    ResultSetMetaData rsmd = rs.getMetaData();
    int columnCount = rsmd.getColumnCount();
    Set<String> populatedProperties = (isCheckFullyPopulated() ? new HashSet<String>() : null);

    for (int index = 1; index <= columnCount; index++) {
        String column = JdbcUtils.lookupColumnName(rsmd, index);
        PropertyDescriptor pd = this.mappedFields.get(column.replaceAll(" ", "").toLowerCase());
        if (pd != null) {
            try {
                Object value = getColumnValue(rs, index, pd);
                if (logger.isDebugEnabled() && rowNumber == 0) {
                    logger.debug("Mapping column '" + column + "' to property '" + pd.getName() + "' of type "
                            + pd.getPropertyType());
                }
                try {
                    // added by guom
                    if (pd.getPropertyType() != null
                            && "java.lang.String".equals(pd.getPropertyType().getName())) {
                        if (value != null) {
                            bw.setPropertyValue(pd.getName(), String.valueOf(value));
                        } else {
                            bw.setPropertyValue(pd.getName(), "");
                        }
                    } else if (pd.getPropertyType() != null
                            && "double".equals(pd.getPropertyType().getName())) {
                        if (value != null) {
                            bw.setPropertyValue(pd.getName(), value);
                        } else {
                            bw.setPropertyValue(pd.getName(), 0d);
                        }
                    } else {
                        bw.setPropertyValue(pd.getName(), value);
                    }

                } catch (TypeMismatchException e) {
                    if (value == null && primitivesDefaultedForNullValue) {
                        logger.info("Intercepted TypeMismatchException for row " + rowNumber + " and column '"
                                + column + "' with value " + value + " when setting property '" + pd.getName()
                                + "' of type " + pd.getPropertyType() + " on object: " + mappedObject);
                    } else {
                        throw e;
                    }
                }
                if (populatedProperties != null) {
                    populatedProperties.add(pd.getName());
                }
            } catch (NotWritablePropertyException ex) {
                throw new DataRetrievalFailureException(
                        "Unable to map column " + column + " to property " + pd.getName(), ex);
            }
        }
    }

    if (populatedProperties != null && !populatedProperties.equals(this.mappedProperties)) {
        throw new InvalidDataAccessApiUsageException("Given ResultSet does not contain all fields "
                + "necessary to populate object of class [" + this.mappedClass + "]: " + this.mappedProperties);
    }

    return mappedObject;
}

From source file:com.termmed.statistics.Processor.java

/**
 * Prints the report.
 *
 * @param bw the writer that receives the CSV output
 * @param tableMap the output file table mapping
 * @throws Exception the exception
 */
private void printReport(BufferedWriter bw, OutputFileTableMap tableMap) throws Exception {

    SQLStatementExecutor executor = new SQLStatementExecutor(connection);

    for (SelectTableMap select : tableMap.getSelect()) {
        String query = "Select * from " + select.getTableName();
        if (executor.executeQuery(query, null)) {
            ResultSet rs = executor.getResultSet();

            if (rs != null) {
                ResultSetMetaData meta = rs.getMetaData();
                while (rs.next()) {
                    for (int i = 0; i < meta.getColumnCount(); i++) {
                        if (rs.getObject(i + 1) != null) {
                            bw.append(rs.getObject(i + 1).toString());
                        } else {
                            bw.append("");
                        }
                        if (i + 1 < meta.getColumnCount()) {
                            bw.append(",");
                        } else {
                            bw.append("\r\n");
                        }
                    }
                }

                meta = null;
                rs.close();
            }
        }
    }
    executor = null;
}

From source file:com.xpfriend.fixture.cast.temp.Database.java

private boolean hasIdentityColumnInternal(String tableName) {
    DbCommand command = createCommand("select * from " + tableName);
    try {
        ResultSet resultSet = command.executeQuery();
        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();
        for (int i = 1; i <= columnCount; i++) {
            if (metaData.isAutoIncrement(i)) {
                return true;
            }
        }
        return false;
    } catch (SQLException e) {
        throw new ConfigException(e);
    } finally {
        command.close();
    }
}