List of usage examples for java.sql.ResultSetMetaData.getColumnCount()
int getColumnCount() throws SQLException;

Returns the number of columns in this ResultSet object.
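Before the real-world examples below, here is a minimal sketch of typical usage (assuming an open java.sql.Connection named conn and a hypothetical table my_table): getColumnCount() gives the width of the result, and columns are then addressed with 1-based indices.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class ColumnCountExample {
    // Prints each column's label and SQL type name; columns run from 1 to getColumnCount().
    static void printColumns(Connection conn) throws SQLException {
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT * FROM my_table")) { // hypothetical table
            ResultSetMetaData md = rs.getMetaData();
            int columnCount = md.getColumnCount();
            for (int i = 1; i <= columnCount; i++) {
                System.out.println(md.getColumnLabel(i) + " : " + md.getColumnTypeName(i));
            }
        }
    }
}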
From source file: chh.utils.db.source.common.JdbcClient.java — uses getColumnCount() to walk every column of each row and build a typed List<Column> per row.
public List<List<Column>> select(String sqlQuery, List<Column> queryParams) {
    Connection connection = null;
    try {
        connection = connectionProvider.getConnection();
        PreparedStatement preparedStatement = connection.prepareStatement(sqlQuery);
        if (queryTimeoutSecs > 0) {
            preparedStatement.setQueryTimeout(queryTimeoutSecs);
        }
        setPreparedStatementParams(preparedStatement, queryParams);
        ResultSet resultSet = preparedStatement.executeQuery();
        List<List<Column>> rows = Lists.newArrayList();
        while (resultSet.next()) {
            ResultSetMetaData metaData = resultSet.getMetaData();
            int columnCount = metaData.getColumnCount();
            List<Column> row = Lists.newArrayList();
            for (int i = 1; i <= columnCount; i++) {
                String columnLabel = metaData.getColumnLabel(i);
                int columnType = metaData.getColumnType(i);
                Class columnJavaType = Util.getJavaType(columnType);
                if (columnJavaType.equals(String.class)) {
                    row.add(new Column<String>(columnLabel, resultSet.getString(columnLabel), columnType));
                } else if (columnJavaType.equals(Integer.class)) {
                    row.add(new Column<Integer>(columnLabel, resultSet.getInt(columnLabel), columnType));
                } else if (columnJavaType.equals(Double.class)) {
                    row.add(new Column<Double>(columnLabel, resultSet.getDouble(columnLabel), columnType));
                } else if (columnJavaType.equals(Float.class)) {
                    row.add(new Column<Float>(columnLabel, resultSet.getFloat(columnLabel), columnType));
                } else if (columnJavaType.equals(Short.class)) {
                    row.add(new Column<Short>(columnLabel, resultSet.getShort(columnLabel), columnType));
                } else if (columnJavaType.equals(Boolean.class)) {
                    row.add(new Column<Boolean>(columnLabel, resultSet.getBoolean(columnLabel), columnType));
                } else if (columnJavaType.equals(byte[].class)) {
                    row.add(new Column<byte[]>(columnLabel, resultSet.getBytes(columnLabel), columnType));
                } else if (columnJavaType.equals(Long.class)) {
                    row.add(new Column<Long>(columnLabel, resultSet.getLong(columnLabel), columnType));
                } else if (columnJavaType.equals(Date.class)) {
                    row.add(new Column<Date>(columnLabel, resultSet.getDate(columnLabel), columnType));
                } else if (columnJavaType.equals(Time.class)) {
                    row.add(new Column<Time>(columnLabel, resultSet.getTime(columnLabel), columnType));
                } else if (columnJavaType.equals(Timestamp.class)) {
                    row.add(new Column<Timestamp>(columnLabel, resultSet.getTimestamp(columnLabel), columnType));
                } else {
                    throw new RuntimeException(
                            "type = " + columnType + " for column " + columnLabel + " not supported.");
                }
            }
            rows.add(row);
        }
        return rows;
    } catch (SQLException e) {
        throw new RuntimeException("Failed to execute select query " + sqlQuery, e);
    } finally {
        closeConnection(connection);
    }
}
From source file: com.recomdata.transmart.data.export.util.FileWriterUtil.java — checks getColumnCount() to verify that a ResultSet has at least one column before exporting it to a file.
@SuppressWarnings("unused")
private boolean validate(ResultSet resultSet, String[] headerValues) throws Exception {
    boolean valid = true;
    try {
        ResultSetMetaData rsmd = resultSet.getMetaData();
        if (null == resultSet || (null != resultSet && rsmd.getColumnCount() <= 0)) {
            valid = false;
            log.error((null != outputFile) ? outputFile.getAbsolutePath() : "" + " :: Empty resultSet");
        }
        if (null == outputFile) {
            valid = false;
            log.error("Invalid outputFile");
        }
    } catch (SQLException e) {
        valid = false;
        log.error((null != outputFile) ? outputFile.getAbsolutePath() : "" + " :: Empty resultSet");
    }
    return valid;
}
From source file: com.tesora.dve.sql.util.JdbcConnectionResourceResponse.java — a test assertion that compares getColumnCount() and per-column metadata against an expected ColumnSet.
private void assertEqualProxyConnMetadata(String cntxt, ProxyConnectionResourceResponse pcrr) throws Throwable {
    ResultSetMetaData rsmd = results.getMetaData();
    ColumnSet sysColumns = pcrr.getColumns();
    assertEquals(cntxt + " mismatched column set width", rsmd.getColumnCount(),
            sysColumns.getColumnList().size());
    List<ColumnMetadata> sysCols = sysColumns.getColumnList();
    for (int i = 0; i < rsmd.getColumnCount(); i++) {
        ColumnMetadata sc = sysCols.get(i);
        String colcntxt = cntxt + " column " + sc.getAliasName();
        // still don't handle non column labels right
        assertEquals(colcntxt + " mismatched column name", rsmd.getColumnName(i + 1), sc.getName());
        assertEquals(colcntxt + " mismatched column label", rsmd.getColumnLabel(i + 1), sc.getAliasName());
        if (rsmd.getColumnType(i + 1) != sc.getDataType()) {
            // emit names - easier to read
            fail(colcntxt + " mismatched column type. Expected " + rsmd.getColumnTypeName(i + 1) + " ("
                    + rsmd.getColumnType(i + 1) + ") but found " + sc.getTypeName() + " ("
                    + sc.getDataType() + ")");
        }
    }
}
From source file: com.gzj.tulip.jade.rowmapper.BeanPropertyRowMapper.java — loops from 1 to getColumnCount() to map each column of the current row onto a bean property.
/**
 * Extract the values for all columns in the current row.
 * <p>
 * Utilizes public setters and result set metadata.
 *
 * @see java.sql.ResultSetMetaData
 */
public Object mapRow(ResultSet rs, int rowNumber) throws SQLException {
    // spring's : Object mappedObject = BeanUtils.instantiateClass(this.mappedClass);
    // jade's   : uses the private instantiateClass(this.mappedClass) below instead
    Object mappedObject = instantiateClass(this.mappedClass);
    BeanWrapper bw = new BeanWrapperImpl(mappedObject);
    ResultSetMetaData rsmd = rs.getMetaData();
    int columnCount = rsmd.getColumnCount();
    boolean warnEnabled = logger.isWarnEnabled();
    boolean debugEnabled = logger.isDebugEnabled();
    Set<String> populatedProperties = (checkProperties ? new HashSet<String>() : null);
    for (int index = 1; index <= columnCount; index++) {
        String column = JdbcUtils.lookupColumnName(rsmd, index).toLowerCase();
        PropertyDescriptor pd = this.mappedFields.get(column);
        if (pd != null) {
            try {
                Object value = JdbcUtils.getResultSetValue(rs, index, pd.getPropertyType());
                if (debugEnabled && rowNumber == 0) {
                    logger.debug("Mapping column '" + column + "' to property '" + pd.getName()
                            + "' of type " + pd.getPropertyType());
                }
                bw.setPropertyValue(pd.getName(), value);
                if (populatedProperties != null) {
                    populatedProperties.add(pd.getName());
                }
            } catch (NotWritablePropertyException ex) {
                throw new DataRetrievalFailureException(
                        "Unable to map column " + column + " to property " + pd.getName(), ex);
            }
        } else {
            if (checkColumns) {
                throw new InvalidDataAccessApiUsageException("Unable to map column '" + column
                        + "' to any properties of bean " + this.mappedClass.getName());
            }
            if (warnEnabled && rowNumber == 0) {
                logger.warn("Unable to map column '" + column + "' to any properties of bean "
                        + this.mappedClass.getName());
            }
        }
    }
    if (populatedProperties != null && !populatedProperties.equals(this.mappedProperties)) {
        throw new InvalidDataAccessApiUsageException("Given ResultSet does not contain all fields "
                + "necessary to populate object of class [" + this.mappedClass + "]: "
                + this.mappedProperties);
    }
    return mappedObject;
}
From source file: com.sinosoft.one.data.jade.rowmapper.BeanPropertyRowMapper.java — a variant of the mapper above that additionally sets a ConversionService on the BeanWrapper.
/**
 * Extract the values for all columns in the current row.
 * <p>
 * Utilizes public setters and result set metadata.
 *
 * @see java.sql.ResultSetMetaData
 */
public Object mapRow(ResultSet rs, int rowNumber) throws SQLException {
    // spring's : Object mappedObject = BeanUtils.instantiateClass(this.mappedClass);
    // jade's   : uses the private instantiateClass(this.mappedClass) below instead
    Object mappedObject = instantiateClass(this.mappedClass);
    BeanWrapper bw = new BeanWrapperImpl(mappedObject);
    bw.setConversionService(this.conversionService);
    ResultSetMetaData rsmd = rs.getMetaData();
    int columnCount = rsmd.getColumnCount();
    boolean warnEnabled = logger.isWarnEnabled();
    boolean debugEnabled = logger.isDebugEnabled();
    Set<String> populatedProperties = (checkProperties ? new HashSet<String>() : null);
    for (int index = 1; index <= columnCount; index++) {
        String column = JdbcUtils.lookupColumnName(rsmd, index).toLowerCase();
        PropertyDescriptor pd = this.mappedFields.get(column);
        if (pd != null) {
            try {
                Object value = JdbcUtils.getResultSetValue(rs, index, pd.getPropertyType());
                if (debugEnabled && rowNumber == 0) {
                    logger.debug("Mapping column '" + column + "' to property '" + pd.getName()
                            + "' of type " + pd.getPropertyType());
                }
                bw.setPropertyValue(pd.getName(), value);
                if (populatedProperties != null) {
                    populatedProperties.add(pd.getName());
                }
            } catch (NotWritablePropertyException ex) {
                throw new DataRetrievalFailureException(
                        "Unable to map column " + column + " to property " + pd.getName(), ex);
            }
        } else {
            if (checkColumns) {
                throw new InvalidDataAccessApiUsageException("Unable to map column '" + column
                        + "' to any properties of bean " + this.mappedClass.getName());
            }
            if (warnEnabled && rowNumber == 0) {
                logger.warn("Unable to map column '" + column + "' to any properties of bean "
                        + this.mappedClass.getName());
            }
        }
    }
    if (populatedProperties != null && !populatedProperties.equals(this.mappedProperties)) {
        throw new InvalidDataAccessApiUsageException("Given ResultSet does not contain all fields "
                + "necessary to populate object of class [" + this.mappedClass + "]: "
                + this.mappedProperties);
    }
    return mappedObject;
}
From source file: com.netspective.axiom.sql.ResultSetUtils.java — sizes a header array with getColumnCount() and fills it with column labels or names.
/**
 * Create a text array that contains the headings of the columns in the given result set.
 *
 * @param resultSet                  The result set that we want to create column headers for
 * @param preferColumnLabelForHeader True to prefer the column label (falling back to the column
 *                                   name when no label is available); false to always use the column name
 *
 * @return The headings
 */
public String[] getColumnHeadings(ResultSet resultSet, boolean preferColumnLabelForHeader) throws SQLException {
    ResultSetMetaData rsmd = resultSet.getMetaData();
    int columnsCount = rsmd.getColumnCount();
    String[] header = new String[columnsCount];
    if (preferColumnLabelForHeader) {
        // column indices are 1-based and run up to and including columnsCount
        for (int i = 1; i <= columnsCount; i++) {
            String label = rsmd.getColumnLabel(i);
            if (label != null && label.length() > 0)
                header[i - 1] = label;
            else
                header[i - 1] = rsmd.getColumnName(i);
        }
    } else {
        for (int i = 1; i <= columnsCount; i++)
            header[i - 1] = rsmd.getColumnName(i);
    }
    return header;
}
From source file: at.ac.univie.isc.asio.engine.sql.WebRowSetWriter.java — writes getColumnCount() into a WebRowSet XML column-count element and emits one column definition per column.
private void metadata() throws XMLStreamException, SQLException {
    final ResultSetMetaData rsmd = context();
    // @formatter:off
    xml.writeStartElement(WRS, "metadata");
    tag("column-count", rsmd.getColumnCount());
    for (int index = 1; index <= rsmd.getColumnCount(); index++) {
        columnDefinition(index, rsmd);
    }
    xml.writeEndElement();
    // @formatter:on
}
From source file: org.jfree.data.jdbc.JDBCPieDataset.java — requires getColumnCount() to be exactly 2 (key and value) before populating a pie dataset.
/**
 * ExecuteQuery will attempt to execute the query passed to it against the
 * existing database connection. If no connection exists then no action
 * is taken.
 * The results from the query are extracted and cached locally, thus
 * applying an upper limit on how many rows can be retrieved successfully.
 *
 * @param con   the connection the query is to be executed against
 * @param query the query to be executed
 *
 * @throws SQLException if there is a problem executing the query.
 */
public void executeQuery(Connection con, String query) throws SQLException {
    Statement statement = null;
    ResultSet resultSet = null;
    try {
        statement = con.createStatement();
        resultSet = statement.executeQuery(query);
        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();
        if (columnCount != 2) {
            throw new SQLException("Invalid sql generated. PieDataSet requires 2 columns only");
        }
        int columnType = metaData.getColumnType(2);
        double value = Double.NaN;
        while (resultSet.next()) {
            Comparable key = resultSet.getString(1);
            switch (columnType) {
            case Types.NUMERIC:
            case Types.REAL:
            case Types.INTEGER:
            case Types.DOUBLE:
            case Types.FLOAT:
            case Types.DECIMAL:
            case Types.BIGINT:
                value = resultSet.getDouble(2);
                setValue(key, value);
                break;
            case Types.DATE:
            case Types.TIME:
            case Types.TIMESTAMP:
                Timestamp date = resultSet.getTimestamp(2);
                value = date.getTime();
                setValue(key, value);
                break;
            default:
                System.err.println("JDBCPieDataset - unknown data type");
                break;
            }
        }
        fireDatasetChanged(new DatasetChangeInfo()); // TODO: fill in real change info
    } finally {
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (Exception e) {
                System.err.println("JDBCPieDataset: swallowing exception.");
            }
        }
        if (statement != null) {
            try {
                statement.close();
            } catch (Exception e) {
                System.err.println("JDBCPieDataset: swallowing exception.");
            }
        }
    }
}
From source file: io.lightlink.oracle.AbstractOracleType.java — uses getColumnCount() on a StructDescriptor's metadata to order attribute values when building an Oracle STRUCT.
protected STRUCT createStruct(Connection con, Object value, String type) throws SQLException {
    if (value == null)
        return null;

    Map mapValue;
    if (value instanceof Map) {
        mapValue = (Map) value;
        mapValue = new CaseInsensitiveMap(mapValue);
    } else {
        // create a Map from bean
        Map map = new CaseInsensitiveMap(new BeanMap(value));
        map.remove("class");
        mapValue = map;
    }

    STRUCT struct;
    StructDescriptor structType = safeCreateStructureDescriptor(type, con);
    ResultSetMetaData stuctMeteData = structType.getMetaData();

    List<Object> orderedValues = new ArrayList<Object>();

    if (stuctMeteData.getColumnCount() == 1 && mapValue.size() == 1) {
        orderedValues.add(mapValue.values().iterator().next());
    } else {
        for (int col = 1; col <= stuctMeteData.getColumnCount(); col++) {
            Object v = mapValue.get(stuctMeteData.getColumnName(col));
            if (v == null) {
                v = mapValue.get(stuctMeteData.getColumnName(col).replaceAll("_", ""));
            }
            String typeName = stuctMeteData.getColumnTypeName(col);
            int columnType = stuctMeteData.getColumnType(col);
            if (columnType == OracleTypes.ARRAY) {
                v = createArray(con, v, typeName);
            } else if (columnType == OracleTypes.JAVA_STRUCT || columnType == OracleTypes.JAVA_OBJECT
                    || columnType == OracleTypes.STRUCT) {
                v = createStruct(con, v, typeName);
            }
            orderedValues.add(v);
        }
    }

    Object[] values = orderedValues.toArray();

    for (int j = 0; j < values.length; j++) {
        Object v = values[j];
        if (v instanceof Long && stuctMeteData.getColumnTypeName(j + 1).equalsIgnoreCase("TIMESTAMP")) {
            values[j] = new Timestamp((Long) v);
        } else if (v instanceof Long && stuctMeteData.getColumnTypeName(j + 1).equalsIgnoreCase("DATE")) {
            values[j] = new Date((Long) v);
        }
    }

    struct = new STRUCT(structType, con, values);
    return struct;
}
From source file: com.thinkbiganalytics.hive.service.HiveService.java — builds the result's column descriptors by looping from 1 to getColumnCount() when the first row is mapped.
public QueryResult query(String query) throws DataAccessException {
    final DefaultQueryResult queryResult = new DefaultQueryResult(query);
    final List<QueryResultColumn> columns = new ArrayList<>();
    final Map<String, Integer> displayNameMap = new HashMap<>();
    if (query != null && !query.toLowerCase().startsWith("show")) {
        query = safeQuery(query);
    }
    try {
        // Setting in order to query complex formats like parquet
        jdbcTemplate.execute("set hive.optimize.index.filter=false");
        jdbcTemplate.query(query, new RowMapper<Map<String, Object>>() {
            @Override
            public Map<String, Object> mapRow(ResultSet rs, int rowNum) throws SQLException {
                if (columns.isEmpty()) {
                    ResultSetMetaData rsMetaData = rs.getMetaData();
                    for (int i = 1; i <= rsMetaData.getColumnCount(); i++) {
                        String colName = rsMetaData.getColumnName(i);
                        DefaultQueryResultColumn column = new DefaultQueryResultColumn();
                        column.setField(rsMetaData.getColumnName(i));
                        String displayName = rsMetaData.getColumnLabel(i);
                        column.setHiveColumnLabel(displayName);
                        // remove the table name if it exists
                        displayName = StringUtils.substringAfterLast(displayName, ".");
                        Integer count = 0;
                        if (displayNameMap.containsKey(displayName)) {
                            count = displayNameMap.get(displayName);
                            count++;
                        }
                        displayNameMap.put(displayName, count);
                        column.setDisplayName(displayName + "" + (count > 0 ? count : ""));
                        column.setTableName(StringUtils.substringAfterLast(rsMetaData.getColumnName(i), "."));
                        column.setDataType(ParserHelper.sqlTypeToHiveType(rsMetaData.getColumnType(i)));
                        columns.add(column);
                    }
                    queryResult.setColumns(columns);
                }
                Map<String, Object> row = new LinkedHashMap<>();
                for (QueryResultColumn column : columns) {
                    row.put(column.getDisplayName(), rs.getObject(column.getHiveColumnLabel()));
                }
                queryResult.addRow(row);
                return row;
            }
        });
    } catch (DataAccessException dae) {
        dae.printStackTrace();
        throw dae;
    }
    return queryResult;
}