List of usage examples for java.sql.ResultSetMetaData.getScale(int)
int getScale(int column) throws SQLException;
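getScale(int column) reports the number of digits to the right of the decimal point for the designated column; for data types where scale is not applicable, 0 is returned. The examples below all read it off the ResultSetMetaData of an executed query. As a minimal, self-contained sketch (assuming an in-memory H2 database on the classpath and a hypothetical item table created only for the demonstration):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class GetScaleExample {
    public static void main(String[] args) throws SQLException {
        // Assumes the H2 driver is available; any JDBC driver and URL works the same way.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE item (name VARCHAR(40), price DECIMAL(10, 2))");
            // "WHERE 1 = 0" fetches no rows but still exposes the column metadata.
            try (ResultSet rs = stmt.executeQuery("SELECT * FROM item WHERE 1 = 0")) {
                ResultSetMetaData md = rs.getMetaData();
                for (int i = 1; i <= md.getColumnCount(); i++) {
                    // getScale: digits right of the decimal point (2 for price, 0 for name)
                    System.out.println(md.getColumnName(i) + " scale=" + md.getScale(i));
                }
            }
        }
    }
}

For DECIMAL(10, 2) the call returns 2; together with getPrecision it is enough to reconstruct the declared numeric type, which is exactly what several of the examples below do.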
From source file:org.apache.kylin.query.adhoc.PushDownRunnerJdbcImpl.java
@Override
public void executeQuery(String query, List<List<String>> results, List<SelectedColumnMeta> columnMetas)
        throws Exception {
    Statement statement = null;
    Connection connection = this.getConnection();
    ResultSet resultSet = null;

    //extract column metadata
    ResultSetMetaData metaData = null;
    int columnCount = 0;
    try {
        statement = connection.createStatement();
        resultSet = statement.executeQuery(query);
        extractResults(resultSet, results);

        metaData = resultSet.getMetaData();
        columnCount = metaData.getColumnCount();

        // fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    false, metaData.isCurrency(i), metaData.isNullable(i), false,
                    metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i),
                    null, null, null, metaData.getPrecision(i), metaData.getScale(i),
                    metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i), false,
                    false));
        }
    } catch (SQLException sqlException) {
        throw sqlException;
    } finally {
        DBUtils.closeQuietly(resultSet);
        DBUtils.closeQuietly(statement);
        closeConnection(connection);
    }
}
From source file:com.netspective.axiom.sql.QueryResultSet.java
public void fillReportFromMetaData(TabularReport report) throws SQLException {
    ResultSetMetaData rsmd = resultSet.getMetaData();
    int numColumns = rsmd.getColumnCount();
    TabularReportColumns columns = report.getColumns();
    columns.clear();

    for (int c = 1; c <= numColumns; c++) {
        TabularReportColumn column = null;

        int dataType = rsmd.getColumnType(c);
        switch (dataType) {
        case Types.INTEGER:
        case Types.SMALLINT:
        case Types.BIGINT:
        case Types.TINYINT:
        case Types.BIT:
            column = new NumericColumn();
            break;

        case Types.FLOAT:
        case Types.REAL:
            column = new DecimalColumn();
            break;

        case Types.NUMERIC:
        case Types.DECIMAL:
            if (rsmd.getScale(c) > 0)
                column = new DecimalColumn();
            else
                column = new NumericColumn();
            break;

        default:
            column = new GeneralColumn();
            break;
        }

        column.setColIndex(c - 1);
        column.setHeading(new StaticValueSource(
                TextUtils.getInstance().sqlIdentifierToText(rsmd.getColumnLabel(c), true)));
        column.setDataType(dataType);
        column.setWidth(rsmd.getColumnDisplaySize(c));

        columns.add(column);
    }

    report.finalizeContents();
}
From source file:com.xpfriend.fixture.cast.temp.Database.java
private DynaClass getDynaClass(ResultSet resultSet) throws SQLException {
    ResultSetMetaData md = resultSet.getMetaData();
    int count = md.getColumnCount();
    DynaProperty[] properties = new DynaProperty[count];
    for (int i = 0; i < properties.length; i++) {
        int column = i + 1;
        Class<?> type = TypeConverter.getJavaType(md.getColumnType(column), md.getColumnTypeName(column),
                md.getPrecision(column), md.getScale(column));
        String name = getColumnLabel(md, column);
        properties[i] = new DynaProperty(name, type);
    }
    return new BasicDynaClass(null, null, properties);
}
From source file:at.ac.univie.isc.asio.engine.sql.WebRowSetWriter.java
private void columnDefinition(final int idx, final ResultSetMetaData context)
        throws XMLStreamException, SQLException {
    // @formatter:off
    xml.writeStartElement(WRS, "column-definition");
    tag("column-index", idx);
    tag("auto-increment", context.isAutoIncrement(idx));
    tag("case-sensitive", context.isCaseSensitive(idx));
    tag("currency", context.isCurrency(idx));
    tag("nullable", context.isNullable(idx));
    tag("signed", context.isSigned(idx));
    tag("searchable", context.isSearchable(idx));
    tag("column-display-size", context.getColumnDisplaySize(idx));
    tag("column-label", context.getColumnLabel(idx));
    tag("column-name", context.getColumnName(idx));
    tag("schema-name", context.getSchemaName(idx));
    tag("column-precision", context.getPrecision(idx));
    tag("column-scale", context.getScale(idx));
    tag("table-name", context.getTableName(idx));
    tag("catalog-name", context.getCatalogName(idx));
    tag("column-type", context.getColumnType(idx));
    tag("column-type-name", context.getColumnTypeName(idx));
    xml.writeEndElement();
    // @formatter:on
}
From source file:com.mapd.utility.SQLImporter.java
private void createMapDTable(ResultSetMetaData metaData) {
    StringBuilder sb = new StringBuilder();
    sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");

    // Now iterate the metadata
    try {
        for (int i = 1; i <= metaData.getColumnCount(); i++) {
            if (i > 1) {
                sb.append(",");
            }
            LOGGER.debug("Column name is " + metaData.getColumnName(i));
            LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
            LOGGER.debug("Column type is " + metaData.getColumnType(i));

            sb.append(metaData.getColumnName(i)).append(" ");
            sb.append(getColType(metaData.getColumnType(i), metaData.getPrecision(i), metaData.getScale(i)));
        }
        sb.append(")");

        if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
            sb.append(" with (fragment_size = ");
            sb.append(cmd.getOptionValue("fragmentSize", "0"));
            sb.append(")");
        }
    } catch (SQLException ex) {
        LOGGER.error("Error processing the metadata - " + ex.toString());
        exit(1);
    }

    executeMapDCommand(sb.toString());
}
From source file:org.apache.sqoop.connector.hbase.HbaseFromInitializer.java
@Override
public Schema getSchema(InitializerContext context, LinkConfiguration linkConfig,
        FromJobConfiguration fromJobConfig) {
    executor = new HbaseExecutor(linkConfig.linkConfig);

    String schemaName = fromJobConfig.fromJobConfig.tableName;
    if (schemaName == null) {
        schemaName = "Query";
    } else if (fromJobConfig.fromJobConfig.schemaName != null) {
        schemaName = fromJobConfig.fromJobConfig.schemaName + "." + schemaName;
    }

    Schema schema = new Schema(schemaName);
    ResultSet rs = null;
    ResultSetMetaData rsmt = null;
    try {
        rs = executor.executeQuery(context.getString(HbaseConnectorConstants.CONNECTOR_JDBC_FROM_DATA_SQL)
                .replace(HbaseConnectorConstants.SQL_CONDITIONS_TOKEN, "1 = 0"));

        rsmt = rs.getMetaData();
        for (int i = 1; i <= rsmt.getColumnCount(); i++) {
            String columnName = rsmt.getColumnLabel(i);
            if (StringUtils.isEmpty(columnName)) {
                columnName = rsmt.getColumnName(i);
                if (StringUtils.isEmpty(columnName)) {
                    columnName = "Column " + i;
                }
            }
            Column column = SqlTypesUtils.sqlTypeToSchemaType(rsmt.getColumnType(i), columnName,
                    rsmt.getPrecision(i), rsmt.getScale(i));
            schema.addColumn(column);
        }
        return schema;
    } catch (SQLException e) {
        throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0016, e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.info("Ignoring exception while closing ResultSet", e);
            }
        }
        if (executor != null) {
            executor.close();
        }
    }
}
From source file:org.seasar.dbflute.logic.jdbc.metadata.synonym.DfSynonymExtractorOracle.java
protected List<DfColumnMeta> getSynonymColumns(Connection conn, UnifiedSchema synonymOwner, String synonymName)
        throws SQLException {
    final List<DfColumnMeta> columnList = new ArrayList<DfColumnMeta>();
    Statement st = null;
    ResultSet rs = null;
    try {
        st = conn.createStatement();
        final String synonymSqlName = synonymOwner.buildSchemaQualifiedName(synonymName);
        final String sql = "select * from " + synonymSqlName + " where 0=1";
        rs = st.executeQuery(sql);
        final ResultSetMetaData metaData = rs.getMetaData();
        int count = metaData.getColumnCount();
        for (int i = 0; i < count; i++) {
            int index = i + 1;
            String columnName = metaData.getColumnName(index);
            int columnType = metaData.getColumnType(index);
            String columnTypeName = metaData.getColumnTypeName(index);
            int precision = metaData.getPrecision(index);
            int scale = metaData.getScale(index);
            int nullableType = metaData.isNullable(index);
            DfColumnMeta column = new DfColumnMeta();
            column.setColumnName(columnName);
            column.setJdbcDefValue(columnType);
            column.setDbTypeName(columnTypeName);
            column.setColumnSize(precision);
            column.setDecimalDigits(scale);
            column.setRequired(nullableType == ResultSetMetaData.columnNoNulls);
            columnList.add(column);
        }
        return columnList;
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (SQLException ignored) {
            }
        }
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ignored) {
            }
        }
    }
}
From source file:ro.nextreports.designer.dbviewer.DefaultDBViewer.java
public List<DBColumn> getColumns(String schema, String table)
        throws NextSqlException, MalformedTableNameException {
    Connection con;
    List<DBColumn> columns = new ArrayList<DBColumn>();
    String schemaName;
    String escapedTableName;
    try {
        con = Globals.getConnection();
        if (schema == null) {
            schemaName = Globals.getConnection().getMetaData().getUserName();
        } else {
            schemaName = schema;
        }
        Dialect dialect = Globals.getDialect();
        if (dialect.isKeyWord(table)) {
            escapedTableName = dialect.getEscapedKeyWord(table);
        } else {
            escapedTableName = table;
        }
    } catch (Exception e) {
        throw new NextSqlException("Could not retrieve connection.", e);
    }

    ResultSet rs = null;
    Statement stmt = null;
    List<String> keyColumns = new ArrayList<String>();
    try {
        // primary keys
        DatabaseMetaData dbmd = con.getMetaData();
        rs = dbmd.getPrimaryKeys(null, schemaName, table);
        while (rs.next()) {
            keyColumns.add(rs.getString("COLUMN_NAME"));
        }
        closeResultSet(rs);

        // foreign keys
        rs = dbmd.getImportedKeys(null, schemaName, table);
        List<String> foreignColumns = new ArrayList<String>();
        HashMap<String, DBForeignColumnInfo> fkMap = new HashMap<String, DBForeignColumnInfo>();
        while (rs.next()) {
            String fkSchema = rs.getString("FKTABLE_SCHEM");
            String fkTable = rs.getString("FKTABLE_NAME");
            String fkColumn = rs.getString("FKCOLUMN_NAME");
            String pkSchema = rs.getString("PKTABLE_SCHEM");
            String pkTable = rs.getString("PKTABLE_NAME");
            String pkColumn = rs.getString("PKCOLUMN_NAME");
            DBForeignColumnInfo fkInfo = new DBForeignColumnInfo(fkSchema, fkTable, fkColumn, pkSchema,
                    pkTable, pkColumn);
            //System.out.println("fkInfo : " + fkInfo);
            foreignColumns.add(fkColumn);
            fkMap.put(fkColumn, fkInfo);
        }
        closeResultSet(rs);

        // column names with index
        rs = dbmd.getIndexInfo(null, schemaName, table, false, true);
        List<String> indexes = new ArrayList<String>();
        while (rs.next()) {
            String indexName = rs.getString(9);
            if (indexName != null) {
                indexes.add(indexName);
            }
        }
        closeResultSet(rs);

        DataSource ds = DefaultDataSourceManager.getInstance().getConnectedDataSource();
        String header = "";
        stmt = con.createStatement();
        try {
            // workaround if a table name contains spaces
            if (escapedTableName.indexOf(" ") != -1) {
                escapedTableName = "\"" + escapedTableName + "\"";
            }
            String prefix = "";
            if (!NO_SCHEMA_NAME.equals(schemaName)) {
                prefix = schemaName;
            }
            if (prefix.indexOf(" ") != -1) {
                prefix = "\"" + prefix + "\"";
            }
            if (!"".equals(prefix)) {
                prefix = prefix + ".";
            }
            if (ds.getDriver().equals(CSVDialect.DRIVER_CLASS)) {
                header = (String) ds.getProperties().get("headerline");
                if (header == null) {
                    header = "";
                }
            }
            if (header.isEmpty()) {
                String s = "SELECT * FROM " + prefix + escapedTableName + " WHERE 1 = 0";
                LOG.info("getColumns[ " + s + "]");
                rs = stmt.executeQuery(s);
            }
        } catch (SQLException e) {
            e.printStackTrace();
            throw new MalformedTableNameException(e);
        }

        if (header.isEmpty()) {
            ResultSetMetaData rsmd = rs.getMetaData();
            int columnCount = rsmd.getColumnCount();
            for (int col = 1; col <= columnCount; col++) {
                String name = rsmd.getColumnLabel(col);
                int length = rsmd.getColumnDisplaySize(col);
                int precision = rsmd.getPrecision(col);
                int scale = rsmd.getScale(col);
                boolean isPrimaryKey = false;
                boolean isForeignKey = false;
                boolean isIndex = false;
                if (keyColumns.contains(name)) {
                    isPrimaryKey = true;
                }
                DBForeignColumnInfo fkInfo = null;
                if (foreignColumns.contains(name)) {
                    isForeignKey = true;
                    fkInfo = fkMap.get(name);
                }
                if (indexes.contains(name)) {
                    isIndex = true;
                }
                DBColumn column = new DBColumn(schemaName, table, name, rsmd.getColumnTypeName(col),
                        isPrimaryKey, isForeignKey, isIndex, fkInfo, length, precision, scale);
                columns.add(column);
            }
        } else {
            String columnTypes = (String) ds.getProperties().get("columnTypes");
            String[] names = header.split(",");
            String[] types = new String[names.length];
            for (int i = 0; i < types.length; i++) {
                types[i] = "String";
            }
            if ((columnTypes != null) && !columnTypes.isEmpty()) {
                types = columnTypes.split(",");
            }
            for (int i = 0; i < names.length; i++) {
                DBColumn column = new DBColumn(schemaName, table, names[i], types[i], false, false, false,
                        null, 20, 0, 0);
                columns.add(column);
            }
        }
    } catch (SQLException e) {
        LOG.error(e.getMessage(), e);
        e.printStackTrace();
        throw new NextSqlException("SQL Exception: " + e.getMessage(), e);
    } finally {
        closeResultSet(rs);
        closeStatement(stmt);
    }

    return columns;
}
From source file:com.kylinolap.rest.service.QueryService.java
/**
 * @param sql
 * @param project
 * @return
 * @throws Exception
 */
private SQLResponse execute(String sql, SQLRequest sqlRequest) throws Exception {
    Connection conn = null;
    Statement stat = null;
    ResultSet resultSet = null;

    List<List<String>> results = new LinkedList<List<String>>();
    List<SelectedColumnMeta> columnMetas = new LinkedList<SelectedColumnMeta>();

    try {
        conn = getOLAPDataSource(sqlRequest.getProject()).getConnection();

        if (sqlRequest instanceof PrepareSqlRequest) {
            PreparedStatement preparedState = conn.prepareStatement(sql);

            for (int i = 0; i < ((PrepareSqlRequest) sqlRequest).getParams().length; i++) {
                setParam(preparedState, i + 1, ((PrepareSqlRequest) sqlRequest).getParams()[i]);
            }

            resultSet = preparedState.executeQuery();
        } else {
            stat = conn.createStatement();
            resultSet = stat.executeQuery(sql);
        }

        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i),
                    metaData.isSigned(i), metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i),
                    metaData.getColumnName(i), metaData.getSchemaName(i), metaData.getCatalogName(i),
                    metaData.getTableName(i), metaData.getPrecision(i), metaData.getScale(i),
                    metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i),
                    metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
        }

        List<String> oneRow = new LinkedList<String>();

        // fill in results
        while (resultSet.next()) {
            for (int i = 0; i < columnCount; i++) {
                oneRow.add((resultSet.getString(i + 1)));
            }

            results.add(new LinkedList<String>(oneRow));
            oneRow.clear();
        }
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    } finally {
        close(resultSet, stat, conn);
    }

    boolean isPartialResult = false;
    String cube = "";
    long totalScanCount = 0;
    for (OLAPContext ctx : OLAPContext.getThreadLocalContexts()) {
        isPartialResult |= ctx.storageContext.isPartialResultReturned();
        cube = ctx.cubeInstance.getName();
        totalScanCount += ctx.storageContext.getTotalScanCount();
    }

    SQLResponse response = new SQLResponse(columnMetas, results, cube, 0, false, null, isPartialResult);
    response.setTotalScanCount(totalScanCount);

    return response;
}
From source file:mondrian.spi.impl.JdbcDialectImpl.java
void logTypeInfo(ResultSetMetaData metaData, int columnIndex, SqlStatement.Type internalType)
        throws SQLException {
    if (LOGGER.isDebugEnabled()) {
        final int columnType = metaData.getColumnType(columnIndex + 1);
        final int precision = metaData.getPrecision(columnIndex + 1);
        final int scale = metaData.getScale(columnIndex + 1);
        final String columnName = metaData.getColumnName(columnIndex + 1);
        LOGGER.debug("JdbcDialectImpl.getType " + "Dialect- " + this.getDatabaseProduct() + ", Column-"
                + columnName + " is of internal type " + internalType + ". JDBC type was " + columnType
                + ". Column precision=" + precision + ". Column scale=" + scale);
    }
}