Usage examples for `java.sql.DatabaseMetaData.getColumns`.
Method signature:

ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern)
        throws SQLException;
From source file:org.wso2.carbon.dataservices.core.script.DSGenerator.java
/**
 * Resolves the column metadata for the given table, falling back to an
 * unrestricted (null) catalog when a lookup scoped to {@code dbName}
 * yields no rows.
 *
 * @param metaData          database metadata handle to query
 * @param schema            schema pattern passed through to getColumns
 * @param dbName            catalog name tried first
 * @param tableName         table name pattern
 * @param columnNamePattern column name pattern
 * @return a freshly positioned ResultSet of column metadata (caller must close)
 * @throws SQLException if the metadata lookup fails
 */
private ResultSet getColumnNames(DatabaseMetaData metaData, String schema, String dbName, String tableName,
        String columnNamePattern) throws SQLException {
    // Probe with the database name used as the catalog.
    ResultSet probe = metaData.getColumns(dbName, schema, tableName, columnNamePattern);
    boolean found;
    try {
        found = probe.next();
    } finally {
        // BUG FIX: the probe cursor was consumed by next() and then leaked in
        // the original code; close it and hand the caller a fresh result set.
        probe.close();
    }
    if (found) {
        return metaData.getColumns(dbName, schema, tableName, columnNamePattern);
    }
    // No rows under that catalog — retry without a catalog restriction.
    return metaData.getColumns(null, schema, tableName, columnNamePattern);
}
From source file:com.streamsets.pipeline.lib.jdbc.JdbcUtil.java
/** * Wrapper for {@link java.sql.DatabaseMetaData#getColumns(String, String, String, String)} that detects * the format of the supplied tableName. * * @param connection An open JDBC connection * @param tableName table name that is optionally fully qualified with a schema in the form schema.tableName * @return ResultSet containing the column metadata * * @throws SQLException//from w ww .ja va2s . c om */ public ResultSet getColumnMetadata(Connection connection, String schema, String tableName) throws SQLException { DatabaseMetaData metadata = connection.getMetaData(); return metadata.getColumns(connection.getCatalog(), schema, tableName, null); // Get all columns for this table }
From source file:org.msec.LogQuery.java
public void showRecords() throws SQLException { String tableName = tableNamePrefix + dayFormatter.format(new Date()); DatabaseMetaData md = conn.getMetaData(); ResultSet rs = md.getTables(null, null, tableName, null); if (rs.next()) { System.out.println("table " + tableName + " exist"); } else {/*from w w w . ja v a2s. c o m*/ System.out.println("table " + tableName + " not exist"); return; } String columnList = ""; rs = md.getColumns(null, null, tableName, null); while (rs.next()) { columnList += rs.getString("COLUMN_NAME"); if (!rs.isLast()) columnList += ","; } System.out.println(columnList); String sql = "select * from " + tableName; //SQL rs = stmt.executeQuery(sql);// System.out.println("ip" + "\t" + "level" + "\t" + "rpcname" + "\t\t" + "time" + "\t" + "content"); while (rs.next()) { System.out.print(rs.getString(1) + "\t"); System.out.print(rs.getString(2) + "\t"); System.out.print(rs.getString(3) + "\t"); System.out.print(rs.getString(4) + "\t"); System.out.print(rs.getString(5) + "\t"); System.out.println(); } rs.close(); }
From source file:kr.co.bitnine.octopus.frame.SessionServerTest.java
/**
 * Verifies that a user's SELECT privilege on a data source survives an
 * {@code ALTER SYSTEM UPDATE DATASOURCE}, and dumps the column metadata of the
 * "employee" table before and after the update.
 */
@Test
public void testUpdateDataSource2() throws Exception {
    Connection conn = getConnection("octopus", "bitnine");
    Statement stmt = conn.createStatement();
    stmt.execute("CREATE USER \"yjchoi\" IDENTIFIED BY 'piggy'");
    stmt.execute("GRANT SELECT ON \"" + dataMemDb.name + "\".\"__DEFAULT\" TO \"yjchoi\"");

    Connection conn2 = getConnection("yjchoi", "piggy");
    // BUG FIX: the privilege check must run on yjchoi's connection (conn2);
    // the original created this statement from the admin connection (conn),
    // so the GRANT above was never actually exercised.
    Statement stmt2 = conn2.createStatement();
    int rows = checkNumRows(stmt2, "employee");
    assertEquals(10, rows); // JUnit convention: expected value first

    dumpEmployeeColumns(conn.getMetaData());

    stmt.execute("ALTER SYSTEM UPDATE DATASOURCE \"" + dataMemDb.name + '"');

    dumpEmployeeColumns(conn.getMetaData());

    /* privileges should be preserved after update dataSource */
    rows = checkNumRows(stmt2, "employee");
    assertEquals(10, rows);

    stmt2.close();
    conn2.close();
    stmt.close();
    conn.close();
}

/**
 * Dumps catalog/schema/table/column/remarks metadata of the "employee" table
 * in {@code dataMemDb} to standard output. Extracted from the duplicated
 * before/after loops in testUpdateDataSource2.
 */
private void dumpEmployeeColumns(DatabaseMetaData metaData) throws SQLException {
    System.out.println("* Columns");
    ResultSet rs = metaData.getColumns(dataMemDb.name, "%DEFAULT", "employee", "%");
    try {
        while (rs.next()) {
            System.out.println(" " + rs.getString("TABLE_CAT") + ", " + rs.getString("TABLE_SCHEM") + ", "
                    + rs.getString("TABLE_NAME") + ", " + rs.getString("COLUMN_NAME") + ", "
                    + rs.getString("REMARKS"));
        }
    } finally {
        rs.close();
    }
}
From source file:org.springframework.jdbc.core.metadata.GenericTableMetaDataProvider.java
/**
 * Method supporting the metadata processing for a table's columns.
 * Looks up all columns of the given table via JDBC metadata and records a
 * {@code TableParameterMetaData} entry (name, SQL type, nullability) for each
 * into {@code this.tableParameterMetaData}. Lookup failures are logged as a
 * warning and leave the accumulated metadata as-is.
 *
 * @param databaseMetaData JDBC metadata handle for the current connection
 * @param tmd table descriptor whose catalog/schema/table names are normalized
 *            via the provider's *NameToUse hooks before the lookup
 */
private void processTableColumns(DatabaseMetaData databaseMetaData, TableMetaData tmd) {
    ResultSet tableColumns = null;
    String metaDataCatalogName = metaDataCatalogNameToUse(tmd.getCatalogName());
    String metaDataSchemaName = metaDataSchemaNameToUse(tmd.getSchemaName());
    String metaDataTableName = tableNameToUse(tmd.getTableName());
    if (logger.isDebugEnabled()) {
        logger.debug("Retrieving metadata for " + metaDataCatalogName + '/' + metaDataSchemaName + '/'
                + metaDataTableName);
    }
    try {
        tableColumns = databaseMetaData.getColumns(metaDataCatalogName, metaDataSchemaName, metaDataTableName,
                null);
        while (tableColumns.next()) {
            String columnName = tableColumns.getString("COLUMN_NAME");
            int dataType = tableColumns.getInt("DATA_TYPE");
            if (dataType == Types.DECIMAL) {
                String typeName = tableColumns.getString("TYPE_NAME");
                int decimalDigits = tableColumns.getInt("DECIMAL_DIGITS");
                // Override a DECIMAL data type for no-decimal numerics
                // (this is for better Oracle support where there have been issues
                // using DECIMAL for certain inserts (see SPR-6912))
                if ("NUMBER".equals(typeName) && decimalDigits == 0) {
                    dataType = Types.NUMERIC;
                    if (logger.isDebugEnabled()) {
                        logger.debug("Overriding metadata: " + columnName + " now NUMERIC instead of DECIMAL");
                    }
                }
            }
            // NOTE(review): JDBC reports NULLABLE as an int (columnNoNulls=0,
            // columnNullable=1, columnNullableUnknown=2); getBoolean presumably
            // maps any non-zero value to true, so "unknown" reads as nullable —
            // confirm this is the intended interpretation.
            boolean nullable = tableColumns.getBoolean("NULLABLE");
            TableParameterMetaData meta = new TableParameterMetaData(columnName, dataType, nullable);
            this.tableParameterMetaData.add(meta);
            if (logger.isDebugEnabled()) {
                logger.debug("Retrieved metadata: " + meta.getParameterName() + " " + meta.getSqlType() + " "
                        + meta.isNullable());
            }
        }
    } catch (SQLException ex) {
        // Metadata retrieval is best-effort: warn and continue with whatever
        // column metadata was gathered so far.
        if (logger.isWarnEnabled()) {
            logger.warn("Error while retrieving metadata for table columns: " + ex.getMessage());
        }
    } finally {
        JdbcUtils.closeResultSet(tableColumns);
    }
}
From source file:madgik.exareme.master.queryProcessor.analyzer.stat.ExternalStat.java
/**
 * Extracts statistics for the single table named by the {@code tblName} field:
 * row count, primary-key column (first exported key, else "DEFAULT_KEY"), and
 * per-column size/type/min/max/distinct-value stats. Results are stored into
 * (and returned as) the {@code schema} map keyed by table name.
 *
 * @return the {@code schema} map with an entry added for {@code tblName}
 * @throws Exception propagated from JDBC calls outside the per-column loop;
 *                   per-column failures are caught and logged instead
 */
@Override
public Map<String, Table> extractStats() throws Exception {
    DatabaseMetaData dbmd = con.getMetaData(); // database metadata object
    // listing tables and columns
    String catalog = null;
    String schemaPattern = sch;
    String tableNamePattern = tblName;
    String columnNamePattern = "%";
    if (con.getClass().getName().contains("postgresql")) {
        // tableNamePattern="\""+tableNamePattern+"\"";
        schemaPattern = "public";
    }
    // ResultSet resultTables = dbmd.getTables(catalog, "public",
    // tableNamePattern, types);
    ResultSet resultColumns = dbmd.getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern);
    // Quote/qualify the table name for the SQL text built below — AFTER the
    // metadata lookup, which needs the unquoted pattern.
    if (con.getClass().getName().contains("postgresql")) {
        tableNamePattern = "\"" + tableNamePattern + "\"";
    } else if (con.getClass().getName().contains("oracle")) {
        tableNamePattern = schemaPattern + "." + tableNamePattern;
    }
    log.debug("Starting extracting stats");
    // while (resultTables.next()) {
    Map<String, Column> columnMap = new HashMap<String, Column>();
    // StringEscapeUtils.escapeJava(resultTables.getString(3));
    log.debug("Analyzing table " + tblName);
    int toupleSize = 0; // running tuple size, in bytes
    // tableNamePattern = tableName;
    // NOTE(review): this is the column count of the METADATA result set
    // (always the fixed getColumns layout), not the table's column count —
    // confirm downstream consumers expect that.
    int columnCount = resultColumns.getMetaData().getColumnCount();
    // Row count of the target table.
    Statement st = con.createStatement();
    ResultSet rs = st.executeQuery("select count(*) from " + tableNamePattern);
    int count = 0;
    if (rs.next()) {
        count = rs.getInt(1);
    } else {
        log.error("could not get count for table " + tableNamePattern);
    }
    rs.close();
    st.close();
    // Take the first exported-key column as the primary key, if any.
    ResultSet pkrs = dbmd.getExportedKeys("", "", tblName);
    String pkey = "DEFAULT_KEY";
    while (pkrs.next()) {
        pkey = pkrs.getString("PKCOLUMN_NAME");
        break;
    }
    pkrs.close();
    if (count == 0) {
        // Empty table: record it with no column stats and return early.
        log.debug("Empty table");
        Table t = new Table(tblName, columnCount, toupleSize, columnMap, count, pkey);
        schema.put(tblName, t);
        return schema;
    }
    while (resultColumns.next()) {
        // Column 4 of the getColumns result set is COLUMN_NAME.
        String columnName = StringEscapeUtils.escapeJava(resultColumns.getString(4));
        try {
            String colNamePattern = columnName;
            if (con.getClass().getName().contains("postgresql")) {
                colNamePattern = "\"" + columnName + "\"";
            }
            // Column 5 is DATA_TYPE (java.sql.Types code).
            int columnType = resultColumns.getInt(5);
            // computing column's size in bytes
            int columnSize = computeColumnSize(colNamePattern, columnType, tableNamePattern);
            toupleSize += columnSize;
            // execute queries for numberOfDiffValues, minVal, maxVal
            // Map<String, Integer> diffValFreqMap = new HashMap<String,
            // Integer>();
            // computing column's min and max values
            String minVal = "0";
            String maxVal = "0";
            if (columnType != Types.BLOB) {
                MinMax mm = computeMinMax(tableNamePattern, colNamePattern);
                minVal = mm.getMin();
                maxVal = mm.getMax();
            }
            Map<String, Integer> diffValFreqMap = new HashMap<String, Integer>(); // only for equidepth!
            // for (ValFreq k : freqs) {
            // diffValFreqMap.put(k.getVal(), k.getFreq());
            // }
            // /add min max diff vals in the sampling values
            int minOcc = 1;
            int maxOcc = 1;
            int diffVals = 0;
            // Hard-coded off: the equidepth-histogram path below is currently
            // dead code; distinct values come from computeDiffVals instead.
            boolean equidepth = false;
            if (equidepth) {
                // diffValFreqMap is used only in equidepth, do not compute it
                // if we have primitive
                diffValFreqMap = computeDistinctValuesFrequency(tableNamePattern, colNamePattern);
                String minValChar = minVal;
                String maxValChar = maxVal;
                // Quote character-like and date values for the SQL built by
                // computeValOccurences.
                if (columnType == Types.VARCHAR || columnType == Types.CHAR
                        || columnType == Types.LONGNVARCHAR || columnType == Types.DATE) {
                    minValChar = "\'" + minVal + "\'";
                    maxValChar = "\'" + maxVal + "\'";
                }
                try {
                    minOcc = computeValOccurences(tableNamePattern, colNamePattern, minValChar);
                } catch (Exception e) {
                    log.error("Could not compute value occurences for column:" + colNamePattern
                            + " and value:" + minValChar);
                }
                if (equidepth && !diffValFreqMap.containsKey(minVal))
                    diffValFreqMap.put(minVal, minOcc);
                try {
                    maxOcc = computeValOccurences(tableNamePattern, colNamePattern, maxValChar);
                } catch (Exception e) {
                    log.error("Could not compute value occurences for column:" + colNamePattern
                            + " and value:" + maxValChar);
                }
                // NOTE(review): asymmetric with the minVal branch above — this
                // OVERWRITES maxVal's frequency when present instead of adding
                // it when absent; looks inverted, confirm intent.
                if (diffValFreqMap.containsKey(maxVal))
                    diffValFreqMap.put(maxVal, maxOcc);
                diffVals = diffValFreqMap.size();
            } else {
                diffVals = computeDiffVals(tableNamePattern, colNamePattern, columnType);
            }
            if (diffVals == 0) {
                // all values are null!
                continue;
            }
            Column c = new Column(columnName, columnType, columnSize, diffVals, minVal, maxVal,
                    diffValFreqMap);
            columnMap.put(columnName, c);
        } catch (Exception ex) {
            // Best-effort per column: log and move on to the next one.
            log.error("could not analyze column " + columnName + ":" + ex.getMessage());
        }
    }
    Table t = new Table(tblName, columnCount, toupleSize, columnMap, count, pkey);
    schema.put(tblName, t);
    // }
    // resultTables.close();
    resultColumns.close();
    return schema;
}
From source file:madgik.exareme.master.queryProcessor.analyzer.fanalyzer.FederatedAnalyzer.java
private Map<String, Set<String>> specifyTables() throws Exception { Map<String, Set<String>> info = new HashMap<String, Set<String>>(); DatabaseMetaData dbmd = con.getMetaData(); // dtabase metadata object // listing tables and columns String catalog = null;//from w w w . j av a 2s. c o m String schemaPattern = null; String tableNamePattern = null; String[] types = null; String columnNamePattern = null; ResultSet resultTables = dbmd.getTables(catalog, schemaPattern, tableNamePattern, types); while (resultTables.next()) { String tableName = StringEscapeUtils.escapeJava(resultTables.getString(3)); if (tableName.contains("_sample")) break; if (tableName.equals("sqlite_stat1")) continue; tableNamePattern = tableName; ResultSet resultColumns = dbmd.getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern); Set<String> columns = new HashSet<String>(); while (resultColumns.next()) { String columnName = StringEscapeUtils.escapeJava(resultColumns.getString(4)); columns.add(columnName); } info.put(tableName, columns); } return info; }
From source file:org.apache.hadoop.hive.ql.stats.jdbc.JDBCStatsPublisher.java
/**
 * Initialize the intermediate stats DB for the first time it is running (e.g.,
 * creating tables.)
 *
 * Connects to the configured stats database, creates the stats table when it
 * does not exist, and otherwise best-effort widens the id column if it is
 * narrower than the configured size. The connection, statement and result set
 * opened here are always released before returning.
 *
 * @param hconf Hive configuration supplying connection string and JDBC driver
 * @return true on success (including a best-effort failed column upgrade),
 *         false when connection/driver setup fails
 */
@Override
public boolean init(Configuration hconf) {
    Statement stmt = null;
    ResultSet rs = null;
    try {
        this.hiveconf = hconf;
        connectionString = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVESTATSDBCONNECTIONSTRING);
        String driver = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVESTATSJDBCDRIVER);
        // Load and instantiate the JDBC driver so DriverManager can route to it.
        JavaUtils.loadClass(driver).newInstance();
        // setLoginTimeout is JVM-global; serialize against concurrent users.
        synchronized (DriverManager.class) {
            DriverManager.setLoginTimeout(timeout);
            conn = DriverManager.getConnection(connectionString);
            stmt = conn.createStatement();
            Utilities.setQueryTimeout(stmt, timeout);
            // TODO: why is this not done using Hive db scripts?
            // Check if the table exists
            DatabaseMetaData dbm = conn.getMetaData();
            String tableName = JDBCStatsUtils.getStatTableName();
            rs = dbm.getTables(null, null, tableName, null);
            boolean tblExists = rs.next();
            if (!tblExists) { // Table does not exist, create it
                String createTable = JDBCStatsUtils.getCreate("");
                stmt.executeUpdate(createTable);
            } else {
                // Upgrade column name to allow for longer paths.
                String idColName = JDBCStatsUtils.getIdColumnName();
                int colSize = -1;
                try {
                    // Re-point rs at the id column's metadata; close the
                    // getTables cursor first.
                    rs.close();
                    rs = dbm.getColumns(null, null, tableName, idColName);
                    if (rs.next()) {
                        colSize = rs.getInt("COLUMN_SIZE");
                        if (colSize < JDBCStatsSetupConstants.ID_COLUMN_VARCHAR_SIZE) {
                            String alterTable = JDBCStatsUtils.getAlterIdColumn();
                            stmt.executeUpdate(alterTable);
                        }
                    } else {
                        LOG.warn("Failed to update " + idColName + " - column not found");
                    }
                } catch (Throwable t) {
                    // Column upgrade is best-effort: warn but still report success.
                    LOG.warn("Failed to update " + idColName + " (size "
                            + (colSize == -1 ? "unknown" : colSize) + ")", t);
                }
            }
        }
    } catch (Exception e) {
        LOG.error("Error during JDBC initialization. ", e);
        return false;
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                // do nothing
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                // do nothing
            }
        }
        // Init only validates/creates the schema; the connection is reopened on use.
        closeConnection();
    }
    return true;
}
From source file:io.bibleget.BibleGetDB.java
private void listColNamesTypes(DatabaseMetaData dbMeta, ResultSet rs) { //System.out.println("After Table Creation: Table "+rs.getString("TABLE_NAME")+" exists !!"); ResultSet cols;/*from w w w . ja v a2 s. c o m*/ try { cols = dbMeta.getColumns(null, null, rs.getString("TABLE_NAME"), null); while (cols.next()) { //System.out.println(cols.getString("COLUMN_NAME")); colNames.add(cols.getString("COLUMN_NAME")); int dType = cols.getInt("DATA_TYPE"); switch (dType) { case Types.VARCHAR: colDataTypes.add(String.class); break; case Types.INTEGER: colDataTypes.add(Integer.class); break; case Types.FLOAT: colDataTypes.add(Float.class); break; case Types.DOUBLE: case Types.REAL: colDataTypes.add(Double.class); break; case Types.DATE: case Types.TIME: case Types.TIMESTAMP: colDataTypes.add(java.sql.Date.class); break; case Types.BOOLEAN: colDataTypes.add(Boolean.class); break; default: colDataTypes.add(String.class); break; } } } catch (SQLException ex) { Logger.getLogger(BibleGetDB.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:com.nextep.designer.sqlgen.generic.impl.JDBCCapturer.java
/** * Returns an array of the columns of the specified database object found in * the data source pointed to by the connection object provided by the * specified <code>context</code> and notifies the specified * <code>monitor</code> while capturing. * /* w w w . ja v a2 s . c o m*/ * @param context * a {@link ICaptureContext} to get the connection object to the * data source and the list of previously captured objects * @param monitor * the {@link IProgressMonitor} to notify while capturing objects * @param dbObj * a {@link IDatabaseObject} representing the database object for * which columns must be captured * @return an array of {@link IBasicColumn} objects, an empty array if no * columns could be found */ private IBasicColumn[] getColumns(ICaptureContext context, IProgressMonitor monitor, IDatabaseObject<?> dbObj) { ColumnsSorter colSorter = new ColumnsSorter(); final String dbObjName = dbObj.getName(); try { final DatabaseMetaData md = ((Connection) context.getConnectionObject()).getMetaData(); ResultSet rset = null; if (md != null) { rset = md.getColumns(getObjectOrContextCatalog(context, dbObj), getObjectOrContextSchema(context, dbObj), dbObjName, null); CaptureHelper.updateMonitor(monitor, getCounter(), 1, 1); } if (rset != null) { try { int pos = 0; while (rset.next()) { final IBasicColumn column = getColumnFromResultSet(context, rset, dbObjName); colSorter.addColumn(column, pos++); CaptureHelper.updateMonitor(monitor, getCounter(), 5, 1); } } finally { CaptureHelper.safeClose(rset, null); } } } catch (SQLException sqle) { LOGGER.error("Unable to fetch columns for table or view [" + dbObjName + "] from " + getConnectionVendorName(context) + " server: " + sqle.getMessage(), sqle); } return colSorter.getColumnsSortedArray(); }