List of usage examples for java.sql.DatabaseMetaData.getSchemas()
ResultSet getSchemas() throws SQLException;
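getSchemas() returns a ResultSet with one row per schema accessible in the current database, ordered by TABLE_CATALOG and TABLE_SCHEM. Each row carries a TABLE_SCHEM column (the schema name) and a TABLE_CATALOG column (the catalog it belongs to, which may be null). The minimal sketch below iterates that result set; the H2 in-memory URL and credentials are placeholder assumptions for illustration and are not taken from any of the examples that follow.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ListSchemas {
    public static void main(String[] args) throws SQLException {
        // Placeholder connection details; substitute your own driver URL and credentials.
        String url = "jdbc:h2:mem:demo";
        try (Connection conn = DriverManager.getConnection(url, "sa", "");
             ResultSet rs = conn.getMetaData().getSchemas()) {
            while (rs.next()) {
                // TABLE_SCHEM is the schema name; TABLE_CATALOG may be null.
                System.out.println(rs.getString("TABLE_SCHEM") + " / " + rs.getString("TABLE_CATALOG"));
            }
        }
    }
}

JDBC 4.0 also added a filtered overload, getSchemas(String catalog, String schemaPattern), but the examples below all use the no-argument form and read the TABLE_SCHEM column by name, which works across driver versions.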
From source file: jef.database.DbMetaData.java

/**
 * Returns the names of all schemas in the database.
 *
 * @return the schema names
 * @throws SQLException
 */
public String[] getSchemas() throws SQLException {
    Connection conn = getConnection(false);
    DatabaseMetaData databaseMetaData = conn.getMetaData();
    ResultSet rs = databaseMetaData.getSchemas();
    try {
        List<String> list = ResultSets.toStringList(rs, "TABLE_SCHEM", 9999, this.getProfile());
        return list.toArray(new String[list.size()]);
    } finally {
        DbUtils.close(rs);
        releaseConnection(conn);
    }
}
From source file: org.alfresco.util.DatabaseMetaDataHelper.java

/**
 * Tries to determine the schema name from the DatabaseMetaData obtained from the Connection.
 *
 * @param connection A database connection
 * @return the schema name, or null if it could not be determined
 */
private String getSchemaFromConnection(Connection connection) {
    if (connection == null) {
        logger.error("Unable to determine schema due to null connection.");
        return null;
    }
    ResultSet schemas = null;
    try {
        final DatabaseMetaData dbmd = connection.getMetaData();

        // Assume that if there are schemas, we want the one named after the connection user
        // or the one called "dbo" (MS SQL hack)
        String schema = null;
        schemas = dbmd.getSchemas();
        while (schemas.next()) {
            final String thisSchema = schemas.getString("TABLE_SCHEM");
            if (thisSchema.equals(dbmd.getUserName()) || thisSchema.equalsIgnoreCase("dbo")) {
                schema = thisSchema;
                break;
            }
        }
        return schema;
    } catch (Exception e) {
        logger.error("Unable to determine current schema.", e);
    } finally {
        if (schemas != null) {
            try {
                schemas.close();
            } catch (Exception e) {
                // noop
            }
        }
    }
    return null;
}
From source file: org.apache.bigtop.itest.hive.TestJdbc.java

/**
 * Test simple DatabaseMetaData calls. getColumns is tested elsewhere, as we need to call
 * that on a valid table. Same with getFunctions.
 *
 * @throws SQLException
 */
@Test
public void databaseMetaDataCalls() throws SQLException {
    DatabaseMetaData md = conn.getMetaData();

    boolean boolrc = md.allTablesAreSelectable();
    LOG.debug("All tables are selectable? " + boolrc);

    String strrc = md.getCatalogSeparator();
    LOG.debug("Catalog separator " + strrc);
    strrc = md.getCatalogTerm();
    LOG.debug("Catalog term " + strrc);

    ResultSet rs = md.getCatalogs();
    while (rs.next()) {
        strrc = rs.getString(1);
        LOG.debug("Found catalog " + strrc);
    }

    Connection c = md.getConnection();

    int intrc = md.getDatabaseMajorVersion();
    LOG.debug("DB major version is " + intrc);
    intrc = md.getDatabaseMinorVersion();
    LOG.debug("DB minor version is " + intrc);
    strrc = md.getDatabaseProductName();
    LOG.debug("DB product name is " + strrc);
    strrc = md.getDatabaseProductVersion();
    LOG.debug("DB product version is " + strrc);
    intrc = md.getDefaultTransactionIsolation();
    LOG.debug("Default transaction isolation is " + intrc);
    intrc = md.getDriverMajorVersion();
    LOG.debug("Driver major version is " + intrc);
    intrc = md.getDriverMinorVersion();
    LOG.debug("Driver minor version is " + intrc);
    strrc = md.getDriverName();
    LOG.debug("Driver name is " + strrc);
    strrc = md.getDriverVersion();
    LOG.debug("Driver version is " + strrc);
    strrc = md.getExtraNameCharacters();
    LOG.debug("Extra name characters is " + strrc);
    strrc = md.getIdentifierQuoteString();
    LOG.debug("Identifier quote string is " + strrc);

    // In Hive 1.2 this always returns an empty RS
    rs = md.getImportedKeys("a", "b", "d");
    // In Hive 1.2 this always returns an empty RS
    rs = md.getIndexInfo("a", "b", "d", true, true);

    intrc = md.getJDBCMajorVersion();
    LOG.debug("JDBC major version is " + intrc);
    intrc = md.getJDBCMinorVersion();
    LOG.debug("JDBC minor version is " + intrc);
    intrc = md.getMaxColumnNameLength();
    LOG.debug("Maximum column name length is " + intrc);
    strrc = md.getNumericFunctions();
    LOG.debug("Numeric functions are " + strrc);

    // In Hive 1.2 this always returns an empty RS
    rs = md.getPrimaryKeys("a", "b", "d");
    // In Hive 1.2 this always returns an empty RS
    rs = md.getProcedureColumns("a", "b", "d", "e");

    strrc = md.getProcedureTerm();
    LOG.debug("Procedures are called " + strrc);

    // In Hive 1.2 this always returns an empty RS
    rs = md.getProcedures("a", "b", "d");

    strrc = md.getSchemaTerm();
    LOG.debug("Schemas are called " + strrc);

    rs = md.getSchemas();
    while (rs.next()) {
        strrc = rs.getString(1);
        LOG.debug("Found schema " + strrc);
    }

    strrc = md.getSearchStringEscape();
    LOG.debug("Search string escape is " + strrc);
    strrc = md.getStringFunctions();
    LOG.debug("String functions are " + strrc);
    strrc = md.getSystemFunctions();
    LOG.debug("System functions are " + strrc);

    rs = md.getTableTypes();
    while (rs.next()) {
        strrc = rs.getString(1);
        LOG.debug("Found table type " + strrc);
    }

    strrc = md.getTimeDateFunctions();
    LOG.debug("Time/date functions are " + strrc);

    rs = md.getTypeInfo();
    while (rs.next()) {
        strrc = rs.getString(1);
        LOG.debug("Found type " + strrc);
    }

    // In Hive 1.2 this always returns an empty RS
    rs = md.getUDTs("a", "b", "d", null);

    boolrc = md.supportsAlterTableWithAddColumn();
    LOG.debug("Supports alter table with add column? " + boolrc);
    boolrc = md.supportsAlterTableWithDropColumn();
    LOG.debug("Supports alter table with drop column? " + boolrc);
    boolrc = md.supportsBatchUpdates();
    LOG.debug("Supports batch updates? " + boolrc);
    boolrc = md.supportsCatalogsInDataManipulation();
    LOG.debug("Supports catalogs in data manipulation? " + boolrc);
    boolrc = md.supportsCatalogsInIndexDefinitions();
    LOG.debug("Supports catalogs in index definition? " + boolrc);
    boolrc = md.supportsCatalogsInPrivilegeDefinitions();
    LOG.debug("Supports catalogs in privilege definition? " + boolrc);
    boolrc = md.supportsCatalogsInProcedureCalls();
    LOG.debug("Supports catalogs in procedure calls? " + boolrc);
    boolrc = md.supportsCatalogsInTableDefinitions();
    LOG.debug("Supports catalogs in table definition? " + boolrc);
    boolrc = md.supportsColumnAliasing();
    LOG.debug("Supports column aliasing? " + boolrc);
    boolrc = md.supportsFullOuterJoins();
    LOG.debug("Supports full outer joins? " + boolrc);
    boolrc = md.supportsGroupBy();
    LOG.debug("Supports group by? " + boolrc);
    boolrc = md.supportsLimitedOuterJoins();
    LOG.debug("Supports limited outer joins? " + boolrc);
    boolrc = md.supportsMultipleResultSets();
    LOG.debug("Supports multiple result sets? " + boolrc);
    boolrc = md.supportsNonNullableColumns();
    LOG.debug("Supports non-nullable columns? " + boolrc);
    boolrc = md.supportsOuterJoins();
    LOG.debug("Supports outer joins? " + boolrc);
    boolrc = md.supportsPositionedDelete();
    LOG.debug("Supports positioned delete? " + boolrc);
    boolrc = md.supportsPositionedUpdate();
    LOG.debug("Supports positioned update? " + boolrc);
    boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
    LOG.debug("Supports result set holdability? " + boolrc);
    boolrc = md.supportsResultSetType(ResultSet.HOLD_CURSORS_OVER_COMMIT);
    LOG.debug("Supports result set type? " + boolrc);
    boolrc = md.supportsSavepoints();
    LOG.debug("Supports savepoints? " + boolrc);
    boolrc = md.supportsSchemasInDataManipulation();
    LOG.debug("Supports schemas in data manipulation? " + boolrc);
    boolrc = md.supportsSchemasInIndexDefinitions();
    LOG.debug("Supports schemas in index definitions? " + boolrc);
    boolrc = md.supportsSchemasInPrivilegeDefinitions();
    LOG.debug("Supports schemas in privilege definitions? " + boolrc);
    boolrc = md.supportsSchemasInProcedureCalls();
    LOG.debug("Supports schemas in procedure calls? " + boolrc);
    boolrc = md.supportsSchemasInTableDefinitions();
    LOG.debug("Supports schemas in table definitions? " + boolrc);
    boolrc = md.supportsSelectForUpdate();
    LOG.debug("Supports select for update? " + boolrc);
    boolrc = md.supportsStoredProcedures();
    LOG.debug("Supports stored procedures? " + boolrc);
    boolrc = md.supportsTransactions();
    LOG.debug("Supports transactions? " + boolrc);
    boolrc = md.supportsUnion();
    LOG.debug("Supports union? " + boolrc);
    boolrc = md.supportsUnionAll();
    LOG.debug("Supports union all? " + boolrc);
}
From source file: org.apache.cayenne.access.dbsync.ThrowOnPartialSchemaStrategy.java

@Override
protected void processSchemaUpdate(DataNode dataNode) throws SQLException {
    SchemaAnalyzer analyzer = new SchemaAnalyzer();
    List<String> schemas = new ArrayList<>();
    DatabaseMetaData md = null;
    try {
        try (Connection connection = dataNode.getDataSource().getConnection()) {
            md = connection.getMetaData();
            try (ResultSet rs = md.getSchemas()) {
                while (rs.next()) {
                    String schemaName = rs.getString(1);
                    schemas.add(schemaName);
                }
            }
        }
        analyzer.analyzeSchemas(schemas, md);
    } catch (Exception e) {
        LOGGER.debug("Exception analyzing schema, ignoring", e);
    }

    Collection<DbEntity> entities = dataNode.getEntityResolver().getDbEntities();

    boolean isIncluded = analyzer.compareTables(md, entities);

    if (isIncluded && analyzer.getErrorMessage() == null) {
        try {
            analyzer.compareColumns(md);
        } catch (SQLException e) {
            LOGGER.debug("Exception analyzing schema, ignoring", e);
        }
    }

    processSchemaUpdate(dataNode, analyzer.getTableNoInDB(), analyzer.getErrorMessage(), entities.size());
}
From source file: org.apache.ddlutils.task.DumpMetadataTask.java

/**
 * Dumps the catalogs and schemas of the database.
 *
 * @param xmlWriter The xml writer to write to
 * @param metaData  The database meta data
 */
private void dumpCatalogsAndSchemas(PrettyPrintingXmlWriter xmlWriter, final DatabaseMetaData metaData) {
    performResultSetXmlOperation(xmlWriter, "catalogs", new ResultSetXmlOperation() {
        public ResultSet getResultSet() throws SQLException {
            return metaData.getCatalogs();
        }

        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException {
            String catalogName = result.getString("TABLE_CAT");

            if ((catalogName != null) && (catalogName.length() > 0)) {
                xmlWriter.writeElementStart(null, "catalog");
                xmlWriter.writeAttribute(null, "name", catalogName);
                xmlWriter.writeElementEnd();
            }
        }

        public void handleError(SQLException ex) {
            log("Could not read the catalogs from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
        }
    });
    performResultSetXmlOperation(xmlWriter, "schemas", new ResultSetXmlOperation() {
        public ResultSet getResultSet() throws SQLException {
            return metaData.getSchemas();
        }

        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException {
            String schemaName = result.getString("TABLE_SCHEM");

            if ((schemaName != null) && (schemaName.length() > 0)) {
                xmlWriter.writeElementStart(null, "schema");
                xmlWriter.writeAttribute(null, "name", schemaName);
                xmlWriter.writeElementEnd();
            }
        }

        public void handleError(SQLException ex) {
            log("Could not read the schemas from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
        }
    });
}
From source file: org.apache.empire.db.codegen.CodeGenParser.java

private String getSchemata(DatabaseMetaData dbMeta) throws SQLException {
    String retVal = "";
    ResultSet rs = dbMeta.getSchemas();
    while (rs.next()) {
        retVal += rs.getString("TABLE_SCHEM") + ", ";
    }
    if (retVal.length() > 2)
        retVal = retVal.substring(0, retVal.length() - 2);
    return retVal;
}
From source file: org.apache.hive.beeline.QFileBeeLineClient.java

private Set<String> getDatabases() throws SQLException {
    Set<String> databases = new HashSet<String>();

    DatabaseMetaData metaData = beeLine.getDatabaseMetaData();

    // Get the databases
    try (ResultSet schemasResultSet = metaData.getSchemas()) {
        while (schemasResultSet.next()) {
            databases.add(schemasResultSet.getString("TABLE_SCHEM"));
        }
    }

    return databases;
}
From source file: org.apache.hive.jdbc.TestJdbcDriver2.java

@BeforeClass
public static void setUpBeforeClass() throws SQLException, ClassNotFoundException {
    Class.forName(driverName);
    Connection con1 = getConnection("default");
    System.setProperty(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "verbose");

    Statement stmt1 = con1.createStatement();
    assertNotNull("Statement is null", stmt1);

    stmt1.execute("set hive.support.concurrency = false");

    DatabaseMetaData metadata = con1.getMetaData();

    // Drop databases created by other test cases
    ResultSet databaseRes = metadata.getSchemas();

    while (databaseRes.next()) {
        String db = databaseRes.getString(1);
        if (!db.equals("default")) {
            System.err.println("Dropping database " + db);
            stmt1.execute("DROP DATABASE " + db + " CASCADE");
        }
    }
    stmt1.close();
    con1.close();
}
From source file: org.apache.hive.jdbc.TestJdbcDriver2.java

@Test
public void testParentReferences() throws Exception {
    /* Test parent references from Statement */
    Statement s = this.con.createStatement();
    ResultSet rs = s.executeQuery("SELECT * FROM " + dataTypeTableName);

    assertTrue(s.getConnection() == this.con);
    assertTrue(rs.getStatement() == s);

    rs.close();
    s.close();

    /* Test parent references from PreparedStatement */
    PreparedStatement ps = this.con.prepareStatement("SELECT * FROM " + dataTypeTableName);
    rs = ps.executeQuery();

    assertTrue(ps.getConnection() == this.con);
    assertTrue(rs.getStatement() == ps);

    rs.close();
    ps.close();

    /* Test DatabaseMetaData queries which do not have a parent Statement */
    DatabaseMetaData md = this.con.getMetaData();

    assertTrue(md.getConnection() == this.con);

    rs = md.getCatalogs();
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getColumns(null, null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getFunctions(null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getImportedKeys(null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getPrimaryKeys(null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getProcedureColumns(null, null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getProcedures(null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getSchemas();
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getTableTypes();
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getTables(null, null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getTypeInfo();
    assertNull(rs.getStatement());
    rs.close();
}
From source file: org.apache.kylin.jdbc.ITJDBCDriverTest.java

@Test
public void testMetadata1() throws Exception {
    // check the JDBC API here: http://docs.oracle.com/javase/7/docs/api/java/sql/DatabaseMetaData.html
    Connection conn = getConnection();

    // test getSchemas();
    List<String> schemaList = Lists.newArrayList();
    DatabaseMetaData dbMetadata = conn.getMetaData();
    ResultSet resultSet = dbMetadata.getSchemas();
    while (resultSet.next()) {
        String schema = resultSet.getString("TABLE_SCHEM");
        String catalog = resultSet.getString("TABLE_CATALOG");
        System.out.println("Get schema: schema=" + schema + ", catalog=" + catalog);
        schemaList.add(schema);
    }
    resultSet.close();
    Assert.assertTrue(schemaList.contains("DEFAULT"));
    Assert.assertTrue(schemaList.contains("EDW"));

    // test getCatalogs();
    resultSet = dbMetadata.getCatalogs();
    List<String> catalogList = Lists.newArrayList();
    while (resultSet.next()) {
        String catalog = resultSet.getString("TABLE_CAT");
        System.out.println("Get catalog: catalog=" + catalog);
        catalogList.add(catalog);
    }
    resultSet.close();
    Assert.assertTrue(catalogList.size() > 0 && catalogList.contains("defaultCatalog"));

    /**
     // Disable the test on getTableTypes() as it is not ready
     resultSet = dbMetadata.getTableTypes();
     List<String> tableTypes = Lists.newArrayList();
     while (resultSet.next()) {
         String type = resultSet.getString("TABLE_TYPE");
         System.out.println("Get table type: type=" + type);
         tableTypes.add(type);
     }
     Assert.assertTrue(tableTypes.size() > 0 && tableTypes.contains("TABLE"));
     resultSet.close();
     **/

    conn.close();
}