List of usage examples for java.sql Connection getSchema
String getSchema() throws SQLException;
Retrieves this Connection object's current schema name, or null if no schema has been set.
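Before the longer, real-world examples below, here is a minimal sketch of calling getSchema() on a freshly opened connection. The JDBC URL, user, and password are placeholders, not taken from any of the examples on this page; the guard against AbstractMethodError and SQLFeatureNotSupportedException mirrors the defensive pattern several of the examples use for drivers built before JDBC 4.1.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;

public class GetSchemaExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder connection details; substitute your own driver and database.
        try (Connection conn = DriverManager.getConnection(
                "jdbc:postgresql://localhost:5432/mydb", "user", "password")) {
            try {
                // May return null if the driver has no notion of a current schema.
                String schema = conn.getSchema();
                System.out.println("Current schema: " + schema);
            } catch (AbstractMethodError | SQLFeatureNotSupportedException e) {
                // Drivers implementing pre-JDBC-4.1 interfaces may not provide getSchema().
                System.out.println("getSchema() is not supported by this driver");
            }
        }
    }
}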
From source file:net.certifi.audittablegen.AuditTableGen.java

/**
 * Validates the provided dataSource and gets a DataSourceDMR
 * object to manage database interaction. Sets the initialized flag
 * to true if initialization is successful.
 *
 * @throws SQLException
 */
void initialize() throws SQLException {
    Connection connection = dataSource.getConnection();
    //Properties connectionProperties = connection.getClientInfo();
    DatabaseMetaData dmd = connection.getMetaData();
    logger.debug("DatabaseProduct: {}", dmd.getDatabaseProductName());
    try {
        catalog = connection.getCatalog();
        // Null check must come first to avoid a NullPointerException.
        if (schema == null || schema.isEmpty()) {
            try {
                schema = connection.getSchema();
            } catch (AbstractMethodError e) {
                logger.error("Abstract method getSchema() not implemented", e);
                schema = "";
            }
        }
    } catch (SQLException e) {
        logger.error("Error getting catalog/schema", e);
    }

    if (dmd.getDriverName().toLowerCase().contains("postgresql")) {
        // known dataSource with specific implementation requirements,
        // i.e. PostgresqlDMR, HsqldbDMR...
        dmr = new PostgresqlDMR(dataSource, schema);
    } else if (dmd.getDriverName().toLowerCase().contains("hsqldb")) {
        dmr = new HsqldbDMR(dataSource, schema);
    } else {
        // generic implementation
        dmr = new GenericDMR(dataSource, schema);
        logger.info("attempting to run against unknown database product");
    }

    if (dmr != null) {
        this.initialized = true;
    }

    if (schema != null && !schema.isEmpty()) {
        dmr.setSchema(schema);
        if (dmr.getSchema() == null) {
            throw new RuntimeException("Schema could not be found.");
        }
    }
}
From source file:com.adaptris.jdbc.connection.FailoverDatasourceTest.java
@Test
public void testInfo() throws Exception {
    Connection conn = new MyProxy();
    try {
        try {
            conn.getMetaData();
        } catch (SQLException e) {
        }
        try {
            conn.setCatalog(conn.getCatalog());
        } catch (SQLException e) {
        }
        try {
            conn.setReadOnly(conn.isReadOnly());
        } catch (SQLException e) {
        }
        try {
            conn.setTransactionIsolation(conn.getTransactionIsolation());
        } catch (SQLException e) {
        }
        try {
            conn.setTransactionIsolation(conn.getTransactionIsolation());
        } catch (SQLException e) {
        }
        try {
            conn.getWarnings();
        } catch (SQLException e) {
        }
        try {
            conn.clearWarnings();
        } catch (SQLException e) {
        }
        try {
            conn.setHoldability(conn.getHoldability());
        } catch (SQLException e) {
        }
        try {
            conn.setSchema(conn.getSchema());
        } catch (SQLException e) {
        }
    } finally {
        JdbcUtil.closeQuietly(conn);
    }
}
From source file:org.apache.hadoop.hive.metastore.tools.SchemaToolTaskValidate.java
@VisibleForTesting
boolean validateSchemaTables(Connection conn) throws HiveMetaException {
    System.out.println("Validating metastore schema tables");
    String version = null;
    try {
        MetaStoreConnectionInfo connectionInfo = schemaTool.getConnectionInfo(false);
        version = schemaTool.getMetaStoreSchemaInfo().getMetaStoreSchemaVersion(connectionInfo);
    } catch (HiveMetaException he) {
        System.err.println("Failed to determine schema version from Hive Metastore DB. " + he.getMessage());
        System.out.println("Failed in schema table validation.");
        LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage(), he);
        return false;
    }

    Connection hmsConn = schemaTool.getConnectionToMetastore(false);

    LOG.debug("Validating tables in the schema for version " + version);
    List<String> dbTables = new ArrayList<>();
    ResultSet rs = null;
    try {
        String schema = null;
        try {
            schema = hmsConn.getSchema();
        } catch (SQLFeatureNotSupportedException e) {
            LOG.debug("schema is not supported");
        }

        DatabaseMetaData metadata = conn.getMetaData();
        rs = metadata.getTables(null, schema, "%", new String[] { "TABLE" });
        while (rs.next()) {
            String table = rs.getString("TABLE_NAME");
            dbTables.add(table.toLowerCase());
            LOG.debug("Found table " + table + " in HMS dbstore");
        }
    } catch (SQLException e) {
        throw new HiveMetaException("Failed to retrieve schema tables from Hive Metastore DB," + e.getMessage(), e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                throw new HiveMetaException("Failed to close resultset", e);
            }
        }
    }

    // Parse the schema file to determine the tables that are expected to exist.
    // The Oracle schema is used because it is simpler to parse (no quotes or backticks, etc.).
    List<String> schemaTables = new ArrayList<>();
    List<String> subScripts = new ArrayList<>();

    String baseDir = new File(schemaTool.getMetaStoreSchemaInfo().getMetaStoreScriptDir()).getParent();
    String schemaFile = new File(schemaTool.getMetaStoreSchemaInfo().getMetaStoreScriptDir(),
            schemaTool.getMetaStoreSchemaInfo().generateInitFileName(version)).getPath();
    try {
        LOG.debug("Parsing schema script " + schemaFile);
        subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        while (subScripts.size() > 0) {
            schemaFile = baseDir + "/" + schemaTool.getDbType() + "/" + subScripts.remove(0);
            LOG.debug("Parsing subscript " + schemaFile);
            subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        }
    } catch (Exception e) {
        System.err.println("Exception in parsing schema file. Cause:" + e.getMessage());
        System.out.println("Failed in schema table validation.");
        return false;
    }

    LOG.debug("Schema tables:[ " + Arrays.toString(schemaTables.toArray()) + " ]");
    LOG.debug("DB tables:[ " + Arrays.toString(dbTables.toArray()) + " ]");

    // Now diff the lists.
    schemaTables.removeAll(dbTables);
    if (schemaTables.size() > 0) {
        Collections.sort(schemaTables);
        System.err.println("Table(s) [ " + Arrays.toString(schemaTables.toArray()) + " ] "
                + "are missing from the metastore database schema.");
        System.out.println("[FAIL]\n");
        return false;
    } else {
        System.out.println("[SUCCESS]\n");
        return true;
    }
}
From source file:org.apache.hive.beeline.HiveSchemaTool.java
boolean validateSchemaTables(Connection conn) throws HiveMetaException {
    String version = null;
    ResultSet rs = null;
    DatabaseMetaData metadata = null;
    List<String> dbTables = new ArrayList<String>();
    List<String> schemaTables = new ArrayList<String>();
    List<String> subScripts = new ArrayList<String>();
    Connection hmsConn = getConnectionToMetastore(false);

    System.out.println("Validating metastore schema tables");
    try {
        version = metaStoreSchemaInfo.getMetaStoreSchemaVersion(getConnectionInfo(false));
    } catch (HiveMetaException he) {
        System.err.println("Failed to determine schema version from Hive Metastore DB. " + he.getMessage());
        System.out.println("Failed in schema table validation.");
        LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
        return false;
    }

    // Re-open the HMS connection.
    hmsConn = getConnectionToMetastore(false);

    LOG.debug("Validating tables in the schema for version " + version);
    try {
        metadata = conn.getMetaData();
        String[] types = { "TABLE" };
        rs = metadata.getTables(null, hmsConn.getSchema(), "%", types);
        String table = null;
        while (rs.next()) {
            table = rs.getString("TABLE_NAME");
            dbTables.add(table.toLowerCase());
            LOG.debug("Found table " + table + " in HMS dbstore");
        }
    } catch (SQLException e) {
        throw new HiveMetaException(
                "Failed to retrieve schema tables from Hive Metastore DB," + e.getMessage());
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                throw new HiveMetaException("Failed to close resultset", e);
            }
        }
    }

    // Parse the schema file to determine the tables that are expected to exist.
    // The Oracle schema is used because it is simpler to parse (no quotes or backticks, etc.).
    String baseDir = new File(metaStoreSchemaInfo.getMetaStoreScriptDir()).getParent();
    String schemaFile = new File(metaStoreSchemaInfo.getMetaStoreScriptDir(),
            metaStoreSchemaInfo.generateInitFileName(version)).getPath();
    try {
        LOG.debug("Parsing schema script " + schemaFile);
        subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        while (subScripts.size() > 0) {
            schemaFile = baseDir + "/" + dbType + "/" + subScripts.remove(0);
            LOG.debug("Parsing subscript " + schemaFile);
            subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        }
    } catch (Exception e) {
        System.err.println("Exception in parsing schema file. Cause:" + e.getMessage());
        System.out.println("Failed in schema table validation.");
        return false;
    }

    LOG.debug("Schema tables:[ " + Arrays.toString(schemaTables.toArray()) + " ]");
    LOG.debug("DB tables:[ " + Arrays.toString(dbTables.toArray()) + " ]");

    // Now diff the lists.
    schemaTables.removeAll(dbTables);
    if (schemaTables.size() > 0) {
        Collections.sort(schemaTables);
        System.err.println("Table(s) [ " + Arrays.toString(schemaTables.toArray())
                + " ] are missing from the metastore database schema.");
        System.out.println("Failed in schema table validation.");
        return false;
    } else {
        System.out.println("Succeeded in schema table validation.");
        return true;
    }
}
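Both Hive validators above reduce to the same core step: asking DatabaseMetaData for the tables visible in the schema reported by getSchema(). Below is a minimal, self-contained sketch of just that step; the class and method names are illustrative, not taken from the Hive source, and the null catalog argument simply means "do not filter by catalog".

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public final class SchemaTableLister {
    /** Lists the tables visible in the connection's current schema, lower-cased. */
    public static List<String> listTables(Connection conn) throws SQLException {
        String schema = null;
        try {
            // May be null; a null schema pattern tells getTables not to filter by schema.
            schema = conn.getSchema();
        } catch (AbstractMethodError | SQLException e) {
            // Tolerate drivers that cannot report a current schema and fall through with null.
        }
        List<String> tables = new ArrayList<>();
        DatabaseMetaData metadata = conn.getMetaData();
        try (ResultSet rs = metadata.getTables(null, schema, "%", new String[] { "TABLE" })) {
            while (rs.next()) {
                tables.add(rs.getString("TABLE_NAME").toLowerCase());
            }
        }
        return tables;
    }
}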
From source file:org.dspace.storage.rdbms.DatabaseUtils.java
/**
 * Get the Database Schema Name in use by this Connection, so that it can
 * be used to limit queries in other methods (e.g. tableExists()).
 *
 * @param connection
 *            Current Database Connection
 * @return Schema name as a string, or null if it cannot be determined or is unspecified
 * @throws SQLException
 *             An exception that provides information on a database access error or other errors.
 */
public static String getSchemaName(Connection connection) throws SQLException {
    String schema = null;

    // Try to get the schema from the DB connection itself.
    // As long as the database driver supports JDBC 4.1, there should be a getSchema() method.
    // If this method is unimplemented or doesn't exist, it will throw an exception
    // (likely an AbstractMethodError).
    try {
        schema = connection.getSchema();
    } catch (Exception | AbstractMethodError e) {
        // Ignore and fall back to the configured or default schema below.
    }

    // If we don't know our schema, try the schema in the DSpace configuration.
    if (StringUtils.isBlank(schema)) {
        schema = canonicalize(connection,
                DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("db.schema"));
    }

    // Still blank? Fall back to a sane default based on the DB type.
    if (StringUtils.isBlank(schema)) {
        String dbType = getDbType(connection);

        if (dbType.equals(DBMS_POSTGRES)) {
            // For PostgreSQL, the default schema is named "public".
            // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html
            schema = "public";
        } else if (dbType.equals(DBMS_ORACLE)) {
            // For Oracle, the default schema is actually the user account.
            // See: http://stackoverflow.com/a/13341390
            DatabaseMetaData meta = connection.getMetaData();
            schema = meta.getUserName();
        } else {
            // For H2 (in-memory), there is no such thing as a schema.
            schema = null;
        }
    }

    return schema;
}
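As the Javadoc above notes, the resolved schema name is typically used to scope metadata lookups such as a tableExists() check. Here is a minimal, hedged sketch of such a check, independent of the DSpace utility class; the class, method, and parameter names are illustrative, not DSpace's actual API.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

public final class SchemaQueries {
    /**
     * Returns true if a table with the given name exists in the given schema.
     * A null schema asks the driver not to filter by schema at all.
     * Note: some databases (e.g. Oracle) store identifiers upper-cased, so callers
     * may need to upper-case tableName; MySQL does not care.
     */
    public static boolean tableExists(Connection connection, String schema, String tableName)
            throws SQLException {
        DatabaseMetaData meta = connection.getMetaData();
        try (ResultSet rs = meta.getTables(null, schema, tableName, new String[] { "TABLE" })) {
            return rs.next();
        }
    }
}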
From source file:org.kuali.coeus.common.impl.attachment.KcAttachmentDataDaoImpl.java
protected void populateReferences(Connection conn) throws SQLException {
    tableReferences = new HashSet<>();
    String catalog = conn.getCatalog();
    String schema = catalog;

    // A null catalog indicates a non-MySQL database, so try Oracle's notion of a schema instead.
    if (catalog == null) {
        schema = conn.getSchema();
    }

    try {
        if (conn.getMetaData().getSchemas().next()) {
            schema = conn.getSchema();
        }
    } catch (AbstractMethodError e) {
        LOG.info("Unable to retrieve schema, using catalog " + e.getMessage());
    }

    // The Oracle database stores its table names in upper case;
    // if you pass a table name in lower-case characters, it will not work.
    // MySQL does not care.
    ResultSet rs = conn.getMetaData().getExportedKeys(catalog, schema, "FILE_DATA");
    while (rs.next()) {
        tableReferences.add(new TableReference(rs.getString("FKTABLE_NAME"), rs.getString("FKCOLUMN_NAME")));
    }
}
From source file:org.verdictdb.connection.JdbcConnection.java
public JdbcConnection(Connection conn, SqlSyntax syntax) {
    this.conn = conn;
    try {
        this.currentSchema = conn.getSchema();
    } catch (SQLException e) {
        e.printStackTrace();
    }

    // Set a default value if an inappropriate value is set.
    if (currentSchema == null || currentSchema.length() == 0) {
        currentSchema = syntax.getFallbackDefaultSchema();
    }

    this.syntax = syntax;
    this.log = VerdictDBLogger.getLogger(this.getClass());
}
From source file:org.xenei.bloomgraph.bloom.MysqlBigLoadTest.java
@Override
protected BloomIO getBloomIO() throws SQLException {
    LoggingConfig.setLogger("org.xenei.bloomgraph.bloom", Level.INFO);
    final DataSource ds = MySQLBloomGraphTest.getMySQLDataSource(URL, USR, PWD);
    final Connection c = ds.getConnection();
    final Statement stmt = c.createStatement();
    ResultSet rs = null;
    try {
        final SQLCommands sqlCmd = new MySQLCommands();
        final DatabaseMetaData metaData = c.getMetaData();

        rs = metaData.getTables(c.getCatalog(), c.getSchema(), sqlCmd.getPageIndexTableName(),
                new String[] { "TABLE" });
        while (rs.next()) {
            stmt.execute("TRUNCATE " + rs.getString(3));
        }
        DbUtils.closeQuietly(rs);

        rs = metaData.getTables(c.getCatalog(), c.getSchema(), sqlCmd.getPageStatsTableName(),
                new String[] { "TABLE" });
        while (rs.next()) {
            stmt.execute("TRUNCATE " + rs.getString(3));
        }
        DbUtils.closeQuietly(rs);

        rs = metaData.getTables(c.getCatalog(), c.getSchema(), "Page\\_%", new String[] { "TABLE" });
        while (rs.next()) {
            stmt.execute("DROP TABLE " + rs.getString(3));
        }

        return new DBIO(ds, sqlCmd);
    } finally {
        DbUtils.closeQuietly(rs);
        DbUtils.closeQuietly(stmt);
        DbUtils.closeQuietly(c);
    }
}
From source file:org.xenei.bloomgraph.bloom.MySQLBloomGraphTest.java
@Override
protected BloomIO getBloomIO() throws SQLException {
    final DataSource ds = getMySQLDataSource(URL, USR, PWD);
    final Connection c = ds.getConnection();
    final Statement stmt = c.createStatement();
    ResultSet rs = null;
    try {
        final SQLCommands sqlCmd = new MySQLCommands();
        final DatabaseMetaData metaData = c.getMetaData();

        rs = metaData.getTables(c.getCatalog(), c.getSchema(), sqlCmd.getPageIndexTableName(),
                new String[] { "TABLE" });
        while (rs.next()) {
            stmt.execute("TRUNCATE " + rs.getString(3));
        }
        DbUtils.closeQuietly(rs);

        rs = metaData.getTables(c.getCatalog(), c.getSchema(), sqlCmd.getPageStatsTableName(),
                new String[] { "TABLE" });
        while (rs.next()) {
            stmt.execute("TRUNCATE " + rs.getString(3));
        }
        DbUtils.closeQuietly(rs);

        rs = metaData.getTables(c.getCatalog(), c.getSchema(), "Page\\_%", new String[] { "TABLE" });
        while (rs.next()) {
            stmt.execute("DROP TABLE " + rs.getString(3));
        }

        return new DBIO(ds, sqlCmd);
    } finally {
        DbUtils.closeQuietly(rs);
        DbUtils.closeQuietly(stmt);
        DbUtils.closeQuietly(c);
    }
}
From source file:org.xenei.bloomgraph.bloom.sql.MySQLCommands.java
/**
 * Create any necessary procedures.
 *
 * @param metadata
 *            the metadata for the database.
 * @throws SQLException
 *             on error
 */
private void createProcedures(final DatabaseMetaData metadata) throws SQLException {
    ResultSet rs = null;
    Statement stmt = null;
    final Connection connection = metadata.getConnection();
    try {
        rs = metadata.getProcedures(connection.getCatalog(), connection.getSchema(), "add_triple");
        if (!rs.next()) {
            stmt = connection.createStatement();
            stmt.executeUpdate(CREATE_ADD_TRIPLE_PROCEDURE);
        }
    } finally {
        DbUtils.closeQuietly(rs);
        DbUtils.closeQuietly(stmt);
    }
}