Usage examples for `java.sql.Connection.getCatalog`.

Signature: `String getCatalog() throws SQLException;` — retrieves the Connection
object's current catalog name. From source file: org.kawanfw.test.api.client.DatabaseMetaDataTest.java
/**
 * Exercises {@link DatabaseMetaData} on the supplied connection: prints general
 * driver/database information, asserts support for transaction isolation,
 * batch updates, savepoints and generated keys (with per-vendor exclusions),
 * then verifies the primary key of the "customer" table and the presence of
 * the expected test-schema tables.
 *
 * @param connection the (possibly remote AceQL) JDBC connection under test
 * @throws Exception if any JDBC call or assertion fails
 */
public void test(Connection connection) throws Exception {
    MessageDisplayer.initClassDisplay(this.getClass().getSimpleName());
    DatabaseMetaData databaseMetaData = connection.getMetaData();
    // Test that getMetaData() will return value from cache
    databaseMetaData = connection.getMetaData();

    // Extra diagnostics only available on the AceQL RemoteConnection implementation.
    if (connection instanceof RemoteConnection) {
        MessageDisplayer.display("Java Version : " + System.getProperty("java.version"));
        MessageDisplayer.display("AceQL Version: " + ((RemoteConnection) connection).getVersion());
        MessageDisplayer.display("AceQL Url : " + ((RemoteConnection) connection).getUrl());
        MessageDisplayer.display("");
    }

    // Verify that a cloned remote connection can also serve metadata, then close it.
    if (connection instanceof RemoteConnection) {
        MessageDisplayer.display("((RemoteConnection)connection).clone();");
        Connection connection2 = ((RemoteConnection) connection).clone();
        @SuppressWarnings("unused")
        DatabaseMetaData databaseMetaData2 = connection2.getMetaData();
        connection2.close();
    }

    // Informational dump — no assertions in this section.
    MessageDisplayer.display("General info (no Assert done):");
    MessageDisplayer.display("connection.getCatalog() : " + connection.getCatalog());
    MessageDisplayer.display(
            "databaseMetaData.getDatabaseProductName() : " + databaseMetaData.getDatabaseProductName());
    MessageDisplayer.display(
            "databaseMetaData.getDatabaseProductVersion(): " + databaseMetaData.getDatabaseProductVersion());
    MessageDisplayer.display(
            "databaseMetaData.getDatabaseMajorVersion() : " + databaseMetaData.getDatabaseMajorVersion());
    MessageDisplayer.display(
            "databaseMetaData.getDatabaseMinorVersion() : " + databaseMetaData.getDatabaseMinorVersion());
    MessageDisplayer.display(
            "databaseMetaData.allProceduresAreCallable() : " + databaseMetaData.allProceduresAreCallable());
    // SystemOutHandle.display(DatabaseMetaData.bestRowSession);
    MessageDisplayer.display("");
    // SystemOutHandle.display(databaseMetaData.autoCommitFailureClosesAllResultSets());
    MessageDisplayer.display("databaseMetaData.getCatalogTerm(): " + databaseMetaData.getCatalogTerm());

    // Some drivers throw from these optional metadata calls; display the error instead of failing.
    try {
        MessageDisplayer.display(
                "databaseMetaData.supportsStoredProcedures(): " + databaseMetaData.supportsStoredProcedures());
        MessageDisplayer.display("databaseMetaData.supportsStoredFunctionsUsingCallSyntax(): "
                + databaseMetaData.supportsStoredFunctionsUsingCallSyntax());
    } catch (Throwable e) {
        MessageDisplayer.display(e.toString());
    }

    MessageDisplayer.display("connection.getAutoCommit(): " + connection.getAutoCommit());
    MessageDisplayer.display("databaseMetaData.getDefaultTransactionIsolation() : "
            + databaseMetaData.getDefaultTransactionIsolation());
    MessageDisplayer
            .display("databaseMetaData.supportsTransactionIsolationLevel(TRANSACTION_READ_UNCOMMITTED): "
                    + databaseMetaData
                            .supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_UNCOMMITTED));
    MessageDisplayer.display("databaseMetaData.supportsTransactionIsolationLevel(TRANSACTION_READ_COMMITTED): "
            + databaseMetaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_COMMITTED));
    MessageDisplayer.display("databaseMetaData.supportsTransactionIsolationLevel(TRANSACTION_REPEATABLE_READ): "
            + databaseMetaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ));
    MessageDisplayer.display("databaseMetaData.supportsTransactionIsolationLevel(TRANSACTION_SERIALIZABLE): "
            + databaseMetaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_SERIALIZABLE));
    MessageDisplayer
            .display("databaseMetaData.supportsBatchUpdates() : " + databaseMetaData.supportsBatchUpdates());
    MessageDisplayer
            .display("databaseMetaData.supportsSavepoints() : " + databaseMetaData.supportsSavepoints());
    MessageDisplayer.display(
            "databaseMetaData.supportsGetGeneratedKeys(): " + databaseMetaData.supportsGetGeneratedKeys());

    // READ_COMMITTED support is asserted except on Teradata and Informix.
    if (!new SqlUtil(connection).isTeradata() && !new SqlUtil(connection).isInformix()) {
        Assert.assertEquals(true,
                databaseMetaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_COMMITTED));
    }
    Assert.assertEquals("databaseMetaData.supportsBatchUpdates()", true,
            databaseMetaData.supportsBatchUpdates());
    // Generated-keys support is asserted except on SQL Anywhere and Access.
    if (!new SqlUtil(connection).isSQLAnywhere() && !new SqlUtil(connection).isAccess()) {
        Assert.assertEquals("databaseMetaData.supportsGetGeneratedKeys()", true,
                databaseMetaData.supportsGetGeneratedKeys());
    }
    // Informix does not support savepoints
    SqlUtil sqlUtil = new SqlUtil(connection);
    if (!sqlUtil.isInformix() && !sqlUtil.isTeradata() && !new SqlUtil(connection).isAccess()) {
        Assert.assertEquals(true, databaseMetaData.supportsSavepoints());
    }
    MessageDisplayer.display("");

    String catalog = null;
    String schema = null;
    String table = "customer";
    // Table name must be uppercase for Oracle & DB2, lowercase for MySQL
    // and PostgreSQL
    if (new SqlUtil(connection).isOracle() || new SqlUtil(connection).isHSQLDB()
            || new SqlUtil(connection).isDB2()) {
        table = table.toUpperCase();
    }

    ResultSet rs = null;
    // Primary-key check skipped on Access; H2 is excused from the "at least one row" assert.
    if (!new SqlUtil(connection).isAccess()) {
        rs = databaseMetaData.getPrimaryKeys(catalog, schema, table);
        printResultSet(rs);
        boolean rsNext = false;
        while (rs.next()) {
            rsNext = true;
            String keyColumnName = rs.getString("COLUMN_NAME");
            MessageDisplayer.display("Primary Key is: " + keyColumnName + " for Table: " + table);
            Assert.assertEquals("customer_id", keyColumnName.toLowerCase());
        }
        if (!new SqlUtil(connection).isH2()) {
            Assert.assertEquals(true, rsNext);
        }
        rs.close();
    }

    // List all tables/views and collect their (lower-cased) names.
    String[] types = { "TABLE", "VIEW" };
    rs = databaseMetaData.getTables(null, null, null, types);
    Set<String> tablesSet = new HashSet<String>();
    Set<String> ourTables = new HashSet<String>();
    ourTables.add("banned_usernames");
    ourTables.add("customer");
    ourTables.add("customer_auto");
    ourTables.add("orderlog");
    ourTables.add("user_login");
    MessageDisplayer.display("");
    while (rs.next()) {
        table = rs.getString("TABLE_NAME");
        if (ourTables.contains(table.toLowerCase())) {
            MessageDisplayer.display("Table: " + table);
        }
        tablesSet.add(table.toLowerCase());
    }

    // Assert the expected test tables were reported by the driver.
    testTable("banned_usernames", tablesSet);
    testTable("customer", tablesSet);
    testTable("orderlog", tablesSet);
    testTable("user_login", tablesSet);
    rs.close();
}
From source file:org.kuali.coeus.common.impl.attachment.KcAttachmentDataDaoImpl.java
protected void populateReferences(Connection conn) throws SQLException { tableReferences = new HashSet<>(); String catalog = conn.getCatalog(); String schema = catalog;//w w w .j a v a 2 s. co m // this indicates a non-mysql db, so try oracle. if (catalog == null) { schema = conn.getSchema(); } try { if (conn.getMetaData().getSchemas().next()) { schema = conn.getSchema(); } } catch (AbstractMethodError e) { LOG.info("Unable to retrieve schema, using catalog " + e.getMessage()); } // The Oracle database stores its table names as Upper-Case, // if you pass a table name in lowercase characters, it will not work. // MySQL does not care. ResultSet rs = conn.getMetaData().getExportedKeys(catalog, schema, "FILE_DATA"); while (rs.next()) { tableReferences.add(new TableReference(rs.getString("FKTABLE_NAME"), rs.getString("FKCOLUMN_NAME"))); } }
From source file:org.mifosplatform.infrastructure.dataqueries.service.ReadReportingServiceImpl.java
/**
 * Copies the incoming HTTP query parameters into the Pentaho report's parameter
 * values, converting each value to the type the report declares (Integer, Long,
 * java.sql.Date, or String), then adds the implicit "tenantdb" (current JDBC
 * catalog) and "userhierarchy" (current user's office hierarchy) parameters.
 *
 * @param report      the Pentaho master report whose parameters are populated
 * @param queryParams raw name/value pairs from the request
 * @throws PlatformDataIntegrityException if a declared parameter is missing/blank
 *         or any step fails (the original cause is reduced to its message)
 */
private void addParametersToReport(final MasterReport report, final Map<String, String> queryParams) {

    final AppUser currentUser = this.context.authenticatedUser();

    try {
        final ReportParameterValues rptParamValues = report.getParameterValues();
        final ReportParameterDefinition paramsDefinition = report.getParameterDefinition();

        /*
         * only allow integer, long, date and string parameter types and
         * assume all mandatory - could go more detailed like Pawel did in
         * Mifos later and could match incoming and pentaho parameters
         * better... currently assuming they come in ok... and if not an
         * error
         */
        for (final ParameterDefinitionEntry paramDefEntry : paramsDefinition.getParameterDefinitions()) {
            final String paramName = paramDefEntry.getName();
            // "tenantdb" and "userhierarchy" are injected below, never taken from the request.
            if (!((paramName.equals("tenantdb")) || (paramName.equals("userhierarchy")))) {
                logger.info("paramName:" + paramName);
                final String pValue = queryParams.get(paramName);
                // Every declared parameter is treated as mandatory.
                if (StringUtils.isBlank(pValue)) {
                    throw new PlatformDataIntegrityException("error.msg.reporting.error",
                            "Pentaho Parameter: " + paramName + " - not Provided");
                }

                final Class<?> clazz = paramDefEntry.getValueType();
                logger.info("addParametersToReport(" + paramName + " : " + pValue + " : "
                        + clazz.getCanonicalName() + ")");
                // Convert by the report-declared value type; anything unrecognised stays a String.
                if (clazz.getCanonicalName().equalsIgnoreCase("java.lang.Integer")) {
                    rptParamValues.put(paramName, Integer.parseInt(pValue));
                } else if (clazz.getCanonicalName().equalsIgnoreCase("java.lang.Long")) {
                    rptParamValues.put(paramName, Long.parseLong(pValue));
                } else if (clazz.getCanonicalName().equalsIgnoreCase("java.sql.Date")) {
                    // Expects ISO yyyy-[m]m-[d]d, per java.sql.Date.valueOf.
                    rptParamValues.put(paramName, Date.valueOf(pValue));
                } else {
                    rptParamValues.put(paramName, pValue);
                }
            }
        }

        // tenant database name and current user's office hierarchy
        // passed as parameters to allow multitenant penaho reporting
        // and
        // data scoping
        final Connection connection = this.dataSource.getConnection();
        String tenantdb;
        try {
            tenantdb = connection.getCatalog();
        } finally {
            // Always return the connection, even if getCatalog() throws.
            connection.close();
        }
        final String userhierarchy = currentUser.getOffice().getHierarchy();
        logger.info("db name:" + tenantdb + " userhierarchy:" + userhierarchy);
        rptParamValues.put("tenantdb", tenantdb);
        rptParamValues.put("userhierarchy", userhierarchy);
    } catch (final Exception e) {
        // NOTE(review): only e.getMessage() is propagated — the original cause/stack is lost.
        logger.error("error.msg.reporting.error:" + e.getMessage());
        throw new PlatformDataIntegrityException("error.msg.reporting.error", e.getMessage());
    }
}
From source file:org.nuxeo.ecm.core.storage.sql.db.H2Fulltext.java
/**
 * Returns the connection's current catalog name, falling back to the literal
 * {@code "default"} when the driver reports no catalog.
 *
 * @param conn the connection to inspect
 * @return the catalog name, or {@code "default"} if it is {@code null}
 * @throws SQLException if the catalog cannot be read
 */
protected static String getIndexName(Connection conn) throws SQLException {
    final String catalog = conn.getCatalog();
    return (catalog == null) ? "default" : catalog;
}
From source file:org.openadaptor.auxil.connector.jdbc.writer.AbstractSQLWriter.java
/**
 * Looks up the SQL types of a stored procedure's IN and INOUT arguments via
 * {@link DatabaseMetaData#getProcedureColumns}.
 *
 * <p>From the javadoc on DatabaseMetaData.getProcedureColumns():
 * <pre>
 *  1. PROCEDURE_CAT   String  =&gt; procedure catalog (may be null)
 *  2. PROCEDURE_SCHEM String  =&gt; procedure schema (may be null)
 *  3. PROCEDURE_NAME  String  =&gt; procedure name
 *  4. COLUMN_NAME     String  =&gt; column/parameter name
 *  5. COLUMN_TYPE     Short   =&gt; kind of column/parameter:
 *       procedureColumnUnknown / In / InOut / Out / Return / Result
 *  6. DATA_TYPE       int     =&gt; SQL type from java.sql.Types
 *  7. TYPE_NAME       String  =&gt; SQL type name (fully qualified for UDTs)
 *  8. PRECISION  9. LENGTH  10. SCALE  11. RADIX
 * 12. NULLABLE  13. REMARKS
 * </pre>
 *
 * @param storedProcName procedure name, optionally qualified as "schema.proc"
 * @param connection     open connection used for the metadata lookup
 * @return the java.sql.Types codes of the IN/INOUT parameters, in metadata order
 * @throws SQLException if the procedure cannot be found in the metadata
 */
protected int[] getStoredProcArgumentTypes(String storedProcName, Connection connection) throws SQLException {
    // Fix for #SC36: MapCallableStatementWriter misses first argument for Oracle databases
    // Now it checks each columnType, and only includes IN or INOUT types.
    // ToDo: Further validation of this approach. Perhaps OUT should also be included?
    DatabaseMetaData dmd = connection.getMetaData();
    List sqlTypeList = new ArrayList();
    String catalog = connection.getCatalog();
    String schema = "%";
    String proc = storedProcName;
    String column = "%";
    log.debug("Catalog for stored proc " + storedProcName + " is " + catalog);
    ResultSet rs;
    // Oracle doesn't bother with catalogs at all :-(
    // Thus if it's an oracle db, we may need to substitute package name instead
    // of catalog.
    if ((catalog == null) && (oraclePackage != null)) {
        // oraclePackage will only be non-null for oracle db
        log.debug("Setting catalog to oracle package of: " + oraclePackage);
        catalog = oraclePackage;
        schema = null; // Oracle 'ignore' setting. Probably the same as "%" anyway.
    }
    // Check if there's a schema reference in the proc name...
    String[] components = storedProcName.split("\\.");
    int len = components.length;
    if (len > 1) {
        schema = components[len - 2];
        proc = components[len - 1];
    }
    log.debug("Resolving proc - catalog=" + catalog + ";schema=" + schema + ";proc=" + proc + ";column="
            + column);
    rs = dmd.getProcedureColumns(catalog, schema, proc, column);
    // If RS is empty, then we have failed in our mission.
    if (!rs.next()) { // First rs is return value.
        rs.close();
        String msg = "Failed to lookup stored procedure " + storedProcName;
        log.warn(msg);
        throw new SQLException(msg);
    }
    // do/while: the row already fetched by the guard above is processed first.
    do {
        // Verify that each argument is an IN or INOUT arg type.
        int type = rs.getInt(5); // Need to check if it is a result, or an input arg.
        int dataType = rs.getInt(6); // DATA_TYPE is column six!
        if (log.isDebugEnabled()) {
            log.debug("Catalog=" + rs.getString(1) + "; Schema=" + rs.getString(2) + "; Proc=" + rs.getString(3)
                    + "; Column=" + rs.getString(4) + "; ParamType=" + spTypeToString(type) + "(" + type + ")"
                    + "; DataType=" + dataType + "; TypeName=" + rs.getString(7));
        }
        if (type == DatabaseMetaData.procedureColumnIn || type == DatabaseMetaData.procedureColumnInOut) {
            log.debug("Argument of type " + type + " is IN or INOUT");
            sqlTypeList.add(Integer.valueOf(dataType)); // DATA_TYPE is column six!
        } else {
            log.debug("Ignoring column of type " + type + " as it is neither IN nor INOUT");
        }
    } while (rs.next());
    log.debug("Number of stored procedure parameters found: " + sqlTypeList.size());
    // Unbox the accumulated type codes into the result array.
    int[] sqlTypes = new int[sqlTypeList.size()];
    for (int i = 0; i < sqlTypes.length; i++) {
        sqlTypes[i] = ((Integer) sqlTypeList.get(i)).intValue();
    }
    rs.close();
    return sqlTypes;
}
From source file:org.openflexo.technologyadapter.jdbc.util.SQLHelper.java
/** * Updates the list of tables for the given schema. * /*from w w w . j a v a 2s .c o m*/ * @param schema * the schema * @param tables * the table list to update * @param factory * the factory used to create the new tables if needed */ public static void updateTables(final JDBCSchema schema, List<JDBCTable> tables, final JDBCFactory factory) throws SQLException { JDBCConnection jdbcConn = schema.getResourceData(); // TODO : maybe resource leak, cannot use lexical scope for auto-closing Connection connection = jdbcConn.getConnection(); // prepare case ignoring map to match tables final Map<String, JDBCTable> sortedTables = new HashMap<>(); for (JDBCTable table : tables) { sortedTables.put(table.getName().toLowerCase(), table); } // query the tables to find new and removed ones final Set<JDBCTable> added = new LinkedHashSet<>(); final Set<JDBCTable> matched = new LinkedHashSet<>(); DatabaseMetaData metadata = connection.getMetaData(); try (ResultSet jdbcTables = metadata.getTables(connection.getCatalog(), jdbcConn.getDbType().getSchemaPattern(), "%", null)) { while (jdbcTables.next()) { String tableName = jdbcTables.getString("TABLE_NAME"); JDBCTable aTable = sortedTables.get(tableName.toLowerCase()); if (aTable == null) { // new table, add it to the list aTable = factory.newInstance(JDBCTable.class); aTable.init(schema, tableName); added.add(aTable); } else { matched.add(aTable); } } } // gets tables to remove Set<JDBCTable> removed = new HashSet<>(); for (JDBCTable table : tables) { if (!matched.contains(table)) removed.add(table); } // clears the tables of the removed ones // using schema adder and removed fires notifications for (JDBCTable table : removed) { schema.removeTable(table); } // adds new tables for (JDBCTable table : added) { schema.addTable(table); } }
From source file:org.openflexo.technologyadapter.jdbc.util.SQLHelper.java
/** * Updates the list of columns for the given table. * //from w w w. j a v a 2s . c o m * @param table * the table * @param columns * the table list to update * @param factory * the factory used to create the new columns if needed */ public static void updateColumns(final JDBCTable table, List<JDBCColumn> columns, final JDBCFactory factory) throws SQLException { JDBCConnection jdbcConn = table.getResourceData(); // TODO : maybe resource leak, cannot use lexical scope for auto-closing Connection connection = jdbcConn.getConnection(); // retrieves keys final Set<String> keys = getKeys(table); // prepare case ignoring map to match columns final Map<String, JDBCColumn> sortedColumns = new HashMap<>(); for (JDBCColumn column : columns) { sortedColumns.put(column.getName().toLowerCase(), column); } // query the columns to find new and removed ones final Set<JDBCColumn> added = new LinkedHashSet<>(); final Set<JDBCColumn> matched = new LinkedHashSet<>(); DatabaseMetaData metadata = connection.getMetaData(); try (ResultSet jdbcCols = metadata.getColumns(connection.getCatalog(), jdbcConn.getDbType().getSchemaPattern(), sqlName(table.getName()), "%")) { while (jdbcCols.next()) { /* System.out.println(" --------------------> " + jdbcCols.getString("COLUMN_NAME")); System.out.println("TABLE_CAT: " + jdbcCols.getObject("TABLE_CAT")); System.out.println("TABLE_SCHEM: " + jdbcCols.getObject("TABLE_SCHEM")); System.out.println("TABLE_NAME: " + jdbcCols.getObject("TABLE_NAME")); System.out.println("COLUMN_NAME: " + jdbcCols.getObject("COLUMN_NAME")); System.out.println("DATA_TYPE: " + jdbcCols.getObject("DATA_TYPE")); System.out.println("TYPE_NAME: " + jdbcCols.getObject("TYPE_NAME")); System.out.println("COLUMN_SIZE: " + jdbcCols.getObject("COLUMN_SIZE")); System.out.println("BUFFER_LENGTH: " + jdbcCols.getObject("BUFFER_LENGTH")); System.out.println("DECIMAL_DIGITS: " + jdbcCols.getObject("DECIMAL_DIGITS")); System.out.println("NUM_PREC_RADIX: " + 
jdbcCols.getObject("NUM_PREC_RADIX")); System.out.println("IS_NULLABLE: " + jdbcCols.getObject("IS_NULLABLE")); System.out.println("REMARKS: " + jdbcCols.getObject("REMARKS")); System.out.println("COLUMN_DEF: " + jdbcCols.getObject("COLUMN_DEF")); System.out.println("SQL_DATA_TYPE: " + jdbcCols.getObject("SQL_DATA_TYPE")); System.out.println("SQL_DATETIME_SUB: " + jdbcCols.getObject("SQL_DATETIME_SUB")); System.out.println("CHAR_OCTET_LENGTH: " + jdbcCols.getObject("CHAR_OCTET_LENGTH")); System.out.println("ORDINAL_POSITION: " + jdbcCols.getObject("ORDINAL_POSITION")); System.out.println("IS_NULLABLE: " + jdbcCols.getObject("IS_NULLABLE")); System.out.println("SCOPE_CATALOG: " + jdbcCols.getObject("SCOPE_CATALOG")); System.out.println("SCOPE_SCHEMA: " + jdbcCols.getObject("SCOPE_SCHEMA")); System.out.println("SCOPE_TABLE: " + jdbcCols.getObject("SCOPE_TABLE")); System.out.println("SOURCE_DATA_TYPE: " + jdbcCols.getObject("SOURCE_DATA_TYPE")); System.out.println("IS_AUTOINCREMENT: " + jdbcCols.getObject("IS_AUTOINCREMENT")); System.out.println("IS_GENERATEDCOLUMN: " + jdbcCols.getObject("IS_GENERATEDCOLUMN")); */ // [TABLE_CAT, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, DATA_TYPE, TYPE_NAME, COLUMN_SIZE, BUFFER_LENGTH, DECIMAL_DIGITS, // NUM_PREC_RADIX, IS_NULLABLE, REMARKS, COLUMN_DEF, SQL_DATA_TYPE, SQL_DATETIME_SUB, CHAR_OCTET_LENGTH, ORDINAL_POSITION, // IS_NULLABLE, SCOPE_CATALOG, SCOPE_SCHEMA, SCOPE_TABLE, SOURCE_DATA_TYPE, IS_AUTOINCREMENT, IS_GENERATEDCOLUMN] String columnName = jdbcCols.getString("COLUMN_NAME"); String typeName = jdbcCols.getString("TYPE_NAME"); int columnLength = jdbcCols.getInt("COLUMN_SIZE"); boolean isNullable = jdbcCols.getString("IS_NULLABLE").equalsIgnoreCase("YES"); JDBCColumn column = sortedColumns.get(columnName.toLowerCase()); if (column == null) { // new column, add it to the list column = factory.newInstance(JDBCColumn.class); column.init(table, keys.contains(columnName), columnName, typeName, columnLength, isNullable); 
added.add(column); } else { matched.add(column); } } } // gets columns to remove Set<JDBCColumn> removed = new HashSet<>(); for (JDBCColumn column : columns) { if (!matched.contains(column)) removed.add(column); } // clears the columns of the removed ones // using table adder and removed fires notifications for (JDBCColumn column : removed) { table.removeColumn(column); } // adds new columns for (JDBCColumn column : added) { table.addColumn(column); } }
From source file:org.openflexo.technologyadapter.jdbc.util.SQLHelper.java
private static Set<String> getKeys(final JDBCTable table) throws SQLException { // TODO : maybe resource leak, cannot use lexical scope for auto-closing Connection connection = table.getResourceData().getConnection(); DatabaseMetaData metadata = connection.getMetaData(); try (ResultSet foundKeys = metadata.getPrimaryKeys(connection.getCatalog(), "PUBLIC", sqlName(table.getName()))) { Set<String> keys = new HashSet<>(); while (foundKeys.next()) { keys.add(foundKeys.getString("COLUMN_NAME")); }//from w w w. jav a2 s . co m return keys; } }
From source file:org.ralasafe.util.DBUtil.java
/**
 * Determines the default schema for the connection: MySQL and SQL Server treat
 * the current catalog as the schema, while every other product falls back to
 * the connected user's name.
 *
 * @param conn the connection to inspect
 * @return the default schema name
 * @throws SQLException if the metadata lookup fails
 */
public static String getDefaultSchema(Connection conn) throws SQLException {
    final String productName = getDatabaseProductName(conn);
    if (productName.equals(MYSQL) || productName.equals(SQLSERVER)) {
        return conn.getCatalog();
    }
    // Other databases: the user name doubles as the default schema.
    return conn.getMetaData().getUserName();
}
From source file:org.seasar.dbflute.properties.DfAbstractHelperProperties.java
protected String getConnectedCatalog(String driver, String url, String user, String password) throws SQLException { setupConnectionDriver(driver);// ww w . ja v a 2 s .c om try { final Connection conn = DriverManager.getConnection(url, user, password); return conn.getCatalog(); } catch (SQLException e) { String msg = "Failed to connect: url=" + url + " user=" + user; throw new DfJDBCException(msg, e); } }