Usage examples for java.sql.DatabaseMetaData.getTables
ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String types[])
throws SQLException;
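Before the longer real-world examples below, here is a minimal, self-contained sketch of a typical getTables call. The JDBC URL and credentials are placeholders (an in-memory H2 database is assumed purely for illustration); the metadata calls themselves are standard JDBC. It lists the tables and views visible to the connection and reads the standard result columns.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ListTables {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL/credentials; substitute your own driver and database.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo", "sa", "")) {
            DatabaseMetaData meta = conn.getMetaData();
            // null catalog/schema patterns mean "do not filter"; narrow the types array as needed.
            try (ResultSet rs = meta.getTables(null, null, "%", new String[] { "TABLE", "VIEW" })) {
                while (rs.next()) {
                    System.out.printf("%s.%s (%s)%n",
                            rs.getString("TABLE_SCHEM"),
                            rs.getString("TABLE_NAME"),
                            rs.getString("TABLE_TYPE"));
                }
            }
        }
    }
}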
From source file: com.amazon.carbonado.repo.jdbc.JDBCStorableIntrospector.java

/**
 * Uses the given database connection to query database metadata. This is
 * used to bind storables to tables, and properties to columns. Other
 * checks are performed to ensure that the storable type matches the
 * definition in the database.
 */
private static <S extends Storable> JDBCStorableInfo<S> examine(StorableInfo<S> mainInfo, Connection con,
        final String searchCatalog, final String searchSchema, SchemaResolver resolver,
        boolean primaryKeyCheckDisabled) throws SQLException, SupportException {
    final DatabaseMetaData meta = con.getMetaData();
    final String databaseProductName = meta.getDatabaseProductName();
    final String userName = meta.getUserName();

    String[] tableAliases;
    if (mainInfo.getAliasCount() > 0) {
        tableAliases = mainInfo.getAliases();
    } else {
        String name = mainInfo.getStorableType().getSimpleName();
        tableAliases = generateAliases(name);
    }

    // Try to find matching table from aliases.
    String catalog = null, schema = null, tableName = null, tableType = null;
    findName: {
        // The call to getTables may return several matching tables. This
        // map defines the "best" table type we'd like to use. The higher
        // the number the better.
        Map<String, Integer> fitnessMap = new HashMap<String, Integer>();
        fitnessMap.put("LOCAL TEMPORARY", 1);
        fitnessMap.put("GLOBAL TEMPORARY", 2);
        fitnessMap.put("VIEW", 3);
        fitnessMap.put("SYSTEM TABLE", 4);
        fitnessMap.put("TABLE", 5);
        fitnessMap.put("ALIAS", 6);
        fitnessMap.put("SYNONYM", 7);

        for (int i = 0; i < tableAliases.length; i++) {
            ResultSet rs = meta.getTables(searchCatalog, searchSchema, tableAliases[i], null);
            try {
                int bestFitness = 0;
                while (rs.next()) {
                    String type = rs.getString("TABLE_TYPE");
                    Integer fitness = fitnessMap.get(type);
                    if (fitness != null) {
                        String rsSchema = rs.getString("TABLE_SCHEM");
                        if (searchSchema == null) {
                            if (userName != null && userName.equalsIgnoreCase(rsSchema)) {
                                // Favor entities whose schema name matches the user name.
                                fitness += 7;
                            }
                        }
                        if (fitness > bestFitness) {
                            bestFitness = fitness;
                            catalog = rs.getString("TABLE_CAT");
                            schema = rsSchema;
                            tableName = rs.getString("TABLE_NAME");
                            tableType = type;
                        }
                    }
                }
            } finally {
                rs.close();
            }
            if (tableName != null) {
                // Found a match, so stop checking aliases.
                break;
            }
        }
    }

    if (tableName == null && !mainInfo.isIndependent()) {
        StringBuilder buf = new StringBuilder();
        buf.append("Unable to find matching table name for type \"");
        buf.append(mainInfo.getStorableType().getName());
        buf.append("\" by looking for ");
        appendToSentence(buf, tableAliases);
        buf.append(" with catalog " + searchCatalog + " and schema " + searchSchema);
        throw new MismatchException(buf.toString());
    }

    String qualifiedTableName = tableName;
    String resolvedTableName = tableName;

    // Oracle specific stuff...
    // TODO: Migrate this to OracleSupportStrategy.
    if (tableName != null && databaseProductName.toUpperCase().contains("ORACLE")) {
        if ("TABLE".equals(tableType) && searchSchema != null) {
            // Qualified table name references the schema. Used by SQL statements.
            qualifiedTableName = searchSchema + '.' + tableName;
        } else if ("SYNONYM".equals(tableType)) {
            // Try to get the real schema. This call is Oracle specific, however.
            String select = "SELECT TABLE_OWNER,TABLE_NAME " + "FROM ALL_SYNONYMS "
                    + "WHERE OWNER=? AND SYNONYM_NAME=?";
            PreparedStatement ps = con.prepareStatement(select);
            ps.setString(1, schema); // in Oracle, schema is the owner
            ps.setString(2, tableName);
            try {
                ResultSet rs = ps.executeQuery();
                try {
                    if (rs.next()) {
                        schema = rs.getString("TABLE_OWNER");
                        resolvedTableName = rs.getString("TABLE_NAME");
                    }
                } finally {
                    rs.close();
                }
            } finally {
                ps.close();
            }
        }
    }

    // Gather information on all columns such that metadata only needs to
    // be retrieved once.
    Map<String, ColumnInfo> columnMap = new TreeMap<String, ColumnInfo>(String.CASE_INSENSITIVE_ORDER);

    if (resolvedTableName != null) {
        ResultSet rs = meta.getColumns(catalog, schema, resolvedTableName, null);
        rs.setFetchSize(1000);
        try {
            while (rs.next()) {
                ColumnInfo info = new ColumnInfo(rs);
                columnMap.put(info.columnName, info);
            }
        } finally {
            rs.close();
        }
    }

    // Make sure that all properties have a corresponding column.
    Map<String, ? extends StorableProperty<S>> mainProperties = mainInfo.getAllProperties();
    Map<String, String> columnToProperty = new HashMap<String, String>();
    Map<String, JDBCStorableProperty<S>> jProperties = new LinkedHashMap<String, JDBCStorableProperty<S>>(
            mainProperties.size());

    ArrayList<String> errorMessages = new ArrayList<String>();

    for (StorableProperty<S> mainProperty : mainProperties.values()) {
        if (mainProperty.isDerived() || mainProperty.isJoin() || tableName == null) {
            jProperties.put(mainProperty.getName(), new JProperty<S>(mainProperty, primaryKeyCheckDisabled));
            continue;
        }

        String[] columnAliases;
        if (mainProperty.getAliasCount() > 0) {
            columnAliases = mainProperty.getAliases();
        } else {
            columnAliases = generateAliases(mainProperty.getName());
        }

        JDBCStorableProperty<S> jProperty = null;
        boolean addedError = false;

        findName: for (int i = 0; i < columnAliases.length; i++) {
            ColumnInfo columnInfo = columnMap.get(columnAliases[i]);
            if (columnInfo != null) {
                AccessInfo accessInfo = getAccessInfo(mainProperty, columnInfo.dataType,
                        columnInfo.dataTypeName, columnInfo.columnSize, columnInfo.decimalDigits);

                if (accessInfo == null) {
                    TypeDesc propertyType = TypeDesc.forClass(mainProperty.getType());
                    String message = "Property \"" + mainProperty.getName() + "\" has type \""
                            + propertyType.getFullName() + "\" which is incompatible with database type \""
                            + columnInfo.dataTypeName + '"';
                    if (columnInfo.decimalDigits > 0) {
                        message += " (decimal digits = " + columnInfo.decimalDigits + ')';
                    }
                    errorMessages.add(message);
                    addedError = true;
                    break findName;
                }

                if (columnInfo.nullable) {
                    if (!mainProperty.isNullable() && !mainProperty.isIndependent()) {
                        errorMessages.add(
                                "Property \"" + mainProperty.getName() + "\" must have a Nullable annotation");
                    }
                } else {
                    if (mainProperty.isNullable() && !mainProperty.isIndependent()) {
                        errorMessages.add("Property \"" + mainProperty.getName()
                                + "\" must not have a Nullable annotation");
                    }
                }

                boolean autoIncrement = mainProperty.isAutomatic();
                if (autoIncrement) {
                    // Need to execute a little query to check if column is
                    // auto-increment or not. This information is not available in
                    // the regular database metadata prior to jdk1.6.
                    PreparedStatement ps = con.prepareStatement(
                            "SELECT " + columnInfo.columnName + " FROM " + tableName + " WHERE 1=0");
                    try {
                        ResultSet rs = ps.executeQuery();
                        try {
                            autoIncrement = rs.getMetaData().isAutoIncrement(1);
                        } finally {
                            rs.close();
                        }
                    } finally {
                        ps.close();
                    }
                }

                jProperty = new JProperty<S>(mainProperty, columnInfo, autoIncrement, primaryKeyCheckDisabled,
                        accessInfo.mResultSetGet, accessInfo.mPreparedStatementSet, accessInfo.getAdapter());

                break findName;
            }
        }

        if (jProperty != null) {
            jProperties.put(mainProperty.getName(), jProperty);
            columnToProperty.put(jProperty.getColumnName(), jProperty.getName());
        } else {
            if (mainProperty.isIndependent()) {
                jProperties.put(mainProperty.getName(),
                        new JProperty<S>(mainProperty, primaryKeyCheckDisabled));
            } else if (!addedError) {
                StringBuilder buf = new StringBuilder();
                buf.append("Unable to find matching database column for property \"");
                buf.append(mainProperty.getName());
                buf.append("\" by looking for ");
                appendToSentence(buf, columnAliases);
                errorMessages.add(buf.toString());
            }
        }
    }

    if (errorMessages.size() > 0) {
        throw new MismatchException(mainInfo.getStorableType(), errorMessages);
    }

    // Now verify that primary or alternate keys match.
    if (resolvedTableName != null) checkPrimaryKey: {
        ResultSet rs;
        try {
            rs = meta.getPrimaryKeys(catalog, schema, resolvedTableName);
        } catch (SQLException e) {
            getLog().info("Unable to get primary keys for table \"" + resolvedTableName + "\" with catalog "
                    + catalog + " and schema " + schema + ": " + e);
            break checkPrimaryKey;
        }

        List<String> pkProps = new ArrayList<String>();
        try {
            while (rs.next()) {
                String columnName = rs.getString("COLUMN_NAME");
                String propertyName = columnToProperty.get(columnName);
                if (propertyName == null) {
                    errorMessages
                            .add("Column \"" + columnName + "\" must be part of primary or alternate key");
                    continue;
                }
                pkProps.add(propertyName);
            }
        } finally {
            rs.close();
        }

        if (errorMessages.size() > 0) {
            // Skip any extra checks.
            break checkPrimaryKey;
        }

        if (pkProps.size() == 0) {
            // If no primary keys are reported, don't even bother checking.
            // There's no consistent way to get primary keys, and entities
            // like views and synonyms don't usually report primary keys.
            // A primary key might even be logically defined as a unique
            // constraint.
            break checkPrimaryKey;
        }

        if (matchesKey(pkProps, mainInfo.getPrimaryKey())) {
            // Good. Primary key in database is same as in Storable.
            break checkPrimaryKey;
        }

        // Check if Storable has an alternate key which matches the
        // database's primary key.
        boolean foundAnyAltKey = false;
        for (StorableKey<S> altKey : mainInfo.getAlternateKeys()) {
            if (matchesKey(pkProps, altKey)) {
                // Okay. Primary key in database matches a Storable
                // alternate key.
                foundAnyAltKey = true;
                // Also check that declared primary key is a strict subset
                // of the alternate key. If not, keep checking alt keys.
                if (matchesSubKey(pkProps, mainInfo.getPrimaryKey())) {
                    break checkPrimaryKey;
                }
            }
        }

        if (foundAnyAltKey) {
            errorMessages.add("Actual primary key matches a declared alternate key, "
                    + "but declared primary key must be a strict subset. "
                    + mainInfo.getPrimaryKey().getProperties() + " is not a subset of " + pkProps);
        } else {
            errorMessages.add("Actual primary key does not match any "
                    + "declared primary or alternate key: " + pkProps);
        }
    }

    if (errorMessages.size() > 0) {
        if (primaryKeyCheckDisabled) {
            for (String errorMessage : errorMessages) {
                getLog().warn("Suppressed error: " + errorMessage);
            }
            errorMessages.clear();
        } else {
            throw new MismatchException(mainInfo.getStorableType(), errorMessages);
        }
    }

    // IndexInfo is empty, as querying for it tends to cause a table analyze to run.
    IndexInfo[] indexInfo = new IndexInfo[0];

    if (needsQuotes(tableName)) {
        String quote = meta.getIdentifierQuoteString();
        if (quote != null && !quote.equals(" ")) {
            tableName = quote + tableName + quote;
            qualifiedTableName = quote + qualifiedTableName + quote;
        }
    }

    return new JInfo<S>(mainInfo, catalog, schema, tableName, qualifiedTableName, indexInfo, jProperties);
}
From source file: com.google.enterprise.connector.sharepoint.dao.UserDataStoreDAO.java

/**
 * Checks if all the required entities exist in the user data store DB. If
 * not, creates them. As a minimal check, this method only checks for the
 * existence of tables. Subclasses can extend this for further checks.
 *
 * @throws SharepointException
 */
private void confirmEntitiesExistence() throws SharepointException {
    DatabaseMetaData dbm = null;
    boolean tableFound = false;
    String tableName;
    String tablePattern;
    ResultSet rsTables = null;
    Statement statement = null;
    try {
        dbm = getConnection().getMetaData();
        tableName = getQueryProvider().getUdsTableName();
        // Specific to the Oracle database: check required entities in the
        // user data store database.
        if (getQueryProvider().getDatabase().equalsIgnoreCase(SPConstants.SELECTED_DATABASE)) {
            statement = getConnection().createStatement();
            String query = getSqlQuery(Query.UDS_CHECK_TABLES);
            rsTables = statement.executeQuery(query);
            while (rsTables.next()) {
                if (tableName.equalsIgnoreCase(rsTables.getString(1))) {
                    tableFound = true;
                    LOGGER.config("User data store table found with name : " + tableName);
                    break;
                }
            }
        } else {
            if (dbm.storesUpperCaseIdentifiers()) {
                tablePattern = tableName.toUpperCase();
            } else if (dbm.storesLowerCaseIdentifiers()) {
                tablePattern = tableName.toLowerCase();
            } else {
                tablePattern = tableName;
            }
            tablePattern = tablePattern.replace("%", dbm.getSearchStringEscape() + "%");
            tablePattern = tablePattern.replace("_", dbm.getSearchStringEscape() + "_");
            rsTables = dbm.getTables(null, null, tablePattern, null);
            while (rsTables.next()) {
                if (tableName.equalsIgnoreCase(rsTables.getString(SPConstants.TABLE_NAME))) {
                    tableFound = true;
                    LOGGER.config(
                            "User data store table found with name : " + rsTables.getString("TABLE_NAME"));
                    break;
                }
            }
        }
        try {
            rsTables.close();
            if (null != statement) {
                statement.close();
            }
        } catch (SQLException e) {
            LOGGER.log(Level.WARNING, "Exception occurred while closing database resources.", e);
        }
        if (!tableFound) {
            getSimpleJdbcTemplate().update(getSqlQuery(Query.UDS_CREATE_TABLE));
            LOGGER.config("Created user data store table with name : " + Query.UDS_CREATE_TABLE
                    + " successfully");
            getSimpleJdbcTemplate().update(getSqlQuery(Query.UDS_CREATE_INDEX));
            LOGGER.config("Created user data store table index with name : " + Query.UDS_CREATE_INDEX
                    + " successfully");
        }
    } catch (Exception e) {
        LOGGER.log(Level.WARNING,
                "Exception occurred while getting the table information from the database metadata. ", e);
    }
}
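The example above also demonstrates a reusable idiom: adjust the identifier case to what the driver reports and escape the LIKE wildcards '%' and '_' before passing the name to getTables. A condensed sketch of that idiom follows; it is not part of the original source, and the class and method names (TableLookup, tableExists) are hypothetical, assuming an already-open Connection.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

public final class TableLookup {
    /** Returns true if a table matching the logical name is visible to the connection. */
    static boolean tableExists(Connection conn, String logicalName) throws SQLException {
        DatabaseMetaData dbm = conn.getMetaData();
        // Adjust case to how this driver stores unquoted identifiers.
        String pattern = dbm.storesUpperCaseIdentifiers() ? logicalName.toUpperCase()
                : dbm.storesLowerCaseIdentifiers() ? logicalName.toLowerCase() : logicalName;
        // Escape LIKE wildcards so '%' and '_' in the name are matched literally.
        String esc = dbm.getSearchStringEscape();
        pattern = pattern.replace("%", esc + "%").replace("_", esc + "_");
        try (ResultSet rs = dbm.getTables(null, null, pattern, new String[] { "TABLE" })) {
            while (rs.next()) {
                if (logicalName.equalsIgnoreCase(rs.getString("TABLE_NAME"))) {
                    return true;
                }
            }
        }
        return false;
    }
}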
From source file: org.apache.hive.beeline.HiveSchemaTool.java

boolean validateSchemaTables(Connection conn) throws HiveMetaException {
    String version = null;
    ResultSet rs = null;
    DatabaseMetaData metadata = null;
    List<String> dbTables = new ArrayList<String>();
    List<String> schemaTables = new ArrayList<String>();
    List<String> subScripts = new ArrayList<String>();
    Connection hmsConn = getConnectionToMetastore(false);

    System.out.println("Validating metastore schema tables");
    try {
        version = metaStoreSchemaInfo.getMetaStoreSchemaVersion(getConnectionInfo(false));
    } catch (HiveMetaException he) {
        System.err.println("Failed to determine schema version from Hive Metastore DB. " + he.getMessage());
        System.out.println("Failed in schema table validation.");
        LOG.debug("Failed to determine schema version from Hive Metastore DB," + he.getMessage());
        return false;
    }

    // re-open the hms connection
    hmsConn = getConnectionToMetastore(false);

    LOG.debug("Validating tables in the schema for version " + version);
    try {
        metadata = conn.getMetaData();
        String[] types = { "TABLE" };
        rs = metadata.getTables(null, hmsConn.getSchema(), "%", types);
        String table = null;
        while (rs.next()) {
            table = rs.getString("TABLE_NAME");
            dbTables.add(table.toLowerCase());
            LOG.debug("Found table " + table + " in HMS dbstore");
        }
    } catch (SQLException e) {
        throw new HiveMetaException(
                "Failed to retrieve schema tables from Hive Metastore DB," + e.getMessage());
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                throw new HiveMetaException("Failed to close resultset", e);
            }
        }
    }

    // parse the schema file to determine the tables that are expected to exist
    // we are using oracle schema because it is simpler to parse, no quotes or backticks etc
    String baseDir = new File(metaStoreSchemaInfo.getMetaStoreScriptDir()).getParent();
    String schemaFile = new File(metaStoreSchemaInfo.getMetaStoreScriptDir(),
            metaStoreSchemaInfo.generateInitFileName(version)).getPath();
    try {
        LOG.debug("Parsing schema script " + schemaFile);
        subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        while (subScripts.size() > 0) {
            schemaFile = baseDir + "/" + dbType + "/" + subScripts.remove(0);
            LOG.debug("Parsing subscript " + schemaFile);
            subScripts.addAll(findCreateTable(schemaFile, schemaTables));
        }
    } catch (Exception e) {
        System.err.println("Exception in parsing schema file. Cause:" + e.getMessage());
        System.out.println("Failed in schema table validation.");
        return false;
    }

    LOG.debug("Schema tables:[ " + Arrays.toString(schemaTables.toArray()) + " ]");
    LOG.debug("DB tables:[ " + Arrays.toString(dbTables.toArray()) + " ]");

    // now diff the lists
    schemaTables.removeAll(dbTables);
    if (schemaTables.size() > 0) {
        Collections.sort(schemaTables);
        System.err.println("Table(s) [ " + Arrays.toString(schemaTables.toArray())
                + " ] are missing from the metastore database schema.");
        System.out.println("Failed in schema table validation.");
        return false;
    } else {
        System.out.println("Succeeded in schema table validation.");
        return true;
    }
}
From source file: org.alfresco.repo.domain.schema.SchemaBootstrap.java

/**
 * Count applied patches. This fails if multiple applied patch tables are found,
 * which normally indicates that the schema view needs to be limited.
 *
 * @param cfg The Hibernate config
 * @param connection a valid database connection
 * @return Returns the number of applied patches
 * @throws NoSchemaException if the table of applied patches can't be found
 */
private int countAppliedPatches(Configuration cfg, Connection connection) throws Exception {
    String defaultSchema = dbSchemaName != null ? dbSchemaName : databaseMetaDataHelper.getSchema(connection);
    if (defaultSchema != null && defaultSchema.length() == 0) {
        defaultSchema = null;
    }
    String defaultCatalog = cfg.getProperty("hibernate.default_catalog");
    if (defaultCatalog != null && defaultCatalog.length() == 0) {
        defaultCatalog = null;
    }
    DatabaseMetaData dbMetadata = connection.getMetaData();

    ResultSet tableRs = dbMetadata.getTables(defaultCatalog, defaultSchema, "%", null);
    boolean newPatchTable = false;
    boolean oldPatchTable = false;
    try {
        boolean multipleSchemas = false;
        while (tableRs.next()) {
            String tableName = tableRs.getString("TABLE_NAME");
            if (tableName.equalsIgnoreCase("applied_patch")) {
                if (oldPatchTable || newPatchTable) {
                    // Found earlier
                    multipleSchemas = true;
                }
                oldPatchTable = true;
            } else if (tableName.equalsIgnoreCase("alf_applied_patch")) {
                if (oldPatchTable || newPatchTable) {
                    // Found earlier
                    multipleSchemas = true;
                }
                newPatchTable = true;
            }
        }
        // We go through all the tables so that multiple visible schemas are detected
        if (multipleSchemas) {
            throw new AlfrescoRuntimeException(ERR_MULTIPLE_SCHEMAS);
        }
    } finally {
        try {
            tableRs.close();
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    if (newPatchTable) {
        Statement stmt = connection.createStatement();
        try {
            ResultSet rs = stmt.executeQuery("select count(id) from alf_applied_patch");
            rs.next();
            int count = rs.getInt(1);
            return count;
        } catch (SQLException e) {
            // This should work at least and is probably an indication of the user viewing multiple schemas
            throw new AlfrescoRuntimeException(ERR_MULTIPLE_SCHEMAS);
        } finally {
            try {
                stmt.close();
            } catch (Throwable e) {
            }
        }
    } else if (oldPatchTable) {
        // found the old style table name
        Statement stmt = connection.createStatement();
        try {
            ResultSet rs = stmt.executeQuery("select count(id) from applied_patch");
            rs.next();
            int count = rs.getInt(1);
            return count;
        } finally {
            try {
                stmt.close();
            } catch (Throwable e) {
            }
        }
    } else {
        // The applied patches table is not present
        throw new NoSchemaException();
    }
}
From source file: org.talend.core.model.metadata.builder.database.manager.ExtractManager.java

/**
 * DOC qzhang Comment method "getResultSetFromTableInfo".
 *
 * @param dbMetaData
 * @return
 * @throws SQLException
 * @throws IllegalAccessException
 * @throws InstantiationException
 * @throws ClassNotFoundException
 */
protected ResultSet getResultSetFromTableInfo(TableInfoParameters tableInfo, String namePattern,
        IMetadataConnection iMetadataConnection, String schema)
        throws SQLException, ClassNotFoundException, InstantiationException, IllegalAccessException {
    ResultSet rsTables = null;
    ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
    String tableNamePattern = "".equals(namePattern) ? null : namePattern; //$NON-NLS-1$
    String[] types = new String[tableInfo.getTypes().size()];
    for (int i = 0; i < types.length; i++) {
        final String selectedTypeName = tableInfo.getTypes().get(i).getName();
        // bug 0017782: DB2's SYNONYM needs to be converted to ALIAS
        if ("SYNONYM".equals(selectedTypeName)
                && iMetadataConnection.getDbType().equals(EDatabaseTypeName.IBMDB2.getDisplayName())) {
            types[i] = "ALIAS";
        } else {
            types[i] = selectedTypeName;
        }
    }
    DatabaseMetaData dbMetaData = null;
    // Added by Marvin Wang on Mar. 13, 2013 for loading hive jars dynamically, refer to TDI-25072.
    if (EDatabaseTypeName.HIVE.getXmlName().equalsIgnoreCase(iMetadataConnection.getDbType())) {
        dbMetaData = HiveConnectionManager.getInstance().extractDatabaseMetaData(iMetadataConnection);
    } else {
        dbMetaData = extractMeta.getDatabaseMetaData(extractMeta.getConn(), iMetadataConnection.getDbType(),
                iMetadataConnection.isSqlMode(), iMetadataConnection.getDatabase());
    }
    // rsTables = dbMetaData.getTables(null, ExtractMetaDataUtils.schema, tableNamePattern, types);
    ResultSet rsTableTypes = null;
    rsTableTypes = dbMetaData.getTableTypes();
    Set<String> availableTableTypes = new HashSet<String>();
    String[] neededTableTypes = { ETableTypes.TABLETYPE_TABLE.getName(), ETableTypes.TABLETYPE_VIEW.getName(),
            ETableTypes.TABLETYPE_SYNONYM.getName() };
    while (rsTableTypes.next()) {
        String currentTableType = StringUtils.trimToEmpty(rsTableTypes.getString(ExtractManager.TABLE_TYPE));
        if (ArrayUtils.contains(neededTableTypes, currentTableType)) {
            availableTableTypes.add(currentTableType);
        }
    }
    rsTableTypes.close();

    rsTables = dbMetaData.getTables(null, schema, tableNamePattern, types);
    return rsTables;
}
From source file: org.apache.ddlutils.task.DumpMetadataTask.java

/**
 * Dumps all tables.
 *
 * @param xmlWriter The xml writer to write to
 * @param metaData The database metadata
 */
private void dumpTables(PrettyPrintingXmlWriter xmlWriter, final DatabaseMetaData metaData) {
    // First we need the list of supported table types
    final ArrayList tableTypeList = new ArrayList();

    performResultSetXmlOperation(xmlWriter, "tableTypes", new ResultSetXmlOperation() {
        public ResultSet getResultSet() throws SQLException {
            return metaData.getTableTypes();
        }

        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException {
            String tableType = result.getString("TABLE_TYPE");

            tableTypeList.add(tableType);
            xmlWriter.writeElementStart(null, "tableType");
            xmlWriter.writeAttribute(null, "name", tableType);
            xmlWriter.writeElementEnd();
        }

        public void handleError(SQLException ex) {
            log("Could not read the table types from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
        }
    });

    final String[] tableTypesToRead;

    if ((_tableTypes == null) || (_tableTypes.length == 0)) {
        tableTypesToRead = (String[]) tableTypeList.toArray(new String[tableTypeList.size()]);
    } else {
        tableTypesToRead = _tableTypes;
    }

    performResultSetXmlOperation(xmlWriter, "tables", new ResultSetXmlOperation() {
        public ResultSet getResultSet() throws SQLException {
            return metaData.getTables(_catalogPattern, _schemaPattern, _tablePattern, tableTypesToRead);
        }

        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException {
            Set columns = getColumnsInResultSet(result);
            String tableName = result.getString("TABLE_NAME");

            if ((tableName != null) && (tableName.length() > 0)) {
                String catalog = result.getString("TABLE_CAT");
                String schema = result.getString("TABLE_SCHEM");

                log("Reading table " + ((schema != null) && (schema.length() > 0) ? schema + "." : "")
                        + tableName, Project.MSG_INFO);

                xmlWriter.writeElementStart(null, "table");
                xmlWriter.writeAttribute(null, "name", tableName);
                if (catalog != null) {
                    xmlWriter.writeAttribute(null, "catalog", catalog);
                }
                if (schema != null) {
                    xmlWriter.writeAttribute(null, "schema", schema);
                }

                addStringAttribute(xmlWriter, "type", result, columns, "TABLE_TYPE");
                addStringAttribute(xmlWriter, "remarks", result, columns, "REMARKS");
                addStringAttribute(xmlWriter, "typeName", result, columns, "TYPE_NAME");
                addStringAttribute(xmlWriter, "typeCatalog", result, columns, "TYPE_CAT");
                addStringAttribute(xmlWriter, "typeSchema", result, columns, "TYPE_SCHEM");
                addStringAttribute(xmlWriter, "identifierColumn", result, columns, "SELF_REFERENCING_COL_NAME");
                addStringAttribute(xmlWriter, "identifierGeneration", result, columns, "REF_GENERATION");

                dumpColumns(xmlWriter, metaData, catalog, schema, tableName);
                dumpPKs(xmlWriter, metaData, catalog, schema, tableName);
                dumpVersionColumns(xmlWriter, metaData, catalog, schema, tableName);
                dumpFKs(xmlWriter, metaData, catalog, schema, tableName);
                dumpIndexes(xmlWriter, metaData, catalog, schema, tableName);

                xmlWriter.writeElementEnd();
            }
        }

        public void handleError(SQLException ex) {
            log("Could not read the tables from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
        }
    });
}
From source file: org.talend.core.model.metadata.DBConnectionFillerImplTest.java

private void initializeForFillTables(orgomg.cwm.objectmodel.core.Package pack, DatabaseMetaData dbmd,
        String[] tableType, boolean isOracle) throws SQLException {
    when(pack.getName()).thenReturn("tdqPackage"); //$NON-NLS-1$
    PowerMockito.mockStatic(PackageHelper.class);
    when(PackageHelper.getParentPackage(pack)).thenReturn(pack);
    when(PackageHelper.getCatalogOrSchema(pack)).thenReturn(pack);
    Connection con = mock(Connection.class);
    PowerMockito.mockStatic(MetadataConnectionUtils.class);
    when(MetadataConnectionUtils.isAS400(pack)).thenReturn(false);
    when(MetadataConnectionUtils.isOracle(con)).thenReturn(isOracle);
    when(MetadataConnectionUtils.isSybase(dbmd)).thenReturn(false);
    when(MetadataConnectionUtils.isOracle8i(con)).thenReturn(false);
    when(MetadataConnectionUtils.isOracleJDBC(con)).thenReturn(isOracle);
    PowerMockito.mockStatic(ConnectionHelper.class);
    when(ConnectionHelper.getConnection(pack)).thenReturn(con);
    List<String> filterNames = new ArrayList<String>();
    filterNames.add("Table1"); //$NON-NLS-1$
    if (isOracle) {
        java.sql.Connection sqlConn = mock(java.sql.Connection.class);
        when(dbmd.getConnection()).thenReturn(sqlConn);
        Statement stme = mock(Statement.class);
        ResultSet rsTables = mock(ResultSet.class);
        when(sqlConn.createStatement()).thenReturn(stme);
        when(stme.executeQuery(TableInfoParameters.ORACLE_10G_RECBIN_SQL)).thenReturn(rsTables);
        stub(method(ExtractMetaDataFromDataBase.class, "getTableNamesFromQuery")).toReturn(filterNames); //$NON-NLS-1$
        stub(method(ExtensionImplementationProvider.class, "getInstanceV2", IExtensionPointLimiter.class)) //$NON-NLS-1$
                .toReturn(new ArrayList<IConfigurationElement>());
    }
    ResultSet rs = mock(ResultSet.class);
    when(rs.next()).thenReturn(true).thenReturn(true).thenReturn(false);
    when(rs.getString(GetTable.TABLE_NAME.name())).thenReturn("Table1").thenReturn("Table2"); //$NON-NLS-1$ //$NON-NLS-2$
    when(rs.getString(GetTable.TABLE_TYPE.name())).thenReturn("Table"); //$NON-NLS-1$
    when(rs.getString(GetTable.REMARKS.name())).thenReturn(""); //$NON-NLS-1$
    when(dbmd.getTables("tdqPackage", "tdqPackage", "", tableType)).thenReturn(rs); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    when(dbmd.getTables("tdqPackage", null, "", tableType)).thenReturn(rs); //$NON-NLS-1$ //$NON-NLS-2$
    stub(method(StringUtils.class, "isBlank")).toReturn(false); //$NON-NLS-1$
    ProxyRepositoryFactory proxFactory = mock(ProxyRepositoryFactory.class);
    when(proxFactory.getNextId()).thenReturn("abcd1").thenReturn("abcd2"); //$NON-NLS-1$ //$NON-NLS-2$
    stub(method(ProxyRepositoryFactory.class, "getInstance")).toReturn(proxFactory); //$NON-NLS-1$
}
From source file: org.executequery.databasemediators.spi.DefaultStatementExecutor.java

/**
 * <p>Retrieves a description of the specified table using
 * the connection's <code>DatabaseMetaData</code> object
 * and the method <code>getColumns(...)</code>.
 *
 * @param tableName the table name to describe
 * @return the query result
 */
private SqlStatementResult getTableDescription(String tableName) throws SQLException {
    if (!prepared()) {
        return statementResult;
    }

    DatabaseHost host = null;
    try {
        /* -------------------------------------------------
         * Database meta data values can be case-sensitive.
         * Search for a match and use as returned from dmd.
         * -------------------------------------------------
         */
        String name = tableName;
        String catalog = null;
        String schema = null;

        host = new DatabaseObjectFactoryImpl().createDatabaseHost(databaseConnection);

        int nameDelim = tableName.indexOf('.');
        if (nameDelim != -1) {
            name = tableName.substring(nameDelim + 1);
            String value = tableName.substring(0, nameDelim);

            DatabaseMetaData databaseMetaData = host.getDatabaseMetaData();
            if (host.supportsCatalogsInTableDefinitions()) {
                ResultSet resultSet = databaseMetaData.getCatalogs();
                while (resultSet.next()) {
                    String _catalog = resultSet.getString(1);
                    if (value.equalsIgnoreCase(_catalog)) {
                        catalog = _catalog;
                        break;
                    }
                }
                resultSet.close();
            } else if (host.supportsSchemasInTableDefinitions()) {
                ResultSet resultSet = databaseMetaData.getCatalogs();
                while (resultSet.next()) {
                    String _schema = resultSet.getString(1);
                    if (value.equalsIgnoreCase(_schema)) {
                        schema = _schema;
                        break;
                    }
                }
                resultSet.close();
            }
        }

        DatabaseMetaData databaseMetaData = host.getDatabaseMetaData();
        ResultSet resultSet = databaseMetaData.getTables(catalog, schema, null, null);

        String nameToSearchOn = null;
        while (resultSet.next()) {
            String _tableName = resultSet.getString(3);
            if (_tableName.equalsIgnoreCase(name)) {
                nameToSearchOn = _tableName;
                break;
            }
        }
        resultSet.close();

        if (StringUtils.isNotBlank(nameToSearchOn)) {
            databaseMetaData = conn.getMetaData();
            resultSet = databaseMetaData.getColumns(catalog, schema, nameToSearchOn, null);
            statementResult.setResultSet(resultSet);
        } else {
            statementResult.setMessage("Invalid table name");
        }
    } catch (SQLException e) {
        statementResult.setSqlException(e);
        finished();
    } catch (OutOfMemoryError e) {
        statementResult.setMessage(e.getMessage());
        releaseResources();
    } finally {
        if (host != null) {
            host.close();
        }
    }

    return statementResult;
}
From source file: DatabaseInfo.java

public void doGet(HttpServletRequest inRequest, HttpServletResponse outResponse)
        throws ServletException, IOException {
    PrintWriter out = null;
    Connection connection = null;
    Statement statement;
    ResultSet rs;
    outResponse.setContentType("text/html");
    out = outResponse.getWriter();
    try {
        Context ctx = new InitialContext();
        DataSource ds = (DataSource) ctx.lookup("java:comp/env/jdbc/AccountsDB");
        connection = ds.getConnection();
        DatabaseMetaData md = connection.getMetaData();
        statement = connection.createStatement();
        out.println("<HTML><HEAD><TITLE>Database Server Information</TITLE></HEAD>");
        out.println("<BODY>");

        out.println("<H1>General Source Information</H1>");
        out.println("getURL() - " + md.getURL() + "<BR>");
        out.println("getUserName() - " + md.getUserName() + "<BR>");
        out.println("getDatabaseProductVersion - " + md.getDatabaseProductVersion() + "<BR>");
        out.println("getDriverMajorVersion - " + md.getDriverMajorVersion() + "<BR>");
        out.println("getDriverMinorVersion - " + md.getDriverMinorVersion() + "<BR>");
        out.println("nullAreSortedHigh - " + md.nullsAreSortedHigh() + "<BR>");

        out.println("<H1>Feature Support</H1>");
        out.println("supportsAlterTableWithDropColumn - " + md.supportsAlterTableWithDropColumn() + "<BR>");
        out.println("supportsBatchUpdates - " + md.supportsBatchUpdates() + "<BR>");
        out.println("supportsTableCorrelationNames - " + md.supportsTableCorrelationNames() + "<BR>");
        out.println("supportsPositionedDelete - " + md.supportsPositionedDelete() + "<BR>");
        out.println("supportsFullOuterJoins - " + md.supportsFullOuterJoins() + "<BR>");
        out.println("supportsStoredProcedures - " + md.supportsStoredProcedures() + "<BR>");
        out.println("supportsMixedCaseQuotedIdentifiers - " + md.supportsMixedCaseQuotedIdentifiers() + "<BR>");
        out.println("supportsANSI92EntryLevelSQL - " + md.supportsANSI92EntryLevelSQL() + "<BR>");
        out.println("supportsCoreSQLGrammar - " + md.supportsCoreSQLGrammar() + "<BR>");

        out.println("<H1>Data Source Limits</H1>");
        out.println("getMaxRowSize - " + md.getMaxRowSize() + "<BR>");
        out.println("getMaxStatementLength - " + md.getMaxStatementLength() + "<BR>");
        out.println("getMaxTablesInSelect - " + md.getMaxTablesInSelect() + "<BR>");
        out.println("getMaxConnections - " + md.getMaxConnections() + "<BR>");
        out.println("getMaxCharLiteralLength - " + md.getMaxCharLiteralLength() + "<BR>");

        out.println("<H1>SQL Object Available</H1>");
        out.println("getTableTypes()<BR><UL>");
        rs = md.getTableTypes();
        while (rs.next()) {
            out.println("<LI>" + rs.getString(1));
        }
        out.println("</UL>");
        out.println("getTables()<BR><UL>");
        rs = md.getTables("accounts", "", "%", new String[0]);
        while (rs.next()) {
            out.println("<LI>" + rs.getString("TABLE_NAME"));
        }
        out.println("</UL>");

        out.println("<H1>Transaction Support</H1>");
        out.println("getDefaultTransactionIsolation() - " + md.getDefaultTransactionIsolation() + "<BR>");
        out.println("dataDefinitionIgnoredInTransactions() - " + md.dataDefinitionIgnoredInTransactions() + "<BR>");

        out.println("<H1>General Source Information</H1>");
        out.println("getMaxTablesInSelect - " + md.getMaxTablesInSelect() + "<BR>");
        out.println("getMaxColumnsInTable - " + md.getMaxColumnsInTable() + "<BR>");
        out.println("getTimeDateFunctions - " + md.getTimeDateFunctions() + "<BR>");
        out.println("supportsCoreSQLGrammar - " + md.supportsCoreSQLGrammar() + "<BR>");
        out.println("getTypeInfo()<BR><UL>");
        rs = md.getTypeInfo();
        while (rs.next()) {
            out.println("<LI>" + rs.getString(1));
        }
        out.println("</UL>");
        out.println("</BODY></HTML>");
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file: org.apache.ode.bpel.extvar.jdbc.JdbcExternalVariableModule.java

public void configure(QName pid, String extVarId, Element config) throws ExternalVariableModuleException {
    EVarId evarId = new EVarId(pid, extVarId);
    DataSource ds = null;

    Element jndiDs = DOMUtils.findChildByName(config, new QName(JDBC_NS, "datasource-jndi"));
    Element jndiRef = DOMUtils.findChildByName(config, new QName(JDBC_NS, "datasource-ref"));
    Element initMode = DOMUtils.findChildByName(config, new QName(JDBC_NS, "init-mode"));

    if (jndiRef != null) {
        String refname = jndiRef.getTextContent().trim();
        ds = _dataSources.get(refname);
        if (ds == null)
            throw new ExternalVariableModuleException(
                    "Data source reference \"" + refname + "\" not found for external variable " + evarId
                            + "; make sure to register the data source with the engine!");
    } else if (jndiDs != null) {
        String name = jndiDs.getTextContent().trim();
        Object dsCandidate;
        InitialContext ctx;
        try {
            ctx = new InitialContext();
        } catch (Exception ex) {
            throw new ExternalVariableModuleException(
                    "Unable to access JNDI context for external variable " + evarId, ex);
        }
        try {
            dsCandidate = ctx.lookup(name);
        } catch (Exception ex) {
            throw new ExternalVariableModuleException("Lookup of data source for " + evarId + " failed.", ex);
        } finally {
            try {
                ctx.close();
            } catch (NamingException e) {
                /* ignore */
            }
        }
        if (dsCandidate == null)
            throw new ExternalVariableModuleException("Data source \"" + name + "\" not found in JNDI!");
        if (!(dsCandidate instanceof DataSource))
            throw new ExternalVariableModuleException(
                    "JNDI object \"" + name + "\" does not implement javax.sql.DataSource");
        ds = (DataSource) dsCandidate;
    }

    if (ds == null) {
        throw new ExternalVariableModuleException(
                "No valid data source configuration for JDBC external variable " + evarId);
    }

    Connection conn = null;
    DatabaseMetaData metaData;
    try {
        conn = ds.getConnection();
        metaData = conn.getMetaData();
    } catch (Exception ex) {
        try {
            if (conn != null)
                conn.close();
        } catch (SQLException e) {
            // ignore
        }
        throw new ExternalVariableModuleException(
                "Unable to open database connection for external variable " + evarId, ex);
    }

    try {
        DbExternalVariable dbev = new DbExternalVariable(evarId, ds);
        if (initMode != null)
            try {
                dbev._initType = InitType.valueOf(initMode.getTextContent().trim());
            } catch (Exception ex) {
                throw new ExternalVariableModuleException(
                        "Invalid <init-mode> value: " + initMode.getTextContent().trim());
            }

        Element tableName = DOMUtils.findChildByName(config, new QName(JDBC_NS, "table"));
        if (tableName == null || tableName.getTextContent().trim().equals(""))
            throw new ExternalVariableModuleException("Must specify <table> for external variable " + evarId);
        String table = tableName.getTextContent().trim();
        String schema = null;
        if (table.indexOf('.') != -1) {
            schema = table.substring(0, table.indexOf('.'));
            table = table.substring(table.indexOf('.') + 1);
        }

        if (metaData.storesLowerCaseIdentifiers()) {
            table = table.toLowerCase();
            if (schema != null)
                schema = schema.toLowerCase();
        } else if (metaData.storesUpperCaseIdentifiers()) {
            table = table.toUpperCase();
            if (schema != null)
                schema = schema.toUpperCase();
        }

        dbev.generatedKeys = metaData.supportsGetGeneratedKeys();
        ResultSet tables = metaData.getTables(null, schema, table, null);
        if (tables.next()) {
            dbev.table = tables.getString("TABLE_NAME");
            dbev.schema = tables.getString("TABLE_SCHEM");
        } else
            throw new ExternalVariableModuleException("Table \"" + table + "\" not found in database.");
        tables.close();

        List<Element> columns = DOMUtils.findChildrenByName(config, new QName(JDBC_NS, "column"));

        for (Element col : columns) {
            String name = col.getAttribute("name");
            String colname = col.getAttribute("column-name");
            String key = col.getAttribute("key");
            String gentype = col.getAttribute("generator");
            String expression = col.getAttribute("expression");

            if (key == null || "".equals(key))
                key = "no";
            if (gentype == null || "".equals(gentype))
                gentype = GenType.none.toString();
            if (colname == null || "".equals(colname))
                colname = name;
            if (name == null || "".equals(name))
                throw new ExternalVariableModuleException(
                        "External variable " + evarId + " <column> element must have \"name\" attribute. ");

            if (metaData.storesLowerCaseIdentifiers())
                colname = colname.toLowerCase();
            else if (metaData.storesUpperCaseIdentifiers())
                colname = colname.toUpperCase();

            GenType gtype;
            try {
                gtype = GenType.valueOf(gentype);
            } catch (Exception ex) {
                throw new ExternalVariableModuleException("External variable " + evarId + " column \"" + name
                        + "\" generator type \"" + gentype + "\" is unknown.");
            }
            if (gtype == GenType.expression && (expression == null || "".equals(expression)))
                throw new ExternalVariableModuleException("External variable " + evarId + " column \"" + name
                        + "\" used \"expression\" generator, but did not specify an expression");

            Column c = dbev.new Column(name, colname, key.equalsIgnoreCase("yes"), gtype, expression);
            ResultSet cmd = metaData.getColumns(null, dbev.schema, dbev.table, colname);
            try {
                if (cmd.next()) {
                    c.dataType = cmd.getInt("DATA_TYPE");
                    c.nullok = cmd.getInt("NULLABLE") != 0;
                } else
                    throw new ExternalVariableModuleException("External variable " + evarId + " referenced "
                            + "non-existent column \"" + colname + "\"!");
            } finally {
                cmd.close();
            }
            dbev.addColumn(c);
        }

        if (dbev.numColumns() == 0)
            throw new ExternalVariableModuleException(
                    "External variable " + evarId + " did not have any <column> elements!");

        _vars.put(evarId, dbev);
    } catch (SQLException se) {
        throw new ExternalVariableModuleException("SQL Error", se);
    } finally {
        try {
            conn.close();
        } catch (SQLException e) {
        }
    }
}