Example usage for java.sql DatabaseMetaData getTables

List of usage examples for java.sql DatabaseMetaData getTables

Introduction

This page lists example usages of java.sql DatabaseMetaData getTables, collected from open-source projects.

Prototype

ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String types[])
        throws SQLException;

Document

Retrieves a description of the tables available in the given catalog.
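
Before the project examples below, here is a minimal sketch of a direct call. It lists base tables and views across all catalogs and schemas; the in-memory H2 JDBC URL is only a placeholder assumption, so substitute your own connection.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ListTables {
    public static void main(String[] args) throws SQLException {
        // Placeholder JDBC URL; any java.sql.Connection works the same way.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            DatabaseMetaData md = conn.getMetaData();
            // null catalog and schema patterns match everything, "%" matches any
            // table name, and the types array restricts results to tables and views.
            try (ResultSet tables = md.getTables(null, null, "%", new String[] { "TABLE", "VIEW" })) {
                while (tables.next()) {
                    System.out.println(tables.getString("TABLE_SCHEM") + "."
                            + tables.getString("TABLE_NAME") + " (" + tables.getString("TABLE_TYPE") + ")");
                }
            }
        }
    }
}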

Usage

From source file:com.ccl.jersey.codegen.SimpleMetaDataExporter.java

/**
 * Export the tables based on the given database metadata
 *
 * @param md   database metadata describing the tables to export
 * @param stmt statement passed through to the per-table handlers
 * @throws SQLException
 */
public void export(DatabaseMetaData md, Statement stmt) throws SQLException {
    if (basePackageName == null) {
        basePackageName = module.getPackageName();
    }
    module.bind(SQLCodegenModule.PACKAGE_NAME, basePackageName + ".query");

    String beanPackageName = basePackageName + ".entity";
    module.bind(SQLCodegenModule.BEAN_PACKAGE_NAME, beanPackageName);

    if (spatial) {
        SpatialSupport.addSupport(module);
    }

    typeMappings = module.get(TypeMappings.class);
    queryTypeFactory = module.get(QueryTypeFactory.class);
    metaDataSerializer = module.get(Serializer.class);
    beanSerializer = module.get(Serializer.class, SQLCodegenModule.BEAN_SERIALIZER);
    namingStrategy = module.get(NamingStrategy.class);
    configuration = module.get(Configuration.class);

    SQLTemplates templates = sqlTemplatesRegistry.getTemplates(md);
    if (templates != null) {
        configuration.setTemplates(templates);
    } else {
        logger.info("Found no specific dialect for " + md.getDatabaseProductName());
    }

    if (beanSerializer == null) {
        keyDataFactory = new KeyDataFactory(namingStrategy, module.getPackageName(), module.getPrefix(),
                module.getSuffix(), schemaToPackage);
    } else {
        keyDataFactory = new KeyDataFactory(namingStrategy, beanPackageName, module.getBeanPrefix(),
                module.getBeanSuffix(), schemaToPackage);
    }

    String[] typesArray = null;

    if (tableTypesToExport != null && !tableTypesToExport.isEmpty()) {
        List<String> types = new ArrayList<String>();
        for (String tableType : tableTypesToExport.split(",")) {
            types.add(tableType.trim());
        }
        typesArray = types.toArray(new String[types.size()]);
    } else if (!exportAll) {
        List<String> types = new ArrayList<String>(2);
        if (exportTables) {
            types.add("TABLE");
        }
        if (exportViews) {
            types.add("VIEW");
        }
        typesArray = types.toArray(new String[types.size()]);
    }

    Map<String, String> modules = new HashMap<String, String>();

    if (tableNamePattern != null && tableNamePattern.contains(",")) {
        for (String table : tableNamePattern.split(",")) {
            ResultSet tables = md.getTables(null, schemaPattern, null, typesArray);
            try {
                while (tables.next()) {
                    String tableName = normalize(tables.getString("TABLE_NAME"));
                    if (tableName.matches(table)) {
                        addModule(stmt, modules, tableName);
                        handleTable(md, tables, stmt);
                    }
                }
            } finally {
                tables.close();
            }
        }
    } else {
        ResultSet tables = md.getTables(null, schemaPattern, null, typesArray);
        try {
            while (tables.next()) {
                String tableName = normalize(tables.getString("TABLE_NAME"));
                if (null == tableNamePattern || tableName.matches(tableNamePattern)) {
                    addModule(stmt, modules, tableName);
                    handleTable(md, tables, stmt);
                }
            }
        } finally {
            tables.close();
        }
    }

    stmt.close();
}

From source file:org.talend.metadata.managment.model.DBConnectionFillerImpl.java

@Override
public List<TdTable> fillTables(Package pack, DatabaseMetaData dbJDBCMetadata, List<String> tableFilter,
        String tablePattern, String[] tableType) {
    List<TdTable> tableList = new ArrayList<TdTable>();
    if (dbJDBCMetadata == null) {
        return null;
    }
    Package catalogOrSchema = PackageHelper.getCatalogOrSchema(pack);
    String catalogName = null;
    String schemaPattern = null;

    if (catalogOrSchema != null) {
        // catalog
        if (catalogOrSchema instanceof Catalog) {
            catalogName = catalogOrSchema.getName();
            if (MetadataConnectionUtils.isAS400(catalogOrSchema)) {
                return tableList;
            }
        } else {// schema
            Package parentCatalog = PackageHelper.getParentPackage(catalogOrSchema);
            schemaPattern = catalogOrSchema.getName();
            catalogName = parentCatalog == null ? null : parentCatalog.getName();
        }
    }
    try {
        // common
        boolean flag = true;
        String tableComment = null;
        List<String> tablesToFilter = new ArrayList<String>();
        if (pack != null) {
            Connection c = ConnectionHelper.getConnection(pack);
            flag = MetadataConnectionUtils.isOracle8i(c);
            boolean isOracle = MetadataConnectionUtils.isOracle(c);
            boolean isOracleJdbc = MetadataConnectionUtils.isOracleJDBC(c);
            // MetadataConnectionUtils.isOracle8i(connection)
            if ((isOracle || isOracleJdbc) && !flag) {// oracle and not oracle8
                Statement stmt;
                try {
                    // MOD qiongli TDQ-4732 use the common method to create statement both DI and DQ,avoid Exception
                    // for top.
                    stmt = dbJDBCMetadata.getConnection().createStatement();
                    ResultSet rsTables = stmt.executeQuery(TableInfoParameters.ORACLE_10G_RECBIN_SQL);
                    tablesToFilter = ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables,
                            dbJDBCMetadata.getConnection());
                    rsTables.close();
                    stmt.close();
                } catch (SQLException e) {
                    ExceptionHandler.process(e);
                }
            }
        }

        ResultSet tables = dbJDBCMetadata.getTables(catalogName, schemaPattern, tablePattern, tableType);

        while (tables.next()) {
            String tableName = getStringFromResultSet(tables, GetTable.TABLE_NAME.name());
            String temptableType = getStringFromResultSet(tables, GetTable.TABLE_TYPE.name());
            // for special db. teradata_sql_model/db2_zos/as400
            temptableType = convertSpecialTableType(temptableType);
            // if TableType is view type don't create it at here.
            if (TableType.VIEW.toString().equals(temptableType)) {
                continue;
            }

            // for
            if (!isCreateElement(tableFilter, tableName)) {
                continue;
            }

            if (tableName == null || tablesToFilter.contains(tableName) || tableName.startsWith("/")) { //$NON-NLS-1$
                continue;
            }
            if (!flag) {
                tableComment = getTableComment(dbJDBCMetadata, tables, tableName, catalogName, schemaPattern);
            }
            // create table
            TdTable table = RelationalFactory.eINSTANCE.createTdTable();
            table.setName(tableName);
            table.setTableType(temptableType);
            table.setLabel(table.getName());
            // MOD qiongli 2011-11-30 TDQ-3930. Set id for this retrieved table.
            table.setId(EcoreUtil.generateUUID());
            if (tableComment != null) {
                ColumnSetHelper.setComment(tableComment, table);
            }
            tableList.add(table);
        }
        if (isLinked()) {
            PackageHelper.addMetadataTable(ListUtils.castList(MetadataTable.class, tableList), pack);
        }

    } catch (SQLException e) {
        log.error(e, e);
    }
    return tableList;
}

From source file:org.talend.metadata.managment.model.DBConnectionFillerImpl.java

@Override
public List<MetadataTable> fillAll(Package pack, DatabaseMetaData dbJDBCMetadata,
        IMetadataConnection metaConnection, List<String> tableFilter, String tablePattern, String[] tableType) {

    List<MetadataTable> list = new ArrayList<MetadataTable>();
    if (dbJDBCMetadata == null) {
        return null;
    }
    ExtractMetaDataUtils extractMeta = ExtractMetaDataUtils.getInstance();
    Package catalogOrSchema = PackageHelper.getCatalogOrSchema(pack);
    String catalogName = null;
    String schemaPattern = null;

    if (catalogOrSchema != null) {
        // catalog
        if (catalogOrSchema instanceof Catalog) {
            catalogName = catalogOrSchema.getName();
        } else {// schema
            Package parentCatalog = PackageHelper.getParentPackage(catalogOrSchema);
            // in the fillSchema, we set one default schema with " ", but this one doesn't exist, so we should
            // replace to get the tables from all schemas instead
            schemaPattern = " ".equals(catalogOrSchema.getName()) ? null : catalogOrSchema.getName(); //$NON-NLS-1$
            catalogName = parentCatalog == null ? null : parentCatalog.getName();
        }
    }
    try {
        // common
        boolean isOracle8i = true;
        boolean isOracle = false;
        boolean isOracleJdbc = false;
        String tableComment = null;
        List<String> tablesToFilter = new ArrayList<String>();
        if (pack != null) {
            Connection c = ConnectionHelper.getConnection(pack);
            isOracle8i = MetadataConnectionUtils.isOracle8i(c);
            isOracle = MetadataConnectionUtils.isOracle(c);
            isOracleJdbc = MetadataConnectionUtils.isOracleJDBC(c);
            if ((isOracleJdbc || isOracle) && !isOracle8i) {// oracle and not oracle8
                Statement stmt;
                try {
                    // MOD qiongli TDQ-4732 use the common method to create statement both DI and DQ,avoid Exception
                    // for top.
                    stmt = dbJDBCMetadata.getConnection().createStatement();
                    ResultSet rsTables = stmt.executeQuery(TableInfoParameters.ORACLE_10G_RECBIN_SQL);
                    tablesToFilter = ExtractMetaDataFromDataBase.getTableNamesFromQuery(rsTables,
                            dbJDBCMetadata.getConnection());
                    rsTables.close();
                    stmt.close();
                } catch (SQLException e) {
                    ExceptionHandler.process(e);
                }
            }
        }

        boolean isHive2 = HiveConnectionManager.getInstance().isHive2(metaConnection);
        if (isHive2) {
            // for CDH4 HIVE2 , the table type are MANAGED_TABLE and EXTERNAL_TABLE ......
            // tableType = null;
        }
        ResultSet tables = dbJDBCMetadata.getTables(catalogName, schemaPattern, tablePattern, tableType);

        while (tables.next()) {
            String coloumnName = GetTable.TABLE_SCHEM.name();
            if (schemaPattern != null) {
                try {
                    tables.getString(coloumnName);
                } catch (Exception e) {
                    coloumnName = GetTable.TABLE_SCHEMA.name();
                }
            }
            String tableName = getStringFromResultSet(tables, GetTable.TABLE_NAME.name());
            String temptableType = getStringFromResultSet(tables, GetTable.TABLE_TYPE.name());

            // for special db. teradata_sql_model/db2_zos/as400
            temptableType = convertSpecialTableType(temptableType);
            // for
            if (!isCreateElement(tableFilter, tableName)) {
                continue;
            }
            if (tableName == null || tablesToFilter.contains(tableName)) {
                continue;
            }
            //                if (!isOracle && !isOracle8i && !isOracleJdbc && tableName.startsWith("/")) { //$NON-NLS-1$
            // continue;
            // }
            if (!isOracle8i) {
                tableComment = getTableComment(dbJDBCMetadata, tables, tableName, catalogName, schemaPattern);
            }
            MetadataTable metadatatable = null;
            if (TableType.VIEW.toString().equals(temptableType)
                    || ETableTypes.VIRTUAL_VIEW.getName().equals(temptableType)) {
                metadatatable = RelationalFactory.eINSTANCE.createTdView();
            } else {
                metadatatable = RelationalFactory.eINSTANCE.createTdTable();
            }

            metadatatable.setName(tableName);
            // Added by Marvin Wang on Feb. 6, 2012 for bug TDI-24413, it is just for hive external table.
            if (ETableTypes.TABLETYPE_EXTERNAL_TABLE.getName().equals(temptableType)
                    || ETableTypes.EXTERNAL_TABLE.getName().equals(temptableType)
                    || ETableTypes.MANAGED_TABLE.getName().equals(temptableType)
                    || ETableTypes.INDEX_TABLE.getName().equals(temptableType)) {
                metadatatable.setTableType(ETableTypes.TABLETYPE_TABLE.getName());
            } else if (ETableTypes.VIRTUAL_VIEW.getName().equals(temptableType)) {
                metadatatable.setTableType(ETableTypes.TABLETYPE_VIEW.getName());
            } else {
                metadatatable.setTableType(temptableType);
            }
            metadatatable.setLabel(metadatatable.getName());
            if (tableComment != null) {
                metadatatable.setComment(tableComment);
                ColumnSetHelper.setComment(tableComment, metadatatable);
            }
            try {
                if (tables.getString("SYSTEM_TABLE_NAME") != null //$NON-NLS-1$
                        && tables.getString("SYSTEM_TABLE_SCHEMA") != null //$NON-NLS-1$
                        && tables.getString("TABLE_SCHEMA") != null) { //$NON-NLS-1$
                    TaggedValueHelper.setTaggedValue(metadatatable, TaggedValueHelper.SYSTEMTABLENAME,
                            tables.getString("SYSTEM_TABLE_NAME").trim()); //$NON-NLS-1$
                    TaggedValueHelper.setTaggedValue(metadatatable, TaggedValueHelper.SYSTEMTABLESCHEMA,
                            tables.getString("SYSTEM_TABLE_SCHEMA").trim()); //$NON-NLS-1$
                    TaggedValueHelper.setTaggedValue(metadatatable, TaggedValueHelper.TABLESCHEMA,
                            tables.getString("TABLE_SCHEMA").trim()); //$NON-NLS-1$
                }
            } catch (SQLException e) {
                // don't catch anything if the system table name or schema doesn't exist
                // this part is needed only for as400
            }
            list.add(metadatatable);
        }
        if (dbJDBCMetadata.getDatabaseProductName() != null
                && dbJDBCMetadata.getDatabaseProductName().equals("Microsoft SQL Server")) { //$NON-NLS-1$
            for (String element : tableType) {
                if (element.equals("SYNONYM")) { //$NON-NLS-1$
                    Statement stmt = extractMeta.getConn().createStatement();
                    extractMeta.setQueryStatementTimeout(stmt);
                    String schemaname = schemaPattern + ".sysobjects"; //$NON-NLS-1$
                    String sql = "select name from " + schemaname + " where xtype='SN'"; //$NON-NLS-1$//$NON-NLS-2$
                    if ("dbo".equalsIgnoreCase(schemaPattern)) { //$NON-NLS-1$
                        // SELECT name AS object_name ,SCHEMA_NAME(schema_id) AS schema_name FROM sys.objects where
                        // type='SN'
                        ResultSet rsTables = stmt.executeQuery(sql);
                        while (rsTables.next()) {
                            String nameKey = rsTables.getString("name").trim(); //$NON-NLS-1$

                            MetadataTable metadatatable = null;
                            metadatatable = RelationalFactory.eINSTANCE.createTdTable();

                            metadatatable.setName(nameKey);
                            metadatatable.setTableType(ETableTypes.TABLETYPE_SYNONYM.getName());
                            metadatatable.setLabel(metadatatable.getName());
                            list.add(metadatatable);
                        }
                    }
                }
            }
        } else if (dbJDBCMetadata.getDatabaseProductName() != null
                && dbJDBCMetadata.getDatabaseProductName().startsWith("DB2/")) { //$NON-NLS-1$
            for (String element : tableType) {
                if (element.equals("SYNONYM")) { //$NON-NLS-1$
                    Statement stmt = extractMeta.getConn().createStatement();
                    extractMeta.setQueryStatementTimeout(stmt);
                    String sql = "SELECT NAME FROM SYSIBM.SYSTABLES where TYPE='A' and BASE_SCHEMA = '" //$NON-NLS-1$
                            + schemaPattern + "'"; //$NON-NLS-1$
                    ResultSet rsTables = stmt.executeQuery(sql);
                    while (rsTables.next()) {
                        String nameKey = rsTables.getString("NAME").trim(); //$NON-NLS-1$

                        MetadataTable metadatatable = null;
                        metadatatable = RelationalFactory.eINSTANCE.createTdTable();

                        metadatatable.setName(nameKey);
                        metadatatable.setTableType(ETableTypes.TABLETYPE_SYNONYM.getName());
                        metadatatable.setLabel(metadatatable.getName());
                        list.add(metadatatable);
                    }
                }
            }
        }
        if (isLinked()) {
            PackageHelper.addMetadataTable(ListUtils.castList(MetadataTable.class, list), pack);
        }
    } catch (SQLException e) {
        log.error(e, e);
    }

    return list;

}

From source file:org.biomart.configurator.controller.MartController.java

public void updateDatasetFromSource(Dataset ds) throws SQLException, DataModelException {
    Mart mart = ds.getParentMart();
    List<SourceSchema> sss = mart.getIncludedSchemas();
    // assuming that only one for now
    if (McUtils.isCollectionEmpty(sss))
        return;
    SourceSchema ss = sss.get(0);

    final DatabaseMetaData dmd = ss.getConnection().getMetaData();
    final String catalog = ss.getConnection().getCatalog();

    // List of objects storing orphan key column and its table name
    List<ForeignKey> orphanFKList = new ArrayList<ForeignKey>();
    StringBuffer orphanSearch = new StringBuffer();
    boolean orphanBool = false;

    /*
     * try { orphanBool = findOrphanFKFromDB(orphanFKList, orphanSearch, mart); if (orphanBool) { Frame frame = new
     * Frame(); Object[] options = { "Proceed", "Abort Synchronization" }; int n = JOptionPane .showOptionDialog(
     * frame,
     * "Some columns in relations no longer exist in source DB. This may be caused by renaming/dropping tables/columns in source DB.\n"
     * +
     * "When choose 'Proceed', you will be prompted to save this information for later use. Do you want to proceed?"
     * +"\n", "Schema Update Warning", JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE, null, // do // not //
     * use a // custom // Icon options, // the titles of buttons options[1]); // default button title if (n ==
     * JOptionPane.NO_OPTION) { return; } else{ new SaveOrphanKeyDialog("Orphan Relation", orphanSearch.toString());
     * } } } catch (Exception e) { e.printStackTrace(); } // Now that user decides to sync GUI model to DB schema,
     * remove orphan foreign key clearOrphanForeignKey(orphanFKList);
     */
    // Create a list of existing tables. During this method, we remove
    // from this list all tables that still exist in the database. At
    // the end of the method, the list contains only those tables
    // which no longer exist, so they will be dropped.
    final Collection<Table> tablesToBeDropped = new HashSet<Table>(ss.getTables());

    // Load tables and views from database, then loop over them.
    ResultSet dbTables;
    String sName = ss.getJdbcLinkObject().getJdbcType().useSchema() ? ss.getJdbcLinkObject().getSchemaName()
            : catalog;

    dbTables = dmd.getTables(catalog, sName, "%", new String[] { "TABLE", "VIEW", "ALIAS", "SYNONYM" });

    // Do the loop.
    final Collection<Table> tablesToBeKept = new HashSet<Table>();
    while (dbTables.next()) {
        // Check schema and catalog.
        final String catalogName = dbTables.getString("TABLE_CAT");
        final String schemaName = dbTables.getString("TABLE_SCHEM");
        String schemaPrefix = schemaName;

        // What is the table called?
        final String dbTableName = dbTables.getString("TABLE_NAME");
        Log.debug("Processing table " + catalogName + "." + dbTableName);

        // Look to see if we already have a table by this name defined.
        // If we do, reuse it. If not, create a new table.
        Table dbTable = ss.getTableByName(dbTableName);
        if (dbTable == null)
            try {
                dbTable = new SourceTable(ss, dbTableName);
                dbTable.setVisibleModified(true);
                ss.addTable(dbTable);
            } catch (final Throwable t) {
                throw new BioMartError(t);
            }
        // Add schema prefix to list.
        if (schemaPrefix != null)
            dbTable.addInPartitions(schemaPrefix);

        // Table exists, so remove it from our list of tables to be
        // dropped at the end of the method.
        tablesToBeDropped.remove(dbTable);
        tablesToBeKept.add(dbTable);
    }
    dbTables.close();

    // Loop over all columns.
    for (final Iterator<Table> i = tablesToBeKept.iterator(); i.hasNext();) {
        final Table dbTable = (Table) i.next();
        final String dbTableName = dbTable.getName();
        // Make a list of all the columns in the table. Any columns
        // remaining in this list by the end of the loop will be
        // dropped.
        final Collection<Column> colsToBeDropped = new HashSet<Column>(dbTable.getColumnList());

        // Clear out the existing schema partition info on all cols.
        for (final Iterator<Column> j = dbTable.getColumnList().iterator(); j.hasNext();)
            ((Column) j.next()).cleanInPartitions();

        // Load the table columns from the database, then loop over
        // them.
        Log.debug("Loading table column list for " + dbTableName);
        ResultSet dbTblCols;

        dbTblCols = dmd.getColumns(catalog, sName, dbTableName, "%");

        // FIXME: When using Oracle, if the table is a synonym then the
        // above call returns no results.
        while (dbTblCols.next()) {
            // Check schema and catalog.
            final String catalogName = dbTblCols.getString("TABLE_CAT");
            final String schemaName = dbTblCols.getString("TABLE_SCHEM");
            String schemaPrefix = null;
            // No prefix if partitions are empty;
            /*
             * if (!this.getPartitions().isEmpty()) { if ("".equals(dmd.getSchemaTerm())) // Use catalog name to get
             * prefix. schemaPrefix = (String) this.getPartitions().get( catalogName); else // Use schema name to
             * get prefix. schemaPrefix = (String) this.getPartitions().get( schemaName); // Don't want to include
             * if prefix is still null. if (schemaPrefix == null) continue; }
             */

            // What is the column called, and is it nullable?
            final String dbTblColName = dbTblCols.getString("COLUMN_NAME");
            Log.debug("Processing column " + dbTblColName);

            // Look to see if the column already exists on this table.
            // If it does, reuse it. Else, create it.
            Column dbTblCol = (Column) dbTable.getColumnByName(dbTblColName);
            if (dbTblCol == null)
                try {
                    dbTblCol = new SourceColumn((SourceTable) dbTable, dbTblColName);
                    dbTblCol.setVisibleModified(true);
                    dbTable.addColumn(dbTblCol);
                } catch (final Throwable t) {
                    throw new BioMartError(t);
                }

            // Column exists, so remove it from our list of columns to
            // be dropped at the end of the loop.
            colsToBeDropped.remove(dbTblCol);
            if (schemaPrefix != null)
                dbTblCol.addInPartitions(schemaPrefix);
        }
        dbTblCols.close();

        // Drop all columns that are left in the list, as they no longer
        // exist in the database.
        for (final Iterator<Column> j = colsToBeDropped.iterator(); j.hasNext();) {
            final Column column = (Column) j.next();
            Log.debug("Dropping redundant column " + column.getName());
            dbTable.getColumnList().remove(column);
        }

    }

    // Remove from schema all tables not found in the database, using
    // the list we constructed above.
    for (final Iterator<Table> i = tablesToBeDropped.iterator(); i.hasNext();) {
        final Table existingTable = (Table) i.next();
        Log.debug("Dropping redundant table " + existingTable);
        final String tableName = existingTable.getName();
        // By clearing its keys we will also clear its relations.
        for (final Iterator<Key> j = existingTable.getKeys().iterator(); j.hasNext();) {
            j.next().removeAllRelations();
        }
        existingTable.setPrimaryKey(null);
        existingTable.getForeignKeys().clear();
        ss.removeTableByName(tableName);
    }

    // Get and create primary keys.
    // Work out a list of all foreign keys currently existing.
    // Any remaining in this list later will be dropped.
    final Collection<ForeignKey> fksToBeDropped = new HashSet<ForeignKey>();
    for (final Iterator<Table> i = ss.getTables().iterator(); i.hasNext();) {
        final Table t = (Table) i.next();
        fksToBeDropped.addAll(t.getForeignKeys());

        // Obtain the primary key from the database. Even in databases
        // without referential integrity, the primary key is still
        // defined and can be obtained from the metadata.
        Log.debug("Loading table primary keys");
        String searchCatalog = catalog;
        String searchSchema = sName;
        /*
         * if (!t.getSchemaPartitions().isEmpty()) { // Locate partition with first prefix. final String prefix =
         * (String) t.getSchemaPartitions() .iterator().next(); String schemaName = (String) new InverseMap(this
         * .getPartitions()).get(prefix); if (schemaName == null) // Should never happen. throw new BioMartError();
         * if ("".equals(dmd.getSchemaTerm())) searchCatalog = schemaName; searchSchema = schemaName; }
         */
        final ResultSet dbTblPKCols = dmd.getPrimaryKeys(searchCatalog, searchSchema, t.getName());

        // Load the primary key columns into a map keyed by column
        // position.
        // In other words, the first column in the key has a map key of
        // 1, and so on. We do this because we can't guarantee we'll
        // read the key columns from the database in the correct order.
        // We keep the map sorted, so that when we iterate over it later
        // we get back the columns in the correct order.
        final Map<Short, Column> pkCols = new TreeMap<Short, Column>();
        while (dbTblPKCols.next()) {
            final String pkColName = dbTblPKCols.getString("COLUMN_NAME");
            final Short pkColPosition = new Short(dbTblPKCols.getShort("KEY_SEQ"));
            pkCols.put(pkColPosition, t.getColumnByName(pkColName));
        }
        dbTblPKCols.close();

        // Did DMD find a PK? If not, which is really unusual but
        // potentially may happen, attempt to find one by looking for a
        // single column with the same name as the table or with '_id'
        // appended.
        // Only do this if we are using key-guessing.
        if (pkCols.isEmpty() && ss.getJdbcLinkObject().isKeyGuessing()) {
            Log.debug("Found no primary key, so attempting to guess one");
            // Plain version first.
            Column candidateCol = (Column) t.getColumnByName(t.getName());
            // Try with '_id' appended if plain version turned up
            // nothing.
            if (candidateCol == null)
                candidateCol = (Column) t.getColumnByName(t.getName() + Resources.get("primaryKeySuffix"));
            // Found something? Add it to the primary key columns map,
            // with a dummy key of 1. (Use Short for the key because
            // that
            // is what DMD would have used had it found anything
            // itself).
            if (candidateCol != null)
                pkCols.put(Short.valueOf("1"), candidateCol);
        }

        // Obtain the existing primary key on the table, if the table
        // previously existed and even had one in the first place.
        final PrimaryKey existingPK = t.getPrimaryKey();

        // Did we find a PK on the database copy of the table?
        if (!pkCols.isEmpty()) {

            // Yes, we found a PK on the database copy of the table. So,
            // create a new key based around the columns we identified.
            PrimaryKey candidatePK;
            try {
                candidatePK = new PrimaryKey(new ArrayList<Column>(pkCols.values()));
            } catch (final Throwable th) {
                throw new BioMartError(th);
            }

            // If the existing table has no PK, or has a PK which
            // matches and is not incorrect, or has a PK which does not
            // match
            // and is not handmade, replace that PK with the one we
            // found.
            // This way we preserve any existing handmade PKs, and don't
            // override any marked as incorrect.
            try {
                if (existingPK == null)
                    t.setPrimaryKey(candidatePK);
                else if (existingPK.equals(candidatePK)
                        && existingPK.getStatus().equals(ComponentStatus.HANDMADE))
                    existingPK.setStatus(ComponentStatus.INFERRED);
                else if (!existingPK.equals(candidatePK)
                        && !existingPK.getStatus().equals(ComponentStatus.HANDMADE))
                    t.setPrimaryKey(candidatePK);
            } catch (final Throwable th) {
                throw new BioMartError(th);
            }
        } else // No, we did not find a PK on the database copy of the
        // table, so that table should not have a PK at all. So if the
        // existing table has a PK which is not handmade, remove it.
        if (existingPK != null && !existingPK.getStatus().equals(ComponentStatus.HANDMADE))
            try {
                t.setPrimaryKey(null);
            } catch (final Throwable th) {
                throw new BioMartError(th);
            }
    }

    // Are we key-guessing? Key guess the foreign keys, passing in a
    // reference to the list of existing foreign keys. After this call
    // has completed, the list will contain all those foreign keys which
    // no longer exist, and can safely be dropped.
    if (ss.getJdbcLinkObject().isKeyGuessing())
        this.synchroniseKeysUsingKeyGuessing(ss, fksToBeDropped);
    // Otherwise, use DMD to do the same, also passing in the list of
    // existing foreign keys to be updated as the call progresses. Also
    // pass in the DMD details so it doesn't have to work them out for
    // itself.
    else
        this.synchroniseKeysUsingDMD(ss, fksToBeDropped, dmd, sName, catalog);

    // Drop any foreign keys that are left over (but not handmade ones).
    for (final Iterator<ForeignKey> i = fksToBeDropped.iterator(); i.hasNext();) {
        final Key k = (Key) i.next();
        if (k.getStatus().equals(ComponentStatus.HANDMADE))
            continue;
        Log.debug("Dropping redundant foreign key " + k);
        for (final Iterator<Relation> r = k.getRelations().iterator(); r.hasNext();) {
            final Relation rel = (Relation) r.next();
            rel.getFirstKey().getRelations().remove(rel);
            rel.getSecondKey().getRelations().remove(rel);
        }
        k.getTable().getForeignKeys().remove(k);
    }

    // rebuild mart
    this.rebuildMartFromSource(mart);
}

From source file:org.apache.openjpa.jdbc.sql.DBDictionary.java

/**
 * Reflect on the schema to find tables matching the given name pattern.
 */
public Table[] getTables(DatabaseMetaData meta, DBIdentifier sqlCatalog, DBIdentifier sqlSchemaName,
        DBIdentifier sqlTableName, Connection conn) throws SQLException {

    String schemaName = DBIdentifier.isNull(sqlSchemaName) ? null : sqlSchemaName.getName();
    if (!supportsSchemaForGetTables)
        schemaName = null;
    else {
        schemaName = getSchemaNameForMetadata(sqlSchemaName);
    }

    String[] types = Strings.split(tableTypes, ",", 0);
    for (int i = 0; i < types.length; i++)
        types[i] = types[i].trim();

    beforeMetadataOperation(conn);
    ResultSet tables = null;
    try {
        tables = meta.getTables(getCatalogNameForMetadata(sqlCatalog), schemaName,
                getTableNameForMetadata(sqlTableName), types);
        List tableList = new ArrayList();
        while (tables != null && tables.next())
            tableList.add(newTable(tables));
        return (Table[]) tableList.toArray(new Table[tableList.size()]);
    } finally {
        if (tables != null)
            try {
                tables.close();
            } catch (Exception e) {
            }
    }
}

From source file:org.kawanfw.test.api.client.DatabaseMetaDataTest.java

public void test(Connection connection) throws Exception {
    MessageDisplayer.initClassDisplay(this.getClass().getSimpleName());

    DatabaseMetaData databaseMetaData = connection.getMetaData();

    // Test that getMetaData() will return value from cache
    databaseMetaData = connection.getMetaData();

    if (connection instanceof RemoteConnection) {
        MessageDisplayer.display("Java Version : " + System.getProperty("java.version"));
        MessageDisplayer.display("AceQL Version: " + ((RemoteConnection) connection).getVersion());
        MessageDisplayer.display("AceQL Url    : " + ((RemoteConnection) connection).getUrl());
        MessageDisplayer.display("");
    }

    if (connection instanceof RemoteConnection) {
        MessageDisplayer.display("((RemoteConnection)connection).clone();");
        Connection connection2 = ((RemoteConnection) connection).clone();
        @SuppressWarnings("unused")
        DatabaseMetaData databaseMetaData2 = connection2.getMetaData();
        connection2.close();
    }

    MessageDisplayer.display("General info (no Assert done):");

    MessageDisplayer.display("connection.getCatalog()                     : " + connection.getCatalog());

    MessageDisplayer.display(
            "databaseMetaData.getDatabaseProductName()   : " + databaseMetaData.getDatabaseProductName());
    MessageDisplayer.display(
            "databaseMetaData.getDatabaseProductVersion(): " + databaseMetaData.getDatabaseProductVersion());
    MessageDisplayer.display(
            "databaseMetaData.getDatabaseMajorVersion()  : " + databaseMetaData.getDatabaseMajorVersion());
    MessageDisplayer.display(
            "databaseMetaData.getDatabaseMinorVersion()  : " + databaseMetaData.getDatabaseMinorVersion());
    MessageDisplayer.display(
            "databaseMetaData.allProceduresAreCallable() : " + databaseMetaData.allProceduresAreCallable());
    // SystemOutHandle.display(DatabaseMetaData.bestRowSession);
    MessageDisplayer.display("");

    // SystemOutHandle.display(databaseMetaData.autoCommitFailureClosesAllResultSets());

    MessageDisplayer.display("databaseMetaData.getCatalogTerm(): " + databaseMetaData.getCatalogTerm());

    try {

        MessageDisplayer.display(
                "databaseMetaData.supportsStoredProcedures(): " + databaseMetaData.supportsStoredProcedures());

        MessageDisplayer.display("databaseMetaData.supportsStoredFunctionsUsingCallSyntax(): "
                + databaseMetaData.supportsStoredFunctionsUsingCallSyntax());

    } catch (Throwable e) {
        MessageDisplayer.display(e.toString());
    }

    MessageDisplayer.display("connection.getAutoCommit(): " + connection.getAutoCommit());

    MessageDisplayer.display("databaseMetaData.getDefaultTransactionIsolation()    : "
            + databaseMetaData.getDefaultTransactionIsolation());

    MessageDisplayer
            .display("databaseMetaData.supportsTransactionIsolationLevel(TRANSACTION_READ_UNCOMMITTED): "
                    + databaseMetaData
                            .supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_UNCOMMITTED));

    MessageDisplayer.display("databaseMetaData.supportsTransactionIsolationLevel(TRANSACTION_READ_COMMITTED): "
            + databaseMetaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_COMMITTED));

    MessageDisplayer.display("databaseMetaData.supportsTransactionIsolationLevel(TRANSACTION_REPEATABLE_READ): "
            + databaseMetaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_REPEATABLE_READ));

    MessageDisplayer.display("databaseMetaData.supportsTransactionIsolationLevel(TRANSACTION_SERIALIZABLE): "
            + databaseMetaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_SERIALIZABLE));

    MessageDisplayer
            .display("databaseMetaData.supportsBatchUpdates()    : " + databaseMetaData.supportsBatchUpdates());
    MessageDisplayer
            .display("databaseMetaData.supportsSavepoints()      : " + databaseMetaData.supportsSavepoints());
    MessageDisplayer.display(
            "databaseMetaData.supportsGetGeneratedKeys(): " + databaseMetaData.supportsGetGeneratedKeys());

    if (!new SqlUtil(connection).isTeradata() && !new SqlUtil(connection).isInformix()) {
        Assert.assertEquals(true,
                databaseMetaData.supportsTransactionIsolationLevel(Connection.TRANSACTION_READ_COMMITTED));
    }

    Assert.assertEquals("databaseMetaData.supportsBatchUpdates()", true,
            databaseMetaData.supportsBatchUpdates());

    if (!new SqlUtil(connection).isSQLAnywhere() && !new SqlUtil(connection).isAccess()) {
        Assert.assertEquals("databaseMetaData.supportsGetGeneratedKeys()", true,
                databaseMetaData.supportsGetGeneratedKeys());
    }
    // Informix does not support savepoints
    SqlUtil sqlUtil = new SqlUtil(connection);
    if (!sqlUtil.isInformix() && !sqlUtil.isTeradata() && !new SqlUtil(connection).isAccess()) {
        Assert.assertEquals(true, databaseMetaData.supportsSavepoints());
    }

    MessageDisplayer.display("");

    String catalog = null;
    String schema = null;
    String table = "customer";

    // Table name must be uppercase for Oracle & DB2, lowercase for MySQL
    // and PostgreSQL
    if (new SqlUtil(connection).isOracle() || new SqlUtil(connection).isHSQLDB()
            || new SqlUtil(connection).isDB2()) {
        table = table.toUpperCase();
    }

    ResultSet rs = null;

    if (!new SqlUtil(connection).isAccess()) {

        rs = databaseMetaData.getPrimaryKeys(catalog, schema, table);

        printResultSet(rs);

        boolean rsNext = false;

        while (rs.next()) {
            rsNext = true;
            String keyColumnName = rs.getString("COLUMN_NAME");
            MessageDisplayer.display("Primary Key is: " + keyColumnName + " for Table: " + table);
            Assert.assertEquals("customer_id", keyColumnName.toLowerCase());
        }

        if (!new SqlUtil(connection).isH2()) {
            Assert.assertEquals(true, rsNext);
        }

        rs.close();
    }

    // boolean returnNow = true;
    // if (returnNow) return;

    String[] types = { "TABLE", "VIEW" };
    rs = databaseMetaData.getTables(null, null, null, types);

    Set<String> tablesSet = new HashSet<String>();

    Set<String> ourTables = new HashSet<String>();
    ourTables.add("banned_usernames");
    ourTables.add("customer");
    ourTables.add("customer_auto");
    ourTables.add("orderlog");
    ourTables.add("user_login");

    MessageDisplayer.display("");
    while (rs.next()) {
        table = rs.getString("TABLE_NAME");

        if (ourTables.contains(table.toLowerCase())) {
            MessageDisplayer.display("Table: " + table);
        }

        tablesSet.add(table.toLowerCase());
    }

    // printResultSet(rs);

    testTable("banned_usernames", tablesSet);
    testTable("customer", tablesSet);
    testTable("orderlog", tablesSet);
    testTable("user_login", tablesSet);

    rs.close();
}