Example usage for java.sql DatabaseMetaData getImportedKeys

List of usage examples for java.sql DatabaseMetaData getImportedKeys

Introduction

On this page you can find usage examples for java.sql DatabaseMetaData.getImportedKeys.

Prototype

ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException;

Document

Retrieves a description of the primary key columns that are referenced by the given table's foreign key columns (the primary keys imported by a table).
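
Before the full examples, here is a minimal, self-contained sketch of the call pattern: obtain DatabaseMetaData from an open Connection, call getImportedKeys for one table, and read the standard result-set columns (FK_NAME, FKTABLE_NAME, FKCOLUMN_NAME, PKTABLE_NAME, PKCOLUMN_NAME, KEY_SEQ). The JDBC URL and the table name "ORDERS" are placeholders for illustration, assuming an H2 in-memory driver on the classpath; substitute your own data source.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class GetImportedKeysExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL for illustration; any JDBC data source works.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            DatabaseMetaData md = conn.getMetaData();

            // Catalog and schema may be null to use the connection defaults;
            // "ORDERS" is a hypothetical table name.
            try (ResultSet rs = md.getImportedKeys(null, null, "ORDERS")) {
                while (rs.next()) {
                    // One row per foreign-key column; KEY_SEQ gives the
                    // position of the column within a composite key.
                    System.out.printf("%s: %s.%s -> %s.%s (seq %d)%n",
                            rs.getString("FK_NAME"),
                            rs.getString("FKTABLE_NAME"),
                            rs.getString("FKCOLUMN_NAME"),
                            rs.getString("PKTABLE_NAME"),
                            rs.getString("PKCOLUMN_NAME"),
                            rs.getShort("KEY_SEQ"));
                }
            }
        }
    }
}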

Usage

From source file:org.alfresco.util.schemacomp.ExportDb.java

private void extractSchema(DatabaseMetaData dbmd, String schemaName, String prefixFilter)
        throws SQLException, IllegalArgumentException, IllegalAccessException {
    if (log.isDebugEnabled()) {
        log.debug("Retrieving tables: schemaName=[" + schemaName + "], prefixFilter=[" + prefixFilter + "]");
    }

    final ResultSet tables = dbmd.getTables(null, schemaName, prefixFilter,
            new String[] { "TABLE", "VIEW", "SEQUENCE" });

    while (tables.next()) {
        final String tableName = tables.getString("TABLE_NAME");

        if (log.isDebugEnabled()) {
            log.debug("Examining table tableName=[" + tableName + "]");
        }

        // Oracle hack: ignore tables in the recycle bin
        // ALF-14129 fix, check whether schema already contains object with provided name
        if (tableName.startsWith("BIN$") || schema.containsByName(tableName)) {
            continue;
        }

        if (tables.getString("TABLE_TYPE").equals("SEQUENCE")) {
            Sequence sequence = new Sequence(tableName);
            schema.add(sequence);
            continue;
        }

        Table table = new Table(tableName);
        schema.add(table);

        // Table columns
        final ResultSet columns = dbmd.getColumns(null, tables.getString("TABLE_SCHEM"), tableName, "%");
        while (columns.next()) {
            String columnName = columns.getString("COLUMN_NAME");
            Column column = new Column(columnName);

            String dbType = columns.getString("TYPE_NAME");
            int colSize = columns.getInt("COLUMN_SIZE");
            int scale = columns.getInt("DECIMAL_DIGITS");
            int jdbcType = columns.getInt("DATA_TYPE");
            String type = generateType(dbType, colSize, scale, jdbcType);
            column.setType(type);

            String nullableString = columns.getString("IS_NULLABLE");
            column.setNullable(parseBoolean(nullableString));

            column.setOrder(columns.getInt("ORDINAL_POSITION"));

            try {
                String autoIncString = columns.getString("IS_AUTOINCREMENT");
                column.setAutoIncrement(parseBoolean(autoIncString));
            } catch (SQLException jtdsDoesNotHaveIsAutoincrement) {
                column.setAutoIncrement((dbType.endsWith("identity")));
            }

            column.setParent(table);
            table.getColumns().add(column);
        }
        columns.close();

        // Primary key
        final ResultSet primarykeycols = dbmd.getPrimaryKeys(null, tables.getString("TABLE_SCHEM"), tableName);

        PrimaryKey pk = null;

        while (primarykeycols.next()) {
            if (pk == null) {
                String pkName = primarykeycols.getString("PK_NAME");
                pk = new PrimaryKey(pkName);
            }
            String columnName = primarykeycols.getString("COLUMN_NAME");
            pk.getColumnNames().add(columnName);

            int columnOrder = primarykeycols.getInt("KEY_SEQ");
            pk.getColumnOrders().add(columnOrder);
        }
        primarykeycols.close();

        // If this table has a primary key, add it. 
        if (pk != null) {
            pk.setParent(table);
            table.setPrimaryKey(pk);
        }

        // Indexes
        final ResultSet indexes = dbmd.getIndexInfo(null, tables.getString("TABLE_SCHEM"), tableName, false,
                true);
        String lastIndexName = "";

        Index index = null;

        while (indexes.next()) {
            final String indexName = indexes.getString("INDEX_NAME");
            if (indexName == null) {
                // Oracle seems to have some dummy index entries
                continue;
            }
            // Skip the index corresponding to the PK if it is mentioned
            else if (indexName.equals(table.getPrimaryKey().getName())) {
                continue;
            }

            if (!indexName.equals(lastIndexName)) {
                index = new Index(indexName);
                index.setUnique(!indexes.getBoolean("NON_UNIQUE"));
                index.setParent(table);
                table.getIndexes().add(index);
                lastIndexName = indexName;
            }
            if (index != null) {
                String columnName = indexes.getString("COLUMN_NAME");
                index.getColumnNames().add(columnName);
            }
        }
        indexes.close();

        final ResultSet foreignkeys = dbmd.getImportedKeys(null, tables.getString("TABLE_SCHEM"), tableName);
        String lastKeyName = "";

        ForeignKey fk = null;

        while (foreignkeys.next()) {
            final String keyName = foreignkeys.getString("FK_NAME");
            if (!keyName.equals(lastKeyName)) {
                fk = new ForeignKey(keyName);
                fk.setParent(table);
                table.getForeignKeys().add(fk);
                lastKeyName = keyName;
            }
            if (fk != null) {
                fk.setLocalColumn(foreignkeys.getString("FKCOLUMN_NAME"));
                fk.setTargetTable(foreignkeys.getString("PKTABLE_NAME"));
                fk.setTargetColumn(foreignkeys.getString("PKCOLUMN_NAME"));
            }
        }
        foreignkeys.close();
    }
    tables.close();
}

From source file:org.apache.ambari.server.orm.DBAccessorImpl.java

@Override
public boolean tableHasForeignKey(String tableName, String fkName) throws SQLException {
    DatabaseMetaData metaData = getDatabaseMetaData();

    ResultSet rs = metaData.getImportedKeys(null, null, convertObjectName(tableName));

    if (rs != null) {
        try {
            while (rs.next()) {
                if (StringUtils.equalsIgnoreCase(fkName, rs.getString("FK_NAME"))) {
                    return true;
                }
            }
        } finally {
            rs.close();
        }
    }

    LOG.warn("FK {} not found for table {}", convertObjectName(fkName), convertObjectName(tableName));

    return false;
}

From source file:org.apache.bigtop.itest.hive.TestJdbc.java

/**
 * Test simple DatabaseMetaData calls.  getColumns is tested elsewhere, as we need to call
 * that on a valid table.  Same with getFunctions.
 *
 * @throws SQLException
 */
@Test
public void databaseMetaDataCalls() throws SQLException {
    DatabaseMetaData md = conn.getMetaData();

    boolean boolrc = md.allTablesAreSelectable();
    LOG.debug("All tables are selectable? " + boolrc);

    String strrc = md.getCatalogSeparator();
    LOG.debug("Catalog separator " + strrc);

    strrc = md.getCatalogTerm();
    LOG.debug("Catalog term " + strrc);

    ResultSet rs = md.getCatalogs();
    while (rs.next()) {
        strrc = rs.getString(1);
        LOG.debug("Found catalog " + strrc);
    }

    Connection c = md.getConnection();

    int intrc = md.getDatabaseMajorVersion();
    LOG.debug("DB major version is " + intrc);

    intrc = md.getDatabaseMinorVersion();
    LOG.debug("DB minor version is " + intrc);

    strrc = md.getDatabaseProductName();
    LOG.debug("DB product name is " + strrc);

    strrc = md.getDatabaseProductVersion();
    LOG.debug("DB product version is " + strrc);

    intrc = md.getDefaultTransactionIsolation();
    LOG.debug("Default transaction isolation is " + intrc);

    intrc = md.getDriverMajorVersion();
    LOG.debug("Driver major version is " + intrc);

    intrc = md.getDriverMinorVersion();
    LOG.debug("Driver minor version is " + intrc);

    strrc = md.getDriverName();
    LOG.debug("Driver name is " + strrc);

    strrc = md.getDriverVersion();
    LOG.debug("Driver version is " + strrc);

    strrc = md.getExtraNameCharacters();
    LOG.debug("Extra name characters is " + strrc);

    strrc = md.getIdentifierQuoteString();
    LOG.debug("Identifier quote string is " + strrc);

    // In Hive 1.2 this always returns an empty RS
    rs = md.getImportedKeys("a", "b", "d");

    // In Hive 1.2 this always returns an empty RS
    rs = md.getIndexInfo("a", "b", "d", true, true);

    intrc = md.getJDBCMajorVersion();
    LOG.debug("JDBC major version is " + intrc);

    intrc = md.getJDBCMinorVersion();
    LOG.debug("JDBC minor version is " + intrc);

    intrc = md.getMaxColumnNameLength();
    LOG.debug("Maximum column name length is " + intrc);

    strrc = md.getNumericFunctions();
    LOG.debug("Numeric functions are " + strrc);

    // In Hive 1.2 this always returns an empty RS
    rs = md.getPrimaryKeys("a", "b", "d");

    // In Hive 1.2 this always returns an empty RS
    rs = md.getProcedureColumns("a", "b", "d", "e");

    strrc = md.getProcedureTerm();
    LOG.debug("Procedures are called " + strrc);

    // In Hive 1.2 this always returns an empty RS
    rs = md.getProcedures("a", "b", "d");

    strrc = md.getSchemaTerm();
    LOG.debug("Schemas are called " + strrc);

    rs = md.getSchemas();
    while (rs.next()) {
        strrc = rs.getString(1);
        LOG.debug("Found schema " + strrc);
    }

    strrc = md.getSearchStringEscape();
    LOG.debug("Search string escape is " + strrc);

    strrc = md.getStringFunctions();
    LOG.debug("String functions are " + strrc);

    strrc = md.getSystemFunctions();
    LOG.debug("System functions are " + strrc);

    rs = md.getTableTypes();
    while (rs.next()) {
        strrc = rs.getString(1);
        LOG.debug("Found table type " + strrc);
    }

    strrc = md.getTimeDateFunctions();
    LOG.debug("Time/date functions are " + strrc);

    rs = md.getTypeInfo();
    while (rs.next()) {
        strrc = rs.getString(1);
        LOG.debug("Found type " + strrc);
    }

    // In Hive 1.2 this always returns an empty RS
    rs = md.getUDTs("a", "b", "d", null);

    boolrc = md.supportsAlterTableWithAddColumn();
    LOG.debug("Supports alter table with add column? " + boolrc);

    boolrc = md.supportsAlterTableWithDropColumn();
    LOG.debug("Supports alter table with drop column? " + boolrc);

    boolrc = md.supportsBatchUpdates();
    LOG.debug("Supports batch updates? " + boolrc);

    boolrc = md.supportsCatalogsInDataManipulation();
    LOG.debug("Supports catalogs in data manipulation? " + boolrc);

    boolrc = md.supportsCatalogsInIndexDefinitions();
    LOG.debug("Supports catalogs in index definition? " + boolrc);

    boolrc = md.supportsCatalogsInPrivilegeDefinitions();
    LOG.debug("Supports catalogs in privilege definition? " + boolrc);

    boolrc = md.supportsCatalogsInProcedureCalls();
    LOG.debug("Supports catalogs in procedure calls? " + boolrc);

    boolrc = md.supportsCatalogsInTableDefinitions();
    LOG.debug("Supports catalogs in table definition? " + boolrc);

    boolrc = md.supportsColumnAliasing();
    LOG.debug("Supports column aliasing? " + boolrc);

    boolrc = md.supportsFullOuterJoins();
    LOG.debug("Supports full outer joins? " + boolrc);

    boolrc = md.supportsGroupBy();
    LOG.debug("Supports group by? " + boolrc);

    boolrc = md.supportsLimitedOuterJoins();
    LOG.debug("Supports limited outer joins? " + boolrc);

    boolrc = md.supportsMultipleResultSets();
    LOG.debug("Supports limited outer joins? " + boolrc);

    boolrc = md.supportsNonNullableColumns();
    LOG.debug("Supports non-nullable columns? " + boolrc);

    boolrc = md.supportsOuterJoins();
    LOG.debug("Supports outer joins? " + boolrc);

    boolrc = md.supportsPositionedDelete();
    LOG.debug("Supports positioned delete? " + boolrc);

    boolrc = md.supportsPositionedUpdate();
    LOG.debug("Supports positioned update? " + boolrc);

    boolrc = md.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT);
    LOG.debug("Supports result set holdability? " + boolrc);

    boolrc = md.supportsResultSetType(ResultSet.HOLD_CURSORS_OVER_COMMIT);
    LOG.debug("Supports result set type? " + boolrc);

    boolrc = md.supportsSavepoints();
    LOG.debug("Supports savepoints? " + boolrc);

    boolrc = md.supportsSchemasInDataManipulation();
    LOG.debug("Supports schemas in data manipulation? " + boolrc);

    boolrc = md.supportsSchemasInIndexDefinitions();
    LOG.debug("Supports schemas in index definitions? " + boolrc);

    boolrc = md.supportsSchemasInPrivilegeDefinitions();
    LOG.debug("Supports schemas in privilege definitions? " + boolrc);

    boolrc = md.supportsSchemasInProcedureCalls();
    LOG.debug("Supports schemas in procedure calls? " + boolrc);

    boolrc = md.supportsSchemasInTableDefinitions();
    LOG.debug("Supports schemas in table definitions? " + boolrc);

    boolrc = md.supportsSelectForUpdate();
    LOG.debug("Supports select for update? " + boolrc);

    boolrc = md.supportsStoredProcedures();
    LOG.debug("Supports stored procedures? " + boolrc);

    boolrc = md.supportsTransactions();
    LOG.debug("Supports transactions? " + boolrc);

    boolrc = md.supportsUnion();
    LOG.debug("Supports union? " + boolrc);

    boolrc = md.supportsUnionAll();
    LOG.debug("Supports union all? " + boolrc);

}

From source file:org.apache.ddlutils.task.DumpMetadataTask.java

/**
 * Dumps the foreign key columns of the indicated table to other tables.
 *
 * @param xmlWriter   The xml writer to write to
 * @param metaData    The database metadata
 * @param catalogName The catalog name
 * @param schemaName  The schema name
 * @param tableName   The table name
 */
private void dumpFKs(PrettyPrintingXmlWriter xmlWriter, final DatabaseMetaData metaData,
        final String catalogName, final String schemaName, final String tableName) throws SQLException {
    performResultSetXmlOperation(xmlWriter, null, new ResultSetXmlOperation() {
        public ResultSet getResultSet() throws SQLException {
            return metaData.getImportedKeys(catalogName, schemaName, tableName);
        }

        public void handleRow(PrettyPrintingXmlWriter xmlWriter, ResultSet result) throws SQLException {
            Set columns = getColumnsInResultSet(result);

            xmlWriter.writeElementStart(null, "foreignKey");

            addStringAttribute(xmlWriter, "name", result, columns, "FK_NAME");
            addStringAttribute(xmlWriter, "primaryKeyName", result, columns, "PK_NAME");
            addStringAttribute(xmlWriter, "column", result, columns, "PKCOLUMN_NAME");
            addStringAttribute(xmlWriter, "foreignCatalog", result, columns, "FKTABLE_CAT");
            addStringAttribute(xmlWriter, "foreignSchema", result, columns, "FKTABLE_SCHEM");
            addStringAttribute(xmlWriter, "foreignTable", result, columns, "FKTABLE_NAME");
            addStringAttribute(xmlWriter, "foreignColumn", result, columns, "FKCOLUMN_NAME");
            addShortAttribute(xmlWriter, "sequenceNumberInFK", result, columns, "KEY_SEQ");
            if (columns.contains("UPDATE_RULE")) {
                try {
                    switch (result.getShort("UPDATE_RULE")) {
                    case DatabaseMetaData.importedKeyNoAction:
                        xmlWriter.writeAttribute(null, "updateRule", "no action");
                        break;
                    case DatabaseMetaData.importedKeyCascade:
                        xmlWriter.writeAttribute(null, "updateRule", "cascade PK change");
                        break;
                    case DatabaseMetaData.importedKeySetNull:
                        xmlWriter.writeAttribute(null, "updateRule", "set FK to NULL");
                        break;
                    case DatabaseMetaData.importedKeySetDefault:
                        xmlWriter.writeAttribute(null, "updateRule", "set FK to default");
                        break;
                    default:
                        xmlWriter.writeAttribute(null, "updateRule", "unknown");
                        break;
                    }
                } catch (SQLException ex) {
                    log("Could not read the UPDATE_RULE value for a foreign key of table '" + tableName
                            + "' from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
                }
            }
            if (columns.contains("DELETE_RULE")) {
                try {
                    switch (result.getShort("DELETE_RULE")) {
                    case DatabaseMetaData.importedKeyNoAction:
                    case DatabaseMetaData.importedKeyRestrict:
                        xmlWriter.writeAttribute(null, "deleteRule", "no action");
                        break;
                    case DatabaseMetaData.importedKeyCascade:
                        xmlWriter.writeAttribute(null, "deleteRule", "cascade PK change");
                        break;
                    case DatabaseMetaData.importedKeySetNull:
                        xmlWriter.writeAttribute(null, "deleteRule", "set FK to NULL");
                        break;
                    case DatabaseMetaData.importedKeySetDefault:
                        xmlWriter.writeAttribute(null, "deleteRule", "set FK to default");
                        break;
                    default:
                        xmlWriter.writeAttribute(null, "deleteRule", "unknown");
                        break;
                    }
                } catch (SQLException ex) {
                    log("Could not read the DELETE_RULE value for a foreign key of table '" + tableName
                            + "' from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
                }
            }
            if (columns.contains("DEFERRABILITY")) {
                try {
                    switch (result.getShort("DEFERRABILITY")) {
                    case DatabaseMetaData.importedKeyInitiallyDeferred:
                        xmlWriter.writeAttribute(null, "deferrability", "initially deferred");
                        break;
                    case DatabaseMetaData.importedKeyInitiallyImmediate:
                        xmlWriter.writeAttribute(null, "deferrability", "immediately deferred");
                        break;
                    case DatabaseMetaData.importedKeyNotDeferrable:
                        xmlWriter.writeAttribute(null, "deferrability", "not deferred");
                        break;
                    default:
                        xmlWriter.writeAttribute(null, "deferrability", "unknown");
                        break;
                    }
                } catch (SQLException ex) {
                    log("Could not read the DEFERRABILITY value for a foreign key of table '" + tableName
                            + "' from the result set: " + ex.getStackTrace(), Project.MSG_ERR);
                }
            }
            xmlWriter.writeElementEnd();
        }

        public void handleError(SQLException ex) {
            log("Could not determine the foreign keys for table '" + tableName + "': " + ex.getStackTrace(),
                    Project.MSG_ERR);
        }
    });
}

From source file:org.apache.empire.db.codegen.CodeGenParser.java

private void gatherRelations(DBDatabase db, DatabaseMetaData dbMeta, ArrayList<String> tables)
        throws SQLException {
    ResultSet relations = null;
    String fkTableName, pkTableName, fkColName, pkColName, relName;
    DBTableColumn fkCol, pkCol;
    DBTable fkTable, pkTable;
    DBColumn col;

    // Add all Relations
    for (String tableName : tables) {

        // check for foreign-keys
        relations = dbMeta.getImportedKeys(config.getDbCatalog(), config.getDbSchema(), tableName);
        while (relations.next()) {
            pkCol = fkCol = null;

            fkTableName = relations.getString("FKTABLE_NAME");
            pkTableName = relations.getString("PKTABLE_NAME");
            fkColName = relations.getString("FKCOLUMN_NAME");
            pkColName = relations.getString("PKCOLUMN_NAME");

            // Detect relation name
            relName = relations.getString("FK_NAME");
            if (StringUtils.isEmpty(relName))
                relName = fkTableName + "." + fkColName + "-" + pkTableName + "." + pkColName;

            pkTable = db.getTable(pkTableName);
            fkTable = db.getTable(fkTableName);

            // check if both tables really exist in the model
            if (pkTable == null || fkTable == null) {
                log.error("Unable to add the relation \"" + relName
                        + "\"! One of the tables could not be found.");
                continue;
            }

            col = pkTable.getColumn(pkColName);
            if (col instanceof DBTableColumn)
                pkCol = (DBTableColumn) col;

            col = fkTable.getColumn(fkColName);
            if (col instanceof DBTableColumn)
                fkCol = (DBTableColumn) col;

            // check if both columns really exist in the model
            if (fkCol == null || pkCol == null) {
                log.error("Unable to add the relation \"" + relName
                        + "\"! One of the columns could not be found.");
                continue;
            }

            // add the relation
            DBRelation.DBReference reference = fkCol.referenceOn(pkCol);
            DBRelation.DBReference[] refs = null;
            DBRelation r = db.getRelation(relName);
            if (r != null) {
                DBRelation.DBReference[] refsOld = r.getReferences();
                refs = new DBRelation.DBReference[refsOld.length + 1];
                int i = 0;
                for (; i < refsOld.length; i++)
                    refs[i] = refsOld[i];
                refs[i] = reference;
                // remove old relation
                db.getRelations().remove(r);
            } else {
                refs = new DBRelation.DBReference[] { reference };
            }
            // Add a new relation
            db.addRelation(relName, refs);
            log.info("Added relation (FK-PK): " + relName);
        }
    }
}

From source file:org.apache.hive.jdbc.TestJdbcDriver2.java

@Test
public void testParentReferences() throws Exception {
    /* Test parent references from Statement */
    Statement s = this.con.createStatement();
    ResultSet rs = s.executeQuery("SELECT * FROM " + dataTypeTableName);

    assertTrue(s.getConnection() == this.con);
    assertTrue(rs.getStatement() == s);

    rs.close();
    s.close();

    /* Test parent references from PreparedStatement */
    PreparedStatement ps = this.con.prepareStatement("SELECT * FROM " + dataTypeTableName);
    rs = ps.executeQuery();

    assertTrue(ps.getConnection() == this.con);
    assertTrue(rs.getStatement() == ps);

    rs.close();
    ps.close();

    /* Test DatabaseMetaData queries which do not have a parent Statement */
    DatabaseMetaData md = this.con.getMetaData();

    assertTrue(md.getConnection() == this.con);

    rs = md.getCatalogs();
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getColumns(null, null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getFunctions(null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getImportedKeys(null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getPrimaryKeys(null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getProcedureColumns(null, null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getProcedures(null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getSchemas();
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getTableTypes();
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getTables(null, null, null, null);
    assertNull(rs.getStatement());
    rs.close();

    rs = md.getTypeInfo();
    assertNull(rs.getStatement());
    rs.close();
}

From source file:org.apache.hive.jdbc.TestJdbcDriver2.java

/**
 * test getImportedKeys()
 * @throws SQLException
 */
@Test
public void testImportedKeys() throws SQLException {
    DatabaseMetaData dbmd = con.getMetaData();
    assertNotNull(dbmd);
    // currently getImportedKeys always returns an empty resultset for Hive
    ResultSet res = dbmd.getImportedKeys(null, null, null);
    ResultSetMetaData md = res.getMetaData();
    assertEquals(md.getColumnCount(), 14);
    assertFalse(res.next());
}

From source file:org.apache.openjpa.jdbc.sql.DBDictionary.java

/**
 * Reflect on the schema to return full foreign keys imported by the given
 * table pattern.
 */
public ForeignKey[] getImportedKeys(DatabaseMetaData meta, DBIdentifier catalog, DBIdentifier schemaName,
        DBIdentifier tableName, Connection conn, boolean partialKeys) throws SQLException {
    if (!supportsForeignKeys)
        return null;
    if (tableName == null && !supportsNullTableForGetImportedKeys)
        return null;

    beforeMetadataOperation(conn);
    ResultSet keys = null;
    try {
        keys = meta.getImportedKeys(getCatalogNameForMetadata(catalog), getSchemaNameForMetadata(schemaName),
                getTableNameForMetadata(tableName));

        List<ForeignKey> importedKeyList = new ArrayList<ForeignKey>();
        Map<FKMapKey, ForeignKey> fkMap = new HashMap<FKMapKey, ForeignKey>();

        while (keys != null && keys.next()) {
            ForeignKey nfk = newForeignKey(keys);
            if (!partialKeys) {
                ForeignKey fk = combineForeignKey(fkMap, nfk);
                // If the key returned != new key, fk col was combined
                // with existing fk.
                if (fk != nfk) {
                    continue;
                }
            }
            importedKeyList.add(nfk);
        }
        return (ForeignKey[]) importedKeyList.toArray(new ForeignKey[importedKeyList.size()]);
    } finally {
        if (keys != null) {
            try {
                keys.close();
            } catch (Exception e) {
            }
        }
    }
}

From source file:org.apache.syncope.core.persistence.jpa.content.XMLContentExporter.java

private List<String> sortByForeignKeys(final String dbSchema, final Connection conn,
        final Set<String> tableNames) throws SQLException {

    Set<MultiParentNode<String>> roots = new HashSet<>();

    final DatabaseMetaData meta = conn.getMetaData();

    final Map<String, MultiParentNode<String>> exploited = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    final Set<String> pkTableNames = new HashSet<>();

    for (String tableName : tableNames) {
        MultiParentNode<String> node = exploited.get(tableName);
        if (node == null) {
            node = new MultiParentNode<>(tableName);
            roots.add(node);
            exploited.put(tableName, node);
        }

        pkTableNames.clear();

        ResultSet rs = null;
        try {
            rs = meta.getImportedKeys(conn.getCatalog(), dbSchema, tableName);

            // this is to avoid repetition
            while (rs.next()) {
                pkTableNames.add(rs.getString("PKTABLE_NAME"));
            }
        } finally {
            if (rs != null) {
                try {
                    rs.close();
                } catch (SQLException e) {
                    LOG.error("While closing tables result set", e);
                }
            }
        }

        for (String pkTableName : pkTableNames) {
            if (!tableName.equalsIgnoreCase(pkTableName)) {
                MultiParentNode<String> pkNode = exploited.get(pkTableName);
                if (pkNode == null) {
                    pkNode = new MultiParentNode<>(pkTableName);
                    roots.add(pkNode);
                    exploited.put(pkTableName, pkNode);
                }

                pkNode.addChild(node);

                if (roots.contains(node)) {
                    roots.remove(node);
                }
            }
        }
    }

    final List<String> sortedTableNames = new ArrayList<>(tableNames.size());
    MultiParentNodeOp.traverseTree(roots, sortedTableNames);

    // remove from sortedTableNames any table possibly added during lookup 
    // but matching some item in this.tablePrefixesToBeExcluded
    sortedTableNames.retainAll(tableNames);

    LOG.debug("Tables after retainAll {}", sortedTableNames);

    Collections.reverse(sortedTableNames);

    return sortedTableNames;
}

From source file:org.apache.syncope.core.persistence.jpa.content.XMLContentExporter.java

private void doExportTable(final TransformerHandler handler, final String dbSchema, final Connection conn,
        final String tableName, final String whereClause) throws SQLException, SAXException {

    LOG.debug("Export table {}", tableName);

    AttributesImpl attrs = new AttributesImpl();

    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        StringBuilder orderBy = new StringBuilder();

        DatabaseMetaData meta = conn.getMetaData();

        // ------------------------------------
        // retrieve foreign keys (linked to the same table) to perform an ordered select
        ResultSet pkeyRS = null;
        try {
            pkeyRS = meta.getImportedKeys(conn.getCatalog(), dbSchema, tableName);
            while (pkeyRS.next()) {
                if (tableName.equals(pkeyRS.getString("PKTABLE_NAME"))) {
                    String columnName = pkeyRS.getString("FKCOLUMN_NAME");
                    if (columnName != null) {
                        if (orderBy.length() > 0) {
                            orderBy.append(",");
                        }

                        orderBy.append(columnName);
                    }
                }
            }
        } finally {
            if (pkeyRS != null) {
                try {
                    pkeyRS.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
        }

        // retrieve primary keys to perform an ordered select
        try {
            pkeyRS = meta.getPrimaryKeys(null, null, tableName);
            while (pkeyRS.next()) {
                String columnName = pkeyRS.getString("COLUMN_NAME");
                if (columnName != null) {
                    if (orderBy.length() > 0) {
                        orderBy.append(",");
                    }

                    orderBy.append(columnName);
                }
            }
        } finally {
            if (pkeyRS != null) {
                try {
                    pkeyRS.close();
                } catch (SQLException e) {
                    LOG.error("While closing result set", e);
                }
            }
        }

        // ------------------------------------
        StringBuilder query = new StringBuilder();
        query.append("SELECT * FROM ").append(tableName).append(" a");
        if (StringUtils.isNotBlank(whereClause)) {
            query.append(" WHERE ").append(whereClause);
        }
        if (orderBy.length() > 0) {
            query.append(" ORDER BY ").append(orderBy);
        }
        stmt = conn.prepareStatement(query.toString());

        rs = stmt.executeQuery();
        while (rs.next()) {
            attrs.clear();

            final ResultSetMetaData rsMeta = rs.getMetaData();
            for (int i = 0; i < rsMeta.getColumnCount(); i++) {
                final String columnName = rsMeta.getColumnName(i + 1);
                final Integer columnType = rsMeta.getColumnType(i + 1);

                // Retrieve value taking care of binary values.
                String value = getValues(rs, columnName, columnType);
                if (value != null && (!COLUMNS_TO_BE_NULLIFIED.containsKey(tableName)
                        || !COLUMNS_TO_BE_NULLIFIED.get(tableName).contains(columnName))) {

                    attrs.addAttribute("", "", columnName, "CDATA", value);
                }
            }

            handler.startElement("", "", tableName, attrs);
            handler.endElement("", "", tableName);

            LOG.debug("Add record {}", attrs);
        }
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                LOG.error("While closing result set", e);
            }
        }
    }
}