Example usage for java.sql Connection getCatalog

Introduction

This page collects usage examples for java.sql.Connection.getCatalog(), drawn from open-source projects.

Prototype

String getCatalog() throws SQLException;

Documentation

Retrieves this Connection object's current catalog name.
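
Before the project examples, here is a minimal, self-contained sketch of calling getCatalog() directly. The JDBC URL, credentials, and driver are hypothetical placeholders; note that the method may return null if the driver does not support catalogs.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class GetCatalogExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical URL and credentials; substitute your own data source.
        String url = "jdbc:mysql://localhost:3306/mydb";
        try (Connection connection = DriverManager.getConnection(url, "user", "password")) {
            // May be null for drivers without catalog support.
            String catalog = connection.getCatalog();
            System.out.println("Current catalog: " + catalog);
        }
    }
}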

Usage

From source file:com.streamsets.pipeline.lib.jdbc.JdbcUtil.java

/**
 * Wrapper for {@link java.sql.DatabaseMetaData#getColumns(String, String, String, String)} that detects
 * the format of the supplied tableName.
 *
 * @param connection An open JDBC connection
 * @param schema schema name, or null
 * @param tableName table name that is optionally fully qualified with a schema in the form schema.tableName
 * @return ResultSet containing the column metadata
 *
 * @throws SQLException
 */
public ResultSet getColumnMetadata(Connection connection, String schema, String tableName) throws SQLException {
    DatabaseMetaData metadata = connection.getMetaData();
    return metadata.getColumns(connection.getCatalog(), schema, tableName, null); // Get all columns for this table
}
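
A hypothetical caller might consume the returned ResultSet as in the sketch below; the JdbcUtil instance, the open Connection, and the "public"/"customers" schema and table names are assumptions, while COLUMN_NAME and TYPE_NAME are standard columns of the ResultSet produced by DatabaseMetaData.getColumns().

// Sketch: print each column's name and JDBC type name for a (hypothetical) table.
public void printColumns(JdbcUtil jdbcUtil, Connection connection) throws SQLException {
    try (ResultSet columns = jdbcUtil.getColumnMetadata(connection, "public", "customers")) {
        while (columns.next()) {
            System.out.println(columns.getString("COLUMN_NAME") + " : " + columns.getString("TYPE_NAME"));
        }
    }
}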

From source file:com.streamsets.pipeline.lib.jdbc.JdbcUtil.java

/**
 * Wrapper for {@link java.sql.DatabaseMetaData#getImportedKeys(String, String, String)}
 *
 * @param connection An open JDBC connection
 * @param schema schema name, or null
 * @param tableName table name that is optionally fully qualified with a schema in the form schema.tableName
 * @return Set of table names whose primary keys are referenced as foreign keys by the table tableName
 *
 * @throws SQLException
 */
public Set<String> getReferredTables(Connection connection, String schema, String tableName)
        throws SQLException {
    DatabaseMetaData metadata = connection.getMetaData();

    Set<String> referredTables = new HashSet<>();
    // try-with-resources closes the ResultSet even if an exception occurs
    try (ResultSet result = metadata.getImportedKeys(connection.getCatalog(), schema, tableName)) {
        while (result.next()) {
            referredTables.add(result.getString(PK_TABLE_NAME));
        }
    }
    return referredTables;
}

From source file:com.streamsets.pipeline.lib.jdbc.JdbcUtil.java

/**
 * Wrapper for {@link java.sql.DatabaseMetaData#getPrimaryKeys(String, String, String)}
 *
 * @param connection An open JDBC connection
 * @param schema schema name, or null
 * @param tableName table name that is optionally fully qualified with a schema in the form schema.tableName
 * @return List of primary key column names for a table
 *
 * @throws SQLException
 */
public List<String> getPrimaryKeys(Connection connection, String schema, String tableName) throws SQLException {
    DatabaseMetaData metadata = connection.getMetaData();
    List<String> keys = new ArrayList<>();
    try (ResultSet result = metadata.getPrimaryKeys(connection.getCatalog(), schema, tableName)) {
        while (result.next()) {
            keys.add(result.getString(COLUMN_NAME));
        }
    }
    return keys;
}
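
The two wrappers above might be combined as in this sketch; the JdbcUtil instance, the open Connection, and the "public"/"orders" schema and table names are assumptions.

// Sketch: print the primary-key columns of a (hypothetical) table and the tables it references.
public void describeKeys(JdbcUtil jdbcUtil, Connection connection) throws SQLException {
    List<String> primaryKeys = jdbcUtil.getPrimaryKeys(connection, "public", "orders");
    Set<String> referredTables = jdbcUtil.getReferredTables(connection, "public", "orders");
    System.out.println("Primary key columns: " + primaryKeys);
    System.out.println("Referenced tables: " + referredTables);
}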

From source file:it.unibas.spicy.persistence.relational.DAORelational.java

private void getInstanceByTable(IConnectionFactory dataSourceDB, Connection connection, String schemaName,
        String tableName, INode setTable, IDataSourceProxy dataSource, boolean translated) throws DAOException {
    PreparedStatement statement = null;
    ResultSet resultSet = null;
    try {
        String tablePath = tableName;
        //mysql jdbc driver
        if (connection.getMetaData().getDriverName().equalsIgnoreCase(SpicyEngineConstants.MYSQL_DRIVER_NAME)) {
            tablePath = connection.getCatalog() + "." + tableName;
        } else if (!schemaName.equals("")) {
            tablePath = schemaName + ".\"" + tableName + "\"";
        }
        statement = connection.prepareStatement("select * from " + tablePath + " order by 1");
        statement.setMaxRows(NUMBER_OF_SAMPLE);
        resultSet = statement.executeQuery();
        if (resultSet == null) {
            throw new DAOException("ResultSet is NULL!");
        }
        int sampleCounter = 0;
        while (resultSet.next() && sampleCounter < NUMBER_OF_SAMPLE) {
            sampleCounter++;
            TupleNode tupleNode = new TupleNode(getNode(tableName + TUPLE_SUFFIX).getLabel(), getOID());
            setTable.addChild(tupleNode);
            for (INode attributeNodeSchema : getNode(tableName + TUPLE_SUFFIX).getChildren()) {
                AttributeNode attributeNode = new AttributeNode(attributeNodeSchema.getLabel(), getOID());
                String columnName = attributeNodeSchema.getLabel();
                String oldName = dataSource.getChangedValue(tableName + "." + columnName);
                if (oldName != null && !translated) {
                    columnName = oldName;
                }
                Object columnValue = resultSet.getObject(columnName);
                LeafNode leafNode = createLeafNode(attributeNodeSchema, columnValue);
                attributeNode.addChild(leafNode);
                tupleNode.addChild(attributeNode);
            }
        }
    } catch (SQLException sqle) {
        throw new DAOException(sqle.getMessage());
    } finally {
        dataSourceDB.close(resultSet);
        dataSourceDB.close(statement);
    }
}

From source file:org.apache.syncope.core.util.ImportExport.java

private List<String> sortByForeignKeys(final Connection conn, final Set<String> tableNames, final String schema)
        throws SQLException {

    Set<MultiParentNode<String>> roots = new HashSet<MultiParentNode<String>>();

    final DatabaseMetaData meta = conn.getMetaData();

    final Map<String, MultiParentNode<String>> exploited = new TreeMap<String, MultiParentNode<String>>(
            String.CASE_INSENSITIVE_ORDER);

    final Set<String> pkTableNames = new HashSet<String>();

    for (String tableName : tableNames) {

        MultiParentNode<String> node = exploited.get(tableName);

        if (node == null) {
            node = new MultiParentNode<String>(tableName);
            roots.add(node);
            exploited.put(tableName, node);
        }

        ResultSet rs = null;

        pkTableNames.clear();

        try {
            rs = meta.getImportedKeys(conn.getCatalog(), readSchema(), tableName);

            // this is to avoid repetition
            while (rs.next()) {
                pkTableNames.add(rs.getString("PKTABLE_NAME"));
            }

        } finally {
            if (rs != null) {
                try {
                    rs.close();
                } catch (SQLException e) {
                    LOG.error("While closing tables result set", e);
                }
            }
        }

        for (String pkTableName : pkTableNames) {

            if (!tableName.equalsIgnoreCase(pkTableName)) {

                MultiParentNode<String> pkNode = exploited.get(pkTableName);

                if (pkNode == null) {
                    pkNode = new MultiParentNode<String>(pkTableName);
                    roots.add(pkNode);
                    exploited.put(pkTableName, pkNode);
                }

                pkNode.addChild(node);

                if (roots.contains(node)) {
                    roots.remove(node);
                }
            }
        }
    }

    final List<String> sortedTableNames = new ArrayList<String>(tableNames.size());
    MultiParentNodeOp.traverseTree(roots, sortedTableNames);

    Collections.reverse(sortedTableNames);
    return sortedTableNames;
}

From source file:it.eng.spagobi.meta.initializer.PhysicalModelInitializer.java

private void addCatalog(Connection conn, PhysicalModel model, String defaultCatalog) {
    String catalog;
    List<String> catalogs;
    DatabaseMetaData dbMeta;
    ResultSet rs;
    Iterator<String> it;

    catalog = null;

    try {

        catalog = conn.getCatalog();
        if (catalog == null) {
            dbMeta = conn.getMetaData();

            rs = dbMeta.getCatalogs();
            catalogs = new ArrayList<>();
            while (rs.next()) {
                String catalogName = rs.getString(1);
                if (catalogName != null) {
                    catalogs.add(catalogName);
                }
            }
            if (catalogs.size() == 0) {
                log("No schema [" + dbMeta.getSchemaTerm() + "] defined");
            } else if (catalogs.size() == 1) {
                catalog = catalogs.get(0); // the only catalog available
            } else {
                String targetCatalog = null;
                it = catalogs.iterator();
                while (it.hasNext()) {
                    String s = it.next();
                    log("s [" + s + "]");
                    if (s.equalsIgnoreCase(defaultCatalog)) {
                        targetCatalog = defaultCatalog;
                        break;
                    }
                }
                if (targetCatalog == null) {
                    throw new RuntimeException("No catalog named [" + defaultCatalog + "] is available on db");
                }
                catalog = targetCatalog;
            }
            rs.close();
        }

        model.setCatalog(catalog);
    } catch (Throwable t) {
        throw new RuntimeException("Impossible to initialize catalog metadata", t);
    }
}

From source file:it.unibas.spicy.persistence.relational.DAORelational.java

public IDataSourceProxy loadSchema(int scenarioNo, AccessConfiguration accessConfiguration,
        DBFragmentDescription dataDescription, IConnectionFactory dataSourceDB, boolean source)
        throws DAOException {
    this.dataDescription = dataDescription;
    Connection connectionPostgres = null;
    INode root = null;
    String catalog = null;
    String schemaName = accessConfiguration.getSchemaName();
    DatabaseMetaData databaseMetaData = null;
    Connection connection = dataSourceDB.getConnection(accessConfiguration);
    IDataSourceProxy dataSource = null;

    AccessConfiguration accessConfigurationPostgres = new AccessConfiguration();
    accessConfigurationPostgres.setDriver(SpicyEngineConstants.ACCESS_CONFIGURATION_DRIVER);
    accessConfigurationPostgres
            .setUri(SpicyEngineConstants.ACCESS_CONFIGURATION_URI + SpicyEngineConstants.MAPPING_TASK_DB_NAME);
    accessConfigurationPostgres.setLogin(SpicyEngineConstants.ACCESS_CONFIGURATION_LOGIN);
    accessConfigurationPostgres.setPassword(SpicyEngineConstants.ACCESS_CONFIGURATION_PASS);

    connectionPostgres = dataSourceDB.getConnection(accessConfigurationPostgres);
    try {
        Statement statement = connectionPostgres.createStatement();

        databaseMetaData = connection.getMetaData();
        catalog = connection.getCatalog();
        if (catalog == null) {
            catalog = accessConfiguration.getUri();
            if (logger.isDebugEnabled())
                logger.debug("Catalog is null. Catalog name will be: " + catalog);
        }
        root = this.createRootNode(catalog);

        //giannisk postgres create schemas
        if (source) {
            String createSchemasQuery = "create schema if not exists " + SpicyEngineConstants.SOURCE_SCHEMA_NAME
                    + scenarioNo + ";\n";
            //createSchemasQuery += "create schema if not exists " + GenerateSQL.WORK_SCHEMA_NAME + ";\n";                        
            createSchemasQuery += "create schema if not exists " + SpicyEngineConstants.TARGET_SCHEMA_NAME
                    + scenarioNo + ";";
            statement.executeUpdate(createSchemasQuery);
        }

        String[] tableTypes = new String[] { "TABLE" };
        ResultSet tableResultSet = databaseMetaData.getTables(catalog, schemaName, null, tableTypes);
        while (tableResultSet.next()) {
            String tableName = tableResultSet.getString("TABLE_NAME");
            if (!this.dataDescription.checkLoadTable(tableName)) {
                continue;
            }
            INode setTable = new SetNode(tableName);
            setTable.addChild(
                    getTuple(databaseMetaData, catalog, schemaName, tableName, source, statement, scenarioNo));
            setTable.setRequired(false);
            setTable.setNotNull(true);
            root.addChild(setTable);
            addNode(tableName, setTable);
        }
        dataSource = new ConstantDataSourceProxy(new DataSource(SpicyEngineConstants.TYPE_RELATIONAL, root));

        dataSource.addAnnotation(SpicyEngineConstants.ACCESS_CONFIGURATION, accessConfiguration);
        dataSource.addAnnotation(SpicyEngineConstants.LOADED_INSTANCES_FLAG, false);

        for (Map.Entry<String, String> entry : changedColumnNames.entrySet()) {
            dataSource.putChangedValue(entry.getKey(), entry.getValue());
        }
        loadPrimaryKeys(dataSource, databaseMetaData, catalog, schemaName, source, statement, scenarioNo,
                false);
        loadForeignKeys(dataSource, databaseMetaData, catalog, schemaName, source, scenarioNo);

    } catch (Throwable ex) {
        logger.error(ex);
        throw new DAOException(ex.getMessage());
    } finally {
        if (connection != null)
            dataSourceDB.close(connection);
        if (connectionPostgres != null)
            dataSourceDB.close(connectionPostgres);
    }
    return dataSource;
}

From source file:it.unibas.spicy.persistence.relational.DAORelational.java

public void loadInstance(int scenarioNo, AccessConfiguration accessConfiguration, IDataSourceProxy dataSource,
        DBFragmentDescription dataDescription, IConnectionFactory dataSourceDB, boolean source)
        throws DAOException, SQLException {
    Connection connection = dataSourceDB.getConnection(accessConfiguration);
    DatabaseMetaData databaseMetaData = null;
    String catalog = null;
    String schemaName = accessConfiguration.getSchemaName();
    Connection connectionPostgres = null;
    this.dataDescription = dataDescription;

    AccessConfiguration accessConfigurationPostgres = new AccessConfiguration();
    accessConfigurationPostgres.setDriver(SpicyEngineConstants.ACCESS_CONFIGURATION_DRIVER);
    accessConfigurationPostgres
            .setUri(SpicyEngineConstants.ACCESS_CONFIGURATION_URI + SpicyEngineConstants.MAPPING_TASK_DB_NAME);
    accessConfigurationPostgres.setLogin(SpicyEngineConstants.ACCESS_CONFIGURATION_LOGIN);
    accessConfigurationPostgres.setPassword(SpicyEngineConstants.ACCESS_CONFIGURATION_PASS);
    connectionPostgres = dataSourceDB.getConnection(accessConfigurationPostgres);
    try {
        databaseMetaData = connection.getMetaData();
        catalog = connection.getCatalog();
        String[] tableTypes = new String[] { "TABLE" };
        ResultSet tableResultSet = databaseMetaData.getTables(catalog, schemaName, null, tableTypes);
        Statement statement = connection.createStatement();
        Statement statementPostgres = connectionPostgres.createStatement();
        while (tableResultSet.next()) {
            String tableName = tableResultSet.getString("TABLE_NAME");
            if (!this.dataDescription.checkLoadTable(tableName)) {
                continue;
            }
            String tablePath = tableName;
            if (!schemaName.equals("")) {
                tablePath = schemaName + ".\"" + tableName + "\"";
            }
            String newTablePath = tableName;
            if (source) {
                newTablePath = SpicyEngineConstants.SOURCE_SCHEMA_NAME + scenarioNo + ".\"" + tableName + "\"";
            } else {
                newTablePath = SpicyEngineConstants.TARGET_SCHEMA_NAME + scenarioNo + ".\"" + tableName + "\"";
            }
            ResultSet countResult = statement
                    .executeQuery("SELECT COUNT(*) AS instancesCount FROM " + tablePath + ";");
            int instancesCount = 1;
            while (countResult.next()) {
                instancesCount = countResult.getInt("instancesCount");
            }

            ResultSet pKList = databaseMetaData.getPrimaryKeys(null, null, tableName);
            //                ResultSet pKList = statement.executeQuery("SELECT c.column_name as keyname\n" + "FROM information_schema.key_column_usage AS c\n" +
            //                    "LEFT JOIN information_schema.table_constraints AS t\n" +
            //                    "ON t.constraint_name = c.constraint_name\n" +
            //                    "WHERE t.table_name = '" + tablePath + "' AND t.constraint_type = 'PRIMARY KEY';");
            String pKListString = "";
            while (pKList.next()) {
                pKListString += pKList.getString("COLUMN_NAME") + ",";
            }
            if (pKListString != "")
                pKListString = pKListString.substring(0, pKListString.length() - 1);

            int inCount = 0;
            String viewName = tableName + "_MIPMapView";
            String orderByClause = "";
            if (pKListString != "")
                orderByClause = " ORDER BY " + pKListString;
            statement.executeUpdate("DROP VIEW IF EXISTS " + viewName + ";");
            statement.executeUpdate(
                    "CREATE VIEW " + viewName + " AS SELECT * FROM " + tablePath + orderByClause + ";");
            for (int i = 0; i <= ((instancesCount - 1) / BATCH_SIZE); i++) {
                ResultSet instancesSet = statement.executeQuery("SELECT * FROM " + viewName + " LIMIT "
                        + BATCH_SIZE + " OFFSET " + (BATCH_SIZE * i) + ";");
                ResultSetMetaData rsmd = instancesSet.getMetaData();
                int columnsNumber = rsmd.getColumnCount();
                String sql_insert_stmnt = "";
                while (instancesSet.next()) {
                    String tmp_sql_insert_stmnt = "(";
                    for (int j = 1; j <= columnsNumber; j++) {
                        String columnValue = instancesSet.getString(j);
                        if (columnValue == null) {
                            tmp_sql_insert_stmnt += " null,";
                        } else {
                            if (isTextColumn(rsmd.getColumnTypeName(j))) {
                                tmp_sql_insert_stmnt += "'" + columnValue.replaceAll("'", "''") + "',";
                            } else {
                                tmp_sql_insert_stmnt += "" + columnValue + ",";
                            }
                        }

                    }
                    //take out the last ',' character           
                    tmp_sql_insert_stmnt = tmp_sql_insert_stmnt.substring(0, tmp_sql_insert_stmnt.length() - 1);
                    tmp_sql_insert_stmnt += "),";
                    //                        if (!inserted.contains(tmp_sql_insert_stmnt)) {
                    sql_insert_stmnt += tmp_sql_insert_stmnt;
                    //                            inserted.add(tmp_sql_insert_stmnt);
                    //                        }
                }
                if (!sql_insert_stmnt.equals("")) {
                    //take out the last ',' character           
                    sql_insert_stmnt = sql_insert_stmnt.substring(0, sql_insert_stmnt.length() - 1);
                    inCount += statementPostgres
                            .executeUpdate("insert into " + newTablePath + " values " + sql_insert_stmnt + ";");
                }
            }
            statement.executeUpdate("DROP VIEW IF EXISTS " + viewName + ";");
        }
        dataSource.addAnnotation(SpicyEngineConstants.LOADED_INSTANCES_FLAG, true);
    } finally {
        if (connection != null)
            dataSourceDB.close(connection);
        if (connectionPostgres != null)
            dataSourceDB.close(connectionPostgres);
    }
}

From source file:edu.ku.brc.specify.conversion.BasicSQLUtils.java

/**
 * @param connection an open JDBC connection
 * @param tableName the table to look for
 * @return true if the table exposes at least one column in the connection's current catalog
 */
public static boolean doesTableExist(final Connection connection, final String tableName) {
    try {
        DatabaseMetaData mdm = connection.getMetaData();
        // try-with-resources closes the ResultSet even if an exception occurs
        try (ResultSet rs = mdm.getColumns(connection.getCatalog(), connection.getCatalog(), tableName, null)) {
            return rs.next();
        }

    } catch (SQLException ex) {
        ex.printStackTrace();
    }
    return false;
}

From source file:edu.ku.brc.specify.conversion.BasicSQLUtils.java

public static List<String> getFieldNamesFromSchema(final Connection connection, final String tableName,
        final List<String> fields) {
    try {
        DatabaseMetaData mdm = connection.getMetaData();
        // try-with-resources closes the ResultSet even if an exception occurs
        try (ResultSet rs = mdm.getColumns(connection.getCatalog(), connection.getCatalog(), tableName, null)) {
            while (rs.next()) {
                fields.add(rs.getString("COLUMN_NAME"));
            }
        }
        return fields;

    } catch (SQLException ex) {
        ex.printStackTrace();
    }
    return null;
}