Example usage for java.sql DatabaseMetaData getTables

List of usage examples for java.sql DatabaseMetaData getTables

Introduction

On this page you can find example usage for java.sql DatabaseMetaData getTables.

Prototype

ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String types[])
        throws SQLException;

Document

Retrieves a description of the tables available in the given catalog. Only table descriptions matching the catalog, schema, table name and type criteria are returned.
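
For orientation, here is a minimal, self-contained sketch of a typical call. The JDBC URL, user, and password are placeholders; substitute your own database. Passing null for catalog and schemaPattern means "do not filter", "%" matches any table name, and the types array restricts the result to plain tables.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ListTables {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL and credentials -- substitute your own database.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo", "sa", "")) {
            DatabaseMetaData meta = conn.getMetaData();
            // null catalog/schemaPattern = no filter; "%" matches any table name.
            try (ResultSet rs = meta.getTables(null, null, "%", new String[] { "TABLE" })) {
                while (rs.next()) {
                    System.out.println(rs.getString("TABLE_NAME"));
                }
            }
        }
    }
}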

Usage

From source file:org.wso2.throttle.core.Throttler.java

/**
 * Copies physical ThrottleTable to this instance's in-memory ThrottleTable.
 */
private void populateThrottleTable() {
    BasicDataSource basicDataSource = new BasicDataSource();
    basicDataSource.setDriverClassName("com.mysql.jdbc.Driver");
    basicDataSource.setUrl("jdbc:mysql://localhost/org_wso2_throttle_DataSource");
    basicDataSource.setUsername("root");
    basicDataSource.setPassword("root");

    Connection connection = null;
    try {
        connection = basicDataSource.getConnection();
        DatabaseMetaData dbm = connection.getMetaData();
        // check if "ThrottleTable" table is there
        ResultSet tables = dbm.getTables(null, null, RDBMS_THROTTLE_TABLE_NAME, null);
        if (tables.next()) { // Table exists
            PreparedStatement stmt = connection.prepareStatement("SELECT * FROM " + RDBMS_THROTTLE_TABLE_NAME);
            ResultSet resultSet = stmt.executeQuery();
            while (resultSet.next()) {
                String key = resultSet.getString(RDBMS_THROTTLE_TABLE_COLUMN_KEY);
                Boolean isThrottled = resultSet.getBoolean(RDBMS_THROTTLE_TABLE_COLUMN_ISTHROTTLED);
                try {
                    getGlobalThrottleStreamInputHandler().send(new Object[] { key, isThrottled });
                } catch (InterruptedException e) {
                    log.error("Error occurred while sending an event.", e);
                }
            }
        } else { // Table does not exist
            log.warn("RDBMS ThrottleTable does not exist. Make sure global throttler server is started.");
        }
    } catch (SQLException e) {
        log.error("Error occurred while copying throttle data from global throttler server.", e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                log.error("Error occurred while closing database connection.", e);
            }
        }
    }
}
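
The snippet above relies on connection.close() to reclaim the statement and result sets. On Java 7+, the same existence check can be written with try-with-resources so each resource is closed deterministically. A sketch only, assuming the RDBMS_THROTTLE_TABLE_NAME constant and log field from the snippet above:

private boolean throttleTableExists(BasicDataSource basicDataSource) {
    // Sketch: RDBMS_THROTTLE_TABLE_NAME and log are assumed to be the
    // same constant/field used in the snippet above.
    try (Connection connection = basicDataSource.getConnection();
            ResultSet tables = connection.getMetaData().getTables(null, null,
                    RDBMS_THROTTLE_TABLE_NAME, null)) {
        return tables.next();
    } catch (SQLException e) {
        log.error("Error occurred while checking for the throttle table.", e);
        return false;
    }
}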

From source file:it.unibas.spicy.persistence.csv.ExportCSVInstances.java

public void appendCSVInstances(MappingTask mappingTask, HashMap<String, String> directoryPaths, int scenarioNo)
        throws SQLException, DAOException, IOException {
    //connection to Postgres
    IConnectionFactory connectionFactory = new SimpleDbConnectionFactory();
    Connection connection = getConnectionToPostgres(connectionFactory);
    try {
        Statement statement = connection.createStatement();

        //get table names from target database
        DatabaseMetaData databaseMetaData = connection.getMetaData();
        String[] tableTypes = new String[] { "TABLE" };
        ResultSet tableResultSet = databaseMetaData.getTables(SpicyEngineConstants.MAPPING_TASK_DB_NAME,
                SpicyEngineConstants.TARGET_SCHEMA_NAME + scenarioNo, null, tableTypes);
        //for each table
        while (tableResultSet.next()) {
            String tableName = tableResultSet.getString("TABLE_NAME");
            String filePath = directoryPaths.get(tableName);

            if ((filePath != null) && (!filePath.equals(""))) {
                ResultSet allRows = statement.executeQuery("SELECT * FROM "
                        + SpicyEngineConstants.TARGET_SCHEMA_NAME + scenarioNo + ".\"" + tableName + "\";");
                //no of columns
                int columnCount = allRows.getMetaData().getColumnCount();
                //column names
                String[] columnNames = new String[columnCount];
                for (int i = 1; i <= columnCount; i++) {
                    columnNames[i - 1] = allRows.getMetaData().getColumnName(i);
                }
                if (checkDocForCSVColumns(columnCount, columnNames, filePath))
                    appendToCSVDocument(allRows, columnCount, filePath);
                else {
                    throw new DAOException("Column names do not match those of the csv file");
                }
                allRows.close();
            } else {
                System.out.println("The CSV file " + tableName + " cannot be found!");
                System.exit(-1);
            }
        }
    } finally {
        //close connection
        if (connection != null)
            connectionFactory.close(connection);
    }
}

From source file:org.diffkit.db.DKDBTableDataAccess.java

/**
 * default (normal) implementation of getTableMaps that relies on
 * DatabaseMetaData.getTables()
 */
private List<Map<String, ?>> getTableMapsStandard(String catalog_, String schema_, String tableName_,
        DatabaseMetaData dbMeta_) throws SQLException {
    _log.debug("catalog_->{}", catalog_);
    _log.debug("schema_->{}", schema_);
    _log.debug("tableName_->{}", tableName_);
    ResultSet tablesRS = dbMeta_.getTables(catalog_, schema_, tableName_, null);
    _log.debug("tablesRS->{}", tablesRS);
    if (tablesRS == null) {
        _log.warn("no tablesRS for catalog_->{} schema_->{} tableName_->{}");
        return null;
    }
    List<Map<String, ?>> tableMaps = DKSqlUtil.readRows(tablesRS, true);
    _log.debug("tableMaps->{}", tableMaps);
    DKSqlUtil.close(tablesRS);
    return tableMaps;
}

From source file:com.couchbase.devex.JDBCConfig.java

@Override
public Observable<Document> startImport() throws Exception {
    // get the DatabaseMetaData object to retrieve the tables' schema
    DatabaseMetaData databaseMetadata = jdbcTemplate.getDataSource().getConnection().getMetaData();
    List<String> tableNames = new ArrayList<String>();
    // Get table names
    ResultSet result = databaseMetadata.getTables(catalog, schemaPattern, tableNamePattern, types);
    while (result.next()) {
        String tablename = result.getString(3); // column 3 = TABLE_NAME
        String tableType = result.getString(4); // column 4 = TABLE_TYPE
        // make sure we only import tables (as opposed to views, counters, etc.)
        if (!tablename.isEmpty() && "TABLE".equals(tableType)) {
            tableNames.add(tablename);
            log.debug("Will import table " + tablename);
        }
    }
    // Map the tables schema to Table objects
    Map<String, Table> tables = new HashMap<String, Table>();
    JsonObject tablesSchema = JsonObject.create();
    for (String tableName : tableNames) {
        result = databaseMetadata.getColumns(catalog, schemaPattern, tableName, columnNamePattern);
        Table table = new Table(tableName);
        while (result.next()) {
            String columnName = result.getString(4); // column 4 = COLUMN_NAME
            // Maps to JDBCType enum
            int columnType = result.getInt(5); // column 5 = DATA_TYPE
            table.addColumn(columnName, columnType);
        }
        result = databaseMetadata.getPrimaryKeys(catalog, schemaPattern, tableName);
        while (result.next()) {
            String columnName = result.getString(4); // column 4 = COLUMN_NAME
            table.setPrimaryKey(columnName);
        }
        tables.put(tableName, table);
        tablesSchema.put(tableName, table.toJsonObject());
    }
    JsonDocument schemaDoc = JsonDocument.create(tablesSchemaId, tablesSchema);
    log.debug(tablesSchema);
    // FlatMap each table to an Observable of JsonDocument, one
    // JsonDocument per table row.
    return Observable.from(tableNames).flatMap(s -> {
        String sql = String.format(SELECT_EVERYTHING_FROM_TABLE_QUERY, s);
        return Observable.from(jdbcTemplate.query(sql, new JSONRowMapper(tables.get(s))));
    })
            // start by a jsonDocument containing the tables to be imported.
            .startWith(schemaDoc);
}

From source file:com.iver.utiles.connections.ConnectionDB.java

/**
 * Returns the names of the tables.
 * 
 * @param conn
 *            Connection
 * 
 * @return Array of string with the names of the tables.
 * 
 * @throws ConnectionException
 */
public String[] getTableNames(Connection conn) throws ConnectionException {
    // Connection conn=getConnectionByName(name);
    ArrayList<String> tableNames = new ArrayList<String>();
    String nombreTablas = "%"; // list all tables
    String[] tipos = new String[1]; // list only tables
    tipos[0] = "TABLE";

    try {
        DatabaseMetaData dbmd = conn.getMetaData();
        ResultSet tablas = dbmd.getTables(null, null, nombreTablas, tipos);

        boolean seguir = tablas.next();

        while (seguir) {
            // Print only the table names, stored in
            // the "TABLE_NAME" column
            String tableName = tablas.getString(tablas.findColumn("TABLE_NAME"));
            System.out.println(tableName);
            tableNames.add(tableName);
            seguir = tablas.next();
        }
    } catch (SQLException e) {
        throw new ConnectionException(JDBCManager.getTranslation("fallo_obtener_tablas"), e);
    }

    return tableNames.toArray(new String[0]);
}

From source file:net.gcolin.simplerepo.search.SearchController.java

public SearchController(ConfigurationManager configManager) throws IOException {
    this.configManager = configManager;
    File plugins = new File(configManager.getRoot(), "plugins");
    plugins.mkdirs();
    System.setProperty("derby.system.home", plugins.getAbsolutePath());
    BasicDataSource s = new BasicDataSource();
    s.setDriverClassName("org.apache.derby.jdbc.EmbeddedDriver");
    s.setUrl("jdbc:derby:search" + (new File(plugins, "search").exists() ? "" : ";create=true"));
    s.setUsername("su");
    s.setPassword("");
    s.setMaxTotal(10);
    s.setMinIdle(0);
    s.setDefaultAutoCommit(true);
    datasource = s;

    Set<String> allTables = new HashSet<>();
    Connection connection = null;

    try {
        try {
            connection = datasource.getConnection();
            connection.setAutoCommit(false);
            DatabaseMetaData dbmeta = connection.getMetaData();
            try (ResultSet rs = dbmeta.getTables(null, null, null, new String[] { "TABLE" })) {
                while (rs.next()) {
                    allTables.add(rs.getString("TABLE_NAME").toLowerCase());
                }
            }

            if (!allTables.contains("artifact")) {
                QueryRunner run = new QueryRunner();
                run.update(connection,
                        "CREATE TABLE artifactindex(artifact bigint NOT NULL, version bigint NOT NULL)");
                run.update(connection, "INSERT INTO artifactindex (artifact,version) VALUES (?,?)", 1L, 1L);
                run.update(connection,
                        "CREATE TABLE artifact(id bigint NOT NULL,groupId character varying(120), artifactId character varying(120),CONSTRAINT artifact_pkey PRIMARY KEY (id))");
                run.update(connection,
                        "CREATE TABLE artifactversion(artifact_id bigint NOT NULL,id bigint NOT NULL,"
                                + "version character varying(100)," + "reponame character varying(30),"
                                + "CONSTRAINT artifactversion_pkey PRIMARY KEY (id),"
                                + "CONSTRAINT fk_artifactversion_artifact_id FOREIGN KEY (artifact_id) REFERENCES artifact (id) )");
                run.update(connection,
                        "CREATE TABLE artifacttype(version_id bigint NOT NULL,packaging character varying(20) NOT NULL,classifier character varying(30),"
                                + "CONSTRAINT artifacttype_pkey PRIMARY KEY (version_id,packaging,classifier),"
                                + "CONSTRAINT fk_artifacttype_version FOREIGN KEY (version_id) REFERENCES artifactversion (id))");
                run.update(connection, "CREATE INDEX artifactindex ON artifact(groupId,artifactId)");
                run.update(connection, "CREATE INDEX artifactgroupindex ON artifact(groupId)");
                run.update(connection, "CREATE INDEX artifactversionindex ON artifactversion(version)");
            }
            connection.commit();
        } catch (SQLException ex) {
            connection.rollback();
            throw ex;
        } finally {
            DbUtils.close(connection);
        }
    } catch (SQLException ex) {
        throw new IOException(ex);
    }
}

From source file:com.netflix.metacat.connector.jdbc.services.JdbcConnectorTableService.java

/**
 * Get the tables. See {@link java.sql.DatabaseMetaData#getTables(String, String, String, String[]) getTables} for
 * expected format of the ResultSet columns.
 *
 * @param connection       The database connection to use
 * @param name             The qualified name of the database to get tables for
 * @param prefix           An optional database table name prefix to search for
 * @return The result set with columns as described in the getTables method from java.sql.DatabaseMetaData
 * @throws SQLException on query error
 */
protected ResultSet getTables(@Nonnull @NonNull final Connection connection,
        @Nonnull @NonNull final QualifiedName name, @Nullable final QualifiedName prefix) throws SQLException {
    final String database = name.getDatabaseName();
    final DatabaseMetaData metaData = connection.getMetaData();
    return prefix == null || StringUtils.isEmpty(prefix.getTableName())
            ? metaData.getTables(database, database, null, TABLE_TYPES)
            : metaData.getTables(database, database,
                    prefix.getTableName() + JdbcConnectorUtils.MULTI_CHARACTER_SEARCH, TABLE_TYPES);
}
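
JdbcConnectorUtils.MULTI_CHARACTER_SEARCH is Metacat's constant for the multi-character search wildcard (presumably the SQL "%"), so the second branch matches every table whose name starts with the prefix. A standalone sketch of the same prefix match, with a hypothetical "inv" prefix and an open Connection named connection assumed:

// Sketch: list every table whose name starts with "inv".
// "%" is the multi-character wildcard understood by getTables patterns.
final DatabaseMetaData metaData = connection.getMetaData();
try (ResultSet rs = metaData.getTables(null, null, "inv%", new String[] { "TABLE" })) {
    while (rs.next()) {
        System.out.println(rs.getString("TABLE_NAME"));
    }
}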

From source file:org.jboss.dashboard.database.DatabaseAutoSynchronizer.java

/**
 * Checks if the dashbuilder installed modules table exists.
 *
 * BZ-1030424: Added <code>default_schema</code> argument to allow finding tables only for a given database schema.
 *
 * @param default_schema If specified, look up the table only for the specified schema.
 * @return true if the dashbuilder installed modules table exists
 * @throws Exception
 */
protected boolean existsModulesTable(final String default_schema) throws Exception {
    final boolean[] returnValue = { false };
    new HibernateTxFragment(true) {
        protected void txFragment(Session session) throws Exception {
            Work w = new Work() {
                public void execute(Connection connection) throws SQLException {
                    // IMPORTANT NOTE: SQL Server driver closes the previous result set. So it's very important to read the
                    // data from the first result set before opening a new one. If not an exception is thrown.

                    DatabaseMetaData metaData = connection.getMetaData();
                    String schema = default_schema != null && default_schema.trim().length() > 0
                            ? default_schema
                            : null;
                    returnValue[0] = metaData.getTables(null, schema, installedModulesTable.toLowerCase(), null)
                            .next();
                    if (!returnValue[0])
                        returnValue[0] = metaData
                                .getTables(null, schema, installedModulesTable.toUpperCase(), null).next();
                }
            };
            session.doWork(w);
        }
    }.execute();
    return returnValue[0];
}
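
The double lookup (lowercase, then uppercase) works around the fact that databases fold unquoted identifiers differently: Oracle and DB2 fold to uppercase, PostgreSQL to lowercase. DatabaseMetaData can report which convention the database uses, so an alternative is to normalize the name once up front. A sketch, reusing the installedModulesTable field and the schema variable from the snippet above:

// Sketch: normalize the table name to the database's identifier case
// before calling getTables, instead of probing both cases.
DatabaseMetaData metaData = connection.getMetaData();
String tableName = installedModulesTable; // field from the snippet above
if (metaData.storesUpperCaseIdentifiers()) {
    tableName = tableName.toUpperCase();
} else if (metaData.storesLowerCaseIdentifiers()) {
    tableName = tableName.toLowerCase();
}
boolean exists;
try (ResultSet rs = metaData.getTables(null, schema, tableName, null)) {
    exists = rs.next();
}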

From source file:org.apache.hadoop.hive.ql.stats.jdbc.JDBCStatsPublisher.java

/**
 * Initialize the intermediate stats DB for the first time it is running (e.g.,
 * creating tables).
 */
@Override
public boolean init(Configuration hconf) {
    Statement stmt = null;
    ResultSet rs = null;
    try {
        this.hiveconf = hconf;
        connectionString = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVESTATSDBCONNECTIONSTRING);
        String driver = HiveConf.getVar(hconf, HiveConf.ConfVars.HIVESTATSJDBCDRIVER);
        JavaUtils.loadClass(driver).newInstance();
        synchronized (DriverManager.class) {
            DriverManager.setLoginTimeout(timeout);
            conn = DriverManager.getConnection(connectionString);

            stmt = conn.createStatement();
            Utilities.setQueryTimeout(stmt, timeout);

            // TODO: why is this not done using Hive db scripts?
            // Check if the table exists
            DatabaseMetaData dbm = conn.getMetaData();
            String tableName = JDBCStatsUtils.getStatTableName();
            rs = dbm.getTables(null, null, tableName, null);
            boolean tblExists = rs.next();
            if (!tblExists) { // Table does not exist, create it
                String createTable = JDBCStatsUtils.getCreate("");
                stmt.executeUpdate(createTable);
            } else {
                // Upgrade column name to allow for longer paths.
                String idColName = JDBCStatsUtils.getIdColumnName();
                int colSize = -1;
                try {
                    rs.close();
                    rs = dbm.getColumns(null, null, tableName, idColName);
                    if (rs.next()) {
                        colSize = rs.getInt("COLUMN_SIZE");
                        if (colSize < JDBCStatsSetupConstants.ID_COLUMN_VARCHAR_SIZE) {
                            String alterTable = JDBCStatsUtils.getAlterIdColumn();
                            stmt.executeUpdate(alterTable);
                        }
                    } else {
                        LOG.warn("Failed to update " + idColName + " - column not found");
                    }
                } catch (Throwable t) {
                    LOG.warn("Failed to update " + idColName + " (size " + (colSize == -1 ? "unknown" : colSize)
                            + ")", t);
                }
            }
        }
    } catch (Exception e) {
        LOG.error("Error during JDBC initialization. ", e);
        return false;
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                // do nothing
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                // do nothing
            }
        }
        closeConnection();
    }
    return true;
}

From source file:org.diffkit.db.DKDBTableDataAccess.java

/**
 * HyperSQL seems to have some problems with DatabaseMetaData.getTables(),
 * when only a tableName is specified.
 */
private List<Map<String, ?>> getTableMapsHyperSQL(String catalog_, String schema_, String tableName_,
        DatabaseMetaData dbMeta_) throws SQLException {
    _log.debug("catalog_->{}", catalog_);
    _log.debug("schema_->{}", schema_);
    _log.debug("tableName_->{}", tableName_);
    ResultSet tablesRS = dbMeta_.getTables(null, null, null, null);
    _log.debug("tablesRS->{}", tablesRS);
    if (tablesRS == null) {
        _log.warn("no tablesRS for catalog_->{} schema_->{} tableName_->{}");
        return null;
    }
    List<Map<String, ?>> allTableMaps = DKSqlUtil.readRows(tablesRS, true);
    _log.debug("allTableMaps->{}", allTableMaps);
    DKSqlUtil.close(tablesRS);
    List<Map<String, ?>> matchingTableMaps = new ArrayList<Map<String, ?>>();
    for (Map<String, ?> map : allTableMaps) {
        if (catalog_ != null) {
            String catalogName = (String) DKMapUtil.getValueForKeyPrefix(map, TABLE_CATALOG_KEY);
            if (!StringUtils.equalsIgnoreCase(catalog_, catalogName))
                continue;
        }
        if (schema_ != null) {
            String schemaName = (String) DKMapUtil.getValueForKeyPrefix(map, TABLE_SCHEMA_KEY);
            if (!StringUtils.equalsIgnoreCase(schema_, schemaName))
                continue;
        }
        if (tableName_ != null) {
            String tableName = (String) map.get(TABLE_NAME_KEY);
            if (!StringUtils.equalsIgnoreCase(tableName_, tableName))
                continue;
        }
        matchingTableMaps.add(map);
    }
    return matchingTableMaps;
}
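
Note the trade-off in this workaround: it retrieves metadata for every table in the database and filters client-side with case-insensitive comparisons, which sidesteps the driver quirk at the cost of reading a much larger result set on schemas with many tables.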