Example usage for java.sql Connection getMetaData

List of usage examples for java.sql Connection getMetaData

Introduction

On this page you can find example usage of java.sql Connection getMetaData.

Prototype

DatabaseMetaData getMetaData() throws SQLException;

Document

Retrieves a DatabaseMetaData object that contains metadata about the database to which this Connection object represents a connection.
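
A minimal, self-contained sketch of the typical call pattern follows; it is not taken from any of the projects listed under Usage, and the jdbc:h2:mem URL is only a placeholder for whatever database you actually connect to. The DatabaseMetaData object is obtained once and then queried for basic facts about the database, driver and connection.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.SQLException;

public class MetaDataExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder URL: any JDBC-accessible database works the same way.
        try (Connection con = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            DatabaseMetaData meta = con.getMetaData();
            System.out.println("Product: " + meta.getDatabaseProductName());
            System.out.println("Version: " + meta.getDatabaseProductVersion());
            System.out.println("Driver:  " + meta.getDriverName());
            System.out.println("URL:     " + meta.getURL());
            System.out.println("User:    " + meta.getUserName());
        }
    }
}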

Usage

From source file: com.liferay.util.dao.hibernate.DynamicDialect.java

public DynamicDialect() {

    // Instantiate the proper dialect

    String datasource = GetterUtil
            .getString(Environment.getProperties().getProperty(Environment.DATASOURCE, "jdbc/dotCMSPool"));

    try {
        Connection con = DataAccess.getConnection(datasource);

        String url = con.getMetaData().getURL();

        Class dialectClass = null;

        if (url.startsWith(DriverInfo.DB2_URL)) {
            dialectClass = DB2Dialect.class;
        } else if (url.startsWith(DriverInfo.HYPERSONIC_URL)) {
            dialectClass = HSQLDialect.class;
        } else if (url.startsWith(DriverInfo.MYSQL_URL)) {
            dialectClass = MySQLDialect.class;
        } else if (url.startsWith(DriverInfo.ORACLE_URL)) {
            dialectClass = OracleDialect.class;
        } else if (url.startsWith(DriverInfo.POSTGRESQL_URL)) {
            dialectClass = PostgreSQLDialect.class;
        } else if (url.startsWith(DriverInfo.SQLSERVER_URL)) {
            dialectClass = SQLServerDialect.class;
        }

        if (dialectClass != null) {
            _log.debug("Class implementation " + dialectClass.getName());
        } else {
            _log.debug("Class implementation is null");
        }

        if (dialectClass != null) {
            _dialect = (Dialect) dialectClass.newInstance();
        }

        DataAccess.cleanUp(con);
    } catch (Exception e) {
        Logger.error(this, e.getMessage(), e);
    }

    if (_dialect == null) {
        _dialect = new GenericDialect();
    }

    // Synchronize default properties

    getDefaultProperties().clear();

    Enumeration enu = _dialect.getDefaultProperties().propertyNames();

    while (enu.hasMoreElements()) {
        String key = (String) enu.nextElement();
        String value = _dialect.getDefaultProperties().getProperty(key);

        getDefaultProperties().setProperty(key, value);
    }
}

From source file: com.bluexml.side.Integration.alfresco.sql.synchronization.dialects.CreateTableStatement.java

public TableStatus checkStatus(Connection connection) {

    TableStatus status = TableStatus.EXISTS_MATCHED;
    ResultSet rs = null;

    try {
        DatabaseMetaData databaseMetaData = connection.getMetaData();
        rs = databaseMetaData.getColumns(null, null, tableName, "%");

        if (!rs.next()) {
            status = TableStatus.NOT_EXISTS;
        } else {
            if (logger.isDebugEnabled())
                logger.debug("Checking table '" + tableName + "'");

            Map<String, Integer> tableColumns = new LinkedHashMap<String, Integer>();
            do {
                String columnName = rs.getString("COLUMN_NAME");
                Integer dataType = rs.getInt("DATA_TYPE");
                if (logger.isDebugEnabled()) {
                    String dataTypeDepName = rs.getString("TYPE_NAME");
                    logger.debug("Column '" + columnName + "' with type '" + dataTypeDepName + "'(" + dataType
                            + ")");
                }
                tableColumns.put(columnName, dataType);
            } while (rs.next());
            rs.close();
            // TODO : Implement type checking to return EXIST_SIMILAR if types are compatible

            Set<String> propertySet = columns.keySet();
            propertySet.removeAll(tableColumns.keySet());

            if (!propertySet.isEmpty()) {
                status = TableStatus.EXISTS_UNMATCHED;
            }

            if (customActionManager != null) {
                status = customActionManager.doInSchemaChecking(tableColumns, status, tableType);
            } else {
                if (logger.isDebugEnabled())
                    logger.debug(
                            "Cannot execute any custom checking since no custom action manager has been defined on create statement for table '"
                                    + tableName + "'");
            }
        }

    } catch (SQLException e) {
        logger.error("Cannot get meta-data for checking table status");
        logger.debug(e);
        return TableStatus.NOT_CHECKABLE;
    }

    if (logger.isDebugEnabled())
        logger.debug("Checking table output status '" + tableName + "': " + status.name());

    return status;

}

From source file: net.sf.classifier4J.bayesian.JDBCWordsDataSource.java

/**
 * Create the word_probability table if it does not already
 * exist. Tested successfully with MySQL 4 & HSQLDB. See
 * comments in code for Axion 1.0M1 issues. 
 * 
 * @throws WordsDataSourceException
 */
private void createTable() throws WordsDataSourceException {
    Connection con = null;
    try {
        con = connectionManager.getConnection();

        // check if the word_probability table exists 
        DatabaseMetaData dbm = con.getMetaData();
        ResultSet rs = dbm.getTables(null, null, "word_probability", null);
        if (!rs.next()) {
            // the table does not exist
            Statement stmt = con.createStatement();
            //   Under Axion 1.0M1, use          
            //    stmt.executeUpdate( "CREATE TABLE word_probability ( "
            //         + " word         VARCHAR(255) NOT NULL,"
            //         + " category      VARCHAR(20) NOT NULL,"
            //         + " match_count      INTEGER NOT NULL,"
            //         + " nonmatch_count   INTEGER NOT NULL, "
            //         + " PRIMARY KEY(word, category) ) ");            
            stmt.executeUpdate("CREATE TABLE word_probability ( " + " word         VARCHAR(255) NOT NULL,"
                    + " category      VARCHAR(20) NOT NULL," + " match_count      INT DEFAULT 0 NOT NULL,"
                    + " nonmatch_count   INT DEFAULT 0 NOT NULL, " + " PRIMARY KEY(word, category) ) ");
        }
    } catch (SQLException e) {
        throw new WordsDataSourceException("Problem creating table", e); // we can't recover from this            
    } finally {
        if (con != null) {
            try {
                connectionManager.returnConnection(con);
            } catch (SQLException e1) {
                // ignore
            }
        }
    }
}
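
Note that the lookup above passes the lower-case name "word_probability" to getTables(). Identifier case handling varies between databases: several engines (Oracle, HSQLDB and H2, for example) store unquoted identifiers in upper case, so a lower-case pattern may not match. Below is a small, hedged sketch, not part of the original library, of an existence check that tries both the given and the upper-case form.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;

public final class TableExists {
    // Returns true if a table with the given name is visible to the connection.
    // Tries the name as written and its upper-case form, because databases such
    // as Oracle or HSQLDB store unquoted identifiers in upper case.
    public static boolean tableExists(Connection con, String tableName) throws SQLException {
        DatabaseMetaData dbm = con.getMetaData();
        for (String candidate : new String[] { tableName, tableName.toUpperCase() }) {
            try (ResultSet rs = dbm.getTables(null, null, candidate, new String[] { "TABLE" })) {
                if (rs.next()) {
                    return true;
                }
            }
        }
        return false;
    }
}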

From source file: com.dominion.salud.pedicom.negocio.service.impl.PedidosExtServiceImpl.java

@Override
public void sendByMailByPDF(List<Pedidos> listPedidos) {
    Connection con = null;
    try {
        con = routingDataSource.getConnection();
        logger.debug("     Conexion obtenida de bd con username " + con.getMetaData().getUserName());
        logger.debug("     Agrupando pedidos: " + listPedidos.size());
        Map<Integer, List<Pedidos>> map = sortPedidos(listPedidos);
        logger.debug("     Pedidos correctamente agrupados: " + map.size());
        Iterator<Integer> itMap = map.keySet().iterator();
        while (itMap.hasNext()) {
            Integer key = itMap.next();
            List<Pedidos> listPedsProveedor = new ArrayList<>();
            try {
                listPedsProveedor = map.get(key);
                prepararYenviarEmail(listPedsProveedor, con);
            } catch (Exception e) {
                logger.error("     Error", e);
                logger.error(e.toString());
                logger.debug("     Actualizando pedidos a error");
                errorPedido(listPedsProveedor, e);
            }
        }
        logger.debug("ENVIOS FINALIZADOS");
    } catch (SQLException ex) {
        logger.error("     Error", ex);
    } finally {
        try {
            if (con != null && !con.isClosed()) {
                con.close();
            }
        } catch (SQLException ex) {
            logger.error("Error :" + ex.getMessage());
        }
    }
}

From source file: com.qualogy.qafe.business.integration.rdb.SQLQueryDAO.java

/**
 * Returns the database dialect for the given connection.
 *
 * @param conn the connection to inspect
 * @return the matching dialect, or null if the product could not be identified
 */
private Dialect getDatabaseDialect(Connection conn) {
    Dialect dialect = null;
    try {
        String database = conn.getMetaData().getDatabaseProductName();
        if ((database != null) && database.equalsIgnoreCase(Dialect.ORACLE_DIALECT_KEY)) {
            dialect = Dialect.ORACLE_DIALECT;
        }
    } catch (SQLException e) {
        e.printStackTrace();
    }
    return dialect;
}
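
The method above only recognizes Oracle; for every other product the dialect stays null. When several databases need to be distinguished, getDatabaseProductName() can drive a broader dispatch. The sketch below is illustrative only: the product-name strings are the values commonly reported by the respective drivers, not names guaranteed by the JDBC specification.

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Locale;

public final class DatabaseDetector {
    // Maps the reported product name to a coarse database identifier,
    // or "unknown" when the product is not recognized.
    public static String detect(Connection conn) throws SQLException {
        String product = conn.getMetaData().getDatabaseProductName();
        if (product == null) {
            return "unknown";
        }
        String p = product.toLowerCase(Locale.ROOT);
        if (p.contains("oracle")) {
            return "oracle";
        } else if (p.contains("mysql")) {
            return "mysql";
        } else if (p.contains("postgresql")) {
            return "postgresql";
        } else if (p.contains("microsoft sql server")) {
            return "sqlserver";
        } else if (p.contains("hsql")) {
            return "hsqldb";
        }
        return "unknown";
    }
}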

From source file: gov.nih.nci.ncicb.cadsr.service.impl.CaDSRQueryServiceImpl.java

/**
 * Queries the XML for the DataElements associated with the attributes.
 * @param attributes
 * @return CDE XML for data elements associated with the attributes
 */
public String getXMLForAttributes(Collection<UMLAttributeMetadata> attributes) throws Exception {
    XMLGeneratorBean cdeXmlGenerator = new XMLGeneratorBean();
    Connection conn = null;
    Connection oracleConn = null;
    try {
        conn = dataSource.getConnection();
        //Get the OracleConnection
        oracleConn = conn.getMetaData().getConnection();
        if (!(oracleConn instanceof oracle.jdbc.driver.OracleConnection)) {
            log.error("DataElement XML download can work with OracleConnection only.");
            throw new Exception("DataElement XML download can work with OracleConnection only.");
        }
        StringBuffer where = new StringBuffer(attributes.size() * 33);
        where.append("DE_IDSEQ IN ('-1'");
        for (UMLAttributeMetadata attribute : attributes) {
            DataElement de = attribute.getDataElement();
            where.append(",'" + de.getId() + "'");
        }
        where.append(")");

        cdeXmlGenerator.setConnection(oracleConn);
        cdeXmlGenerator.setQuery(cdeXMLQuery);
        cdeXmlGenerator.setWhereClause(where.toString());
        cdeXmlGenerator.setRowsetTag(rowSetTag);
        cdeXmlGenerator.setRowTag(rowTag);
        cdeXmlGenerator.displayNulls(true);
        String xmlString = cdeXmlGenerator.getXMLString();
        return xmlString;
    } catch (Exception e) {
        log.error("Error getting CDE Xml.", e);
        throw e;
    } finally {
        cdeXmlGenerator.closeResources();
        if (conn != null) {
            conn.close();
        }
        if (oracleConn != null) {
            oracleConn.close();
        }
    }
}
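
The instanceof check above depends on getMetaData().getConnection() handing back the driver's own connection class, which may not hold when the connection comes from a pool that wraps it. Since JDBC 4.0, Connection.unwrap() is the standard way to ask for the vendor interface directly. A hedged sketch, assuming the Oracle driver's public oracle.jdbc.OracleConnection interface is on the classpath:

import java.sql.Connection;
import java.sql.SQLException;

import oracle.jdbc.OracleConnection;

public final class OracleUnwrap {
    // Asks the connection (or its pool wrapper) for the Oracle-specific interface.
    // Throws if the underlying connection is not an Oracle connection.
    public static OracleConnection toOracle(Connection conn) throws SQLException {
        if (conn.isWrapperFor(OracleConnection.class)) {
            return conn.unwrap(OracleConnection.class);
        }
        throw new SQLException("Not an Oracle connection: "
                + conn.getMetaData().getDatabaseProductName());
    }
}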

From source file: com.ailk.oci.ocnosql.tools.load.csvbulkload.CsvBulkLoadTool.java

/**
 * Perform any required validation on the table being bulk loaded into:
 * - ensure no column family names start with '_', as they'd be ignored leading to problems.
 * @throws java.sql.SQLException
 */
private void validateTable(Connection conn, String schemaName, String tableName) throws SQLException {

    ResultSet rs = conn.getMetaData().getColumns(null, StringUtil.escapeLike(schemaName),
            StringUtil.escapeLike(tableName), null);
    while (rs.next()) {
        String familyName = rs.getString(PhoenixDatabaseMetaData.COLUMN_FAMILY);
        if (familyName != null && familyName.startsWith("_")) {
            if (QueryConstants.DEFAULT_COLUMN_FAMILY.equals(familyName)) {
                throw new IllegalStateException(
                        "CSV Bulk Loader error: All column names that are not part of the "
                                + "primary key constraint must be prefixed with a column family "
                                + "name (i.e. f.my_column VARCHAR)");
            } else {
                throw new IllegalStateException("CSV Bulk Loader error: Column family name "
                        + "must not start with '_': " + familyName);
            }
        }
    }
    rs.close();
}

From source file: com.linuxbox.enkive.audit.sqldb.SqlDbAuditService.java

/**
 * Since the description column is of finite size, we may need to truncate
 * long descriptions to fit in the column. We need to determine the size of
 * the column, which is likely to be described in a Liquibase changelog XML
 * file. Rather than try to keep the Java and XML copies in sync, this just
 * retrieves the current column size from the database's metadata.
 *
 * @return the size of the description column as reported by the database
 * @throws AuditServiceException
 */
public int getDescriptionColumnSize() throws AuditServiceException {
    AuditOperation<Integer> op = new AuditOperation<Integer>() {
        @Override
        public Integer execute(Connection connection) throws SQLException, AuditServiceException {
            int columnSize = -1;

            DatabaseMetaData metaData = connection.getMetaData();
            ResultSet resultSet = metaData.getColumns(null, null, "events", "description");
            setResultSet(resultSet);

            if (resultSet.next()) {
                columnSize = resultSet.getInt("COLUMN_SIZE");
                if (resultSet.next()) {
                    throw new AuditServiceException(
                            "retrieved multiple column metadata entries for the description column in the events table");
                }
            } else {
                throw new AuditServiceException(
                        "could not retrieve the size of the description column in the events table");
            }

            return columnSize;
        }
    };

    return op.executeAuditOperation();
}

From source file: com.baidu.rigel.biplatform.tesseract.dataquery.service.impl.SqlDataQueryServiceImpl.java

/**
 * Runs the SQL query against the given data source and collects the rows
 * into a SearchIndexResultSet.
 * @param sqlQuery the query to execute
 * @param dataSource the data source to run the query against
 * @param limitStart start offset of the result window
 * @param limitEnd end of the result window
 * @return the query results
 */
private SearchIndexResultSet querySqlList(SqlQuery sqlQuery, DataSource dataSource, long limitStart,
        long limitEnd) {
    long current = System.currentTimeMillis();
    if (sqlQuery == null || dataSource == null || limitEnd < 0) {
        throw new IllegalArgumentException();
    }

    sqlQuery.setLimitMap(limitStart, limitEnd);

    this.initJdbcTemplate(dataSource);

    Meta meta = new Meta(sqlQuery.getSelectList().toArray(new String[0]));
    SearchIndexResultSet resultSet = new SearchIndexResultSet(meta, 1000000);

    jdbcTemplate.query(new PreparedStatementCreator() {

        @Override
        public PreparedStatement createPreparedStatement(Connection con) throws SQLException {
            PreparedStatement pstmt = con.prepareStatement(sqlQuery.toSql(), ResultSet.TYPE_FORWARD_ONLY,
                    ResultSet.CONCUR_READ_ONLY);
            if (con.getMetaData().getDriverName().toLowerCase().contains("mysql")) {
                pstmt.setFetchSize(Integer.MIN_VALUE);
            }
            return pstmt;
        }
    }, new RowCallbackHandler() {

        @Override
        public void processRow(ResultSet rs) throws SQLException {
            List<Object> fieldValues = new ArrayList<Object>();
            String groupBy = "";
            for (String select : sqlQuery.getSelectList()) {
                fieldValues.add(rs.getObject(select));
                if (sqlQuery.getGroupBy() != null && sqlQuery.getGroupBy().contains(select)) {
                    groupBy += rs.getString(select) + ",";
                }
            }

            SearchIndexResultRecord record = new SearchIndexResultRecord(
                    fieldValues.toArray(new Serializable[0]), groupBy);
            resultSet.addRecord(record);
        }
    });
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "querySqlList",
            "[sqlQuery:" + sqlQuery.toSql() + "][dataSource:" + dataSource + "][limitStart:" + limitStart
                    + "][limitEnd:" + limitEnd + "] cost" + (System.currentTimeMillis() - current + "ms!")));
    return resultSet;
}
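
The getDriverName() test in createPreparedStatement() exists because MySQL Connector/J treats a fetch size of Integer.MIN_VALUE as a request to stream rows one at a time instead of buffering the whole result set in memory, while most other drivers reject a negative fetch size. A hedged sketch that extracts this pattern into a reusable helper:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Locale;

public final class StreamingStatements {
    // Prepares a forward-only, read-only statement; on MySQL Connector/J it also
    // requests row-by-row streaming via the driver's Integer.MIN_VALUE convention.
    public static PreparedStatement prepareStreaming(Connection con, String sql) throws SQLException {
        PreparedStatement pstmt = con.prepareStatement(sql,
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        String driver = con.getMetaData().getDriverName();
        if (driver != null && driver.toLowerCase(Locale.ROOT).contains("mysql")) {
            pstmt.setFetchSize(Integer.MIN_VALUE);
        }
        return pstmt;
    }
}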