Example usage for java.sql.SQLWarning.getMessage()

A list of usage examples for java.sql.SQLWarning.getMessage()

Introduction

This page collects example usages of java.sql.SQLWarning.getMessage() from open-source projects.

Prototype

public String getMessage() 

Document

Returns the detail message string of this throwable.
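
Before the project examples below, here is a minimal sketch of the typical pattern: obtain the head of the warning chain from a Connection, Statement, or ResultSet and call getMessage() on each link until getNextWarning() returns null. The H2 in-memory URL is only a placeholder assumption; any JDBC driver that reports warnings works the same way.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;

public class WarningChainSketch {
    public static void main(String[] args) throws SQLException {
        // Placeholder connection URL; substitute your own driver and database
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement()) {
            stmt.execute("select 1");

            // Warnings are chained; getMessage() returns the detail message of each link
            SQLWarning warning = stmt.getWarnings();
            while (warning != null) {
                System.out.println("Warning: " + warning.getMessage() + " (SQLState=" + warning.getSQLState()
                        + ", vendor code=" + warning.getErrorCode() + ")");
                warning = warning.getNextWarning();
            }
        }
    }
}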

Usage

From source file:org.apache.bigtop.itest.hive.TestJdbc.java

@Test
public void statement() throws SQLException {
    try (Statement stmt = conn.createStatement()) {
        stmt.cancel();
    }

    try (Statement stmt = conn.createStatement()) {
        stmt.clearWarnings();

        final String tableName = "bigtop_jdbc_statement_test_table";

        stmt.execute("drop table if exists " + tableName);
        stmt.execute("create table " + tableName + " (a int, b varchar(32))");

        stmt.executeUpdate("insert into " + tableName + " values (1, 'abc'), (2, 'def')");

        int intrc = stmt.getUpdateCount();
        LOG.debug("Update count is " + intrc);

        ResultSet rs = stmt.executeQuery("select * from " + tableName);
        while (rs.next()) {
            LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
        }

        Connection localConn = stmt.getConnection();

        intrc = stmt.getFetchDirection();
        LOG.debug("Fetch direction is " + intrc);

        intrc = stmt.getFetchSize();
        LOG.debug("Fetch size is " + intrc);

        intrc = stmt.getMaxRows();
        LOG.debug("max rows is " + intrc);

        boolean boolrc = stmt.getMoreResults();
        LOG.debug("more results is " + boolrc);

        intrc = stmt.getQueryTimeout();
        LOG.debug("query timeout is " + intrc);

        stmt.execute("select * from " + tableName);
        rs = stmt.getResultSet();
        while (rs.next()) {
            LOG.debug("Fetched " + rs.getInt(1) + "," + rs.getString(2));
        }

        intrc = stmt.getResultSetType();
        LOG.debug("result set type is " + intrc);

        SQLWarning warning = stmt.getWarnings();
        while (warning != null) {
            LOG.debug("Found a warning: " + warning.getMessage());
            warning = warning.getNextWarning();
        }

        boolrc = stmt.isClosed();
        LOG.debug("is closed " + boolrc);

        boolrc = stmt.isCloseOnCompletion();
        LOG.debug("is close on completion " + boolrc);

        boolrc = stmt.isPoolable();
        LOG.debug("is poolable " + boolrc);

        stmt.setFetchDirection(ResultSet.FETCH_FORWARD);
        stmt.setFetchSize(500);
        stmt.setMaxRows(500);
    }
}

From source file:org.apache.bigtop.itest.hive.TestJdbc.java

@Test
public void preparedStmtAndResultSet() throws SQLException {
    final String tableName = "bigtop_jdbc_psars_test_table";
    try (Statement stmt = conn.createStatement()) {
        stmt.execute("drop table if exists " + tableName);
        stmt.execute("create table " + tableName + " (bo boolean, ti tinyint, db double, fl float, "
                + "i int, lo bigint, sh smallint, st varchar(32))");
    }

    // NOTE Hive 1.2 theoretically supports binary, Date & Timestamp in JDBC, but I get errors when I
    // try to put them in the query.
    try (PreparedStatement ps = conn
            .prepareStatement("insert into " + tableName + " values (?, ?, ?, ?, ?, ?, ?, ?)")) {
        ps.setBoolean(1, true);
        ps.setByte(2, (byte) 1);
        ps.setDouble(3, 3.141592654);
        ps.setFloat(4, 3.14f);
        ps.setInt(5, 3);
        ps.setLong(6, 10L);
        ps.setShort(7, (short) 20);
        ps.setString(8, "abc");
        ps.executeUpdate();
    }

    try (PreparedStatement ps = conn.prepareStatement("insert into " + tableName + " (i, st) " + "values(?, ?)",
            ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) {
        ps.setNull(1, Types.INTEGER);
        ps.setObject(2, "mary had a little lamb");
        ps.executeUpdate();
        ps.setNull(1, Types.INTEGER, null);
        ps.setString(2, "its fleece was white as snow");
        ps.clearParameters();
        ps.setNull(1, Types.INTEGER, null);
        ps.setString(2, "its fleece was white as snow");
        ps.execute();

    }

    try (Statement stmt = conn.createStatement()) {

        ResultSet rs = stmt.executeQuery("select * from " + tableName);

        ResultSetMetaData md = rs.getMetaData();

        int colCnt = md.getColumnCount();
        LOG.debug("Column count is " + colCnt);

        for (int i = 1; i <= colCnt; i++) {
            LOG.debug("Looking at column " + i);
            String strrc = md.getColumnClassName(i);
            LOG.debug("Column class name is " + strrc);

            int intrc = md.getColumnDisplaySize(i);
            LOG.debug("Column display size is " + intrc);

            strrc = md.getColumnLabel(i);
            LOG.debug("Column label is " + strrc);

            strrc = md.getColumnName(i);
            LOG.debug("Column name is " + strrc);

            intrc = md.getColumnType(i);
            LOG.debug("Column type is " + intrc);

            strrc = md.getColumnTypeName(i);
            LOG.debug("Column type name is " + strrc);

            intrc = md.getPrecision(i);
            LOG.debug("Precision is " + intrc);

            intrc = md.getScale(i);
            LOG.debug("Scale is " + intrc);

            boolean boolrc = md.isAutoIncrement(i);
            LOG.debug("Is auto increment? " + boolrc);

            boolrc = md.isCaseSensitive(i);
            LOG.debug("Is case sensitive? " + boolrc);

            boolrc = md.isCurrency(i);
            LOG.debug("Is currency? " + boolrc);

            intrc = md.getScale(i);
            LOG.debug("Scale is " + intrc);

            intrc = md.isNullable(i);
            LOG.debug("Is nullable? " + intrc);

            boolrc = md.isReadOnly(i);
            LOG.debug("Is read only? " + boolrc);

        }

        while (rs.next()) {
            LOG.debug("bo = " + rs.getBoolean(1));
            LOG.debug("bo = " + rs.getBoolean("bo"));
            LOG.debug("ti = " + rs.getByte(2));
            LOG.debug("ti = " + rs.getByte("ti"));
            LOG.debug("db = " + rs.getDouble(3));
            LOG.debug("db = " + rs.getDouble("db"));
            LOG.debug("fl = " + rs.getFloat(4));
            LOG.debug("fl = " + rs.getFloat("fl"));
            LOG.debug("i = " + rs.getInt(5));
            LOG.debug("i = " + rs.getInt("i"));
            LOG.debug("lo = " + rs.getLong(6));
            LOG.debug("lo = " + rs.getLong("lo"));
            LOG.debug("sh = " + rs.getShort(7));
            LOG.debug("sh = " + rs.getShort("sh"));
            LOG.debug("st = " + rs.getString(8));
            LOG.debug("st = " + rs.getString("st"));
            LOG.debug("tm = " + rs.getObject(8));
            LOG.debug("tm = " + rs.getObject("st"));
            LOG.debug("tm was null " + rs.wasNull());
        }
        LOG.debug("bo is column " + rs.findColumn("bo"));

        int intrc = rs.getConcurrency();
        LOG.debug("concurrency " + intrc);

        intrc = rs.getFetchDirection();
        LOG.debug("fetch direction " + intrc);

        intrc = rs.getType();
        LOG.debug("type " + intrc);

        Statement copy = rs.getStatement();

        SQLWarning warning = rs.getWarnings();
        while (warning != null) {
            LOG.debug("Found a warning: " + warning.getMessage());
            warning = warning.getNextWarning();
        }
        rs.clearWarnings();
    }
}

From source file:org.gbif.ipt.service.manage.impl.SourceManagerImpl.java

private Connection getDbConnection(SqlSource source) throws SQLException {
    Connection conn = null;
    // try to connect to db via simple JDBC
    if (source.getHost() != null && source.getJdbcUrl() != null && source.getJdbcDriver() != null) {
        try {
            DriverManager.setLoginTimeout(CONNECTION_TIMEOUT_SECS);
            Class.forName(source.getJdbcDriver());
            conn = DriverManager.getConnection(source.getJdbcUrl(), source.getUsername(), source.getPassword());

            // If a SQLWarning object is available, log its
            // warning(s). There may be multiple warnings chained.

            SQLWarning warn = conn.getWarnings();
            while (warn != null) {
                log.warn("SQLWarning: state=" + warn.getSQLState() + ", message=" + warn.getMessage()
                        + ", vendor=" + warn.getErrorCode());
                warn = warn.getNextWarning();
            }
        } catch (java.lang.ClassNotFoundException e) {
            String msg = String.format(
                    "Couldnt load JDBC driver to create new external datasource connection with JDBC Class=%s and URL=%s. Error: %s",
                    source.getJdbcDriver(), source.getJdbcUrl(), e.getMessage());
            log.warn(msg, e);
            throw new SQLException(msg, e);
        } catch (Exception e) {
            String msg = String.format(
                    "Couldnt create new external datasource connection with JDBC Class=%s, URL=%s, user=%s. Error: %s",
                    source.getJdbcDriver(), source.getJdbcUrl(), source.getUsername(), e.getMessage());
            log.warn(msg, e);
            throw new SQLException(msg);
        }
    }
    return conn;
}

From source file:org.hyperic.hq.plugin.sybase.SybaseSysmonCollector.java

public void collect() {
    Properties props = getProperties();
    log.debug("[collect] props=" + props);

    try {
        setAvailability(Metric.AVAIL_DOWN);
        if (conn == null) {
            conn = createConnection(props);
        }
        stmt = conn.prepareCall("{call sp_sysmon '" + props.getProperty(INTERVAL) + "'}");
        stmt.executeUpdate();

        StringBuffer response = new StringBuffer();
        SQLWarning war = stmt.getWarnings();
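        // sp_sysmon delivers its report through the statement's warning chain; the do-while below assumes at least one warning is present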
        do {
            response.append(war.getMessage()).append("\n");
            war = war.getNextWarning();
        } while (war != null);
        trace.debug(response);

        String res = response.toString();

        Pattern pat = Pattern.compile("\n +Cache:(.*)\n");
        Matcher m = pat.matcher(res);
        while (m.find()) {
            final String cacheName = m.group(1).trim().replaceAll(" ", "_");
            if (trace.isDebugEnabled()) {
                trace.debug("->'" + cacheName + "'");
                trace.debug("->" + m.start());
            }
            String sec = res.substring(m.start());
            setValue(cacheName + ".Availability", Metric.AVAIL_UP);
            setValue(cacheName + ".CacheHitsRatio", get(sec, "Cache Hits", 5) / 100);
            setValue(cacheName + ".CacheMissesRatio", get(sec, "Cache Misses", 5) / 100);
        }

        // output per engine:
        // Engine 0                        0.0 %      0.0 %    100.0 %
        //
        // regex should only find lines starting with "Engine X                        X.X %"
        // engineid and percentage are in regex groups 1 and 2
        pat = Pattern.compile("\n +Engine (\\d)+\\s+(\\d+\\.\\d+) %.*");
        m = pat.matcher(res);
        while (m.find()) {
            try {
                final String engineId = m.group(1);
                final String cpuBusyVal = m.group(2);
                if (engineId != null && cpuBusyVal != null) {
                    setValue("EngineUtilization" + engineId.trim(),
                            Double.parseDouble(cpuBusyVal.trim()) / 100);
                }
                if (trace.isDebugEnabled()) {
                    trace.debug("Found Engine Utilization for engineid=" + engineId.trim() + " with value "
                            + Double.parseDouble(cpuBusyVal.trim()) / 100);
                }
            } catch (NumberFormatException e) {
                if (trace.isDebugEnabled()) {
                    trace.debug("Unable to parse number from: " + e.toString());
                }
            } catch (IndexOutOfBoundsException e) {
                if (trace.isDebugEnabled()) {
                    trace.debug("Unable to find group from matcher: " + e.toString());
                }
            }
        }

        setValue("Deadlocks", get(res, "Deadlock Percentage", 5));
        setValue("TotalLockReqs", get(res, "Total Lock Requests", 5));
        setValue("AvgLockContention", get(res, "Avg Lock Contention", 5));
        setValue("TotalCacheHitsRatio", get(res, "Total Cache Hits", 6) / 100);
        setValue("TotalCacheMissesRatio", get(res, "Total Cache Misses", 6) / 100);
        setValue("TDSPacketsReceived", get(res, "Total TDS Packets Rec'd", 6) / 100);
        setValue("TDSPacketsSent", get(res, "Total Bytes Rec'd", 5) / 100);
        setAvailability(Metric.AVAIL_UP);

    } catch (SQLException e) {
        setValue("Availability", Metric.AVAIL_DOWN);
        log.debug("[collect] Error " + e.getMessage());
        log.debug("[collect] Error " + getResult().toString());
        if (conn != null) {
            DBUtil.closeJDBCObjects(log, conn, null, null);
            conn = null;
        }
    } finally {
        if (conn != null) {
            DBUtil.closeJDBCObjects(log, null, stmt, null);
        }
    }

}

From source file:org.rimudb.Table.java

/**
 * Update the database with the newRecord data. Key values from the
 * oldRecord are used. The update is run using the SQL connection con.
 */
protected void update(Connection con, Record oldRecord, Record newRecord) throws RimuDBException {
    PreparedStatement stmt = null;
    int statID = 0;
    try {
        List<String> changedPropertyList = newRecord.getModifiedProperties();
        List<String> nulledProperties = oldRecord.getNulledProperties();

        // Throws an exception if one of the changed properties is a PK and it's being changed to null
        tableMetaData.checkForNulledPrimaryKey(newRecord.getModifiedToNullProperties());

        List<String> nullColumnList = tableMetaData.getColumnNamesFromPropertyNames(nulledProperties);

        String sql = sqlStatementCache.getUpdatePKSQL(getOptimisticLocking(), changedPropertyList,
                nullColumnList);
        if (sql == null) {
            sql = sqlAdapter.getPrimaryKeyUpdateStatement(tableMetaData, getTableName(), getOptimisticLocking(),
                    changedPropertyList, nullColumnList);
            sqlStatementCache.addUpdatePKSQL(getOptimisticLocking(), changedPropertyList, nullColumnList, sql);
        }

        // Get the statistic ID
        int loggingType = getDatabase().getDatabaseConfiguration().getLoggingType();
        if (loggingType == DatabaseConfiguration.LOG_STATISTICS) {
            statID = StatisticCollector.getInstance().createStatistic(sql);
        } else if (loggingType == DatabaseConfiguration.LOG_SQL_ONLY) {
            log.info("SQL=" + sql);
        }

        stmt = createPreparedStatement(con, sql, CrudType.UPDATE);

        recordBinder.bindStatementForUpdate(stmt, oldRecord, newRecord, getOptimisticLocking(),
                changedPropertyList);

        if (statID > 0)
            StatisticCollector.getInstance().logEvent(statID, "preparetime");

        int rowsUpdated = stmt.executeUpdate();

        SQLWarning warning = stmt.getWarnings();
        while (warning != null) {
            log.warn(warning.getMessage());
            warning = warning.getNextWarning();
        }

        // If we didn't update a row then the where clause didn't find a record, so we have a stale record
        if (rowsUpdated == 0) {
            // Read the current record from the DB so we can discover the values that are different
            Record currentValue = get(oldRecord.getPrimaryWhereList());
            String changedValues = currentValue.getChangedPropertiesText(oldRecord);
            //            log.error("Record in database: "+currentValue.toKeyPairString());
            //            log.error("Unmodified record in memory: "+oldRecord.toKeyPairString());
            //            log.error("Modified record in memory: "+newRecord.toKeyPairString());
            throw new RecordChangeException(
                    "This record has already been changed by another process. Change was not processed. Changed values: "
                            + changedValues);
        }

        if (statID > 0)
            StatisticCollector.getInstance().logEvent(statID, "executetime");

        if (statID > 0) {
            StatisticCollector.getInstance().logEvent(statID, "processtime");
            if (StatisticCollector.getInstance().exceedsThreshold(statID,
                    getDatabase().getDatabaseConfiguration().getLoggingThreshold())) {
                String text = StatisticCollector.getInstance().formatStatistics(statID,
                        getDatabase().getStatisticFormatter());
                log.info(text);
            }
            StatisticCollector.getInstance().removeID(statID);
        }

    } catch (SQLException e) {

        throw new RimuDBException(e);

    } finally {
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
            }
            stmt = null;
        }
    }
}

From source file:org.sakaiproject.util.conversion.SchemaConversionController.java

public boolean migrate(DataSource datasource, SchemaConversionHandler convert, SchemaConversionDriver driver)
        throws SchemaConversionException {
    // issues:
    // Data size bigger than max size for this type?
    // Failure may cause rest of set to fail?

    boolean alldone = false;
    Connection connection = null;
    PreparedStatement selectNextBatch = null;
    PreparedStatement markNextBatch = null;
    PreparedStatement completeNextBatch = null;
    PreparedStatement selectRecord = null;
    PreparedStatement selectValidateRecord = null;
    PreparedStatement updateRecord = null;
    PreparedStatement reportError = null;
    ResultSet rs = null;
    try {
        connection = datasource.getConnection();
        connection.setAutoCommit(false);
        selectNextBatch = connection.prepareStatement(driver.getSelectNextBatch());
        markNextBatch = connection.prepareStatement(driver.getMarkNextBatch());
        completeNextBatch = connection.prepareStatement(driver.getCompleteNextBatch());
        String selectRecordStr = driver.getSelectRecord();
        selectRecord = connection.prepareStatement(selectRecordStr);
        selectValidateRecord = connection.prepareStatement(driver.getSelectValidateRecord());
        updateRecord = connection.prepareStatement(driver.getUpdateRecord());
        if (reportErrorsInTable) {
            reportError = connection.prepareStatement(driver.getErrorReportSql());
        }
        // log.info("  +++ updateRecord == " + driver.getUpdateRecord());

        // 2. select x at a time
        rs = selectNextBatch.executeQuery();
        List<String> l = new ArrayList<String>();
        while (rs.next()) {
            l.add(rs.getString(1));
        }
        rs.close();
        log.info("Migrating " + l.size() + " records of " + nrecords);

        for (String id : l) {

            markNextBatch.clearParameters();
            markNextBatch.clearWarnings();
            markNextBatch.setString(1, id);
            if (markNextBatch.executeUpdate() != 1) {
                log.warn("  --> Failed to mark id [" + id + "][" + id.length() + "] for processing ");
                insertErrorReport(reportError, id, driver.getHandler(),
                        "Unable to mark this record for processing");
            }
        }

        int count = 1;
        for (String id : l) {
            selectRecord.clearParameters();
            selectRecord.setString(1, id);
            rs = selectRecord.executeQuery();
            Object source = null;
            if (rs.next()) {
                source = convert.getSource(id, rs);
            } else {
                log.warn("  --> Result-set is empty for id: " + id + " [" + count + " of " + l.size() + "]");
                insertErrorReport(reportError, id, driver.getHandler(), "Result set empty getting source");
            }
            rs.close();
            if (source == null) {
                log.warn("  --> Source is null for id: " + id + " [" + count + " of " + l.size() + "]");
                insertErrorReport(reportError, id, driver.getHandler(), "Source null");
            } else {
                try {
                    updateRecord.clearParameters();
                    if (convert.convertSource(id, source, updateRecord)) {
                        if (updateRecord.executeUpdate() == 1) {
                            selectValidateRecord.clearParameters();
                            selectValidateRecord.setString(1, id);
                            rs = selectValidateRecord.executeQuery();
                            Object result = null;
                            if (rs.next()) {
                                result = convert.getValidateSource(id, rs);
                            }

                            convert.validate(id, source, result);
                        } else {
                            log.warn("  --> Failed to update record " + id + " [" + count + " of " + l.size()
                                    + "]");
                            insertErrorReport(reportError, id, driver.getHandler(), "Failed to update record");
                        }
                    } else {
                        log.warn("  --> Did not update record " + id + " [" + count + " of " + l.size() + "]");
                        insertErrorReport(reportError, id, driver.getHandler(), "Failed to write update to db");
                    }
                    rs.close();
                } catch (SQLException e) {
                    String msg = "  --> Failure converting or validating item " + id + " [" + count + " of "
                            + l.size() + "] \n";
                    insertErrorReport(reportError, id, driver.getHandler(),
                            "Exception while updating, converting or verifying item");
                    SQLWarning warnings = updateRecord.getWarnings();
                    while (warnings != null) {
                        msg += "\t\t\t" + warnings.getErrorCode() + "\t" + warnings.getMessage() + "\n";
                        warnings = warnings.getNextWarning();
                    }
                    log.warn(msg, e);
                    updateRecord.clearWarnings();
                    updateRecord.clearParameters();
                }

            }
            completeNextBatch.clearParameters();
            completeNextBatch.setString(1, id);
            if (completeNextBatch.executeUpdate() != 1) {
                log.warn("  --> Failed to mark id " + id + " for processing [" + count + " of " + l.size()
                        + "]");
                insertErrorReport(reportError, id, driver.getHandler(), "Unable to complete next batch");
            }
            count++;
        }

        if (l.size() == 0) {
            dropRegisterTable(connection, convert, driver);
            alldone = true;
        }
        connection.commit();
        nrecords -= l.size();

    } catch (Exception e) {
        log.error("Failed to perform migration ", e);
        try {
            connection.rollback();
            log.error("  ==> Rollback Sucessful ", e);
        } catch (Exception ex) {
            log.error("  ==> Rollback Failed ", e);
        }
        throw new SchemaConversionException(
                "Schema Conversion has been aborted due to earlier errors, please investigate ");

    } finally {
        try {
            rs.close();
        } catch (Exception ex) {
            log.debug("exception closing rs " + ex);
        }
        try {
            selectNextBatch.close();
        } catch (Exception ex) {
            log.debug("exception closing selectNextBatch " + ex);
        }
        try {
            markNextBatch.close();
        } catch (Exception ex) {
            log.debug("exception closing markNextBatch " + ex);
        }
        try {
            completeNextBatch.close();
        } catch (Exception ex) {
            log.debug("exception closing completeNextBatch " + ex);
        }
        try {
            selectRecord.close();
        } catch (Exception ex) {
            log.debug("exception closing selectRecord " + ex);
        }
        try {
            selectValidateRecord.close();
        } catch (Exception ex) {
            log.debug("exception closing selectValidateRecord " + ex);
        }
        try {
            updateRecord.close();
        } catch (Exception ex) {
            log.debug("exception closing updateRecord " + ex);
        }
        if (reportError != null) {
            try {
                reportError.close();
            } catch (Exception ex) {
                log.debug("exception closing reportError " + ex);
            }
        }

        try {
            connection.close();

        } catch (Exception ex) {
            log.debug("Exception closing connection " + ex);
        }

    }
    return !alldone;
}

From source file:org.springframework.batch.item.database.AbstractCursorItemReader.java

/**
 * Throw a SQLWarningException if we're not ignoring warnings, else log the
 * warnings (at debug level).
 *
 * @param statement the current statement to obtain the warnings from, if there are any.
 * @throws SQLException if interaction with provided statement fails.
 *
 * @see org.springframework.jdbc.SQLWarningException
 */
protected void handleWarnings(Statement statement) throws SQLWarningException, SQLException {
    if (ignoreWarnings) {
        if (log.isDebugEnabled()) {
            SQLWarning warningToLog = statement.getWarnings();
            while (warningToLog != null) {
                log.debug("SQLWarning ignored: SQL state '" + warningToLog.getSQLState() + "', error code '"
                        + warningToLog.getErrorCode() + "', message [" + warningToLog.getMessage() + "]");
                warningToLog = warningToLog.getNextWarning();
            }
        }
    } else {
        SQLWarning warnings = statement.getWarnings();
        if (warnings != null) {
            throw new SQLWarningException("Warning not ignored", warnings);
        }
    }
}
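
When warnings are not ignored, the chain is wrapped in the thrown SQLWarningException, so a caller can still read each message. A minimal caller-side sketch, assuming (as in the constructor call above) that the SQLWarning chain is carried as the exception's cause; the reader and statement variables are hypothetical:

try {
    reader.handleWarnings(statement); // hypothetical call site, e.g. from a subclass
} catch (SQLWarningException ex) {
    // The SQLWarning chain was passed to the exception; recover it via getCause()
    SQLWarning warning = (SQLWarning) ex.getCause();
    while (warning != null) {
        log.warn("Unignored warning: " + warning.getMessage());
        warning = warning.getNextWarning();
    }
}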

From source file:org.springframework.jdbc.datasource.init.ScriptUtils.java

/**
 * Execute the given SQL script.
 * <p>Statement separators and comments will be removed before executing
 * individual statements within the supplied script.
 * <p><strong>Warning</strong>: this method does <em>not</em> release the
 * provided {@link Connection}.
 * @param connection the JDBC connection to use to execute the script; already
 * configured and ready to use
 * @param resource the resource (potentially associated with a specific encoding)
 * to load the SQL script from
 * @param continueOnError whether or not to continue without throwing an exception
 * in the event of an error
 * @param ignoreFailedDrops whether or not to continue in the event of specifically
 * an error on a {@code DROP} statement
 * @param commentPrefix the prefix that identifies single-line comments in the
 * SQL script &mdash; typically "--"
 * @param separator the script statement separator; defaults to
 * {@value #DEFAULT_STATEMENT_SEPARATOR} if not specified and falls back to
 * {@value #FALLBACK_STATEMENT_SEPARATOR} as a last resort; may be set to
 * {@value #EOF_STATEMENT_SEPARATOR} to signal that the script contains a
 * single statement without a separator
 * @param blockCommentStartDelimiter the <em>start</em> block comment delimiter; never
 * {@code null} or empty
 * @param blockCommentEndDelimiter the <em>end</em> block comment delimiter; never
 * {@code null} or empty
 * @throws ScriptException if an error occurred while executing the SQL script
 * @see #DEFAULT_STATEMENT_SEPARATOR
 * @see #FALLBACK_STATEMENT_SEPARATOR
 * @see #EOF_STATEMENT_SEPARATOR
 * @see org.springframework.jdbc.datasource.DataSourceUtils#getConnection
 * @see org.springframework.jdbc.datasource.DataSourceUtils#releaseConnection
 */
public static void executeSqlScript(Connection connection, EncodedResource resource, boolean continueOnError,
        boolean ignoreFailedDrops, String commentPrefix, @Nullable String separator,
        String blockCommentStartDelimiter, String blockCommentEndDelimiter) throws ScriptException {

    try {
        if (logger.isInfoEnabled()) {
            logger.info("Executing SQL script from " + resource);
        }
        long startTime = System.currentTimeMillis();

        String script;
        try {
            script = readScript(resource, commentPrefix, separator);
        } catch (IOException ex) {
            throw new CannotReadScriptException(resource, ex);
        }

        if (separator == null) {
            separator = DEFAULT_STATEMENT_SEPARATOR;
        }
        if (!EOF_STATEMENT_SEPARATOR.equals(separator) && !containsSqlScriptDelimiters(script, separator)) {
            separator = FALLBACK_STATEMENT_SEPARATOR;
        }

        List<String> statements = new LinkedList<>();
        splitSqlScript(resource, script, separator, commentPrefix, blockCommentStartDelimiter,
                blockCommentEndDelimiter, statements);

        int stmtNumber = 0;
        Statement stmt = connection.createStatement();
        try {
            for (String statement : statements) {
                stmtNumber++;
                try {
                    stmt.execute(statement);
                    int rowsAffected = stmt.getUpdateCount();
                    if (logger.isDebugEnabled()) {
                        logger.debug(rowsAffected + " returned as update count for SQL: " + statement);
                        SQLWarning warningToLog = stmt.getWarnings();
                        while (warningToLog != null) {
                            logger.debug("SQLWarning ignored: SQL state '" + warningToLog.getSQLState()
                                    + "', error code '" + warningToLog.getErrorCode() + "', message ["
                                    + warningToLog.getMessage() + "]");
                            warningToLog = warningToLog.getNextWarning();
                        }
                    }
                } catch (SQLException ex) {
                    boolean dropStatement = StringUtils.startsWithIgnoreCase(statement.trim(), "drop");
                    if (continueOnError || (dropStatement && ignoreFailedDrops)) {
                        if (logger.isDebugEnabled()) {
                            logger.debug(ScriptStatementFailedException.buildErrorMessage(statement, stmtNumber,
                                    resource), ex);
                        }
                    } else {
                        throw new ScriptStatementFailedException(statement, stmtNumber, resource, ex);
                    }
                }
            }
        } finally {
            try {
                stmt.close();
            } catch (Throwable ex) {
                logger.debug("Could not close JDBC Statement", ex);
            }
        }

        long elapsedTime = System.currentTimeMillis() - startTime;
        if (logger.isInfoEnabled()) {
            logger.info("Executed SQL script from " + resource + " in " + elapsedTime + " ms.");
        }
    } catch (Exception ex) {
        if (ex instanceof ScriptException) {
            throw (ScriptException) ex;
        }
        throw new UncategorizedScriptException(
                "Failed to execute database script from resource [" + resource + "]", ex);
    }
}
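
For reference, a minimal sketch of calling this method directly with the default prefixes and separators; the dataSource variable and the classpath script name schema.sql are placeholder assumptions (imports from org.springframework.core.io, org.springframework.core.io.support, and org.springframework.jdbc.datasource assumed):

Connection connection = DataSourceUtils.getConnection(dataSource);
try {
    ScriptUtils.executeSqlScript(connection,
            new EncodedResource(new ClassPathResource("schema.sql")), // placeholder script
            false,  // continueOnError
            false,  // ignoreFailedDrops
            ScriptUtils.DEFAULT_COMMENT_PREFIX,
            ScriptUtils.DEFAULT_STATEMENT_SEPARATOR,
            ScriptUtils.DEFAULT_BLOCK_COMMENT_START_DELIMITER,
            ScriptUtils.DEFAULT_BLOCK_COMMENT_END_DELIMITER);
} finally {
    DataSourceUtils.releaseConnection(connection, dataSource);
}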

From source file:org.voltdb.HsqlBackend.java

public void runDDL(String ddl) {
    try {
        //LOG.info("Executing " + ddl);
        Statement stmt = dbconn.createStatement();
        /*boolean success =*/ stmt.execute(ddl);
        SQLWarning warn = stmt.getWarnings();
        if (warn != null)
            sqlLog.warn(warn.getMessage());
        //LOG.info("SQL DDL execute result: " + (success ? "true" : "false"));
    } catch (SQLException e) {
        hostLog.l7dlog(Level.ERROR, LogKeys.host_Backend_RunDDLFailed.name(), new Object[] { ddl }, e);
    }

}