Example usage for java.sql Connection rollback

List of usage examples for java.sql Connection rollback

Introduction

On this page you can find example usages of java.sql Connection rollback.

Prototype

void rollback() throws SQLException;

Document

Undoes all changes made in the current transaction and releases any database locks currently held by this Connection object.
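
The usual pattern is to disable auto-commit, perform the updates, commit on success, and call rollback() in the catch block before restoring auto-commit. The sketch below illustrates that pattern in isolation; the DataSource field, the account table, and the transfer() method are hypothetical and only serve to show where rollback() fits.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import javax.sql.DataSource;

public class RollbackExample {

    private final DataSource dataSource; // assumed to be configured elsewhere (e.g. a connection pool)

    public RollbackExample(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    /** Moves an amount between two hypothetical account rows in a single transaction. */
    public void transfer(long fromId, long toId, long amount) throws SQLException {
        try (Connection conn = dataSource.getConnection()) {
            conn.setAutoCommit(false); // start a manual transaction
            try (PreparedStatement debit = conn.prepareStatement(
                        "UPDATE account SET balance = balance - ? WHERE id = ?");
                    PreparedStatement credit = conn.prepareStatement(
                        "UPDATE account SET balance = balance + ? WHERE id = ?")) {
                debit.setLong(1, amount);
                debit.setLong(2, fromId);
                debit.executeUpdate();

                credit.setLong(1, amount);
                credit.setLong(2, toId);
                credit.executeUpdate();

                conn.commit(); // both updates become visible together
            } catch (SQLException e) {
                conn.rollback(); // undo the partial changes and release any locks held by this connection
                throw e;
            } finally {
                conn.setAutoCommit(true); // restore the default before the connection is reused
            }
        }
    }
}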

Usage

From source file:com.thinkmore.framework.orm.hibernate.SimpleHibernateDao.java

/**
 * Executes a list of SQL statements in batches within a single transaction.
 * @param list the SQL statements to execute
 */
public void executeBatch(final List<String> list) {
    Connection conn = null;
    Statement st = null;
    try {
        conn = SessionFactoryUtils.getDataSource(getSessionFactory()).getConnection();
        conn.setAutoCommit(false); // manage the transaction manually
        st = conn.createStatement();
        for (int i = 0, j = list.size(); i < j; i++) {
            String sql = list.get(i);
            st.addBatch(sql);
            if ((i + 1) % 240 == 0) { // execute and commit every 240 statements
                st.executeBatch();
                conn.commit();
                st.clearBatch();
            }
        }
        // execute and commit any remaining statements
        st.executeBatch();
        conn.commit();
        st.clearBatch();
    } catch (Exception e) {
        try {
            if (conn != null) {
                conn.rollback();
            }
        } catch (SQLException e1) {
            e1.printStackTrace();
        }
        e.printStackTrace();
    } finally {
        closeAll(st, conn);
    }
}

From source file:hoot.services.controllers.osm.ChangesetResource.java

/**
 * Service method endpoint for uploading OSM changeset diff data
 *
 * @param changeset OSM changeset diff data
 * @param changesetId ID of the changeset being uploaded; changeset with the ID must already exist
 * @return response acknowledging the result of the update operation with updated entity ID 
 * information
 * @throws Exception
 * @see http://wiki.openstreetmap.org/wiki/API_0.6 and 
 * http://wiki.openstreetmap.org/wiki/OsmChange
 * @todo why can't I pass in changesetDiff as an XML doc instead of a string?
 */
@POST
@Path("/{changesetId}/upload")
@Consumes(MediaType.TEXT_XML)
@Produces(MediaType.TEXT_XML)
public Response upload(final String changeset, @PathParam("changesetId") final long changesetId,
        @QueryParam("mapId") final String mapId) throws Exception {
    Connection conn = DbUtils.createConnection();
    Document changesetUploadResponse = null;
    try {
        log.debug("Intializing database connection...");

        log.debug("Intializing changeset upload transaction...");
        TransactionStatus transactionStatus = transactionManager
                .getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_REQUIRED));
        conn.setAutoCommit(false);

        try {
            if (mapId == null) {
                throw new Exception("Invalid map id.");
            }
            long mapid = Long.parseLong(mapId);
            changesetUploadResponse = (new ChangesetDbWriter(conn)).write(mapid, changesetId, changeset);
        } catch (Exception e) {
            log.error("Rolling back transaction for changeset upload...");
            transactionManager.rollback(transactionStatus);
            conn.rollback();
            handleError(e, changesetId, StringUtils.abbreviate(changeset, 100));
        }

        log.debug("Committing changeset upload transaction...");
        transactionManager.commit(transactionStatus);
        conn.commit();
    } finally {
        conn.setAutoCommit(true);
        DbUtils.closeConnection(conn);
    }

    log.debug("Returning changeset upload response: "
            + StringUtils.abbreviate(XmlDocumentBuilder.toString(changesetUploadResponse), 100) + " ...");
    return Response.ok(new DOMSource(changesetUploadResponse), MediaType.TEXT_XML)
            .header("Content-type", MediaType.TEXT_XML).build();
}

From source file:com.cloudera.sqoop.manager.SqlManager.java

@Override
public void migrateData(String fromTable, String toTable) throws SQLException {
    release(); // Release any previous ResultSet
    String updateQuery = "INSERT INTO " + toTable + " ( SELECT * FROM " + fromTable + " )";

    String deleteQuery = "DELETE FROM " + fromTable;
    Statement stmt = null;
    try {
        Connection conn = getConnection();
        stmt = conn.createStatement();

        // Insert data from the fromTable to the toTable
        int updateCount = stmt.executeUpdate(updateQuery);
        LOG.info("Migrated " + updateCount + " records from " + fromTable + " to " + toTable);

        // Delete the records from the fromTable
        int deleteCount = stmt.executeUpdate(deleteQuery);

        // If the counts do not match, fail the transaction
        if (updateCount != deleteCount) {
            conn.rollback();
            throw new RuntimeException("Inconsistent record counts");
        }
        conn.commit();
    } catch (SQLException ex) {
        LOG.error("Unable to migrate data from " + fromTable + " to " + toTable, ex);
        throw ex;
    } finally {
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException ex) {
                LOG.error("Unable to close statement", ex);
            }
        }
    }
}

From source file:com.cloudera.sqoop.manager.OracleManager.java

@Override
public String[] getColumnNames(String tableName) {
    Connection conn = null;
    PreparedStatement pStmt = null;
    ResultSet rset = null;
    List<String> columns = new ArrayList<String>();

    try {
        conn = getConnection();

        pStmt = conn.prepareStatement(QUERY_COLUMNS_FOR_TABLE, ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY);
        pStmt.setString(1, tableName);
        rset = pStmt.executeQuery();

        while (rset.next()) {
            columns.add(rset.getString(1));
        }
        conn.commit();
    } catch (SQLException e) {
        try {
            conn.rollback();
        } catch (Exception ex) {
            LOG.error("Failed to rollback transaction", ex);
        }
        LOG.error("Failed to list columns", e);
    } finally {
        if (rset != null) {
            try {
                rset.close();
            } catch (SQLException ex) {
                LOG.error("Failed to close resultset", ex);
            }
        }
        if (pStmt != null) {
            try {
                pStmt.close();
            } catch (Exception ex) {
                LOG.error("Failed to close statement", ex);
            }
        }

        try {
            close();
        } catch (SQLException ex) {
            LOG.error("Unable to discard connection", ex);
        }
    }

    return columns.toArray(new String[columns.size()]);
}

From source file:dbutils.DbUtilsTemplate.java

/**
 * Executes an insert statement and returns the generated key.
 *
 * @param sql    the insert SQL statement
 * @param params the statement parameters
 * @return the generated key (or -1 if the insert affected no rows)
 * @throws SQLException
 */
public long insert(String sql, Object[] params) throws SQLException {
    long result = -1L;
    Connection conn = null;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = dataSource.getConnection();
        stmt = conn.prepareStatement(sql);
        fillStatement(stmt, params);
        int affectCount = stmt.executeUpdate();
        if (affectCount <= 0)
            return -1L;
        rs = stmt.getGeneratedKeys();
        result = rs.next() ? rs.getLong(1) : -1;
        conn.commit();
    } catch (SQLException e) {
        LOG.error("Error occured while attempting to insert data", e);
        if (conn != null) {
            conn.rollback();
        }
        throw e;
    } finally {
        DbUtils.closeQuietly(conn, stmt, rs);
    }
    return result;
}

From source file:com.cloudera.sqoop.manager.OracleManager.java

@Override
public String getPrimaryKey(String tableName) {
    Connection conn = null;
    PreparedStatement pStmt = null;
    ResultSet rset = null;
    List<String> columns = new ArrayList<String>();

    try {
        conn = getConnection();

        pStmt = conn.prepareStatement(QUERY_PRIMARY_KEY_FOR_TABLE, ResultSet.TYPE_FORWARD_ONLY,
                ResultSet.CONCUR_READ_ONLY);
        pStmt.setString(1, tableName);
        rset = pStmt.executeQuery();

        while (rset.next()) {
            columns.add(rset.getString(1));
        }
        conn.commit();
    } catch (SQLException e) {
        try {
            conn.rollback();
        } catch (Exception ex) {
            LOG.error("Failed to rollback transaction", ex);
        }
        LOG.error("Failed to list columns", e);
    } finally {
        if (rset != null) {
            try {
                rset.close();
            } catch (SQLException ex) {
                LOG.error("Failed to close resultset", ex);
            }
        }
        if (pStmt != null) {
            try {
                pStmt.close();
            } catch (Exception ex) {
                LOG.error("Failed to close statement", ex);
            }
        }

        try {
            close();
        } catch (SQLException ex) {
            LOG.error("Unable to discard connection", ex);
        }
    }

    if (columns.size() == 0) {
        // Table has no primary key
        return null;
    }

    if (columns.size() > 1) {
        // The primary key is a multi-column primary key. Warn the user.
        // TODO select the appropriate column instead of the first column based
        // on the datatype - giving preference to numerics over other types.
        LOG.warn("The table " + tableName + " " + "contains a multi-column primary key. Sqoop will default to "
                + "the column " + columns.get(0) + " only for this job.");
    }

    return columns.get(0);
}

From source file:org.opendatakit.persistence.engine.pgres.TaskLockImpl.java

private TaskLockTable doTransaction(TaskLockTable entity, long l)
        throws ODKEntityNotFoundException, ODKTaskLockException {
    boolean first;

    final List<String> stmts = new ArrayList<String>();

    String uri = entity.getUri();

    StringBuilder stringBuilder = new StringBuilder();
    String tableName = K_BQ + datastore.getDefaultSchemaName() + K_BQ + "." + K_BQ + TaskLockTable.TABLE_NAME
            + K_BQ;

    stringBuilder.append("'").append(user.getUriUser().replaceAll("'", "''")).append("'");
    String uriUserInline = stringBuilder.toString();
    stringBuilder.setLength(0);
    stringBuilder.append("'").append(uri.replaceAll("'", "''")).append("'");
    String uriLockInline = stringBuilder.toString();
    stringBuilder.setLength(0);
    stringBuilder.append("'").append(entity.getFormId().replaceAll("'", "''")).append("'");
    String formIdInline = stringBuilder.toString();
    stringBuilder.setLength(0);
    stringBuilder.append("'").append(entity.getTaskType().replaceAll("'", "''")).append("'");
    String taskTypeInline = stringBuilder.toString();
    stringBuilder.setLength(0);
    stringBuilder.append("interval '").append(l).append(" milliseconds'");
    String lifetimeIntervalMilliseconds = stringBuilder.toString();
    stringBuilder.setLength(0);

    stringBuilder.append("LOCK TABLE ").append(tableName).append(" IN ACCESS EXCLUSIVE MODE");
    stmts.add(stringBuilder.toString());
    stringBuilder.setLength(0);

    dam.recordPutUsage(TaskLockTable.TABLE_NAME);
    if (!entity.isFromDatabase()) {
        // insert a new record (prospective lock)
        stringBuilder.append("INSERT INTO ");
        stringBuilder.append(tableName);
        stringBuilder.append(" (");
        first = true;
        for (DataField dataField : entity.getFieldList()) {
            if (!first) {
                stringBuilder.append(",");
            }
            first = false;
            stringBuilder.append(K_BQ);
            stringBuilder.append(dataField.getName());
            stringBuilder.append(K_BQ);
        }
        first = true;
        stringBuilder.append(") VALUES ( ");
        for (DataField dataField : entity.getFieldList()) {
            if (!first) {
                stringBuilder.append(",");
            }
            first = false;
            if (dataField.equals(entity.creationDate) || dataField.equals(entity.lastUpdateDate)) {
                stringBuilder.append("NOW()");
            } else if (dataField.equals(entity.creatorUriUser) || dataField.equals(entity.lastUpdateUriUser)) {
                stringBuilder.append(uriUserInline);
            } else if (dataField.equals(entity.formId)) {
                stringBuilder.append(formIdInline);
            } else if (dataField.equals(entity.taskType)) {
                stringBuilder.append(taskTypeInline);
            } else if (dataField.equals(entity.primaryKey)) {
                stringBuilder.append(uriLockInline);
            } else if (dataField.equals(entity.expirationDateTime)) {
                stringBuilder.append(" NOW() + ");
                stringBuilder.append(lifetimeIntervalMilliseconds);
            } else {
                throw new IllegalStateException("unexpected case " + dataField.getName());
            }
        }
        stringBuilder.append(")");
        stmts.add(stringBuilder.toString());
        stringBuilder.setLength(0);
    } else {
        // update existing record (prospective lock)
        stringBuilder.append("UPDATE ");
        stringBuilder.append(tableName);
        stringBuilder.append(" SET ");
        first = true;
        for (DataField f : entity.getFieldList()) {
            if (f == entity.primaryKey)
                continue;
            if (!first) {
                stringBuilder.append(",");
            }
            first = false;
            stringBuilder.append(K_BQ);
            stringBuilder.append(f.getName());
            stringBuilder.append(K_BQ);
            stringBuilder.append(" = ");
            if (f.equals(entity.creationDate) || f.equals(entity.lastUpdateDate)) {
                stringBuilder.append("NOW()");
            } else if (f.equals(entity.creatorUriUser) || f.equals(entity.lastUpdateUriUser)) {
                stringBuilder.append(uriUserInline);
            } else if (f.equals(entity.formId)) {
                stringBuilder.append(formIdInline);
            } else if (f.equals(entity.taskType)) {
                stringBuilder.append(taskTypeInline);
            } else if (f.equals(entity.primaryKey)) {
                stringBuilder.append(uriLockInline);
            } else if (f.equals(entity.expirationDateTime)) {
                stringBuilder.append(" NOW() + ");
                stringBuilder.append(lifetimeIntervalMilliseconds);
            } else {
                throw new IllegalStateException("unexpected case " + f.getName());
            }
        }
        stringBuilder.append(" WHERE ");
        stringBuilder.append(K_BQ);
        stringBuilder.append(entity.primaryKey.getName());
        stringBuilder.append(K_BQ);
        stringBuilder.append(" = ");
        stringBuilder.append(uriLockInline);
        stmts.add(stringBuilder.toString());
        stringBuilder.setLength(0);
    }
    // delete stale locks (don't care whose)
    dam.recordDeleteUsage(TaskLockTable.TABLE_NAME);
    stringBuilder.append("DELETE FROM ").append(tableName).append(" WHERE ");
    stringBuilder.append(K_BQ).append(entity.expirationDateTime.getName()).append(K_BQ).append(" <= NOW()");
    stmts.add(stringBuilder.toString());
    stringBuilder.setLength(0);
    // delete prospective locks which are not the oldest for that resource and
    // task type
    dam.recordDeleteUsage(TaskLockTable.TABLE_NAME);
    stringBuilder.append("DELETE FROM ").append(tableName).append(" WHERE ");
    stringBuilder.append(K_BQ).append(entity.formId.getName()).append(K_BQ).append(" = ").append(formIdInline)
            .append(" AND ");
    stringBuilder.append(K_BQ).append(entity.taskType.getName()).append(K_BQ).append(" = ")
            .append(taskTypeInline).append(" AND ");
    stringBuilder.append(K_BQ).append(entity.expirationDateTime.getName()).append(K_BQ);
    stringBuilder.append(" > (SELECT MIN(t3.").append(K_BQ).append(entity.expirationDateTime.getName())
            .append(K_BQ);
    stringBuilder.append(") FROM ").append(tableName).append(" AS t3 WHERE t3.");
    stringBuilder.append(K_BQ).append(entity.formId.getName()).append(K_BQ).append(" = ").append(formIdInline)
            .append(" AND t3.");
    stringBuilder.append(K_BQ).append(entity.taskType.getName()).append(K_BQ).append(" = ")
            .append(taskTypeInline).append(")");
    stmts.add(stringBuilder.toString());
    stringBuilder.setLength(0);
    // delete our entry if it collides with another entry with exactly 
    // this time.
    stringBuilder.append("DELETE FROM ").append(tableName).append(" WHERE ");
    stringBuilder.append(K_BQ).append(entity.formId.getName()).append(K_BQ).append(" = ").append(formIdInline)
            .append(" AND ");
    stringBuilder.append(K_BQ).append(entity.taskType.getName()).append(K_BQ).append(" = ")
            .append(taskTypeInline).append(" AND ");
    stringBuilder.append(K_BQ).append(entity.primaryKey.getName()).append(K_BQ).append(" = ")
            .append(uriLockInline).append(" AND ");
    stringBuilder.append("1 < (SELECT COUNT(t3.").append(K_BQ).append(entity.expirationDateTime.getName())
            .append(K_BQ);
    stringBuilder.append(") FROM ").append(tableName).append(" AS t3 WHERE t3.");
    stringBuilder.append(K_BQ).append(entity.formId.getName()).append(K_BQ).append(" = ").append(formIdInline)
            .append(" AND t3.");
    stringBuilder.append(K_BQ).append(entity.taskType.getName()).append(K_BQ).append(" = ")
            .append(taskTypeInline).append(")");
    stmts.add(stringBuilder.toString());
    stringBuilder.setLength(0);
    // assert: only the lock that holds the resource for that task type appears
    // in the task lock table
    TaskLockTable relation;
    try {

        JdbcTemplate jdbc = datastore.getJdbcConnection();
        jdbc.execute(new ConnectionCallback<Object>() {

            @Override
            public Object doInConnection(Connection conn) throws SQLException, DataAccessException {
                boolean oldAutoCommitValue = conn.getAutoCommit();
                int oldTransactionValue = conn.getTransactionIsolation();
                try {
                    conn.setAutoCommit(false);
                    conn.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE);
                    Statement stmt = conn.createStatement();
                    for (String s : stmts) {
                        // for debugging: LogFactory.getLog(TaskLockImpl.class).info(s);
                        stmt.execute(s);
                    }
                    conn.commit();
                } catch (PSQLException e) {
                    e.printStackTrace();
                    conn.rollback();
                } catch (Exception e) {
                    e.printStackTrace();
                    conn.rollback();
                }
                conn.setTransactionIsolation(oldTransactionValue);
                conn.setAutoCommit(oldAutoCommitValue);
                return null;
            }

        });

        relation = TaskLockTable.assertRelation(datastore, user);
    } catch (Exception e) {
        throw new ODKTaskLockException(PERSISTENCE_LAYER_PROBLEM, e);
    }
    return (TaskLockTable) datastore.getEntity(relation, entity.getUri(), user);
}

From source file:dao.PblogMessageDaoDb.java

public void addPblogMessage(String tid, String mid, String message, String topic, String userId,
        String userLogin, String pblogId, String fontSize, String fontFace, String fontColor, String moodId,
        String bgColor) throws BaseDaoException {

    if (RegexStrUtil.isNull(message) && RegexStrUtil.isNull(topic)) {
        throw new BaseDaoException("message & topic are null");
    }

    if (RegexStrUtil.isNull(tid) || RegexStrUtil.isNull(mid) || RegexStrUtil.isNull(userId)
            || RegexStrUtil.isNull(pblogId)) {
        throw new BaseDaoException("params are null");
    }

    /**
     *  Get scalability datasource for pblogmessages partitioned on pblogId
     */
    String sourceName = scalabilityManager.getWriteScalability(pblogId);
    ds = scalabilityManager.getSource(sourceName);
    if (ds == null) {
        throw new BaseDaoException("ds null, addPblogMessage() " + sourceName + " pblogId = " + pblogId);
    }
    Connection conn = null;
    try {
        conn = ds.getConnection();
        conn.setAutoCommit(false);
        addQuery.run(conn, tid, mid, message, topic, userId, pblogId);
        addAttrQuery.run(conn, tid, "LAST_INSERT_ID()", fontSize, fontFace, fontColor, moodId, bgColor, userId);
    } catch (Exception e) {
        try {
            conn.rollback();
        } catch (Exception e1) {
            try {
                if (conn != null) {
                    conn.setAutoCommit(true);
                    conn.close();
                }
            } catch (Exception e2) {
                throw new BaseDaoException("conn.close() exception", e2);
            }
            throw new BaseDaoException("error rollingback entries from pblogmessageDao", e1);
        }
        throw new BaseDaoException("error adding entries, pblogmessagesDao", e);
    }

    try {
        conn.commit();
    } catch (Exception e3) {
        throw new BaseDaoException("conn.commit exception", e3);
    }

    try {
        if (conn != null) {
            conn.setAutoCommit(true);
            conn.close();
        }
    } catch (Exception e4) {
        throw new BaseDaoException("conn.close exception, setAutoCommit(true) pblogMessageDao ", e4);
    }

    Fqn fqn = cacheUtil.fqn(DbConstants.PBLOG_TOPIC_LIST);
    if (treeCache.exists(fqn, pblogId)) {
        treeCache.remove(fqn, pblogId);
    }

    fqn = cacheUtil.fqn(DbConstants.PBLOG_DAILY_LIST);
    if (treeCache.exists(fqn, pblogId)) {
        treeCache.remove(fqn, pblogId);
    }

    fqn = cacheUtil.fqn(DbConstants.RECENT_PBLOG);
    if (treeCache.exists(fqn, pblogId)) {
        treeCache.remove(fqn, pblogId);
    }

    StringBuffer sb = new StringBuffer(pblogId);
    sb.append("-");
    sb.append(tid);
    fqn = cacheUtil.fqn(DbConstants.PBLOG_TOPIC);
    if (treeCache.exists(fqn, sb.toString())) {
        treeCache.remove(fqn, sb.toString());
    }

    fqn = cacheUtil.fqn(DbConstants.PBLOG_MESSAGES);
    if (treeCache.exists(fqn, sb.toString())) {
        treeCache.remove(fqn, sb.toString());
    }

    fqn = cacheUtil.fqn(DbConstants.PERSONAL_BLOG);
    if (treeCache.exists(fqn, pblogId)) {
        treeCache.remove(fqn, pblogId);
    }

    fqn = cacheUtil.fqn(DbConstants.PERSONAL_BLOG);
    if (treeCache.exists(fqn, pblogId)) {
        treeCache.remove(fqn, pblogId);
    }

    fqn = cacheUtil.fqn(DbConstants.POPULAR_BLOGS);
    if (treeCache.exists(fqn, DbConstants.POPULAR_BLOG_KEY)) {
        treeCache.remove(fqn, DbConstants.POPULAR_BLOG_KEY);
    }

    fqn = cacheUtil.fqn(DbConstants.RECENT_BLOGS);
    if (treeCache.exists(fqn, DbConstants.RECENT_BLOG_KEY)) {
        treeCache.remove(fqn, DbConstants.RECENT_BLOG_KEY);
    }

}

From source file:broadwick.data.readers.DataFileReader.java

/**
 * Execute a command to create a table.
 * @param tableName          the name of the table to be created.
 * @param createTableCommand the command to create the table.
 * @param connection         the database connection to use to create the table.
 * @throws SQLException if a SQL error has been encountered.
 */
protected final void createTable(final String tableName, final String createTableCommand,
        final Connection connection) throws SQLException {

    final DatabaseMetaData dbm = connection.getMetaData();

    // First check if the table already exists, some databases do not support
    // CREATE TABLE ??? IF NOT EXISTS
    // so we have to look at the database schema
    try (ResultSet resultSet = dbm.getTables(null, null, "%", null)) {
        boolean tableExists = false;
        while (resultSet.next()) {
            if (tableName.equalsIgnoreCase(resultSet.getString("TABLE_NAME"))) {
                log.debug("Table {} already exists, ignoring", tableName);
                tableExists = true;
            }
        }

        if (!tableExists) {
            try (Statement stmt = connection.createStatement()) {
                final String[] commands = createTableCommand.split(";");
                for (int i = 0; i < commands.length; i++) {
                    log.trace("Creating table {}", commands[i]);
                    stmt.execute(commands[i]);
                }
            } catch (SQLException sqle) {
                connection.rollback();
                log.error("Error while creating the table '{}'. {}", createTableCommand,
                        Throwables.getStackTraceAsString(sqle));
                throw sqle;
            }
        }

        //        } catch (Exception e) {
        //            log.error("Could not create database {}", Throwables.getStackTraceAsString(e));
    }

    connection.commit();
}

From source file:edu.uga.cs.fluxbuster.db.PostgresDBInterface.java

/**
 * Executes a prepared statement with no result.
 *
 * @param con the connection to the database
 * @param stmt the prepared statement to execute
 */
private void executePreparedStatementNoResult(Connection con, PreparedStatement stmt) {
    try {
        con.setAutoCommit(false);
        stmt.execute();
        con.commit();
    } catch (SQLException e) {
        if (log.isErrorEnabled()) {
            log.error("", e);
        }
        if (con != null) {
            try {
                con.rollback();
            } catch (SQLException e1) {
                if (log.isErrorEnabled()) {
                    log.error("Error during rollback.", e1);
                }
            }
        }
    } finally {
        try {
            if (con != null && !con.isClosed()) {
                con.setAutoCommit(true);
            }
        } catch (SQLException e) {
            if (log.isErrorEnabled()) {
                log.error("Error setting auto commit.", e);
            }
        }
    }
}