Example usage for java.sql Connection rollback

Introduction

This page collects real-world usage examples for java.sql.Connection.rollback(), drawn from open-source projects.

Prototype

void rollback() throws SQLException;

Document

Undoes all changes made in the current transaction and releases any database locks currently held by this Connection object.
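
Before the project examples, here is a minimal sketch of the canonical pattern rollback() belongs to: disable auto-commit, do the work, commit on success, roll back on failure, and close the connection in a finally block. The DataSource field and the account table are illustrative assumptions only, not taken from any example below.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import javax.sql.DataSource;

public class TransferExample {

    private final DataSource dataSource;

    public TransferExample(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public void transfer(long fromId, long toId, long amount) throws SQLException {
        Connection conn = dataSource.getConnection();
        try {
            conn.setAutoCommit(false); // rollback() is only meaningful outside auto-commit mode
            try (PreparedStatement debit = conn.prepareStatement(
                    "update account set balance = balance - ? where id = ?");
                    PreparedStatement credit = conn.prepareStatement(
                            "update account set balance = balance + ? where id = ?")) {
                debit.setLong(1, amount);
                debit.setLong(2, fromId);
                debit.executeUpdate();
                credit.setLong(1, amount);
                credit.setLong(2, toId);
                credit.executeUpdate();
            }
            conn.commit(); // both updates become permanent together
        } catch (SQLException e) {
            conn.rollback(); // undoes everything since the last commit and releases locks
            throw e;
        } finally {
            conn.close();
        }
    }
}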

Usage

From source file:net.gcolin.simplerepo.search.SearchController.java

public void add(final Repository repository, File pomFile, Model model) throws IOException {
    SearchResult result = buildResult(repository.getName(), pomFile, model);
    try {
        Connection connection = null;
        try {
            connection = datasource.getConnection();
            connection.setAutoCommit(false);
            QueryRunner run = new QueryRunner();
            Long artifactIdx = run.query(connection, "select id from artifact where groupId=? and artifactId=?",
                    getLong, result.getGroupId(), result.getArtifactId());
            if (artifactIdx == null) {
                artifactIdx = run.query(connection, "select artifact from artifactindex", getLong);
                run.update(connection, "update artifactindex set artifact=?", artifactIdx + 1);
                run.update(connection, "insert into artifact (id,groupId,artifactId) VALUES (?,?,?)",
                        artifactIdx, result.getGroupId(), result.getArtifactId());
            }
            Long versionId = run.query(connection, "select version from artifactindex", getLong);
            run.update(connection, "update artifactindex set version=?", versionId + 1);
            run.update(connection,
                    "insert into artifactversion(artifact_id,id,version,reponame) VALUES (?,?,?,?)",
                    artifactIdx, versionId, result.getVersion(), result.getRepoName());
            for (ResultType res : result.getTypes()) {
                run.update(connection,
                        "insert into artifacttype(version_id,packaging,classifier) VALUES (?,?,?)", versionId,
                        res.getName(), res.getClassifier());
            }
            connection.commit();
        } catch (SQLException ex) {
            if (connection != null) { // getConnection() may have thrown before assignment
                connection.rollback();
            }
            throw ex;
        } finally {
            DbUtils.close(connection);
        }
    } catch (SQLException ex) {
        logger.log(Level.SEVERE, null, ex);
        throw new IOException(ex);
    }
}
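
For comparison, the same method can be written with try-with-resources (Java 7+), which removes the manual close and the need for a null check before rollback(); a sketch under the same assumptions as above (datasource, the commons-dbutils QueryRunner calls, and the surrounding helpers):

public void add(final Repository repository, File pomFile, Model model) throws IOException {
    SearchResult result = buildResult(repository.getName(), pomFile, model);
    try (Connection connection = datasource.getConnection()) {
        connection.setAutoCommit(false);
        try {
            // ... the same QueryRunner inserts and updates as above ...
            connection.commit();
        } catch (SQLException ex) {
            connection.rollback(); // connection is guaranteed non-null here
            throw ex;
        }
    } catch (SQLException ex) {
        logger.log(Level.SEVERE, null, ex);
        throw new IOException(ex);
    }
}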

From source file:com.che.software.testato.domain.dao.jdbc.impl.UserDAO.java

/**
 * Creates a user from his properties bean.
 *
 * @author Clement HELIOU (clement.heliou@che-software.com).
 * @param userToCreateBean the properties to use to create the user.
 * @throws UserCreationDAOException if an error occurs during the creation.
 */
@Override
public void createUserFromBean(UserCreation userToCreateBean) throws UserCreationDAOException {
    LOGGER.debug("createUserFromBean(" + userToCreateBean.getLogin() + ").");
    Connection connection = null;
    try {
        connection = getDataSource().getConnection();
        connection.setAutoCommit(false);
        getQueryRunner().update(connection,
                "INSERT INTO \"user\"(user_id, \"login\", \"password\", \"name\", last_name, creation_date, job) VALUES(nextval('user_seq'), ?, md5(?), ?, ?, NOW(), ?) ",
                new Object[] { userToCreateBean.getLogin(), userToCreateBean.getPassword(),
                        userToCreateBean.getName().toUpperCase(), userToCreateBean.getLastName(),
                        userToCreateBean.getJob() });
        Integer createdUserID = (Integer) getQueryRunner().query(connection,
                "SELECT user_id::int FROM \"user\" WHERE \"login\" = ? ", new ScalarHandler("user_id"),
                new Object[] { userToCreateBean.getLogin() });
        getQueryRunner().update(connection, "INSERT INTO user_role(user_id, role) VALUES(?, 'ROLE_USER') ",
                new Object[] { createdUserID });
        if (userToCreateBean.isAdministrator()) {
            getQueryRunner().update(connection, "INSERT INTO user_role(user_id, role) VALUES(?, 'ROLE_ADMIN') ",
                    new Object[] { createdUserID });
        }
        if (userToCreateBean.isResponsable()) {
            getQueryRunner().update(connection, "INSERT INTO user_role(user_id, role) VALUES(?, 'ROLE_RESP') ",
                    new Object[] { createdUserID });
        }
        getQueryRunner().update(connection,
                "INSERT INTO user_service(user_id, service_id, hired_date) VALUES(?, ?, NOW()) ",
                new Object[] { createdUserID, userToCreateBean.getServiceId() });
        connection.commit();
    } catch (SQLException e) {
        if (connection != null) { // getConnection() may have thrown before assignment
            try {
                connection.rollback();
            } catch (SQLException e1) {
                throw new UserCreationDAOException(e1);
            }
        }
        throw new UserCreationDAOException(e);
    } finally {
        if (null != connection) {
            DbUtils.closeQuietly(connection);
        }
    }
}
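
The null guard before rollback() matters because getConnection() itself can fail, leaving the variable unassigned. Commons DbUtils, which this example already uses for closeQuietly, also ships null-safe rollback helpers; if your version provides them, the whole catch block can shrink to roughly:

} catch (SQLException e) {
    // null-safe: no-op on a null connection, swallows rollback/close errors
    DbUtils.rollbackAndCloseQuietly(connection);
    throw new UserCreationDAOException(e);
}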

From source file:com.netflix.metacat.usermetadata.mysql.MysqlUserMetadataService.java

@Override
public void deleteMetadatas(final String userId, final List<HasMetadata> holders) {
    try {
        final Connection conn = poolingDataSource.getConnection();
        try {
            final List<List<HasMetadata>> subLists = Lists.partition(holders,
                    config.getUserMetadataMaxInClauseItems());
            for (List<HasMetadata> hasMetadatas : subLists) {
                final List<QualifiedName> names = hasMetadatas.stream()
                        .filter(m -> m instanceof HasDefinitionMetadata)
                        .map(m -> ((HasDefinitionMetadata) m).getDefinitionName()).collect(Collectors.toList());
                if (!names.isEmpty()) {
                    _deleteDefinitionMetadatas(conn, names);
                }
                if (config.canSoftDeleteDataMetadata()) {
                    final List<String> uris = hasMetadatas.stream()
                            .filter(m -> m instanceof HasDataMetadata && ((HasDataMetadata) m).isDataExternal())
                            .map(m -> ((HasDataMetadata) m).getDataUri()).collect(Collectors.toList());
                    if (!uris.isEmpty()) {
                        _softDeleteDataMetadatas(conn, userId, uris);
                    }
                }
            }
            conn.commit();
        } catch (SQLException e) {
            conn.rollback();
            throw e;
        } finally {
            conn.close();
        }
    } catch (SQLException e) {
        log.error("Sql exception", e);
        throw new UserMetadataServiceException("Failed deleting data metadata", e);
    }
}
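
A note on the pattern: the holders list is partitioned (Guava's Lists.partition) so each generated IN clause stays under the configured maximum, yet all partitions share one transaction — a single commit() at the end, and one rollback() undoes every partial delete if any chunk fails.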

From source file:com.oic.event.RegisterProfile.java

@Override
public void ActionEvent(JSONObject json, WebSocketListener webSocket) {
    JSONObject responseJSON = new JSONObject();
    responseJSON.put("method", "setprofile");
    if (!validation(json, webSocket)) {
        return;
    }
    Connection con = null;
    PreparedStatement ps;
    try {
        con = DatabaseConnection.getConnection();
        con.setAutoCommit(false);
        String sql = "INSERT INTO user SET studentnumber = ?, name = ?, avatarid = ?, grade = ?, sex = ?, birth = ?, comment = ?, secretkey = ?";
        ps = con.prepareStatement(sql);
        ps.setString(1, json.get("studentid").toString());
        ps.setString(2, json.get("username").toString());
        ps.setInt(3, Integer.parseInt(json.get("avatarid").toString()));
        ps.setInt(4, Integer.parseInt(json.get("grade").toString()));
        ps.setInt(5, Integer.parseInt(json.get("gender").toString()));
        ps.setDate(6, toDate(json.get("birthday").toString()));
        ps.setString(7, json.get("comment").toString());
        ps.setString(8, json.get("secretkey").toString());
        ps.executeUpdate();
        ps.close();

        sql = "SELECT last_insert_id() AS last";
        ps = con.prepareStatement(sql);
        ResultSet rs = ps.executeQuery();
        if (!rs.next()) {
            throw new SQLException();
        }
        long userid = rs.getLong("last");
        rs.close();
        ps.close();

        sql = "INSERT INTO setting SET userid = ?, privategrade = ?, privatesex = ?, privatebirth = ?";
        ps = con.prepareStatement(sql);
        ps.setLong(1, userid);
        ps.setInt(2, Integer.parseInt(json.get("vgrade").toString()));
        ps.setInt(3, Integer.parseInt(json.get("vgender").toString()));
        ps.setInt(4, Integer.parseInt(json.get("vbirthday").toString()));
        ps.executeUpdate();
        ps.close();
        con.commit();

        responseJSON.put("status", 0);
        webSocket.userNoLogin();
    } catch (Exception e) {
        try {
            if (con != null) { // getConnection() may have thrown before assignment
                con.rollback();
            }
        } catch (SQLException sq) {
            LOG.warning("[setProfile]Error Rolling back.");
        }
        e.printStackTrace();
        responseJSON.put("status", 1);
    } finally {
        try {
            if (con != null) {
                con.setAutoCommit(true);
            }
        } catch (SQLException ex) {
            Logger.getLogger(RegisterProfile.class.getName()).log(Level.WARNING,
                    "Error going back to AutoCommit mode", ex);
        }
    }
    webSocket.sendJson(responseJSON);
}
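
As an aside, the second round trip (SELECT last_insert_id()) can usually be avoided with JDBC's generated-keys API; a sketch of the idea, reusing the same con and INSERT statement from above:

// Sketch: read the auto-generated key straight back from the INSERT.
ps = con.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
// ... bind the user columns as above ...
ps.executeUpdate();
long userid;
try (ResultSet keys = ps.getGeneratedKeys()) {
    if (!keys.next()) {
        throw new SQLException("no generated key returned");
    }
    userid = keys.getLong(1);
}
ps.close();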

From source file:com.concursive.connect.web.modules.productcatalog.beans.OrderBean.java

/**
 * Inserts the order and all of its related records inside a single transaction.
 *
 * @param db the database connection
 * @return true if the order was saved
 * @throws SQLException if the order could not be saved
 */
public boolean insert(Connection db) throws SQLException {
    try {
        db.setAutoCommit(false);
        // Insert the base order
        PreparedStatement pst = db.prepareStatement("INSERT INTO customer_order "
                + "(ipaddress, browser, total_price, order_by) VALUES (?,?,?,?) ");
        int i = 0;
        pst.setString(++i, ipAddress);
        pst.setString(++i, browser);
        pst.setDouble(++i, productList.getTotalPrice());
        DatabaseUtils.setInt(pst, ++i, userId);
        pst.execute();
        pst.close();
        id = DatabaseUtils.getCurrVal(db, "customer_order_order_id_seq", -1);
        // Insert the products
        productList.setOrderId(id);
        productList.insert(db);
        // Insert the contact info
        if (contactInformation.isValid()) {
            contactInformation.setOrderId(id);
            contactInformation.insert(db);
        }
        // Insert the addresses
        if (billing.isValid()) {
            billing.setOrderId(id);
            billing.insert(db);
        }
        if (shipping.isValid()) {
            shipping.setOrderId(id);
            shipping.insert(db);
        }
        // Insert the payment info
        if (payment.isValid()) {
            payment.setOrderId(id);
            payment.setChargeAmount(getChargeAmount());
            payment.insert(db);
        }
        db.commit();
        // Finalize
        saved = true;
        return true;
    } catch (Exception e) {
        db.rollback();
        LOG.error("insert", e);
        throw new SQLException("Could not save", e); // preserve the original cause
    } finally {
        db.setAutoCommit(true);
    }
}
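
Restoring auto-commit in the finally block matters here because the Connection is passed in from outside: the caller (often a pool) gets it back in its default state whether the insert committed or rolled back.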

From source file:com.gtwm.pb.servlets.ServletSchemaMethods.java

private static void rollbackConnections(Connection conn) {
    try {
        if (conn != null) {
            logger.error("rolling back sql...");
            conn.rollback();
            logger.error("sql successfully rolled back");
        }
    } catch (SQLException sqlex) {
        logger.error("oh no! another sql exception was thrown: " + sqlex);
        sqlex.printStackTrace();
        // don't rethrow, may just be because no SQL has been sent since
        // transaction start
    }
    logger.error("rolling back hibernate...");
    HibernateUtil.currentSession().getTransaction().rollback();
    logger.error("hibernate successfully rolled back");
}
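
As the inline comment hints, some drivers raise an error when rollback() is called on a transaction that has not executed any statements, so the SQLException is logged and swallowed rather than rethrown — that way the Hibernate rollback below still gets its chance to run.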

From source file:datawarehouse.CSVLoader.java

/**
 * Parse CSV file using OpenCSV library and load in given database table.
 *
 * @param csvFile Input CSV file
 * @param tableName Database table name to import data
 * @param truncateBeforeLoad Truncate the table before inserting new
 * records.
 * @throws Exception
 */
public void loadCSV(String csvFile, String tableName, boolean truncateBeforeLoad) throws Exception {

    CSVReader csvReader = null;
    if (null == this.connection) {
        throw new Exception("Not a valid connection.");
    }
    try {

        csvReader = new CSVReader(new FileReader(csvFile), this.seprator);

    } catch (Exception e) {
        e.printStackTrace();
        throw new Exception("Error occured while executing file. " + e.getMessage());
    }

    String[] headerRow = csvReader.readNext();

    if (null == headerRow) {
        throw new FileNotFoundException(
                "No columns defined in given CSV file. " + "Please check the CSV file format.");
    }

    String questionmarks = StringUtils.repeat("?,", headerRow.length);
    questionmarks = (String) questionmarks.subSequence(0, questionmarks.length() - 1);

    String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName);
    query = query.replaceFirst(KEYS_REGEX, StringUtils.join(headerRow, ","));
    query = query.replaceFirst(VALUES_REGEX, questionmarks);

    System.out.println("Query: " + query);

    String[] nextLine;
    Connection con = null;
    PreparedStatement ps = null;
    try {
        con = this.connection;
        con.setAutoCommit(false);
        ps = con.prepareStatement(query);

        if (truncateBeforeLoad) {
            //delete data from table before loading csv
            con.createStatement().execute("DELETE FROM " + tableName);
        }

        final int batchSize = 1000;
        int count = 0;
        while ((nextLine = csvReader.readNext()) != null) {
            if (null != nextLine) {
                int index = 1;
                for (String string : nextLine) {
                    //System.out.print(string + ": ");
                    try {
                        DateFormat format = new SimpleDateFormat("dd.MM.yyyy"); // MM = month; lowercase mm would parse minutes
                        Date date = format.parse(string);
                        ps.setDate(index++, new java.sql.Date(date.getTime()));
                        //System.out.println("date");
                    } catch (ParseException | SQLException e) {
                        try {
                            Double income = parseDouble(string.replace(",", "."));
                            ps.setDouble(index++, income);
                            //System.out.println("double");
                        } catch (NumberFormatException | SQLException err) {
                            ps.setString(index++, string);
                            //System.out.println("string");
                        }
                    }
                }
                ps.addBatch();
            }
            if (++count % batchSize == 0) {
                ps.executeBatch();
            }
        }
        ps.executeBatch(); // insert remaining records
        con.commit();
    } catch (Exception e) {
        con.rollback();
        e.printStackTrace();
        throw new Exception("Error occured while loading data from file to database." + e.getMessage());
    } finally {
        if (null != ps) {
            ps.close();
        }
        if (null != con) {
            con.close();
        }

        csvReader.close();
    }
}
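
Note the per-cell type sniffing: each value is first tried as a date (dd.MM.yyyy — capital MM, since lowercase mm would parse minutes), then as a double with comma-to-dot normalization, and finally falls back to a plain string. A failure in any batch rolls back the whole load, including the optional pre-load DELETE.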

From source file:org.apache.hadoop.hive.metastore.txn.CompactionTxnHandler.java

/**
 * Find entries in the queue that are ready to be cleaned.
 * @return information on the entries in the queue.
 */
public List<CompactionInfo> findReadyToClean() throws MetaException {
    Connection dbConn = null;
    List<CompactionInfo> rc = new ArrayList<CompactionInfo>();

    Statement stmt = null;
    try {
        try {
            dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
            stmt = dbConn.createStatement();
            String s = "select cq_id, cq_database, cq_table, cq_partition, "
                    + "cq_type, cq_run_as from COMPACTION_QUEUE where cq_state = '" + READY_FOR_CLEANING + "'";
            LOG.debug("Going to execute query <" + s + ">");
            ResultSet rs = stmt.executeQuery(s);
            while (rs.next()) {
                CompactionInfo info = new CompactionInfo();
                info.id = rs.getLong(1);
                info.dbname = rs.getString(2);
                info.tableName = rs.getString(3);
                info.partName = rs.getString(4);
                switch (rs.getString(5).charAt(0)) {
                case MAJOR_TYPE:
                    info.type = CompactionType.MAJOR;
                    break;
                case MINOR_TYPE:
                    info.type = CompactionType.MINOR;
                    break;
                default:
                    throw new MetaException("Unexpected compaction type " + rs.getString(5));
                }
                info.runAs = rs.getString(6);
                rc.add(info);
            }
            LOG.debug("Going to rollback");
            dbConn.rollback();
            return rc;
        } catch (SQLException e) {
            LOG.error("Unable to select next element for cleaning, " + e.getMessage());
            LOG.debug("Going to rollback");
            rollbackDBConn(dbConn);
            checkRetryable(dbConn, e, "findReadyToClean");
            throw new MetaException(
                    "Unable to connect to transaction database " + StringUtils.stringifyException(e));
        } finally {
            closeDbConn(dbConn);
            closeStmt(stmt);
        }
    } catch (RetryException e) {
        return findReadyToClean();
    }
}
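
rollback() here is not error handling: findReadyToClean() only reads from COMPACTION_QUEUE, so the transaction is deliberately ended with a rollback instead of a commit, releasing database resources without persisting anything.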

From source file:hoot.services.db.DbUtils.java

/**
 * Clears all data in all resource related tables in the database
 *
 * @param conn JDBC Connection
 * @throws Exception
 *           if any records still exist in the table after the attempted
 *           deletion
 */

public static void clearServicesDb(Connection conn) throws Exception {
    try {
        deleteMapRelatedTables();
        conn.setAutoCommit(false);
        Configuration configuration = getConfiguration();

        SQLDeleteClause delete = new SQLDeleteClause(conn, configuration, QCurrentWayNodes.currentWayNodes);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QCurrentRelationMembers.currentRelationMembers);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QCurrentNodes.currentNodes);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QCurrentWays.currentWays);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QCurrentRelations.currentRelations);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QChangesets.changesets);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QMaps.maps);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QUsers.users);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QReviewItems.reviewItems);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QElementIdMappings.elementIdMappings);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QReviewMap.reviewMap);
        delete.execute();
        delete = new SQLDeleteClause(conn, configuration, QJobStatus.jobStatus);
        delete.execute();
        conn.commit();
    } catch (Exception e) {
        conn.rollback();
        String msg = "Error clearing services database.";
        if (e.getCause() != null) { // getCause() may be null
            msg += "  " + e.getCause().getMessage();
        }
        throw new Exception(msg, e);
    } finally {
        conn.setAutoCommit(true);
    }
}
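
The deletes run children-first (way nodes and relation members before ways and relations, then changesets, maps, and users) so foreign-key constraints hold at every step, and the single commit() makes the wipe atomic — any failure rolls the whole clear-out back.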

From source file:dao.PblogTopicDaoDb.java

/**
 * update a topic in personal blog - allows the user to personalize the information
 * @param tid - topic id
 * @param topic - topic 
 * @param message - message (can include HTML/digital content)
 * @param userId - user id of the blogger
 * @param userLogin - user login of the blogger
 * @param fontSize - font size for the topic
 * @param fontFace - font face for the topic
 * @param fontColor - font color for the topic
 * @param moodId - mood id for this topic
 * @param bgColor - background color for the topic
 * @throws BaseDaoException If we have a problem interpreting the data or the data is missing or incorrect
*/
public void updatePblogTopic(String tid, String topic, String message, String userId, String userLogin,
        String fontSize, String fontFace, String fontColor, String moodId, String bgColor)
        throws BaseDaoException {
    /**
     * either one of them should exist
     */
    if (RegexStrUtil.isNull(topic) && RegexStrUtil.isNull(message)) {
        throw new BaseDaoException("topic and message are null");
    }

    /**
     * userid same as pblogid
     */
    if (RegexStrUtil.isNull(tid) || RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(userLogin)) {
        throw new BaseDaoException("params are null");
    }

    /**
     *  Get scalability datasource for pblogtopics - partition on userId (pBlogId)
     */
    String sourceName = scalabilityManager.getWriteScalability(userId);
    ds = scalabilityManager.getSource(sourceName);
    if (ds == null) {
        throw new BaseDaoException("ds null, updatePblogTopic() " + sourceName + " userId = " + userId);
    }

    Connection conn = null;
    try {
        conn = ds.getConnection();
        conn.setAutoCommit(false);
        updateQuery.run(conn, tid, message, topic);
        updateAttrQuery.run(conn, tid, fontSize, fontFace, fontColor, moodId, bgColor);
    } catch (Exception e) {
        try {
            conn.rollback();
        } catch (Exception e1) {
            try {
                if (conn != null) {
                    conn.setAutoCommit(true);
                    conn.close();
                }
            } catch (Exception e2) {
                throw new BaseDaoException(
                        "conn.setAutoCommit(true),conn.close(), updating pblogtopics exception", e2);
            }
            throw new BaseDaoException("conn.rollback() error, updating pblogtopics, attributes", e1);
        }
        try {
            if (conn != null) {
                conn.setAutoCommit(true);
                conn.close();
            }
        } catch (Exception e2) {
            throw new BaseDaoException("conn.close() error after rollback, updating pblogtopics", e2);
        }
        // rethrow so the rolled-back transaction is not committed below
        throw new BaseDaoException("error updating pblogtopics, attributes", e);
    }
    try {
        conn.commit();
    } catch (Exception e3) {
        throw new BaseDaoException("conn.commit() error, updating pblogtopics, attributes", e3);
    }
    try {
        if (conn != null) {
            conn.setAutoCommit(true);
            conn.close();
        }
    } catch (Exception e4) {
        throw new BaseDaoException("conn.close() error, updating pblogtopics, setAutoCommit(true) ", e4);
    }

    /* update np table */
    sourceName = scalabilityManager.getWriteZeroScalability();
    ds = scalabilityManager.getSource(sourceName);
    if (ds == null) {
        throw new BaseDaoException("ds null, updateNpQuery() " + sourceName + " userId = " + userId);
    }

    try {
        conn = ds.getConnection();
        updateNpQuery.run(conn, userId, tid);
    } catch (Exception e) {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (Exception e1) {
            throw new BaseDaoException("conn.close(), updating pblog np exception", e1);
        }
        throw new BaseDaoException("error updating pblog np", e);
    }

    try {
        if (conn != null) {
            conn.close();
        }
    } catch (Exception e) {
        throw new BaseDaoException("error updating pblog np", e);
    }

    Fqn fqn = cacheUtil.fqn(DbConstants.PBLOG_TOPIC);
    StringBuffer sb = new StringBuffer(userId);
    sb.append("-");
    sb.append(tid);
    if (treeCache.exists(fqn, sb.toString())) {
        treeCache.remove(fqn, sb.toString());
    }

    fqn = cacheUtil.fqn(DbConstants.PBLOG_TOPIC_LIST);
    if (treeCache.exists(fqn, userId)) {
        treeCache.remove(fqn, userId);
    }

    fqn = cacheUtil.fqn(DbConstants.PBLOG_DAILY_LIST);
    if (treeCache.exists(fqn, userId)) {
        treeCache.remove(fqn, userId);
    }

    fqn = cacheUtil.fqn(DbConstants.PERSONAL_BLOG);
    if (treeCache.exists(fqn, userId)) {
        treeCache.remove(fqn, userId);
    }

    fqn = cacheUtil.fqn(DbConstants.POPULAR_BLOGS);
    if (treeCache.exists(fqn, DbConstants.POPULAR_BLOG_KEY)) {
        treeCache.remove(fqn, DbConstants.POPULAR_BLOG_KEY);
    }

    fqn = cacheUtil.fqn(DbConstants.RECENT_BLOGS);
    if (treeCache.exists(fqn, DbConstants.RECENT_BLOG_KEY)) {
        treeCache.remove(fqn, DbConstants.RECENT_BLOG_KEY);
    }
    fqn = cacheUtil.fqn(DbConstants.USER_PAGE);
    if (treeCache.exists(fqn, userLogin)) {
        treeCache.remove(fqn, userLogin);
    }
    fqn = cacheUtil.fqn(DbConstants.PBLOG_C_TOPICS);
    if (treeCache.exists(fqn, DbConstants.PBLOG_C_TOPICS)) {
        treeCache.remove(fqn, DbConstants.PBLOG_C_TOPICS);
    }
    fqn = cacheUtil.fqn(DbConstants.PBLOG_C_BIZ_TOPICS);
    if (treeCache.exists(fqn, DbConstants.PBLOG_C_BIZ_TOPICS)) {
        treeCache.remove(fqn, DbConstants.PBLOG_C_BIZ_TOPICS);
    }
}