Example usage for java.sql.Connection.commit()

List of usage examples for java.sql.Connection.commit()

Introduction

On this page you can find example usage of java.sql.Connection.commit().

Prototype

void commit() throws SQLException;

Document

Makes all changes made since the previous commit/rollback permanent and releases any database locks currently held by this Connection object.
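
Before the usage examples, here is a minimal sketch of the typical commit pattern: disable auto-commit, execute one or more statements, call commit() on success, and roll back on failure. The JDBC URL and the account table below are illustrative assumptions, not taken from any of the examples that follow.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class CommitExample {

    // Illustrative in-memory database URL; substitute your own data source.
    private static final String URL = "jdbc:h2:mem:demo";

    public static void addToBalance(int accountId, int amount) throws SQLException {
        try (Connection conn = DriverManager.getConnection(URL)) {
            boolean previousAutoCommit = conn.getAutoCommit();
            conn.setAutoCommit(false); // group the statements into one transaction
            try (PreparedStatement ps = conn
                    .prepareStatement("UPDATE account SET balance = balance + ? WHERE id = ?")) {
                ps.setInt(1, amount);
                ps.setInt(2, accountId);
                ps.executeUpdate();
                conn.commit();   // make the change permanent and release any held locks
            } catch (SQLException e) {
                conn.rollback(); // discard uncommitted work on failure
                throw e;
            } finally {
                conn.setAutoCommit(previousAutoCommit); // restore the original mode
            }
        }
    }
}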

Usage

From source file:com.excilys.ebi.bank.jdbc.SimpleBatchResourceDatabasePopulator.java

/**
 * Execute the given SQL script.
 * <p>
 * The script will normally be loaded by classpath. There should be one
 * statement per line. Any {@link #setSeparator(String) statement
 * separators} will be removed.
 * <p>
 * <b>Do not use this method to execute DDL if you expect rollback.</b>
 *
 * @param connection
 *            the JDBC Connection with which to perform JDBC operations
 * @param resource
 *            the resource (potentially associated with a specific encoding)
 *            to load the SQL script from
 * @param continueOnError
 *            whether or not to continue without throwing an exception in
 *            the event of an error
 * @param ignoreFailedDrops
 *            whether or not to continue specifically in the event of an
 *            error on a <code>DROP</code>
 */
private void executeSqlScript(Connection connection, EncodedResource resource, boolean continueOnError,
        boolean ignoreFailedDrops) throws SQLException, IOException {

    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Executing SQL script from " + resource);
    }

    long startTime = System.currentTimeMillis();
    Iterator<String> statements = IOUtils.lineIterator(resource.getReader());
    int lineNumber = 0;

    boolean initialAutoCommitState = connection.getAutoCommit();

    connection.setAutoCommit(false);
    Statement stmt = connection.createStatement();
    try {
        while (statements.hasNext()) {
            String statement = statements.next();
            lineNumber++;
            try {
                stmt.addBatch(statement);

                if (lineNumber % batchSize == 0) {
                    stmt.executeBatch();
                    connection.commit();
                }
            } catch (SQLException ex) {
                boolean dropStatement = StringUtils.startsWithIgnoreCase(statement.trim(), "drop");
                if (continueOnError || (dropStatement && ignoreFailedDrops)) {
                    if (LOGGER.isDebugEnabled()) {
                        LOGGER.debug("Failed to execute SQL script statement at line " + lineNumber
                                + " of resource " + resource + ": " + statement, ex);
                    }
                } else {
                    Exception nextException = ex.getNextException();
                    throw new ScriptStatementFailedException(statement, lineNumber, resource,
                            nextException != null ? nextException : ex);
                }
            }
        }
    } finally {
        stmt.executeBatch();
        connection.commit();

        connection.setAutoCommit(initialAutoCommitState);

        try {
            stmt.close();
        } catch (Throwable ex) {
            LOGGER.debug("Could not close JDBC Statement", ex);
        }
    }
    long elapsedTime = System.currentTimeMillis() - startTime;
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("Done executing SQL script from " + resource + " in " + elapsedTime + " ms.");
    }
}

From source file:com.novartis.opensource.yada.QueryManager.java

/**
 * Executes a commit on all connections created during processing of the
 * current request.
 * 
 * @throws YADAConnectionException
 *           when the commit fails
 */
public void commit() throws YADAConnectionException {
    if (this.connectionMap != null && this.connectionMap.keySet().size() > 0) {
        //TODO         int    totalCount = 0;
        String source = "";
        for (Iterator<String> iterator = this.requiredCommits.iterator(); iterator.hasNext();) {
            try {
                source = iterator.next();
                Connection connection = (Connection) this.connectionMap.get(source);
                if (connection.getHoldability() == ResultSet.HOLD_CURSORS_OVER_COMMIT) {
                    connection.commit();
                    String msg = "\n------------------------------------------------------------\n";
                    msg += "   Commit successful on [" + source + "].\n";
                    msg += "------------------------------------------------------------\n";
                    l.info(msg);
                } else {
                    deferCommit(source);
                }
            } catch (SQLException e) {
                String msg = "Unable to commit transaction on [" + source + "].";
                throw new YADAConnectionException(msg, e);
            } catch (ClassCastException e) {
                l.info("Connection to [" + source
                        + "] is not a JDBC connection (it's probably SOAP.)  No commit was attempted.");
            }
        }
    }
}

From source file:com.cloudera.sqoop.manager.DirectMySQLTest.java

@Test
public void testJdbcEscapedTableName() throws Exception {
    // Test a JDBC-based import of a table whose name is
    // a reserved sql keyword (and is thus `quoted`)
    final String RESERVED_TABLE_NAME = "TABLE";
    SqoopOptions options = new SqoopOptions(MySQLTestUtils.CONNECT_STRING, RESERVED_TABLE_NAME);
    options.setUsername(MySQLTestUtils.getCurrentUser());
    ConnManager mgr = new MySQLManager(options);

    Connection connection = null;
    Statement st = null;

    try {
        connection = mgr.getConnection();
        connection.setAutoCommit(false);
        st = connection.createStatement();

        // create the database table and populate it with data.
        st.executeUpdate("DROP TABLE IF EXISTS `" + RESERVED_TABLE_NAME + "`");
        st.executeUpdate("CREATE TABLE `" + RESERVED_TABLE_NAME + "` ("
                + "id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, " + "name VARCHAR(24) NOT NULL, "
                + "start_date DATE, " + "salary FLOAT, " + "dept VARCHAR(32))");

        st.executeUpdate("INSERT INTO `" + RESERVED_TABLE_NAME + "` VALUES("
                + "2,'Aaron','2009-05-14',1000000.00,'engineering')");
        connection.commit();
    } finally {
        if (null != st) {
            st.close();
        }

        if (null != connection) {
            connection.close();
        }
    }

    String[] expectedResults = { "2,Aaron,2009-05-14,1000000.0,engineering", };

    doImport(false, false, RESERVED_TABLE_NAME, expectedResults, null);
}

From source file:eionet.cr.dao.virtuoso.VirtuosoFolderDAO.java

@Override
public void createUserHomeFolder(String userName) throws DAOException {

    if (StringUtils.isBlank(userName)) {
        throw new IllegalArgumentException("User name must not be blank!");
    }
    CRUser user = new CRUser(userName);

    Connection sqlConn = null;
    RepositoryConnection repoConn = null;
    try {
        sqlConn = SesameUtil.getSQLConnection();
        sqlConn.setAutoCommit(false);

        repoConn = SesameUtil.getRepositoryConnection();
        repoConn.setAutoCommit(false);
        ValueFactory vf = repoConn.getValueFactory();

        List<Statement> statements = getHomeFolderCreationStatements(user, vf);
        repoConn.add(statements);

        createNeverHarvestedSources(sqlConn, statements);

        repoConn.commit();
        sqlConn.commit();

    } catch (OpenRDFException e) {
        SesameUtil.rollback(repoConn);
        throw new DAOException(e.getMessage(), e);
    } catch (SQLException e) {
        SQLUtil.rollback(sqlConn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(sqlConn);
        SesameUtil.close(repoConn);
    }
}

From source file:com.krawler.esp.servlets.SuperAdminServlet.java

protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    response.setContentType("text/html;charset=UTF-8");

    String result = "";
    Connection conn = null;

    try {
        conn = DbPool.getConnection();
        int mode = Integer.parseInt(request.getParameter("mode"));
        switch (mode) {
        case 0:
            result = getCompanyList(conn, request.getParameter("start"), request.getParameter("limit"));
            break;

        case 1:
            createCompany(conn, request, response);
            break;

        case 2:
            editCompany(conn, request, response);
            break;

        case 3:
            deleteCompany(conn, request.getParameter("companyid"), "false");
            break;
        case 4:
            signUp(conn, request, response);
            break;

        default:
            break;
        }
        conn.commit();
    } catch (ServiceException ex) {
        DbPool.quietRollback(conn);
    } finally {
        DbPool.quietClose(conn);
        response.getWriter().println(result);
        response.getWriter().close();
    }
}

From source file:edu.ncsa.sstde.indexing.postgis.PostgisIndexer.java

/**
 * {@inheritDoc}
 */
@Override
public void commit() {
    try {
        flush();
        Connection conn = getConnectionInternal();
        if (conn != null)
            conn.commit();
    } catch (SQLException e) {
        throw new IndexException("Commit error", e);
    }
}

From source file:com.krawler.esp.portalmsg.Mail.java

public static String MoveMails(Connection conn, String post_idArray, String last_folder_id,
        String dest_folder_id, String loginid) throws ServiceException, JSONException {
    String str = "{data:[";
    String str1 = " ";
    String query = null;
    com.krawler.utils.json.base.JSONObject jobj = new com.krawler.utils.json.base.JSONObject(post_idArray);
    if ((last_folder_id.equals(dest_folder_id)) && (Integer.parseInt(dest_folder_id) == 2)) {
        for (int i = 0; i < jobj.getJSONArray("data").length(); i++) {
            String post_id = jobj.getJSONArray("data").getJSONObject(i).getString("post_id");
            String query1 = "delete from mailmessages where post_id = ?";
            int numRows = DbUtil.executeUpdate(conn, query1, post_id);
            if (numRows == 0) {
                post_id = "-1";
            }
            conn.commit();
            str1 += "{'post_id':'" + post_id + "'},";
        }
    } else {
        for (int i = 0; i < jobj.getJSONArray("data").length(); i++) {
            String post_id = jobj.getJSONArray("data").getJSONObject(i).getString("post_id");
            if (dest_folder_id.equals("2")) {
                query = "Select to_id,poster_id from mailmessages where post_id =? ";
                // Object[] params1 = { post_id };
                DbResults rs1 = DbUtil.executeQuery(conn, query, post_id);
                while (rs1.next()) {
                    if (loginid.equals(rs1.getString("to_id"))) {
                        dest_folder_id = getDelId(loginid, "0");
                    } else if (loginid.equals(rs1.getString("poster_id"))) {
                        dest_folder_id = getDelId(loginid, "1");
                    }
                }
            }
            query = "Select folder from mailmessages where post_id =? ";
            // Object[] params1 = { post_id };
            DbResults rs = DbUtil.executeQuery(conn, query, post_id);
            while (rs.next()) {
                last_folder_id = rs.getString(1);
            }

            query = "Update mailmessages set folder= ? , last_folder_id = ? where post_id = ?";
            // Object[] params2 = { dest_folder_id, last_folder_id, post_idArray
            // };
            int numRows = DbUtil.executeUpdate(conn, query,
                    new Object[] { dest_folder_id, last_folder_id, post_id });
            str1 += "{'post_id':'" + post_id + "'},";
        }
    }
    str1 = str1.substring(0, str1.length() - 1);
    str += str1;
    str += "]}";

    return str;
}

From source file:com.cloudera.sqoop.TestIncrementalImport.java

public void testModifyWithTimestamp() throws Exception {
    // Create a table with data in it; import it.
    // Then modify some existing rows, and verify that we only grab
    // those rows.

    final String TABLE_NAME = "modifyTimestamp";
    Timestamp thePast = new Timestamp(System.currentTimeMillis() - 100);
    createTimestampTable(TABLE_NAME, 10, thePast);

    List<String> args = getArgListForTable(TABLE_NAME, false, false);
    createJob(TABLE_NAME, args);
    runJob(TABLE_NAME);
    assertDirOfNumbers(TABLE_NAME, 10);

    // Modify a row.
    long importWasBefore = System.currentTimeMillis();
    Thread.sleep(50);
    long rowsAddedTime = System.currentTimeMillis() - 5;
    assertTrue(rowsAddedTime > importWasBefore);
    assertTrue(rowsAddedTime < System.currentTimeMillis());
    SqoopOptions options = new SqoopOptions();
    options.setConnectString(SOURCE_DB_URL);
    HsqldbManager manager = new HsqldbManager(options);
    Connection c = manager.getConnection();
    PreparedStatement s = null;
    try {
        s = c.prepareStatement("UPDATE " + TABLE_NAME + " SET id=?, last_modified=? WHERE id=?");
        s.setInt(1, 4000); // the first row should have '4000' in it now.
        s.setTimestamp(2, new Timestamp(rowsAddedTime));
        s.setInt(3, 0);
        s.executeUpdate();
        c.commit();
    } finally {
        s.close();
    }

    // Import only the new row.
    clearDir(TABLE_NAME);
    runJob(TABLE_NAME);
    assertSpecificNumber(TABLE_NAME, 4000);
}

From source file:dk.netarkivet.harvester.datamodel.RunningJobsInfoDBDAO.java

/**
 * Deletes all frontier report data pertaining to the given job id from
 * the persistent storage.
 * @param jobId the job id
 * @return the update count
 */
public int deleteFrontierReports(long jobId) {
    ArgumentNotValid.checkNotNull(jobId, "jobId");

    Connection c = HarvestDBConnection.get();
    PreparedStatement stm = null;
    try {
        c.setAutoCommit(false);

        stm = c.prepareStatement("DELETE FROM frontierReportMonitor WHERE jobId=?");
        stm.setLong(1, jobId);

        int delCount = stm.executeUpdate();

        c.commit();

        return delCount;
    } catch (SQLException e) {
        String message = "SQL error deleting report lines for job ID " + jobId + "\n"
                + ExceptionUtils.getSQLExceptionCause(e);
        log.warn(message, e);
        return 0;
    } finally {
        DBUtils.closeStatementIfOpen(stm);
        DBUtils.rollbackIfNeeded(c, "deleteFrontierReports", jobId);
        HarvestDBConnection.release(c);
    }
}

From source file:com.cloudera.sqoop.manager.OracleManager.java

@Override
public String[] listTables() {
    Connection conn = null;
    Statement stmt = null;
    ResultSet rset = null;
    List<String> tables = new ArrayList<String>();

    try {
        conn = getConnection();
        stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        rset = stmt.executeQuery(QUERY_LIST_TABLES);

        while (rset.next()) {
            tables.add(rset.getString(1));
        }
        conn.commit();
    } catch (SQLException e) {
        try {
            conn.rollback();
        } catch (Exception ex) {
            LOG.error("Failed to rollback transaction", ex);
        }
        LOG.error("Failed to list tables", e);
    } finally {
        if (rset != null) {
            try {
                rset.close();
            } catch (SQLException ex) {
                LOG.error("Failed to close resultset", ex);
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (Exception ex) {
                LOG.error("Failed to close statement", ex);
            }
        }

        try {
            close();
        } catch (SQLException ex) {
            LOG.error("Unable to discard connection", ex);
        }
    }

    return tables.toArray(new String[tables.size()]);
}