List of usage examples for java.sql Connection rollback
void rollback() throws SQLException;
Undoes all changes made in the current transaction and releases any database locks currently held by this Connection object. This method should be used only when auto-commit mode has been disabled.
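Before the individual source files, here is a minimal sketch of the pattern most of the examples below share: disable auto-commit, do the work, commit on success, roll back on failure. The DataSource, table, and column names in this sketch are illustrative assumptions, not taken from any of the examples that follow.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import javax.sql.DataSource;

public class TransferExample {
    // Minimal sketch: apply two updates as one transaction.
    // Table and column names ("account", "balance", "id") are hypothetical.
    public void transfer(DataSource ds, long fromId, long toId, long amount) throws SQLException {
        Connection conn = ds.getConnection();
        try {
            conn.setAutoCommit(false); // rollback() is only meaningful with auto-commit disabled
            try (PreparedStatement debit = conn.prepareStatement(
                         "UPDATE account SET balance = balance - ? WHERE id = ?");
                 PreparedStatement credit = conn.prepareStatement(
                         "UPDATE account SET balance = balance + ? WHERE id = ?")) {
                debit.setLong(1, amount);
                debit.setLong(2, fromId);
                debit.executeUpdate();
                credit.setLong(1, amount);
                credit.setLong(2, toId);
                credit.executeUpdate();
            }
            conn.commit();
        } catch (SQLException e) {
            conn.rollback(); // undo both updates if either one failed
            throw e;
        } finally {
            conn.close();
        }
    }
}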
From source file:net.gcolin.simplerepo.search.SearchController.java
public SearchController(ConfigurationManager configManager) throws IOException {
    this.configManager = configManager;
    File plugins = new File(configManager.getRoot(), "plugins");
    plugins.mkdirs();
    System.setProperty("derby.system.home", plugins.getAbsolutePath());
    BasicDataSource s = new BasicDataSource();
    s.setDriverClassName("org.apache.derby.jdbc.EmbeddedDriver");
    s.setUrl("jdbc:derby:search" + (new File(plugins, "search").exists() ? "" : ";create=true"));
    s.setUsername("su");
    s.setPassword("");
    s.setMaxTotal(10);
    s.setMinIdle(0);
    s.setDefaultAutoCommit(true);
    datasource = s;
    Set<String> allTables = new HashSet<>();
    Connection connection = null;
    try {
        try {
            connection = datasource.getConnection();
            connection.setAutoCommit(false);
            DatabaseMetaData dbmeta = connection.getMetaData();
            try (ResultSet rs = dbmeta.getTables(null, null, null, new String[] { "TABLE" })) {
                while (rs.next()) {
                    allTables.add(rs.getString("TABLE_NAME").toLowerCase());
                }
            }
            if (!allTables.contains("artifact")) {
                QueryRunner run = new QueryRunner();
                run.update(connection,
                        "CREATE TABLE artifactindex(artifact bigint NOT NULL, version bigint NOT NULL)");
                run.update(connection, "INSERT INTO artifactindex (artifact,version) VALUES (?,?)", 1L, 1L);
                run.update(connection,
                        "CREATE TABLE artifact(id bigint NOT NULL,groupId character varying(120), artifactId character varying(120),CONSTRAINT artifact_pkey PRIMARY KEY (id))");
                run.update(connection,
                        "CREATE TABLE artifactversion(artifact_id bigint NOT NULL,id bigint NOT NULL,"
                                + "version character varying(100)," + "reponame character varying(30),"
                                + "CONSTRAINT artifactversion_pkey PRIMARY KEY (id),"
                                + "CONSTRAINT fk_artifactversion_artifact_id FOREIGN KEY (artifact_id) REFERENCES artifact (id) )");
                run.update(connection,
                        "CREATE TABLE artifacttype(version_id bigint NOT NULL,packaging character varying(20) NOT NULL,classifier character varying(30),"
                                + "CONSTRAINT artifacttype_pkey PRIMARY KEY (version_id,packaging,classifier),"
                                + "CONSTRAINT fk_artifacttype_version FOREIGN KEY (version_id) REFERENCES artifactversion (id))");
                run.update(connection, "CREATE INDEX artifactindex ON artifact(groupId,artifactId)");
                run.update(connection, "CREATE INDEX artifactgroupindex ON artifact(groupId)");
                run.update(connection, "CREATE INDEX artifactversionindex ON artifactversion(version)");
            }
            connection.commit();
        } catch (SQLException ex) {
            connection.rollback();
            throw ex;
        } finally {
            DbUtils.close(connection);
        }
    } catch (SQLException ex) {
        throw new IOException(ex);
    }
}
From source file:com.skilrock.lms.web.scratchService.orderMgmt.common.ProcessRequestAction.java
@Override
public String execute() throws Exception {
    HttpSession session = getRequest().getSession();
    session.setAttribute("RequestList", null);
    session.setAttribute("orderId", null);
    session.setAttribute("TOTAL", null);
    session.setAttribute("BALANCE", null);
    session.setAttribute("CREDIT_LIMIT", null);
    session.setAttribute("CREDIT_AMT", null);
    list = new ArrayList<OrderRequestBean>();
    Connection conn = null;
    Statement statement = null;
    ResultSet resultSet = null;
    try {
        conn = DBConnect.getConnection();
        StringBuilder queryBuilder = new StringBuilder();
        String query = "SELECT a.order_id, a.order_date, b.name FROM st_se_bo_order a INNER JOIN st_lms_organization_master b ON a.agent_org_id=b.organization_id INNER JOIN st_se_bo_ordered_games c ON a.order_id=c.order_id INNER JOIN st_se_game_master d ON c.game_id=d.game_id WHERE a.order_status='REQUESTED'";
        query = CommonMethods.appendRoleAgentMappingQuery(query, "a.agent_org_id",
                ((UserInfoBean) session.getAttribute("USER_INFO")).getRoleId()); // +" group by
        queryBuilder.append(query);
        if (gameName != null && gameName.length() > 0)
            queryBuilder.append(" AND game_name LIKE '%").append(gameName).append("%'");
        if (gameNumber != null && gameNumber.length() > 0)
            queryBuilder.append(" AND game_nbr LIKE '%").append(gameNumber).append("%'");
        if (agtOrgName != null && agtOrgName.length() > 0)
            queryBuilder.append(" AND name LIKE '%").append(agtOrgName).append("%'");
        if (orderNumber != null && orderNumber.length() > 0)
            queryBuilder.append(" AND order_status LIKE '%").append(orderNumber).append("%'");
        logger.info("Search - " + queryBuilder.toString());
        statement = conn.createStatement();
        resultSet = statement.executeQuery(queryBuilder.toString());
        while (resultSet.next()) {
            orderBean = new OrderRequestBean();
            orderId = resultSet.getInt("order_id");
            date = resultSet.getDate("order_date");
            name = resultSet.getString("name");
            orderBean.setOrderId(orderId);
            orderBean.setDate(date);
            orderBean.setName(name);
            list.add(orderBean);
        }
        session.setAttribute("RequestList", list);
        session.setAttribute("RequestList1", list);
        session.setAttribute("orderId", orderId);
        if (list != null && list.size() > 0) {
            session.setAttribute("startValueRequestSearch", new Integer(0));
            searchAjaxRequest();
        }
        setVarFromProcessRequest("Yes");
        return SUCCESS;
    } catch (SQLException se) {
        setVarFromProcessRequest("No");
        conn.rollback();
        System.out.println("We got an exception while preparing a statement:" + "Probably bad SQL.");
        throw new LMSException(se);
    } finally {
        try {
            if (resultSet != null) {
                resultSet.close();
            }
            if (statement != null) {
                statement.close();
            }
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException se) {
            throw new LMSException(se);
        }
    }
}
From source file:azkaban.project.JdbcProjectLoader.java
private synchronized Project createNewProject(Connection connection, String name, String description,
        User creator) throws ProjectManagerException {
    QueryRunner runner = new QueryRunner();
    ProjectResultHandler handler = new ProjectResultHandler();
    // See if it exists first.
    try {
        List<Project> project = runner.query(connection, ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME,
                handler, name);
        if (!project.isEmpty()) {
            throw new ProjectManagerException("Active project with name " + name + " already exists in db.");
        }
    } catch (SQLException e) {
        logger.error(e);
        throw new ProjectManagerException("Checking for existing project failed. " + name, e);
    }
    final String INSERT_PROJECT = "INSERT INTO projects ( name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob) values (?,?,?,?,?,?,?,?,?)";
    // Insert project
    try {
        long time = System.currentTimeMillis();
        int i = runner.update(connection, INSERT_PROJECT, name, true, time, time, null, creator.getUserId(),
                description, defaultEncodingType.getNumVal(), null);
        if (i == 0) {
            throw new ProjectManagerException("No projects have been inserted.");
        }
        connection.commit();
    } catch (SQLException e) {
        logger.error(INSERT_PROJECT + " failed.");
        try {
            connection.rollback();
        } catch (SQLException e1) {
            e1.printStackTrace();
        }
        throw new ProjectManagerException("Insert project for existing project failed. " + name, e);
    }
    // Do another query to grab and return the project.
    Project project = null;
    try {
        List<Project> projects = runner.query(connection, ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME,
                handler, name);
        if (projects.isEmpty()) {
            throw new ProjectManagerException("No active project with name " + name + " exists in db.");
        } else if (projects.size() > 1) {
            throw new ProjectManagerException("More than one active project " + name);
        }
        project = projects.get(0);
    } catch (SQLException e) {
        logger.error(e);
        throw new ProjectManagerException("Checking for existing project failed. " + name, e);
    }
    return project;
}
From source file:org.apache.cocoon.acting.DatabaseAuthenticatorAction.java
/**
 * Main invocation routine.
 */
public Map act(Redirector redirector, SourceResolver resolver, Map objectModel, String src,
        Parameters parameters) throws Exception {
    DataSourceComponent datasource = null;
    Connection conn = null;
    PreparedStatement st = null;
    ResultSet rs = null;
    // read global parameter settings
    boolean reloadable = Constants.DESCRIPTOR_RELOADABLE_DEFAULT;
    if (this.settings.containsKey("reloadable")) {
        reloadable = Boolean.valueOf((String) this.settings.get("reloadable")).booleanValue();
    }
    // read local settings
    try {
        Configuration conf = this.getConfiguration(
                parameters.getParameter("descriptor", (String) this.settings.get("descriptor")), resolver,
                parameters.getParameterAsBoolean("reloadable", reloadable));
        boolean cs = true;
        String create_session = parameters.getParameter("create-session",
                (String) this.settings.get("create-session"));
        if (create_session != null) {
            cs = BooleanUtils.toBoolean(create_session.trim());
        }
        datasource = this.getDataSource(conf);
        conn = datasource.getConnection();
        Request req = ObjectModelHelper.getRequest(objectModel);
        /* check request validity */
        if (req == null) {
            getLogger().debug("DBAUTH: no request object");
            return null;
        }
        st = this.getAuthQuery(conn, conf, req);
        if (st == null) {
            getLogger().debug("DBAUTH: have not got query");
            req.setAttribute("message", "The authenticator is misconfigured");
            return null;
        }
        rs = st.executeQuery();
        if (rs.next()) {
            getLogger().debug("DBAUTH: authorized successfully");
            Session session = null;
            if (cs) {
                session = req.getSession(false);
                if (session != null)
                    session.invalidate();
                session = req.getSession(true);
                if (session == null)
                    return null;
                getLogger().debug("DBAUTH: session created");
            } else {
                getLogger().debug("DBAUTH: leaving session untouched");
            }
            HashMap actionMap = this.propagateParameters(conf, rs, session);
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
            return Collections.unmodifiableMap(actionMap);
        }
        if (!conn.getAutoCommit()) {
            conn.rollback();
        }
        req.setAttribute("message",
                "The username or password were incorrect, please check your CAPS LOCK key and try again.");
        getLogger().debug("DBAUTH: no results for query");
    } catch (Exception e) {
        if (conn != null) {
            try {
                if (!conn.getAutoCommit()) {
                    conn.rollback();
                }
            } catch (Exception se) {
                /* ignore */
            }
        }
        getLogger().debug("exception: ", e);
        return null;
    } finally {
        if (rs != null)
            rs.close();
        if (st != null)
            st.close();
        if (conn != null) {
            try {
                conn.close();
            } catch (Exception e) {
                /* ignore */
            }
        }
    }
    return null;
}
From source file:dao.DirectoryAuthorDaoDb.java
/**
 * Adds the author for this directory.
 * @param directoryId the directory id
 * @param member the member who is added as author to this directory
 * @param userId the user Login is used to check if this user has the permission to add authors
 * @param userLogin the user Login is used to check if this user has the permission to add authors
 */
public void addAuthor(String directoryId, String member, String userId, String userLogin)
        throws BaseDaoException {
    if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(member)
            || RegexStrUtil.isNull(userId)) {
        throw new BaseDaoException("params are null");
    }
    /**
     * check if this user has permission to add authors
     */
    if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(directoryId, userId)) {
        throw new BaseDaoException("User does not have permission to add authors to directory, " + directoryId
                + " userId = " + userId);
    }
    /**
     * Get scalability datasource for diradmin, not partitioned
     */
    String sourceName = scalabilityManager.getWriteZeroScalability();
    ds = scalabilityManager.getSource(sourceName);
    if (ds == null) {
        throw new BaseDaoException("ds null, addAuthor() " + sourceName + " userId = " + userId);
    }
    /**
     * Add author to this directory
     */
    String memberId = null;
    Connection conn = null;
    try {
        Hdlogin hdlogin = getLoginid(member);
        if (hdlogin == null) {
            throw new BaseDaoException("Hdlogin for member is null, addAuthor(), member= " + member);
        } else {
            memberId = hdlogin.getValue(DbConstants.LOGIN_ID);
            conn = ds.getConnection();
            conn.setAutoCommit(false);
            addAdminQuery.run(conn, directoryId, memberId);
            deleteBlockQuery.run(conn, directoryId, memberId);
        }
    } catch (Exception e) {
        try {
            conn.rollback();
        } catch (Exception e1) {
            try {
                if (conn != null) {
                    conn.setAutoCommit(true);
                    conn.close();
                }
            } catch (Exception e2) {
                throw new BaseDaoException("conn.close() error, addAuthor(), memberId" + memberId, e2);
            }
            throw new BaseDaoException(" rollback() exception, for addAuthor(), userId = " + userId, e1);
        }
        throw new BaseDaoException(" addAuthor exception, userId = " + userId, e);
    }
    // connection commit
    try {
        conn.commit();
    } catch (Exception e3) {
        throw new BaseDaoException(" commit() exception, for addAuthor() userId = " + userId, e3);
    }
    try {
        if (conn != null) {
            conn.setAutoCommit(true);
            conn.close();
        }
    } catch (Exception e4) {
        throw new BaseDaoException(" conn.close() exception, for commit(), addAuthor() userId = " + userId, e4);
    }
    /**
     * member's list needs to be updated with the new query, remove the old entries for this member
     */
    Fqn fqn = cacheUtil.fqn(DbConstants.AUTHORS_LIST);
    if (treeCache.exists(fqn, member)) {
        treeCache.remove(fqn, member);
    }
    fqn = cacheUtil.fqn(DbConstants.AUTHORS_LIST);
    if (treeCache.exists(fqn, userLogin)) {
        treeCache.remove(fqn, userLogin);
    }
    fqn = cacheUtil.fqn(DbConstants.USER_PAGE);
    if (treeCache.exists(fqn, member)) {
        treeCache.remove(fqn, member);
    }
    fqn = cacheUtil.fqn(DbConstants.DIRECTORY);
    if (treeCache.exists(fqn, directoryId)) {
        treeCache.remove(fqn, directoryId);
    }
    fqn = cacheUtil.fqn(DbConstants.DIR_AUTHORS);
    if (treeCache.exists(fqn, directoryId)) {
        treeCache.remove(fqn, directoryId);
    }
    fqn = cacheUtil.fqn(DbConstants.DIR_ADMINS);
    if (treeCache.exists(fqn, directoryId)) {
        treeCache.remove(fqn, directoryId);
    }
    fqn = cacheUtil.fqn(DbConstants.ADMIN_DIRS);
    if (treeCache.exists(fqn, DbConstants.ADMIN_DIRS)) {
        treeCache.remove(fqn, DbConstants.ADMIN_DIRS);
    }
    fqn = cacheUtil.fqn(DbConstants.AUTHORS_DIRECTORIES);
    if (treeCache.exists(fqn, member)) {
        treeCache.remove(fqn, member);
    }
    StringBuffer sb = new StringBuffer(directoryId);
    sb.append("-");
    sb.append(memberId);
    fqn = cacheUtil.fqn(DbConstants.DIR_AUTHOR);
    if (treeCache.exists(fqn, sb.toString())) {
        treeCache.remove(fqn, sb.toString());
    }
    fqn = cacheUtil.fqn(DbConstants.AUTHOR_BLOCKED_DIRS);
    if (treeCache.exists(fqn, memberId)) {
        treeCache.remove(fqn, memberId);
    }
}
From source file:com.flexive.core.storage.GenericDBStorage.java
/**
 * {@inheritDoc}
 */
@Override
public void importDivision(Connection _con, ZipFile zip) throws Exception {
    long startTime = System.currentTimeMillis();
    GenericDivisionImporter importer = getDivisionImporter();
    FxDivisionExportInfo exportInfo = importer.getDivisionExportInfo(zip);
    if (FxSharedUtils.getDBVersion() != exportInfo.getSchemaVersion()) {
        LOG.warn("DB Version mismatch! Current:" + FxSharedUtils.getDBVersion() + ", exported schema:"
                + exportInfo.getSchemaVersion());
    }
    boolean isNonTX = importer.importRequiresNonTXConnection();
    Connection con = isNonTX ? Database.getNonTXDataSource().getConnection() : _con;
    boolean autoCommit = false;
    if (isNonTX) {
        autoCommit = con.getAutoCommit();
        con.setAutoCommit(false);
        con.commit(); //ensure a "clean" connection
    }
    Exception inner = null;
    try {
        importer.wipeDivisionData(con);
        if (isNonTX) con.commit();
        Statement stmt = con.createStatement();
        if (isNonTX) con.commit();
        try {
            importer.importLanguages(con, zip);
            if (isNonTX) con.commit();
            importer.importMandators(con, zip);
            if (isNonTX) con.commit();
            importer.importSecurity(con, zip);
            if (isNonTX) con.commit();
            importer.importWorkflows(con, zip);
            if (isNonTX) con.commit();
            importer.importConfigurations(con, zip);
            if (isNonTX) con.commit();
            importer.importBinaries(con, zip);
            if (isNonTX) con.commit();
            stmt.execute(getReferentialIntegrityChecksStatement(false));
            importer.importStructures(con, zip);
            if (isNonTX) con.commit();
            importer.importHierarchicalContents(con, zip);
            if (isNonTX) con.commit();
            importer.importScripts(con, zip);
            if (isNonTX) con.commit();
            importer.importTree(con, zip);
            if (isNonTX) con.commit();
            importer.importHistory(con, zip);
            if (isNonTX) con.commit();
            importer.importResources(con, zip);
            if (isNonTX) con.commit();
            importer.importBriefcases(con, zip);
            if (isNonTX) con.commit();
            importer.importFlatStorages(con, zip, exportInfo);
            if (isNonTX) con.commit();
            importer.importSequencers(con, zip);
            if (isNonTX) con.commit();
        } catch (Exception e) {
            if (isNonTX) con.rollback();
            inner = e;
            throw e;
        } finally {
            if (isNonTX) con.commit();
            stmt.execute(getReferentialIntegrityChecksStatement(true));
        }
        if (isNonTX) con.commit();
        //rebuild fulltext index
        FulltextIndexer ft = StorageManager.getStorageImpl().getContentStorage(TypeStorageMode.Hierarchical)
                .getFulltextIndexer(null, con);
        ft.rebuildIndex();
        if (isNonTX) con.commit();
    } catch (Exception e) {
        if (isNonTX) con.rollback();
        if (inner != null) {
            LOG.error(e);
            throw inner;
        }
        throw e;
    } finally {
        if (isNonTX) {
            con.commit();
            con.setAutoCommit(autoCommit);
            Database.closeObjects(GenericDBStorage.class, con, null);
        }
        LOG.info(" Importing took " + FxFormatUtils.formatTimeSpan((System.currentTimeMillis() - startTime)));
    }
}
From source file:com.wso2telco.dep.mediator.dao.SMSMessagingDAO.java
/**
 * Subscription delete.
 *
 * @param moSubscriptionId
 *            the moSubscriptionId
 * @return true, if successful
 * @throws Exception
 *             the exception
 */
public void subscriptionDelete(Integer moSubscriptionId) throws SQLException, Exception {
    Connection con = DbUtils.getDbConnection(DataSourceNames.WSO2TELCO_DEP_DB);
    PreparedStatement deleteSubscriptionsStatement = null;
    PreparedStatement deleteOperatorSubscriptionsStatement = null;
    try {
        if (con == null) {
            throw new Exception("Connection not found");
        }
        /**
         * Set autocommit off to handle the transaction
         */
        con.setAutoCommit(false);
        StringBuilder deleteSubscriptionsQueryString = new StringBuilder("DELETE FROM ");
        deleteSubscriptionsQueryString.append(DatabaseTables.SUBSCRIPTIONS.getTableName());
        deleteSubscriptionsQueryString.append(" WHERE mo_subscription_did = ?");
        deleteSubscriptionsStatement = con.prepareStatement(deleteSubscriptionsQueryString.toString());
        deleteSubscriptionsStatement.setInt(1, moSubscriptionId);
        log.debug("sql query in subscriptionDelete : " + deleteSubscriptionsStatement);
        deleteSubscriptionsStatement.executeUpdate();
        StringBuilder deleteOperatorSubscriptionsQueryString = new StringBuilder("DELETE FROM ");
        deleteOperatorSubscriptionsQueryString.append(DatabaseTables.OPERATORSUBS.getTableName());
        deleteOperatorSubscriptionsQueryString.append(" WHERE mo_subscription_did = ?");
        deleteOperatorSubscriptionsStatement = con
                .prepareStatement(deleteOperatorSubscriptionsQueryString.toString());
        deleteOperatorSubscriptionsStatement.setInt(1, moSubscriptionId);
        log.debug("sql query in subscriptionDelete : " + deleteOperatorSubscriptionsStatement);
        deleteOperatorSubscriptionsStatement.executeUpdate();
        /**
         * commit the transaction if all success
         */
        con.commit();
    } catch (SQLException e) {
        /**
         * rollback if Exception occurs
         */
        con.rollback();
        log.error("database operation error in subscriptionDelete : ", e);
        throw e;
    } catch (Exception e) {
        /**
         * rollback if Exception occurs
         */
        con.rollback();
        log.error("error in subscriptionDelete : ", e);
        throw e;
    } finally {
        DbUtils.closeAllConnections(deleteSubscriptionsStatement, con, null);
        DbUtils.closeAllConnections(deleteOperatorSubscriptionsStatement, null, null);
    }
}
From source file:mom.trd.opentheso.bdd.helper.TermHelper.java
/**
 * This function deletes non-descriptor (non-preferred) terms or synonyms.
 *
 * @param ds
 * @param idTerm
 * @param idLang
 * @param lexicalValue
 * @param idThesaurus
 * @param stat
 * @param idUser
 * @return
 */
public boolean deleteNonPreferedTerm(HikariDataSource ds, String idTerm, String idLang, String lexicalValue,
        String idThesaurus, String stat, int idUser) {
    Connection conn;
    Statement stmt;
    boolean status = false;
    try {
        // Get connection from pool
        conn = ds.getConnection();
        try {
            stmt = conn.createStatement();
            try {
                Term term = new Term();
                term.setLexical_value(lexicalValue);
                term.setId_thesaurus(idThesaurus);
                term.setLang(idLang);
                term.setId_term(idTerm);
                term.setStatus(stat);
                if (!addUSEHistorique(conn, term, idUser, "DEL")) {
                    conn.rollback();
                    conn.close();
                    return false;
                }
                String query = "delete from non_preferred_term where" + " id_thesaurus = '" + idThesaurus + "'"
                        + " and id_term = '" + idTerm + "'" + " and lexical_value = '" + lexicalValue + "'"
                        + " and lang = '" + idLang + "'";
                stmt.executeUpdate(query);
                status = true;
            } finally {
                stmt.close();
            }
        } finally {
            conn.close();
        }
    } catch (SQLException sqle) {
        // Log exception
        log.error("Error while deleting NonPreferedTerm of Term : " + idTerm, sqle);
    }
    return status;
}
From source file:org.apache.hadoop.hive.metastore.txn.TestTxnHandler.java
@Test
@Ignore
public void deadlockDetected() throws Exception {
    LOG.debug("Starting deadlock test");
    Connection conn = txnHandler.getDbConn(Connection.TRANSACTION_SERIALIZABLE);
    Statement stmt = conn.createStatement();
    long now = txnHandler.getDbTime(conn);
    stmt.executeUpdate("insert into TXNS (txn_id, txn_state, txn_started, txn_last_heartbeat, "
            + "txn_user, txn_host) values (1, 'o', " + now + ", " + now + ", 'shagy', " + "'scooby.com')");
    stmt.executeUpdate("insert into HIVE_LOCKS (hl_lock_ext_id, hl_lock_int_id, hl_txnid, "
            + "hl_db, hl_table, hl_partition, hl_lock_state, hl_lock_type, hl_last_heartbeat, "
            + "hl_user, hl_host) values (1, 1, 1, 'mydb', 'mytable', 'mypartition', '"
            + txnHandler.LOCK_WAITING + "', '" + txnHandler.LOCK_EXCLUSIVE + "', " + now + ", 'fred', "
            + "'scooby.com')");
    conn.commit();
    txnHandler.closeDbConn(conn);
    final AtomicBoolean sawDeadlock = new AtomicBoolean();
    final Connection conn1 = txnHandler.getDbConn(Connection.TRANSACTION_SERIALIZABLE);
    final Connection conn2 = txnHandler.getDbConn(Connection.TRANSACTION_SERIALIZABLE);
    try {
        for (int i = 0; i < 5; i++) {
            Thread t1 = new Thread() {
                @Override
                public void run() {
                    try {
                        try {
                            updateTxns(conn1);
                            updateLocks(conn1);
                            Thread.sleep(1000);
                            conn1.commit();
                            LOG.debug("no exception, no deadlock");
                        } catch (SQLException e) {
                            try {
                                txnHandler.checkRetryable(conn1, e, "thread t1");
                                LOG.debug("Got an exception, but not a deadlock, SQLState is " + e.getSQLState()
                                        + " class of exception is " + e.getClass().getName() + " msg is <"
                                        + e.getMessage() + ">");
                            } catch (TxnHandler.RetryException de) {
                                LOG.debug("Forced a deadlock, SQLState is " + e.getSQLState() + " class of "
                                        + "exception is " + e.getClass().getName() + " msg is <"
                                        + e.getMessage() + ">");
                                sawDeadlock.set(true);
                            }
                        }
                        conn1.rollback();
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            };
            Thread t2 = new Thread() {
                @Override
                public void run() {
                    try {
                        try {
                            updateLocks(conn2);
                            updateTxns(conn2);
                            Thread.sleep(1000);
                            conn2.commit();
                            LOG.debug("no exception, no deadlock");
                        } catch (SQLException e) {
                            try {
                                txnHandler.checkRetryable(conn2, e, "thread t2");
                                LOG.debug("Got an exception, but not a deadlock, SQLState is " + e.getSQLState()
                                        + " class of exception is " + e.getClass().getName() + " msg is <"
                                        + e.getMessage() + ">");
                            } catch (TxnHandler.RetryException de) {
                                LOG.debug("Forced a deadlock, SQLState is " + e.getSQLState() + " class of "
                                        + "exception is " + e.getClass().getName() + " msg is <"
                                        + e.getMessage() + ">");
                                sawDeadlock.set(true);
                            }
                        }
                        conn2.rollback();
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            };
            t1.start();
            t2.start();
            t1.join();
            t2.join();
            if (sawDeadlock.get())
                break;
        }
        assertTrue(sawDeadlock.get());
    } finally {
        conn1.rollback();
        txnHandler.closeDbConn(conn1);
        conn2.rollback();
        txnHandler.closeDbConn(conn2);
    }
}
From source file:com.stgmastek.core.purge.PurgeBatchDetails.java
/**
 * The method to delete batch details
 * <p>
 * Deletes the batch details from tables in the following order
 * </p>
 * <li>SYSTEM_INFO</li><li>LOG</li><li>INSTRUCTION_PARAMETERS</li><li>INSTRUCTION_LOG</li>
 * <li>PROGRESS_LEVEL</li><li>BATCH</li>
 *
 * @param batchNoList
 *            Batch numbers list
 */
private void deleteBatchDetails(Connection con, ArrayList<String> batchNoList) throws SQLException {
    PreparedStatement psUpdate = null;
    int count = 1;
    out.println("Deleting batch details started..");
    try {
        con.setAutoCommit(false);
        for (String strBacthNo : batchNoList) {
            log("SYSTEM_INFO", count, batchNoList.size(), strBacthNo, 2);
            psUpdate = con.prepareStatement("delete from SYSTEM_INFO WHERE BATCH_NO = ?");
            psUpdate.setString(1, strBacthNo);
            psUpdate.executeUpdate();
            con.setAutoCommit(true);
            psUpdate.close();
            log("LOG", count, batchNoList.size(), strBacthNo, 2);
            psUpdate = con.prepareStatement("delete from LOG WHERE BATCH_NO = ?");
            psUpdate.setString(1, strBacthNo);
            psUpdate.executeUpdate();
            psUpdate.close();
            log("INSTRUCTION_PARAMETERS", count, batchNoList.size(), strBacthNo, 2);
            psUpdate = con.prepareStatement(
                    "delete from INSTRUCTION_PARAMETERS b where exists (select a.seq_no from instruction_log a where a.seq_no = b.instruction_log_no and a.batch_no = ? )");
            psUpdate.setString(1, strBacthNo);
            psUpdate.executeUpdate();
            psUpdate.close();
            log("INSTRUCTION_LOG", count, batchNoList.size(), strBacthNo, 2);
            psUpdate = con.prepareStatement("delete from INSTRUCTION_LOG WHERE BATCH_NO = ?");
            psUpdate.setString(1, strBacthNo);
            psUpdate.executeUpdate();
            psUpdate.close();
            log("PROGRESS_LEVEL", count, batchNoList.size(), strBacthNo, 2);
            psUpdate = con.prepareStatement("delete from PROGRESS_LEVEL WHERE BATCH_NO = ?");
            psUpdate.setString(1, strBacthNo);
            psUpdate.executeUpdate();
            psUpdate.close();
            log("BATCH", count, batchNoList.size(), strBacthNo, 2);
            psUpdate = con.prepareStatement("delete from BATCH WHERE BATCH_NO = ?");
            psUpdate.setString(1, strBacthNo);
            psUpdate.executeUpdate();
            psUpdate.close();
            count++;
        }
        out.println("Deleting batch details completed.");
        con.commit();
    } catch (SQLException e) {
        logger.error("SQLException in getting batch details based on installation codes ", e);
        try {
            con.rollback();
        } catch (SQLException e2) {
        }
        throw e;
    } finally {
        try {
            if (psUpdate != null) {
                psUpdate.close();
            }
        } catch (SQLException e) {
        }
        try {
            con.setAutoCommit(true);
        } catch (SQLException e2) {
        }
    }
}