List of usage examples for java.sql PreparedStatement addBatch
void addBatch() throws SQLException;
Adds a set of parameters to this PreparedStatement object's batch of commands.
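Before the real-world examples, here is a minimal sketch of the pattern they all share: bind parameters for one row, queue it with addBatch(), repeat, then flush everything with executeBatch() inside a transaction. The "person(id, name)" table, the BatchInsertSketch class, and the insertPeople method are hypothetical names used only for illustration; the caller is assumed to supply an open Connection.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public final class BatchInsertSketch {

    /**
     * Inserts every name into a hypothetical "person(id, name)" table in a single batch.
     * The table and columns are illustrative only.
     */
    public static int[] insertPeople(Connection con, List<String> names) throws SQLException {
        String sql = "INSERT INTO person (id, name) VALUES (?, ?)";
        boolean previousAutoCommit = con.getAutoCommit();
        con.setAutoCommit(false); // group the whole batch into one transaction
        try (PreparedStatement ps = con.prepareStatement(sql)) {
            for (int i = 0; i < names.size(); i++) {
                ps.setInt(1, i + 1);            // bind parameters for one row
                ps.setString(2, names.get(i));
                ps.addBatch();                  // queue the bound row; nothing is sent yet
            }
            int[] updateCounts = ps.executeBatch(); // send all queued rows at once
            con.commit();
            return updateCounts;
        } catch (SQLException e) {
            con.rollback();                     // undo the partial batch on failure
            throw e;
        } finally {
            con.setAutoCommit(previousAutoCommit);
        }
    }
}

Several of the examples below also call executeBatch() periodically (every N rows) so the batch does not grow without bound; the sketch sends everything in one call for brevity.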
From source file:org.openbel.framework.core.kam.JdbcKAMLoaderImpl.java

/**
 * {@inheritDoc}
 */
@Override
public void loadEdges(StatementTable st, TermTable tt, ProtoNodeTable pnt, ProtoEdgeTable pet)
        throws SQLException {
    // load kam edges and associate to kam nodes (global terms)
    PreparedStatement keps = getPreparedStatement(KAM_EDGE_SQL);
    final Map<Integer, Integer> eqn = pnt.getEquivalences();
    final Map<Integer, Integer> eqs = pet.getEquivalences();
    final List<TableProtoEdge> edges = pet.getProtoEdges();
    Set<Integer> added = new HashSet<Integer>();
    for (int i = 0, n = edges.size(); i < n; i++) {
        final Integer eqId = eqs.get(i);
        // continue if we have already seen this equivalent proto edge
        if (added.contains(eqId)) {
            continue;
        }
        added.add(eqId);
        final TableProtoEdge edge = edges.get(i);
        // XXX offset
        keps.setInt(1, eqId + 1);
        // XXX offset
        keps.setInt(2, eqn.get(edge.getSource()) + 1);
        // XXX offset
        keps.setInt(3, eqn.get(edge.getTarget()) + 1);
        RelationshipType r = RelationshipType.getRelationshipType(edge.getRel());
        keps.setInt(4, r.getValue());
        keps.addBatch();
    }
    keps.executeBatch();

    // load statements
    final List<StatementTable.TableStatement> ts = st.getStatements();
    final Map<Integer, Integer> sdm = st.getStatementDocument();
    PreparedStatement sps = getPreparedStatement(STATEMENT_SQL);
    for (int i = 0, n = ts.size(); i < n; i++) {
        final TableStatement stmt = ts.get(i);
        // XXX offset
        sps.setInt(1, i + 1);
        // XXX offset
        sps.setInt(2, sdm.get(i) + 1);
        // XXX offset
        sps.setInt(3, stmt.getSubjectTermId() + 1);
        if (stmt.getRelationshipName() == null) {
            // load definitional statement
            sps.setNull(4, Types.INTEGER);
            sps.setNull(5, Types.INTEGER);
            sps.setNull(6, Types.INTEGER);
            sps.setNull(7, Types.INTEGER);
            sps.setNull(8, Types.INTEGER);
        } else if (stmt.getObjectTermId() != null) {
            // load simple statement
            RelationshipType r = RelationshipType.getRelationshipType(stmt.getRelationshipName());
            sps.setInt(4, r.getValue());
            // XXX offset
            sps.setInt(5, stmt.getObjectTermId() + 1);
            sps.setNull(6, Types.INTEGER);
            sps.setNull(7, Types.INTEGER);
            sps.setNull(8, Types.INTEGER);
        } else {
            // load nested statement
            RelationshipType r = RelationshipType.getRelationshipType(stmt.getRelationshipName());
            sps.setInt(4, r.getValue());
            // set null for object term since this is a nested statement
            sps.setNull(5, Types.INTEGER);
            // XXX offset
            sps.setInt(6, stmt.getNestedSubject() + 1);
            RelationshipType nr = RelationshipType.getRelationshipType(stmt.getNestedRelationship());
            sps.setInt(7, nr.getValue());
            // XXX offset
            sps.setInt(8, stmt.getNestedObject() + 1);
        }
        sps.addBatch();
    }
    sps.executeBatch();

    // load many-to-many association of edges to statements
    PreparedStatement skes = getPreparedStatement(KAM_EDGE_STATEMENT_SQL);
    final Map<Integer, Set<Integer>> edgeStmts = pet.getEdgeStatements();
    added.clear();
    for (int i = 0, n = edges.size(); i < n; i++) {
        final Integer eqId = eqs.get(i);
        // continue if we have already seen this equivalent proto edge
        if (added.contains(eqId)) {
            continue;
        }
        added.add(eqId);
        // retrieve statements for this edge
        final Set<Integer> stmtIds = edgeStmts.get(i);
        // if we have the edge, then assert that we have its statements
        assert stmtIds != null && !stmtIds.isEmpty();
        for (final Integer stmtId : stmtIds) {
            // XXX offset
            skes.setInt(1, eqId + 1);
            // XXX offset
            skes.setInt(2, stmtId + 1);
            skes.addBatch();
        }
    }
    skes.executeBatch();
}
From source file:com.esofthead.mycollab.module.project.service.ibatis.GanttAssignmentServiceImpl.java
private void massUpdateTaskGanttItems(final List<TaskGanttItem> taskGanttItems, Integer sAccountId) {
    if (CollectionUtils.isNotEmpty(taskGanttItems)) {
        Lock lock = DistributionLockUtil.getLock("gantt-task-service" + sAccountId);
        try {
            final long now = new GregorianCalendar().getTimeInMillis();
            if (lock.tryLock(30, TimeUnit.SECONDS)) {
                try (Connection connection = dataSource.getConnection()) {
                    connection.setAutoCommit(false);
                    PreparedStatement batchTasksStatement = connection.prepareStatement(
                            "UPDATE `m_prj_task` SET " + "taskname = ?, `startdate` = ?, `enddate` = ?, "
                                    + "`lastUpdatedTime`=?, `percentagecomplete`=?, `assignUser`=?, `ganttindex`=?, "
                                    + "`milestoneId`=?, `parentTaskId`=? WHERE `id` = ?");
                    for (int i = 0; i < taskGanttItems.size(); i++) {
                        TaskGanttItem ganttItem = taskGanttItems.get(i);
                        if (ProjectTypeConstants.TASK.equals(ganttItem.getType())) {
                            batchTasksStatement.setString(1, ganttItem.getName());
                            batchTasksStatement.setDate(2, getDateWithNullValue(ganttItem.getStartDate()));
                            batchTasksStatement.setDate(3, getDateWithNullValue(ganttItem.getEndDate()));
                            batchTasksStatement.setDate(4, new Date(now));
                            batchTasksStatement.setDouble(5, ganttItem.getProgress());
                            batchTasksStatement.setString(6, ganttItem.getAssignUser());
                            batchTasksStatement.setInt(7, ganttItem.getGanttIndex());
                            batchTasksStatement.setObject(8, ganttItem.getMilestoneId());
                            batchTasksStatement.setObject(9, ganttItem.getParentTaskId());
                            batchTasksStatement.setInt(10, ganttItem.getId());
                            batchTasksStatement.addBatch();
                        }
                    }
                    batchTasksStatement.executeBatch();
                    connection.commit();
                }
            }
        } catch (Exception e) {
            throw new MyCollabException(e);
        } finally {
            DistributionLockUtil.removeLock("gantt-task-service" + sAccountId);
            lock.unlock();
        }
    }
}
From source file:org.wso2.carbon.is.migration.client.MigrateFrom5to510.java
/**
 * migrate data in the identity database and finalize the database table restructuring
 */
public void migrateIdentityData() {
    Connection identityConnection = null;
    PreparedStatement selectFromAccessTokenPS = null;
    PreparedStatement insertScopeAssociationPS = null;
    PreparedStatement insertTokenScopeHashPS = null;
    PreparedStatement insertTokenIdPS = null;
    PreparedStatement updateUserNamePS = null;
    PreparedStatement selectFromAuthorizationCodePS = null;
    PreparedStatement updateUserNameAuthorizationCodePS = null;
    PreparedStatement selectIdnAssociatedIdPS = null;
    PreparedStatement updateIdnAssociatedIdPS = null;
    PreparedStatement selectConsumerAppsPS = null;
    PreparedStatement updateConsumerAppsPS = null;
    ResultSet accessTokenRS = null;
    ResultSet authzCodeRS = null;
    ResultSet selectIdnAssociatedIdRS = null;
    ResultSet selectConsumerAppsRS = null;
    try {
        identityConnection = dataSource.getConnection();
        identityConnection.setAutoCommit(false);

        selectConsumerAppsPS = identityConnection.prepareStatement(SQLQueries.SELECT_FROM_CONSUMER_APPS);
        updateConsumerAppsPS = identityConnection.prepareStatement(SQLQueries.UPDATE_CONSUMER_APPS);
        selectConsumerAppsRS = selectConsumerAppsPS.executeQuery();
        while (selectConsumerAppsRS.next()) {
            int id = selectConsumerAppsRS.getInt("ID");
            String username = selectConsumerAppsRS.getString("USERNAME");
            String userDomainFromDB = selectConsumerAppsRS.getString("USER_DOMAIN");
            if (userDomainFromDB == null) {
                String userDomain = UserCoreUtil.extractDomainFromName(username);
                username = UserCoreUtil.removeDomainFromName(username);
                updateConsumerAppsPS.setString(1, username);
                updateConsumerAppsPS.setString(2, userDomain);
                updateConsumerAppsPS.setInt(3, id);
                updateConsumerAppsPS.addBatch();
            }
        }
        updateConsumerAppsPS.executeBatch();

        String selectFromAccessToken = SQLQueries.SELECT_FROM_ACCESS_TOKEN;
        selectFromAccessTokenPS = identityConnection.prepareStatement(selectFromAccessToken);
        String insertScopeAssociation = SQLQueries.INSERT_SCOPE_ASSOCIATION;
        insertScopeAssociationPS = identityConnection.prepareStatement(insertScopeAssociation);
        String insertTokenScopeHash = SQLQueries.INSERT_TOKEN_SCOPE_HASH;
        insertTokenScopeHashPS = identityConnection.prepareStatement(insertTokenScopeHash);
        String insertTokenId = SQLQueries.INSERT_TOKEN_ID;
        insertTokenIdPS = identityConnection.prepareStatement(insertTokenId);
        String updateUserName = SQLQueries.UPDATE_USER_NAME;
        updateUserNamePS = identityConnection.prepareStatement(updateUserName);
        accessTokenRS = selectFromAccessTokenPS.executeQuery();
        while (accessTokenRS.next()) {
            String accessToken = null;
            try {
                accessToken = accessTokenRS.getString("ACCESS_TOKEN");
                String scopeString = accessTokenRS.getString("TOKEN_SCOPE");
                String authzUser = accessTokenRS.getString("AUTHZ_USER");
                String tokenIdFromDB = accessTokenRS.getString("TOKEN_ID");
                if (tokenIdFromDB == null) {
                    String tokenId = UUID.randomUUID().toString();
                    String username = UserCoreUtil
                            .removeDomainFromName(MultitenantUtils.getTenantAwareUsername(authzUser));
                    String userDomain = UserCoreUtil.extractDomainFromName(authzUser);
                    int tenantId = ISMigrationServiceDataHolder.getRealmService().getTenantManager()
                            .getTenantId(MultitenantUtils.getTenantDomain(authzUser));
                    insertTokenIdPS.setString(1, tokenId);
                    insertTokenIdPS.setString(2, accessToken);
                    insertTokenIdPS.addBatch();
                    updateUserNamePS.setString(1, username);
                    updateUserNamePS.setInt(2, tenantId);
                    updateUserNamePS.setString(3, userDomain);
                    updateUserNamePS.setString(4, authzUser);
                    updateUserNamePS.setString(5, accessToken);
                    updateUserNamePS.addBatch();
                    insertTokenScopeHashPS.setString(1, DigestUtils.md5Hex(scopeString));
                    insertTokenScopeHashPS.setString(2, accessToken);
                    insertTokenScopeHashPS.addBatch();
                    if (scopeString != null) {
                        String scopes[] = scopeString.split(" ");
                        for (String scope : scopes) {
                            insertScopeAssociationPS.setString(1, tokenId);
                            insertScopeAssociationPS.setString(2, scope);
                            insertScopeAssociationPS.addBatch();
                        }
                    }
                }
            } catch (UserStoreException e) {
                log.warn("Error while migrating access token : " + accessToken);
            }
        }

        String selectFromAuthorizationCode = SQLQueries.SELECT_FROM_AUTHORIZATION_CODE;
        selectFromAuthorizationCodePS = identityConnection.prepareStatement(selectFromAuthorizationCode);
        String updateUserNameAuthorizationCode = SQLQueries.UPDATE_USER_NAME_AUTHORIZATION_CODE;
        updateUserNameAuthorizationCodePS = identityConnection
                .prepareStatement(updateUserNameAuthorizationCode);
        authzCodeRS = selectFromAuthorizationCodePS.executeQuery();
        while (authzCodeRS.next()) {
            String authorizationCode = null;
            try {
                authorizationCode = authzCodeRS.getString("AUTHORIZATION_CODE");
                String authzUser = authzCodeRS.getString("AUTHZ_USER");
                String userDomainFromDB = authzCodeRS.getString("USER_DOMAIN");
                if (userDomainFromDB == null) {
                    String username = UserCoreUtil
                            .removeDomainFromName(MultitenantUtils.getTenantAwareUsername(authzUser));
                    String userDomain = UserCoreUtil.extractDomainFromName(authzUser);
                    int tenantId = ISMigrationServiceDataHolder.getRealmService().getTenantManager()
                            .getTenantId(MultitenantUtils.getTenantDomain(authzUser));
                    updateUserNameAuthorizationCodePS.setString(1, username);
                    updateUserNameAuthorizationCodePS.setInt(2, tenantId);
                    updateUserNameAuthorizationCodePS.setString(3, userDomain);
                    updateUserNameAuthorizationCodePS.setString(4, UUID.randomUUID().toString());
                    updateUserNameAuthorizationCodePS.setString(5, authzUser);
                    updateUserNameAuthorizationCodePS.setString(6, authorizationCode);
                    updateUserNameAuthorizationCodePS.addBatch();
                }
            } catch (UserStoreException e) {
                log.warn("Error while migrating authorization code : " + authorizationCode);
            }
        }

        insertTokenIdPS.executeBatch();
        insertScopeAssociationPS.executeBatch();
        updateUserNamePS.executeBatch();
        insertTokenScopeHashPS.executeBatch();
        updateUserNameAuthorizationCodePS.executeBatch();

        String selectIdnAssociatedId = SQLQueries.SELECT_IDN_ASSOCIATED_ID;
        selectIdnAssociatedIdPS = identityConnection.prepareStatement(selectIdnAssociatedId);
        selectIdnAssociatedIdRS = selectIdnAssociatedIdPS.executeQuery();
        updateIdnAssociatedIdPS = identityConnection.prepareStatement(SQLQueries.UPDATE_IDN_ASSOCIATED_ID);
        while (selectIdnAssociatedIdRS.next()) {
            int id = selectIdnAssociatedIdRS.getInt("ID");
            String username = selectIdnAssociatedIdRS.getString("USER_NAME");
            String userDomainFromDB = selectIdnAssociatedIdRS.getString("DOMAIN_NAME");
            if (userDomainFromDB == null) {
                updateIdnAssociatedIdPS.setString(1, UserCoreUtil.extractDomainFromName(username));
                updateIdnAssociatedIdPS.setString(2, UserCoreUtil.removeDomainFromName(username));
                updateIdnAssociatedIdPS.setInt(3, id);
                updateIdnAssociatedIdPS.addBatch();
            }
        }
        updateIdnAssociatedIdPS.executeBatch();

        identityConnection.commit();
    } catch (SQLException e) {
        IdentityDatabaseUtil.rollBack(identityConnection);
        log.error(e);
    } catch (Exception e) {
        log.error(e);
    } finally {
        IdentityDatabaseUtil.closeResultSet(accessTokenRS);
        IdentityDatabaseUtil.closeResultSet(authzCodeRS);
        IdentityDatabaseUtil.closeResultSet(selectIdnAssociatedIdRS);
        IdentityDatabaseUtil.closeResultSet(selectConsumerAppsRS);
        IdentityDatabaseUtil.closeStatement(selectFromAccessTokenPS);
        IdentityDatabaseUtil.closeStatement(insertScopeAssociationPS);
        IdentityDatabaseUtil.closeStatement(insertTokenIdPS);
        IdentityDatabaseUtil.closeStatement(updateUserNamePS);
        IdentityDatabaseUtil.closeStatement(insertTokenScopeHashPS);
        IdentityDatabaseUtil.closeStatement(updateUserNameAuthorizationCodePS);
        IdentityDatabaseUtil.closeStatement(selectFromAuthorizationCodePS);
        IdentityDatabaseUtil.closeStatement(selectIdnAssociatedIdPS);
        IdentityDatabaseUtil.closeStatement(updateIdnAssociatedIdPS);
        IdentityDatabaseUtil.closeStatement(selectConsumerAppsPS);
        IdentityDatabaseUtil.closeStatement(updateConsumerAppsPS);
        IdentityDatabaseUtil.closeConnection(identityConnection);
    }
}
From source file:org.entrystore.rowstore.store.impl.PgDataset.java
/**
 * @see Dataset#populate(File)
 */
@Override
public boolean populate(File csvFile) throws IOException {
    if (csvFile == null) {
        throw new IllegalArgumentException("Argument must not be null");
    }
    String dataTable = getDataTable();
    if (dataTable == null) {
        log.error("Dataset has no data table assigned");
        return false;
    }
    setStatus(EtlStatus.PROCESSING);
    Connection conn = null;
    PreparedStatement stmt = null;
    CSVReader cr = null;
    try {
        conn = rowstore.getConnection();
        cr = new CSVReader(new FileReader(csvFile), ',', '"');
        int lineCount = 0;
        String[] labels = null;
        String[] line;
        conn.setAutoCommit(false);
        stmt = conn.prepareStatement("INSERT INTO " + dataTable + " (rownr, data) VALUES (?, ?)");
        while ((line = cr.readNext()) != null) {
            if (lineCount == 0) {
                labels = line;
            } else {
                JSONObject jsonLine = null;
                try {
                    jsonLine = csvLineToJsonObject(line, labels);
                } catch (Exception e) {
                    log.error(e.getMessage());
                    log.info("Rolling back transaction");
                    conn.rollback();
                    setStatus(EtlStatus.ERROR);
                    return false;
                }
                stmt.setInt(1, lineCount);
                PGobject jsonb = new PGobject();
                jsonb.setType("jsonb");
                jsonb.setValue(jsonLine.toString());
                stmt.setObject(2, jsonb);
                log.debug("Adding to batch: " + stmt);
                stmt.addBatch();
                // we execute the batch every 100th line
                if ((lineCount % 100) == 0) {
                    log.debug("Executing: " + stmt);
                    stmt.executeBatch();
                }
            }
            lineCount++;
        }
        // in case there are some inserts left to be sent (i.e.
        // batch size above was smaller than 100 when loop ended)
        log.debug("Executing: " + stmt);
        stmt.executeBatch();
        // we create an index over the data
        createIndex(conn, dataTable, labels);
        // we commit the transaction and free the resources of the statement
        conn.commit();
        setStatus(EtlStatus.AVAILABLE);
        return true;
    } catch (SQLException e) {
        SqlExceptionLogUtil.error(log, e);
        try {
            log.info("Rolling back transaction");
            conn.rollback();
        } catch (SQLException e1) {
            SqlExceptionLogUtil.error(log, e1);
        }
        setStatus(EtlStatus.ERROR);
        return false;
    } finally {
        if (cr != null) {
            try {
                cr.close();
            } catch (IOException e) {
                log.error(e.getMessage());
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException e) {
                SqlExceptionLogUtil.error(log, e);
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                SqlExceptionLogUtil.error(log, e);
            }
        }
    }
}
From source file:org.bml.util.errorconsumer.ParseErrorWorkerThread.java
/**
 * TODO: Add a temp ordered list to store ParseError objects as they are
 * taken from the queue and log if any rows are rejected by the DB server
 * TODO: abstract out the handleDBEntry base logic and use <T> for entry and
 * a static method for marshaling into a Prepared Statement (Consider adding
 * the marshal method to a TABLE definition object).
 */
public void handleDBEntry() {
    Connection myConnection = null;
    PreparedStatement myPreparedStatement = null;
    Connection myPageViewConnection = null;
    int batchExecutionResults[] = null;
    List<ParseError> theBatchTrackingList = new LinkedList<ParseError>();
    //DeviceType aDeviceType = null;
    //DeviceClass aDeviceClass = null;
    //Change to reusable map
    Map<String, String> tmpMap = null;
    //Change to StringBuilder
    //String tmpString = null;
    //theBatchTrackingList = new ArrayList<PageViewData>(dataQueue.size());
    boolean dbErrror = false;
    try {
        ParseError aParseError = null;
        try {
            aParseError = errorQueue.remove();
            theBatchTrackingList.add(aParseError);
        } catch (NoSuchElementException e) {
            LOG.info("There are no ParseError Objects to push into the DB");
            return;
        }
        StopWatch connectionAge = new StopWatch();
        connectionAge.start();
        setWorkerState(WORKER_STATE.ACQUIRING_CONNECTION);
        myConnection = DBUtil.getDefaultDataSource().getConnection();
        setWorkerState(WORKER_STATE.CONFIGURING_CONNECTION);
        myConnection.clearWarnings();
        myConnection.setAutoCommit(false);
        setWorkerState(WORKER_STATE.PREPARING_SQL);
        myPreparedStatement = myConnection.prepareStatement(ParseErrorTable.PREPARED_INSERT_SQL);
        setWorkerState(WORKER_STATE.BATCHING);
        while ((connectionAge.getTime() / 1000) <= 20) {
            ParseErrorTable.populatePreparedStatement(myPreparedStatement, aParseError.toParamMap(), Boolean.FALSE);
            myPreparedStatement.addBatch();
            try {
                aParseError = errorQueue.remove();
                theBatchTrackingList.add(aParseError);
            } catch (NoSuchElementException e) {
                break;
            }
        }
        this.setWorkerState(WORKER_STATE.EXECUTING_BATCH);
        batchExecutionResults = myPreparedStatement.executeBatch();
        myConnection.commit();
        this.setWorkerState(WORKER_STATE.VERIFYING_BATCH);
        if (batchExecutionResults.length != theBatchTrackingList.size()) {
        }
    } catch (SQLException sqle) {
        if (LOG.isFatalEnabled()) {
            LOG.fatal(
                    "SQLException caught. The ErrorConsumer is unable to push data to a database. ParseErrors will be dumped to /tmp/error_consumer/",
                    sqle);
        }
    } catch (Exception e) {
        if (LOG.isFatalEnabled()) {
            LOG.fatal(
                    "Exception caught. The ErrorConsumer is unable to push data to a database. Errors will be dumped to /tmp/error_consumer/",
                    e);
        }
    } finally {
        DbUtils.closeQuietly(myPreparedStatement);
        DbUtils.closeQuietly(myConnection);
    }
}
From source file:com.wso2telco.dep.operatorservice.dao.OperatorDAO.java
public void insertBlacklistAggregatoRows(final Integer appID, final String subscriber, final int operatorid,
        final String[] merchants) throws SQLException, Exception {
    Connection con = null;
    final StringBuilder sql = new StringBuilder();
    PreparedStatement pst = null;
    try {
        con = DbUtils.getDbConnection(DataSourceNames.WSO2TELCO_DEP_DB);
        sql.append(" INSERT INTO ");
        sql.append(OparatorTable.MERCHANT_OPCO_BLACKLIST.getTObject());
        sql.append(" (application_id, operator_id, subscriber, merchant)");
        sql.append("VALUES (?, ?, ?, ?) ");
        pst = con.prepareStatement(sql.toString());
        /**
         * Set autocommit off to handle the transaction
         */
        con.setAutoCommit(false);
        /**
         * each merchant log as black listed
         */
        for (String merchant : merchants) {
            if (appID == null) {
                pst.setNull(1, Types.INTEGER);
            } else {
                pst.setInt(1, appID);
            }
            pst.setInt(2, operatorid);
            pst.setString(3, subscriber);
            pst.setString(4, merchant);
            pst.addBatch();
        }
        log.debug("sql query in insertBlacklistAggregatoRows : " + pst);
        pst.executeBatch();
        /**
         * commit the transaction if all success
         */
        con.commit();
    } catch (SQLException e) {
        log.error("database operation error in insertBlacklistAggregatoRows : ", e);
        /**
         * rollback if Exception occurs
         */
        con.rollback();
        /**
         * throw it into upper layer
         */
        throw e;
    } catch (Exception e) {
        log.error("error in insertBlacklistAggregatoRows : ", e);
        /**
         * rollback if Exception occurs
         */
        con.rollback();
        /**
         * throw it into upper layer
         */
        throw e;
    } finally {
        DbUtils.closeAllConnections(pst, con, null);
    }
}
From source file:org.nuxeo.ecm.core.storage.sql.jdbc.JDBCRowMapper.java
/**
 * Inserts multiple rows, all for the same table.
 */
protected void insertSimpleRows(String tableName, List<Row> rows) throws StorageException {
    if (rows.isEmpty()) {
        return;
    }
    String sql = sqlInfo.getInsertSql(tableName);
    if (sql == null) {
        throw new StorageException("Unknown table: " + tableName);
    }
    String loggedSql = supportsBatchUpdates && rows.size() > 1 ? sql + " -- BATCHED" : sql;
    List<Column> columns = sqlInfo.getInsertColumns(tableName);
    try {
        PreparedStatement ps = connection.prepareStatement(sql);
        try {
            int batch = 0;
            for (Row row : rows) {
                batch++;
                if (logger.isLogEnabled()) {
                    logger.logSQL(loggedSql, columns, row);
                }
                int i = 1;
                for (Column column : columns) {
                    column.setToPreparedStatement(ps, i++, row.get(column.getKey()));
                }
                if (supportsBatchUpdates) {
                    ps.addBatch();
                    if (batch % UPDATE_BATCH_SIZE == 0) {
                        ps.executeBatch();
                        countExecute();
                    }
                } else {
                    ps.execute();
                    countExecute();
                }
            }
            if (supportsBatchUpdates) {
                ps.executeBatch();
                countExecute();
            }
        } finally {
            closeStatement(ps);
        }
    } catch (Exception e) {
        checkConnectionReset(e);
        if (e instanceof BatchUpdateException) {
            BatchUpdateException bue = (BatchUpdateException) e;
            if (e.getCause() == null && bue.getNextException() != null) {
                // provide a readable cause in the stack trace
                e.initCause(bue.getNextException());
            }
        }
        checkConcurrentUpdate(e);
        throw new StorageException("Could not insert: " + sql, e);
    }
}
From source file:com.liferay.portal.upgrade.util.Table.java
public void populateTable(String tempFileName, Connection con) throws Exception {
    PreparedStatement ps = null;
    String insertSQL = getInsertSQL();
    UnsyncBufferedReader unsyncBufferedReader = new UnsyncBufferedReader(new FileReader(tempFileName));
    String line = null;
    try {
        DatabaseMetaData databaseMetaData = con.getMetaData();
        if (!databaseMetaData.supportsBatchUpdates()) {
            if (_log.isDebugEnabled()) {
                _log.debug("Database does not support batch updates");
            }
        }
        int count = 0;
        while ((line = unsyncBufferedReader.readLine()) != null) {
            String[] values = StringUtil.split(line);
            Object[][] columns = getColumns();
            if ((values.length) != (columns.length)) {
                throw new UpgradeException("Column lengths differ between temp file and schema. "
                        + "Attempted to insert row " + line + ".");
            }
            if (count == 0) {
                ps = con.prepareStatement(insertSQL);
            }
            int[] order = getOrder();
            for (int i = 0; i < order.length; i++) {
                int pos = order[i];
                setColumn(ps, i, (Integer) columns[pos][1], values[pos]);
            }
            if (databaseMetaData.supportsBatchUpdates()) {
                ps.addBatch();
                if (count == BATCH_SIZE) {
                    populateTableRows(ps, true);
                    count = 0;
                } else {
                    count++;
                }
            } else {
                populateTableRows(ps, false);
            }
        }
        if (databaseMetaData.supportsBatchUpdates()) {
            if (count != 0) {
                populateTableRows(ps, true);
            }
        }
    } finally {
        DataAccess.cleanUp(null, ps);
        unsyncBufferedReader.close();
    }
    if (_log.isDebugEnabled()) {
        _log.debug(getTableName() + " table populated with data");
    }
}
From source file:com.wso2telco.dep.mediator.dao.SMSMessagingDAO.java
/**
 * Operatorsubs entry.
 *
 * @param domainsubs
 *            the domainsubs
 * @param moSubscriptionId
 *            the moSubscriptionId
 * @return true, if successful
 * @throws Exception
 *             the exception
 */
public void operatorSubsEntry(List<OperatorSubscriptionDTO> domainsubs, Integer moSubscriptionId)
        throws SQLException, Exception {
    Connection con = null;
    PreparedStatement insertStatement = null;
    PreparedStatement updateStatement = null;
    try {
        con = DbUtils.getDbConnection(DataSourceNames.WSO2TELCO_DEP_DB);
        if (con == null) {
            throw new Exception("Connection not found");
        }
        /**
         * Set autocommit off to handle the transaction
         */
        con.setAutoCommit(false);
        StringBuilder queryString = new StringBuilder("INSERT INTO ");
        queryString.append(DatabaseTables.OPERATORSUBS.getTableName());
        queryString.append(" (mo_subscription_did, domainurl, operator) ");
        queryString.append("VALUES (?, ?, ?)");
        insertStatement = con.prepareStatement(queryString.toString());
        for (OperatorSubscriptionDTO d : domainsubs) {
            insertStatement.setInt(1, moSubscriptionId);
            insertStatement.setString(2, d.getDomain());
            insertStatement.setString(3, d.getOperator());
            insertStatement.addBatch();
        }
        log.debug("sql query in operatorSubsEntry : " + insertStatement);
        insertStatement.executeBatch();
        StringBuilder updateQueryString = new StringBuilder("UPDATE ");
        updateQueryString.append(DatabaseTables.SUBSCRIPTIONS.getTableName());
        updateQueryString.append(" SET is_active = ?");
        updateQueryString.append(" WHERE mo_subscription_did = ?");
        updateStatement = con.prepareStatement(updateQueryString.toString());
        updateStatement.setInt(1, 1);
        updateStatement.setInt(2, moSubscriptionId);
        log.debug("sql query in operatorSubsEntry : " + updateStatement);
        updateStatement.executeUpdate();
        /**
         * commit the transaction if all success
         */
        con.commit();
    } catch (SQLException e) {
        /**
         * rollback if Exception occurs
         */
        con.rollback();
        log.error("database operation error in operatorSubsEntry : ", e);
        throw e;
    } catch (Exception e) {
        /**
         * rollback if Exception occurs
         */
        con.rollback();
        log.error("error in operatorSubsEntry : ", e);
        throw e;
    } finally {
        DbUtils.closeAllConnections(insertStatement, con, null);
        DbUtils.closeAllConnections(updateStatement, null, null);
    }
}
From source file:com.feedzai.commons.sql.abstraction.engine.AbstractDatabaseEngine.java
/**
 * Add an entry to the batch.
 *
 * @param name  The entity name.
 * @param entry The entry to persist.
 * @throws DatabaseEngineException If something goes wrong while persisting data.
 */
@Override
public synchronized void addBatch(final String name, final EntityEntry entry) throws DatabaseEngineException {
    try {
        final MappedEntity me = entities.get(name);
        if (me == null) {
            throw new DatabaseEngineException(String.format("Unknown entity '%s'", name));
        }
        PreparedStatement ps = me.getInsert();
        entityToPreparedStatement(me.getEntity(), ps, entry, true);
        ps.addBatch();
    } catch (Exception ex) {
        throw new DatabaseEngineException("Error adding to batch", ex);
    }
}