List of usage examples for java.sql PreparedStatement executeBatch
int[] executeBatch() throws SQLException;
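A minimal sketch of the batch workflow before the real-world examples below: queue several parameter sets with addBatch(), then send them to the database in one round trip. The Connection conn and the users table/columns are illustrative assumptions, not taken from any of the sources that follow.

// Minimal sketch (assumed: an open Connection `conn`, a `users` table with name/id columns)
String sql = "UPDATE users SET name = ? WHERE id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
    ps.setString(1, "alice");
    ps.setInt(2, 1);
    ps.addBatch();

    ps.setString(1, "bob");
    ps.setInt(2, 2);
    ps.addBatch();

    // One int per queued statement: an update count, or
    // Statement.SUCCESS_NO_INFO / Statement.EXECUTE_FAILED on drivers that cannot report counts.
    int[] counts = ps.executeBatch();
}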
From source file:com.octo.captcha.engine.bufferedengine.buffer.DatabaseCaptchaBuffer.java
/**
 * Remove a precise number of captchas with a locale.
 *
 * @param number The number of captchas to remove
 * @param locale The locale of the removed captchas
 *
 * @return a collection of captchas
 */
public Collection removeCaptcha(int number, Locale locale) {
    Connection con = null;
    PreparedStatement ps = null;
    PreparedStatement psdel = null;
    ResultSet rs = null;
    Collection collection = new UnboundedFifoBuffer();
    Collection temp = new UnboundedFifoBuffer();
    if (number < 1) {
        return collection;
    }
    try {
        if (log.isDebugEnabled()) {
            log.debug("try to remove " + number + " captchas");
        }
        con = datasource.getConnection();
        ps = con.prepareStatement("select * from " + table + " where " + localeColumn + " = ? order by "
                + timeMillisColumn);
        psdel = con.prepareStatement("delete from " + table + " where " + timeMillisColumn + "= ? and "
                + hashCodeColumn + "= ? "); // and " + localeColumn + "= ?"
        ps.setString(1, locale.toString());
        ps.setMaxRows(number);
        // read
        rs = ps.executeQuery();
        int i = 0;
        while (rs.next() && i < number) {
            try {
                i++;
                InputStream in = rs.getBinaryStream(captchaColumn);
                ObjectInputStream objstr = new ObjectInputStream(in);
                Object captcha = objstr.readObject();
                temp.add(captcha);
                // and delete
                long time = rs.getLong(timeMillisColumn);
                long hash = rs.getLong(hashCodeColumn);
                psdel.setLong(1, time);
                psdel.setLong(2, hash);
                // psdel.setString(3, rs.getString(localeColumn));
                psdel.addBatch();
                if (log.isDebugEnabled()) {
                    log.debug("remove captcha added to batch : " + time + ";" + hash);
                }
            } catch (IOException e) {
                log.error("error during captcha deserialization, "
                        + "check your class versions. removing row from database", e);
                psdel.execute();
            } catch (ClassNotFoundException e) {
                log.error("Serialized captcha class in database is not in your classpath!", e);
            }
        }
        // execute batch delete
        psdel.executeBatch();
        log.debug("batch executed");
        rs.close();
        // commit the whole stuff
        con.commit();
        log.debug("batch committed");
        // only add after commit
        collection.addAll(temp);
    } catch (SQLException e) {
        log.error(DB_ERROR, e);
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ex) {
            }
        }
    } finally {
        // close the delete statement as well (it was leaked in the original)
        if (psdel != null) {
            try {
                psdel.close();
            } catch (SQLException e) {
            }
        }
        if (ps != null) {
            try {
                ps.close();
            } catch (SQLException e) {
            }
        }
        if (con != null) {
            try {
                con.close();
            } catch (SQLException e) {
            }
        }
    }
    return collection;
}
From source file:com.nabla.dc.server.handler.fixed_asset.Asset.java
static public void dispose(final Connection conn, final Integer assetId, final IDisposal disposal)
        throws SQLException, DispatchException {
    final PreparedStatement redo = conn
            .prepareStatement("INSERT INTO fa_transaction_redo (fa_asset_id, command) VALUES(?,?);");
    try {
        redo.setInt(1, assetId);
        // backup transaction after disposal if any
        if (log.isDebugEnabled())
            log.debug("backing up transactions after disposal date");
        // charge monthly depreciation in disposal month if disposal is after 15
        final Calendar dt = Util.dateToCalendar(disposal.getDate());
        if (dt.get(GregorianCalendar.DAY_OF_MONTH) >= dt.getActualMaximum(GregorianCalendar.DAY_OF_MONTH) / 2)
            dt.add(GregorianCalendar.MONTH, 1);
        dt.set(GregorianCalendar.DAY_OF_MONTH, 1);
        final Date from = Util.calendarToSqlDate(dt);
        // get list of transactions to backup before we delete them
        final IntegerSet transIds = new IntegerSet();
        final PreparedStatement stmt = StatementFormat.prepare(conn,
                "SELECT t.* FROM fa_transaction AS t INNER JOIN period_end AS p ON t.period_end_id=p.id"
                        + " WHERE t.fa_asset_id=? AND p.end_date>?;",
                assetId, from);
        try {
            final ResultSet rs = stmt.executeQuery();
            try {
                while (rs.next()) {
                    transIds.add(rs.getInt("id"));
                    final String command = MessageFormat.format("INSERT INTO fa_transaction"
                            + " (id,fa_asset_id,period_end_id,amount,class,type,depreciation_period)"
                            + " VALUES({0,number,0},{1,number,0},{2,number,0},{3,number,0},''{4}'',''{5}'',{6,number,0});",
                            rs.getInt("id"), rs.getInt("fa_asset_id"), rs.getInt("period_end_id"),
                            rs.getInt("amount"), rs.getString("class"), rs.getString("type"),
                            Database.getInteger(rs, "depreciation_period"));
                    if (log.isTraceEnabled())
                        log.trace("redo = " + command);
                    redo.setString(2, command);
                    redo.addBatch();
                }
            } finally {
                rs.close();
            }
        } finally {
            stmt.close();
        }
        // remove any transaction after disposal date
        if (log.isDebugEnabled())
            log.debug("removing transactions after disposal date");
        Database.executeUpdate(conn, "DELETE FROM fa_transaction WHERE id IN (?);", transIds);
        // add disposal transactions
        if (log.isDebugEnabled())
            log.debug("adding transactions for disposal");
        final TransactionList transactions = new TransactionList(assetId);
        // closing cost
        transactions.add(new Transaction(TransactionClasses.COST, TransactionTypes.CLOSING, disposal.getDate(),
                -1 * getAssetCostBeforeDisposal(conn, assetId)));
        // closing accumulated depreciation
        transactions.add(new Transaction(TransactionClasses.DEP, TransactionTypes.CLOSING, disposal.getDate(),
                -1 * getAssetDepreciationBeforeDisposal(conn, assetId)));
        for (Integer newTransId : transactions.save(conn, true)) {
            redo.setString(2,
                    MessageFormat.format("DELETE FROM fa_transaction WHERE id={0,number,0};", newTransId));
            redo.addBatch();
        }
        if (!Database.isBatchCompleted(redo.executeBatch()))
            throw new InternalErrorException("failed to save disposal transactions");
    } finally {
        redo.close();
    }
}
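Database.isBatchCompleted(...) above is a project-specific helper, not part of JDBC. A minimal sketch of what such a check might look like, assuming "completed" means no entry in the executeBatch() result equals Statement.EXECUTE_FAILED (Statement.SUCCESS_NO_INFO still counts as success):

// Hypothetical helper: scans the int[] returned by executeBatch() for failures.
static boolean isBatchCompleted(int[] results) {
    for (int result : results) {
        if (result == java.sql.Statement.EXECUTE_FAILED) {
            return false; // this batch entry definitely failed
        }
    }
    return true; // every entry succeeded or reported SUCCESS_NO_INFO
}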
From source file:com.oltpbenchmark.benchmarks.auctionmark.AuctionMarkLoader.java
/**
 * Load the tuples for the given table name
 * @param tableName
 */
protected void generateTableData(String tableName) throws SQLException {
    LOG.info("*** START " + tableName);
    final AbstractTableGenerator generator = this.generators.get(tableName);
    assert (generator != null);

    // Generate Data
    final Table catalog_tbl = benchmark.getCatalog().getTable(tableName);
    assert (catalog_tbl != null) : tableName;
    final List<Object[]> volt_table = generator.getVoltTable();
    final String sql = SQLUtil.getInsertSQL(catalog_tbl);
    final PreparedStatement stmt = conn.prepareStatement(sql);
    final int types[] = catalog_tbl.getColumnTypes();

    while (generator.hasMore()) {
        generator.generateBatch();

        // StringBuilder sb = new StringBuilder();
        // if (tableName.equalsIgnoreCase("USER_FEEDBACK")) { // || tableName.equalsIgnoreCase("USER_ATTRIBUTES")) {
        //     sb.append(tableName + "\n");
        //     for (int i = 0; i < volt_table.size(); i++) {
        //         sb.append(String.format("[%03d] %s\n", i, StringUtil.abbrv(Arrays.toString(volt_table.get(i)), 100)));
        //     }
        //     LOG.info(sb.toString() + "\n");
        // }

        for (Object row[] : volt_table) {
            for (int i = 0; i < row.length; i++) {
                if (row[i] != null) {
                    stmt.setObject(i + 1, row[i]);
                } else {
                    stmt.setNull(i + 1, types[i]);
                }
            } // FOR
            stmt.addBatch();
        } // FOR
        try {
            stmt.executeBatch();
            conn.commit();
            stmt.clearBatch();
        } catch (SQLException ex) {
            if (ex.getNextException() != null)
                ex = ex.getNextException();
            LOG.warn(tableName + " - " + ex.getMessage());
            throw ex; // SKIP
        }
        this.tableSizes.put(tableName, volt_table.size());

        // Release anything to the sub-generators if we have it
        // We have to do this to ensure that all of the parent tuples get
        // insert first for foreign-key relationships
        generator.releaseHoldsToSubTableGenerators();
    } // WHILE
    stmt.close();

    // Mark as finished
    if (this.fail == false) {
        generator.markAsFinished();
        synchronized (this) {
            this.finished.add(tableName);
            LOG.info(String.format("*** FINISH %s - %d tuples - [%d / %d]", tableName,
                    this.tableSizes.get(tableName), this.finished.size(), this.generators.size()));
            if (LOG.isDebugEnabled()) {
                LOG.debug("Remaining Tables: "
                        + CollectionUtils.subtract(this.generators.keySet(), this.finished));
            }
        } // SYNCH
    }
}
From source file:org.wso2.carbon.idp.mgt.dao.IdPManagementDAO.java
/**
 * @param conn
 * @param idPId
 * @param addedRoles
 * @param deletedRoles
 * @param renamedOldRoles
 * @param renamedNewRoles
 * @throws SQLException
 */
private void updateIdPRoles(Connection conn, int idPId, List<String> addedRoles, List<String> deletedRoles,
        List<String> renamedOldRoles, List<String> renamedNewRoles) throws SQLException {

    PreparedStatement prepStmt = null;
    String sqlStmt = null;

    try {
        // prepare once, then add one batch entry per deleted role
        // (the original prepared a fresh statement inside the loop, which discarded the
        // accumulated batch and leaked statements)
        sqlStmt = IdPManagementConstants.SQLQueries.DELETE_IDP_ROLES_SQL;
        prepStmt = conn.prepareStatement(sqlStmt);
        for (String deletedRole : deletedRoles) {
            prepStmt.setInt(1, idPId);
            prepStmt.setString(2, deletedRole);
            prepStmt.addBatch();
        }
        prepStmt.executeBatch();
        prepStmt.clearParameters();
        prepStmt.clearBatch();
        IdentityApplicationManagementUtil.closeStatement(prepStmt);

        sqlStmt = IdPManagementConstants.SQLQueries.ADD_IDP_ROLES_SQL;
        prepStmt = conn.prepareStatement(sqlStmt);
        for (String addedRole : addedRoles) {
            prepStmt.setInt(1, idPId);
            prepStmt.setString(2, CharacterEncoder.getSafeText(addedRole));
            prepStmt.addBatch();
        }
        prepStmt.executeBatch();
        prepStmt.clearParameters();
        prepStmt.clearBatch();
        IdentityApplicationManagementUtil.closeStatement(prepStmt);

        sqlStmt = IdPManagementConstants.SQLQueries.UPDATE_IDP_ROLES_SQL;
        prepStmt = conn.prepareStatement(sqlStmt);
        for (int i = 0; i < renamedOldRoles.size(); i++) {
            prepStmt.setString(1, CharacterEncoder.getSafeText(renamedNewRoles.get(i)));
            prepStmt.setInt(2, idPId);
            prepStmt.setString(3, CharacterEncoder.getSafeText(renamedOldRoles.get(i)));
            prepStmt.addBatch();
        }
        prepStmt.executeBatch();
    } finally {
        if (prepStmt != null) { // guard against NPE when all role lists are empty
            prepStmt.clearParameters();
            prepStmt.clearBatch();
            IdentityApplicationManagementUtil.closeStatement(prepStmt);
        }
    }
}
From source file:org.wso2.carbon.identity.oauth2.dao.AccessTokenDAOImpl.java
@Override
public void revokeAccessTokensInBatch(String[] tokens) throws IdentityOAuth2Exception {

    if (log.isDebugEnabled()) {
        if (IdentityUtil.isTokenLoggable(IdentityConstants.IdentityTokens.ACCESS_TOKEN)) {
            StringBuilder stringBuilder = new StringBuilder();
            for (String token : tokens) {
                stringBuilder.append(DigestUtils.sha256Hex(token)).append(" ");
            }
            log.debug("Revoking access tokens(hashed): " + stringBuilder.toString());
        } else {
            log.debug("Revoking access tokens in batch mode");
        }
    }
    String accessTokenStoreTable = OAuthConstants.ACCESS_TOKEN_STORE_TABLE;
    Connection connection = IdentityDatabaseUtil.getDBConnection();
    PreparedStatement ps = null;
    if (tokens.length > 1) {
        try {
            List<String> oldTokens = new ArrayList<>();
            connection.setAutoCommit(false);
            String sqlQuery = SQLQueries.REVOKE_ACCESS_TOKEN.replace(IDN_OAUTH2_ACCESS_TOKEN,
                    accessTokenStoreTable);
            ps = connection.prepareStatement(sqlQuery);
            for (String token : tokens) {
                ps.setString(1, OAuthConstants.TokenStates.TOKEN_STATE_REVOKED);
                ps.setString(2, UUID.randomUUID().toString());
                ps.setString(3, getHashingPersistenceProcessor().getProcessedAccessTokenIdentifier(token));
                ps.addBatch();
                oldTokens.add(getHashingPersistenceProcessor().getProcessedAccessTokenIdentifier(token));
            }
            ps.executeBatch();
            connection.commit();
            // To revoke request objects which have persisted against the access token.
            OAuth2TokenUtil.postUpdateAccessTokens(Arrays.asList(tokens),
                    OAuthConstants.TokenStates.TOKEN_STATE_REVOKED);
            if (isTokenCleanupFeatureEnabled) {
                oldTokenCleanupObject.cleanupTokensInBatch(oldTokens, connection);
            }
            connection.commit();
        } catch (SQLException e) {
            IdentityDatabaseUtil.rollBack(connection);
            throw new IdentityOAuth2Exception(
                    "Error occurred while revoking Access Tokens : " + Arrays.toString(tokens), e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, null, ps);
        }
    }
    if (tokens.length == 1) {
        try {
            connection.setAutoCommit(true);
            String sqlQuery = SQLQueries.REVOKE_ACCESS_TOKEN.replace(IDN_OAUTH2_ACCESS_TOKEN,
                    accessTokenStoreTable);
            ps = connection.prepareStatement(sqlQuery);
            ps.setString(1, OAuthConstants.TokenStates.TOKEN_STATE_REVOKED);
            ps.setString(2, UUID.randomUUID().toString());
            ps.setString(3, getHashingPersistenceProcessor().getProcessedAccessTokenIdentifier(tokens[0]));
            ps.executeUpdate();
            // To revoke request objects which have persisted against the access token.
            OAuth2TokenUtil.postUpdateAccessTokens(Arrays.asList(tokens),
                    OAuthConstants.TokenStates.TOKEN_STATE_REVOKED);
            if (isTokenCleanupFeatureEnabled) {
                oldTokenCleanupObject.cleanupTokenByTokenValue(
                        getHashingPersistenceProcessor().getProcessedAccessTokenIdentifier(tokens[0]),
                        connection);
            }
        } catch (SQLException e) {
            // IdentityDatabaseUtil.rollBack(connection);
            throw new IdentityOAuth2Exception(
                    "Error occurred while revoking Access Token : " + Arrays.toString(tokens), e);
        } finally {
            IdentityDatabaseUtil.closeAllConnections(connection, null, ps);
        }
    }
}
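The multi-token branch above disables auto-commit and rolls back on SQLException so the batch is all-or-nothing. A stripped-down sketch of that commit/rollback discipline; the table name t and the ids collection are illustrative assumptions:

// Sketch: make the whole batch atomic; roll back the partial batch on failure.
try {
    conn.setAutoCommit(false);
    try (PreparedStatement ps = conn.prepareStatement("UPDATE t SET state = ? WHERE id = ?")) {
        for (long id : ids) { // `ids` is assumed to exist
            ps.setString(1, "REVOKED");
            ps.setLong(2, id);
            ps.addBatch();
        }
        ps.executeBatch();
    }
    conn.commit();
} catch (SQLException e) {
    conn.rollback(); // nothing from this batch is left behind
    throw e;
}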
From source file:mil.army.usace.data.nativequery.rdbms.NativeRdbmsQuery.java
private void runDml(DML dmlType, List records, boolean useDeclaredOnly) {
    PreparedStatement st = null;
    boolean inTrans = inTransaction();
    int batchCount = 0;
    String command = null;

    if (!inTrans)
        startTransaction();

    try {
        Object obj = records.get(0);
        Class objClass = obj.getClass();
        String schema = getEntitySchema(objClass);
        Boolean isCamelCased = useCamelCase(objClass);
        HashMap<Method, String> fieldMapping = getFieldMapping(objClass, GET, isCamelCased, useDeclaredOnly);
        HashMap<Integer, Method> indexMapping = new HashMap<>();
        String tableName = getTableName(objClass);
        if (tableName == null)
            tableName = getDbName(isCamelCased, objClass.getSimpleName(), null);

        if (dmlType == DML.UPDATE)
            command = getUpdateCommand(tableName, schema, fieldMapping, indexMapping);
        else if (dmlType == DML.INSERT)
            command = getInsertCommand(tableName, schema, fieldMapping, indexMapping);
        else
            command = getDeleteCommand(tableName, schema, fieldMapping, indexMapping);

        st = conn.prepareStatement(command);
        for (Object record : records) {
            for (int index : indexMapping.keySet()) {
                Object value = indexMapping.get(index).invoke(record, (Object[]) null);
                if (value instanceof java.util.Date) {
                    value = new java.sql.Date(((java.util.Date) value).getTime());
                }
                st.setObject(index, value);
            }
            if (useBatch)
                st.addBatch();
            else
                st.executeUpdate();
            if (useBatch && ++batchCount % batchSize == 0) {
                st.executeBatch();
            }
        }
        if (useBatch)
            st.executeBatch(); // flush out remaining records
        if (!inTrans)
            commitTransaction();
    } catch (Exception ex) {
        ex.printStackTrace();
        if (!inTrans)
            rollbackTransaction();
        throw new NativeQueryException(command, "runDml", ex);
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (Exception ex) {
            }
        }
    }
}
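runDml() above flushes the batch every batchSize records so driver-side buffers stay bounded. The same pattern in isolation; the INSERT statement, conn, and the rows list are illustrative assumptions:

// Size-bounded batching: flush every `batchSize` additions, then flush the remainder.
final int batchSize = 1000;
int count = 0;
try (PreparedStatement ps = conn.prepareStatement("INSERT INTO items (name) VALUES (?)")) {
    for (String name : rows) { // `rows` is assumed to exist
        ps.setString(1, name);
        ps.addBatch();
        if (++count % batchSize == 0) {
            ps.executeBatch(); // flush a full chunk
        }
    }
    ps.executeBatch(); // flush the final partial chunk
}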
From source file:com.mirth.connect.donkey.server.data.jdbc.JdbcDao.java
@Override
public void executeBatchInsertMessageContent(String channelId) {
    logger.debug(channelId + ": executing batch message content insert");

    try {
        /*
         * MIRTH-3597 Batch statements need to have their own prepared statement object due to a
         * bug? in the Oracle 12 JDBC driver. Therefore we need to have both
         * insertMessageContent and insertMessageContentBatch even though they are the same
         * query. We should also call clearBatch() after each batch execution to verify that all
         * internal buffers are cleared.
         */
        PreparedStatement statement = prepareStatement("batchInsertMessageContent", channelId);
        statement.executeBatch();
        statement.clearBatch();
    } catch (SQLException e) {
        throw new DonkeyDaoException(e);
    }
}
From source file:lib.JdbcTemplate.java
@Override
public <T> int[][] batchUpdate(String sql, final Collection<T> batchArgs, final int batchSize,
        final ParameterizedPreparedStatementSetter<T> pss) throws DataAccessException {

    if (logger.isDebugEnabled()) {
        logger.debug("Executing SQL batch update [" + sql + "] with a batch size of " + batchSize);
    }
    return execute(sql, new PreparedStatementCallback<int[][]>() {
        @Override
        public int[][] doInPreparedStatement(PreparedStatement ps) throws SQLException {
            List<int[]> rowsAffected = new ArrayList<int[]>();
            try {
                boolean batchSupported = true;
                if (!JdbcUtils.supportsBatchUpdates(ps.getConnection())) {
                    batchSupported = false;
                    logger.warn("JDBC Driver does not support Batch updates; resorting to single statement execution");
                }
                int n = 0;
                for (T obj : batchArgs) {
                    pss.setValues(ps, obj);
                    n++;
                    if (batchSupported) {
                        ps.addBatch();
                        if (n % batchSize == 0 || n == batchArgs.size()) {
                            if (logger.isDebugEnabled()) {
                                int batchIdx = (n % batchSize == 0) ? n / batchSize : (n / batchSize) + 1;
                                int items = n - ((n % batchSize == 0) ? n / batchSize - 1 : (n / batchSize)) * batchSize;
                                logger.debug("Sending SQL batch update #" + batchIdx + " with " + items + " items");
                            }
                            rowsAffected.add(ps.executeBatch());
                        }
                    } else {
                        int i = ps.executeUpdate();
                        rowsAffected.add(new int[] { i });
                    }
                }
                int[][] result = new int[rowsAffected.size()][];
                for (int i = 0; i < result.length; i++) {
                    result[i] = rowsAffected.get(i);
                }
                return result;
            } finally {
                if (pss instanceof ParameterDisposer) {
                    ((ParameterDisposer) pss).cleanupParameters();
                }
            }
        }
    });
}
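A hedged usage sketch for the batchUpdate() overload above; the jdbcTemplate instance, the User type, and the table/column names are illustrative assumptions, not part of the source:

// Hypothetical caller: updates rows in chunks of 100 and receives per-batch update counts.
int[][] counts = jdbcTemplate.batchUpdate("UPDATE users SET email = ? WHERE id = ?", users, 100,
        new ParameterizedPreparedStatementSetter<User>() {
            @Override
            public void setValues(PreparedStatement ps, User u) throws SQLException {
                ps.setString(1, u.getEmail());
                ps.setLong(2, u.getId());
            }
        });
// counts[b][i] is the update count of statement i within batch b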
From source file:org.apache.ddlutils.platform.PlatformImplBase.java
/**
 * Performs the batch for the given statement, and checks that the specified amount of rows have been changed.
 *
 * @param statement The prepared statement
 * @param numRows   The number of rows that should change
 * @param table     The changed table
 */
private void executeBatch(PreparedStatement statement, int numRows, Table table) throws DatabaseOperationException {
    if (statement != null) {
        try {
            Connection connection = statement.getConnection();

            beforeInsert(connection, table);

            int[] results = statement.executeBatch();

            closeStatement(statement);
            afterInsert(connection, table);

            boolean hasSum = true;
            int sum = 0;

            for (int idx = 0; (results != null) && (idx < results.length); idx++) {
                if (results[idx] < 0) {
                    hasSum = false;
                    if (results[idx] == Statement.EXECUTE_FAILED) {
                        _log.warn("The batch insertion of row " + idx + " into table " + table.getName()
                                + " failed but the driver is able to continue processing");
                    } else if (results[idx] != Statement.SUCCESS_NO_INFO) {
                        _log.warn("The batch insertion of row " + idx + " into table " + table.getName()
                                + " returned an undefined status value " + results[idx]);
                    }
                } else {
                    sum += results[idx];
                }
            }
            if (hasSum && (sum != numRows)) {
                _log.warn("Attempted to insert " + numRows + " rows into table " + table.getName()
                        + " but changed " + sum + " rows");
            }
        } catch (SQLException ex) {
            if (ex instanceof BatchUpdateException) {
                SQLException sqlEx = ((BatchUpdateException) ex).getNextException();
                throw new DatabaseOperationException("Error while inserting into the database", sqlEx);
            } else {
                throw new DatabaseOperationException("Error while inserting into the database", ex);
            }
        }
    }
}
From source file:org.openmrs.module.formentry.databasechange.MigrateXsltsAndTemplatesChangeset.java
private void migrateResources(JdbcConnection connection, boolean isXslt) throws CustomChangeException {
    Statement selectStmt = null;
    PreparedStatement insertResourcesStmt = null;
    PreparedStatement insertClobsStmt = null;
    Boolean originalAutoCommit = null;
    ResultSet rs = null;
    String resourceName = (isXslt) ? FormEntryConstants.FORMENTRY_XSLT_FORM_RESOURCE_NAME
            : FormEntryConstants.FORMENTRY_TEMPLATE_FORM_RESOURCE_NAME;
    String columnName = (isXslt) ? "xslt" : "template";

    try {
        originalAutoCommit = connection.getAutoCommit();
        selectStmt = connection.createStatement();
        boolean hasResults = selectStmt.execute("SELECT form_id, " + columnName + " FROM form WHERE "
                + columnName + " IS NOT NULL AND " + columnName + " != ''");
        if (hasResults) {
            rs = selectStmt.getResultSet();
            insertClobsStmt = connection
                    .prepareStatement("INSERT INTO clob_datatype_storage (value, uuid) VALUES(?,?)");
            insertResourcesStmt = connection.prepareStatement(
                    "INSERT INTO form_resource (form_id, name, value_reference, datatype, preferred_handler, uuid) VALUES (?,'"
                            + resourceName + "',?,'" + LongFreeTextDatatype.class.getName() + "','"
                            + LongFreeTextFileUploadHandler.class.getName() + "',?)");

            String defaultXslt = IOUtils.toString(getClass().getClassLoader().getResourceAsStream("default.xslt"));
            // intentionally didn't check for NULL so the exception halts the changeset
            defaultXslt = defaultXslt.trim();

            while (rs.next()) {
                String resourceValue = rs.getString(columnName);
                // if the form has an xslt and it differs from the default one
                if (StringUtils.isNotBlank(resourceValue)
                        && (!isXslt || !resourceValue.trim().equals(defaultXslt))) {
                    // set the clob storage values
                    String clobUuid = UUID.randomUUID().toString();
                    insertClobsStmt.setString(1, resourceValue.trim());
                    insertClobsStmt.setString(2, clobUuid);
                    insertClobsStmt.addBatch();

                    // set the resource column values
                    insertResourcesStmt.setInt(1, rs.getInt("form_id"));
                    insertResourcesStmt.setString(2, clobUuid);
                    insertResourcesStmt.setString(3, UUID.randomUUID().toString());
                    insertResourcesStmt.addBatch();
                }
            }

            boolean successfullyAddedClobs = false;
            int[] clobInsertCounts = insertClobsStmt.executeBatch();
            if (clobInsertCounts != null) {
                for (int i = 0; i < clobInsertCounts.length; i++) {
                    if (clobInsertCounts[i] > -1) {
                        successfullyAddedClobs = true;
                        log.debug("Successfully inserted resource clobs: insert count=" + clobInsertCounts[i]);
                    } else if (clobInsertCounts[i] == Statement.SUCCESS_NO_INFO) {
                        successfullyAddedClobs = true;
                        log.debug("Successfully inserted resource clobs; No Success info");
                    } else if (clobInsertCounts[i] == Statement.EXECUTE_FAILED) {
                        log.warn("Failed to insert resource clobs");
                    }
                }
            }
            if (successfullyAddedClobs) {
                int[] resourceInsertCounts = insertResourcesStmt.executeBatch();
                if (resourceInsertCounts != null) {
                    boolean commit = false;
                    for (int i = 0; i < resourceInsertCounts.length; i++) {
                        if (resourceInsertCounts[i] > -1) {
                            commit = true;
                            log.debug("Successfully inserted " + columnName + " resources: insert count="
                                    + resourceInsertCounts[i]);
                        } else if (resourceInsertCounts[i] == Statement.SUCCESS_NO_INFO) {
                            commit = true;
                            log.debug("Successfully inserted " + columnName + " resources; No Success info");
                        } else if (resourceInsertCounts[i] == Statement.EXECUTE_FAILED) {
                            log.warn("Failed to insert " + columnName + " resources");
                        }
                    }
                    if (commit) {
                        log.debug("Committing " + columnName + " resource inserts...");
                        connection.commit();
                    }
                }
            }
        }
    } catch (Exception e) {
        log.warn("Error generated while processing generation of " + columnName + " form resources", e);
        try {
            if (connection != null) {
                connection.rollback();
            }
        } catch (Exception ex) {
            log.error("Failed to rollback", ex);
        }
        throw new CustomChangeException(e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.warn("Failed to close the resultset object");
            }
        }
        if (connection != null && originalAutoCommit != null) {
            try {
                connection.setAutoCommit(originalAutoCommit);
            } catch (DatabaseException e) {
                log.error("Failed to reset auto commit", e);
            }
        }
        closeStatementQuietly(selectStmt);
        closeStatementQuietly(insertClobsStmt);
        closeStatementQuietly(insertResourcesStmt);
    }
}