List of usage examples for java.sql PreparedStatement executeBatch
int[] executeBatch() throws SQLException;
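Before the project examples below, here is a minimal sketch of the basic pattern: bind a parameter set, queue it with addBatch(), repeat, then send all queued sets to the database in one round trip with executeBatch(). The table, the column names, and the H2 in-memory URL are illustrative assumptions for this sketch, not taken from any example on this page.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

public class ExecuteBatchSketch {
    public static void main(String[] args) throws SQLException {
        // Assumes an H2 in-memory database (h2 driver on the classpath); any JDBC URL works.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            try (Statement ddl = conn.createStatement()) {
                ddl.execute("CREATE TABLE person (id INT PRIMARY KEY, name VARCHAR(64))");
            }
            conn.setAutoCommit(false); // run the whole batch in one transaction
            try (PreparedStatement ps = conn.prepareStatement(
                    "INSERT INTO person (id, name) VALUES (?, ?)")) {
                String[] names = { "alice", "bob", "carol" };
                for (int i = 0; i < names.length; i++) {
                    ps.setInt(1, i + 1);
                    ps.setString(2, names[i]);
                    ps.addBatch(); // queue this parameter set
                }
                int[] counts = ps.executeBatch(); // one update count per queued set
                conn.commit();
                System.out.println("batched inserts: " + counts.length);
            }
        }
    }
}

Per the JDBC contract, executeBatch() returns one update count per queued parameter set; a driver may report Statement.SUCCESS_NO_INFO for entries it executed without a count, and a failing entry surfaces as a BatchUpdateException. The examples below show how production code handles those cases.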
From source file:org.wso2.carbon.identity.oauth2.dao.AuthorizationCodeDAOImpl.java
@Override
public void deactivateAuthorizationCodes(List<AuthzCodeDO> authzCodeDOs) throws IdentityOAuth2Exception {
    Connection connection = IdentityDatabaseUtil.getDBConnection();
    PreparedStatement prepStmt = null;
    if (log.isDebugEnabled()) {
        if (IdentityUtil.isTokenLoggable(IdentityConstants.IdentityTokens.AUTHORIZATION_CODE)) {
            StringBuilder stringBuilder = new StringBuilder();
            for (AuthzCodeDO authzCodeDO : authzCodeDOs) {
                stringBuilder.append("Deactivating authorization code(hashed): ")
                        .append(DigestUtils.sha256Hex(authzCodeDO.getAuthorizationCode())).append(" client: ")
                        .append(authzCodeDO.getConsumerKey()).append(" user: ")
                        .append(authzCodeDO.getAuthorizedUser().toString()).append("\n");
            }
            log.debug(stringBuilder.toString());
        } else {
            StringBuilder stringBuilder = new StringBuilder();
            for (AuthzCodeDO authzCodeDO : authzCodeDOs) {
                stringBuilder.append("Deactivating authorization code client: ")
                        .append(authzCodeDO.getConsumerKey()).append(" user: ")
                        .append(authzCodeDO.getAuthorizedUser().toString()).append("\n");
            }
            log.debug(stringBuilder.toString());
        }
    }
    try {
        prepStmt = connection.prepareStatement(SQLQueries.DEACTIVATE_AUTHZ_CODE_AND_INSERT_CURRENT_TOKEN);
        for (AuthzCodeDO authzCodeDO : authzCodeDOs) {
            prepStmt.setString(1, authzCodeDO.getOauthTokenId());
            prepStmt.setString(2, getHashingPersistenceProcessor()
                    .getProcessedAuthzCode(authzCodeDO.getAuthorizationCode()));
            prepStmt.addBatch();
        }
        prepStmt.executeBatch();
        connection.commit();
        // To revoke request objects which are persisted against the code.
        OAuth2TokenUtil.postRevokeCodes(authzCodeDOs, OAuthConstants.AuthorizationCodeState.INACTIVE);
    } catch (SQLException e) {
        throw new IdentityOAuth2Exception("Error when deactivating authorization code", e);
    } finally {
        IdentityDatabaseUtil.closeAllConnections(connection, null, prepStmt);
    }
}
From source file:org.nuxeo.ecm.core.storage.sql.jdbc.JDBCRowMapper.java
/**
 * Updates multiple simple rows, all for the same table.
 */
protected void updateSimpleRows(String tableName, List<RowUpdate> rows) throws StorageException {
    if (rows.isEmpty()) {
        return;
    }
    // reorganize by unique sets of keys + which ones are for delta updates
    Map<String, List<RowUpdate>> updatesByCanonKeys = new HashMap<>();
    Map<String, Collection<String>> keysByCanonKeys = new HashMap<>();
    Map<String, Set<String>> deltasByCanonKeys = new HashMap<>();
    for (RowUpdate rowu : rows) {
        List<String> keys = new ArrayList<String>(rowu.keys);
        if (keys.isEmpty()) {
            continue;
        }
        Set<String> deltas = new HashSet<>();
        for (ListIterator<String> it = keys.listIterator(); it.hasNext();) {
            String key = it.next();
            Serializable value = rowu.row.get(key);
            if (value instanceof Delta) {
                deltas.add(key);
                it.set(key + '+');
            }
        }
        Collections.sort(keys);
        String ck = StringUtils.join(keys, ','); // canonical keys
        List<RowUpdate> keysUpdates = updatesByCanonKeys.get(ck);
        if (keysUpdates == null) {
            updatesByCanonKeys.put(ck, keysUpdates = new LinkedList<RowUpdate>());
            keysByCanonKeys.put(ck, rowu.keys);
            deltasByCanonKeys.put(ck, deltas);
        }
        keysUpdates.add(rowu);
    }
    for (String ck : updatesByCanonKeys.keySet()) {
        List<RowUpdate> keysUpdates = updatesByCanonKeys.get(ck);
        Collection<String> keys = keysByCanonKeys.get(ck);
        Set<String> deltas = deltasByCanonKeys.get(ck);
        SQLInfoSelect update = sqlInfo.getUpdateById(tableName, keys, deltas);
        String loggedSql = supportsBatchUpdates && rows.size() > 1 ? update.sql + " -- BATCHED" : update.sql;
        try {
            PreparedStatement ps = connection.prepareStatement(update.sql);
            int batch = 0;
            try {
                for (RowUpdate rowu : keysUpdates) {
                    batch++;
                    if (logger.isLogEnabled()) {
                        logger.logSQL(loggedSql, update.whatColumns, rowu.row, deltas);
                    }
                    int i = 1;
                    for (Column column : update.whatColumns) {
                        Serializable value = rowu.row.get(column.getKey());
                        if (value instanceof Delta) {
                            value = ((Delta) value).getDeltaValue();
                        }
                        column.setToPreparedStatement(ps, i++, value);
                    }
                    if (supportsBatchUpdates) {
                        ps.addBatch();
                        if (batch % UPDATE_BATCH_SIZE == 0) {
                            int[] counts = ps.executeBatch();
                            countExecute();
                            logger.logCounts(counts);
                        }
                    } else {
                        int count = ps.executeUpdate();
                        countExecute();
                        logger.logCount(count);
                    }
                }
                if (supportsBatchUpdates) {
                    // flush whatever is left in the final, partial batch
                    int[] counts = ps.executeBatch();
                    countExecute();
                    logger.logCounts(counts);
                }
            } finally {
                closeStatement(ps);
            }
        } catch (Exception e) {
            checkConnectionReset(e);
            throw new StorageException("Could not update: " + update.sql, e);
        }
    }
}
From source file:com.appspot.relaxe.tools.CSVInsertTask.java
public void run(Connection connection, Reader input, Table table) throws IOException, SQLException {
    if (connection == null) {
        throw new NullPointerException("'connection' must not be null");
    }
    if (input == null) {
        throw new NullPointerException("'input' must not be null");
    }
    if (table == null) {
        throw new NullPointerException("'table' must not be null");
    }
    boolean committed = false;
    try {
        connection.setAutoCommit(false);
        CSVStrategy cs = new CSVStrategy('\t', '"', CSVStrategy.COMMENTS_DISABLED, false, false, false);
        CSVParser p = new CSVParser(input, cs);
        // get header line
        String[] line = p.getLine();
        // configure by using the column headers:
        ColumnMap cm = table.getColumnMap();
        List<Identifier> names = new ArrayList<Identifier>();
        List<Column> columnList = new ArrayList<Column>();
        for (String n : line) {
            Column column = cm.get(n);
            if (column == null) {
                throw new IllegalArgumentException("column not found " + n);
            }
            columnList.add(column);
            names.add(column.getColumnName());
        }
        if (names.isEmpty()) {
            throw new IllegalStateException("no column names available");
        }
        ElementList<Identifier> nel = ElementList.newElementList(names);
        final int expectedColumnCount = line.length;
        PreparedStatement ps = null;
        InsertStatement ins = null;
        VarcharParameter[] params = new VarcharParameter[expectedColumnCount];
        ValueAssignerFactory vaf = getImplementation().getValueAssignerFactory();
        AssignmentVisitor pa = null;
        while ((line = p.getLine()) != null) {
            final int cols = line.length;
            int lineno = p.getLineNumber();
            if (cols != expectedColumnCount) {
                throw new IllegalStateException("unexpected column count: " + cols + " at line " + lineno);
            }
            if (ps == null) {
                // first data row: build the INSERT statement and its parameter objects
                List<RowValueConstructorElement> vl = new ArrayList<RowValueConstructorElement>(params.length);
                for (int i = 0; i < params.length; i++) {
                    Column column = columnList.get(i);
                    VarcharHolder h = parse(column, line[i]);
                    VarcharParameter param = new VarcharParameter(column, h);
                    params[i] = param;
                    vl.add(param);
                }
                RowValueConstructor rvc = AbstractRowValueConstructor.of(vl);
                ins = new InsertStatement(table, nel, rvc);
                String q = ins.generate();
                ps = connection.prepareStatement(q);
                pa = new AssignmentVisitor(vaf, ps);
            } else {
                // subsequent rows: just rebind the parameter values
                pa.reset();
                for (int i = 0; i < line.length; i++) {
                    Column column = columnList.get(i);
                    VarcharHolder h = parse(column, line[i]);
                    VarcharParameter param = params[i];
                    param.setValue(h);
                }
            }
            ins.traverse(null, pa);
            ps.addBatch();
        }
        if (ps != null) { // guard: the input may contain a header line but no data rows
            int[] updateCounts = ps.executeBatch();
            updated(updateCounts);
        }
        connection.commit();
        committed = true;
    } finally {
        if (!committed) {
            QueryHelper.doRollback(connection);
        }
    }
}
From source file:org.qi4j.entitystore.sql.SQLEntityStoreMixin.java
public StateCommitter applyChanges(final EntityStoreUnitOfWork unitofwork, final Iterable<EntityState> states) {
    return new StateCommitter() {
        public void commit() {
            Connection connection = null;
            PreparedStatement insertPS = null;
            PreparedStatement updatePS = null;
            PreparedStatement removePS = null;
            try {
                connection = database.getConnection();
                insertPS = database.prepareInsertEntityStatement(connection);
                updatePS = database.prepareUpdateEntityStatement(connection);
                removePS = database.prepareRemoveEntityStatement(connection);
                for (EntityState state : states) {
                    EntityStatus status = state.status();
                    DefaultEntityState defState = ((SQLEntityState) state).getDefaultEntityState();
                    Long entityPK = ((SQLEntityState) state).getEntityPK();
                    if (EntityStatus.REMOVED.equals(status)) {
                        database.populateRemoveEntityStatement(removePS, entityPK, state.identity());
                        removePS.addBatch();
                    } else {
                        StringWriter writer = new StringWriter();
                        writeEntityState(defState, writer, unitofwork.identity());
                        writer.flush();
                        if (EntityStatus.UPDATED.equals(status)) {
                            Long entityOptimisticLock = ((SQLEntityState) state).getEntityOptimisticLock();
                            database.populateUpdateEntityStatement(updatePS, entityPK, entityOptimisticLock,
                                    defState.identity(), writer.toString(), unitofwork.currentTime());
                            updatePS.addBatch();
                        } else if (EntityStatus.NEW.equals(status)) {
                            database.populateInsertEntityStatement(insertPS, entityPK, defState.identity(),
                                    writer.toString(), unitofwork.currentTime());
                            insertPS.addBatch();
                        }
                    }
                }
                removePS.executeBatch();
                insertPS.executeBatch();
                updatePS.executeBatch();
                connection.commit();
            } catch (SQLException sqle) {
                SQLUtil.rollbackQuietly(connection);
                if (LOGGER.isDebugEnabled()) {
                    StringWriter sb = new StringWriter();
                    sb.append("SQLException during commit, logging nested exceptions before throwing EntityStoreException:\n");
                    SQLException e = sqle;
                    while (e != null) {
                        e.printStackTrace(new PrintWriter(sb, true));
                        e = e.getNextException();
                    }
                    LOGGER.debug(sb.toString());
                }
                throw new EntityStoreException(sqle);
            } catch (RuntimeException re) {
                SQLUtil.rollbackQuietly(connection);
                throw new EntityStoreException(re);
            } finally {
                SQLUtil.closeQuietly(insertPS);
                SQLUtil.closeQuietly(updatePS);
                SQLUtil.closeQuietly(removePS);
                SQLUtil.closeQuietly(connection);
            }
        }

        public void cancel() {
        }
    };
}
From source file:org.wso2.carbon.policy.mgt.core.dao.impl.PolicyDAOImpl.java
@Override
public boolean addPolicyCriteriaProperties(List<PolicyCriterion> policyCriteria) throws PolicyManagerDAOException {
    Connection conn;
    PreparedStatement stmt = null;
    try {
        conn = this.getConnection();
        String query = "INSERT INTO DM_POLICY_CRITERIA_PROPERTIES (POLICY_CRITERION_ID, PROP_KEY, PROP_VALUE, "
                + "CONTENT) VALUES (?, ?, ?, ?)";
        stmt = conn.prepareStatement(query);
        for (PolicyCriterion criterion : policyCriteria) {
            Properties prop = criterion.getProperties();
            for (String name : prop.stringPropertyNames()) {
                stmt.setInt(1, criterion.getId());
                stmt.setString(2, name);
                stmt.setString(3, prop.getProperty(name));
                stmt.setBytes(4, PolicyManagerUtil.getBytes(criterion.getObjectMap()));
                stmt.addBatch();
            }
            // flush the batch once per criterion
            stmt.executeBatch();
        }
    } catch (SQLException | IOException e) {
        throw new PolicyManagerDAOException(
                "Error occurred while inserting the criterion properties to database", e);
    } finally {
        PolicyManagementDAOUtil.cleanupResources(stmt, null);
    }
    return false;
}
From source file:org.openmrs.util.databasechange.AddConceptMapTypesChangeset.java
/**
 * Executes all the changes to the concept names as a batch update.
 *
 * @param connection The database connection
 */
private void runBatchInsert(JdbcConnection connection) throws CustomChangeException {
    PreparedStatement pStmt = null;
    ResultSet rs = null;
    try {
        connection.setAutoCommit(false);
        Integer userId = DatabaseUpdater.getAuthenticatedUserId();
        // if we have no authenticated user (for API users), set as Daemon
        if (userId == null || userId < 1) {
            userId = getInt(connection, "SELECT min(user_id) FROM users");
            // leave it as null rather than setting it to 0
            if (userId < 1) {
                userId = null;
            }
        }
        // userId is not a param, because it's easier this way if it's null
        pStmt = connection.prepareStatement("INSERT INTO concept_map_type "
                + "(concept_map_type_id, name, is_hidden, retired, creator, date_created, uuid) VALUES(?,?,?,?,"
                + userId + ",?,?)");
        int mapTypeId = 1;
        for (String map : visibleConceptMapTypeArray) {
            String[] mapTypeAndUuid = map.trim().split("\\|");
            String mapType = mapTypeAndUuid[0];
            String mapUuid = mapTypeAndUuid[1];
            pStmt.setInt(1, mapTypeId);
            pStmt.setString(2, mapType);
            pStmt.setBoolean(3, false);
            pStmt.setBoolean(4, false);
            pStmt.setDate(5, new Date(Calendar.getInstance().getTimeInMillis()));
            pStmt.setString(6, mapUuid);
            pStmt.addBatch();
            mapTypeId++;
        }
        for (String map : hiddenConceptMapTypeArray) {
            String[] mapTypeAndUuid = map.trim().split("\\|");
            String mapType = mapTypeAndUuid[0];
            String mapUuid = mapTypeAndUuid[1];
            pStmt.setInt(1, mapTypeId);
            pStmt.setString(2, mapType);
            pStmt.setBoolean(3, true);
            pStmt.setBoolean(4, false);
            pStmt.setDate(5, new Date(Calendar.getInstance().getTimeInMillis()));
            pStmt.setString(6, mapUuid);
            pStmt.addBatch();
            mapTypeId++;
        }
        try {
            int[] updateCounts = pStmt.executeBatch();
            for (int i = 0; i < updateCounts.length; i++) {
                if (updateCounts[i] > -1) {
                    log.debug("Successfully executed: updateCount=" + updateCounts[i]);
                } else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
                    log.debug("Successfully executed; No Success info");
                } else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                    log.warn("Failed to execute insert");
                }
            }
            log.debug("Committing inserts...");
            connection.commit();
        } catch (BatchUpdateException be) {
            log.warn("Error generated while processing batch insert", be);
            int[] updateCounts = be.getUpdateCounts();
            for (int i = 0; i < updateCounts.length; i++) {
                if (updateCounts[i] > -1) {
                    log.warn("Executed with exception: insertCount=" + updateCounts[i]);
                } else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
                    log.warn("Executed with exception; No Success info");
                } else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                    log.warn("Failed to execute insert with exception");
                }
            }
            try {
                log.debug("Rolling back batch", be);
                connection.rollback();
            } catch (Exception rbe) {
                // log the rollback failure itself, not the original batch exception
                log.warn("Error generated while rolling back batch insert", rbe);
            }
            // marks the changeset as a failed one
            throw new CustomChangeException("Failed to insert one or more concept map types", be);
        }
    } catch (DatabaseException e) {
        throw new CustomChangeException("Failed to insert one or more concept map types:", e);
    } catch (SQLException e) {
        throw new CustomChangeException("Failed to insert one or more concept map types:", e);
    } finally {
        // reset to auto commit mode
        try {
            connection.setAutoCommit(true);
        } catch (DatabaseException e) {
            log.warn("Failed to reset auto commit back to true", e);
        }
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.warn("Failed to close the resultset object");
            }
        }
        if (pStmt != null) {
            try {
                pStmt.close();
            } catch (SQLException e) {
                log.warn("Failed to close the prepared statement object");
            }
        }
    }
}
From source file:com.wabacus.system.dataimport.DataImportItem.java
private void updateRowDataToDB(AbsDatabaseType dbtype, Connection conn, PreparedStatement pstmtDelete,
        PreparedStatement pstmtInsert, DataImportSqlBean disqlbeanDelete, DataImportSqlBean disqlbeanInsert)
        throws SQLException {
    List lstColsInFileDelete = null;
    List<IDataType> lstColTypeDelete = null;
    if (disqlbeanDelete != null) {
        lstColsInFileDelete = disqlbeanDelete.getLstParamColsInFile();
        lstColTypeDelete = disqlbeanDelete.getLstParamTypes();
    }
    List lstColsInFileInsert = disqlbeanInsert.getLstParamColsInFile();
    List<IDataType> lstColTypeInsert = disqlbeanInsert.getLstParamTypes();
    boolean matchFileIndex = configBean.getColMapBean().getFileMapType().equals("index");
    List<String> lstColNames = this.getLstColNames(matchFileIndex); // column names from the import file
    boolean hasUnCommitData = false;
    List lstDataColValuesPerRow;
    Map<String, Object> mDataColValues = null;
    int i = fileProcessor.getStartrecordindex();
    for (int len = fileProcessor.getStartrecordindex() + fileProcessor.getRecordcount(); i < len; i++) {
        lstDataColValuesPerRow = fileProcessor.getRowData(i);
        if (lstDataColValuesPerRow == null || lstDataColValuesPerRow.size() == 0)
            continue;
        if (configBean.getInterceptor() != null) {
            boolean flag = configBean.getInterceptor().beforeImportRow(conn, this, lstColNames,
                    lstDataColValuesPerRow);
            if (!flag)
                continue;
        }
        if (!matchFileIndex) {
            mDataColValues = getAllColTitleAndValueMap(lstColNames, lstDataColValuesPerRow);
        }
        if (pstmtDelete != null) {
            this.errorSqlTrace = disqlbeanDelete.getSql();
            updateDBRowData(pstmtDelete, dbtype, lstColsInFileDelete, lstColTypeDelete, matchFileIndex,
                    lstDataColValuesPerRow, mDataColValues);
            if (this.shouldBatchCommit(i)) {
                pstmtDelete.executeBatch();
            }
        }
        this.errorSqlTrace = disqlbeanInsert.getSql();
        updateDBRowData(pstmtInsert, dbtype, lstColsInFileInsert, lstColTypeInsert, matchFileIndex,
                lstDataColValuesPerRow, mDataColValues);
        hasUnCommitData = true;
        if (this.shouldBatchCommit(i)) {
            hasUnCommitData = false;
            pstmtInsert.executeBatch();
        }
        if (configBean.getInterceptor() != null) {
            configBean.getInterceptor().afterImportRow(conn, this, lstColNames, lstDataColValuesPerRow);
        }
    }
    if (hasUnCommitData) {
        // flush any batches still queued on the prepared statements
        if (pstmtDelete != null) {
            this.errorSqlTrace = disqlbeanDelete.getSql();
            pstmtDelete.executeBatch();
        }
        this.errorSqlTrace = disqlbeanInsert.getSql();
        pstmtInsert.executeBatch();
    }
}
From source file:org.seasar.dbflute.logic.replaceschema.loaddata.impl.DfXlsDataHandlerImpl.java
protected void doWriteDataTable(DfXlsDataResource resource, File file, DfDataTable dataTable) {
    final String tableDbName = dataTable.getTableDbName();
    if (dataTable.getRowSize() == 0) {
        _log.info("*Not found row at the table: " + tableDbName);
        return;
    }
    // set up columnMetaInfo
    final Map<String, DfColumnMeta> columnMetaMap = getColumnMetaMap(tableDbName);
    if (columnMetaMap.isEmpty()) {
        throwTableNotFoundException(file, tableDbName);
    }
    // process before handling table
    beforeHandlingTable(tableDbName, columnMetaMap);
    // set up columnNameList
    final List<String> columnNameList = new ArrayList<String>();
    for (int j = 0; j < dataTable.getColumnSize(); j++) {
        final DfDataColumn dataColumn = dataTable.getColumn(j);
        final String columnName = dataColumn.getColumnDbName();
        columnNameList.add(columnName);
    }
    final String dataDirectory = resource.getDataDirectory();
    final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory);
    final boolean suppressBatchUpdate = isMergedSuppressBatchUpdate(resource.getDataDirectory());
    Connection conn = null;
    PreparedStatement ps = null;
    String preparedSql = null;
    SQLException retryEx = null;
    DfDataRow retryDataRow = null;
    try {
        conn = _dataSource.getConnection();
        int loadedRowCount = 0;
        final int rowSize = dataTable.getRowSize();
        boolean existsEmptyRow = false;
        for (int i = 0; i < rowSize; i++) {
            final DfDataRow dataRow = dataTable.getRow(i);
            if (ps == null) {
                final MyCreatedState myCreatedState = new MyCreatedState();
                preparedSql = myCreatedState.buildPreparedSql(dataRow);
                ps = conn.prepareStatement(preparedSql);
            }
            if (doWriteDataRow(resource, file, dataTable, dataRow // basic resources
                    , columnMetaMap, columnNameList // meta data
                    , conn, ps // JDBC resources
                    , loggingInsertType, suppressBatchUpdate)) { // option
                ++loadedRowCount;
                if (existsEmptyRow) {
                    final int emptyRowNumber = dataRow.getRowNumber() - 1;
                    throwXlsDataEmptyRowDataException(file, dataTable, emptyRowNumber);
                }
            } else {
                existsEmptyRow = true;
            }
        }
        if (existsEmptyRow) {
            _log.info("...Skipping the terminal garbage row");
        }
        if (!suppressBatchUpdate) {
            boolean transactionClosed = false;
            try {
                conn.setAutoCommit(false); // transaction to retry after
                ps.executeBatch();
                conn.commit();
                transactionClosed = true;
            } catch (SQLException e) {
                conn.rollback();
                transactionClosed = true;
                if (!(e instanceof BatchUpdateException)) {
                    throw e;
                }
                _log.info("...Retrying by suppressing batch update: " + tableDbName);
                final PreparedStatement retryPs = conn.prepareStatement(preparedSql);
                for (int i = 0; i < rowSize; i++) {
                    final DfDataRow dataRow = dataTable.getRow(i);
                    try {
                        doWriteDataRow(resource, file, dataTable, dataRow // basic resources
                                , columnMetaMap, columnNameList // meta data
                                , conn, retryPs // JDBC resources
                                , LoggingInsertType.NONE, true); // option (no logging and suppress batch)
                    } catch (SQLException rowEx) {
                        retryEx = rowEx;
                        retryDataRow = dataRow;
                        break;
                    }
                }
                try {
                    retryPs.close();
                } catch (SQLException ignored) {
                }
                throw e;
            } finally {
                if (!transactionClosed) {
                    conn.rollback(); // for other exceptions
                }
            }
        }
        noticeLoadedRowSize(tableDbName, loadedRowCount);
        checkImplicitClassification(file, tableDbName, columnNameList, conn);
    } catch (SQLException e) {
        handleWriteTableException(file, dataTable, e, retryEx, retryDataRow, columnNameList);
    } finally {
        closeResource(conn, ps);
        // process after (finally) handling table
        finallyHandlingTable(tableDbName, columnMetaMap);
    }
}
From source file:org.rhq.enterprise.server.event.EventManagerBean.java
public void addEventData(Map<EventSource, Set<Event>> events) {
    if (events == null || events.size() == 0)
        return;
    String statementSql;
    Connection conn = null;
    PreparedStatement ps = null;
    try {
        conn = rhqDs.getConnection();
        DatabaseType dbType = DatabaseTypeFactory.getDatabaseType(conn);
        if (dbType instanceof PostgresqlDatabaseType || dbType instanceof OracleDatabaseType
                || dbType instanceof H2DatabaseType) {
            String nextvalSql = JDBCUtil.getNextValSql(conn, EventSource.TABLE_NAME);
            statementSql = String.format(EVENT_SOURCE_INSERT_STMT, nextvalSql);
        } else if (dbType instanceof SQLServerDatabaseType) {
            statementSql = EVENT_SOURCE_INSERT_STMT_AUTOINC;
        } else {
            throw new IllegalArgumentException("Unknown database type, can't continue: " + dbType);
        }
        // First insert the "keys" (i.e. the EventSources).
        ps = conn.prepareStatement(statementSql);
        try {
            for (EventSource eventSource : events.keySet()) {
                int paramIndex = 1;
                ps.setString(paramIndex++, eventSource.getEventDefinition().getName());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getName());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getPlugin());
                ps.setInt(paramIndex++, eventSource.getResource().getId());
                ps.setString(paramIndex++, eventSource.getLocation());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getName());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getName());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getPlugin());
                ps.setInt(paramIndex++, eventSource.getResource().getId());
                ps.setString(paramIndex++, eventSource.getLocation());
                ps.addBatch();
            }
            ps.executeBatch();
        } finally {
            JDBCUtil.safeClose(ps);
        }
        if (dbType instanceof PostgresqlDatabaseType || dbType instanceof OracleDatabaseType
                || dbType instanceof H2DatabaseType) {
            String nextvalSql = JDBCUtil.getNextValSql(conn, Event.TABLE_NAME);
            statementSql = String.format(EVENT_INSERT_STMT, nextvalSql);
        } else if (dbType instanceof SQLServerDatabaseType) {
            statementSql = EVENT_INSERT_STMT_AUTOINC;
        } else {
            throw new IllegalArgumentException("Unknown database type, can't continue: " + dbType);
        }
        // Then insert the "values" (i.e. the Events).
        ps = conn.prepareStatement(statementSql);
        try {
            for (EventSource eventSource : events.keySet()) {
                Set<Event> eventData = events.get(eventSource);
                for (Event event : eventData) {
                    int paramIndex = 1;
                    ps.setString(paramIndex++, eventSource.getEventDefinition().getName());
                    ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getName());
                    ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getPlugin());
                    ps.setInt(paramIndex++, eventSource.getResource().getId());
                    ps.setString(paramIndex++, eventSource.getLocation());
                    ps.setLong(paramIndex++, event.getTimestamp());
                    ps.setString(paramIndex++, event.getSeverity().toString());
                    ps.setString(paramIndex++, event.getDetail());
                    ps.addBatch();
                }
                notifyAlertConditionCacheManager("addEventData", eventSource,
                        eventData.toArray(new Event[eventData.size()]));
            }
            ps.executeBatch();
        } finally {
            JDBCUtil.safeClose(ps);
        }
    } catch (Throwable t) {
        // TODO what do we want to do here?
        log.warn("addEventData: Insert of events failed : " + t.getMessage());
        if (t instanceof SQLException) {
            SQLException e = (SQLException) t;
            Exception e2 = e.getNextException();
            if (e2 != null)
                log.warn(" : " + e2.getMessage());
            if (t.getCause() != null)
                log.warn(" : " + t.getCause().getMessage());
        }
    } finally {
        JDBCUtil.safeClose(conn);
    }
}
From source file:com.octo.captcha.engine.bufferedengine.buffer.DatabaseCaptchaBuffer.java
/**
 * Put a collection of captchas with their locale.
 *
 * @param captchas The captchas to add
 * @param locale   The locale of the captchas
 */
public void putAllCaptcha(Collection captchas, Locale locale) {
    Connection con = null;
    PreparedStatement ps = null;
    if (captchas != null && captchas.size() > 0) {
        Iterator captIt = captchas.iterator();
        if (log.isDebugEnabled()) {
            log.debug("try to insert " + captchas.size() + " captchas");
        }
        try {
            con = datasource.getConnection();
            con.setAutoCommit(false);
            ps = con.prepareStatement("insert into " + table + "(" + timeMillisColumn + "," + hashCodeColumn
                    + "," + localeColumn + "," + captchaColumn + ") values (?,?,?,?)");
            while (captIt.hasNext()) {
                Captcha captcha = (Captcha) captIt.next();
                try {
                    long currenttime = System.currentTimeMillis();
                    long hash = captcha.hashCode();
                    ps.setLong(1, currenttime);
                    ps.setLong(2, hash);
                    ps.setString(3, locale.toString());
                    // Serialise the entry
                    final ByteArrayOutputStream outstr = new ByteArrayOutputStream();
                    final ObjectOutputStream objstr = new ObjectOutputStream(outstr);
                    objstr.writeObject(captcha);
                    objstr.close();
                    final ByteArrayInputStream inpstream = new ByteArrayInputStream(outstr.toByteArray());
                    ps.setBinaryStream(4, inpstream, outstr.size());
                    ps.addBatch();
                    if (log.isDebugEnabled()) {
                        log.debug("insert captcha added to batch : " + currenttime + ";" + hash);
                    }
                } catch (IOException e) {
                    log.warn("error during captcha serialization, "
                            + "check your class versions. removing row from database", e);
                }
            }
            // execute batch and commit
            ps.executeBatch();
            log.debug("batch executed");
            con.commit();
            log.debug("batch committed");
        } catch (SQLException e) {
            log.error(DB_ERROR, e);
        } finally {
            if (ps != null) {
                try {
                    ps.close();
                } catch (SQLException e) {
                    // ignore close failure
                }
            }
            if (con != null) {
                try {
                    con.close();
                } catch (SQLException e) {
                    // ignore close failure
                }
            }
        }
    }
}