List of usage examples for java.sql.PreparedStatement.executeBatch()
int[] executeBatch() throws SQLException;
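Before the project examples below, a minimal sketch of the pattern they all share: bind parameters, queue each parameter set with addBatch(), then send everything in one round trip with executeBatch(), which returns one update count per queued command. The table users(name) and the class name are placeholders for illustration, not taken from any of the examples.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

public class BatchInsertSketch {

    // Inserts all names in one batch; "users(name)" is a placeholder table.
    static int insertNames(Connection conn, List<String> names) throws SQLException {
        String sql = "INSERT INTO users (name) VALUES (?)";
        try (PreparedStatement ps = conn.prepareStatement(sql)) {
            for (String name : names) {
                ps.setString(1, name);
                ps.addBatch(); // queue one set of bound parameters
            }
            // one entry per queued command: an update count, or SUCCESS_NO_INFO
            int[] counts = ps.executeBatch();
            int total = 0;
            for (int c : counts) {
                // SUCCESS_NO_INFO means the command ran but the driver did not report a count
                total += (c == Statement.SUCCESS_NO_INFO) ? 1 : c;
            }
            return total;
        }
    }
}

Several of the examples below add refinements on top of this pattern: wrapping the batch in setAutoCommit(false)/commit(), flushing in chunks (for example every 1024 rows), or reading auto-generated keys after the batch completes.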
From source file:org.bidtime.dbutils.QueryRunnerEx.java
/**
 * Executes the given batch of INSERT SQL statements.
 *
 * @param conn      The connection to use for the query call.
 * @param closeConn True if the connection should be closed, false otherwise.
 * @param sql       The SQL statement to execute.
 * @param rsh       The handler used to create the result object from
 *                  the <code>ResultSet</code> of auto-generated keys.
 * @param params    The query replacement parameters.
 * @return The result generated by the handler.
 * @throws SQLException If there are database or parameter errors.
 * @since 1.6
 */
private <T> T insertBatch(Connection conn, boolean closeConn, String sql, ResultSetHandler<T> rsh,
        Object[][] params) throws SQLException {
    if (conn == null) {
        throw new SQLException("Null connection");
    }
    if (sql == null) {
        if (closeConn) {
            close(conn);
        }
        throw new SQLException("Null SQL statement");
    }
    if (params == null) {
        if (closeConn) {
            close(conn);
        }
        throw new SQLException("Null parameters. If parameters aren't need, pass an empty array.");
    }

    PreparedStatement stmt = null;
    long startTime = System.currentTimeMillis();
    T generatedKeys = null;
    try {
        stmt = this.prepareStatement(conn, sql, Statement.RETURN_GENERATED_KEYS);
        stmt.setQueryTimeout(StmtParams.getInstance().getStmtBatchTimeOut());
        for (int i = 0; i < params.length; i++) {
            this.fillStatement(stmt, params[i]);
            stmt.addBatch();
        }
        stmt.executeBatch();
        ResultSet rs = stmt.getGeneratedKeys();
        generatedKeys = rsh.handle(rs);
    } catch (SQLException e) {
        this.rethrow(e, sql, (Object[]) params);
    } finally {
        close(stmt);
        if (closeConn) {
            close(conn);
        }
        if (LogInsertSql.logInfoOrDebug()) {
            LogInsertSql.logFormatTimeNow(startTime, sql, params);
        }
    }
    return generatedKeys;
}
From source file:org.apache.hadoop.hive.ql.metadata.BIStore.java
public int insertInsertExeInfo(Connection cc, Collection<InsertExeInfo> insertInfoList) {
    if (cc == null || insertInfoList == null || insertInfoList.isEmpty()) {
        return -1;
    }

    int rt = -1;
    PreparedStatement pstmt;
    String queryID = "";
    try {
        pstmt = cc.prepareStatement(
                "insert into tdw_insert_info(queryid, desttable, successnum, rejectnum, ismultiinsert) values (?,?,?,?,?)");
        for (InsertExeInfo insertInfo : insertInfoList) {
            queryID = insertInfo.getQueryID();
            pstmt.setString(1, insertInfo.getQueryID());
            pstmt.setString(2, insertInfo.getDestTable());
            pstmt.setLong(3, insertInfo.getFsSuccessNum());
            pstmt.setLong(4, insertInfo.getFsRejectNum());
            pstmt.setBoolean(5, insertInfo.getIsMultiInsert());
            pstmt.addBatch();
        }
        pstmt.executeBatch();
        rt = 0;
    } catch (SQLException e) {
        LOG.error(" insert INSERT EXE Info failed: " + queryID);
        e.printStackTrace();
    }
    return rt;
}
From source file:org.wso2.carbon.is.migration.dao.ClaimDAO.java
/**
 * Add claim properties
 *
 * @param connection
 * @param localClaimId
 * @param claimProperties
 * @param tenantId
 * @throws ISMigrationException
 */
private void addClaimProperties(Connection connection, int localClaimId, Map<String, String> claimProperties,
        int tenantId) throws ISMigrationException {
    PreparedStatement prepStmt = null;
    if (localClaimId > 0 && claimProperties != null) {
        try {
            String query = SQLConstants.ADD_CLAIM_PROPERTY;
            prepStmt = connection.prepareStatement(query);
            for (Map.Entry<String, String> property : claimProperties.entrySet()) {
                prepStmt.setInt(1, localClaimId);
                prepStmt.setString(2, property.getKey());
                prepStmt.setString(3, property.getValue() != null ? property.getValue() : "");
                prepStmt.setInt(4, tenantId);
                prepStmt.addBatch();
            }
            prepStmt.executeBatch();
        } catch (SQLException e) {
            throw new ISMigrationException("Error while adding claim properties", e);
        } finally {
            IdentityDatabaseUtil.closeStatement(prepStmt);
        }
    }
}
From source file:com.nabla.wapp.server.auth.UserManager.java
public boolean updateUserRoleTable() throws SQLException {
    final Map<Integer, Map<Integer, Set<Integer>>> userRoles = loadUserRoles();
    Database.executeUpdate(conn, "DELETE FROM user_role;");
    final PreparedStatement stmt = conn
            .prepareStatement("INSERT INTO user_role (object_id, user_id, role_id) VALUES(?,?,?);");
    try {
        stmt.clearBatch();
        for (Map.Entry<Integer, Map<Integer, Set<Integer>>> e : userRoles.entrySet()) {
            if (e.getKey() == null)
                stmt.setNull(1, Types.BIGINT);
            else
                stmt.setInt(1, e.getKey());
            for (Map.Entry<Integer, Set<Integer>> ee : e.getValue().entrySet()) {
                stmt.setInt(2, ee.getKey());
                for (Integer roleId : ee.getValue()) {
                    stmt.setInt(3, roleId);
                    stmt.addBatch();
                }
            }
        }
        return Database.isBatchCompleted(stmt.executeBatch());
    } finally {
        stmt.close();
    }
}
From source file:org.apache.beehive.controls.system.jdbc.JdbcControlImpl.java
/**
 * Create and exec a {@link PreparedStatement}
 *
 * @param method the method to invoke
 * @param args   the method's arguments
 * @return the return value from the {@link PreparedStatement}
 * @throws Throwable any exception that occurs; the caller should handle these appropriately
 */
protected Object execPreparedStatement(Method method, Object[] args) throws Throwable {

    final SQL methodSQL = (SQL) _context.getMethodPropertySet(method, SQL.class);
    if (methodSQL == null || methodSQL.statement() == null) {
        throw new ControlException("Method " + method.getName() + " is missing @SQL annotation");
    }

    setTypeMappers(methodSQL.typeMappersOverride());

    //
    // build the statement and execute it
    //
    PreparedStatement ps = null;
    try {
        Class returnType = method.getReturnType();
        SqlStatement sqlStatement = _sqlParser.parse(methodSQL.statement());
        ps = sqlStatement.createPreparedStatement(_context, _connection, _cal, method, args);

        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("PreparedStatement: "
                    + sqlStatement.createPreparedStatementString(_context, _connection, method, args));
        }

        //
        // special processing for batch updates
        //
        if (sqlStatement.isBatchUpdate()) {
            return ps.executeBatch();
        }

        //
        // execute the statement
        //
        boolean hasResults = ps.execute();

        //
        // callable statement processing
        //
        if (sqlStatement.isCallableStatement()) {
            SQLParameter[] params = (SQLParameter[]) args[0];
            for (int i = 0; i < params.length; i++) {
                if (params[i].dir != SQLParameter.IN) {
                    params[i].value = ((CallableStatement) ps).getObject(i + 1);
                }
            }
            return null;
        }

        //
        // process returned data
        //
        ResultSet rs = null;
        int updateCount = ps.getUpdateCount();

        if (hasResults) {
            rs = ps.getResultSet();
        }

        if (sqlStatement.getsGeneratedKeys()) {
            rs = ps.getGeneratedKeys();
            hasResults = true;
        }

        if (!hasResults && updateCount > -1) {
            boolean moreResults = ps.getMoreResults();
            int tempUpdateCount = ps.getUpdateCount();
            while ((moreResults && rs == null) || tempUpdateCount > -1) {
                if (moreResults) {
                    rs = ps.getResultSet();
                    hasResults = true;
                    moreResults = false;
                    tempUpdateCount = -1;
                } else {
                    moreResults = ps.getMoreResults();
                    tempUpdateCount = ps.getUpdateCount();
                }
            }
        }

        Object returnObject = null;
        if (hasResults) {
            //
            // if a result set mapper was specified in the methods annotation, use it
            // otherwise find the mapper for the return type in the hashmap
            //
            final Class resultSetMapperClass = methodSQL.resultSetMapper();
            final ResultSetMapper rsm;
            if (!UndefinedResultSetMapper.class.isAssignableFrom(resultSetMapperClass)) {
                if (ResultSetMapper.class.isAssignableFrom(resultSetMapperClass)) {
                    rsm = (ResultSetMapper) resultSetMapperClass.newInstance();
                } else {
                    throw new ControlException("Result set mappers must be subclasses of ResultSetMapper.class!");
                }
            } else {
                if (_resultMappers.containsKey(returnType)) {
                    rsm = _resultMappers.get(returnType);
                } else {
                    if (_xmlObjectClass != null && _xmlObjectClass.isAssignableFrom(returnType)) {
                        rsm = _resultMappers.get(_xmlObjectClass);
                    } else {
                        rsm = DEFAULT_MAPPER;
                    }
                }
            }
            returnObject = rsm.mapToResultType(_context, method, rs, _cal);
            if (rsm.canCloseResultSet() == false) {
                getResources().add(ps);
            }

        //
        // empty ResultSet
        //
        } else {
            if (returnType.equals(Void.TYPE)) {
                returnObject = null;
            } else if (returnType.equals(Integer.TYPE)) {
                returnObject = new Integer(updateCount);
            } else if (!sqlStatement.isCallableStatement()) {
                throw new ControlException(
                        "Method " + method.getName() + "is DML but does not return void or int");
            }
        }
        return returnObject;

    } finally {
        // Keep statements open that have in-use result sets
        if (ps != null && !getResources().contains(ps)) {
            ps.close();
        }
    }
}
From source file:org.wso2.carbon.is.migration.service.v530.dao.ClaimDAO.java
/**
 * Add claim properties
 *
 * @param connection
 * @param localClaimId
 * @param claimProperties
 * @param tenantId
 * @throws MigrationClientException
 */
private void addClaimProperties(Connection connection, int localClaimId, Map<String, String> claimProperties,
        int tenantId) throws MigrationClientException {
    PreparedStatement prepStmt = null;
    if (localClaimId > 0 && claimProperties != null) {
        try {
            String query = SQLConstants.ADD_CLAIM_PROPERTY;
            prepStmt = connection.prepareStatement(query);
            for (Map.Entry<String, String> property : claimProperties.entrySet()) {
                prepStmt.setInt(1, localClaimId);
                prepStmt.setString(2, property.getKey());
                prepStmt.setString(3, property.getValue() != null ? property.getValue() : "");
                prepStmt.setInt(4, tenantId);
                prepStmt.addBatch();
            }
            prepStmt.executeBatch();
        } catch (SQLException e) {
            throw new MigrationClientException("Error while adding claim properties", e);
        } finally {
            IdentityDatabaseUtil.closeStatement(prepStmt);
        }
    }
}
From source file:coral.data.DataServiceJbdcImpl.java
@Override
public synchronized void saveOETData(String collection, ExpData stage) {
    long id = System.currentTimeMillis();
    if (id <= lastid) {
        lastid++;
        id = lastid;
    }
    lastid = id;

    PreparedStatement prep;
    try {
        prep = conn.prepareStatement("insert into states values (?, ?, ?, ?, ?, ?, ?);");
        String inmsg = (stage.inmsg.length() < 70) ? stage.inmsg : stage.inmsg.substring(0, 70);
        prep.setString(1, Long.toString(id));
        prep.setString(2, collection);
        prep.setString(3, stage.template);
        prep.setString(4, "1");
        prep.setString(5, Integer.toString(stage._msgCounter));
        prep.setString(6, Integer.toString(stage._stageCounter));
        prep.setString(7, inmsg);
        prep.addBatch();

        conn.setAutoCommit(false);
        prep.executeBatch();
        conn.setAutoCommit(true);
        conn.commit();

        put(id, collection, stage.newMap());
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
From source file:com.sec.ose.osi.localdb.identification.IdentificationDBManager.java
synchronized public static void execute(PreparedStatement tmpPreparedStatement) {
    if (tmpPreparedStatement == null) {
        return;
    }

    log.debug(tmpPreparedStatement.toString());
    try {
        conn.setAutoCommit(false);
        tmpPreparedStatement.executeBatch();
        conn.commit();
        conn.setAutoCommit(true);
        tmpPreparedStatement.clearBatch();
    } catch (SQLException e) {
        log.warn(e);
    }
}
From source file:org.schedoscope.metascope.tasks.repository.mysql.impl.TableEntityMySQLRepository.java
public void insertOrUpdatePartial(Connection connection, List<TableEntity> tables) {
    String insertTableSql = "insert into table_entity (table_fqdn, table_name, database_name, url_path_prefix, external_table, "
            + "table_description, storage_format, materialize_once, transformation_type, status) "
            + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) "
            + "on duplicate key update table_fqdn=values(table_fqdn), table_name=values(table_name), database_name=values(database_name), "
            + "url_path_prefix=values(url_path_prefix),external_table=values(external_table), table_description=values(table_description), "
            + "storage_format=values(storage_format), materialize_once=values(materialize_once), transformation_type=values(transformation_type), "
            + "status=values(status)";
    PreparedStatement stmt = null;
    try {
        int batch = 0;
        connection.setAutoCommit(false);
        stmt = connection.prepareStatement(insertTableSql);
        for (TableEntity tableEntity : tables) {
            stmt.setString(1, tableEntity.getFqdn());
            stmt.setString(2, tableEntity.getTableName());
            stmt.setString(3, tableEntity.getDatabaseName());
            stmt.setString(4, tableEntity.getUrlPathPrefix());
            stmt.setBoolean(5, tableEntity.isExternalTable());
            stmt.setString(6, tableEntity.getTableDescription());
            stmt.setString(7, tableEntity.getStorageFormat());
            stmt.setBoolean(8, tableEntity.isMaterializeOnce());
            stmt.setString(9, tableEntity.getTransformationType());
            stmt.setString(10, tableEntity.getStatus());
            stmt.addBatch();
            batch++;
            if (batch % 1024 == 0) {
                stmt.executeBatch();
            }
        }
        stmt.executeBatch();
        connection.commit();
        connection.setAutoCommit(true);
    } catch (SQLException e) {
        LOG.error("Could not save table", e);
    } finally {
        DbUtils.closeQuietly(stmt);
    }
}
From source file:org.openbel.framework.core.kam.JdbcKAMLoaderImpl.java
@Override
public void loadDocumentNamespaceMap(Map<Integer, List<Integer>> dnsm) throws SQLException {
    PreparedStatement ps = getPreparedStatement(DOCUMENT_NAMESPACE_SQL);

    Set<Entry<Integer, List<Integer>>> entries = dnsm.entrySet();
    for (final Entry<Integer, List<Integer>> entry : entries) {
        final Integer key = entry.getKey();
        for (final Integer nsi : entry.getValue()) {
            ps.setInt(1, (key + 1));
            ps.setInt(2, (nsi + 1));
            ps.addBatch();
        }
    }
    ps.executeBatch();
}
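None of the project examples above inspect what happens when part of a batch fails. As a supplementary sketch (the table events(msg) and the class name are placeholders, not taken from any example above), java.sql.BatchUpdateException can be caught to read the per-command results the driver collected before or alongside the failure:

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

public class BatchFailureSketch {

    // "events(msg)" is a placeholder table; the point is the catch block.
    static void insertAll(Connection conn, String[] messages) throws SQLException {
        try (PreparedStatement ps = conn.prepareStatement("INSERT INTO events (msg) VALUES (?)")) {
            for (String m : messages) {
                ps.setString(1, m);
                ps.addBatch();
            }
            ps.executeBatch();
        } catch (BatchUpdateException e) {
            // One entry per command the driver attempted; EXECUTE_FAILED marks the failed ones.
            int[] counts = e.getUpdateCounts();
            for (int i = 0; i < counts.length; i++) {
                if (counts[i] == Statement.EXECUTE_FAILED) {
                    System.err.println("Batch entry " + i + " failed");
                }
            }
            throw e;
        }
    }
}

Drivers differ on whether they stop at the first failing command or continue with the rest, so the array from getUpdateCounts() may be shorter than the number of queued commands.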