List of usage examples for java.sql.PreparedStatement.executeBatch()
int[] executeBatch() throws SQLException;
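executeBatch() submits every parameter set queued with addBatch() in a single round trip and returns an array holding one update count per queued set. Before the project excerpts below, here is a minimal self-contained sketch of the pattern; the USERS table and NAME column are illustrative only, not taken from any of the projects:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import java.util.List;

    public class BatchInsertExample {

        // Inserts all names in one round trip and returns the total row count.
        static int insertAll(Connection conn, List<String> names) throws SQLException {
            String sql = "INSERT INTO USERS (NAME) VALUES (?)";
            try (PreparedStatement ps = conn.prepareStatement(sql)) {
                for (String name : names) {
                    ps.setString(1, name);
                    ps.addBatch();                    // queue this parameter set
                }
                int[] counts = ps.executeBatch();     // one count per queued set
                int total = 0;
                for (int c : counts) {
                    total += c;
                }
                return total;
            }
        }
    }

The excerpts below show the same addBatch()/executeBatch() loop in production DAOs, usually combined with manual commit and rollback handling.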
From source file: com.yahoo.ycsb.db.PostgreSQLJsonbClient.java

@Override
public void cleanup() throws DBException {
    if (batchSize > 0) {
        try {
            // commit unfinished batches
            for (PreparedStatement st : cachedStatements.values()) {
                if (!st.getConnection().isClosed() && !st.isClosed() && (numRowsInBatch % batchSize != 0)) {
                    st.executeBatch();
                }
            }
        } catch (SQLException e) {
            System.err.println("Error in cleanup execution. " + e);
            throw new DBException(e);
        }
    }
    try {
        cleanupAllConnections();
    } catch (SQLException e) {
        System.err.println("Error in closing the connection. " + e);
        throw new DBException(e);
    }
}
From source file: eionet.cr.dao.virtuoso.VirtuosoUrgentHarvestQueueDAO.java

@Override
public void addPullHarvests(List<UrgentHarvestQueueItemDTO> queueItems) throws DAOException {
    String sql = "insert into URGENT_HARVEST_QUEUE (URL,\"TIMESTAMP\") VALUES (?,NOW())";
    PreparedStatement ps = null;
    Connection conn = null;
    try {
        conn = getSQLConnection();
        ps = conn.prepareStatement(sql);
        for (int i = 0; i < queueItems.size(); i++) {
            UrgentHarvestQueueItemDTO dto = queueItems.get(i);
            String url = dto.getUrl();
            if (url != null) {
                url = StringUtils.substringBefore(url, "#");
            }
            ps.setString(1, url);
            ps.addBatch();
        }
        ps.executeBatch();
    } catch (Exception e) {
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(ps);
        SQLUtil.close(conn);
    }
}
From source file: org.wso2.carbon.device.mgt.core.dao.impl.ApplicationMappingDAOImpl.java

@Override
public void addApplicationMappings(int deviceId, List<Integer> applicationIds, int tenantId)
        throws DeviceManagementDAOException {
    Connection conn;
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = this.getConnection();
        String sql = "INSERT INTO DM_DEVICE_APPLICATION_MAPPING (DEVICE_ID, APPLICATION_ID, "
                + "TENANT_ID) VALUES (?, ?, ?)";
        conn.setAutoCommit(false);
        stmt = conn.prepareStatement(sql);
        for (int applicationId : applicationIds) {
            stmt.setInt(1, deviceId);
            stmt.setInt(2, applicationId);
            stmt.setInt(3, tenantId);
            stmt.addBatch();
        }
        stmt.executeBatch();
    } catch (SQLException e) {
        throw new DeviceManagementDAOException("Error occurred while adding device application mappings", e);
    } finally {
        DeviceManagementDAOUtil.cleanupResources(stmt, rs);
    }
}
From source file: org.rhq.enterprise.server.purge.PurgeTemplate.java

private int deleteRows(List<KEY> selectedKeys) throws Exception {
    Connection connection = null;
    PreparedStatement preparedStatement = null;
    try {
        userTransaction.begin();
        String deleteRowByKeyQuery = getDeleteRowByKeyQuery(databaseType);
        connection = dataSource.getConnection();
        preparedStatement = connection.prepareStatement(deleteRowByKeyQuery);
        for (KEY key : selectedKeys) {
            setDeleteRowByKeyQueryParams(preparedStatement, key);
            preparedStatement.addBatch();
        }
        int[] batchResults = preparedStatement.executeBatch();
        userTransaction.commit();
        return evalDeletedRows(batchResults);
    } finally {
        JDBCUtil.safeClose(connection, preparedStatement, null);
        rollbackIfTransactionActive();
    }
}
From source file: eionet.cr.dao.virtuoso.VirtuosoEndpointHarvestQueryDAO.java

@Override
public void delete(List<Integer> ids) throws DAOException {
    if (ids == null || ids.isEmpty()) {
        return;
    }
    Connection conn = null;
    PreparedStatement stmt = null;
    try {
        conn = getSQLConnection();
        conn.setAutoCommit(false);
        stmt = conn.prepareStatement(DELETE_SQL);
        for (Integer id : ids) {
            stmt.setInt(1, id);
            stmt.addBatch();
        }
        stmt.executeBatch();
        conn.commit();
    } catch (SQLException e) {
        SQLUtil.rollback(conn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(stmt);
        SQLUtil.close(conn);
    }
}
From source file: com.dsf.dbxtract.cdc.journal.JournalExecutor.java

/**
 * Removes imported references from the journal table.
 *
 * @param conn
 * @param rows
 * @throws SQLException
 */
private void deleteFromJournal(Connection conn, List<Map<String, Object>> rows) throws SQLException {
    if (rows.isEmpty()) {
        if (logger.isDebugEnabled())
            logger.debug(logPrefix + "nothing to clean");
        return;
    }
    if (logger.isDebugEnabled())
        logger.debug(logPrefix + "cleaning journal " + handler.getJournalTable());

    StringBuilder sb = new StringBuilder("delete from " + handler.getJournalTable() + " where ");
    for (int i = 0; i < journalColumns.size(); i++) {
        sb.append(i > 0 ? " and " : "").append(journalColumns.get(i)).append("=?");
    }
    PreparedStatement ps = null;
    try {
        ps = conn.prepareStatement(sb.toString());
        for (Map<String, Object> keys : rows) {
            for (int i = 0; i < journalColumns.size(); i++) {
                ps.setObject(i + 1, keys.get(journalColumns.get(i)));
            }
            ps.addBatch();
        }
        ps.executeBatch();
        logger.info(logPrefix + rows.size() + " rows removed (" + handler.getJournalTable() + ")");
    } finally {
        DBUtils.close(ps);
    }
}
From source file: com.taobao.tddl.jdbc.group.TGroupPreparedStatement.java

private int[] executeBatchOnConnection(Connection conn) throws SQLException {
    PreparedStatement ps = createPreparedStatementInternal(conn, sql);
    for (Map<Integer, ParameterContext> parameterSettings : pstArgs) {
        setBatchParameters(ps, parameterSettings.values());
        ps.addBatch();
    }
    return ps.executeBatch();
}
From source file: eionet.cr.dao.virtuoso.VirtuosoEndpointHarvestQueryDAO.java

@Override
public void activateDeactivate(List<Integer> ids) throws DAOException {
    if (ids == null || ids.isEmpty()) {
        return;
    }
    Connection conn = null;
    PreparedStatement stmt = null;
    try {
        conn = getSQLConnection();
        conn.setAutoCommit(false);
        stmt = conn.prepareStatement(ACTIVATE_DEACTIVATE_SQL);
        for (Integer id : ids) {
            stmt.setInt(1, id);
            stmt.addBatch();
        }
        stmt.executeBatch();
        conn.commit();
    } catch (SQLException e) {
        SQLUtil.rollback(conn);
        throw new DAOException(e.getMessage(), e);
    } finally {
        SQLUtil.close(stmt);
        SQLUtil.close(conn);
    }
}
From source file: org.openadaptor.auxil.connector.jdbc.writer.AbstractSQLWriter.java

/**
 * Write a batch of records to a database.
 * <br>
 * This will execute a batch PreparedStatement if the incoming batch
 * has multiple records and the connection has batch support.
 * Otherwise it will repeatedly execute a PreparedStatement for
 * each record in the batch. It delegates to createBatchStatement() or
 * createStatement() accordingly.
 *
 * @param data Object[] containing records to be written.
 * @throws SQLException if the batch cannot be written.
 */
public void writeBatch(Object[] data) throws SQLException {
    try {
        int len = data.length;
        if ((len > 1 || this.usedBatch) && batchSupport) {
            this.usedBatch = true;
            log.debug("Constructing a batch, size=" + len);
            PreparedStatement ps = createBatchStatement(data);
            log.debug("Writing batch");
            int[] updateCounts = ps.executeBatch();
            if (log.isDebugEnabled()) {
                int updates = 0;
                for (int i = 0; i < updateCounts.length; i++) {
                    updates += updateCounts[i];
                }
                log.debug("Summed update count: " + updates);
            }
            log.info("Batch written");
            releaseStatement(ps);
        } else {
            if (log.isDebugEnabled()) {
                if (len > 1) {
                    log.debug("No batch support - executing individual statements for " + len + " records");
                } else {
                    log.debug("Executing statement for single record");
                }
            }
            for (int i = 0; i < len; i++) {
                Object datum = data[i];
                if (datum == null) {
                    throw new SQLException("Cannot create Statement from null data");
                }
                PreparedStatement ps = createStatement(data[i]);
                ps.executeUpdate();
                releaseStatement(ps);
            }
        }
    } catch (SQLException sqle) {
        // Just log to debug and rethrow.
        log.debug("Exception in writeBatch(): " + sqle.getMessage());
        throw sqle;
    }
}
From source file: org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void truncateOperationIDsForArchival() throws ArchivalDAOException {
    PreparedStatement stmt = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        conn.setAutoCommit(false);
        String sql = "TRUNCATE DM_ARCHIVED_OPERATIONS";
        stmt = conn.prepareStatement(sql);
        stmt.addBatch();
        stmt.executeBatch();
        conn.commit();
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while truncating operation Ids", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt);
    }
}
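Since the TRUNCATE above takes no parameters, the addBatch()/executeBatch() pair batches exactly one statement; a plain executeUpdate() is the more direct single-statement equivalent, sketched here under the same table name:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;

    final class TruncateSketch {

        // Single-statement equivalent of the one-entry batch above.
        // Assumes auto-commit was disabled, as in the excerpt.
        static void truncateArchivedOperations(Connection conn) throws SQLException {
            try (PreparedStatement stmt = conn.prepareStatement("TRUNCATE DM_ARCHIVED_OPERATIONS")) {
                stmt.executeUpdate();
                conn.commit();
            }
        }
    }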