List of usage examples for java.sql.PreparedStatement.executeBatch()
int[] executeBatch() throws SQLException;
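Before the project-specific examples below, here is a minimal, generic sketch of the usual addBatch()/executeBatch() pattern. The table person(id, name), the class name, and the helper method are illustrative assumptions, not taken from any of the source files listed on this page.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

public class ExecuteBatchSketch {

    // Queue one parameter set per name, run them as a single batch,
    // and commit the whole batch as one transaction.
    static void insertPeople(Connection con, String[] names) throws SQLException {
        con.setAutoCommit(false);
        try (PreparedStatement ps = con.prepareStatement(
                "INSERT INTO person (id, name) VALUES (?, ?)")) {
            for (int i = 0; i < names.length; i++) {
                ps.setInt(1, i);
                ps.setString(2, names[i]);
                ps.addBatch(); // queue this parameter set
            }
            int[] counts = ps.executeBatch(); // one update count per queued set
            for (int c : counts) {
                if (c == Statement.EXECUTE_FAILED) {
                    throw new SQLException("batch entry failed");
                }
            }
            con.commit();
        } catch (SQLException e) {
            con.rollback();
            throw e;
        } finally {
            con.setAutoCommit(true);
        }
    }
}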
From source file:org.paxml.bean.excel.WriteExcelTag.java
/**
 * Do excel query.
 *
 * @param context
 * @return iterator if lazy, otherwise list.
 * @throws Exception
 */
private Object doQuery(Context context) throws Exception {
    Connection con = getConnection(new ExcelFile(getValue()).getFile());
    PreparedStatement s = null;
    try {
        s = getPreparedStatement(con);
        int[] results = s.executeBatch();
        if (results.length == 1) {
            return results[0];
        } else {
            return Arrays.asList(results);
        }
    } catch (SQLException e) {
        throw new PaxmlRuntimeException(e);
    } finally {
        closeQueryResource(con, s, null);
    }
}
From source file:org.cartoweb.stats.report.DbTestCase.java
protected void addRecord(StatsRecord record) throws SQLException {
    final String query = "INSERT INTO " + TABLE_NAME + " (" + Import.MAPPER.getFieldNames()
            + ") VALUES (" + Import.MAPPER.getInsertPlaceHolders() + ")";
    PreparedStatement stmt = con.prepareStatement(query);
    Import.MAPPER.saveToDb(stmt, record, 1);
    stmt.addBatch();
    stmt.executeBatch();
    stmt.close();
    con.commit();
}
From source file:com.redhat.victims.database.VictimsSQL.java
/**
 * Helper function to execute all pending batch operations in the given
 * {@link PreparedStatement}s and close them.
 *
 * @param preparedStatements
 * @throws SQLException
 */
protected void executeBatchAndClose(PreparedStatement... preparedStatements) throws SQLException {
    for (PreparedStatement ps : preparedStatements) {
        ps.executeBatch();
        ps.clearBatch();
        ps.close();
    }
}
From source file:com.dsf.dbxtract.cdc.AppJournalDeleteTest.java
/**
 * Rigorous test :-)
 *
 * @throws Exception
 *             in case of any error
 */
@Test(timeOut = 120000)
public void testAppWithJournalDelete() throws Exception {
    final Config config = new Config(configFile);
    BasicDataSource ds = new BasicDataSource();
    Source source = config.getDataSources().getSources().get(0);
    ds.setDriverClassName(source.getDriver());
    ds.setUsername(source.getUser());
    ds.setPassword(source.getPassword());
    ds.setUrl(source.getConnection());

    // Prepare the data
    Connection conn = ds.getConnection();
    conn.createStatement().execute("truncate table test");
    conn.createStatement().execute("truncate table j$test");

    // Load the source data
    PreparedStatement ps = conn.prepareStatement("insert into test (key1,key2,data) values (?,?,?)");
    for (int i = 0; i < TEST_SIZE; i++) {
        if ((i % 100) == 0) {
            ps.executeBatch();
        }
        ps.setInt(1, i);
        ps.setInt(2, i);
        ps.setInt(3, (int) (Math.random() * 500));
        ps.addBatch();
    }
    ps.executeBatch();
    ps.close();

    app = new App(config);
    app.start();

    Assert.assertEquals(config.getHandlers().iterator().next().getStrategy(), JournalStrategy.DELETE);

    // Populate the journal tables
    ps = conn.prepareStatement("insert into j$test (key1,key2) values (?,?)");
    for (int i = 0; i < TEST_SIZE; i++) {
        if ((i % 500) == 0) {
            ps.executeBatch();
        }
        ps.setInt(1, i);
        ps.setInt(2, i);
        ps.addBatch();
    }
    ps.executeBatch();
    ps.close();

    while (true) {
        TimeUnit.MILLISECONDS.sleep(500);
        ResultSet rs = conn.createStatement().executeQuery("select count(*) from j$test");
        if (rs.next()) {
            long count = rs.getLong(1);
            System.out.println("remaining journal rows: " + count);
            rs.close();
            if (count == 0L)
                break;
        }
    }
    conn.close();
    ds.close();
}
From source file:namedatabasescraper.PageScraper.java
public void storeToDb(MainWindow parent) throws SQLException {
    logger.log(Level.INFO, "Started storing names for scraper id {0}", this.dirname);
    Connection conn = NameDatabaseScraper.application.getConnection();
    PreparedStatement stmt2 = conn.prepareStatement("INSERT INTO names VALUES (?, ?)");
    stmt2.setString(2, this.dirname);
    conn.setAutoCommit(false);
    for (String name : this.names) {
        stmt2.setString(1, name);
        stmt2.addBatch();
    }
    stmt2.executeBatch();
    conn.setAutoCommit(true);
    logger.log(Level.INFO, "Stored " + this.names.size() + " names for scraper id {0}", this.dirname);
}
From source file:biblivre3.cataloging.bibliographic.IndexDAO.java
public final boolean insert(IndexTable table, List<IndexDTO> indexList) {
    if (indexList == null || indexList.isEmpty()) {
        return false;
    }
    Connection con = null;
    try {
        con = getDataSource().getConnection();
        StringBuilder sql = new StringBuilder();
        sql.append(" INSERT INTO ").append(table.getTableName());
        sql.append(" (index_word, record_serial) ");
        sql.append(" VALUES (?, ?);");
        PreparedStatement pst = con.prepareStatement(sql.toString());
        for (IndexDTO index : indexList) {
            pst.setString(1, StringUtils.substring(index.getWord(), 0, 511));
            pst.setInt(2, index.getRecordSerial());
            pst.addBatch();
        }
        pst.executeBatch();
    } catch (BatchUpdateException bue) {
        log.error(bue.getNextException(), bue);
        throw new ExceptionUser("ERROR_BIBLIO_DAO_EXCEPTION");
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new ExceptionUser("ERROR_BIBLIO_DAO_EXCEPTION");
    } finally {
        closeConnection(con);
    }
    return true;
}
From source file:com.adaptris.core.services.jdbc.JdbcBatchingDataCaptureService.java
@Override
protected long executeUpdate(PreparedStatement insert) throws SQLException {
    int count = counter.get().incrementAndGet();
    insert.addBatch();
    long rowsUpdated = 0;
    if (count % batchWindow() == 0) {
        log.trace("BatchWindow reached, executeBatch()");
        rowsUpdated = rowsUpdated(insert.executeBatch());
    }
    return rowsUpdated;
}
From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.DeleteDependencyHelper.java
private void doInPreparedStatement(long sysTime, MMDDependency dependency, PreparedStatement ps)
        throws SQLException {
    // owner metadata ID
    ps.setString(1, dependency.getOwnerMetadata().getId());
    // value metadata ID
    ps.setString(2, dependency.getValueMetadata().getId());
    ps.addBatch();
    ps.clearParameters();
    if (++super.count % super.batchSize == 0) {
        ps.executeBatch();
        ps.clearBatch();
    }
}
From source file:de.langmi.spring.batch.examples.readers.jdbc.JdbcPagingItemReaderTests.java
/**
 * Setup Datasource and create table for test.
 *
 * @throws Exception
 */
@Before
public void setUp() throws Exception {
    // DataSource Setup, apache commons
    dataSource = new BasicDataSource();
    dataSource.setDriverClassName("org.hsqldb.jdbcDriver");
    dataSource.setUrl("jdbc:hsqldb:mem:testdb");
    dataSource.setUsername("sa");
    dataSource.setPassword("");

    // drop table if exists
    Connection conn = dataSource.getConnection();
    Statement st = conn.createStatement();
    st.execute(DROP_TEST_TABLE);
    conn.commit();
    st.close();
    conn.close();

    // create table
    conn = dataSource.getConnection();
    st = conn.createStatement();
    st.execute(CREATE_TEST_TABLE);
    conn.commit();
    st.close();
    conn.close();

    // fill with values
    conn = dataSource.getConnection();
    // prevent auto commit for batching
    conn.setAutoCommit(false);
    PreparedStatement ps = conn.prepareStatement(INSERT);
    for (int i = 0; i < EXPECTED_COUNT; i++) {
        ps.setString(1, String.valueOf(i));
        ps.addBatch();
    }
    ps.executeBatch();
    conn.commit();
    ps.close();
    conn.close();
}
From source file:org.rimudb.Session.java
public void commit(boolean ignoreErrors) throws RimuDBException {
    try {
        if (getCurrentBatchSize() > 0) {
            // Execute all the batches in the queue
            while (statementQueue.size() > 0) {
                BatchEntry batchEntry = statementQueue.remove();
                PreparedStatement ps = batchEntry.getPreparedStatement();
                try {
                    int[] results = ps.executeBatch();
                } catch (BatchUpdateException be) {
                    // Iterate through the batch counts to determine which data object had the error
                    int[] updateCounts = be.getUpdateCounts();
                    // If the updateCounts match the number of records in the batch, then
                    if (updateCounts.length == batchEntry.getSize()) {
                        // Log a warning message with the entries that failed.
                        for (int i = 0; i < updateCounts.length; i++) {
                            if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                                DataObject dataObject = batchEntry.getDataObject(i);
                                log.warn("Batch update of " + dataObject.getClass().getSimpleName() + " "
                                        + dataObject.getPrimaryWhereList() + " failed.");
                            }
                        }
                    }
                    if (!ignoreErrors) {
                        throw be;
                    }
                }
                // If this was a batch insert with generated keys
                if (batchEntry.getType() == BATCH_INSERT) {
                    // If this table has generated keys and the database supports them then
                    if (batchEntry.getTable().processesGeneratedKeys()) {
                        // Populate the record
                        Record[] records = batchEntry.getRecords();
                        batchEntry.getTable().populateGeneratedKeys(ps, records);
                    }
                }
                ps.close();
            }
        }
        // Commit the transaction
        getConnection().commit();
    } catch (SQLException e) {
        throw new RimuDBException(e, getClass().getName());
    }
}
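The RimuDB example above inspects the update counts carried by BatchUpdateException. For reference, here is a small standalone sketch of how the int[] returned by executeBatch(), or by BatchUpdateException.getUpdateCounts(), is conventionally interpreted. The class and method names are illustrative assumptions, not part of any source listed above.

import java.sql.Statement;

class BatchResultReport {

    // Print one line per batch entry: affected rows, success with unknown
    // count (SUCCESS_NO_INFO), or failure (EXECUTE_FAILED).
    static void report(int[] counts) {
        for (int i = 0; i < counts.length; i++) {
            if (counts[i] >= 0) {
                System.out.println("entry " + i + ": " + counts[i] + " row(s) affected");
            } else if (counts[i] == Statement.SUCCESS_NO_INFO) {
                System.out.println("entry " + i + ": succeeded, row count not available");
            } else if (counts[i] == Statement.EXECUTE_FAILED) {
                System.out.println("entry " + i + ": failed");
            }
        }
    }
}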