Example usage for java.sql PreparedStatement executeBatch

List of usage examples for java.sql PreparedStatement executeBatch

Introduction

On this page you can find example usage for java.sql PreparedStatement executeBatch.

Prototype

int[] executeBatch() throws SQLException;

Document

Submits a batch of commands to the database for execution and if all commands execute successfully, returns an array of update counts.
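
For orientation before the project examples below, here is a minimal, self-contained sketch of the usual call pattern: disable auto-commit, bind parameters and queue each command with addBatch(), then call executeBatch() and commit. The JDBC URL, table, and column names are illustrative only (the sketch assumes an H2 driver on the classpath); a failing command surfaces as a BatchUpdateException, whose getUpdateCounts() reports how far the batch got.

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class ExecuteBatchSketch {
    public static void main(String[] args) throws SQLException {
        // illustrative in-memory database; any JDBC data source works the same way
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            conn.createStatement().execute("CREATE TABLE item (id INT, name VARCHAR(50))");
            // prevent auto commit so the whole batch runs in one transaction
            conn.setAutoCommit(false);
            try (PreparedStatement ps = conn.prepareStatement("INSERT INTO item (id, name) VALUES (?, ?)")) {
                for (int i = 0; i < 3; i++) {
                    ps.setInt(1, i);
                    ps.setString(2, "name-" + i);
                    ps.addBatch(); // queue the command; nothing is sent yet
                }
                int[] counts = ps.executeBatch(); // one update count per queued command
                conn.commit();
                System.out.println("inserted rows: " + counts.length);
            } catch (BatchUpdateException e) {
                conn.rollback();
                // getUpdateCounts() shows which commands succeeded before the failure
                throw e;
            }
        }
    }
}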

Usage

From source file:de.langmi.spring.batch.examples.readers.support.CompositeCursorItemReaderTest.java

/**
 * Create a table and fill it with some test data.
 *
 * @param dataSource the data source to create and fill the table in
 * @throws Exception
 */
private void createTableWithTestData(final DataSource dataSource) throws Exception {
    // create table
    Connection conn = dataSource.getConnection();
    Statement st = conn.createStatement();
    st.execute(CREATE_TEST_TABLE);
    conn.commit();
    st.close();
    conn.close();

    // fill with values
    conn = dataSource.getConnection();
    // prevent auto commit for batching
    conn.setAutoCommit(false);
    PreparedStatement ps = conn.prepareStatement(INSERT);
    for (int i = 0; i < EXPECTED_COUNT; i++) {
        ps.setString(1, String.valueOf(i));
        ps.addBatch();
    }
    ps.executeBatch();
    conn.commit();
    ps.close();
    conn.close();
}

From source file:com.dsf.dbxtract.cdc.AppJournalWindowTest.java

/**
 * Rigorous test :-)
 *
 * @throws Exception
 *             in case of any error
 */
@Test(dependsOnMethods = "setUp", timeOut = 120000)
public void testAppWithJournalWindow() throws Exception {

    final Config config = new Config(configFile);

    BasicDataSource ds = new BasicDataSource();
    Source source = config.getDataSources().getSources().get(0);
    ds.setDriverClassName(source.getDriver());
    ds.setUsername(source.getUser());
    ds.setPassword(source.getPassword());
    ds.setUrl(source.getConnection());

    // prepare the data
    Connection conn = ds.getConnection();

    conn.createStatement().execute("truncate table test");
    conn.createStatement().execute("truncate table j$test");

    // load the source data
    PreparedStatement ps = conn.prepareStatement("insert into test (key1,key2,data) values (?,?,?)");
    for (int i = 0; i < TEST_SIZE; i++) {
        if ((i % 100) == 0) {
            ps.executeBatch();
        }
        ps.setInt(1, i);
        ps.setInt(2, i);
        ps.setInt(3, (int) (Math.random() * 500)); // cast the product; casting Math.random() alone always yields 0
        ps.addBatch();
    }
    ps.executeBatch();
    ps.close();

    // populate the journal tables
    ps = conn.prepareStatement("insert into j$test (key1,key2) values (?,?)");
    for (int i = 0; i < TEST_SIZE; i++) {
        if ((i % 500) == 0) {
            ps.executeBatch();
        }
        ps.setInt(1, i);
        ps.setInt(2, i);
        ps.addBatch();
    }
    ps.executeBatch();
    ps.close();

    Long maxWindowId = 0L;
    ResultSet rs = conn.createStatement().executeQuery("select max(window_id) from j$test");
    if (rs.next()) {
        maxWindowId = rs.getLong(1);
        System.out.println("maximum window_id loaded: " + maxWindowId);
    }
    rs.close();
    conn.close();
    ds.close();

    // Clear any previous test
    String zkKey = "/dbxtract/cdc/" + source.getName() + "/J$TEST/lastWindowId";
    if (client.checkExists().forPath(zkKey) != null)
        client.delete().forPath(zkKey);

    // starts monitor
    Monitor.getInstance(config);

    // start app
    app = new App(config);
    System.out.println(config.toString());
    app.start();

    Assert.assertEquals(config.getHandlers().iterator().next().getStrategy(), JournalStrategy.WINDOW);

    while (true) {
        TimeUnit.MILLISECONDS.sleep(500);

        try {
            Long lastWindowId = Long.parseLong(new String(client.getData().forPath(zkKey)));
            System.out.println("lastWindowId = " + lastWindowId);
            if (maxWindowId.longValue() == lastWindowId.longValue()) {
                System.out.println("expected window_id reached");
                break;
            }

        } catch (NoNodeException nne) {
            System.out.println("ZooKeeper - no node exception :: " + zkKey);
        }
    }
}

From source file:org.wso2.carbon.apimgt.migration.util.StatDBUtil.java

private static void updateResponseSummaryTable() {
    Connection connection = null;
    Statement statement = null;
    PreparedStatement preparedStatement = null;
    ResultSet resultSet = null;

    try {
        String sql = "SELECT CONTEXT, VERSION, API_VERSION FROM API_REQUEST_SUMMARY GROUP BY CONTEXT, VERSION, API_VERSION";

        connection = dataSource.getConnection();
        statement = connection.createStatement();
        connection.setAutoCommit(false);
        statement.setFetchSize(50);
        resultSet = statement.executeQuery(sql);

        preparedStatement = connection
                .prepareStatement("UPDATE API_RESPONSE_SUMMARY SET CONTEXT = concat(?, '/', ?) "
                        + "WHERE CONTEXT = ? AND API_VERSION = ?");
        while (resultSet.next()) {
            final String context = resultSet.getString("CONTEXT");
            final String version = resultSet.getString("VERSION");

            if (!context.endsWith('/' + version)) {
                preparedStatement.setString(1, context);
                preparedStatement.setString(2, version);
                preparedStatement.setString(3, context);
                preparedStatement.setString(4, resultSet.getString("API_VERSION"));
                preparedStatement.addBatch();
            }
        }
        preparedStatement.executeBatch();
        connection.commit();

    } catch (SQLException e) {
        log.error("SQLException when updating API_RESPONSE_SUMMARY table", e);
    } finally {
        try {
            if (preparedStatement != null)
                preparedStatement.close();
            if (statement != null)
                statement.close();
            if (resultSet != null)
                resultSet.close();
            if (connection != null)
                connection.close();
        } catch (SQLException e) {
            log.error("SQLException when closing resource", e);
        }
    }
}

From source file:las.DBConnector.java

/**
 * Load CSV test data into a SQL table.
 *
 * Example for clearFirst:
 *
 * boolean check = DBConnector.checkDataExistedInTable("MEMBERS");
 *
 * if (check) {
 *     DBConnector.loadCSVIntoTable("src/resources/members.csv", "MEMBERS", true);
 *     System.out.println("Test data inserted into MEMBERS table");
 * }
 *
 * Ignore createNewReader; it is only used by loadCSVIntoTable, so don't modify it.
 *
 * A getter and setter are provided for the separator, so you can configure the
 * separator used inside your CSV file.
 *
 * @param csvFile src/resources/xxx.csv (put your CSV file under this path)
 * @param tableName TABLENAME (all in capital letters)
 * @param clearFirst true = delete any existing rows in the table before loading;
 * false = keep existing rows and append the CSV data
 * @throws java.lang.Exception
 */
public static void loadCSVIntoTable(String csvFile, String tableName, boolean clearFirst) throws Exception {

    CSVReader csvReader = null;
    if (null == DBConnector.conn) {
        throw new Exception("Not a valid connection.");
    }
    try {

        csvReader = DBConnector.getInstance().createNewReader(csvFile);

    } catch (ClassNotFoundException | SQLException | FileNotFoundException e) {
        e.printStackTrace();
        throw new Exception("Error occured while executing file. " + e.getMessage());
    }

    String[] headerRow = csvReader.readNext();

    if (null == headerRow) {
        throw new FileNotFoundException(
                "No columns defined in given CSV file. Please check the CSV file format.");
    }

    String questionmarks = StringUtils.repeat("?,", headerRow.length);
    questionmarks = questionmarks.substring(0, questionmarks.length() - 1);

    String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName);
    query = query.replaceFirst(KEYS_REGEX, StringUtils.join(headerRow, ","));
    query = query.replaceFirst(VALUES_REGEX, questionmarks);

    String[] nextLine;
    PreparedStatement ps = null;
    try {
        conn.setAutoCommit(false);
        ps = conn.prepareStatement(query);

        if (clearFirst) {
            //delete data from table before loading csv
            conn.createStatement().execute("DELETE FROM " + tableName);
        }

        final int batchSize = 1000;
        int count = 0;
        Date date = null;
        while ((nextLine = csvReader.readNext()) != null) {
            int index = 1;
            for (String string : nextLine) {
                // dates are bound as SQL DATE, everything else as a string
                date = DateUtil.convertToDate(string);
                if (null != date) {
                    ps.setDate(index++, new java.sql.Date(date.getTime()));
                } else {
                    ps.setString(index++, string);
                }
            }
            ps.addBatch();
            if (++count % batchSize == 0) {
                ps.executeBatch(); // flush a full batch
            }
        }
        ps.executeBatch(); // insert remaining records
        conn.commit();
    } catch (SQLException e) {
        conn.rollback();
        e.printStackTrace();
        throw new Exception("Error occured while loading data from file to database." + e.getMessage());
    } finally {
        if (null != ps) {
            ps.close();
        }

        csvReader.close();
    }
}

From source file:henu.dao.impl.CaclDaoImpl.java

@Override
public boolean recordCaclUser(long cid, long users[]) {

    int result = 0;
    try {
        String sql = "insert into uc (cid, uid) values(?,?)";
        SqlDB.getConnection().setAutoCommit(false);
        PreparedStatement ps = SqlDB.executePreparedStatement(sql);

        for (int i = 0; i < users.length; i++) {
            ps.setLong(1, cid);
            ps.setLong(2, users[i]);
            ps.addBatch();
        }

        result = ps.executeBatch().length;
        ps.clearBatch();
        SqlDB.close();
    } catch (SQLException ex) {
        ex.printStackTrace(); // log instead of silently swallowing the failure
    }

    return result == users.length;
}

From source file:org.apache.lucene.store.jdbc.support.JdbcTemplate.java

/**
 * A template method that can execute the same sql several times using different
 * values set on it (in the <code>fillPrepareStatement</code> callback).
 */
public int[] executeBatch(String sql, PrepateStatementAwareCallback callback) throws JdbcStoreException {
    Connection con = DataSourceUtils.getConnection(dataSource);
    PreparedStatement ps = null;
    try {
        ps = con.prepareStatement(sql);
        //            ps.setQueryTimeout(settings.getQueryTimeout());
        callback.fillPrepareStatement(ps);
        return ps.executeBatch();
    } catch (JdbcStoreException e) {
        if (log.isTraceEnabled()) {
            log.trace("Failed to execute sql [" + sql + "]", e);
        }
        throw e;
    } catch (Exception e) {
        if (log.isTraceEnabled()) {
            log.trace("Failed to execute sql [" + sql + "]", e);
        }
        throw new JdbcStoreException("Failed to execute sql [" + sql + "]", e);
    } finally {
        DataSourceUtils.closeStatement(ps);
        DataSourceUtils.releaseConnection(con);
    }
}

From source file:de.is24.infrastructure.gridfs.http.metadata.generation.PrimaryDbGenerator.java

private void writeDependency(PreparedStatement ps, int pkgKey, List<YumPackageFormatEntry> dependencies)
        throws SQLException {
    for (YumPackageFormatEntry dependency : dependencies) {
        int c = fillStatementForYumPackageFormatEntry(ps, dependency, 1);
        ps.setInt(c, pkgKey);
        ps.addBatch();
    }
    if (!dependencies.isEmpty()) {
        ps.executeBatch();
    }
}

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.DeleteMetadataAlterHelper.java

public Object doInPreparedStatement(PreparedStatement ps) throws SQLException, DataAccessException {
    startTime = AdapterExtractorContext.getInstance().getGlobalTime();
    taskInstanceId = AdapterExtractorContext.getInstance().getTaskInstanceId();

    for (MMMetaModel metaModel : metaModels) {
        batchLoadDelete(ps, metaModel);
    }

    if (super.count % super.batchSize != 0) {
        ps.executeBatch(); // flush the final partial batch
        ps.clearBatch();
    }
    return null;
}

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.CreateCompositionHelper.java

public Object doInPreparedStatement(PreparedStatement ps) throws SQLException {
    String startTime = DateUtil.getFormatTime(System.currentTimeMillis(), DateUtil.DATA_FORMAT);

    for (Iterator<MMMetaModel> iter = metaModels.iterator(); iter.hasNext();) {
        // batch-load the creation records for each meta model
        batchLoadCreate(iter.next(), ps);
    }
    // flush whatever is left over from the last incomplete batch
    if (super.count % super.batchSize != 0) {
        ps.executeBatch();
        ps.clearBatch();
    }

    String logMsg = new StringBuilder(getMsg()).append(count).append(", started: ").append(startTime)
            .append(", finished: ")
            .append(DateUtil.getFormatTime(System.currentTimeMillis(), DateUtil.DATA_FORMAT)).toString();
    log.info(logMsg);
    AdapterExtractorContext.addExtractorLog(ExtractorLogLevel.INFO, logMsg);

    return null;
    // test for callback
    // throw new SQLException();
}

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.DataDeletionDAOImpl.java

@Override
public void deleteOperations() throws ArchivalDAOException {
    PreparedStatement stmt = null;
    try {
        Connection conn = ArchivalDestinationDAOFactory.getConnection();
        conn.setAutoCommit(false);
        String sql = "DELETE FROM DM_OPERATION_ARCH WHERE ARCHIVED_AT < DATE_SUB(NOW(), INTERVAL ? DAY)";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, this.retentionPeriod);
        stmt.addBatch();
        stmt.executeBatch();
        conn.commit();
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while deleting operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt);
    }
}