Example usage for java.sql PreparedStatement addBatch

List of usage examples for java.sql PreparedStatement addBatch

Introduction

On this page you can find example usages of java.sql PreparedStatement.addBatch.

Prototype

void addBatch() throws SQLException;

Source Link

Document

Adds a set of parameters to this PreparedStatement object's batch of commands.

Usage

From source file:org.schedoscope.metascope.tasks.repository.mysql.impl.TransformationEntityMySQLRepository.java

@Override
public void insertOrUpdate(Connection connection, List<TransformationEntity> tes) {
    // Upserts all transformation entities in a single transaction, flushing
    // the JDBC batch in fixed-size chunks to bound client-side memory.
    String insertTeSql = "insert into transformation_entity ("
            + JDBCUtil.getDatabaseColumnsForClass(TransformationEntity.class) + ") values ("
            + JDBCUtil.getValuesCountForClass(TransformationEntity.class) + ") " + "on duplicate key update "
            + MySQLUtil.getOnDuplicateKeyString(TransformationEntity.class);
    PreparedStatement stmt = null;
    try {
        int batch = 0;
        connection.setAutoCommit(false);
        stmt = connection.prepareStatement(insertTeSql);
        for (TransformationEntity te : tes) {
            stmt.setString(1, te.getFqdn());
            stmt.setString(2, te.getTransformationKey());
            stmt.setString(3, te.getTransformationValue());
            stmt.setString(4, te.getFqdn());
            stmt.addBatch();
            batch++;
            // Flush every 1024 rows so the batch does not grow unbounded.
            if (batch % 1024 == 0) {
                stmt.executeBatch();
            }
        }
        stmt.executeBatch(); // flush the remaining (possibly empty) tail batch
        connection.commit();
    } catch (SQLException e) {
        LOG.error("Could not save transformation property", e);
        try {
            // Undo any partially executed batches before handing the
            // connection back; the original committed nothing but also
            // never rolled back.
            connection.rollback();
        } catch (SQLException rollbackException) {
            LOG.error("Could not roll back transaction", rollbackException);
        }
    } finally {
        try {
            // Restore auto-commit on ALL paths; the original only restored it
            // on success, leaving the (possibly pooled) connection in
            // manual-commit mode after a failure.
            connection.setAutoCommit(true);
        } catch (SQLException autoCommitException) {
            LOG.error("Could not restore auto-commit mode", autoCommitException);
        }
        DbUtils.closeQuietly(stmt);
    }
}

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.DataDeletionDAOImpl.java

@Override
public void deleteNotifications() throws ArchivalDAOException {
    // Deletes archived notifications older than the configured retention period.
    PreparedStatement stmt = null;
    Connection conn = null;
    try {
        // NOTE(review): the connection is never closed here — presumably the
        // factory manages its lifecycle; verify.
        conn = ArchivalDestinationDAOFactory.getConnection();
        conn.setAutoCommit(false);
        String sql = "DELETE FROM DM_NOTIFICATION_ARCH"
                + "  WHERE ARCHIVED_AT < DATE_SUB(NOW(), INTERVAL ? DAY)";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, this.retentionPeriod);
        // A single statement needs no batch: the original addBatch()/
        // executeBatch() pair is replaced by a direct executeUpdate().
        stmt.executeUpdate();
        conn.commit();
    } catch (SQLException e) {
        if (conn != null) {
            try {
                conn.rollback(); // undo a partially applied delete
            } catch (SQLException rollbackError) {
                e.addSuppressed(rollbackError); // the original failure matters most
            }
        }
        throw new ArchivalDAOException("Error occurred while deleting notifications", e);
    } finally {
        if (conn != null) {
            try {
                // Restore the default mode; the original disabled auto-commit
                // and never re-enabled it.
                conn.setAutoCommit(true);
            } catch (SQLException ignored) {
                // nothing further can be done while cleaning up
            }
        }
        ArchivalDAOUtil.cleanupResources(stmt);
    }
}

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.DataDeletionDAOImpl.java

@Override
public void deleteCommandOperations() throws ArchivalDAOException {
    // Deletes archived command operations older than the configured retention period.
    PreparedStatement stmt = null;
    Connection conn = null;
    try {
        // NOTE(review): the connection is never closed here — presumably the
        // factory manages its lifecycle; verify.
        conn = ArchivalDestinationDAOFactory.getConnection();
        conn.setAutoCommit(false);
        String sql = "DELETE FROM DM_COMMAND_OPERATION_ARCH"
                + "  WHERE ARCHIVED_AT < DATE_SUB(NOW(), INTERVAL ? DAY)";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, this.retentionPeriod);
        // A single statement needs no batch: the original addBatch()/
        // executeBatch() pair is replaced by a direct executeUpdate().
        stmt.executeUpdate();
        conn.commit();
    } catch (SQLException e) {
        if (conn != null) {
            try {
                conn.rollback(); // undo a partially applied delete
            } catch (SQLException rollbackError) {
                e.addSuppressed(rollbackError); // the original failure matters most
            }
        }
        throw new ArchivalDAOException("Error occurred while deleting command operations", e);
    } finally {
        if (conn != null) {
            try {
                // Restore the default mode; the original disabled auto-commit
                // and never re-enabled it.
                conn.setAutoCommit(true);
            } catch (SQLException ignored) {
                // nothing further can be done while cleaning up
            }
        }
        ArchivalDAOUtil.cleanupResources(stmt);
    }
}

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.DataDeletionDAOImpl.java

@Override
public void deleteProfileOperations() throws ArchivalDAOException {
    // Deletes archived profile operations older than the configured retention period.
    PreparedStatement stmt = null;
    Connection conn = null;
    try {
        // NOTE(review): the connection is never closed here — presumably the
        // factory manages its lifecycle; verify.
        conn = ArchivalDestinationDAOFactory.getConnection();
        conn.setAutoCommit(false);
        String sql = "DELETE FROM DM_PROFILE_OPERATION_ARCH"
                + "  WHERE ARCHIVED_AT < DATE_SUB(NOW(), INTERVAL ? DAY)";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, this.retentionPeriod);
        // A single statement needs no batch: the original addBatch()/
        // executeBatch() pair is replaced by a direct executeUpdate().
        stmt.executeUpdate();
        conn.commit();
    } catch (SQLException e) {
        if (conn != null) {
            try {
                conn.rollback(); // undo a partially applied delete
            } catch (SQLException rollbackError) {
                e.addSuppressed(rollbackError); // the original failure matters most
            }
        }
        throw new ArchivalDAOException("Error occurred while deleting profile operations", e);
    } finally {
        if (conn != null) {
            try {
                // Restore the default mode; the original disabled auto-commit
                // and never re-enabled it.
                conn.setAutoCommit(true);
            } catch (SQLException ignored) {
                // nothing further can be done while cleaning up
            }
        }
        ArchivalDAOUtil.cleanupResources(stmt);
    }
}

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.DataDeletionDAOImpl.java

@Override
public void deleteOperationResponses() throws ArchivalDAOException {
    // Deletes archived operation responses older than the configured retention period.
    PreparedStatement stmt = null;
    Connection conn = null;
    try {
        // NOTE(review): the connection is never closed here — presumably the
        // factory manages its lifecycle; verify.
        conn = ArchivalDestinationDAOFactory.getConnection();
        conn.setAutoCommit(false);
        String sql = "DELETE FROM DM_DEVICE_OPERATION_RESPONSE_ARCH "
                + "WHERE ARCHIVED_AT < DATE_SUB(NOW(), INTERVAL ? DAY)";
        stmt = conn.prepareStatement(sql);
        stmt.setInt(1, this.retentionPeriod);
        // A single statement needs no batch: the original addBatch()/
        // executeBatch() pair is replaced by a direct executeUpdate().
        stmt.executeUpdate();
        conn.commit();
    } catch (SQLException e) {
        if (conn != null) {
            try {
                conn.rollback(); // undo a partially applied delete
            } catch (SQLException rollbackError) {
                e.addSuppressed(rollbackError); // the original failure matters most
            }
        }
        throw new ArchivalDAOException("Error occurred while deleting operation responses", e);
    } finally {
        if (conn != null) {
            try {
                // Restore the default mode; the original disabled auto-commit
                // and never re-enabled it.
                conn.setAutoCommit(true);
            } catch (SQLException ignored) {
                // nothing further can be done while cleaning up
            }
        }
        ArchivalDAOUtil.cleanupResources(stmt);
    }
}

From source file:com.chenxin.authority.common.logback.DBAppender.java

/**
 * Records one exception line for the given logging event, queuing it on the
 * statement's batch when the connection supports batch updates and executing
 * it immediately otherwise.
 */
void updateExceptionStatement(PreparedStatement exceptionStatement, String txt, short i, long eventId)
        throws SQLException {
    exceptionStatement.setShort(2, i);
    exceptionStatement.setString(3, txt);
    exceptionStatement.setLong(1, eventId);
    if (!cnxSupportsBatchUpdates) {
        // No batch support on this connection: run the statement right away.
        exceptionStatement.execute();
        return;
    }
    exceptionStatement.addBatch();
}

From source file:org.plista.kornakapi.core.storage.MySqlMaxPersistentStorage.java

@Override
public void batchSetPreferences(Iterator<Preference> preferences, int batchSize) throws IOException {
    // Streams preferences into the database, executing the JDBC batch every
    // batchSize records and once more at the end for any leftover records.
    Connection conn = null;
    PreparedStatement stmt = null;

    try {
        conn = dataSource.getConnection();
        stmt = conn.prepareStatement(IMPORT_QUERY_MAX);

        int queued = 0;

        while (preferences.hasNext()) {
            Preference pref = preferences.next();
            stmt.setLong(1, pref.getUserID());
            stmt.setLong(2, pref.getItemID());
            stmt.setFloat(3, pref.getValue());
            stmt.addBatch();
            queued++;

            if (queued % batchSize == 0) {
                stmt.executeBatch();
                log.info("imported {} records in batch", queued);
            }
        }

        // Flush the tail unless the final iteration happened to land exactly
        // on a batch boundary (in which case it was already executed above).
        if (queued % batchSize != 0) {
            stmt.executeBatch();
            log.info("imported {} records in batch. done.", queued);
        }

    } catch (SQLException e) {
        throw new IOException(e);
    } finally {
        IOUtils.quietClose(stmt);
        IOUtils.quietClose(conn);
    }
}

From source file:com.redhat.victims.database.VictimsSqlDB.java

/**
 * Remove all records matching the records in the given {@link RecordStream}
 * if it exists./*from   w  w w. j av  a2  s . c  o m*/
 *
 * @param recordStream
 * @throws SQLException
 * @throws IOException
 */
protected int remove(Connection connection, RecordStream recordStream) throws SQLException, IOException {
    int count = 0;
    PreparedStatement ps = statement(connection, Query.DELETE_RECORD_HASH);
    while (recordStream.hasNext()) {
        VictimsRecord vr = recordStream.getNext();
        setObjects(ps, vr.hash);
        ps.addBatch();
        count++;
    }
    executeBatchAndClose(ps);
    return count;
}

From source file:chh.utils.db.source.common.JdbcClient.java

public void executeInsertQuery(String query, List<List<Column>> columnLists) {
    // Executes the given insert for every parameter list as one JDBC batch,
    // committing on success and rolling back if any statement failed.
    Connection connection = null;
    try {
        connection = connectionProvider.getConnection();
        boolean autoCommit = connection.getAutoCommit();
        if (autoCommit) {
            connection.setAutoCommit(false);
        }

        LOG.debug("Executing query {}", query);

        // try-with-resources: the original never closed the statement,
        // leaking it on every call.
        try (PreparedStatement preparedStatement = connection.prepareStatement(query)) {
            if (queryTimeoutSecs > 0) {
                preparedStatement.setQueryTimeout(queryTimeoutSecs);
            }

            for (List<Column> columnList : columnLists) {
                setPreparedStatementParams(preparedStatement, columnList);
                preparedStatement.addBatch();
            }

            int[] results = preparedStatement.executeBatch();
            // BUG FIX: Arrays.asList(int[]) yields a one-element List<int[]>
            // (the array itself), so the original contains(EXECUTE_FAILED)
            // check was always false and failed batches were silently
            // committed. Scan the result array explicitly instead.
            boolean batchFailed = false;
            for (int result : results) {
                if (result == Statement.EXECUTE_FAILED) {
                    batchFailed = true;
                    break;
                }
            }
            if (batchFailed) {
                connection.rollback();
                throw new RuntimeException(
                        "failed at least one sql statement in the batch, operation rolled back.");
            } else {
                try {
                    connection.commit();
                } catch (SQLException e) {
                    throw new RuntimeException("Failed to commit insert query " + query, e);
                }
            }
        }
    } catch (SQLException e) {
        throw new RuntimeException("Failed to execute insert query " + query, e);
    } finally {
        closeConnection(connection);
    }
}

From source file:org.latticesoft.util.resource.dao.QueryService.java

/**
 * Prepares the statement for batch execution
 * @param pstmt the statement to be prepared
 * @param data the bean for prepartion/*from  w  ww .  j av a 2 s. com*/
 */
private void prepareBatch(PreparedStatement pstmt, Object data) {
    if (data == null || pstmt == null)
        return;
    this.prepare(pstmt, data);
    try {
        pstmt.addBatch();
    } catch (SQLException sqle) {
        if (log.isErrorEnabled()) {
            log.error(sqle);
        }
    }
}