Example usage for java.sql PreparedStatement addBatch

List of usage examples for java.sql PreparedStatement addBatch

Introduction

On this page you can find example usages of java.sql PreparedStatement addBatch.

Prototype

void addBatch() throws SQLException;

Document

Adds a set of parameters to this PreparedStatement object's batch of commands.
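
Before looking at the project examples below, here is a minimal sketch of the typical call pattern: bind the parameters for one row, queue that parameter set with addBatch(), and finally send everything to the database with executeBatch(). The table, columns, and Event row type are assumptions made only for this sketch.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class AddBatchSketch {

    /** Hypothetical row holder used only for this sketch. */
    public record Event(int id, String name) {}

    public static int[] insertEvents(Connection connection, List<Event> events) throws SQLException {
        // Hypothetical table and columns; the point is the addBatch()/executeBatch() shape.
        try (PreparedStatement ps = connection.prepareStatement(
                "INSERT INTO events (id, name) VALUES (?, ?)")) {
            for (Event event : events) {
                ps.setInt(1, event.id());
                ps.setString(2, event.name());
                ps.addBatch();            // queue this parameter set
            }
            return ps.executeBatch();     // execute all queued parameter sets in one round trip
        }
    }
}

Each element of the returned int[] is the update count for one queued parameter set, or Statement.SUCCESS_NO_INFO when the driver executed the set but cannot report a row count.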

Usage

From source file:org.openmrs.util.databasechange.ConceptReferenceTermChangeSet.java

/**
 * Convenience method that inserts rows into the concept reference term table. The
 * concept_map_id values become the concept_reference_term_id values
 * @param connection the current database connection
 * @param listOfPropertyValueMaps a list of property and value maps for the objects to insert
 * @throws CustomChangeException
 */
private void insertRows(JdbcConnection connection, List<Map<String, Object>> listOfPropertyValueMaps)
        throws CustomChangeException {
    if (CollectionUtils.isNotEmpty(listOfPropertyValueMaps)) {
        PreparedStatement pStmt = null;
        try {
            connection.setAutoCommit(false);
            pStmt = connection.prepareStatement("INSERT INTO concept_reference_term"
                    + "(concept_reference_term_id, concept_source_id, code, description, creator, date_created, retired, uuid) "
                    + "VALUES(?, ?, ?, ?, ?, ?, ?, ?)");

            for (Map<String, Object> propertyValueMap : listOfPropertyValueMaps) {
                pStmt.setInt(1, (Integer) propertyValueMap.get("termId"));
                pStmt.setInt(2, (Integer) propertyValueMap.get("sourceId"));
                pStmt.setString(3, propertyValueMap.get("code").toString());
                pStmt.setString(4, (propertyValueMap.get("description") == null) ? null
                        : propertyValueMap.get("description").toString());
                pStmt.setInt(5, (Integer) propertyValueMap.get("creator"));
                pStmt.setDate(6, (Date) propertyValueMap.get("dateCreated"));
                pStmt.setBoolean(7, false);
                pStmt.setString(8, propertyValueMap.get("uuid").toString());

                pStmt.addBatch();
            }

            try {
                int[] updateCounts = pStmt.executeBatch();
                for (int i = 0; i < updateCounts.length; i++) {
                    if (updateCounts[i] > -1) {
                        log.debug("Successfully executed: updateCount=" + updateCounts[i]);
                    } else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
                        log.debug("Successfully executed; No Success info");
                    } else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                        log.warn("Failed to execute update");
                    }
                }

                log.debug("Committing updates...");
                connection.commit();
            } catch (BatchUpdateException be) {
                log.warn("Error generated while processsing batch update", be);
                int[] updateCounts = be.getUpdateCounts();

                for (int i = 0; i < updateCounts.length; i++) {
                    if (updateCounts[i] > -1) {
                        log.warn("Executed with exception: updateCount=" + updateCounts[i]);
                    } else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
                        log.warn("Executed with exception; No Success info");
                    } else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                        log.warn("Failed to execute update with exception");
                    }
                }

                try {
                    log.warn("Rolling back batch", be);
                    connection.rollback();
                } catch (Exception rbe) {
                    log.warn("Error generated while rolling back batch update", be);
                }

                //marks the changeset as a failed one
                throw new CustomChangeException(
                        "Failed to generate concept reference terms from existing concept mappings.");
            }
        } catch (DatabaseException e) {
            throw new CustomChangeException("Error generated", e);
        } catch (SQLException e) {
            throw new CustomChangeException("Error generated", e);
        } finally {
            //reset to auto commit mode
            try {
                connection.setAutoCommit(true);
            } catch (DatabaseException e) {
                log.warn("Failed to reset auto commit back to true", e);
            }

            if (pStmt != null) {
                try {
                    pStmt.close();
                } catch (SQLException e) {
                    log.warn("Failed to close the prepared statement object");
                }
            }
        }
    } else
        log.error("List of property value maps is null or empty");
}
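
The OpenMRS changeset above shows the full ceremony around a batch: disable auto-commit, queue one parameter set per row with addBatch(), inspect the update counts, commit, and roll back when a BatchUpdateException is thrown. Below is a condensed sketch of the same idiom using try-with-resources; it assumes a plain java.sql.Connection rather than Liquibase's JdbcConnection and a hypothetical Term row type, so it illustrates the batching pattern rather than the actual changeset.

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;

public class BatchWithRollbackSketch {

    /** Hypothetical row holder used only for this sketch. */
    public record Term(int id, int sourceId, String code, String uuid) {}

    public static void insertTerms(Connection connection, List<Term> terms) throws SQLException {
        boolean originalAutoCommit = connection.getAutoCommit();
        connection.setAutoCommit(false);
        try (PreparedStatement ps = connection.prepareStatement(
                "INSERT INTO concept_reference_term "
                        + "(concept_reference_term_id, concept_source_id, code, uuid) VALUES (?, ?, ?, ?)")) {
            for (Term term : terms) {
                ps.setInt(1, term.id());
                ps.setInt(2, term.sourceId());
                ps.setString(3, term.code());
                ps.setString(4, term.uuid());
                ps.addBatch();                        // queue one parameter set per row
            }
            int[] counts = ps.executeBatch();         // run the whole batch in one round trip
            for (int count : counts) {
                // Defensive check; drivers normally report failures by throwing BatchUpdateException.
                if (count == Statement.EXECUTE_FAILED) {
                    throw new SQLException("A statement in the batch failed");
                }
            }
            connection.commit();
        } catch (BatchUpdateException e) {
            connection.rollback();                    // undo the partially executed batch
            throw e;
        } finally {
            connection.setAutoCommit(originalAutoCommit);
        }
    }
}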

From source file:org.rhq.enterprise.server.measurement.MeasurementDataManagerBean.java

/**
 * Add metrics data to the database. Data that is passed can come from several Schedules, but needs to be of only
 * one type of MeasurementGathering. For good performance it is important that the agent sends batches that are as
 * large as possible (though perhaps not more than 100 items at a time).
 *
 * @param data the actual data points
 */
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public void addNumericData(Set<MeasurementDataNumeric> data) {
    if ((data == null) || (data.isEmpty())) {
        return;
    }

    int expectedCount = data.size();

    Connection conn = null;
    DatabaseType dbType = null;

    Map<String, PreparedStatement> statements = new HashMap<String, PreparedStatement>();

    try {
        conn = rhqDs.getConnection();
        dbType = DatabaseTypeFactory.getDatabaseType(conn);

        if (dbType instanceof Postgresql83DatabaseType) {
            Statement st = null;
            try {
                // Take advantage of async commit here
                st = conn.createStatement();
                st.execute("SET synchronous_commit = off");
            } finally {
                JDBCUtil.safeClose(st);
            }
        }

        for (MeasurementDataNumeric aData : data) {
            Double value = aData.getValue();
            if ((value == null) || Double.isNaN(value) || Double.isInfinite(value)) {
                expectedCount--;
                continue;
            }

            String table = MeasurementDataManagerUtility.getTable(aData.getTimestamp());

            PreparedStatement ps = statements.get(table);

            if (ps == null) {
                String insertSql = "INSERT  /*+ APPEND */ INTO " + table
                        + "(schedule_id,time_stamp,value) VALUES(?,?,?)";
                ps = conn.prepareStatement(insertSql);
                statements.put(table, ps);
            }

            ps.setInt(1, aData.getScheduleId());
            ps.setLong(2, aData.getTimestamp());
            ps.setDouble(3, value);
            ps.addBatch();
        }

        int count = 0;
        for (PreparedStatement ps : statements.values()) {
            int[] res = ps.executeBatch();
            for (int updates : res) {
                if ((updates != 1) && (updates != -2)) // oracle returns -2 on success
                {
                    throw new MeasurementStorageException("Unexpected batch update size [" + updates + "]");
                }

                count++;
            }
        }

        if (count != expectedCount) {
            throw new MeasurementStorageException("Failure to store measurement data.");
        }

        notifyAlertConditionCacheManager("mergeMeasurementReport",
                data.toArray(new MeasurementData[data.size()]));
    } catch (SQLException e) {
        log.warn("Failure saving measurement numeric data:\n" + ThrowableUtil.getAllMessages(e));
    } catch (Exception e) {
        log.error("Error persisting numeric data", e);
    } finally {
        for (PreparedStatement ps : statements.values()) {
            JDBCUtil.safeClose(ps);
        }

        JDBCUtil.safeClose(conn);
    }
}
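
The RHQ example above routes each data point to a table chosen by its timestamp, keeps one PreparedStatement (and therefore one batch) per table in a Map, and accepts -2 as a success code; -2 is the JDBC constant Statement.SUCCESS_NO_INFO. Below is a minimal sketch of that per-table batching idea with the magic number replaced by the named constant; the DataPoint type, the routing function, and the table naming are assumptions made only for this illustration.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

public class PerTableBatchSketch {

    /** Hypothetical data point used only for this sketch. */
    public record DataPoint(int scheduleId, long timestamp, double value) {}

    /** Hypothetical routing function: picks a partition table for a timestamp. */
    private static String tableFor(long timestamp) {
        return "metrics_day_" + (timestamp / 86_400_000L % 7);
    }

    public static void store(Connection conn, Collection<DataPoint> points) throws SQLException {
        Map<String, PreparedStatement> statements = new HashMap<>();
        try {
            for (DataPoint point : points) {
                String table = tableFor(point.timestamp());
                // Reuse one statement, and therefore one batch, per destination table.
                PreparedStatement ps = statements.get(table);
                if (ps == null) {
                    ps = conn.prepareStatement(
                            "INSERT INTO " + table + " (schedule_id, time_stamp, value) VALUES (?, ?, ?)");
                    statements.put(table, ps);
                }
                ps.setInt(1, point.scheduleId());
                ps.setLong(2, point.timestamp());
                ps.setDouble(3, point.value());
                ps.addBatch();
            }
            for (PreparedStatement ps : statements.values()) {
                for (int count : ps.executeBatch()) {
                    // SUCCESS_NO_INFO (-2) means the statement ran but the driver
                    // could not determine how many rows were affected.
                    if (count != 1 && count != Statement.SUCCESS_NO_INFO) {
                        throw new SQLException("Unexpected batch update count: " + count);
                    }
                }
            }
        } finally {
            for (PreparedStatement ps : statements.values()) {
                try {
                    ps.close();                       // best-effort cleanup
                } catch (SQLException ignored) {
                    // nothing useful to do here
                }
            }
        }
    }
}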

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void moveConfigOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_CONFIG_OPERATION WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);

        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();

        sql = "INSERT INTO DM_CONFIG_OPERATION_ARCH VALUES(?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);

        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("OPERATION_ID"));
            stmt2.setBytes(2, rs.getBytes("OPERATION_CONFIG"));
            stmt2.setInt(3, rs.getInt("ENABLED"));
            stmt2.setTimestamp(4, this.currentTimestamp);
            stmt2.addBatch();

            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [CONFIG_OPERATION] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_CONFIG_OPERATION"
                + "  WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving config operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}
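
The archival method above streams rows out of a source ResultSet and calls executeBatch() every batchSize rows so the queued parameter sets never grow without bound, with one final executeBatch() after the loop to flush the remainder (the next example applies the same pattern to profile operations). A minimal sketch of that periodic-flush idiom follows; the source and destination tables and the batch size are assumptions made only for this sketch.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class PeriodicFlushSketch {

    private static final int BATCH_SIZE = 1000;       // assumed value; tune for your driver and tables

    public static int copyRows(Connection source, Connection destination) throws SQLException {
        int count = 0;
        // Hypothetical source/destination tables; only the flush-every-N-rows idiom matters here.
        try (Statement select = source.createStatement();
             ResultSet rs = select.executeQuery("SELECT id, payload FROM source_table");
             PreparedStatement insert = destination.prepareStatement(
                     "INSERT INTO destination_table (id, payload) VALUES (?, ?)")) {
            while (rs.next()) {
                insert.setInt(1, rs.getInt("id"));
                insert.setBytes(2, rs.getBytes("payload"));
                insert.addBatch();
                if (++count % BATCH_SIZE == 0) {
                    insert.executeBatch();            // flush a full chunk of queued rows
                }
            }
            insert.executeBatch();                    // flush the final, possibly partial, chunk
        }
        return count;
    }
}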

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void moveProfileOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_PROFILE_OPERATION WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);

        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();

        sql = "INSERT INTO DM_PROFILE_OPERATION_ARCH VALUES(?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);

        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("OPERATION_ID"));
            stmt2.setInt(2, rs.getInt("ENABLED"));
            stmt2.setBytes(3, rs.getBytes("OPERATION_DETAILS"));
            stmt2.setTimestamp(4, this.currentTimestamp);
            stmt2.addBatch();

            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [PROFILE_OPERATION] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_PROFILE_OPERATION"
                + "  WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving profile operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}

From source file:org.accada.epcis.repository.capture.CaptureOperationsBackendSQL.java

/**
 * {@inheritDoc}
 */
public void insertExtensionFieldsForEvent(final CaptureOperationsSession session, final long eventId,
        final String eventType, final List<EventFieldExtension> exts) throws SQLException {
    for (EventFieldExtension ext : exts) {
        String insert = "INSERT INTO event_" + eventType + "_extensions " + "(event_id, fieldname, prefix, "
                + ext.getValueColumnName() + ") VALUES (?, ? ,?, ?)";
        PreparedStatement ps = session.getBatchInsert(insert);
        if (LOG.isDebugEnabled()) {
            LOG.debug("INSERT: " + insert);
            LOG.debug("       insert param 1: " + eventId);
            LOG.debug("       insert param 2: " + ext.getFieldname());
            LOG.debug("       insert param 3: " + ext.getPrefix());
            LOG.debug("       insert param 4: " + ext.getStrValue());
        }
        ps.setLong(1, eventId);
        ps.setString(2, ext.getFieldname());
        ps.setString(3, ext.getPrefix());
        if (ext.getIntValue() != null) {
            ps.setInt(4, ext.getIntValue().intValue());
        } else if (ext.getFloatValue() != null) {
            ps.setFloat(4, ext.getFloatValue().floatValue());
        } else if (ext.getDateValue() != null) {
            ps.setTimestamp(4, ext.getDateValue());
        } else {
            ps.setString(4, ext.getStrValue());
        }
        ps.addBatch();
    }
}

From source file:com.archivas.clienttools.arcutils.utils.database.ManagedJobSchema.java

private void insertIntoTempTable(PooledDbConnection conn, Set<String> paths) throws SQLException {
    PreparedStatement insertStmt = conn.prepareStatement(INSERT_INTO_TEMP_TABLE_STMT_NAME,
            insertIntoTempTableSql);

    for (String path : paths) {
        insertStmt.clearParameters();
        insertStmt.setString(1, path);
        insertStmt.addBatch();
    }

    insertStmt.executeBatch();
    conn.commit();
}

From source file:com.esofthead.mycollab.module.project.service.ibatis.GanttAssignmentServiceImpl.java

private void massUpdateMilestoneGanttItems(final List<MilestoneGanttItem> milestoneGanttItems,
        Integer sAccountId) {
    if (CollectionUtils.isNotEmpty(milestoneGanttItems)) {
        Lock lock = DistributionLockUtil.getLock("gantt-milestone-service" + sAccountId);
        try {
            final long now = new GregorianCalendar().getTimeInMillis();
            if (lock.tryLock(30, TimeUnit.SECONDS)) {
                try (Connection connection = dataSource.getConnection()) {
                    connection.setAutoCommit(false);
                    PreparedStatement preparedStatement = connection.prepareStatement(
                            "UPDATE `m_prj_milestone` SET " + "name = ?, `startdate` = ?, `enddate` = ?, "
                                    + "`lastUpdatedTime`=?, `owner`=?, `ganttIndex`=? WHERE `id` = ?");
                    for (int i = 0; i < milestoneGanttItems.size(); i++) {
                        preparedStatement.setString(1, milestoneGanttItems.get(i).getName());
                        preparedStatement.setDate(2,
                                getDateWithNullValue(milestoneGanttItems.get(i).getStartDate()));
                        preparedStatement.setDate(3,
                                getDateWithNullValue(milestoneGanttItems.get(i).getEndDate()));
                        preparedStatement.setDate(4, new Date(now));
                        preparedStatement.setString(5, milestoneGanttItems.get(i).getAssignUser());
                        preparedStatement.setInt(6, milestoneGanttItems.get(i).getGanttIndex());
                        preparedStatement.setInt(7, milestoneGanttItems.get(i).getId());
                        preparedStatement.addBatch();

                    }
                    preparedStatement.executeBatch();
                    connection.commit();
                }
            }
        } catch (Exception e) {
            throw new MyCollabException(e);
        } finally {
            DistributionLockUtil.removeLock("gantt-milestone-service" + sAccountId);
            lock.unlock();
        }
    }
}

From source file:org.wso2.carbon.is.migration.client.MigrateFrom5to510.java

public void migrateUMData() {
    Connection identityConnection = null;
    Connection umConnection = null;

    PreparedStatement selectServiceProviders = null;
    PreparedStatement updateRole = null;

    ResultSet selectServiceProvidersRS = null;

    try {
        identityConnection = dataSource.getConnection();
        umConnection = umDataSource.getConnection();

        identityConnection.setAutoCommit(false);
        umConnection.setAutoCommit(false);

        selectServiceProviders = identityConnection.prepareStatement(SQLQueries.LOAD_APP_NAMES);
        selectServiceProvidersRS = selectServiceProviders.executeQuery();

        updateRole = umConnection.prepareStatement(SQLQueries.UPDATE_ROLES);
        while (selectServiceProvidersRS.next()) {
            String appName = selectServiceProvidersRS.getString("APP_NAME");
            int tenantId = selectServiceProvidersRS.getInt("TENANT_ID");
            updateRole.setString(1,
                    ApplicationConstants.APPLICATION_DOMAIN + UserCoreConstants.DOMAIN_SEPARATOR + appName);
            updateRole.setString(2, appName);
            updateRole.setInt(3, tenantId);
            updateRole.addBatch();
        }
        updateRole.executeBatch();

        identityConnection.commit();
        umConnection.commit();
    } catch (SQLException e) {
        log.error(e);
    } finally {
        IdentityDatabaseUtil.closeResultSet(selectServiceProvidersRS);
        IdentityDatabaseUtil.closeStatement(selectServiceProviders);
        IdentityDatabaseUtil.closeStatement(updateRole);
        IdentityDatabaseUtil.closeConnection(identityConnection);
        IdentityDatabaseUtil.closeConnection(umConnection);
    }
}

From source file:org.wso2.carbon.identity.openidconnect.dao.RequestObjectDAOImpl.java

private void insertRequestObjectClaims(int requestObjectId, List<List<RequestedClaim>> claims,
        Connection connection) throws IdentityOAuth2Exception {

    String sqlStmt = SQLQueries.STORE_IDN_OIDC_REQ_OBJECT_CLAIMS;
    PreparedStatement prepStmt = null;
    Map<Integer, List<String>> claimValues = new HashMap<>();
    try {
        String dbProductName = connection.getMetaData().getDatabaseProductName();
        prepStmt = connection.prepareStatement(sqlStmt,
                new String[] { DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, ID) });
        for (List<RequestedClaim> list : claims) {
            if (CollectionUtils.isNotEmpty(list)) {
                for (RequestedClaim claim : list) {
                    prepStmt.setInt(1, requestObjectId);
                    prepStmt.setString(2, claim.getName());
                    prepStmt.setString(3, claim.isEssential() ? "1" : "0");
                    prepStmt.setString(4, claim.getValue());
                    if (OIDCConstants.USERINFO.equals(claim.getType())) {
                        prepStmt.setString(5, "1");
                    } else if (OIDCConstants.ID_TOKEN.equals(claim.getType())) {
                        prepStmt.setString(5, "0");
                    }
                    prepStmt.addBatch();
                    if (log.isDebugEnabled()) {
                        log.debug("Claim :" + claim.getName() + "is added to the batch against :"
                                + claim.getType());
                    }
                }
            }
            prepStmt.executeBatch();
            connection.commit();
        }
        Map<Integer, String> insertedRequestObjectClaims = getInsertedRequestObjectClaims(requestObjectId);
        if (MapUtils.isNotEmpty(insertedRequestObjectClaims)) {
            for (Map.Entry<Integer, String> entry : insertedRequestObjectClaims.entrySet()) {
                for (List<RequestedClaim> list : claims) {
                    if (CollectionUtils.isNotEmpty(list)) {
                        for (RequestedClaim claim : list) {
                            if (claim.getName().equals(entry.getValue())) {
                                claimValues.put(entry.getKey(), claim.getValues());
                            }

                        }
                    }

                }
            }
            if (MapUtils.isNotEmpty(claimValues)) {
                insertRequestObjectClaimValues(claimValues, connection);
            }
        }
    } catch (SQLException e) {
        String errorMessage = "Error when storing the request object claims.";
        log.error(errorMessage, e);
        throw new IdentityOAuth2Exception(errorMessage, e);
    } finally {
        IdentityApplicationManagementUtil.closeStatement(prepStmt);
    }
}

From source file:com.alibaba.wasp.jdbc.TestPreparedStatement.java

public void testLobTempFiles() throws SQLException {
    Statement stat = conn.createStatement();
    stat.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, DATA CLOB)");
    PreparedStatement prep = conn.prepareStatement("INSERT INTO TEST VALUES(?, ?)");
    for (int i = 0; i < 5; i++) {
        prep.setInt(1, i);
        if (i % 2 == 0) {
            prep.setCharacterStream(2, new StringReader(getString(i)), -1);
        }
        prep.execute();
    }
    ResultSet rs = stat.executeQuery("SELECT * FROM TEST ORDER BY ID");
    int check = 0;
    for (int i = 0; i < 5; i++) {
        assertTrue(rs.next());
        if (i % 2 == 0) {
            check = i;
        }
        assertEquals(getString(check), rs.getString(2));
    }
    assertFalse(rs.next());
    stat.execute("DELETE FROM TEST");
    for (int i = 0; i < 3; i++) {
        prep.setInt(1, i);
        prep.setCharacterStream(2, new StringReader(getString(i)), -1);
        prep.addBatch();
    }
    prep.executeBatch();
    rs = stat.executeQuery("SELECT * FROM TEST ORDER BY ID");
    for (int i = 0; i < 3; i++) {
        assertTrue(rs.next());
        assertEquals(getString(i), rs.getString(2));
    }
    assertFalse(rs.next());
    stat.execute("DROP TABLE TEST");
}