Example usage for java.sql PreparedStatement addBatch

List of usage examples for java.sql PreparedStatement addBatch

Introduction

On this page you can find example usage for java.sql PreparedStatement addBatch.

Prototype

void addBatch() throws SQLException;

Document

Adds a set of parameters to this PreparedStatement object's batch of commands.
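
Before the project examples, here is a minimal, self-contained sketch of the usual pattern: bind one set of parameters, queue it with addBatch(), repeat for every row, then send all queued sets with executeBatch(). The JDBC URL, credentials, and the person table with its id and name columns are assumptions made purely for illustration; they do not come from the examples below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

public class AddBatchSketch {

    /**
     * Inserts every row in a single batch. Each element of rows is an
     * {id, name} pair; the table and connection details are hypothetical.
     */
    static int[] insertPeople(String jdbcUrl, String user, String password, List<String[]> rows)
            throws SQLException {
        try (Connection conn = DriverManager.getConnection(jdbcUrl, user, password);
                PreparedStatement ps = conn.prepareStatement(
                        "INSERT INTO person (id, name) VALUES (?, ?)")) {
            conn.setAutoCommit(false);
            for (String[] row : rows) {
                ps.setInt(1, Integer.parseInt(row[0])); // bind parameters for this row
                ps.setString(2, row[1]);
                ps.addBatch();                          // queue this parameter set
            }
            int[] counts = ps.executeBatch();           // send all queued sets in one call
            conn.commit();                              // commit once the whole batch succeeds
            return counts;
        }
    }
}

Each entry of the array returned by executeBatch() is the update count for the corresponding parameter set, or Statement.SUCCESS_NO_INFO / Statement.EXECUTE_FAILED, which is exactly what several of the examples below inspect after executing the batch.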

Usage

From source file:org.openmrs.module.formentry.databasechange.MigrateXsltsAndTemplatesChangeset.java

private void migrateResources(JdbcConnection connection, boolean isXslt) throws CustomChangeException {
    Statement selectStmt = null;
    PreparedStatement insertResourcesStmt = null;
    PreparedStatement insertClobsStmt = null;
    Boolean originalAutoCommit = null;
    ResultSet rs = null;
    String resourceName = (isXslt) ? FormEntryConstants.FORMENTRY_XSLT_FORM_RESOURCE_NAME
            : FormEntryConstants.FORMENTRY_TEMPLATE_FORM_RESOURCE_NAME;
    String columnName = (isXslt) ? "xslt" : "template";

    try {
        originalAutoCommit = connection.getAutoCommit();
        selectStmt = connection.createStatement();
        boolean hasResults = selectStmt.execute("SELECT form_id, " + columnName + " FROM form WHERE "
                + columnName + " IS NOT NULL AND " + columnName + " != ''");
        if (hasResults) {
            rs = selectStmt.getResultSet();
            insertClobsStmt = connection
                    .prepareStatement("INSERT INTO clob_datatype_storage (value, uuid) VALUES(?,?)");
            insertResourcesStmt = connection.prepareStatement(
                    "INSERT INTO form_resource (form_id, name, value_reference, datatype, preferred_handler, uuid) VALUES (?,'"
                            + resourceName + "',?,'" + LongFreeTextDatatype.class.getName() + "','"
                            + LongFreeTextFileUploadHandler.class.getName() + "',?)");

            String defaultXslt = IOUtils
                    .toString(getClass().getClassLoader().getResourceAsStream("default.xslt"));
            //intentionally didn't check for NULL so the exception halts the changeset
            defaultXslt = defaultXslt.trim();

            while (rs.next()) {
                String resourceValue = rs.getString(columnName);
                //if the form has an xslt and it differs from the default one
                if (StringUtils.isNotBlank(resourceValue)
                        && (!isXslt || !resourceValue.trim().equals(defaultXslt))) {
                    //set the clob storage values
                    String clobUuid = UUID.randomUUID().toString();
                    insertClobsStmt.setString(1, resourceValue.trim());
                    insertClobsStmt.setString(2, clobUuid);
                    insertClobsStmt.addBatch();

                    //set the resource column values
                    insertResourcesStmt.setInt(1, rs.getInt("form_id"));
                    insertResourcesStmt.setString(2, clobUuid);
                    insertResourcesStmt.setString(3, UUID.randomUUID().toString());
                    insertResourcesStmt.addBatch();
                }
            }

            boolean successfullyAddedClobs = false;
            int[] clobInsertCounts = insertClobsStmt.executeBatch();
            if (clobInsertCounts != null) {
                for (int i = 0; i < clobInsertCounts.length; i++) {
                    if (clobInsertCounts[i] > -1) {
                        successfullyAddedClobs = true;
                        log.debug("Successfully inserted resource clobs: insert count=" + clobInsertCounts[i]);
                    } else if (clobInsertCounts[i] == Statement.SUCCESS_NO_INFO) {
                        successfullyAddedClobs = true;
                        log.debug("Successfully inserted resource clobs; No Success info");
                    } else if (clobInsertCounts[i] == Statement.EXECUTE_FAILED) {
                        log.warn("Failed to insert resource clobs");
                    }
                }
            }

            if (successfullyAddedClobs) {
                int[] resourceInsertCounts = insertResourcesStmt.executeBatch();
                if (resourceInsertCounts != null) {
                    boolean commit = false;
                    for (int i = 0; i < resourceInsertCounts.length; i++) {
                        if (resourceInsertCounts[i] > -1) {
                            commit = true;
                            log.debug("Successfully inserted " + columnName + " resources: insert count="
                                    + resourceInsertCounts[i]);
                        } else if (resourceInsertCounts[i] == Statement.SUCCESS_NO_INFO) {
                            commit = true;
                            log.debug("Successfully inserted " + columnName + " resources; No Success info");
                        } else if (resourceInsertCounts[i] == Statement.EXECUTE_FAILED) {
                            log.warn("Failed to insert " + columnName + " resources");
                        }
                    }

                    if (commit) {
                        log.debug("Committing " + columnName + " resource inserts...");
                        connection.commit();
                    }
                }
            }
        }
    } catch (Exception e) {
        log.warn("Error generated while processsing generation of " + columnName + " form resources", e);

        try {
            if (connection != null) {
                connection.rollback();
            }
        } catch (Exception ex) {
            log.error("Failed to rollback", ex);
        }

        throw new CustomChangeException(e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                log.warn("Failed to close the resultset object");
            }
        }
        if (connection != null && originalAutoCommit != null) {
            try {
                connection.setAutoCommit(originalAutoCommit);
            } catch (DatabaseException e) {
                log.error("Failed to reset auto commit", e);
            }
        }

        closeStatementQuietly(selectStmt);
        closeStatementQuietly(insertClobsStmt);
        closeStatementQuietly(insertResourcesStmt);
    }
}

From source file:gobblin.metastore.database.DatabaseJobHistoryStoreV101.java

private void addTaskExecutionInfoToBatch(PreparedStatement upsertStatement, TaskExecutionInfo info)
        throws SQLException {
    Preconditions.checkArgument(info.hasTaskId());
    Preconditions.checkArgument(info.hasJobId());

    int index = 0;
    upsertStatement.setString(++index, info.getTaskId());
    upsertStatement.setString(++index, info.getJobId());
    upsertStatement.setTimestamp(++index, info.hasStartTime() ? new Timestamp(info.getStartTime()) : null,
            getCalendarUTCInstance());
    upsertStatement.setTimestamp(++index, info.hasEndTime() ? new Timestamp(info.getEndTime()) : null,
            getCalendarUTCInstance());
    upsertStatement.setLong(++index, info.hasDuration() ? info.getDuration() : -1);
    upsertStatement.setString(++index, info.hasState() ? info.getState().name() : null);
    upsertStatement.setString(++index, info.hasFailureException() ? info.getFailureException() : null);
    upsertStatement.setLong(++index, info.hasLowWatermark() ? info.getLowWatermark() : -1);
    upsertStatement.setLong(++index, info.hasHighWatermark() ? info.getHighWatermark() : -1);
    upsertStatement.setString(++index,
            info.hasTable() && info.getTable().hasNamespace() ? info.getTable().getNamespace() : null);
    upsertStatement.setString(++index,
            info.hasTable() && info.getTable().hasName() ? info.getTable().getName() : null);
    upsertStatement.setString(++index,
            info.hasTable() && info.getTable().hasType() ? info.getTable().getType().name() : null);
    upsertStatement.addBatch();
}

From source file:org.wso2.carbon.is.migration.service.v510.migrator.IdentityDataMigrator.java

/**
 * migrate data in the identity database and finalize the database table restructuring
 */
public void migrateIdentityData() throws MigrationClientException {
    log.info("MIGRATION-LOGS >> Going to start : migrateIdentityData.");
    Connection identityConnection = null;
    PreparedStatement selectFromAccessTokenPS = null;
    PreparedStatement insertScopeAssociationPS = null;
    PreparedStatement insertTokenScopeHashPS = null;
    PreparedStatement insertTokenIdPS = null;
    PreparedStatement updateUserNamePS = null;
    PreparedStatement selectFromAuthorizationCodePS = null;
    PreparedStatement updateUserNameAuthorizationCodePS = null;
    PreparedStatement selectIdnAssociatedIdPS = null;
    PreparedStatement updateIdnAssociatedIdPS = null;
    PreparedStatement selectConsumerAppsPS = null;
    PreparedStatement updateConsumerAppsPS = null;

    ResultSet accessTokenRS = null;
    ResultSet authzCodeRS = null;
    ResultSet selectIdnAssociatedIdRS = null;
    ResultSet selectConsumerAppsRS = null;
    try {
        identityConnection = getDataSource().getConnection();
        identityConnection.setAutoCommit(false);

        try {
            selectConsumerAppsPS = identityConnection.prepareStatement(SQLQueries.SELECT_FROM_CONSUMER_APPS);
            updateConsumerAppsPS = identityConnection.prepareStatement(SQLQueries.UPDATE_CONSUMER_APPS);

            selectConsumerAppsRS = selectConsumerAppsPS.executeQuery();
            log.info("MIGRATION-LOGS >> Executed query : " + selectConsumerAppsPS.toString());
            boolean isConsumerAppsAvail = false;
            while (selectConsumerAppsRS.next()) {
                int id = selectConsumerAppsRS.getInt("ID");
                String username = selectConsumerAppsRS.getString("USERNAME");
                String userDomainFromDB = selectConsumerAppsRS.getString("USER_DOMAIN");

                try {
                    if (userDomainFromDB == null) {
                        String userDomain = UserCoreUtil.extractDomainFromName(username);
                        username = UserCoreUtil.removeDomainFromName(username);

                        updateConsumerAppsPS.setString(1, username);
                        updateConsumerAppsPS.setString(2, userDomain);
                        updateConsumerAppsPS.setInt(3, id);
                        if (isBatchUpdate()) {
                            isConsumerAppsAvail = true;
                            updateConsumerAppsPS.addBatch();
                        } else {
                            updateConsumerAppsPS.executeUpdate();
                            log.info("MIGRATION-LOGS >> Executed query : " + updateConsumerAppsPS.toString());
                        }
                        if (log.isDebugEnabled()) {
                            log.debug("MIGRATION-LOGS >> migrating consumer app :" + id);
                        }
                    }
                } catch (Exception e) {
                    log.error("MIGRATION-ERROR-LOGS-011 >> Error while executing the migration.", e);
                    if (!isContinueOnError()) {
                        throw new MigrationClientException("Error while executing the migration.", e);
                    }
                }
            }
            if (isConsumerAppsAvail && isBatchUpdate()) {
                int[] ints = updateConsumerAppsPS.executeBatch();
                log.info("MIGRATION-LOGS >> Executed query : " + updateConsumerAppsPS.toString());
            }
        } catch (Exception e) {
            log.error("MIGRATION-ERROR-LOGS-012 >> Error while executing the migration.", e);
            if (!isContinueOnError()) {
                throw new MigrationClientException("Error while executing the migration.", e);
            }
        }

        String selectFromAccessToken = SQLQueries.SELECT_FROM_ACCESS_TOKEN;
        selectFromAccessTokenPS = identityConnection.prepareStatement(selectFromAccessToken);

        String insertScopeAssociation = SQLQueries.INSERT_SCOPE_ASSOCIATION;
        insertScopeAssociationPS = identityConnection.prepareStatement(insertScopeAssociation);

        String insertTokenScopeHash = SQLQueries.INSERT_TOKEN_SCOPE_HASH;
        insertTokenScopeHashPS = identityConnection.prepareStatement(insertTokenScopeHash);

        String insertTokenId = SQLQueries.INSERT_TOKEN_ID;
        insertTokenIdPS = identityConnection.prepareStatement(insertTokenId);

        String updateUserName = SQLQueries.UPDATE_USER_NAME;
        updateUserNamePS = identityConnection.prepareStatement(updateUserName);

        try {
            accessTokenRS = selectFromAccessTokenPS.executeQuery();
            log.info("MIGRATION-LOGS >> Executed query : " + selectFromAccessTokenPS.toString());
            while (accessTokenRS.next()) {
                String accessToken = null;
                try {
                    accessToken = accessTokenRS.getString("ACCESS_TOKEN");
                    String scopeString = accessTokenRS.getString("TOKEN_SCOPE");
                    String authzUser = accessTokenRS.getString("AUTHZ_USER");
                    String tokenIdFromDB = accessTokenRS.getString("TOKEN_ID");

                    if (tokenIdFromDB == null) {
                        String tokenId = UUID.randomUUID().toString();

                        String username = UserCoreUtil
                                .removeDomainFromName(MultitenantUtils.getTenantAwareUsername(authzUser));
                        String userDomain = UserCoreUtil.extractDomainFromName(authzUser);
                        int tenantId = ISMigrationServiceDataHolder.getRealmService().getTenantManager()
                                .getTenantId(MultitenantUtils.getTenantDomain(authzUser));

                        try {
                            insertTokenIdPS.setString(1, tokenId);
                            insertTokenIdPS.setString(2, accessToken);

                            if (isBatchUpdate()) {
                                insertTokenIdPS.addBatch();
                            } else {
                                insertTokenIdPS.executeUpdate();
                                log.info("MIGRATION-LOGS >> Executed query : " + insertTokenIdPS.toString());
                            }
                        } catch (Exception e) {
                            log.error("MIGRATION-ERROR-LOGS-013 >> Error while executing the migration.", e);
                            if (!isContinueOnError()) {
                                throw new MigrationClientException("Error while executing the migration.", e);
                            }
                        }

                        try {
                            updateUserNamePS.setString(1, username);
                            updateUserNamePS.setInt(2, tenantId);
                            updateUserNamePS.setString(3, userDomain);
                            updateUserNamePS.setString(4, authzUser);
                            updateUserNamePS.setString(5, accessToken);
                            if (isBatchUpdate()) {
                                updateUserNamePS.addBatch();
                            } else {
                                updateUserNamePS.executeUpdate();
                                log.info("MIGRATION-LOGS >> Executed query : "
                                        + updateUserNamePS.toString());
                            }
                        } catch (Exception e) {
                            log.error("MIGRATION-ERROR-LOGS-014 >> Error while executing the migration.", e);
                            if (!isContinueOnError()) {
                                throw new MigrationClientException("Error while executing the migration.", e);
                            }
                        }

                        try {
                            insertTokenScopeHashPS.setString(1, DigestUtils.md5Hex(scopeString));
                            insertTokenScopeHashPS.setString(2, accessToken);
                            if (isBatchUpdate()) {
                                insertTokenScopeHashPS.addBatch();
                            } else {
                                insertTokenScopeHashPS.executeUpdate();
                                log.info("MIGRATION-LOGS >> Executed query : "
                                        + insertTokenScopeHashPS.toString());
                            }
                        } catch (Exception e) {
                            log.error("MIGRATION-ERROR-LOGS-015 >> Error while executing the migration.", e);
                            if (!isContinueOnError()) {
                                throw new MigrationClientException("Error while executing the migration.", e);
                            }
                        }

                        if (log.isDebugEnabled()) {
                            log.debug("MIGRATION-LOGS >> migrating access token : " + accessToken);
                        }

                        if (scopeString != null) {
                            String scopes[] = scopeString.split(" ");
                            for (String scope : scopes) {
                                try {
                                    insertScopeAssociationPS.setString(1, tokenId);
                                    insertScopeAssociationPS.setString(2, scope);
                                    if (isBatchUpdate()) {
                                        insertScopeAssociationPS.addBatch();
                                    } else {
                                        insertScopeAssociationPS.executeUpdate();
                                        log.info("MIGRATION-LOGS >> Executed query : "
                                                + insertScopeAssociationPS.toString());
                                    }
                                } catch (Exception e) {
                                    log.error(
                                            "MIGRATION-ERROR-LOGS-016 >> Error while executing the migration.",
                                            e);
                                    if (!isContinueOnError()) {
                                        throw new MigrationClientException(
                                                "Error while executing the migration.", e);
                                    }
                                }
                            }
                        }
                    }
                } catch (UserStoreException e) {
                    log.error("MIGRATION-ERROR-LOGS-017 >> Error while executing the migration.", e);
                    if (!isContinueOnError()) {
                        throw new MigrationClientException("Error while executing the migration.", e);
                    }
                }
            }
            if (isBatchUpdate()) {
                try {
                    insertTokenIdPS.executeBatch();
                    log.info("MIGRATION-LOGS >> Executed query : " + insertTokenIdPS.toString());
                } catch (SQLException e) {
                    log.error("MIGRATION-ERROR-LOGS-018 >> Error while executing the migration.", e);
                    if (!isContinueOnError()) {
                        throw new MigrationClientException("Error while executing the migration.", e);
                    }
                }

                try {
                    log.info("MIGRATION-LOGS >> Started : " + insertScopeAssociationPS.toString());
                    insertScopeAssociationPS.executeBatch();
                    log.info("MIGRATION-LOGS >> Executed query : " + insertScopeAssociationPS.toString());
                } catch (SQLException e) {
                    log.error("MIGRATION-ERROR-LOGS-019 >> Error while executing the migration.", e);
                    if (!isContinueOnError()) {
                        throw new MigrationClientException("Error while executing the migration.", e);
                    }
                }
                try {
                    updateUserNamePS.executeBatch();
                    log.info("MIGRATION-LOGS >> Executed query : " + updateUserNamePS.toString());
                } catch (SQLException e) {
                    log.error("MIGRATION-ERROR-LOGS-020 >> Error while executing the migration.", e);
                    if (!isContinueOnError()) {
                        throw new MigrationClientException("Error while executing the migration.", e);
                    }
                }

                try {
                    insertTokenScopeHashPS.executeBatch();
                    log.info("MIGRATION-LOGS >> Executed query : " + insertTokenScopeHashPS.toString());
                } catch (SQLException e) {
                    log.error("MIGRATION-ERROR-LOGS-021 >> Error while executing the migration.", e);
                    if (!isContinueOnError()) {
                        throw new MigrationClientException("Error while executing the migration.", e);
                    }
                }
            }
        } catch (Exception e) {
            log.error("MIGRATION-ERROR-LOGS-022 >> Error while executing the migration.", e);
            if (!isContinueOnError()) {
                throw new MigrationClientException("Error while executing the migration.", e);
            }
        }

        String selectFromAuthorizationCode = SQLQueries.SELECT_FROM_AUTHORIZATION_CODE;
        selectFromAuthorizationCodePS = identityConnection.prepareStatement(selectFromAuthorizationCode);

        String updateUserNameAuthorizationCode = SQLQueries.UPDATE_USER_NAME_AUTHORIZATION_CODE;
        updateUserNameAuthorizationCodePS = identityConnection
                .prepareStatement(updateUserNameAuthorizationCode);

        try {
            authzCodeRS = selectFromAuthorizationCodePS.executeQuery();
            log.info("MIGRATION-LOGS >> Executed query : " + authzCodeRS.toString());
            while (authzCodeRS.next()) {
                String authorizationCode = null;
                try {
                    authorizationCode = authzCodeRS.getString("AUTHORIZATION_CODE");
                    String authzUser = authzCodeRS.getString("AUTHZ_USER");
                    String userDomainFromDB = authzCodeRS.getString("USER_DOMAIN");

                    if (userDomainFromDB == null) {
                        String username = UserCoreUtil
                                .removeDomainFromName(MultitenantUtils.getTenantAwareUsername(authzUser));
                        String userDomain = UserCoreUtil.extractDomainFromName(authzUser);
                        int tenantId = ISMigrationServiceDataHolder.getRealmService().getTenantManager()
                                .getTenantId(MultitenantUtils.getTenantDomain(authzUser));

                        try {
                            updateUserNameAuthorizationCodePS.setString(1, username);
                            updateUserNameAuthorizationCodePS.setInt(2, tenantId);
                            updateUserNameAuthorizationCodePS.setString(3, userDomain);
                            updateUserNameAuthorizationCodePS.setString(4, UUID.randomUUID().toString());
                            updateUserNameAuthorizationCodePS.setString(5, authzUser);
                            updateUserNameAuthorizationCodePS.setString(6, authorizationCode);
                            if (isBatchUpdate()) {
                                updateUserNameAuthorizationCodePS.addBatch();
                            } else {
                                updateUserNameAuthorizationCodePS.executeUpdate();
                                log.info("MIGRATION-LOGS >> Executed query : "
                                        + updateUserNameAuthorizationCodePS.toString());
                            }
                        } catch (Exception e) {
                            log.error("MIGRATION-ERROR-LOGS-023 >> Error while executing the migration.", e);
                            if (!isContinueOnError()) {
                                throw new MigrationClientException("Error while executing the migration.", e);
                            }
                        }
                        if (log.isDebugEnabled()) {
                            log.debug("MIGRATION-LOGS >> migrating authorization code : " + authorizationCode);
                        }
                    }
                } catch (UserStoreException e) {
                    log.warn("MIGRATION-LOGS >> Error while migrating authorization code : "
                            + authorizationCode);
                    if (!isContinueOnError()) {
                        throw new MigrationClientException("Error while executing the migration.", e);
                    }
                }
            }
            if (isBatchUpdate()) {
                updateUserNameAuthorizationCodePS.executeBatch();
                log.info("MIGRATION-LOGS >> Executed query : " + updateUserNameAuthorizationCodePS.toString());
            }
        } catch (Exception e) {
            log.error("MIGRATION-ERROR-LOGS-024 >> Error while executing the migration.", e);
            if (!isContinueOnError()) {
                throw new MigrationClientException("Error while executing the migration.", e);
            }
        }

        String selectIdnAssociatedId = SQLQueries.SELECT_IDN_ASSOCIATED_ID;
        selectIdnAssociatedIdPS = identityConnection.prepareStatement(selectIdnAssociatedId);

        try {
            selectIdnAssociatedIdRS = selectIdnAssociatedIdPS.executeQuery();

            updateIdnAssociatedIdPS = identityConnection.prepareStatement(SQLQueries.UPDATE_IDN_ASSOCIATED_ID);

            while (selectIdnAssociatedIdRS.next()) {
                int id = selectIdnAssociatedIdRS.getInt("ID");
                String username = selectIdnAssociatedIdRS.getString("USER_NAME");
                String userDomainFromDB = selectIdnAssociatedIdRS.getString("DOMAIN_NAME");

                if (userDomainFromDB == null) {
                    try {
                        updateIdnAssociatedIdPS.setString(1, UserCoreUtil.extractDomainFromName(username));
                        updateIdnAssociatedIdPS.setString(2, UserCoreUtil.removeDomainFromName(username));
                        updateIdnAssociatedIdPS.setInt(3, id);
                        if (isBatchUpdate()) {
                            updateIdnAssociatedIdPS.addBatch();
                        } else {
                            updateIdnAssociatedIdPS.executeUpdate();
                            log.info(
                                    "MIGRATION-LOGS >> Executed query : " + updateIdnAssociatedIdPS.toString());
                        }
                        if (log.isDebugEnabled()) {
                            log.debug("MIGRATION-LOGS >> migrating IdnAssociatedId : " + id);
                        }
                    } catch (Exception e) {
                        log.error("MIGRATION-ERROR-LOGS-024 >> Error while executing the migration.", e);
                        if (!isContinueOnError()) {
                            throw new MigrationClientException("Error while executing the migration.", e);
                        }
                    }
                }
            }
            if (isBatchUpdate()) {
                updateIdnAssociatedIdPS.executeBatch();
                log.info("MIGRATION-LOGS >> Executed query : " + updateIdnAssociatedIdPS.toString());
            }
        } catch (Exception e) {
            log.error("MIGRATION-ERROR-LOGS-025 >> Error while executing the migration.", e);
            if (!isContinueOnError()) {
                throw new MigrationClientException("Error while executing the migration.", e);
            }
        }

        identityConnection.commit();

    } catch (SQLException e) {
        IdentityDatabaseUtil.rollBack(identityConnection);
        log.error("MIGRATION-ERROR-LOGS--026 >> Error while executing the migration.", e);
        if (!isContinueOnError()) {
            throw new MigrationClientException("Error while executing the migration.", e);
        }
    } catch (Exception e) {
        log.error("MIGRATION-ERROR-LOGS-027 >> Error while executing the migration.", e);
        if (!isContinueOnError()) {
            throw new MigrationClientException("Error while executing the migration.", e);
        }
    } finally {
        try {
            IdentityDatabaseUtil.closeResultSet(accessTokenRS);
            IdentityDatabaseUtil.closeResultSet(authzCodeRS);
            IdentityDatabaseUtil.closeResultSet(selectIdnAssociatedIdRS);
            IdentityDatabaseUtil.closeResultSet(selectConsumerAppsRS);

            IdentityDatabaseUtil.closeStatement(selectFromAccessTokenPS);
            IdentityDatabaseUtil.closeStatement(insertScopeAssociationPS);
            IdentityDatabaseUtil.closeStatement(insertTokenIdPS);
            IdentityDatabaseUtil.closeStatement(updateUserNamePS);
            IdentityDatabaseUtil.closeStatement(insertTokenScopeHashPS);
            IdentityDatabaseUtil.closeStatement(updateUserNameAuthorizationCodePS);
            IdentityDatabaseUtil.closeStatement(selectFromAuthorizationCodePS);
            IdentityDatabaseUtil.closeStatement(selectIdnAssociatedIdPS);
            IdentityDatabaseUtil.closeStatement(updateIdnAssociatedIdPS);
            IdentityDatabaseUtil.closeStatement(selectConsumerAppsPS);
            IdentityDatabaseUtil.closeStatement(updateConsumerAppsPS);

            IdentityDatabaseUtil.closeConnection(identityConnection);
        } catch (Exception e) {
            log.error("MIGRATION-ERROR-LOGS-028 >> Error while executing the migration.", e);
        }
    }
    log.info("MIGRATION-LOGS >> Done : migrateIdentityData.");
}

From source file:org.apache.ctakes.ytex.uima.mapper.DocumentMapperServiceImpl.java

private BiMap<Annotation, Integer> saveAnnoBase(final JCas jcas, final Set<String> setTypesToIgnore,
        final int docId) {
    final AnnotationIndex<Annotation> annoIdx = jcas.getAnnotationIndex(Annotation.typeIndexID);
    final List<Annotation> listAnno = new ArrayList<Annotation>(annoIdx.size());
    final BiMap<Annotation, Integer> mapAnnoToId = HashBiMap.create();
    final FSIterator<Annotation> annoIterator = annoIdx.iterator();
    this.sessionFactory.getCurrentSession().doWork(new Work() {

        @Override
        public void execute(Connection conn) throws SQLException {
            PreparedStatement ps = null;
            ResultSet rs = null;
            try {
                ps = conn.prepareStatement("insert into " + getTablePrefix()
                        + "anno_base (document_id, span_begin, span_end, uima_type_id) values (?, ?, ?, ?)",
                        Statement.RETURN_GENERATED_KEYS);
                while (annoIterator.hasNext()) {
                    Annotation anno = (Annotation) annoIterator.next();
                    String annoClass = anno.getClass().getName();
                    if (!setTypesToIgnore.contains(annoClass) && uimaTypeMap.containsKey(annoClass)) {
                        // should not ignore, and we know how to map this
                        // annotation
                        listAnno.add(anno);
                        ps.setInt(1, docId);
                        ps.setInt(2, anno.getBegin());
                        ps.setInt(3, anno.getEnd());
                        ps.setInt(4, uimaTypeMap.get(annoClass).getUimaTypeID());
                        ps.addBatch();
                    }
                }
                ps.executeBatch();
                rs = ps.getGeneratedKeys();
                int annoIndex = 0;
                while (rs.next()) {
                    mapAnnoToId.put(listAnno.get(annoIndex), rs.getInt(1));
                    annoIndex++;
                }
            } catch (SQLException e) {
                throw new RuntimeException(e);
            } finally {
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (SQLException e) {
                    }
                }
                if (ps != null) {
                    try {
                        ps.close();
                    } catch (SQLException e) {
                    }
                }
            }
        }
    });
    return mapAnnoToId;
}

From source file:org.rhq.enterprise.server.event.EventManagerBean.java

public void addEventData(Map<EventSource, Set<Event>> events) {

    if (events == null || events.size() == 0)
        return;

    String statementSql;
    Connection conn = null;
    PreparedStatement ps = null;
    try {
        conn = rhqDs.getConnection();
        DatabaseType dbType = DatabaseTypeFactory.getDatabaseType(conn);

        if (dbType instanceof PostgresqlDatabaseType || dbType instanceof OracleDatabaseType
                || dbType instanceof H2DatabaseType) {
            String nextvalSql = JDBCUtil.getNextValSql(conn, EventSource.TABLE_NAME);
            statementSql = String.format(EVENT_SOURCE_INSERT_STMT, nextvalSql);
        } else if (dbType instanceof SQLServerDatabaseType) {
            statementSql = EVENT_SOURCE_INSERT_STMT_AUTOINC;
        } else {
            throw new IllegalArgumentException("Unknown database type, can't continue: " + dbType);
        }

        // First insert the "keys" (i.e. the EventSources).
        ps = conn.prepareStatement(statementSql);
        try {
            for (EventSource eventSource : events.keySet()) {
                int paramIndex = 1;
                ps.setString(paramIndex++, eventSource.getEventDefinition().getName());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getName());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getPlugin());
                ps.setInt(paramIndex++, eventSource.getResource().getId());
                ps.setString(paramIndex++, eventSource.getLocation());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getName());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getName());
                ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getPlugin());
                ps.setInt(paramIndex++, eventSource.getResource().getId());
                ps.setString(paramIndex++, eventSource.getLocation());

                ps.addBatch();
            }
            ps.executeBatch();
        } finally {
            JDBCUtil.safeClose(ps);
        }

        if (dbType instanceof PostgresqlDatabaseType || dbType instanceof OracleDatabaseType
                || dbType instanceof H2DatabaseType) {
            String nextvalSql = JDBCUtil.getNextValSql(conn, Event.TABLE_NAME);
            statementSql = String.format(EVENT_INSERT_STMT, nextvalSql);
        } else if (dbType instanceof SQLServerDatabaseType) {
            statementSql = EVENT_INSERT_STMT_AUTOINC;
        } else {
            throw new IllegalArgumentException("Unknown database type, can't continue: " + dbType);
        }

        // Then insert the "values" (i.e. the Events).
        ps = conn.prepareStatement(statementSql);
        try {
            for (EventSource eventSource : events.keySet()) {
                Set<Event> eventData = events.get(eventSource);
                for (Event event : eventData) {
                    int paramIndex = 1;
                    ps.setString(paramIndex++, eventSource.getEventDefinition().getName());
                    ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getName());
                    ps.setString(paramIndex++, eventSource.getEventDefinition().getResourceType().getPlugin());
                    ps.setInt(paramIndex++, eventSource.getResource().getId());
                    ps.setString(paramIndex++, eventSource.getLocation());
                    ps.setLong(paramIndex++, event.getTimestamp());
                    ps.setString(paramIndex++, event.getSeverity().toString());
                    ps.setString(paramIndex++, event.getDetail());
                    ps.addBatch();
                }

                notifyAlertConditionCacheManager("addEventData", eventSource,
                        eventData.toArray(new Event[eventData.size()]));
            }
            ps.executeBatch();
        } finally {
            JDBCUtil.safeClose(ps);
        }

    } catch (Throwable t) {
        // TODO what do we want to do here ?
        log.warn("addEventData: Insert of events failed : " + t.getMessage());
        if (t instanceof SQLException) {
            SQLException e = (SQLException) t;
            Exception e2 = e.getNextException();
            if (e2 != null)
                log.warn("     : " + e2.getMessage());
            if (t.getCause() != null)
                log.warn("     : " + t.getCause().getMessage());
        }
    } finally {
        JDBCUtil.safeClose(conn);
    }
}

From source file:org.wso2.carbon.policy.mgt.core.dao.impl.PolicyDAOImpl.java

@Override
public Policy updateUserOfPolicy(List<String> usersToAdd, Policy policy) throws PolicyManagerDAOException {
    Connection conn;
    PreparedStatement insertStmt = null;
    PreparedStatement deleteStmt = null;
    final List<String> currentUsers = policy.getUsers();

    SetReferenceTransformer<String> transformer = new SetReferenceTransformer<String>();

    transformer.transform(currentUsers, usersToAdd);
    usersToAdd = transformer.getObjectsToAdd();
    List<String> usersToDelete = transformer.getObjectsToRemove();
    try {
        conn = this.getConnection();
        if (usersToAdd.size() > 0) {
            String query = "INSERT INTO DM_USER_POLICY (POLICY_ID, USERNAME) VALUES (?, ?)";
            insertStmt = conn.prepareStatement(query);
            for (String username : usersToAdd) {
                insertStmt.setInt(1, policy.getId());
                insertStmt.setString(2, username);
                insertStmt.addBatch();
            }
            insertStmt.executeBatch();
        }
        if (usersToDelete.size() > 0) {
            String deleteQuery = "DELETE FROM DM_USER_POLICY WHERE USERNAME=? AND POLICY_ID=?";
            deleteStmt = conn.prepareStatement(deleteQuery);
            for (String username : usersToDelete) {
                deleteStmt.setString(1, username);
                deleteStmt.setInt(2, policy.getId());
                deleteStmt.addBatch();
            }
            deleteStmt.executeBatch();
        }

    } catch (SQLException e) {
        throw new PolicyManagerDAOException("Error occurred while adding the user name with policy to database",
                e);
    } finally {
        PolicyManagementDAOUtil.cleanupResources(insertStmt, null);
        PolicyManagementDAOUtil.cleanupResources(deleteStmt, null);
    }
    return policy;
}

From source file:org.wso2.carbon.policy.mgt.core.dao.impl.PolicyDAOImpl.java

@Override
public Policy updateRolesOfPolicy(List<String> rolesToAdd, Policy previousPolicy)
        throws PolicyManagerDAOException {
    Connection conn;
    PreparedStatement insertStmt = null;
    PreparedStatement deleteStmt = null;

    final List<String> currentRoles = previousPolicy.getRoles();

    SetReferenceTransformer<String> transformer = new SetReferenceTransformer<String>();

    transformer.transform(currentRoles, rolesToAdd);
    rolesToAdd = transformer.getObjectsToAdd();
    List<String> rolesToDelete = transformer.getObjectsToRemove();
    try {
        conn = this.getConnection();
        if (rolesToAdd.size() > 0) {
            String query = "INSERT INTO DM_ROLE_POLICY (ROLE_NAME, POLICY_ID) VALUES (?, ?)";
            insertStmt = conn.prepareStatement(query);
            for (String role : rolesToAdd) {
                insertStmt.setString(1, role);
                insertStmt.setInt(2, previousPolicy.getId());
                insertStmt.addBatch();
            }
            insertStmt.executeBatch();
        }
        if (rolesToDelete.size() > 0) {
            String deleteQuery = "DELETE FROM DM_ROLE_POLICY WHERE ROLE_NAME=? AND POLICY_ID=?";
            deleteStmt = conn.prepareStatement(deleteQuery);
            for (String role : rolesToDelete) {
                deleteStmt.setString(1, role);
                deleteStmt.setInt(2, previousPolicy.getId());
                deleteStmt.addBatch();
            }
            deleteStmt.executeBatch();
        }
    } catch (SQLException e) {
        throw new PolicyManagerDAOException("Error occurred while adding the role name with policy to database",
                e);
    } finally {
        PolicyManagementDAOUtil.cleanupResources(insertStmt, null);
        PolicyManagementDAOUtil.cleanupResources(deleteStmt, null);
    }
    return previousPolicy;
}

From source file:org.openmrs.util.databasechange.ConceptValidatorChangeSet.java

/**
 * Executes all the changes to the concept names as a batch update.
 *
 * @param connection The database connection
 */
private void runBatchUpdate(JdbcConnection connection) {
    PreparedStatement pStmt = null;

    try {
        connection.setAutoCommit(false);
        pStmt = connection.prepareStatement(
                "UPDATE concept_name SET locale = ?, concept_name_type = ?, locale_preferred = ?, voided = ?, date_voided = ?, void_reason = ?, voided_by = ? WHERE concept_name_id = ?");

        Integer userId = DatabaseUpdater.getAuthenticatedUserId();
        //if we have no authenticated user (for API users), set as Daemon
        if (userId == null || userId < 1) {
            userId = getInt(connection, "SELECT min(user_id) FROM users");
            //leave it as null rather than setting it to 0
            if (userId < 1) {
                userId = null;
            }
        }

        for (ConceptName conceptName : updatedConceptNames) {
            pStmt.setString(1, conceptName.getLocale().toString());
            pStmt.setString(2,
                    (conceptName.getConceptNameType() != null) ? conceptName.getConceptNameType().toString()
                            : null);
            pStmt.setBoolean(3, conceptName.isLocalePreferred());
            pStmt.setBoolean(4, conceptName.isVoided());
            pStmt.setDate(5, conceptName.isVoided() ? new Date(System.currentTimeMillis()) : null);
            pStmt.setString(6, conceptName.getVoidReason());
            // "Not all databases allow for a non-typed Null to be sent to the backend", so we can't use setInt
            pStmt.setObject(7, (conceptName.isVoided() && userId != null) ? userId : null, Types.INTEGER);
            pStmt.setInt(8, conceptName.getConceptNameId());

            pStmt.addBatch();
        }

        try {
            int[] updateCounts = pStmt.executeBatch();
            for (int i = 0; i < updateCounts.length; i++) {
                if (updateCounts[i] > -1) {
                    log.debug("Successfully executed: updateCount=" + updateCounts[i]);
                } else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
                    log.debug("Successfully executed; No Success info");
                } else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                    log.warn("Failed to execute update");
                }
            }

            log.debug("Committing updates...");
            connection.commit();
        } catch (BatchUpdateException be) {
            log.warn("Error generated while processsing batch update", be);
            int[] updateCounts = be.getUpdateCounts();

            for (int i = 0; i < updateCounts.length; i++) {
                if (updateCounts[i] > -1) {
                    log.warn("Executed with exception: updateCount=" + updateCounts[i]);
                } else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
                    log.warn("Executed with exception; No Success info");
                } else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                    log.warn("Failed to execute update with exception");
                }
            }

            try {
                log.warn("Rolling back batch", be);
                connection.rollback();
            } catch (Exception rbe) {
                log.warn("Error generated while rolling back batch update", be);
            }
        }
    } catch (SQLException e) {
        log.warn("Error generated", e);
    } catch (DatabaseException e) {
        log.warn("Error generated", e);
    } finally {
        //reset to auto commit mode
        try {
            connection.setAutoCommit(true);
        } catch (DatabaseException e) {
            log.warn("Failed to reset auto commit back to true", e);
        }

        if (pStmt != null) {
            try {
                pStmt.close();
            } catch (SQLException e) {
                log.warn("Failed to close the prepared statement object");
            }
        }
    }
}

From source file:org.rimudb.Table.java

public void deleteByPrimaryKeyInBatch(Session session, WhereList primaryWhereList,
        boolean ignoreAutoCommitBatchErrors) throws RimuDBException {
    checkWhereListIsPrimaryKey(primaryWhereList, getPrimaryKeyCount());
    PreparedStatement stmt = null;
    int statID = 0;
    try {

        String sql = sqlAdapter.getDeleteStatement(tableMetaData, getTableName(), primaryWhereList);

        // Get the statistic ID
        int loggingType = getDatabase().getDatabaseConfiguration().getLoggingType();
        if (loggingType == DatabaseConfiguration.LOG_STATISTICS) {
            statID = StatisticCollector.getInstance().createStatistic(sql);
        } else if (loggingType == DatabaseConfiguration.LOG_SQL_ONLY) {
            log.info("SQL=" + sql);
        }

        stmt = session.getBatchStatement(this, Session.BATCH_DELETE);
        if (stmt == null) {
            stmt = createPreparedStatement(session.getConnection(), sql, CrudType.DELETE);
            session.setBatchStatement(this, stmt, Session.BATCH_DELETE);
        }

        recordBinder.bindStatement(stmt, primaryWhereList);

        if (statID > 0)
            StatisticCollector.getInstance().logEvent(statID, "preparetime");

        stmt.addBatch();

        if (statID > 0)
            StatisticCollector.getInstance().logEvent(statID, "executetime");

        if (statID > 0) {
            StatisticCollector.getInstance().logEvent(statID, "processtime");
            if (StatisticCollector.getInstance().exceedsThreshold(statID,
                    getDatabase().getDatabaseConfiguration().getLoggingThreshold())) {
                String text = StatisticCollector.getInstance().formatStatistics(statID,
                        getDatabase().getStatisticFormatter());
                log.info(text);
            }
            StatisticCollector.getInstance().removeID(statID);
        }

    } catch (SQLException e) {
        throw new RimuDBException(e);
    }

}

From source file:org.springframework.jdbc.core.JdbcTemplate.java

public int[] batchUpdate(String sql, final BatchPreparedStatementSetter pss) throws DataAccessException {
    if (logger.isDebugEnabled()) {
        logger.debug("Executing SQL batch update [" + sql + "]");
    }
    return (int[]) execute(sql, new PreparedStatementCallback() {
        public Object doInPreparedStatement(PreparedStatement ps) throws SQLException {
            int batchSize = pss.getBatchSize();
            DatabaseMetaData dbmd = ps.getConnection().getMetaData();
            try {
                boolean supportsBatchUpdates = false;
                try {
                    if (dbmd != null) {
                        if (dbmd.supportsBatchUpdates()) {
                            if (logger.isDebugEnabled()) {
                                logger.debug("Batch Updates supported for [" + dbmd.getDriverName() + " "
                                        + dbmd.getDriverVersion() + "]");
                            }
                            supportsBatchUpdates = true;
                        } else {
                            if (logger.isDebugEnabled()) {
                                logger.debug("Batch Updates are not supported for [" + dbmd.getDriverName()
                                        + " " + dbmd.getDriverVersion() + "]");
                            }
                        }
                    }
                } catch (AbstractMethodError ame) {
                    logger.warn("Driver does not support JDBC 2.0 method supportsBatchUpdatres ["
                            + dbmd.getDriverName() + " " + dbmd.getDriverVersion() + "]");
                }
                if (supportsBatchUpdates) {
                    for (int i = 0; i < batchSize; i++) {
                        pss.setValues(ps, i);
                        ps.addBatch();
                    }
                    return ps.executeBatch();
                } else {
                    int[] rowsAffected = new int[batchSize];
                    for (int i = 0; i < batchSize; i++) {
                        pss.setValues(ps, i);
                        rowsAffected[i] = ps.executeUpdate();
                    }
                    return rowsAffected;
                }
            } finally {
                if (pss instanceof ParameterDisposer) {
                    ((ParameterDisposer) pss).cleanupParameters();
                }
            }
        }
    });
}