Example usage for java.sql PreparedStatement executeBatch

List of usage examples for java.sql PreparedStatement executeBatch

Introduction

On this page you can find usage examples for java.sql PreparedStatement executeBatch.

Prototype

int[] executeBatch() throws SQLException;

Document

Submits a batch of commands to the database for execution and, if all commands execute successfully, returns an array of update counts.
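
Before the real-world examples below, here is a minimal sketch of the typical pattern, assuming a javax.sql.DataSource and an illustrative "person" table (both are assumptions, not taken from the projects below): bind the parameters for each row, queue each row with addBatch(), then submit them all with a single executeBatch() call.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

import javax.sql.DataSource;

public class BatchInsertExample {

    /**
     * Inserts all names in a single batch. The "person" table and its "name"
     * column are assumed for this sketch only.
     */
    public static int[] insertNames(DataSource dataSource, List<String> names) throws SQLException {
        try (Connection conn = dataSource.getConnection();
                PreparedStatement ps = conn.prepareStatement("INSERT INTO person (name) VALUES (?)")) {
            conn.setAutoCommit(false);
            for (String name : names) {
                ps.setString(1, name);
                ps.addBatch();                // queue this parameter set
            }
            int[] counts = ps.executeBatch(); // submit the whole batch in one round trip
            conn.commit();
            return counts;                    // one update count per queued command
        }
    }
}

Each entry of the returned array is the update count of the corresponding command, or Statement.SUCCESS_NO_INFO when the driver cannot report one; a failing command typically surfaces as a BatchUpdateException.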

Usage

From source file:org.wso2.carbon.is.migration.client.MigrateFrom5to510.java

/**
 * migrate data in the identity database and finalize the database table restructuring
 */
public void migrateIdentityData() {

    Connection identityConnection = null;
    PreparedStatement selectFromAccessTokenPS = null;
    PreparedStatement insertScopeAssociationPS = null;
    PreparedStatement insertTokenScopeHashPS = null;
    PreparedStatement insertTokenIdPS = null;
    PreparedStatement updateUserNamePS = null;
    PreparedStatement selectFromAuthorizationCodePS = null;
    PreparedStatement updateUserNameAuthorizationCodePS = null;
    PreparedStatement selectIdnAssociatedIdPS = null;
    PreparedStatement updateIdnAssociatedIdPS = null;
    PreparedStatement selectConsumerAppsPS = null;
    PreparedStatement updateConsumerAppsPS = null;

    ResultSet accessTokenRS = null;
    ResultSet authzCodeRS = null;
    ResultSet selectIdnAssociatedIdRS = null;
    ResultSet selectConsumerAppsRS = null;
    try {
        identityConnection = dataSource.getConnection();
        identityConnection.setAutoCommit(false);

        selectConsumerAppsPS = identityConnection.prepareStatement(SQLQueries.SELECT_FROM_CONSUMER_APPS);
        updateConsumerAppsPS = identityConnection.prepareStatement(SQLQueries.UPDATE_CONSUMER_APPS);

        selectConsumerAppsRS = selectConsumerAppsPS.executeQuery();
        while (selectConsumerAppsRS.next()) {
            int id = selectConsumerAppsRS.getInt("ID");
            String username = selectConsumerAppsRS.getString("USERNAME");
            String userDomainFromDB = selectConsumerAppsRS.getString("USER_DOMAIN");

            if (userDomainFromDB == null) {
                String userDomain = UserCoreUtil.extractDomainFromName(username);
                username = UserCoreUtil.removeDomainFromName(username);

                updateConsumerAppsPS.setString(1, username);
                updateConsumerAppsPS.setString(2, userDomain);
                updateConsumerAppsPS.setInt(3, id);
                updateConsumerAppsPS.addBatch();
            }
        }
        updateConsumerAppsPS.executeBatch();

        String selectFromAccessToken = SQLQueries.SELECT_FROM_ACCESS_TOKEN;
        selectFromAccessTokenPS = identityConnection.prepareStatement(selectFromAccessToken);

        String insertScopeAssociation = SQLQueries.INSERT_SCOPE_ASSOCIATION;
        insertScopeAssociationPS = identityConnection.prepareStatement(insertScopeAssociation);

        String insertTokenScopeHash = SQLQueries.INSERT_TOKEN_SCOPE_HASH;
        insertTokenScopeHashPS = identityConnection.prepareStatement(insertTokenScopeHash);

        String insertTokenId = SQLQueries.INSERT_TOKEN_ID;
        insertTokenIdPS = identityConnection.prepareStatement(insertTokenId);

        String updateUserName = SQLQueries.UPDATE_USER_NAME;
        updateUserNamePS = identityConnection.prepareStatement(updateUserName);

        accessTokenRS = selectFromAccessTokenPS.executeQuery();
        while (accessTokenRS.next()) {
            String accessToken = null;
            try {
                accessToken = accessTokenRS.getString("ACCESS_TOKEN");
                String scopeString = accessTokenRS.getString("TOKEN_SCOPE");
                String authzUser = accessTokenRS.getString("AUTHZ_USER");
                String tokenIdFromDB = accessTokenRS.getString("TOKEN_ID");

                if (tokenIdFromDB == null) {
                    String tokenId = UUID.randomUUID().toString();

                    String username = UserCoreUtil
                            .removeDomainFromName(MultitenantUtils.getTenantAwareUsername(authzUser));
                    String userDomain = UserCoreUtil.extractDomainFromName(authzUser);
                    int tenantId = ISMigrationServiceDataHolder.getRealmService().getTenantManager()
                            .getTenantId(MultitenantUtils.getTenantDomain(authzUser));

                    insertTokenIdPS.setString(1, tokenId);
                    insertTokenIdPS.setString(2, accessToken);
                    insertTokenIdPS.addBatch();

                    updateUserNamePS.setString(1, username);
                    updateUserNamePS.setInt(2, tenantId);
                    updateUserNamePS.setString(3, userDomain);
                    updateUserNamePS.setString(4, authzUser);
                    updateUserNamePS.setString(5, accessToken);
                    updateUserNamePS.addBatch();

                    insertTokenScopeHashPS.setString(1, DigestUtils.md5Hex(scopeString));
                    insertTokenScopeHashPS.setString(2, accessToken);
                    insertTokenScopeHashPS.addBatch();

                    if (scopeString != null) {
                        String scopes[] = scopeString.split(" ");
                        for (String scope : scopes) {
                            insertScopeAssociationPS.setString(1, tokenId);
                            insertScopeAssociationPS.setString(2, scope);
                            insertScopeAssociationPS.addBatch();
                        }
                    }
                }
            } catch (UserStoreException e) {
                log.warn("Error while migrating access token : " + accessToken);
            }
        }

        String selectFromAuthorizationCode = SQLQueries.SELECT_FROM_AUTHORIZATION_CODE;
        selectFromAuthorizationCodePS = identityConnection.prepareStatement(selectFromAuthorizationCode);

        String updateUserNameAuthorizationCode = SQLQueries.UPDATE_USER_NAME_AUTHORIZATION_CODE;
        updateUserNameAuthorizationCodePS = identityConnection
                .prepareStatement(updateUserNameAuthorizationCode);

        authzCodeRS = selectFromAuthorizationCodePS.executeQuery();
        while (authzCodeRS.next()) {
            String authorizationCode = null;
            try {
                authorizationCode = authzCodeRS.getString("AUTHORIZATION_CODE");
                String authzUser = authzCodeRS.getString("AUTHZ_USER");
                String userDomainFromDB = authzCodeRS.getString("USER_DOMAIN");

                if (userDomainFromDB == null) {
                    String username = UserCoreUtil
                            .removeDomainFromName(MultitenantUtils.getTenantAwareUsername(authzUser));
                    String userDomain = UserCoreUtil.extractDomainFromName(authzUser);
                    int tenantId = ISMigrationServiceDataHolder.getRealmService().getTenantManager()
                            .getTenantId(MultitenantUtils.getTenantDomain(authzUser));

                    updateUserNameAuthorizationCodePS.setString(1, username);
                    updateUserNameAuthorizationCodePS.setInt(2, tenantId);
                    updateUserNameAuthorizationCodePS.setString(3, userDomain);
                    updateUserNameAuthorizationCodePS.setString(4, UUID.randomUUID().toString());
                    updateUserNameAuthorizationCodePS.setString(5, authzUser);
                    updateUserNameAuthorizationCodePS.setString(6, authorizationCode);
                    updateUserNameAuthorizationCodePS.addBatch();
                }
            } catch (UserStoreException e) {
                log.warn("Error while migrating authorization code : " + authorizationCode);
            }
        }
        insertTokenIdPS.executeBatch();
        insertScopeAssociationPS.executeBatch();
        updateUserNamePS.executeBatch();
        insertTokenScopeHashPS.executeBatch();
        updateUserNameAuthorizationCodePS.executeBatch();

        String selectIdnAssociatedId = SQLQueries.SELECT_IDN_ASSOCIATED_ID;
        selectIdnAssociatedIdPS = identityConnection.prepareStatement(selectIdnAssociatedId);
        selectIdnAssociatedIdRS = selectIdnAssociatedIdPS.executeQuery();

        updateIdnAssociatedIdPS = identityConnection.prepareStatement(SQLQueries.UPDATE_IDN_ASSOCIATED_ID);

        while (selectIdnAssociatedIdRS.next()) {
            int id = selectIdnAssociatedIdRS.getInt("ID");
            String username = selectIdnAssociatedIdRS.getString("USER_NAME");
            String userDomainFromDB = selectIdnAssociatedIdRS.getString("DOMAIN_NAME");

            if (userDomainFromDB == null) {
                updateIdnAssociatedIdPS.setString(1, UserCoreUtil.extractDomainFromName(username));
                updateIdnAssociatedIdPS.setString(2, UserCoreUtil.removeDomainFromName(username));
                updateIdnAssociatedIdPS.setInt(3, id);
                updateIdnAssociatedIdPS.addBatch();
            }
        }
        updateIdnAssociatedIdPS.executeBatch();

        identityConnection.commit();

    } catch (SQLException e) {
        IdentityDatabaseUtil.rollBack(identityConnection);
        log.error(e);
    } catch (Exception e) {
        log.error(e);
    } finally {
        IdentityDatabaseUtil.closeResultSet(accessTokenRS);
        IdentityDatabaseUtil.closeResultSet(authzCodeRS);
        IdentityDatabaseUtil.closeResultSet(selectIdnAssociatedIdRS);
        IdentityDatabaseUtil.closeResultSet(selectConsumerAppsRS);

        IdentityDatabaseUtil.closeStatement(selectFromAccessTokenPS);
        IdentityDatabaseUtil.closeStatement(insertScopeAssociationPS);
        IdentityDatabaseUtil.closeStatement(insertTokenIdPS);
        IdentityDatabaseUtil.closeStatement(updateUserNamePS);
        IdentityDatabaseUtil.closeStatement(insertTokenScopeHashPS);
        IdentityDatabaseUtil.closeStatement(updateUserNameAuthorizationCodePS);
        IdentityDatabaseUtil.closeStatement(selectFromAuthorizationCodePS);
        IdentityDatabaseUtil.closeStatement(selectIdnAssociatedIdPS);
        IdentityDatabaseUtil.closeStatement(updateIdnAssociatedIdPS);
        IdentityDatabaseUtil.closeStatement(selectConsumerAppsPS);
        IdentityDatabaseUtil.closeStatement(updateConsumerAppsPS);

        IdentityDatabaseUtil.closeConnection(identityConnection);
    }
}

From source file:com.esofthead.mycollab.module.project.service.ibatis.GanttAssignmentServiceImpl.java

private void massUpdateTaskGanttItems(final List<TaskGanttItem> taskGanttItems, Integer sAccountId) {
    if (CollectionUtils.isNotEmpty(taskGanttItems)) {
        Lock lock = DistributionLockUtil.getLock("gantt-task-service" + sAccountId);
        try {
            final long now = new GregorianCalendar().getTimeInMillis();
            if (lock.tryLock(30, TimeUnit.SECONDS)) {
                try (Connection connection = dataSource.getConnection()) {
                    connection.setAutoCommit(false);
                    PreparedStatement batchTasksStatement = connection.prepareStatement(
                            "UPDATE `m_prj_task` SET " + "taskname = ?, `startdate` = ?, `enddate` = ?, "
                                    + "`lastUpdatedTime`=?, `percentagecomplete`=?, `assignUser`=?, `ganttindex`=?, "
                                    + "`milestoneId`=?, `parentTaskId`=? WHERE `id` = ?");
                    for (int i = 0; i < taskGanttItems.size(); i++) {
                        TaskGanttItem ganttItem = taskGanttItems.get(i);
                        if (ProjectTypeConstants.TASK.equals(ganttItem.getType())) {
                            batchTasksStatement.setString(1, ganttItem.getName());
                            batchTasksStatement.setDate(2, getDateWithNullValue(ganttItem.getStartDate()));
                            batchTasksStatement.setDate(3, getDateWithNullValue(ganttItem.getEndDate()));
                            batchTasksStatement.setDate(4, new Date(now));
                            batchTasksStatement.setDouble(5, ganttItem.getProgress());
                            batchTasksStatement.setString(6, ganttItem.getAssignUser());
                            batchTasksStatement.setInt(7, ganttItem.getGanttIndex());
                            batchTasksStatement.setObject(8, ganttItem.getMilestoneId());
                            batchTasksStatement.setObject(9, ganttItem.getParentTaskId());
                            batchTasksStatement.setInt(10, ganttItem.getId());
                            batchTasksStatement.addBatch();
                        }

                    }
                    batchTasksStatement.executeBatch();
                    connection.commit();
                }
            }
        } catch (Exception e) {
            throw new MyCollabException(e);
        } finally {
            DistributionLockUtil.removeLock("gantt-task-service" + sAccountId);
            lock.unlock();
        }
    }
}

From source file:org.openbel.framework.core.kam.JdbcKAMLoaderImpl.java

/**
 * {@inheritDoc}
 */
@Override
public void loadNodes(NamespaceTable nt, ParameterTable pt, TermTable tt, TermParameterMapTable tpmt,
        ProtoNodeTable pnt) throws SQLException {
    List<String> terms = tt.getTermValues();
    Map<Integer, List<Integer>> tpmtidx = tpmt.getTermParameterIndex();

    PreparedStatement knps = getPreparedStatement(KAM_NODE_SQL);

    // load kam nodes
    final List<String> nodes = pnt.getProtoNodes();
    final Map<Integer, Integer> eqn = pnt.getEquivalences();
    final Set<Integer> added = new HashSet<Integer>();
    for (int i = 0, n = nodes.size(); i < n; i++) {
        final Integer eqId = eqn.get(i);

        // continue if we have already seen this equivalent proto node
        if (added.contains(eqId)) {
            continue;
        }

        added.add(eqId);

        final String nl = nodes.get(i);

        Integer knl = valueIndexMap.get(nl);
        if (knl == null) {
            knl = saveObject(1, nl);
            valueIndexMap.put(nl, knl);
        }

        String tf = nl.substring(0, nl.indexOf('('));
        int fv = FunctionEnum.getFunctionEnum(tf).getValue();

        // XXX offset
        knps.setInt(1, eqId + 1);
        knps.setInt(2, fv);
        knps.setInt(3, knl);
        knps.addBatch();
    }

    knps.executeBatch();

    PreparedStatement knpps = getPreparedStatement(KAM_NODE_PARAMETER_SQL);
    PreparedStatement knups = getPreparedStatement(KAM_PARAMETER_UUID_SQL);
    final Map<Integer, Integer> gpi = pt.getGlobalIndex();
    final Map<Integer, SkinnyUUID> guu = pt.getGlobalUUIDs();

    final Set<Integer> seenKamNodes = sizedHashSet(nodes.size());
    final Set<Integer> seenGlobalIds = sizedHashSet(gpi.size());

    // load kam node parameters
    final Map<Integer, Integer> nti = pnt.getNodeTermIndex();
    for (int i = 0, n = nodes.size(); i < n; i++) {
        final Integer ti = nti.get(i);

        // XXX offset
        final Integer eqId = eqn.get(i) + 1;

        // don't add kam node parameters if we have already seen the
        // equivalent node.
        if (seenKamNodes.contains(eqId)) {
            continue;
        }

        List<Integer> gtpl = tpmtidx.get(ti);
        if (hasItems(gtpl)) {
            for (int j = 0; j < gtpl.size(); j++) {
                // get parameter index, retrieve global parameter, and set
                Integer parameterIndex = gtpl.get(j);

                final Integer gid = gpi.get(parameterIndex);

                // XXX offset
                knpps.setInt(1, gid + 1);
                knpps.setInt(2, eqId);
                knpps.setInt(3, j);
                knpps.addBatch();

                if (seenGlobalIds.contains(gid)) {
                    continue;
                }
                SkinnyUUID uuid = guu.get(gid);
                if (uuid != null) {
                    // XXX offset
                    knups.setInt(1, gid + 1);
                    knups.setLong(2, uuid.getMostSignificantBits());
                    knups.setLong(3, uuid.getLeastSignificantBits());
                    knups.addBatch();
                }
                seenGlobalIds.add(gid);
            }
        }

        // track equivalent kam node
        seenKamNodes.add(eqId);
    }

    knpps.executeBatch();

    PreparedStatement pps = getPreparedStatement(TERM_PARAMETER_SQL);
    PreparedStatement tps = getPreparedStatement(TERM_SQL);
    final Map<Integer, Integer> termNodes = pnt.getTermNodeIndex();
    // load term parameters and terms
    int tpindex = 0;
    for (int ti = 0; ti < terms.size(); ti++) {
        String t = terms.get(ti);

        // find node equivalence
        Integer nodeId = termNodes.get(ti);
        // XXX offset
        final Integer eqId = eqn.get(nodeId) + 1;

        Integer ctl = valueIndexMap.get(t);
        if (ctl == null) {
            ctl = saveObject(1, t);
            valueIndexMap.put(t, ctl);
        }

        // XXX offset
        tps.setInt(1, ti + 1);
        // XXX offset
        tps.setInt(2, eqId);
        tps.setInt(3, ctl);
        tps.addBatch();

        int ord = 0;
        List<Integer> pl = tpmtidx.get(ti);
        if (hasItems(pl)) {
            for (Integer pi : pl) {
                TableParameter p = pt.getIndexTableParameter().get(pi);
                Integer cpv = valueIndexMap.get(p.getValue());
                if (cpv == null) {
                    cpv = saveObject(1, p.getValue());
                    valueIndexMap.put(p.getValue(), cpv);
                }

                final Integer gid = gpi.get(pi);

                // XXX offset
                pps.setInt(1, tpindex + 1);
                // XXX offset
                pps.setInt(2, gid + 1);
                // XXX offset
                pps.setInt(3, ti + 1);

                // find index for the parameter's namespace
                TableNamespace tn = p.getNamespace();
                Integer ni = null;
                if (tn != null) {
                    ni = nt.getNamespaceIndex().get(tn);
                }

                if (ni == null) {
                    pps.setNull(4, Types.INTEGER);
                } else {
                    // XXX offset
                    pps.setInt(4, ni + 1);
                }

                pps.setInt(5, cpv);
                pps.setInt(6, ord);
                pps.addBatch();

                ord++;
                tpindex++;

                if (seenGlobalIds.contains(gid)) {
                    continue;
                }
                SkinnyUUID uuid = guu.get(gid);
                if (uuid != null) {
                    // XXX offset
                    knups.setInt(1, gid + 1);
                    knups.setLong(2, uuid.getMostSignificantBits());
                    knups.setLong(3, uuid.getLeastSignificantBits());
                    knups.addBatch();
                }
                seenGlobalIds.add(gid);
            }
        }
    }

    tps.executeBatch();
    pps.executeBatch();
    knups.executeBatch();
}

From source file:org.openbel.framework.core.kam.JdbcKAMLoaderImpl.java

/**
 * {@inheritDoc}
 */
@Override
public void loadAnnotationDefinitions(AnnotationDefinitionTable adt) throws SQLException {
    PreparedStatement adps = getPreparedStatement(ANNOTATION_DEFINITION_SQL);

    for (Map.Entry<Integer, TableAnnotationDefinition> ade : adt.getIndexDefinition().entrySet()) {
        TableAnnotationDefinition ad = ade.getValue();
        adps.setInt(1, (ade.getKey() + 1));
        adps.setString(2, ad.getName());

        if (AnnotationDefinitionTable.URL_ANNOTATION_TYPE_ID == ad.getAnnotationType()) {
            adps.setNull(3, Types.VARCHAR);
            adps.setNull(4, Types.VARCHAR);
        } else {
            adps.setString(3, StringUtils.abbreviate(ad.getDescription(), MAX_MEDIUM_VARCHAR_LENGTH));
            adps.setString(4, StringUtils.abbreviate(ad.getUsage(), MAX_MEDIUM_VARCHAR_LENGTH));
        }

        final int oid;
        final String domain = ad.getAnnotationDomain();
        final Integer objectId = valueIndexMap.get(domain);
        if (objectId != null) {
            oid = objectId;
        } else {
            oid = saveObject(1, domain);
            valueIndexMap.put(domain, oid);
        }

        adps.setInt(5, oid);
        adps.setInt(6, ad.getAnnotationType());
        adps.addBatch();
    }

    adps.executeBatch();

    // associate annotation definitions to documents
    PreparedStatement dadmps = getPreparedStatement(DOCUMENT_ANNOTATION_DEFINITION_MAP_SQL);
    Map<Integer, Set<Integer>> dadm = adt.getDocumentAnnotationDefinitions();

    Set<Entry<Integer, Set<Integer>>> entries = dadm.entrySet();
    for (final Entry<Integer, Set<Integer>> entry : entries) {
        final Integer key = entry.getKey();
        for (final Integer adid : entry.getValue()) {
            dadmps.setInt(1, (key + 1));
            dadmps.setInt(2, (adid + 1));
            dadmps.addBatch();
        }
        dadmps.executeBatch();
    }
}

From source file:org.nuxeo.ecm.core.storage.sql.jdbc.JDBCRowMapper.java

/**
 * Inserts multiple rows, all for the same table.
 */
protected void insertSimpleRows(String tableName, List<Row> rows) throws StorageException {
    if (rows.isEmpty()) {
        return;
    }
    String sql = sqlInfo.getInsertSql(tableName);
    if (sql == null) {
        throw new StorageException("Unknown table: " + tableName);
    }
    String loggedSql = supportsBatchUpdates && rows.size() > 1 ? sql + " -- BATCHED" : sql;
    List<Column> columns = sqlInfo.getInsertColumns(tableName);
    try {
        PreparedStatement ps = connection.prepareStatement(sql);
        try {
            int batch = 0;
            for (Row row : rows) {
                batch++;
                if (logger.isLogEnabled()) {
                    logger.logSQL(loggedSql, columns, row);
                }
                int i = 1;
                for (Column column : columns) {
                    column.setToPreparedStatement(ps, i++, row.get(column.getKey()));
                }
                if (supportsBatchUpdates) {
                    ps.addBatch();
                    if (batch % UPDATE_BATCH_SIZE == 0) {
                        ps.executeBatch();
                        countExecute();
                    }
                } else {
                    ps.execute();
                    countExecute();
                }
            }
            if (supportsBatchUpdates) {
                ps.executeBatch();
                countExecute();
            }
        } finally {
            closeStatement(ps);
        }
    } catch (Exception e) {
        checkConnectionReset(e);
        if (e instanceof BatchUpdateException) {
            BatchUpdateException bue = (BatchUpdateException) e;
            if (e.getCause() == null && bue.getNextException() != null) {
                // provide a readable cause in the stack trace
                e.initCause(bue.getNextException());
            }
        }
        checkConcurrentUpdate(e);
        throw new StorageException("Could not insert: " + sql, e);
    }
}

From source file:com.wso2telco.dep.operatorservice.dao.OperatorDAO.java

public void insertBlacklistAggregatoRows(final Integer appID, final String subscriber, final int operatorid,
        final String[] merchants) throws SQLException, Exception {

    Connection con = null;
    final StringBuilder sql = new StringBuilder();
    PreparedStatement pst = null;

    try {

        con = DbUtils.getDbConnection(DataSourceNames.WSO2TELCO_DEP_DB);

        sql.append(" INSERT INTO ");
        sql.append(OparatorTable.MERCHANT_OPCO_BLACKLIST.getTObject());
        sql.append(" (application_id, operator_id, subscriber, merchant)");
        sql.append("VALUES (?, ?, ?, ?) ");

        pst = con.prepareStatement(sql.toString());

        /**
         * Set autocommit off to handle the transaction
         */
        con.setAutoCommit(false);

        /**
         * each merchant log as black listed
         */
        for (String merchant : merchants) {

            if (appID == null) {
                pst.setNull(1, Types.INTEGER);
            } else {
                pst.setInt(1, appID);
            }
            pst.setInt(2, operatorid);
            pst.setString(3, subscriber);
            pst.setString(4, merchant);
            pst.addBatch();
        }

        log.debug("sql query in insertBlacklistAggregatoRows : " + pst);

        pst.executeBatch();

        /**
         * commit the transaction if all success
         */
        con.commit();
    } catch (SQLException e) {

        log.error("database operation error in insertBlacklistAggregatoRows : ", e);
        /**
         * rollback if Exception occurs
         */
        con.rollback();

        /**
         * throw it into upper layer
         */
        throw e;
    } catch (Exception e) {

        log.error("error in insertBlacklistAggregatoRows : ", e);
        /**
         * rollback if Exception occurs
         */
        con.rollback();

        /**
         * throw it into upper layer
         */
        throw e;
    } finally {

        DbUtils.closeAllConnections(pst, con, null);
    }
}

From source file:com.feedzai.commons.sql.abstraction.engine.AbstractDatabaseEngine.java

/**
 * Closes the connection to the database.
 */
@Override
public synchronized void close() {
    try {
        if (properties.isSchemaPolicyCreateDrop()) {
            for (Map.Entry<String, MappedEntity> me : entities.entrySet()) {
                try {
                    // Flush first
                    final PreparedStatement insert = me.getValue().getInsert();
                    final PreparedStatement insertReturning = me.getValue().getInsertReturning();
                    try {
                        insert.executeBatch();

                        if (insertReturning != null) {
                            insertReturning.executeBatch();

                        }
                    } catch (SQLException ex) {
                        logger.debug(String.format("Failed to flush before dropping entity '%s'",
                                me.getValue().getEntity().getName()), ex);
                    } finally {
                        if (insert != null) {
                            try {
                                insert.close();
                            } catch (Exception e) {
                                logger.trace("Could not close prepared statement.", e);
                            }
                        }

                        if (insertReturning != null) {
                            try {
                                insertReturning.close();
                            } catch (Exception e) {
                                logger.trace("Could not close prepared statement.", e);
                            }
                        }
                    }

                    dropEntity(me.getValue().getEntity());
                } catch (DatabaseEngineException ex) {
                    logger.debug(
                            String.format("Failed to drop entity '%s'", me.getValue().getEntity().getName()),
                            ex);
                }
            }
        }

        conn.close();
        logger.debug("Connection to database closed");
    } catch (SQLException ex) {
        logger.warn("Unable to close connection");
    }
}

From source file:edu.ncsa.sstde.indexing.postgis.PostgisIndexer.java

private void writeIndex(CloseableIteration<? extends BindingSet, QueryEvaluationException> iterator,
        boolean autoCommit) throws SQLException, QueryEvaluationException {
    PreparedStatement statement = getInsertStatment();
    boolean oldAutoCommit = statement.getConnection().getAutoCommit();
    statement.getConnection().setAutoCommit(autoCommit);
    // Object[] varNames = this.getSettings().getIndexGraph().getVarNames()
    // .toArray();
    // int[] types = getSQLTypes(varNames);

    int cache = 0;
    int batchsize = this.getSettings().getBatchSize();
    long count = 0;
    DIGEST.reset();
    for (; iterator.hasNext(); cache++) {

        BindingSet bindingSet = iterator.next();
        setInsertValue(this.getVarNames(), this.getSQLTypes(), statement, bindingSet);

        try {
            if (autoCommit) {
                statement.executeUpdate();
            } else {
                statement.addBatch();
            }

        } catch (SQLException e) {
            e.printStackTrace();
        }

        if (cache > batchsize) {
            if (!autoCommit) {
                statement.executeBatch();
                statement.getConnection().commit();
            }

            LOG.info(this.name + " " + (count += cache));
            cache = 0;
        }
    }

    if (!autoCommit) {
        statement.getConnection().commit();
    }
    statement.getConnection().setAutoCommit(oldAutoCommit);
}

From source file:com.wso2telco.dep.mediator.dao.SMSMessagingDAO.java

/**
 * Operatorsubs entry.
 *
 * @param domainsubs
 *            the domainsubs
 * @param moSubscriptionId
 *            the moSubscriptionId
 * @return true, if successful
 * @throws Exception
 *             the exception
 */
public void operatorSubsEntry(List<OperatorSubscriptionDTO> domainsubs, Integer moSubscriptionId)
        throws SQLException, Exception {

    Connection con = null;
    PreparedStatement insertStatement = null;
    PreparedStatement updateStatement = null;

    try {

        con = DbUtils.getDbConnection(DataSourceNames.WSO2TELCO_DEP_DB);
        if (con == null) {

            throw new Exception("Connection not found");
        }

        /**
         * Set autocommit off to handle the transaction
         */
        con.setAutoCommit(false);

        StringBuilder queryString = new StringBuilder("INSERT INTO ");
        queryString.append(DatabaseTables.OPERATORSUBS.getTableName());
        queryString.append(" (mo_subscription_did, domainurl, operator) ");
        queryString.append("VALUES (?, ?, ?)");

        insertStatement = con.prepareStatement(queryString.toString());

        for (OperatorSubscriptionDTO d : domainsubs) {

            insertStatement.setInt(1, moSubscriptionId);
            insertStatement.setString(2, d.getDomain());
            insertStatement.setString(3, d.getOperator());

            insertStatement.addBatch();
        }

        log.debug("sql query in operatorSubsEntry : " + insertStatement);

        insertStatement.executeBatch();

        StringBuilder updateQueryString = new StringBuilder("UPDATE ");
        updateQueryString.append(DatabaseTables.SUBSCRIPTIONS.getTableName());
        updateQueryString.append(" SET is_active = ?");
        updateQueryString.append(" WHERE mo_subscription_did = ?");

        updateStatement = con.prepareStatement(updateQueryString.toString());

        updateStatement.setInt(1, 1);
        updateStatement.setInt(2, moSubscriptionId);

        log.debug("sql query in operatorSubsEntry : " + updateStatement);

        updateStatement.executeUpdate();

        /**
         * commit the transaction if all success
         */
        con.commit();
    } catch (SQLException e) {

        /**
         * rollback if Exception occurs
         */
        con.rollback();

        log.error("database operation error in operatorSubsEntry : ", e);
        throw e;
    } catch (Exception e) {

        /**
         * rollback if Exception occurs
         */
        con.rollback();

        log.error("error in operatorSubsEntry : ", e);
        throw e;
    } finally {

        DbUtils.closeAllConnections(insertStatement, con, null);
        DbUtils.closeAllConnections(updateStatement, null, null);
    }
}

From source file:org.opencb.cellbase.app.transform.GeneParser.java

@Deprecated
private void insertGenomeSequence(String sequenceName, boolean haplotypeSequenceType,
        PreparedStatement sqlInsert, StringBuilder sequenceStringBuilder) throws SQLException {
    int chunk = 0;
    int start = 1;
    int end = CHUNK_SIZE - 1;
    // if the sequence read is not a haplotype (HAP) sequence, we store it in SQLite
    if (!haplotypeSequenceType && !sequenceName.contains("PATCH")) {
        logger.info("Indexing genome sequence {} ...", sequenceName);
        sqlInsert.setString(5, sequenceName);
        // chromosome sequence length could be shorter than CHUNK_SIZE
        if (sequenceStringBuilder.length() < CHUNK_SIZE) {
            sqlInsert.setString(1, sequenceName + "_" + chunk + "_" + chunkIdSuffix);
            sqlInsert.setInt(2, start);
            sqlInsert.setInt(3, sequenceStringBuilder.length() - 1);
            sqlInsert.setString(4, sequenceStringBuilder.toString());

            // Sequence to store is larger than CHUNK_SIZE
        } else {
            int sequenceLength = sequenceStringBuilder.length();

            sqlConn.setAutoCommit(false);
            while (start < sequenceLength) {
                if (chunk % 10000 == 0 && chunk != 0) {
                    sqlInsert.executeBatch();
                    sqlConn.commit();
                }

                // chunkId is common for all the options
                sqlInsert.setString(1, sequenceName + "_" + chunk + "_" + chunkIdSuffix);
                if (start == 1) { // First chunk of the chromosome
                    // First chunk contains CHUNK_SIZE-1 nucleotides as index start at position 1 but must end at 1999
                    //                                        chunkSequence = sequenceStringBuilder.substring(start - 1, CHUNK_SIZE - 1);
                    //                                        genomeSequenceChunk = new GenomeSequenceChunk(chromosome, chromosome+"_"+chunk+"_"
                    // +chunkIdSuffix, start, end, sequenceType, sequenceAssembly, chunkSequence);
                    sqlInsert.setInt(2, start);
                    sqlInsert.setInt(3, end);
                    sqlInsert.setString(4, sequenceStringBuilder.substring(start - 1, CHUNK_SIZE - 1));

                    start += CHUNK_SIZE - 1;
                } else { // Regular chunk
                    if ((start + CHUNK_SIZE) < sequenceLength) {
                        sqlInsert.setInt(2, start);
                        sqlInsert.setInt(3, end);
                        sqlInsert.setString(4,
                                sequenceStringBuilder.substring(start - 1, start + CHUNK_SIZE - 1));
                        start += CHUNK_SIZE;
                    } else { // Last chunk of the chromosome
                        sqlInsert.setInt(2, start);
                        sqlInsert.setInt(3, sequenceLength);
                        sqlInsert.setString(4, sequenceStringBuilder.substring(start - 1, sequenceLength));
                        start = sequenceLength;
                    }
                }
                // we add the inserts in a batch
                sqlInsert.addBatch();

                end = start + CHUNK_SIZE - 1;
                chunk++;
            }

            sqlInsert.executeBatch();
            sqlConn.commit();

            sqlConn.setAutoCommit(true);
        }
    }
}