Example usage for java.sql PreparedStatement executeBatch

List of usage examples for java.sql PreparedStatement executeBatch

Introduction

This page lists usage examples for java.sql.PreparedStatement.executeBatch().

Prototype

int[] executeBatch() throws SQLException;

Document

Submits a batch of commands to the database for execution and if all commands execute successfully, returns an array of update counts.
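
Before the full examples below, here is a minimal sketch of the typical pattern: bind parameters, queue each command with addBatch(), then call executeBatch() and commit. It is not taken from any of the sources listed; the person table and the already-open Connection named conn are assumed purely for illustration.

// Minimal sketch (assumed table "person" and an open Connection "conn").
conn.setAutoCommit(false);
try (PreparedStatement ps = conn.prepareStatement("INSERT INTO person(id, name) VALUES(?, ?)")) {
    for (int i = 0; i < 10; i++) {
        ps.setInt(1, i);
        ps.setString(2, "name-" + i);
        ps.addBatch();                      // queue one INSERT per row
    }
    int[] updateCounts = ps.executeBatch(); // one entry per queued command
    conn.commit();
}

Each entry in the returned array is either an update count, Statement.SUCCESS_NO_INFO, or Statement.EXECUTE_FAILED (the latter is normally reported through a BatchUpdateException), which the examples below check in different ways.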

Usage

From source file:org.sonar.core.persistence.DbTemplate.java

public DbTemplate copyTable(DataSource source, DataSource dest, String table, String... whereClauses) {
    LOG.debug("Copy table {}", table);

    String selectQuery = selectQuery(table, whereClauses);
    truncate(dest, table);

    Connection sourceConnection = null;
    Statement sourceStatement = null;
    ResultSet sourceResultSet = null;
    Connection destConnection = null;
    ResultSet destResultSet = null;
    PreparedStatement destStatement = null;
    try {
        sourceConnection = source.getConnection();
        sourceStatement = sourceConnection.createStatement();
        sourceResultSet = sourceStatement.executeQuery(selectQuery);

        if (sourceResultSet.next()) {
            List<String> columnNames = columnNames(sourceResultSet);
            int colCount = columnNames.size();

            destConnection = dest.getConnection();
            destConnection.setAutoCommit(false);

            String insertSql = new StringBuilder().append("INSERT INTO ").append(table).append("(")
                    .append(Joiner.on(",").join(columnNames)).append(") VALUES(")
                    .append(StringUtils.repeat("?", ",", colCount)).append(")").toString();
            destStatement = destConnection.prepareStatement(insertSql);
            int count = 0;
            do {
                for (int col = 1; col <= colCount; col++) {
                    Object value = sourceResultSet.getObject(columnNames.get(col - 1));
                    destStatement.setObject(col, value);
                }
                count++;
                destStatement.addBatch();
                if (count % BatchSession.MAX_BATCH_SIZE == 0) {
                    destStatement.executeBatch();
                    destConnection.commit();

                }
            } while (sourceResultSet.next());

            destStatement.executeBatch();
            destConnection.commit();
        }
    } catch (SQLException e) {
        LOG.error("Fail to copy table " + table, e);
        throw new IllegalStateException("Fail to copy table " + table, e);
    } finally {
        DatabaseUtils.closeQuietly(destStatement);
        DatabaseUtils.closeQuietly(destResultSet);
        DatabaseUtils.closeQuietly(destConnection);
        DatabaseUtils.closeQuietly(sourceResultSet);
        DatabaseUtils.closeQuietly(sourceStatement);
        DatabaseUtils.closeQuietly(sourceConnection);
    }

    return this;
}

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.CreateMetadataAlterHelper.java

protected void doInPreparedStatement(PreparedStatement ps, String metaModelCode, boolean hasChildMetaModel,
        List<AbstractMetadata> metadatas) throws SQLException {
    try {
        for (AbstractMetadata metadata : metadatas) {
            // sequence ID
            String sequenceId = sequenceDao.getUuid();
            ps.setString(1, sequenceId);

            // task instance ID
            ps.setString(3, taskInstanceId);
            // metadata ID
            ps.setString(4, metadata.getId());
            // meta model code
            ps.setString(5, metaModelCode);
            // user ID
            ps.setString(7, userId);

            // ALTERATION_TIME
            ps.setLong(9, startTime);

            // disabled 2010-05-18 fbchen
            //ps.setString(3, genAttrs(metadata));

            setPs(ps, metadata, metaModelCode, hasChildMetaModel);

            String parentId = metadata.getParentMetadata().getId();
            if (parentId == null || parentId.equals("")) {
                parentId = "0";
            }
            ps.setString(11, parentId);
            ps.addBatch();
            ps.clearParameters();

            if (++super.count % super.batchSize == 0) {
                ps.executeBatch();
                ps.clearBatch();
            }

        }
    } catch (SQLException e) {
        // Log the failure and continue without rethrowing.
        log.warn("Failed to execute the metadata alteration batch", e);
    }

}

From source file:org.openmrs.util.databasechange.ConceptReferenceTermChangeSet.java

/**
 * Convenience method that inserts rows into the concept reference term table. The
 * concept_map_id values become the concept_reference_term_id values
 *
 * @param connection the current database connection
 * @param listOfPropertyValueMaps a list of property and value maps for the objects to insert
 * @throws CustomChangeException
 */
private void insertRows(JdbcConnection connection, List<Map<String, Object>> listOfPropertyValueMaps)
        throws CustomChangeException {
    if (CollectionUtils.isNotEmpty(listOfPropertyValueMaps)) {
        PreparedStatement pStmt = null;
        try {
            connection.setAutoCommit(false);
            pStmt = connection.prepareStatement("INSERT INTO concept_reference_term"
                    + "(concept_reference_term_id, concept_source_id, code, description, creator, date_created, retired, uuid) "
                    + "VALUES(?, ?, ?, ?, ?, ?, ?, ?)");

            for (Map<String, Object> propertyValueMap : listOfPropertyValueMaps) {
                pStmt.setInt(1, (Integer) propertyValueMap.get("termId"));
                pStmt.setInt(2, (Integer) propertyValueMap.get("sourceId"));
                pStmt.setString(3, propertyValueMap.get("code").toString());
                pStmt.setString(4, (propertyValueMap.get("description") == null) ? null
                        : propertyValueMap.get("description").toString());
                pStmt.setInt(5, (Integer) propertyValueMap.get("creator"));
                pStmt.setDate(6, (Date) propertyValueMap.get("dateCreated"));
                pStmt.setBoolean(7, false);
                pStmt.setString(8, propertyValueMap.get("uuid").toString());

                pStmt.addBatch();
            }

            try {
                int[] updateCounts = pStmt.executeBatch();
                for (int i = 0; i < updateCounts.length; i++) {
                    if (updateCounts[i] > -1) {
                        log.debug("Successfully executed: updateCount=" + updateCounts[i]);
                    } else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
                        log.debug("Successfully executed; No Success info");
                    } else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                        log.warn("Failed to execute update");
                    }
                }

                log.debug("Committing updates...");
                connection.commit();
            } catch (BatchUpdateException be) {
                log.warn("Error generated while processsing batch update", be);
                int[] updateCounts = be.getUpdateCounts();

                for (int i = 0; i < updateCounts.length; i++) {
                    if (updateCounts[i] > -1) {
                        log.warn("Executed with exception: updateCount=" + updateCounts[i]);
                    } else if (updateCounts[i] == Statement.SUCCESS_NO_INFO) {
                        log.warn("Executed with exception; No Success info");
                    } else if (updateCounts[i] == Statement.EXECUTE_FAILED) {
                        log.warn("Failed to execute update with exception");
                    }
                }

                try {
                    log.warn("Rolling back batch", be);
                    connection.rollback();
                } catch (Exception rbe) {
                    log.warn("Error generated while rolling back batch update", be);
                }

                //marks the changeset as a failed one
                throw new CustomChangeException(
                        "Failed to generate concept reference terms from existing concept mappings.");
            }
        } catch (DatabaseException e) {
            throw new CustomChangeException("Error generated", e);
        } catch (SQLException e) {
            throw new CustomChangeException("Error generated", e);
        } finally {
            //reset to auto commit mode
            try {
                connection.setAutoCommit(true);
            } catch (DatabaseException e) {
                log.warn("Failed to reset auto commit back to true", e);
            }

            if (pStmt != null) {
                try {
                    pStmt.close();
                } catch (SQLException e) {
                    log.warn("Failed to close the prepared statement object");
                }
            }
        }
    } else {
        log.error("List of property value maps is null or empty");
    }
}

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void moveConfigOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_CONFIG_OPERATION WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);

        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();

        sql = "INSERT INTO DM_CONFIG_OPERATION_ARCH VALUES(?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);

        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("OPERATION_ID"));
            stmt2.setBytes(2, rs.getBytes("OPERATION_CONFIG"));
            stmt2.setInt(3, rs.getInt("ENABLED"));
            stmt2.setTimestamp(4, this.currentTimestamp);
            stmt2.addBatch();

            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [CONFIG_OPERATION] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_CONFIG_OPERATION"
                + "  WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving config operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}

From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java

@Override
public void moveProfileOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_PROFILE_OPERATION WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);

        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();

        sql = "INSERT INTO DM_PROFILE_OPERATION_ARCH VALUES(?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);

        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("OPERATION_ID"));
            stmt2.setInt(2, rs.getInt("ENABLED"));
            stmt2.setBytes(3, rs.getBytes("OPERATION_DETAILS"));
            stmt2.setTimestamp(4, this.currentTimestamp);
            stmt2.addBatch();

            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [PROFILE_OPERATION] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_PROFILE_OPERATION"
                + "  WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving profile operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}

From source file:org.openvpms.tools.data.migration.DetailsMigrator.java

private void export(Connection connection, String from, String to, String key) throws SQLException {
    System.out.println("Migrating details from " + from + " to " + to);
    Date start = new Date();
    PreparedStatement select = connection
            .prepareStatement("select " + key + ", details from " + from + " where details is not null");
    PreparedStatement insert = connection.prepareStatement(
            "insert into " + to + " (" + key + ", type, name, value) " + "values (?, ?, ?, ?)");
    ResultSet set = select.executeQuery();
    int input = 0;
    int output = 0;
    int batch = 0;
    while (set.next()) {
        ++input;
        long id = set.getLong(1);
        String details = set.getString(2);
        if (!StringUtils.isEmpty(details)) {
            DynamicAttributeMap map = (DynamicAttributeMap) stream.fromXML(details);
            Map<String, Serializable> attributes = map.getAttributes();
            for (Map.Entry<String, Serializable> entry : attributes.entrySet()) {
                if (entry.getValue() != null) {
                    // don't insert nulls. See OBF-161
                    String name = entry.getKey();
                    TypedValue value = new TypedValue(entry.getValue());
                    insert.setLong(1, id);
                    insert.setString(2, value.getType());
                    insert.setString(3, name);
                    insert.setString(4, value.getValue());
                    insert.addBatch();
                    ++output;
                }
            }
        }
        ++batch;
        if (batch >= 1000) {
            // commit every 1000 input rows
            insert.executeBatch();
            connection.commit();
            batch = 0;
        }
    }
    if (batch != 0) {
        insert.executeBatch();
        connection.commit();
    }
    set.close();
    insert.close();
    select.close();
    Date end = new Date();
    double elapsed = (end.getTime() - start.getTime()) / 1000.0;
    System.out.printf("Processed %d rows, generating %d rows in %.2fs\n", input, output, elapsed);
}

From source file:com.esofthead.mycollab.module.project.service.ibatis.GanttAssignmentServiceImpl.java

private void massUpdateMilestoneGanttItems(final List<MilestoneGanttItem> milestoneGanttItems,
        Integer sAccountId) {
    if (CollectionUtils.isNotEmpty(milestoneGanttItems)) {
        Lock lock = DistributionLockUtil.getLock("gantt-milestone-service" + sAccountId);
        try {
            final long now = new GregorianCalendar().getTimeInMillis();
            if (lock.tryLock(30, TimeUnit.SECONDS)) {
                try (Connection connection = dataSource.getConnection()) {
                    connection.setAutoCommit(false);
                    PreparedStatement preparedStatement = connection.prepareStatement(
                            "UPDATE `m_prj_milestone` SET " + "name = ?, `startdate` = ?, `enddate` = ?, "
                                    + "`lastUpdatedTime`=?, `owner`=?, `ganttIndex`=? WHERE `id` = ?");
                    for (int i = 0; i < milestoneGanttItems.size(); i++) {
                        preparedStatement.setString(1, milestoneGanttItems.get(i).getName());
                        preparedStatement.setDate(2,
                                getDateWithNullValue(milestoneGanttItems.get(i).getStartDate()));
                        preparedStatement.setDate(3,
                                getDateWithNullValue(milestoneGanttItems.get(i).getEndDate()));
                        preparedStatement.setDate(4, new Date(now));
                        preparedStatement.setString(5, milestoneGanttItems.get(i).getAssignUser());
                        preparedStatement.setInt(6, milestoneGanttItems.get(i).getGanttIndex());
                        preparedStatement.setInt(7, milestoneGanttItems.get(i).getId());
                        preparedStatement.addBatch();

                    }
                    preparedStatement.executeBatch();
                    connection.commit();
                }
            }
        } catch (Exception e) {
            throw new MyCollabException(e);
        } finally {
            DistributionLockUtil.removeLock("gantt-milestone-service" + sAccountId);
            lock.unlock();
        }
    }
}

From source file:org.wso2.carbon.is.migration.client.MigrateFrom5to510.java

public void migrateUMData() {
    Connection identityConnection = null;
    Connection umConnection = null;

    PreparedStatement selectServiceProviders = null;
    PreparedStatement updateRole = null;

    ResultSet selectServiceProvidersRS = null;

    try {
        identityConnection = dataSource.getConnection();
        umConnection = umDataSource.getConnection();

        identityConnection.setAutoCommit(false);
        umConnection.setAutoCommit(false);

        selectServiceProviders = identityConnection.prepareStatement(SQLQueries.LOAD_APP_NAMES);
        selectServiceProvidersRS = selectServiceProviders.executeQuery();

        updateRole = umConnection.prepareStatement(SQLQueries.UPDATE_ROLES);
        while (selectServiceProvidersRS.next()) {
            String appName = selectServiceProvidersRS.getString("APP_NAME");
            int tenantId = selectServiceProvidersRS.getInt("TENANT_ID");
            updateRole.setString(1,
                    ApplicationConstants.APPLICATION_DOMAIN + UserCoreConstants.DOMAIN_SEPARATOR + appName);
            updateRole.setString(2, appName);
            updateRole.setInt(3, tenantId);
            updateRole.addBatch();
        }
        updateRole.executeBatch();

        identityConnection.commit();
        umConnection.commit();
    } catch (SQLException e) {
        log.error(e);
    } finally {
        IdentityDatabaseUtil.closeResultSet(selectServiceProvidersRS);
        IdentityDatabaseUtil.closeStatement(selectServiceProviders);
        IdentityDatabaseUtil.closeStatement(updateRole);
        IdentityDatabaseUtil.closeConnection(identityConnection);
        IdentityDatabaseUtil.closeConnection(umConnection);
    }
}

From source file:it.cnr.icar.eric.server.persistence.rdb.InternationalStringDAO.java

public void insert(String parentId, InternationalStringType is) throws RegistryException {
    PreparedStatement pstmt = null;

    try {
        String str = "INSERT INTO " + getTableName() + " VALUES(?, " + // charsetName
                "?," + // lang
                "?, " + // value
                "?)"; // parentId
        pstmt = context.getConnection().prepareStatement(str);

        if (is != null) {
            Iterator<LocalizedStringType> lsItems = is.getLocalizedString().iterator();

            while (lsItems.hasNext()) {
                LocalizedStringType ebLocalizedStringType = lsItems.next();
                @SuppressWarnings("unused")
                String charset = ebLocalizedStringType.getCharset();
                String lang = ebLocalizedStringType.getLang();
                String value = ebLocalizedStringType.getValue();
                String charsetName = ebLocalizedStringType.getCharset();

                if (value != null && value.length() > 0) {
                    pstmt.setString(1, charsetName);
                    pstmt.setString(2, lang);
                    pstmt.setString(3, value);
                    pstmt.setString(4, parentId);

                    log.trace("stmt = " + pstmt.toString());
                    pstmt.addBatch();
                }
            }
        }

        if (is != null) {
            @SuppressWarnings("unused")
            int[] updateCounts = pstmt.executeBatch();
        }
    } catch (SQLException e) {
        log.error(ServerResourceBundle.getInstance().getString("message.CaughtException1"), e);
        throw new RegistryException(e);
    } finally {
        closeStatement(pstmt);
    }
}

From source file:uta.ak.CollectTweets.java

public void collectTweetsByKeyWords(String keyWords, String sinceDate, String untilDate, String tag) {
    try {

        ConfigurationBuilder cb = new ConfigurationBuilder();
        cb.setDebugEnabled(true).setOAuthConsumerKey("LuhVZOucqdHX6x0lcVgJO6QK3")
                .setOAuthConsumerSecret("6S7zbGLvHMXDMgRXq7jRIA6QmMpdI8i5IJNpnjlB55vpHpFMpj")
                .setOAuthAccessToken("861637891-kLunD37VRY8ipAK3TVOA0YKOKxeidliTqMtNb7wf")
                .setOAuthAccessTokenSecret("vcKDxs6qHnEE8fhIJr5ktDcTbPGql5o3cNtZuztZwPYl4");
        TwitterFactory tf = new TwitterFactory(cb.build());
        Twitter twitter = tf.getInstance();

        Connection con = null; //MYSQL
        Class.forName("com.mysql.jdbc.Driver").newInstance(); //MYSQL
        con = DriverManager.getConnection("jdbc:mysql://127.0.0.1:3306/USTTMP", "root", "root.123"); //MYSQL
        System.out.println("connection yes");

        String insertSQL = "INSERT INTO c_rawtext(mme_lastupdate, mme_updater, title, text, tag, text_createdate) VALUES (NOW(), \"AK\", ?, ?, ?, ?)";
        PreparedStatement insertPS = con.prepareStatement(insertSQL);

        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DATE, 1);
        SimpleDateFormat format1 = new SimpleDateFormat("yyyy-MM-dd");

        Query query = new Query(keyWords);
        query.setSince(sinceDate);
        query.setUntil(untilDate);
        query.setCount(100);
        query.setLang("en");

        QueryResult result = twitter.search(query);
        for (Status status : result.getTweets()) {
            //                    System.out.println("@" + status.getUser().getScreenName() +
            //                                       " | " + status.getCreatedAt().toString() +
            //                                       ":" + status.getText());
            //                    System.out.println("Inserting the record into the table...");

            String formattedDate = format1.format(status.getCreatedAt());

            insertPS.setString(1, status.getUser().getScreenName());
            insertPS.setString(2, status.getText());
            insertPS.setString(3, tag);
            insertPS.setString(4, formattedDate);
            insertPS.addBatch();
        }

        System.out.println("Start to insert records...");
        insertPS.clearParameters();
        int[] results = insertPS.executeBatch();

    } catch (Exception te) {
        te.printStackTrace();
        System.out.println("Failed: " + te.getMessage());
        System.exit(-1);
    }
}