Example usage for java.sql PreparedStatement executeBatch

Introduction

On this page you can find usage examples for java.sql PreparedStatement executeBatch.

Prototype

int[] executeBatch() throws SQLException;

Document

Submits a batch of commands to the database for execution and, if all commands execute successfully, returns an array of update counts.
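
Before the project examples below, here is a minimal, self-contained sketch of the usual addBatch()/executeBatch() pattern. It assumes an H2 in-memory database on the classpath purely so the sketch is runnable; the users table and the JDBC URL are hypothetical, and the pattern is the same for any JDBC driver.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;

public class ExecuteBatchSketch {
    public static void main(String[] args) throws SQLException {
        // Hypothetical in-memory H2 database, used only so the sketch can run as-is.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            try (Statement ddl = conn.createStatement()) {
                ddl.execute("CREATE TABLE users (id INT PRIMARY KEY, name VARCHAR(64))");
            }
            conn.setAutoCommit(false);
            try (PreparedStatement ps = conn.prepareStatement(
                    "INSERT INTO users (id, name) VALUES (?, ?)")) {
                String[][] rows = { { "1", "alice" }, { "2", "bob" }, { "3", "carol" } };
                for (String[] row : rows) {
                    ps.setInt(1, Integer.parseInt(row[0]));
                    ps.setString(2, row[1]);
                    ps.addBatch(); // queue this parameter set
                }
                // One update count is returned per queued command.
                int[] counts = ps.executeBatch();
                System.out.println("Update counts: " + Arrays.toString(counts));
            }
            conn.commit();
        }
    }
}

If any command in the batch fails, the driver throws a BatchUpdateException; committing only after executeBatch() succeeds, as most of the examples below also do, keeps the whole batch inside one transaction.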

Usage

From source file:com.dbmojo.QueryExecutor.java

/**
 * Execute a set of queries/updates encoded in JSON via <b>reqStr</b> in the
 * format <br><br><i>[{query:"select x from y",values:[]},{}...]
 * </i>.<br><br>The <b>update</b> flag determines whether or not to
 * treat each statement in the <b>reqStr</b> as an update or a query.
 */
public String execute(String reqStr, boolean update) throws Exception {

    if (DebugLog.enabled) {
        DebugLog.add(this, "Begin execute");
    }

    String message = "";
    ArrayList<HashMap> resultsList = new ArrayList<HashMap>();
    LinkedHashMap<String, PreparedStatement> bpstmts = null;
    Statement bstmt = null;

    try {
        this.open(update);

        if (update) {
            conn.setAutoCommit(false);
        }

        final JSONArray reqs = new JSONArray(reqStr);
        final boolean batchUpdates = reqs.length() > 1;

        //Connection MUST be ready to go
        if (this.conn == null) {
            throw new QueryExecutorException("Connection could not be checked out");
        }

        final int rLen = reqs.length();

        if (rLen <= 0) {
            throw new QueryExecutorException("No queries specified");
        }

        for (int r = 0; r < rLen; r++) {
            String rMessage = "";
            final JSONObject reqObj = reqs.getJSONObject(r);
            JSONArray tValues = reqObj.optJSONArray("values");
            String[] values = new String[(tValues != null ? tValues.length() : 0)];

            //Convert the JSONArray to a String[]
            for (int v = 0; v < values.length; v++) {
                values[v] = tValues.getString(v);
            }

            String query = reqObj.getString("query");
            final boolean prepared = values.length > 0; // bound values present => treat as a prepared statement

            //Can't move forward without a query!
            if (query == null || query.equals("")) {
                throw new QueryExecutorException("Query is missing");
            }

            //Here's where we need to do either an update or a query
            if (update) {
                if (batchUpdates) {
                    // This is NOT a prepared statement, so we need to create a
                    // batch statement to add all non-prepared statements to
                    if (!prepared && bstmt == null) {
                        bstmt = conn.createStatement();

                        // This IS a prepared statement and we need to create an
                        // ordered map of prepared statements so we can execute
                        // these statements together in order (sort of...)
                    } else if (prepared && bpstmts == null) {
                        bpstmts = new LinkedHashMap<String, PreparedStatement>();
                    }

                    addBatchUpdate(this.conn, prepared, query, values, bstmt, bpstmts);
                } else {

                    // Single update query / prepared statement to execute
                    executeUpdate(this.conn, prepared, query, values);
                }
            } else {
                resultsList.add(executeQuery(this.conn, prepared, query, values));
            }
        }

        //Execute Batch Updates
        if (update && batchUpdates) {
            //Execute any prepared statement batches that have been gathered.
            //If we have a SQL error an exception will be thrown
            if (bpstmts != null && bpstmts.size() > 0) {
                for (PreparedStatement p : bpstmts.values()) {
                    if (DebugLog.enabled) {
                        DebugLog.add(this, "Executing batch prepared statement");
                    }

                    p.executeBatch();
                }
            }

            //Execute all the standard SQL in a batch. 
            //If we have a SQL error an Exception will be thrown
            if (bstmt != null) {
                if (DebugLog.enabled) {
                    DebugLog.add(this, "Executing batch statement");
                }

                bstmt.executeBatch();
            }
        }

        if (update) {
            this.conn.commit();
        }

    } catch (JSONException je) {

        //There was an error parsing the JSON
        final String err = je.toString();
        if (DebugLog.enabled) {
            DebugLog.add(this, err);
        }

        resultsList.add(Util.getError(err));

    } catch (Exception e) {

        //We couldn't connect to the DB...      
        if (this.conn == null) {
            final String err = e.toString();

            if (ErrorLog.enabled) {
                ErrorLog.add(this, err, false);
            }

            resultsList.add(Util.getError(err));

            //There was an error executing the query/update
        } else if (update) {
            final String err = "Rolling Back Update(s): " + e;

            if (DebugLog.enabled) {
                DebugLog.add(this, err);
            }

            if (this.conn != null) {
                this.conn.rollback();
            }

            resultsList.add(Util.getError(err));
        } else {
            final String err = e.toString();

            if (DebugLog.enabled) {
                DebugLog.add(this, err);
            }

            resultsList.add(Util.getError(err));
        }

    } finally {
        //Cleanup batch statement (If applicable)
        if (bstmt != null) {
            try {
                if (DebugLog.enabled) {
                    DebugLog.add(this, "Closing batch statement");
                }
                bstmt.close();
            } catch (Exception se) {
                String err = "Error closing batch statement - " + se.toString();
                if (ErrorLog.enabled) {
                    ErrorLog.add(this, err, false);
                }
                resultsList.add(Util.getError(err));
            }
        }

        //Cleanup the batch prepared statement (If applicable)
        if (bpstmts != null) {
            for (PreparedStatement p : bpstmts.values()) {
                try {
                    if (p != null) {
                        p.close();
                        if (DebugLog.enabled) {
                            DebugLog.add(this, "Closing batch prepared stmnt");
                        }
                    }
                } catch (Exception pse) {
                    String err = "Error closing batch prepared stmnt - " + pse.toString();
                    if (ErrorLog.enabled) {
                        ErrorLog.add(this, err, false);
                    }
                    resultsList.add(Util.getError(err));
                }
            }
        }

        if (DebugLog.enabled) {
            DebugLog.add(this, "Closing connection");
        }

        //Cleanup DB connection (Always applicable)
        if (this.conn != null) {
            this.conn.close();
        }
    }

    if (DebugLog.enabled) {
        DebugLog.add(this, "End execute");
    }

    //UPDATE => [{message:"",status:"success"}]
    if (update && resultsList.size() <= 0) {

        HashMap pObj = new HashMap();
        pObj.put("message", "");
        pObj.put("status", "success");
        pObj.put("rows", new ArrayList());
        pObj.put("types", new ArrayList());
        pObj.put("cols", new ArrayList());
        resultsList.add(pObj);

    }

    //Serialize resultsArray into JSON
    return serializeToJson(resultsList);
}

From source file:org.apache.gora.sql.store.SqlStore.java

@Override
public void flush() throws IOException {
    Exception deferred = null;
    synchronized (writeCache) {
        for (PreparedStatement stmt : writeCache) {
            try {
                stmt.executeBatch();
            } catch (SQLException ex) {
                if (ex instanceof BatchUpdateException && ex.getMessage().contains("Incorrect string value")) {
                    // do nothing
                } else {
                    deferred = ex;
                    break;
                }
            }
        }
        for (PreparedStatement stmt : writeCache) {
            SqlUtils.close(stmt);
        }
        writeCache.clear();
    }
    if (deferred != null)
        throw new IOException(deferred);
    try {
        connection.commit();
    } catch (SQLException ex) {
        throw new IOException(ex);
    }
}

From source file:org.seasar.dbflute.s2dao.sqlhandler.TnAbstractBasicSqlHandler.java

protected int[] executeBatch(PreparedStatement ps, List<?> list) {
    final boolean saveMillis = isSaveMillis();
    if (saveMillis) {
        saveBeforeSqlTimeMillis();
    }
    hookSqlFireBefore();
    int[] batchResult = null;
    SQLException nativeCause = null;
    try {
        batchResult = ps.executeBatch();
        if (saveMillis) {
            saveAfterSqlTimeMillis();
        }
        return batchResult;
    } catch (SQLException e) {
        nativeCause = e;
        handleSQLException(e, ps, true);
        return null; // unreachable
    } finally {
        hookSqlFireFinally(batchResult, nativeCause);
    }
}

From source file:com.agiletec.plugins.jpcrowdsourcing.aps.system.services.idea.IdeaDAO.java

protected void updateTags(IIdea idea, Connection conn) {
    PreparedStatement stat = null;
    try {
        this.removeTags(idea.getId(), conn);

        stat = conn.prepareStatement(INSERT_IDEA_TAGS);
        this.addTagsRelationsRecord(idea, stat);
        stat.executeBatch();
        conn.commit();
    } catch (Throwable t) {
        _logger.error("Error updating Idea tags for {}", idea.getId(), t);
        throw new RuntimeException("Error updating Idea tags", t);
    } finally {
        closeDaoResources(null, stat, null);
    }
}

From source file:com.gs.obevo.db.impl.core.jdbc.JdbcHelper.java

public int[] batchUpdate(Connection conn, String sql, Object[][] argsArray) {
    PreparedStatement ps = null;
    try {
        this.jdbcHandler.preUpdate(conn, this);

        if (LOG.isDebugEnabled()) {
            LOG.debug("Executing batch update on connection {}: {} with args: {}", displayConnection(conn), sql,
                    argsArray);
        }
        ps = conn.prepareStatement(sql);
        for (Object[] args : argsArray) {
            for (int j = 0; j < args.length; j++) {
                if (!parameterTypeEnabled || args[j] != null) {
                    ps.setObject(j + 1, args[j]);
                } else {
                    ps.setNull(j + 1, ps.getParameterMetaData().getParameterType(j + 1));
                }
            }
            ps.addBatch();
        }

        return ps.executeBatch();
    } catch (SQLException e) {
        LOG.error("Error during batch execution; will print out the full batch stack trace: ");
        this.logSqlBatchException(e, 0);
        throw new DataAccessException(e);
    } finally {
        DbUtils.closeQuietly(ps);
    }
}

From source file:connectivity.connection.java

private String addUser(JSONObject user) throws SQLException {
    //System.out.println(user.get("id"));
    PreparedStatement ps = con.prepareStatement("Select user_id from users where user_id=?");
    ps.setString(1, user.get("id").toString());
    ResultSet rs = ps.executeQuery();
    if (rs.next()) {
        return user.get("id").toString();
    } else {
        ps = con.prepareStatement(
                "INSERT INTO `users` (`user_id`, `firstname`, `gender`, `photo`) VALUES (?, ?, ?, ?);");
        ps.setString(1, user.get("id").toString());
        ps.setString(2, user.get("firstName").toString());
        ps.setString(3, user.get("gender").toString());
        ps.setString(4, gson.toJson(user.get("photo")));
        ps.addBatch();
        ps.executeBatch();
        return user.get("id").toString();
    }
}

From source file:info.naiv.lab.java.tool.sqlite.exporter.component.DataAccess.java

/**
 *
 * @param sourceTempl
 * @param destTempl
 * @param info
 * @param valueHandler
 */
public void copy(final JdbcTemplate sourceTempl, JdbcTemplate destTempl, TableInfo info,
        final ValueHandler valueHandler) {

    final Query sq = selectTables.merge(info);

    Query q = insertSql.merge(info);
    final List<Field> fields = info.getFields();
    q.execute(destTempl, new PreparedStatementCallback<Void>() {

        @Override
        public Void doInPreparedStatement(final PreparedStatement ps) throws SQLException, DataAccessException {
            ps.getConnection().setAutoCommit(false);
            try {
                List<Integer> list = sq.query(sourceTempl, new RowMapper<Integer>() {

                    @Override
                    public Integer mapRow(ResultSet rs, int rowNum) throws SQLException {
                        int col = 1;
                        for (Field field : fields) {
                            Object val = valueHandler.handleValue(field, rs);
                            ps.setObject(col++, val); // advance the parameter index for each column
                        }
                        ps.addBatch();
                        if (rowNum % BATCH_SIZE == 0) {
                            logger.info("execBatch: {}", rowNum);
                            ps.executeBatch();
                        }
                        return rowNum;
                    }
                });

                int total = list.size();
                logger.info("total Batch: {}", total);
                if (total % BATCH_SIZE != 0) {
                    ps.executeBatch();
                }
                ps.getConnection().commit();
            } catch (SQLException | RuntimeException e) {
                ps.getConnection().rollback();
                throw e;
            }
            return null;
        }
    });
}

From source file:org.wso2.carbon.analytics.datasource.rdbms.RDBMSAnalyticsFileSystem.java

private void writeChunksInsertOrUpdate(String path, List<DataChunk> chunks) throws IOException {
    Connection conn = null;
    PreparedStatement stmt = null;
    try {
        conn = this.getConnection(false);
        stmt = conn.prepareStatement(this.getWriteDataChunkQuery());
        for (DataChunk chunk : chunks) {
            this.populateStatementWithDataChunk(stmt, path, chunk);
            stmt.addBatch();
        }
        stmt.executeBatch();
        conn.commit();
    } catch (SQLException e) {
        RDBMSUtils.rollbackConnection(conn);
        /* this may be because we are updating data already in the file via a seek operation,
         * and the given write chunk query is not an insert-or-update, so let's insert sequentially
         * instead; if an error comes up, a separate update statement will be executed and checked */
        if (log.isDebugEnabled()) {
            log.debug("Chunk batch write failed: " + e.getMessage()
                    + ", falling back to sequential insert/update..");
        }
        RDBMSUtils.cleanupConnection(null, stmt, null);
        stmt = null;
        this.writeChunksSequentially(conn, path, chunks);
    } finally {
        RDBMSUtils.cleanupConnection(null, stmt, conn);
    }
}

From source file:org.wso2.carbon.is.migration.dao.IdpMetaDataDAO.java

public void addIdpMetaData(List<IdpMetaData> idpMetaDataDAOs) throws ISMigrationException {

    String sql = "INSERT INTO IDP_METADATA(IDP_ID, NAME, VALUE, DISPLAY_NAME, TENANT_ID) values(?,?,?,?,?)";

    PreparedStatement prepStmt = null;
    Connection connection = IdentityDatabaseUtil.getDBConnection();
    try {
        String dbProductName = connection.getMetaData().getDatabaseProductName();
        prepStmt = connection.prepareStatement(sql, new String[] {
                DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, SQLConstants.ID_COLUMN) });

        for (IdpMetaData idpMetaData : idpMetaDataDAOs) {
            prepStmt.setInt(1, idpMetaData.getIdpId());
            prepStmt.setString(2, idpMetaData.getName());
            prepStmt.setString(3, idpMetaData.getValue());
            prepStmt.setString(4, idpMetaData.getDisplayName());
            prepStmt.setInt(5, idpMetaData.getTenantId());
            prepStmt.addBatch();
        }

        prepStmt.executeBatch();
        connection.commit();
    } catch (SQLException e) {
        throw new ISMigrationException("Error while inserting default resident idp property values.", e);
    } finally {
        IdentityDatabaseUtil.closeStatement(prepStmt);
    }
}

From source file:org.wso2.carbon.device.mgt.extensions.device.type.template.dao.PropertyBasedPluginDAOImpl.java

public boolean addDevice(Device device) throws DeviceTypeMgtPluginException {
    boolean status = false;
    Connection conn = null;
    PreparedStatement stmt = null;
    try {
        conn = deviceTypeDAOHandler.getConnection();
        stmt = conn.prepareStatement(
                "INSERT INTO DM_DEVICE_PROPERTIES(DEVICE_TYPE_NAME, DEVICE_IDENTIFICATION, PROPERTY_NAME, "
                        + "PROPERTY_VALUE, TENANT_ID) VALUES (?, ?, ?, ?, ?)");
        for (String propertyKey : deviceProps) {
            stmt.setString(1, deviceType);
            stmt.setString(2, device.getDeviceIdentifier());
            stmt.setString(3, propertyKey);
            stmt.setString(4, getPropertyValue(device.getProperties(), propertyKey));
            stmt.setInt(5, PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId(true));
            stmt.addBatch();
        }
        stmt.executeBatch();
        status = true;
    } catch (SQLException e) {
        String msg = "Error occurred while adding the device '" + device.getDeviceIdentifier()
                + "' to the type " + deviceType + " db.";
        log.error(msg, e);
        throw new DeviceTypeMgtPluginException(msg, e);
    } finally {
        DeviceTypeUtils.cleanupResources(stmt, null);
    }
    return status;
}