Example usage for java.sql PreparedStatement executeBatch

List of usage examples for java.sql PreparedStatement executeBatch

Introduction

On this page you can find example usages of java.sql PreparedStatement executeBatch.

Prototype

int[] executeBatch() throws SQLException;

Source Link

Document

Submits a batch of commands to the database for execution and if all commands execute successfully, returns an array of update counts.

Usage

From source file:mil.army.usace.data.dataquery.rdbms.RdbmsDataQuery.java

/**
 * Runs an INSERT, UPDATE, or DELETE for every record in the list, using JDBC
 * batching when {@code useBatch} is set and a per-row executeUpdate otherwise.
 *
 * @param dmlType         which DML command to generate (INSERT, UPDATE, DELETE)
 * @param records         entities to persist; the first record's class drives SQL generation
 * @param useDeclaredOnly when true only fields declared directly on the class are mapped
 * @param includeFields   restricts an UPDATE to these fields (unused for INSERT/DELETE)
 * @throws DataQueryException wrapping any reflection or SQL failure; when this
 *                            method opened the transaction it is rolled back first
 */
private void runDml(DML dmlType, List records, boolean useDeclaredOnly, List<String> includeFields) {
    PreparedStatement st = null;
    boolean inTrans = inTransaction();
    int batchCount = 0;
    String command = null;
    if (!inTrans)
        startTransaction();
    try {
        Object obj = records.get(0);
        Class objClass = obj.getClass();
        String schema = getEntitySchema(objClass);
        Boolean isCamelCased = useCamelCase(objClass);
        HashMap<Method, String> fieldMapping = getFieldMapping(objClass, GET, isCamelCased, useDeclaredOnly);
        // Typed map; the original raw "new HashMap()" compiled with an unchecked warning.
        HashMap<Integer, Method> indexMapping = new HashMap<>();
        String tableName = getTableName(objClass);
        if (tableName == null)
            tableName = getDbName(isCamelCased, objClass.getSimpleName(), null);

        // The command generators also populate indexMapping (parameter index -> getter).
        if (dmlType == DML.UPDATE)
            command = getUpdateCommand(tableName, schema, fieldMapping, indexMapping, includeFields);
        else if (dmlType == DML.INSERT)
            command = getInsertCommand(tableName, schema, fieldMapping, indexMapping);
        else
            command = getDeleteCommand(tableName, schema, fieldMapping, indexMapping);

        st = conn.prepareStatement(command);

        for (Object record : records) {
            for (int index : indexMapping.keySet()) {
                // No-arg getter invocation; the original passed a null Object[],
                // which relies on varargs null-spreading and triggers a warning.
                Object value = indexMapping.get(index).invoke(record);
                if (value instanceof java.util.Date) {
                    // NOTE(review): java.sql.Date drops the time-of-day portion;
                    // switch to java.sql.Timestamp if time precision matters here.
                    value = new java.sql.Date(((java.util.Date) value).getTime());
                }
                st.setObject(index, value);
            }

            if (useBatch)
                st.addBatch();
            else
                st.executeUpdate();

            // Flush a full batch every batchSize records.
            if (useBatch && ++batchCount % batchSize == 0) {
                st.executeBatch();
            }
        }
        if (useBatch)
            st.executeBatch(); // flush out remaining records
        if (!inTrans)
            commitTransaction();
    } catch (Exception ex) {
        // Roll back only the transaction this call opened, then surface the failure;
        // the cause travels inside DataQueryException (printStackTrace removed —
        // it duplicated the rethrown exception on stderr).
        if (!inTrans)
            rollbackTransaction();
        throw new DataQueryException(command, "runDml", ex);
    } finally {
        if (st != null) {
            try {
                st.close();
            } catch (Exception ex) {
                // ignore: nothing useful can be done if close() fails
            }
        }
    }
}

From source file:com.pactera.edg.am.metamanager.extractor.dao.helper.CreateMetadataHelper.java

/**
 * Persists every not-yet-existing metadata instance of the current meta-model
 * as one JDBC batch, flushing whenever the running count reaches batchSize and
 * once more at the end for any leftover rows.
 *
 * @param ps prepared INSERT whose leading parameters are: 1 id, 2 code, 3 name,
 *           4 classifier (meta-model) code, 5 namespace, 6 parent id,
 *           7 START_TIME; further parameters are bound by setAttrs/setPs
 * @return always null (callback contract)
 * @throws SQLException if a metadata row has no parent, or the batch fails
 */
public Object doInPreparedStatement(PreparedStatement ps) throws SQLException {
    Map<String, String> mAttrs = metaModel.getMAttrs();
    boolean hasChildMetaModel = metaModel.isHasChildMetaModel();

    // Metadata instances collected for this meta-model.
    List<AbstractMetadata> metadatas = metaModel.getMetadatas();
    int size = metadatas.size();
    String code = "";
    String metaModelCode = "";
    MMMetadata parentMetadata = null;
    String logMsg = "";
    try {
        for (int i = 0; i < size; i++) {

            MMMetadata metadata = (MMMetadata) metadatas.get(i);
            if (metadata.isHasExist()) {
                // Already stored in an earlier run; skip it.
                continue;
            }

            parentMetadata = metadata.getParentMetadata();
            if (parentMetadata == null) {
                String error = new StringBuilder("Metadata: ").append(metadata.getCode())
                        .append(" has no parent metadata!").toString();
                log.error(error);
                throw new SQLException(error);
            }
            String metadataNamespace = genNamespace(parentMetadata, metadata.getId(), hasChildMetaModel);

            // 1: metadata id
            ps.setString(1, metadata.getId());
            code = metadata.getCode();
            // 2: metadata code
            ps.setString(2, code);
            // 3: display name, falling back to the code when blank
            ps.setString(3,
                    (metadata.getName() == null || metadata.getName().equals("")) ? code : metadata.getName());
            // 4: classifier (meta-model) code
            metaModelCode = metaModel.getCode();
            ps.setString(4, metaModelCode);

            // 5: namespace, 6: parent id
            ps.setString(5, metadataNamespace);
            ps.setString(6, parentMetadata.getId());
            // 7: START_TIME of this extraction run
            ps.setLong(7, this.getGlobalTime());

            // Bind the model attributes; returns how many were bound.
            int index = setAttrs(ps, metadata, mAttrs);

            setPs(ps, metadata, index + 7);

            if (log.isDebugEnabled()) {
                log.debug(new StringBuilder().append("parent_id:").append(parentMetadata.getId())
                        .append(",parent_code:").append(parentMetadata.getCode()).append(",instance_code:")
                        .append(code).append(",classifier_id:").append(metaModelCode).toString());
            }
            ps.addBatch();
            ps.clearParameters();

            if (++super.count % super.batchSize == 0) {
                ps.executeBatch();
                ps.clearBatch();
            }
        }

        // Flush rows that did not fill a whole batch.
        if (super.count % super.batchSize != 0) {
            ps.executeBatch();
            ps.clearBatch();

        }
    } catch (SQLException e) {
        // parentMetadata can still be null here (the missing-parent SQLException
        // above lands in this catch), so guard before dereferencing it — the
        // original would have thrown an NPE and masked the real error.
        String parentId = parentMetadata == null ? "<none>" : parentMetadata.getId();
        String parentCode = parentMetadata == null ? "<none>" : parentMetadata.getCode();
        logMsg = new StringBuilder().append("Failed to save metadata: parent_id:")
                .append(parentId).append(",parent_code:").append(parentCode)
                .append(",instance_code:").append(code).append(",classifier_id:").append(metaModelCode)
                .append("  error:").append(e.getLocalizedMessage()).toString();
        log.error(logMsg);
        AdapterExtractorContext.addExtractorLog(ExtractorLogLevel.ERROR, logMsg);
        throw e;
    }
    return null;
    // test for callback
    // throw new SQLException();
}

From source file:org.wso2.carbon.idp.mgt.dao.IdPManagementDAO.java

/**
 * @param conn// w ww . j a v  a  2 s  .  com
 * @param idPId
 * @param claims
 * @throws SQLException
 */
/**
 * Batch-inserts the claim URIs registered for an identity provider.
 *
 * @param conn     open connection; lifecycle is managed by the caller
 * @param idPId    identity provider row id
 * @param tenantId tenant the IdP belongs to
 * @param claims   claims to insert; a null or empty array is a no-op
 * @throws SQLException when the batch insert fails
 */
private void addIdPClaims(Connection conn, int idPId, int tenantId, Claim[] claims) throws SQLException {

    if (claims == null || claims.length == 0) {
        return;
    }

    PreparedStatement statement = null;
    try {
        // Columns: SP_IDP_ID, SP_IDP_CLAIM
        statement = conn.prepareStatement(IdPManagementConstants.SQLQueries.ADD_IDP_CLAIMS_SQL);
        for (Claim claim : claims) {
            statement.setInt(1, idPId);
            statement.setInt(2, tenantId);
            statement.setString(3, CharacterEncoder.getSafeText(claim.getClaimUri()));
            statement.addBatch();
            statement.clearParameters();
        }
        statement.executeBatch();
    } finally {
        IdentityApplicationManagementUtil.closeStatement(statement);
    }
}

From source file:org.wso2.carbon.idp.mgt.dao.IdPManagementDAO.java

/**
 * @param conn//from   www.j av a  2s.c  o m
 * @param idPId
 * @param idpRoleNames
 * @throws SQLException
 */
/**
 * Batch-inserts the role names defined for an identity provider.
 *
 * @param conn         open connection; lifecycle is managed by the caller
 * @param idPId        identity provider row id
 * @param tenantId     tenant the IdP belongs to
 * @param idpRoleNames role names to insert; a null or empty array is a no-op
 * @throws SQLException when the batch insert fails
 */
private void addIdPRoles(Connection conn, int idPId, int tenantId, String[] idpRoleNames) throws SQLException {

    if (idpRoleNames == null || idpRoleNames.length == 0) {
        return;
    }

    PreparedStatement statement = null;
    try {
        // Columns: SP_IDP_ID, SP_IDP_ROLE
        statement = conn.prepareStatement(IdPManagementConstants.SQLQueries.ADD_IDP_ROLES_SQL);
        for (String idpRole : idpRoleNames) {
            statement.setInt(1, idPId);
            statement.setInt(2, tenantId);
            statement.setString(3, CharacterEncoder.getSafeText(idpRole));
            statement.addBatch();
            statement.clearParameters();
        }
        statement.executeBatch();
    } finally {
        IdentityApplicationManagementUtil.closeStatement(statement);
    }
}

From source file:fr.aliacom.obm.common.calendar.CalendarDaoJdbcImpl.java

/**
 * Deletes the EventLink rows that bind the given attendees to the event.
 *
 * @param con      open database connection (not closed here)
 * @param toRemove attendees whose event links must be removed
 * @param ev       the event being updated
 * @throws SQLException if the batched delete fails
 */
private void removeAttendees(Connection con, List<Attendee> toRemove, Event ev) throws SQLException {
    String query = "DELETE FROM EventLink WHERE eventlink_event_id=? AND eventlink_entity_id=? ";
    logger.info("event update will remove {} attendees.", toRemove.size());

    PreparedStatement stmt = null;
    try {
        stmt = con.prepareStatement(query);
        for (Attendee attendee : toRemove) {
            stmt.setInt(1, ev.getObmId().getObmId());
            stmt.setInt(2, attendee.getEntityId().getId());
            stmt.addBatch();
        }
        stmt.executeBatch();
    } finally {
        obmHelper.cleanup(null, stmt, null);
    }
}

From source file:com.wabacus.system.dataimport.DataImportItem.java

/**
 * Deletes rows from the configured table based on the imported data file.
 * When the file is empty the whole table is cleared with a single DELETE;
 * otherwise each file row is bound to the parameterized delete and executed
 * in batches (flushed by shouldBatchCommit, plus a final flush for leftovers).
 *
 * @param conn   open JDBC connection the deletes run on
 * @param dbtype database dialect helper used when binding parameter values
 * @throws SQLException if preparing or executing the delete fails
 */
private void doDeleteData(Connection conn, AbsDatabaseType dbtype) throws SQLException {
    PreparedStatement pstmtDelete = null;
    try {
        DataImportSqlBean disqlbean = null;
        if (fileProcessor.isEmpty()) { // no rows in the file: wipe the whole table
            disqlbean = new DataImportSqlBean();
            disqlbean.setSql("delete from " + configBean.getTablename());
        } else {
            disqlbean = configBean.getLstImportSqlObjs(dynimportype).get(0);
        }
        log.debug(disqlbean.getSql());
        this.errorSqlTrace = disqlbean.getSql();
        pstmtDelete = conn.prepareStatement(disqlbean.getSql());
        if (disqlbean.getLstParamColsInFile() == null || disqlbean.getLstParamColsInFile().size() == 0) {
            // No file-bound parameters: run the delete once, directly.
            pstmtDelete.executeUpdate();
        } else {
            // "index" mapping matches file columns by position; otherwise by column title.
            boolean matchFileIndex = configBean.getColMapBean().getFileMapType().equals("index");
            List<String> lstColNames = getLstColNames(matchFileIndex);
            this.lstColNamesTrace = lstColNames;
            boolean hasUnCommitData = false; // true while batched rows await executeBatch()
            List lstDataColValuesPerRow;
            Map<String, Object> mDataColValues = null;
            int i = fileProcessor.getStartrecordindex();
            for (int len = fileProcessor.getStartrecordindex() + fileProcessor.getRecordcount(); i < len; i++) {
                lstDataColValuesPerRow = fileProcessor.getRowData(i);
                if (lstDataColValuesPerRow == null || lstDataColValuesPerRow.size() == 0)
                    continue;
                if (configBean.getInterceptor() != null) {
                    // The interceptor may veto this row by returning false.
                    boolean flag = configBean.getInterceptor().beforeImportRow(conn, this, lstColNames,
                            lstDataColValuesPerRow);
                    if (!flag)
                        continue;
                }
                if (!matchFileIndex) {
                    // Title-based mapping needs a column-title -> value lookup per row.
                    mDataColValues = getAllColTitleAndValueMap(lstColNames, lstDataColValuesPerRow);
                }
                updateDBRowData(pstmtDelete, dbtype, disqlbean.getLstParamColsInFile(),
                        disqlbean.getLstParamTypes(), matchFileIndex, lstDataColValuesPerRow, mDataColValues);
                if (configBean.getInterceptor() != null) {
                    configBean.getInterceptor().afterImportRow(conn, this, lstColNames, lstDataColValuesPerRow);
                }
                hasUnCommitData = true;
                if (shouldBatchCommit(i)) {
                    pstmtDelete.executeBatch();
                    hasUnCommitData = false;
                }
            }
            if (hasUnCommitData) {
                // Flush rows left over after the last full batch.
                pstmtDelete.executeBatch();
            }
        }
    } finally {
        fileProcessor.destroy();
        WabacusAssistant.getInstance().release(null, pstmtDelete);
    }
}

From source file:fr.aliacom.obm.common.calendar.CalendarDaoJdbcImpl.java

/**
 * Removes the DeletedEvent markers of the given event for every listed attendee.
 *
 * @param con         open database connection (not closed here)
 * @param ev          event whose external id identifies the markers
 * @param attendeeIds user ids whose deletion markers are purged
 * @throws SQLException if the batched delete fails
 */
private void removeFromDeletedEvent(Connection con, Event ev, Collection<Integer> attendeeIds)
        throws SQLException {
    PreparedStatement stmt = null;
    try {
        stmt = con.prepareStatement(
                "DELETE FROM DeletedEvent " + "WHERE deletedevent_event_ext_id=? AND deletedevent_user_id=?");
        String extId = ev.getExtId().getExtId();
        for (int attendeeId : attendeeIds) {
            stmt.setString(1, extId);
            stmt.setInt(2, attendeeId);
            stmt.addBatch();
        }
        stmt.executeBatch();
    } finally {
        obmHelper.cleanup(null, stmt, null);
    }
}

From source file:HSqlManager.java

/**
 * Marks cluster-common primers unique and stores their melting temperature,
 * GC content, and hairpin flag, committing in batches of 1000 rows.
 *
 * Fixes over the original: the phage-listing ResultSet and each per-cluster
 * ResultSet are now closed after use, and the javalog.log PrintWriter is
 * closed in a finally block so it is flushed and released on every exit path.
 */
public static void uniqueDB(Connection connection, int bps) throws ClassNotFoundException, SQLException,
        InstantiationException, IllegalAccessException, IOException {
    DpalLoad.main(new String[1]);
    HSqlPrimerDesign.Dpal_Inst = DpalLoad.INSTANCE_WIN64;
    String base = new File("").getAbsolutePath();
    if (!written) {
        // One-time generation of the per-phage primer CSV cache.
        CSV.makeDirectory(new File(base + "/PhageData"));
        INSTANCE.readFileAll(INSTANCE.path).stream().forEach(x -> {
            try {
                CSV.writeDataCSV(x[1], Fasta.process(x[1], bps), bps);
            } catch (IOException e) {
                // Best effort: keep generating the remaining files.
                e.printStackTrace();
            }
        });
    }
    Connection db = connection;
    db.setAutoCommit(false);
    Statement stat = db.createStatement();
    PrintWriter log = new PrintWriter(new File("javalog.log"));
    try {
        stat.execute("SET FILES LOG FALSE;\n");
        PreparedStatement st = db
                .prepareStatement("UPDATE Primerdb.Primers" + " SET UniqueP = true, Tm = ?, GC =?, Hairpin =?"
                        + "WHERE Cluster = ? and Strain = ? and " + "Sequence = ? and Bp = ?");
        ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;");
        List<String[]> phages = new ArrayList<>();
        while (call.next()) {
            String[] r = new String[3];
            r[0] = call.getString("Strain");
            r[1] = call.getString("Cluster");
            r[2] = call.getString("Name");
            phages.add(r);
        }
        call.close(); // listing fully read into memory; release the cursor
        // NOTE(review): `stat` and `st` are shared across parallelStream workers
        // below; java.sql.Statement is not thread-safe, so this relies on the
        // driver tolerating concurrent use — consider per-thread statements.
        phages.stream().map(x -> x[0]).collect(Collectors.toSet()).stream().forEach(x -> {
            phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]).collect(Collectors.toSet())
                    .parallelStream().forEach(z -> {
                        try {
                            // Phages of this strain that are NOT in cluster z.
                            Set<String> nonclustphages = phages.stream()
                                    .filter(a -> a[0].equals(x) && !a[1].equals(z)).map(a -> a[2])
                                    .collect(Collectors.toSet());
                            // NOTE(review): strain/cluster values are concatenated into the
                            // SQL; they come from this database, but parameterizing would
                            // still be safer.
                            ResultSet resultSet = stat.executeQuery("Select Sequence from primerdb.primers"
                                    + " where Strain ='" + x + "' and Cluster ='" + z + "' and CommonP = true"
                                    + " and Bp = " + Integer.valueOf(bps) + " ");
                            Set<CharSequence> primers = Collections.synchronizedSet(new HashSet<>());
                            while (resultSet.next()) {
                                primers.add(resultSet.getString("Sequence"));
                            }
                            resultSet.close(); // was leaked in the original
                            // Drop any primer that also occurs in another cluster's phages.
                            for (String phage : nonclustphages) {
                                CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + phage + ".csv")
                                        .parallelStream().filter(primer -> primers.contains(primer))
                                        .forEach(primers::remove);
                            }
                            int i = 0;
                            for (CharSequence a : primers) {
                                try {
                                    st.setDouble(1, HSqlPrimerDesign.primerTm(a, 0, 800, 1.5, 0.2));
                                    st.setDouble(2, HSqlPrimerDesign.gcContent(a));
                                    st.setBoolean(3, HSqlPrimerDesign.calcHairpin((String) a, 4));
                                    st.setString(4, z);
                                    st.setString(5, x);
                                    st.setString(6, a.toString());
                                    st.setInt(7, bps);
                                    st.addBatch();
                                } catch (SQLException e) {
                                    e.printStackTrace();
                                    System.out.println("Error occurred at " + x + " " + z);
                                }
                                i++;
                                if (i == 1000) {
                                    // Flush and commit a full batch of updates.
                                    i = 0;
                                    st.executeBatch();
                                    db.commit();
                                }
                            }
                            if (i > 0) {
                                st.executeBatch();
                                db.commit();
                            }
                        } catch (SQLException e) {
                            e.printStackTrace();
                            System.out.println("Error occurred at " + x + " " + z);
                        }
                        log.println(z);
                        log.flush();
                        System.gc();
                    });
        });
        stat.execute("SET FILES LOG TRUE\n");
        st.close();
        stat.close();
    } finally {
        log.close(); // flushes buffered lines and releases javalog.log (leaked before)
    }
    System.out.println("Unique Updated");
}

From source file:fr.aliacom.obm.common.calendar.CalendarDaoJdbcImpl.java

/**
 * Inserts one EventException row per recurrence exception date of the event.
 *
 * Fix: the creator id is now bound as a third statement parameter instead of
 * being concatenated into the SQL text, so the statement text stays constant
 * and the value goes through PreparedStatement binding.
 *
 * @param editor token of the user performing the edit; its obm id is stored
 *               as eventexception_usercreate
 * @param ev     event whose recurrence exception dates are persisted
 * @param con    open connection (only the statement is cleaned up here)
 * @param id     database id of the parent event
 * @throws SQLException if the batched insert fails
 */
private void insertExceptions(AccessToken editor, Event ev, Connection con, EventObmId id) throws SQLException {
    PreparedStatement ps = null;
    try {
        ps = con.prepareStatement("insert into EventException "
                + "(eventexception_parent_id, eventexception_date, eventexception_usercreate) "
                + "values (?, ?, ?)");
        for (Date exd : ev.getRecurrence().getExceptions()) {
            ps.setInt(1, id.getObmId());
            ps.setTimestamp(2, new Timestamp(exd.getTime()));
            ps.setInt(3, editor.getObmId());
            ps.addBatch();
        }
        ps.executeBatch();
    } finally {
        obmHelper.cleanup(null, ps, null);
    }
}

From source file:org.wso2.appcloud.core.dao.ApplicationDAO.java

/**
 * Method for adding label, which associated with a version of an application, to database.
 *
 * @param dbConnection  database Connection
 * @param tags          list of tags/*  ww w .jav a 2s . c  om*/
 * @param versionHashId version hash id
 * @param tenantId      tenant id
 * @throws AppCloudException
 */
/**
 * Method for adding label, which associated with a version of an application, to database.
 *
 * @param dbConnection  database Connection
 * @param tags          list of tags
 * @param versionHashId version hash id
 * @param tenantId      tenant id
 * @throws AppCloudException when the batch insert fails
 */
public void addTags(Connection dbConnection, List<Tag> tags, String versionHashId, int tenantId)
        throws AppCloudException {

    PreparedStatement statement = null;
    try {
        statement = dbConnection.prepareStatement(SQLQueryConstants.ADD_TAG);
        for (Tag tag : tags) {
            statement.setString(1, tag.getTagName());
            statement.setString(2, tag.getTagValue());
            statement.setString(3, versionHashId);
            statement.setString(4, tag.getDescription());
            statement.setInt(5, tenantId);
            statement.addBatch();
        }
        statement.executeBatch();
    } catch (SQLException e) {
        // Wrap with enough context to identify the failing version and tenant.
        throw new AppCloudException("Error occurred while adding tags to database for version with hash id : "
                + versionHashId + " in tenant : " + tenantId, e);
    } finally {
        DBUtil.closePreparedStatement(statement);
    }
}