List of usage examples for java.sql PreparedStatement addBatch
void addBatch() throws SQLException;
Adds a set of parameters to this PreparedStatement object's batch of commands.
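Before the real-world examples, here is a minimal, self-contained sketch of the pattern they all share: bind parameters, queue each row with addBatch(), then send everything with a single executeBatch(). The class, table, and column names below are hypothetical and used only for illustration; they are not taken from any of the source files that follow.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;

class AddBatchSketch {
    // Hypothetical table and columns, for illustration only.
    static void insertUsers(Connection conn, Map<Long, String> namesById) throws SQLException {
        try (PreparedStatement ps = conn.prepareStatement(
                "INSERT INTO USERS (ID, NAME) VALUES (?, ?)")) {
            for (Map.Entry<Long, String> entry : namesById.entrySet()) {
                ps.setLong(1, entry.getKey());
                ps.setString(2, entry.getValue());
                ps.addBatch();      // queue this parameter set
            }
            ps.executeBatch();      // execute all queued inserts in one round trip
        }
    }
}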
From source file:com.flexive.ejb.beans.BriefcaseEngineBean.java
private void replaceMetaData(Connection con, long id, Collection<FxReferenceMetaData<FxPK>> metadata)
        throws FxUpdateException {
    PreparedStatement stmt = null;
    boolean success = false;
    try {
        stmt = con.prepareStatement(
                "UPDATE " + TBL_BRIEFCASE_DATA + " SET metadata=? WHERE briefcase_id=? AND id=?");
        stmt.setLong(2, id);
        for (FxReferenceMetaData<FxPK> metaData : metadata) {
            final String meta = metaData.getSerializedForm();
            stmt.setString(1, meta);
            stmt.setLong(3, metaData.getReference().getId());
            stmt.addBatch();
        }
        stmt.executeBatch();
        success = true;
    } catch (SQLException e) {
        throw new FxUpdateException(LOG, e);
    } finally {
        if (!success) {
            EJBUtils.rollback(ctx);
        }
        closeObjects(BriefcaseEngineBean.class, null, stmt);
    }
}
From source file:com.globalsight.everest.permission.Permission.java
/**
 * Update table permissiongroup. If the permission id is 300 or greater, the
 * id is incremented by 1. Then permission_set is updated to the new string.
 */
private static void updateUnbalancedPermissionGroupSet() {
    Connection c = null;
    PreparedStatement stmt = null;
    PreparedStatement stmt1 = null;
    ResultSet rs = null;
    try {
        c = ConnectionPool.getConnection();
        c.setAutoCommit(false);
        stmt = c.prepareStatement(SQL_SELECT_PERMISSION_SET_FROM_PERMISSION_GROUP);
        stmt1 = c.prepareStatement(SQL_UPDATE_PERMISSION_SET);
        rs = stmt.executeQuery();
        while (rs.next()) {
            long id = rs.getLong(1);
            String permissionSet = rs.getString(2);
            String[] permissionIdArray = permissionSet.split("\\|");
            StringBuffer newPermissionSet = new StringBuffer();
            for (String permissionId : permissionIdArray) {
                if (StringUtils.isNotEmpty(permissionId)) {
                    long lId = Long.parseLong(permissionId);
                    if (lId >= 300) {
                        lId += 1;
                    }
                    newPermissionSet.append("|").append(lId);
                }
            }
            newPermissionSet.append("|");
            stmt1.setString(1, newPermissionSet.toString());
            stmt1.setLong(2, id);
            stmt1.addBatch();
        }
        stmt1.executeBatch();
        c.commit();
    } catch (Exception e) {
        logger.error("Failed to update permission_group from database.", e);
    } finally {
        ConnectionPool.silentClose(rs);
        ConnectionPool.silentClose(stmt);
        ConnectionPool.silentClose(stmt1);
        ConnectionPool.silentReturnConnection(c);
    }
}
From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java
@Override
public void moveOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_OPERATION WHERE ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);
        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();
        sql = "INSERT INTO DM_OPERATION_ARCH VALUES(?, ?, ?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);
        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("ID"));
            stmt2.setString(2, rs.getString("TYPE"));
            stmt2.setTimestamp(3, rs.getTimestamp("CREATED_TIMESTAMP"));
            stmt2.setTimestamp(4, rs.getTimestamp("RECEIVED_TIMESTAMP"));
            stmt2.setString(5, rs.getString("OPERATION_CODE"));
            stmt2.setTimestamp(6, this.currentTimestamp);
            stmt2.addBatch();
            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [OPERATIONS] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_OPERATION WHERE ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}
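The example above executes the batch every batchSize additions and once more after the loop to pick up the remainder. A minimal, self-contained sketch of just that periodic-flush pattern follows; the class, table, and column names are hypothetical and not taken from the WSO2 code.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

class PeriodicFlushSketch {
    // Hypothetical target table; batchSize is an assumed tuning value.
    static void copyRows(Connection conn, List<String> payloads, int batchSize) throws SQLException {
        try (PreparedStatement insert = conn.prepareStatement(
                "INSERT INTO ARCHIVE_TABLE (PAYLOAD) VALUES (?)")) {
            int count = 0;
            for (String payload : payloads) {
                insert.setString(1, payload);
                insert.addBatch();
                // Execute a full batch periodically to keep the queued batch bounded.
                if (++count % batchSize == 0) {
                    insert.executeBatch();
                }
            }
            // Execute whatever is left after the loop (a partial batch).
            insert.executeBatch();
        }
    }
}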
From source file:com.flexive.ejb.beans.configuration.DivisionConfigurationEngineBean.java
/**
 * {@inheritDoc}
 */
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public void setResourceValue(String key, FxString value) throws FxApplicationException {
    if (StringUtils.isBlank(key))
        return;
    key = key.trim();
    if (key.length() > 250)
        throw new FxApplicationException("ex.configuration.resource.key.tooLong", key);
    if (!StringUtils.isAsciiPrintable(key))
        throw new FxApplicationException("ex.configuration.resource.key.nonAscii", key);
    Connection con = null;
    PreparedStatement ps = null;
    try {
        con = Database.getDbConnection();
        ps = con.prepareStatement("DELETE FROM " + TBL_RESOURCES + " WHERE RKEY=?");
        ps.setString(1, key);
        ps.executeUpdate();
        if (value != null && !value.isEmpty()) {
            ps.close();
            ps = con.prepareStatement("INSERT INTO " + TBL_RESOURCES + " (RKEY,LANG,RVAL) VALUES (?,?,?)");
            ps.setString(1, key);
            for (long lang : value.getTranslatedLanguages()) {
                ps.setLong(2, lang);
                ps.setString(3, value.getTranslation(lang));
                ps.addBatch();
            }
            ps.executeBatch();
        }
    } catch (SQLException e) {
        throw new FxApplicationException(e, "ex.db.sqlError", e.getMessage());
    } finally {
        Database.closeObjects(DivisionConfigurationEngine.class, con, ps);
    }
}
From source file:org.forgerock.openidm.repo.jdbc.impl.MappedTableHandler.java
/**
 * Adds the option to batch more than one create statement.
 *
 * @param batchCreate if true, the create is only added to the batched statement and is not
 *                    executed; if false, the statement is executed directly
 * @see org.forgerock.openidm.repo.jdbc.TableHandler#create(java.lang.String,
 *      java.lang.String, java.lang.String, java.util.Map, java.sql.Connection)
 *      for the other parameters
 */
protected void create(String fullId, String type, String localId, Map<String, Object> obj,
        Connection connection, PreparedStatement createStatement, boolean batchCreate)
        throws SQLException, IOException {
    logger.debug("Create with fullid {}", fullId);
    String rev = "0";
    obj.put("_id", localId); // Save the id in the object
    obj.put("_rev", rev); // Save the rev in the object, and return the changed rev from the create.

    JsonValue objVal = new JsonValue(obj);
    logger.debug("Preparing statement {} with {}, {}, {}", createStatement, type, localId, rev);
    populatePrepStatementColumns(createStatement, objVal, tokenReplacementPropPointers);

    if (!batchCreate) {
        logger.debug("Executing: {}", createStatement);
        int val = createStatement.executeUpdate();
        logger.debug("Created object for id {} with rev {}", fullId, rev);
    } else {
        createStatement.addBatch();
        logger.debug("Added create for object id {} with rev {} to batch", fullId, rev);
    }
}
From source file:eu.celarcloud.celar_ms.ServerPack.Database.MySQL.DBHandlerWithConnPool.java
public void createSubscription(SubObj sub, MetricObj metric) {
    PreparedStatement stmt = null;
    Connection c = null;
    try {
        c = this.getConnection();

        stmt = c.prepareStatement(CREATE_SUBSCRIPTION);
        stmt.setString(1, sub.getSubID());
        stmt.setString(2, sub.getGroupingFunc().name());
        stmt.setString(3, sub.getOriginMetric());
        stmt.setInt(4, sub.getPeriod());
        stmt.executeUpdate();

        stmt = c.prepareStatement(CREATE_METRIC_FOR_SUB);
        stmt.setString(1, metric.getMetricID());
        stmt.setString(2, metric.getAgentID());
        stmt.setString(3, metric.getName());
        stmt.setString(4, metric.getGroup());
        stmt.setString(5, metric.getUnits());
        stmt.setString(6, metric.getType());
        stmt.setString(7, "yes");
        stmt.executeUpdate();

        stmt = c.prepareStatement(ADD_AGENT_TO_SUB);
        String subID = sub.getSubID();
        for (String agentID : sub.getAgentList()) {
            stmt.setString(1, subID);
            stmt.setString(2, agentID);
            stmt.addBatch();
        }
        stmt.executeBatch();
    } catch (SQLException e) {
        server.writeToLog(Level.SEVERE, "MySQL Handler createSubscription>> " + e);
    } catch (Exception e) {
        server.writeToLog(Level.SEVERE, "MySQL Handler createSubscription>> " + e);
    } finally {
        this.release(stmt, c);
    }
}
From source file:com.dbmojo.QueryExecutor.java
/**
 * Add a batch update either to a single raw statement or to the correct
 * passed prepared statement.
 */
private void addBatchUpdate(Connection conn, boolean prepared, String query, String[] values,
        Statement bstmt, LinkedHashMap<String, PreparedStatement> bpstmts) throws Exception {
    // If this is NOT a prepared statement then add the query to a raw SQL batch
    if (!prepared) {
        if (DebugLog.enabled) {
            DebugLog.add(this, "Adding update '" + query + "' to statement batch");
        }
        bstmt.addBatch(query);
    } else {
        // If this IS a prepared statement then check for its existence
        // in the pstmts hash. If it doesn't exist then create a new
        // pstmt for the query and add it to the hash.
        PreparedStatement pstmt = null;
        if (bpstmts.containsKey(query)) {
            if (DebugLog.enabled) {
                DebugLog.add(this, "Retrieving pstmt batch for query '" + query + "'");
            }
            pstmt = bpstmts.get(query);
        } else {
            if (DebugLog.enabled) {
                DebugLog.add(this, "Starting pstmt batch for query '" + query + "'");
            }
            pstmt = conn.prepareStatement(query);
        }

        if (DebugLog.enabled) {
            DebugLog.add(this, "Setting vals on pstmt batch for query '" + query + "'");
        }
        setPreparedStatementValues(pstmt, values);

        // Add THIS set of values to the batch for this specific
        // prepared statement. Later on all prepared statement batches
        // will be executed sequentially.
        if (DebugLog.enabled) {
            DebugLog.add(this, "Adding to pstmt batch for query '" + query + "'");
        }
        pstmt.addBatch();
        bpstmts.put(query, pstmt);
    }
}
From source file:HSqlManager.java
public static void commonClusterNewPhages(Connection connection, int bps) throws SQLException, IOException,
        ClassNotFoundException, IllegalAccessException, InstantiationException {
    Connection db = connection;
    String base = new File("").getAbsolutePath();
    db.setAutoCommit(false);
    PreparedStatement st = db.prepareStatement("UPDATE Primerdb.Primers SET CommonP = False,"
            + " UniqueP = False" + " WHERE Cluster = ? and " + "Strain = ? and Sequence = ? and Bp =?");
    Statement stat = db.createStatement();
    if (newPhages != null) {
        List<String[]> phages = newPhages;
        phages.forEach(x -> {
            try {
                CSV.writeDataCSV(x[0], Fasta.process(x[0], bps), bps);
                CSV.writeDataCSV(x[0], Fasta.processPrimers(x[0], bps), bps);
            } catch (IOException e) {
                e.printStackTrace();
            }
            Set<CharSequence> primers = new HashSet<>();
            try {
                ResultSet rs = stat.executeQuery("SELECT * FROM Primerdb.Primers WHERE" + " Sequence = '"
                        + x[1] + "' and Clusters = '" + x[2] + "' and CommonP = True" + " and Bp = "
                        + Integer.valueOf(bps));
                while (rs.next()) {
                    primers.add((CharSequence) rs.getString("Sequence"));
                }
                primers.removeAll(CSV.readCSV(base + "/PhageData/" + Integer.toString(bps) + x[0] + ".csv"));
                if (primers.size() != 0) {
                    primers.forEach(y -> {
                        try {
                            // finish update
                            st.setString(1, x[1]);
                            st.setString(2, x[2]);
                            st.setString(3, y.toString());
                            st.setInt(4, bps);
                            st.addBatch();
                        } catch (SQLException e) {
                            e.printStackTrace();
                        }
                    });
                    st.executeBatch();
                    db.commit();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        });
    }
    System.out.println("Common Updated");
    st.close();
}
From source file:connectivity.connection.java
public void sortLikes() throws SQLException, ParseException {
    PreparedStatement ps = con.prepareStatement("Select tip_id,likes_content from tips;");
    ResultSet rs = ps.executeQuery();
    ArrayList likes = new ArrayList();
    while (rs.next()) {
        String[] arr = new String[2];
        arr[0] = rs.getString("tip_id");
        arr[1] = rs.getString("likes_content");
        likes.add(arr);
    }
    //System.out.println(likes);
    for (Object like : likes) {
        try {
            String[] arr = (String[]) like;
            JSONObject json = (JSONObject) new JSONParser().parse(arr[1]);
            //System.out.print(json.get("groups").toString());
            JSONArray groups = (JSONArray) new JSONParser().parse(json.get("groups").toString());
            JSONObject groups0 = (JSONObject) new JSONParser().parse(groups.get(0).toString());
            JSONArray items = (JSONArray) new JSONParser().parse(groups0.get("items").toString());
            // Prepare the insert once, then queue one row per liked item.
            ps = con.prepareStatement("INSERT INTO `user_likes_tips` VALUES (?, ?);");
            for (Object item : items) {
                String ret = addUser((JSONObject) item);
                ps.setString(1, ret);
                ps.setString(2, arr[0]);
                ps.addBatch();
            }
            ps.executeBatch();
        } catch (Exception e) {
            System.out.println("exception e=" + e);
        }
    }
}
From source file:gobblin.metastore.database.DatabaseJobHistoryStoreV101.java
private void addMetricToBatch(PreparedStatement upsertStatement, Metric metric, String id) throws SQLException {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(id));
    Preconditions.checkArgument(metric.hasGroup());
    Preconditions.checkArgument(metric.hasName());
    Preconditions.checkArgument(metric.hasType());
    Preconditions.checkArgument(metric.hasValue());

    int index = 0;
    upsertStatement.setString(++index, id);
    upsertStatement.setString(++index, metric.getGroup());
    upsertStatement.setString(++index, metric.getName());
    upsertStatement.setString(++index, metric.getType().name());
    upsertStatement.setString(++index, metric.getValue());
    upsertStatement.addBatch();
}