List of usage examples for java.sql PreparedStatement addBatch
void addBatch() throws SQLException;
Adds a set of parameters to this PreparedStatement object's batch of commands.
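Before the real-world examples, here is a minimal sketch of the pattern they all follow. It assumes a hypothetical users table with name and email columns and an already open JDBC Connection; every name in it is illustrative, not taken from the examples below. Parameters are bound per row, addBatch() queues the bound row, and executeBatch() sends all queued rows to the database together.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

// Minimal sketch of the addBatch()/executeBatch() pattern; the table name,
// column names, and sample rows are hypothetical, and "connection" is assumed
// to be an open JDBC connection supplied by the caller.
static int[] insertUsers(Connection connection, String[][] rows) throws SQLException {
    String sql = "INSERT INTO users (name, email) VALUES (?, ?)";
    try (PreparedStatement ps = connection.prepareStatement(sql)) {
        for (String[] row : rows) {
            ps.setString(1, row[0]);  // bind parameters for this row
            ps.setString(2, row[1]);
            ps.addBatch();            // queue the bound parameters as one batch entry
        }
        return ps.executeBatch();     // execute all queued entries in a single round trip
    }
}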
From source file: org.wso2.carbon.is.migration.dao.ClaimDAO.java
/**
 * Add claim properties
 *
 * @param connection
 * @param localClaimId
 * @param claimProperties
 * @param tenantId
 * @throws ISMigrationException
 */
private void addClaimProperties(Connection connection, int localClaimId, Map<String, String> claimProperties,
        int tenantId) throws ISMigrationException {
    PreparedStatement prepStmt = null;
    if (localClaimId > 0 && claimProperties != null) {
        try {
            String query = SQLConstants.ADD_CLAIM_PROPERTY;
            prepStmt = connection.prepareStatement(query);
            for (Map.Entry<String, String> property : claimProperties.entrySet()) {
                prepStmt.setInt(1, localClaimId);
                prepStmt.setString(2, property.getKey());
                prepStmt.setString(3, property.getValue() != null ? property.getValue() : "");
                prepStmt.setInt(4, tenantId);
                prepStmt.addBatch();
            }
            prepStmt.executeBatch();
        } catch (SQLException e) {
            throw new ISMigrationException("Error while adding claim properties", e);
        } finally {
            IdentityDatabaseUtil.closeStatement(prepStmt);
        }
    }
}
From source file: org.openbel.framework.core.kam.JdbcKAMLoaderImpl.java
/**
 * {@inheritDoc}
 */
@Override
public Map<String, Integer> loadFunctionTypes() throws SQLException {
    PreparedStatement ps = getPreparedStatement(FUNCTION_TYPE_SQL);
    Map<String, Integer> functionTypeIdMap = new HashMap<String, Integer>();
    int ftid = 0;
    for (FunctionEnum f : FunctionEnum.values()) {
        String functionName = f.getDisplayValue();
        ps.setInt(1, ftid);
        ps.setString(2, functionName);
        ps.addBatch();
        functionTypeIdMap.put(functionName, ftid);
        ftid++;
    }
    ps.executeBatch();
    return functionTypeIdMap;
}
From source file: org.apache.hadoop.hive.ql.metadata.BIStore.java
public int insertInsertExeInfo(Connection cc, Collection<InsertExeInfo> insertInfoList) {
    if (cc == null || insertInfoList == null || insertInfoList.isEmpty()) {
        return -1;
    }
    int rt = -1;
    PreparedStatement pstmt;
    String queryID = "";
    try {
        pstmt = cc.prepareStatement(
                "insert into tdw_insert_info(queryid, desttable, successnum, rejectnum, ismultiinsert) values (?,?,?,?,?)");
        for (InsertExeInfo insertInfo : insertInfoList) {
            queryID = insertInfo.getQueryID();
            pstmt.setString(1, insertInfo.getQueryID());
            pstmt.setString(2, insertInfo.getDestTable());
            pstmt.setLong(3, insertInfo.getFsSuccessNum());
            pstmt.setLong(4, insertInfo.getFsRejectNum());
            pstmt.setBoolean(5, insertInfo.getIsMultiInsert());
            pstmt.addBatch();
        }
        pstmt.executeBatch();
        rt = 0;
    } catch (SQLException e) {
        LOG.error(" insert INSERT EXE Info failed: " + queryID);
        e.printStackTrace();
    }
    return rt;
}
From source file: consultor.CSVLoader.java
/**
 * Parse CSV file using OpenCSV library and load in
 * given database table.
 *
 * @param csvFile            Input CSV file
 * @param tableName          Database table name to import data
 * @param truncateBeforeLoad Truncate the table before inserting new records.
 * @throws Exception
 */
public void loadCSV(String csvFile, String tableName, boolean truncateBeforeLoad) throws Exception {
    CSVReader csvReader = null;
    if (null == this.connection) {
        throw new Exception("Not a valid connection.");
    }
    try {
        csvReader = new CSVReader(new FileReader(csvFile), this.seprator);
    } catch (Exception e) {
        e.printStackTrace();
        throw new Exception("Error occured while executing file. " + e.getMessage());
    }
    String[] headerRow = csvReader.readNext();
    if (null == headerRow) {
        throw new FileNotFoundException(
                "No columns defined in given CSV file." + "Please check the CSV file format.");
    }
    String questionmarks = StringUtils.repeat("?,", headerRow.length);
    questionmarks = (String) questionmarks.subSequence(0, questionmarks.length() - 1);
    String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName);
    query = query.replaceFirst(KEYS_REGEX, StringUtils.join(headerRow, ","));
    query = query.replaceFirst(VALUES_REGEX, questionmarks);
    System.out.println("Query: " + query);
    String[] nextLine;
    Connection con = null;
    PreparedStatement ps = null;
    try {
        con = this.connection;
        con.setAutoCommit(false);
        ps = con.prepareStatement(query);
        if (truncateBeforeLoad) {
            // delete data from table before loading csv
            con.createStatement().execute("DELETE FROM " + tableName);
        }
        final int batchSize = 1000;
        int count = 0;
        Date date = null;
        while ((nextLine = csvReader.readNext()) != null) {
            if (null != nextLine) {
                int index = 1;
                for (String string : nextLine) {
                    date = DateUtil.convertToDate(string);
                    if (null != date) {
                        ps.setDate(index++, new java.sql.Date(date.getTime()));
                    } else {
                        ps.setString(index++, string);
                    }
                }
                ps.addBatch();
            }
            if (++count % batchSize == 0) {
                ps.executeBatch();
            }
        }
        ps.executeBatch(); // insert remaining records
        con.commit();
    } catch (Exception e) {
        con.rollback();
        e.printStackTrace();
        throw new Exception("Error occured while loading data from file to database." + e.getMessage());
    } finally {
        if (null != ps)
            ps.close();
        if (null != con)
            con.close();
        csvReader.close();
    }
}
From source file: coral.data.DataServiceJbdcImpl.java
@Override
public synchronized void saveOETData(String collection, ExpData stage) {
    long id = System.currentTimeMillis();
    if (id <= lastid) {
        lastid++;
        id = lastid;
    }
    lastid = id;
    PreparedStatement prep;
    try {
        prep = conn.prepareStatement("insert into states values (?, ?, ?, ?, ?, ?, ?);");
        String inmsg = (stage.inmsg.length() < 70) ? stage.inmsg : stage.inmsg.substring(0, 70);
        prep.setString(1, Long.toString(id));
        prep.setString(2, collection);
        prep.setString(3, stage.template);
        prep.setString(4, "1");
        prep.setString(5, Integer.toString(stage._msgCounter));
        prep.setString(6, Integer.toString(stage._stageCounter));
        prep.setString(7, inmsg);
        prep.addBatch();
        conn.setAutoCommit(false);
        prep.executeBatch();
        conn.setAutoCommit(true);
        conn.commit();
        put(id, collection, stage.newMap());
    } catch (SQLException e) {
        e.printStackTrace();
    }
}
From source file: org.wso2.carbon.is.migration.service.v530.dao.ClaimDAO.java
/**
 * Add claim properties
 *
 * @param connection
 * @param localClaimId
 * @param claimProperties
 * @param tenantId
 * @throws MigrationClientException
 */
private void addClaimProperties(Connection connection, int localClaimId, Map<String, String> claimProperties,
        int tenantId) throws MigrationClientException {
    PreparedStatement prepStmt = null;
    if (localClaimId > 0 && claimProperties != null) {
        try {
            String query = SQLConstants.ADD_CLAIM_PROPERTY;
            prepStmt = connection.prepareStatement(query);
            for (Map.Entry<String, String> property : claimProperties.entrySet()) {
                prepStmt.setInt(1, localClaimId);
                prepStmt.setString(2, property.getKey());
                prepStmt.setString(3, property.getValue() != null ? property.getValue() : "");
                prepStmt.setInt(4, tenantId);
                prepStmt.addBatch();
            }
            prepStmt.executeBatch();
        } catch (SQLException e) {
            throw new MigrationClientException("Error while adding claim properties", e);
        } finally {
            IdentityDatabaseUtil.closeStatement(prepStmt);
        }
    }
}
From source file: com.agiletec.plugins.jpcrowdsourcing.aps.system.services.idea.IdeaDAO.java
private void addTagsRelationsRecord(IIdea idea, PreparedStatement stat) throws ApsSystemException {
    if (idea.getTags().size() > 0) {
        try {
            Iterator<String> codeIter = idea.getTags().iterator();
            while (codeIter.hasNext()) {
                String code = codeIter.next();
                int i = 1;
                stat.setString(i++, idea.getId());
                stat.setString(i++, code);
                stat.addBatch();
                stat.clearParameters();
            }
        } catch (SQLException e) {
            _logger.error("Error adding record to table collaboration_idea_tags {}", idea.getId(), e);
            throw new RuntimeException("Error adding record to table collaboration_idea_tags", e);
        }
    }
}
From source file: netflow.DatabaseProxy.java
public void saveHosts(Map<String, HostTraffic> cache, java.util.Date date) {
    if (cache.size() == 0) {
        log.debug("Host cache empty");
        return;
    }
    log.debug("Saving " + cache.size() + " records for " + date);
    String sql = getQuery("neflow.details.insert");
    try {
        PreparedStatement pstmt = con.prepareStatement(sql);
        Timestamp t = new java.sql.Timestamp(date.getTime());
        for (String key : cache.keySet()) {
            HostTraffic traffic = cache.get(key);
            if (!hasRecord(t, traffic.getHostAddress(), traffic.getNetworkId())) {
                pstmt.setTimestamp(1, t);
                pstmt.setString(2, traffic.getHostAddress());
                pstmt.setInt(3, traffic.getNetworkId());
                pstmt.setLong(4, traffic.getInputBytes());
                pstmt.setLong(5, traffic.getOutputBytes());
                pstmt.addBatch();
            }
        }
        int[] results = pstmt.executeBatch();
        log.info("saveHosts(): saved " + results.length + " records");
        pstmt.clearParameters();
        pstmt.close();
    } catch (SQLException e) {
        log.error("Saving hosts error: " + e.getMessage());
        SQLException ex = e.getNextException();
        if (ex != null) {
            log.error(ex.getMessage());
        }
        e.printStackTrace(System.err);
    }
}
From source file: org.wso2.carbon.apimgt.migration.client.MigrateFrom110to200.java
private void deleteIdnAccessToken(PreparedStatement accessTokenDelete, ConsumerKeyDTO consumerKeyDTO)
        throws SQLException {
    accessTokenDelete.setString(1, consumerKeyDTO.getEncryptedConsumerKey());
    accessTokenDelete.addBatch();
}
From source file: org.wso2.carbon.apimgt.migration.client.MigrateFrom110to200.java
private void deleteIdnConsumerApps(PreparedStatement consumerAppsDelete, ConsumerKeyDTO consumerKeyDTO)
        throws SQLException {
    consumerAppsDelete.setString(1, consumerKeyDTO.getEncryptedConsumerKey());
    consumerAppsDelete.addBatch();
}