List of usage examples for java.sql.PreparedStatement.executeBatch()

int[] executeBatch() throws SQLException;

executeBatch() submits the parameter sets queued with addBatch() to the database as a single batch and, if all commands succeed, returns an array of update counts with one entry per queued command.
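A minimal sketch of the basic addBatch()/executeBatch() pattern before the longer real-world examples below; the users(id, name) table, the column values, and the DataSource argument are hypothetical, chosen only for illustration.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import javax.sql.DataSource;

public class BatchInsertSketch {
    // Queues several parameter sets on one PreparedStatement and sends them in a single round trip.
    static int[] insertUsers(DataSource ds) throws SQLException {
        String sql = "INSERT INTO users (id, name) VALUES (?, ?)"; // hypothetical table
        try (Connection con = ds.getConnection();
             PreparedStatement ps = con.prepareStatement(sql)) {
            for (int i = 1; i <= 3; i++) {
                ps.setInt(1, i);
                ps.setString(2, "user-" + i);
                ps.addBatch();           // queue this parameter set
            }
            return ps.executeBatch();    // one update count per queued set
        }
    }
}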
From source file:org.seasar.dbflute.logic.replaceschema.loaddata.impl.DfDelimiterDataWriterImpl.java
public void writeData(DfDelimiterDataResultInfo resultInfo) throws IOException { _log.info("/= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = "); _log.info("writeData(" + _fileName + ")"); _log.info("= = = = = = =/"); FileInputStream fis = null;/*from w w w. ja va 2 s . co m*/ InputStreamReader ir = null; BufferedReader br = null; final String dataDirectory = Srl.substringLastFront(_fileName, "/"); final LoggingInsertType loggingInsertType = getLoggingInsertType(dataDirectory); final String tableDbName; { String tmp = _fileName.substring(_fileName.lastIndexOf("/") + 1, _fileName.lastIndexOf(".")); if (tmp.indexOf("-") >= 0) { tmp = tmp.substring(tmp.indexOf("-") + "-".length()); } tableDbName = tmp; } final Map<String, DfColumnMeta> columnInfoMap = getColumnMetaMap(tableDbName); if (columnInfoMap.isEmpty()) { throwTableNotFoundException(_fileName, tableDbName); } // process before handling table beforeHandlingTable(tableDbName, columnInfoMap); String lineString = null; String preContinueString = null; String executedSql = null; final List<String> columnNameList = new ArrayList<String>(); final List<String> additionalColumnList = new ArrayList<String>(); final List<String> valueList = new ArrayList<String>(); final File dataFile = new File(_fileName); Connection conn = null; PreparedStatement ps = null; try { fis = new FileInputStream(dataFile); ir = new InputStreamReader(fis, _encoding); br = new BufferedReader(ir); FirstLineInfo firstLineInfo = null; int loopIndex = -1; int rowNumber = 0; int addedBatchSize = 0; while (true) { ++loopIndex; lineString = br.readLine(); if (lineString == null) { break; } // /- - - - - - - - - - - - - - - - - - - - - - // initialize column definition from first line // - - - - - - - - - -/ if (loopIndex == 0) { firstLineInfo = getFirstLineInfo(_delimiter, lineString); columnNameList.addAll(firstLineInfo.getColumnNameList()); if (columnNameList.isEmpty()) { throwDelimiterDataColumnDefNotFoundException(_fileName, tableDbName); } final StringSet columnSet = StringSet.createAsFlexible(); columnSet.addAll(columnNameList); for (String defaultColumn : _defaultValueMap.keySet()) { if (columnSet.contains(defaultColumn)) { continue; } additionalColumnList.add(defaultColumn); } columnNameList.addAll(additionalColumnList); continue; } // /- - - - - - - - - - - - - - - // analyze values in line strings // - - - - - - - - - -/ lineString = filterLineString(lineString); { if (preContinueString != null && !preContinueString.equals("")) { lineString = preContinueString + "\n" + lineString; } final ValueLineInfo valueLineInfo = arrangeValueList(lineString, _delimiter); final List<String> ls = valueLineInfo.getValueList(); if (valueLineInfo.isContinueNextLine()) { preContinueString = ls.remove(ls.size() - 1); valueList.addAll(ls); continue; } valueList.addAll(ls); } // *one record is prepared here // /- - - - - - - - - - - - - - // check definition differences // - - - - - - - - - -/ if (isDifferentColumnValueCount(firstLineInfo, valueList)) { String msg = "The count of values wasn't correct:"; msg = msg + " column=" + firstLineInfo.getColumnNameList().size(); msg = msg + " value=" + valueList.size(); msg = msg + " -> " + valueList; resultInfo.registerWarningFile(_fileName, msg); // clear temporary variables valueList.clear(); preContinueString = null; continue; } // *valid record is prepared here ++rowNumber; // /- - - - - - - - - - - - - - - - // process registration to database // - - - - - - - - - -/ final DfDelimiterDataWriteSqlBuilder sqlBuilder = new 
DfDelimiterDataWriteSqlBuilder(); sqlBuilder.setTableDbName(tableDbName); sqlBuilder.setColumnInfoMap(columnInfoMap); sqlBuilder.setColumnNameList(columnNameList); sqlBuilder.setValueList(valueList); sqlBuilder.setNotFoundColumnMap(resultInfo.getNotFoundColumnMap()); sqlBuilder.setConvertValueMap(_convertValueMap); sqlBuilder.setDefaultValueMap(_defaultValueMap); sqlBuilder.setBindTypeProvider(new DfColumnBindTypeProvider() { public Class<?> provideBindType(String tableName, DfColumnMeta columnMeta) { return getBindType(tableName, columnMeta); } }); if (conn == null) { conn = _dataSource.getConnection(); } if (ps == null) { executedSql = sqlBuilder.buildSql(); ps = conn.prepareStatement(executedSql); } final Map<String, Object> columnValueMap = sqlBuilder.setupParameter(); handleLoggingInsert(tableDbName, columnNameList, columnValueMap, loggingInsertType, rowNumber); int bindCount = 1; final Set<Entry<String, Object>> entrySet = columnValueMap.entrySet(); for (Entry<String, Object> entry : entrySet) { final String columnName = entry.getKey(); final Object obj = entry.getValue(); // /- - - - - - - - - - - - - - - - - - // process Null (against Null Headache) // - - - - - - - - - -/ if (processNull(tableDbName, columnName, obj, ps, bindCount, columnInfoMap)) { bindCount++; continue; } // /- - - - - - - - - - - - - - - // process NotNull and NotString // - - - - - - - - - -/ // If the value is not null and the value has the own type except string, // It registers the value to statement by the type. if (processNotNullNotString(tableDbName, columnName, obj, conn, ps, bindCount, columnInfoMap)) { bindCount++; continue; } // /- - - - - - - - - - - - - - - - - - // process NotNull and StringExpression // - - - - - - - - - -/ final String value = (String) obj; processNotNullString(dataFile, tableDbName, columnName, value, conn, ps, bindCount, columnInfoMap); bindCount++; } if (isMergedSuppressBatchUpdate(dataDirectory)) { ps.execute(); } else { ps.addBatch(); ++addedBatchSize; if (addedBatchSize == 100000) { // this is supported in only delimiter data writer // because delimiter data can treat large data ps.executeBatch(); // to avoid OutOfMemory ps.clearBatch(); // for next batch addedBatchSize = 0; } } // *one record is finished here // clear temporary variables // if an exception occurs from execute() or addBatch(), // this valueList is to be information for debug valueList.clear(); preContinueString = null; } if (ps != null && addedBatchSize > 0) { ps.executeBatch(); } noticeLoadedRowSize(tableDbName, rowNumber); checkImplicitClassification(dataFile, tableDbName, columnNameList, conn); } catch (FileNotFoundException e) { throw e; } catch (IOException e) { throw e; } catch (SQLException e) { final SQLException nextEx = e.getNextException(); if (nextEx != null && !e.equals(nextEx)) { // focus on next exception _log.warn("*Failed to register: " + e.getMessage()); String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, nextEx); throw new DfDelimiterDataRegistrationFailureException(msg, nextEx); // switch! 
} else { String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, e); throw new DfDelimiterDataRegistrationFailureException(msg, e); } } catch (RuntimeException e) { String msg = buildRegExpMessage(_fileName, tableDbName, executedSql, valueList, e); throw new DfDelimiterDataRegistrationFailureException(msg, e); } finally { try { if (fis != null) { fis.close(); } if (ir != null) { ir.close(); } if (br != null) { br.close(); } } catch (java.io.IOException ignored) { _log.warn("File-close threw the exception: ", ignored); } if (ps != null) { try { ps.close(); } catch (SQLException ignored) { _log.info("Statement.close() threw the exception!", ignored); } } if (conn != null) { try { conn.close(); } catch (SQLException ignored) { _log.info("Connection.close() threw the exception!", ignored); } } // process after (finally) handling table finallyHandlingTable(tableDbName, columnInfoMap); } }
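The delimiter-data writer above queues each row with addBatch() and, every 100,000 rows, calls executeBatch() followed by clearBatch() so a large data file never has to sit in memory as one batch, then flushes the remainder (addedBatchSize > 0) after the loop. A stripped-down sketch of that chunked-flush pattern; the event_log(id, payload) table, the row source, and the chunk size of 1,000 are hypothetical stand-ins.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

class ChunkedBatchSketch {
    // Flushes the batch every CHUNK rows to keep memory bounded, then sends the leftover rows.
    static void loadRows(Connection con, Iterable<String> payloads) throws SQLException {
        final int CHUNK = 1000; // illustrative; the writer above uses 100000
        String sql = "INSERT INTO event_log (id, payload) VALUES (?, ?)"; // hypothetical table
        try (PreparedStatement ps = con.prepareStatement(sql)) {
            int queued = 0;
            int id = 0;
            for (String payload : payloads) {
                ps.setInt(1, ++id);
                ps.setString(2, payload);
                ps.addBatch();
                if (++queued == CHUNK) {
                    ps.executeBatch();   // send this chunk
                    ps.clearBatch();     // drop the queued sets before starting the next chunk
                    queued = 0;
                }
            }
            if (queued > 0) {
                ps.executeBatch();       // flush whatever is left after the last full chunk
            }
        }
    }
}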
From source file:org.wso2.carbon.appmgt.impl.dao.AppMDAO.java
/**
 * Save XACML policies, policy group wise
 *
 * @param policyGroupId      Policy Group Id
 * @param objPartialMappings XACML policy related details object array
 * @param conn               SQL connection
 * @throws SQLException if an error occurs while saving the data to the DB
 */
private static void savePolicyPartialMappings(int policyGroupId, Object[] objPartialMappings, Connection conn)
        throws SQLException {
    String query = "INSERT INTO APM_POLICY_GRP_PARTIAL_MAPPING(POLICY_GRP_ID, POLICY_PARTIAL_ID) "
            + "VALUES(?,?) ";
    PreparedStatement preparedStatement = null;
    try {
        preparedStatement = conn.prepareStatement(query);
        for (int i = 0; i < objPartialMappings.length; i++) {
            preparedStatement.setInt(1, policyGroupId);
            preparedStatement.setInt(2, ((Double) (objPartialMappings[i])).intValue());
            preparedStatement.addBatch();
        }
        preparedStatement.executeBatch();
    } catch (SQLException e) {
        log.error("SQL Error while executing the query to save policy partial mappings: " + query
                + " : (Policy Group Id:" + policyGroupId + ", Policy Partial Mappings:" + objPartialMappings
                + ")", e);
        // A single SQL connection is passed in from the caller, so the error is logged here and the
        // SQLException is rethrown so that the caller can dispose of the connection.
        throw e;
    } finally {
        APIMgtDBUtil.closeAllConnections(preparedStatement, null, null);
    }
}
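The mapping insert above ignores the int[] that executeBatch() returns. A short sketch of reading the update counts and reacting to a failed batch; the policy_mapping(group_id, partial_id) table is hypothetical, while Statement.SUCCESS_NO_INFO, Statement.EXECUTE_FAILED, and BatchUpdateException.getUpdateCounts() are standard JDBC API.

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

class BatchResultSketch {
    // Runs a small batch and totals the row counts the driver reports for each queued command.
    static int insertMappings(Connection con, int groupId, int[] partialIds) throws SQLException {
        String sql = "INSERT INTO policy_mapping (group_id, partial_id) VALUES (?, ?)"; // hypothetical table
        try (PreparedStatement ps = con.prepareStatement(sql)) {
            for (int partialId : partialIds) {
                ps.setInt(1, groupId);
                ps.setInt(2, partialId);
                ps.addBatch();
            }
            int[] counts = ps.executeBatch();
            int affected = 0;
            for (int count : counts) {
                if (count != Statement.SUCCESS_NO_INFO) {
                    affected += count;   // row count reported for this command
                }
            }
            return affected;
        } catch (BatchUpdateException e) {
            // getUpdateCounts() describes the commands processed before the failure; some drivers
            // keep processing and mark the failed entries with Statement.EXECUTE_FAILED instead.
            int processed = e.getUpdateCounts().length;
            throw new SQLException("Batch failed after processing " + processed + " commands", e);
        }
    }
}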
From source file:org.ramadda.geodata.cdmdata.PointDatabaseTypeHandler.java
/** * _more_// ww w . j a v a 2 s . com * * * @param request _more_ * @param entry _more_ * @param metadata _more_ * @param fdp _more_ * @param connection _more_ * @param newEntry _more_ * * @throws Exception _more_ */ private void insertData(Request request, Entry entry, List<PointDataMetadata> metadata, FeatureDatasetPoint fdp, Connection connection, boolean newEntry) throws Exception { String tableName = getTableName(entry); String[] ARRAY = new String[metadata.size()]; for (PointDataMetadata pdm : metadata) { ARRAY[pdm.getColumnNumber()] = pdm.getColumnName(); } String insertString = SqlUtil.makeInsert(tableName, SqlUtil.commaNoDot(ARRAY), SqlUtil.getQuestionMarks(ARRAY.length)); double north = 0, south = 0, east = 0, west = 0; long minTime = (newEntry ? Long.MAX_VALUE : entry.getStartDate()); long maxTime = (newEntry ? Long.MIN_VALUE : entry.getEndDate()); PreparedStatement insertStmt = connection.prepareStatement(insertString); Object[] values = new Object[metadata.size()]; int cnt = 0; int batchCnt = 0; GregorianCalendar calendar = new GregorianCalendar(RepositoryUtil.TIMEZONE_DEFAULT); boolean didone = false; Hashtable properties = getProperties(entry); int baseId = Misc.getProperty(properties, PROP_ID, 0); int totalCnt = Misc.getProperty(properties, PROP_CNT, 0); long t1 = System.currentTimeMillis(); long tt1 = System.currentTimeMillis(); // for(int i=0;i<200;i++) { PointFeatureIterator pfi = CdmDataOutputHandler.getPointIterator(fdp); while (pfi.hasNext()) { PointFeature po = (PointFeature) pfi.next(); ucar.unidata.geoloc.EarthLocation el = po.getLocation(); if (el == null) { continue; } double lat = el.getLatitude(); double lon = el.getLongitude(); double alt = el.getAltitude(); Date time = po.getNominalTimeAsDate(); long tmpTime = time.getTime(); if (tmpTime < minTime) { minTime = tmpTime; } if (tmpTime > maxTime) { maxTime = tmpTime; } if (didone) { north = Math.max(north, lat); south = Math.min(south, lat); west = Math.min(west, lon); east = Math.max(east, lon); } else { north = (newEntry ? lat : entry.hasNorth() ? entry.getNorth() : lat); south = (newEntry ? lat : entry.hasSouth() ? entry.getSouth() : lat); east = (newEntry ? lon : entry.hasEast() ? entry.getEast() : lon); west = (newEntry ? lon : entry.hasWest() ? 
entry.getWest() : lon); } didone = true; calendar.setTime(time); StructureData structure = po.getData(); boolean hadAnyNumericValues = false; boolean hadGoodNumericValue = false; /* if(totalCnt<5) { StructureMembers.Member member = structure.findMember("altitude"); if(member!=null) { double d = structure.convertScalarFloat(member); } else { System.err.println("no member"); } } */ for (PointDataMetadata pdm : metadata) { Object value; if (COL_ID.equals(pdm.getColumnName())) { value = new Integer(baseId); baseId++; } else if (COL_LATITUDE.equals(pdm.getColumnName())) { value = new Double(checkWriteValue(lat)); } else if (COL_LONGITUDE.equals(pdm.getColumnName())) { value = new Double(checkWriteValue(lon)); } else if (COL_ALTITUDE.equals(pdm.getColumnName())) { value = new Double(checkWriteValue(alt)); } else if (COL_DATE.equals(pdm.getColumnName())) { value = time; } else if (COL_HOUR.equals(pdm.getColumnName())) { value = new Integer(calendar.get(GregorianCalendar.HOUR)); } else if (COL_MONTH.equals(pdm.getColumnName())) { value = new Integer(calendar.get(GregorianCalendar.MONTH)); } else { StructureMembers.Member member = structure.findMember((String) pdm.shortName); if (pdm.isString()) { value = structure.getScalarString(member); if (value == null) { value = ""; } value = value.toString().trim(); } else { double d = structure.convertScalarFloat(member); hadAnyNumericValues = true; if (d == d) { hadGoodNumericValue = true; } value = new Double(checkWriteValue(d)); } } values[pdm.getColumnNumber()] = value; } if (hadAnyNumericValues && !hadGoodNumericValue) { continue; } totalCnt++; getDatabaseManager().setValues(insertStmt, values); insertStmt.addBatch(); batchCnt++; if (batchCnt > 100) { insertStmt.executeBatch(); batchCnt = 0; } if (((++cnt) % 5000) == 0) { System.err.println("added " + cnt + " observations " + (System.currentTimeMillis() - tt1)); } } // } if (batchCnt > 0) { insertStmt.executeBatch(); } insertStmt.close(); long t2 = System.currentTimeMillis(); System.err.println("inserted " + cnt + " observations in " + (t2 - t1) + "ms"); properties.put(PROP_CNT, totalCnt + ""); properties.put(PROP_ID, baseId + ""); setProperties(entry, properties); if (didone) { entry.setWest(west); entry.setEast(east); entry.setNorth(north); entry.setSouth(south); } if (minTime != Long.MAX_VALUE) { entry.setStartDate(minTime); entry.setEndDate(maxTime); } }
From source file:com.flexive.core.storage.genericSQL.GenericHierarchicalStorage.java
/** * {@inheritDoc}/*w ww . ja va2s . c o m*/ */ @Override public FxPK contentSave(Connection con, FxEnvironment env, StringBuilder sql, FxContent content, long fqnPropertyId) throws FxInvalidParameterException, FxUpdateException, FxNoAccessException { content.getRootGroup().removeEmptyEntries(); content.getRootGroup().compactPositions(true); content.checkValidity(); FxPK pk = content.getPk(); if (pk.isNew() || !pk.isDistinctVersion()) throw new FxInvalidParameterException("PK", "ex.content.pk.invalid.save", pk); FxDelta delta; FxContent original; final FxType type = env.getType(content.getTypeId()); final UserTicket ticket = FxContext.getUserTicket(); try { FxCachedContent cachedContent = CacheAdmin.getCachedContent(pk); if (cachedContent != null) original = cachedContent.getContent().copy(); else original = contentLoad(con, content.getPk(), env, sql); original.getRootGroup().removeEmptyEntries(); original.getRootGroup().compactPositions(true); //unwrap all no access values so they can be saved if (type.isUsePropertyPermissions() && !ticket.isGlobalSupervisor()) { FxContext.get().runAsSystem(); try { FxPermissionUtils.unwrapNoAccessValues(content, original); } finally { FxContext.get().stopRunAsSystem(); } } delta = FxDelta.processDelta(original, content); } catch (FxLoadException e) { throw new FxUpdateException(e); } catch (FxNotFoundException e) { throw new FxUpdateException(e); } if (original.getStepId() != content.getStepId()) { Workflow wf = env.getWorkflow(env.getStep(content.getStepId()).getWorkflowId()); if (!wf.isRouteValid(original.getStepId(), content.getStepId())) { throw new FxInvalidParameterException("STEP", "ex.content.step.noRoute", env.getStepDefinition(env.getStep(original.getStepId()).getStepDefinitionId()).getLabel() .getBestTranslation(), env.getStepDefinition(env.getStep(content.getStepId()).getStepDefinitionId()).getLabel() .getBestTranslation()); } if (type.isTrackHistory()) EJBLookup.getHistoryTrackerEngine().track(type, content.getPk(), null, "history.content.step.change", env.getStepDefinition(env.getStep(original.getStepId()).getStepDefinitionId()).getName(), env.getStepDefinition(env.getStep(content.getStepId()).getStepDefinitionId()).getName()); } if (!delta.changes()) { if (LOG.isDebugEnabled()) { LOG.debug("====== NO CHANGES ======="); } return pk; } else { if (LOG.isDebugEnabled()) { LOG.debug(delta.dump()); } } FulltextIndexer ft = getFulltextIndexer(pk, con); FxFlatStorage fs = type.isContainsFlatStorageAssignments() ? FxFlatStorageManager.getInstance() : null; if (type.isUsePropertyPermissions() && !ticket.isGlobalSupervisor()) FxPermissionUtils.checkPropertyPermissions(content.getLifeCycleInfo().getCreatorId(), delta, ACLPermission.EDIT); lockTables(con, pk.getId(), pk.getVersion()); if (delta.isInternalPropertyChanged()) { updateMainEntry(con, content); } else if (delta.isGroupDataChanged()) { updateGroupPositions(con, content); } try { disableDetailUniqueChecks(con); //full replace code start // removeDetailEntriesVersion(con, pk); // createDetailEntries(con, env, sql, pk, content.isMaxVersion(), content.isLiveVersion(), content.getData("/")); //full replace code end boolean checkScripting = type.hasScriptedAssignments(); FxScriptBinding binding = null; ScriptingEngine scripting = null; if (checkScripting) { scripting = EJBLookup.getScriptingEngine(); binding = new FxScriptBinding(); binding.setVariable("content", content); } //before... 
scripts if (checkScripting) { //delta-deletes: for (FxDelta.FxDeltaChange change : delta.getRemoves()) { for (long scriptId : change.getOriginalData().getAssignment() .getScriptMapping(FxScriptEvent.BeforeDataChangeDelete)) { binding.setVariable("change", change); scripting.runScript(scriptId, binding); } } //delta-updates: for (FxDelta.FxDeltaChange change : delta.getUpdates()) { for (long scriptId : change.getOriginalData().getAssignment() .getScriptMapping(FxScriptEvent.BeforeDataChangeUpdate)) { binding.setVariable("change", change); scripting.runScript(scriptId, binding); } } //delta-adds: for (FxDelta.FxDeltaChange change : delta.getAdds()) { for (long scriptId : change.getNewData().getAssignment() .getScriptMapping(FxScriptEvent.BeforeDataChangeAdd)) { binding.setVariable("change", change); scripting.runScript(scriptId, binding); } } //reprocess deltas incase scripts performed any changes to data delta = FxDelta.processDelta(original, content); } //delta-deletes: for (FxDelta.FxDeltaChange change : delta.getRemoves()) { if (type.isUsePropertyPermissions() && change.isProperty()) { final ACL deltaACL = type.getPropertyAssignment(change.getXPath()).getACL(); if (!ticket.mayDeleteACL(deltaACL.getId(), content.getLifeCycleInfo().getCreatorId())) throw new FxNoAccessException("ex.acl.noAccess.property.delete", deltaACL.getDisplayName(), change.getXPath()); } if (!change.getOriginalData().isSystemInternal()) { deleteDetailData(con, sql, pk, change.getOriginalData()); if (change.isProperty()) { //check if the removed property is a FQN if (((FxPropertyData) change.getOriginalData()).getPropertyId() == fqnPropertyId) { syncFQNName(con, content, pk, change); } } ft.index(change); } } //delta-updates: List<FxDelta.FxDeltaChange> updatesRemaining = new ArrayList<FxDelta.FxDeltaChange>(delta.getUpdates()); PreparedStatement ps_insert = null; PreparedStatement ps_update = null; try { ps_insert = con.prepareStatement(CONTENT_DATA_INSERT); ps_update = con.prepareStatement(CONTENT_DATA_UPDATE); while (updatesRemaining.size() > 0) { FxDelta.FxDeltaChange change = updatesRemaining.get(0); //noinspection CaughtExceptionImmediatelyRethrown try { if (!change.getOriginalData().isSystemInternal()) { if (change.isGroup()) { if (change.isPositionChange() && !change.isDataChange()) { //groups can only change position updatePropertyData(change, null, null, con, ps_update, pk, null); } } else { FxProperty prop = env .getProperty(((FxPropertyData) change.getNewData()).getPropertyId()); if (!change._isUpdateable()) { deleteDetailData(con, sql, pk, change.getOriginalData()); insertPropertyData(prop, content.getData("/"), con, ps_insert, null, pk, ((FxPropertyData) change.getNewData()), content.isMaxVersion(), content.isLiveVersion()); } else { updatePropertyData(change, prop, content.getData("/"), con, ps_update, pk, ((FxPropertyData) change.getNewData())); } //check if the property changed is a FQN if (prop.getId() == fqnPropertyId) { syncFQNName(con, content, pk, change); } } } updatesRemaining.remove(0); ft.index(change); } catch (SQLException e) { change._increaseRetries(); if (change._getRetryCount() > 100) throw e; updatesRemaining.remove(0); updatesRemaining.add(change); //add as last } } //flatstorage adds/updates if (fs != null && delta.getFlatStorageAddsUpdates().size() > 0) fs.setPropertyData(con, pk, type.getId(), content.getStepId(), content.isMaxVersion(), content.isLiveVersion(), delta.getFlatStorageAddsUpdates(), false); //delta-adds: for (FxDelta.FxDeltaChange change : delta.getAdds()) { if 
(type.isUsePropertyPermissions() && change.isProperty()) { final ACL acl = type.getPropertyAssignment(change.getXPath()).getACL(); if (!ticket.mayCreateACL(acl.getId(), content.getLifeCycleInfo().getCreatorId())) throw new FxNoAccessException("ex.acl.noAccess.property.create", acl.getDisplayName(), change.getXPath()); } if (!change.getNewData().isSystemInternal() && change.isProperty()) { final FxProperty prop = env .getProperty(((FxPropertyData) change.getNewData()).getPropertyId()); insertPropertyData(prop, content.getData("/"), con, ps_insert, null, pk, ((FxPropertyData) change.getNewData()), content.isMaxVersion(), content.isLiveVersion()); ft.index(change); //check if the property changed is a FQN if (prop.getId() == fqnPropertyId) { syncFQNName(con, content, pk, change); } } } ps_update.executeBatch(); ps_insert.executeBatch(); } finally { Database.closeObjects(GenericHierarchicalStorage.class, ps_update, ps_insert); } checkUniqueConstraints(con, env, sql, pk, content.getTypeId()); if (delta.isInternalPropertyChanged()) { final boolean stepsUpdated = updateStepDependencies(con, content.getPk().getId(), content.getPk().getVersion(), env, type, content.getStepId()); fixContentVersionStats(con, env, type, content.getPk().getId(), false, stepsUpdated); } content.resolveBinaryPreview(); if (original.getBinaryPreviewId() != content.getBinaryPreviewId() || original.getBinaryPreviewACL() != content.getBinaryPreviewACL()) binaryStorage.updateContentBinaryEntry(con, pk, content.getBinaryPreviewId(), content.getBinaryPreviewACL()); enableDetailUniqueChecks(con); if (!content.isForceLifeCycle()) //only update the lci if not forced to keep LifeCycleInfoImpl.updateLifeCycleInfo(TBL_CONTENT, "ID", "VER", content.getPk().getId(), content.getPk().getVersion(), false, false); //after... scripts if (checkScripting) { //delta-deletes: for (FxDelta.FxDeltaChange change : delta.getRemoves()) { for (long scriptId : change.getOriginalData().getAssignment() .getScriptMapping(FxScriptEvent.AfterDataChangeDelete)) { binding.setVariable("change", change); scripting.runScript(scriptId, binding); } } //delta-updates: for (FxDelta.FxDeltaChange change : delta.getUpdates()) { for (long scriptId : change.getOriginalData().getAssignment() .getScriptMapping(FxScriptEvent.AfterDataChangeUpdate)) { binding.setVariable("change", change); scripting.runScript(scriptId, binding); } } //delta-adds: for (FxDelta.FxDeltaChange change : delta.getAdds()) { for (long scriptId : change.getNewData().getAssignment() .getScriptMapping(FxScriptEvent.AfterDataChangeAdd)) { binding.setVariable("change", change); scripting.runScript(scriptId, binding); } } } ft.commitChanges(); if (type.isTrackHistory()) { HistoryTrackerEngine tracker = EJBLookup.getHistoryTrackerEngine(); XStream xs = ConversionEngine.getXStream(); for (FxDelta.FxDeltaChange add : delta.getAdds()) tracker.track(type, pk, add.getNewData().isGroup() ? null : xs.toXML(((FxPropertyData) add.getNewData()).getValue()), "history.content.data.add", add.getXPath()); for (FxDelta.FxDeltaChange remove : delta.getRemoves()) tracker.track(type, pk, remove.getOriginalData().isGroup() ? 
null : xs.toXML(((FxPropertyData) remove.getOriginalData()).getValue()), "history.content.data.removed", remove.getXPath()); for (FxDelta.FxDeltaChange update : delta.getUpdates()) { if (update.isPositionChangeOnly()) tracker.track(type, pk, null, "history.content.data.update.posOnly", update.getXPath(), update.getOriginalData().getPos(), update.getNewData().getPos()); else if (update.isPositionChange()) tracker.track(type, pk, update.getNewData().isGroup() ? null : xs.toXML(((FxPropertyData) update.getNewData()).getValue()), "history.content.data.update.pos", update.getXPath(), update.getOriginalData().getPos(), update.getNewData().getPos()); else tracker.track(type, pk, update.getNewData().isGroup() ? null : "<original>\n" + xs.toXML(((FxPropertyData) update.getOriginalData()).getValue()) + "\n</original>\n" + "<new>\n" + xs.toXML(((FxPropertyData) update.getNewData()).getValue()) + "\n</new>\n", "history.content.data.update", update.getXPath()); } } } catch (FxCreateException e) { throw new FxUpdateException(e); } catch (FxApplicationException e) { throw new FxUpdateException(e); } catch (SQLException e) { throw new FxUpdateException(LOG, e, "ex.db.sqlError", e.getMessage()); } catch (Exception e) { throw new FxUpdateException(LOG, e, "ex.content.save.error", pk, e); } finally { ft.cleanup(); } return content.getPk(); }
From source file:com.att.pirates.controller.ProjectController.java
private void processProjectAppOwnersByArtifactName(List<ProjectAppOwnerModel> existingOwners, List<ProjectAppOwnerModel> userSelectedOwners, String artifactName, String dueDateForArtifact, String UUID, int impactId) { if (impactId == 4 && artifactName.equalsIgnoreCase(PiratesConstants.ISTExec)) { return;/*from w w w .j a v a 2 s . c o m*/ } Connection con = null; PreparedStatement updateRow = null; PreparedStatement deleteRow = null; PreparedStatement insertRow = null; PreparedStatement syncRow = null; SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String today = format.format(Calendar.getInstance().getTime()); String insertSQL = " INSERT INTO [AppProjectArtifactOwners] " + " ([UUID] " + " ,[DueDate] " + " ,[ArtifactName] " + " ,[PRISMId] " + " ,[ApplicationName] " + " ,[ModuleId] " + " ,[IsPrimaryOwner] " + " ,[MileStoneId] " + " ,[DateCreated] " + " ,[UpdatedByUUID] " + " ,[SystemNote]) " + " VALUES " + " ( ?,?,?,?,?,?,?,?,?,?,?) "; String updateSQL = " UPDATE AppProjectArtifactOwners " + " SET DueDate = ? " + " ,UpdatedByUUID = ? " + " ,SystemNote = ? " + " WHERE " + " UUID = ? and ArtifactName = ? and PRISMId = ? and ApplicationName = ? and ModuleId = ? and MileStoneId = ? "; String deleteSQL = " DELETE FROM AppProjectArtifactOwners " + " WHERE " + " UUID = ? and ArtifactName = ? and PRISMId = ? and ApplicationName = ? and ModuleId = ? and MileStoneId = ? "; // we need to make sure duedates for each artifact for all applications in this project // to be in sync String syncSQL = " Update a " + " Set a.duedate = t.duedate " + " from AppProjectArtifactOwners a " + " join " + " ( " + " select prismid, applicationname, artifactname, duedate " + " from AppProjectArtifactOwners " + " Where artifactname = ? and prismid = ? " + " And applicationname = ? " + " and isprimaryowner = 1 " + " ) t " + " on a.prismid=t.prismid and a.artifactname=t.artifactname " + " and a.applicationname <> t.applicationname " + " where a.isprimaryowner = 1 "; try { List<String> artifactNames = new ArrayList<String>(); String prismId = ""; String applicationName = ""; con = DBUtility.getDBConnection(); con.setAutoCommit(false); insertRow = con.prepareStatement(insertSQL); // inserts boolean isAdd = false; for (ProjectAppOwnerModel o : userSelectedOwners) { if (o.getIsNew()) { if (!isAdd) isAdd = true; logger.error(msgHeader + " userSelectedOwners " + artifactName + " " + o.getUUID() + " is being added.."); insertRow.setString(1, o.getUUID()); insertRow.setString(2, o.getDueDate()); insertRow.setString(3, o.getArtifactName()); insertRow.setString(4, o.getPrismId()); insertRow.setString(5, o.getApplicationName()); insertRow.setInt(6, Integer.valueOf(o.getModuleId())); insertRow.setInt(7, 1); insertRow.setInt(8, Integer.valueOf(o.getMileStoneId())); insertRow.setString(9, today); insertRow.setString(10, o.getUpdatedByUUID()); insertRow.setString(11, "Record created by " + o.getUpdatedByUUID() + " on " + o.getDateCreated()); insertRow.addBatch(); // update artifactNames list if (!artifactNames.contains(o.getArtifactName())) { artifactNames.add(o.getArtifactName()); } if (prismId.isEmpty()) { prismId = o.getPrismId(); } if (applicationName.isEmpty()) { applicationName = o.getApplicationName(); } } } if (isAdd) insertRow.executeBatch(); if (!isAdd) logger.error(msgHeader + " processProjectAppOwnersByArtifactName.. 
nothing to insert"); // updates boolean isUpdate = false; updateRow = con.prepareStatement(updateSQL); for (ProjectAppOwnerModel o : existingOwners) { if (o.getIsNew()) { // do nothing here... } else { SimpleDateFormat yFormat = new SimpleDateFormat("MM/dd/yyyy"); Date userSelected = yFormat.parse(dueDateForArtifact); Date existing = yFormat.parse(o.getDueDate()); if (existing.compareTo(userSelected) == 0) { logger.error(msgHeader + " new duedate: " + dueDateForArtifact + " is the same as existing duedate " + o.getDueDate() + " , nothing to do here..."); } else { if (!isUpdate) isUpdate = true; logger.error(msgHeader + " existingOwners " + artifactName + " " + o.getUUID() + " is being updated.."); updateRow.setString(1, dueDateForArtifact); updateRow.setString(2, UUID); updateRow.setString(3, "Record updated by " + UUID + " on " + today); updateRow.setString(4, o.getUUID()); updateRow.setString(5, o.getArtifactName()); updateRow.setString(6, o.getPrismId()); updateRow.setString(7, o.getApplicationName()); updateRow.setInt(8, Integer.valueOf(o.getModuleId())); updateRow.setInt(9, Integer.valueOf(o.getMileStoneId())); updateRow.addBatch(); // update artifactNames list if (!artifactNames.contains(o.getArtifactName())) { artifactNames.add(o.getArtifactName()); } if (prismId.isEmpty()) { prismId = o.getPrismId(); } if (applicationName.isEmpty()) { applicationName = o.getApplicationName(); } } } } if (isUpdate) updateRow.executeBatch(); if (!isUpdate) logger.error(msgHeader + " processProjectAppOwnersByArtifactName.. nothing to update"); // deletes boolean isDelete = false; deleteRow = con.prepareStatement(deleteSQL); for (ProjectAppOwnerModel o : existingOwners) { if (o.getIsNew()) { if (!isDelete) isDelete = true; logger.error(msgHeader + " existingOwners " + artifactName + " " + o.getUUID() + " is being deleted.."); deleteRow.setString(1, o.getUUID()); deleteRow.setString(2, o.getArtifactName()); deleteRow.setString(3, o.getPrismId()); deleteRow.setString(4, o.getApplicationName()); deleteRow.setInt(5, Integer.valueOf(o.getModuleId())); deleteRow.setInt(6, Integer.valueOf(o.getMileStoneId())); deleteRow.addBatch(); } else { // do nothing here } } if (isDelete) deleteRow.executeBatch(); if (!isDelete) logger.error(msgHeader + " processProjectAppOwnersByArtifactName.. nothing to delete"); if (isAdd || isUpdate || isDelete) { // sync up same artifact, same project different application's due dates if (!artifactNames.isEmpty()) { syncRow = con.prepareStatement(syncSQL); for (String a : artifactNames) { logger.error("Setting syncup parameters.. artifactname: " + a + ", prismId: " + prismId + ", applicationName: " + applicationName); syncRow.setString(1, a); syncRow.setString(2, prismId); syncRow.setString(3, applicationName); syncRow.addBatch(); } syncRow.executeBatch(); } con.commit(); } else { logger.error(msgHeader + " processProjectAppOwnersByArtifactName.. nothing to commit"); } } catch (SQLException e) { if (con != null) { try { logger.error(e.getMessage() + ", processProjectAppOwnersByArtifactName.. Transaction is being rolled back.. " + e.getMessage()); con.rollback(); } catch (SQLException excep) { logger.error(excep.getMessage() + ", processProjectAppOwnersByArtifactName.. Transaction is being rolled back.." + excep.getMessage()); try { con.rollback(); } catch (SQLException logOrIgnore) { } } } } catch (Exception ex) { logger.error(ex + ", processProjectAppOwnersByArtifactName.. Transaction is being rolled back.." 
+ ex.getMessage()); try { con.rollback(); } catch (SQLException logOrIgnore) { } } finally { if (updateRow != null) { try { updateRow.close(); } catch (SQLException e) { } } if (deleteRow != null) { try { deleteRow.close(); } catch (SQLException e) { } } if (insertRow != null) { try { insertRow.close(); } catch (SQLException e) { } } if (con != null) { try { con.setAutoCommit(true); con.close(); } catch (SQLException logOrIgnore) { } } } }
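The controller above turns auto-commit off, runs its insert, update, delete, and sync batches on the same connection, commits only when at least one batch actually executed, rolls back on SQLException, and restores auto-commit in the finally block. A condensed sketch of that transactional batching pattern; the project_owner table and the two statements are hypothetical, and the logging from the original is omitted.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

class TransactionalBatchSketch {
    // Replaces the owner rows for a project atomically: both batches commit together or not at all.
    static void replaceOwners(Connection con, int projectId, String[] newOwners) throws SQLException {
        String deleteSql = "DELETE FROM project_owner WHERE project_id = ?";              // hypothetical
        String insertSql = "INSERT INTO project_owner (project_id, owner) VALUES (?, ?)"; // hypothetical
        boolean previousAutoCommit = con.getAutoCommit();
        con.setAutoCommit(false);
        try (PreparedStatement delete = con.prepareStatement(deleteSql);
             PreparedStatement insert = con.prepareStatement(insertSql)) {
            delete.setInt(1, projectId);
            delete.addBatch();
            delete.executeBatch();
            for (String owner : newOwners) {
                insert.setInt(1, projectId);
                insert.setString(2, owner);
                insert.addBatch();
            }
            insert.executeBatch();
            con.commit();
        } catch (SQLException e) {
            con.rollback();              // undo both batches
            throw e;
        } finally {
            con.setAutoCommit(previousAutoCommit);
        }
    }
}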
From source file:com.emr.utilities.CSVLoader.java
/** * Parse CSV file using OpenCSV library and load in * given database table. /*from w w w . j av a 2s. c o m*/ * @param csvFile {@link String} Input CSV file * @param tableName {@link String} Database table name to import data * @param truncateBeforeLoad {@link boolean} Truncate the table before inserting * new records. * @param destinationColumns {@link String[]} Array containing the destination columns */ public void loadCSV(String csvFile, String tableName, boolean truncateBeforeLoad, String[] destinationColumns, List columnsToBeMapped) throws Exception { CSVReader csvReader = null; if (null == this.connection) { throw new Exception("Not a valid connection."); } try { csvReader = new CSVReader(new FileReader(csvFile), this.seprator); } catch (Exception e) { String stacktrace = org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace(e); JOptionPane.showMessageDialog(null, "Error occured while executing file. Error Details: " + stacktrace, "File Error", JOptionPane.ERROR_MESSAGE); throw new Exception("Error occured while executing file. " + stacktrace); } String[] headerRow = csvReader.readNext(); if (null == headerRow) { throw new FileNotFoundException( "No columns defined in given CSV file." + "Please check the CSV file format."); } //Get indices of columns to be mapped List mapColumnsIndices = new ArrayList(); for (Object o : columnsToBeMapped) { String column = (String) o; column = column.substring(column.lastIndexOf(".") + 1, column.length()); int i; for (i = 0; i < headerRow.length; i++) { if (headerRow[i].equals(column)) { mapColumnsIndices.add(i); } } } String questionmarks = StringUtils.repeat("?,", headerRow.length); questionmarks = (String) questionmarks.subSequence(0, questionmarks.length() - 1); String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName); query = query.replaceFirst(KEYS_REGEX, StringUtils.join(destinationColumns, ",")); query = query.replaceFirst(VALUES_REGEX, questionmarks); String log_query = query.substring(0, query.indexOf("VALUES(")); String[] nextLine; Connection con = null; PreparedStatement ps = null; PreparedStatement ps2 = null; PreparedStatement reader = null; ResultSet rs = null; try { con = this.connection; con.setAutoCommit(false); ps = con.prepareStatement(query); File file = new File("sqlite/db"); if (!file.exists()) { file.createNewFile(); } db = new SQLiteConnection(file); db.open(true); //if destination table==person, also add an entry in the table person_identifier //get column indices for the person_id and uuid columns int person_id_column_index = -1; int uuid_column_index = -1; int maxLength = 100; int firstname_index = -1; int middlename_index = -1; int lastname_index = -1; int clanname_index = -1; int othername_index = -1; if (tableName.equals("person")) { int i; ps2 = con.prepareStatement( "insert ignore into person_identifier(person_id,identifier_type_id,identifier) values(?,?,?)"); for (i = 0; i < headerRow.length; i++) { if (headerRow[i].equals("person_id")) { person_id_column_index = i; } if (headerRow[i].equals("uuid")) { uuid_column_index = i; } /*if(headerRow[i].equals("first_name")){ System.out.println("Found firstname index: " + i); firstname_index=i; } if(headerRow[i].equals("middle_name")){ System.out.println("Found firstname index: " + i); middlename_index=i; } if(headerRow[i].equals("last_name")){ System.out.println("Found firstname index: " + i); lastname_index=i; } if(headerRow[i].equals("clan_name")){ System.out.println("Found firstname index: " + i); clanname_index=i; } 
if(headerRow[i].equals("other_name")){ System.out.println("Found firstname index: " + i); othername_index=i; }*/ } } if (truncateBeforeLoad) { //delete data from table before loading csv try (Statement stmnt = con.createStatement()) { stmnt.execute("DELETE FROM " + tableName); stmnt.close(); } } if (tableName.equals("person")) { try (Statement stmt2 = con.createStatement()) { stmt2.execute( "ALTER TABLE person CHANGE COLUMN first_name first_name VARCHAR(50) NULL DEFAULT NULL AFTER person_guid,CHANGE COLUMN middle_name middle_name VARCHAR(50) NULL DEFAULT NULL AFTER first_name,CHANGE COLUMN last_name last_name VARCHAR(50) NULL DEFAULT NULL AFTER middle_name;"); stmt2.close(); } } final int batchSize = 1000; int count = 0; Date date = null; while ((nextLine = csvReader.readNext()) != null) { if (null != nextLine) { int index = 1; int person_id = -1; String uuid = ""; int identifier_type_id = 3; if (tableName.equals("person")) { reader = con.prepareStatement( "select identifier_type_id from identifier_type where identifier_type_name='UUID'"); rs = reader.executeQuery(); if (!rs.isBeforeFirst()) { //no uuid row //insert it Integer numero = 0; Statement stmt = con.createStatement(); numero = stmt.executeUpdate( "insert into identifier_type(identifier_type_id,identifier_type_name) values(50,'UUID')", Statement.RETURN_GENERATED_KEYS); ResultSet rs2 = stmt.getGeneratedKeys(); if (rs2.next()) { identifier_type_id = rs2.getInt(1); } rs2.close(); stmt.close(); } else { while (rs.next()) { identifier_type_id = rs.getInt("identifier_type_id"); } } } int counter = 1; String temp_log = log_query + "VALUES("; //string to be logged for (String string : nextLine) { //if current index is in the list of columns to be mapped, we apply that mapping for (Object o : mapColumnsIndices) { int i = (int) o; if (index == (i + 1)) { //apply mapping to this column string = applyDataMapping(string); } } if (tableName.equals("person")) { //get person_id and uuid if (index == (person_id_column_index + 1)) { person_id = Integer.parseInt(string); } if (index == (uuid_column_index + 1)) { uuid = string; } } //check if string is a date if (string.matches("\\d{2}-[a-zA-Z]{3}-\\d{4} \\d{2}:\\d{2}:\\d{2}") || string.matches("\\d{2}-[a-zA-Z]{3}-\\d{4}")) { java.sql.Date dt = formatDate(string); temp_log = temp_log + "'" + dt.toString() + "'"; ps.setDate(index++, dt); } else { if ("".equals(string)) { temp_log = temp_log + "''"; ps.setNull(index++, Types.NULL); } else { temp_log = temp_log + "'" + string + "'"; ps.setString(index++, string); } } if (counter < headerRow.length) { temp_log = temp_log + ","; } else { temp_log = temp_log + ");"; System.out.println(temp_log); } counter++; } if (tableName.equals("person")) { if (!"".equals(uuid) && person_id != -1) { ps2.setInt(1, person_id); ps2.setInt(2, identifier_type_id); ps2.setString(3, uuid); ps2.addBatch(); } } ps.addBatch(); } if (++count % batchSize == 0) { ps.executeBatch(); if (tableName.equals("person")) { ps2.executeBatch(); } } } ps.executeBatch(); // insert remaining records if (tableName.equals("person")) { ps2.executeBatch(); } con.commit(); } catch (Exception e) { if (con != null) con.rollback(); if (db != null) db.dispose(); String stacktrace = org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace(e); JOptionPane.showMessageDialog(null, "Error occured while executing file. Error Details: " + stacktrace, "File Error", JOptionPane.ERROR_MESSAGE); throw new Exception("Error occured while executing file. 
" + stacktrace); } finally { if (null != reader) reader.close(); if (null != ps) ps.close(); if (null != ps2) ps2.close(); if (null != con) con.close(); csvReader.close(); } }
From source file:com.zimbra.cs.db.DbMailItem.java
public static void setFolder(List<Message> msgs, Folder folder) throws ServiceException {
    if (msgs == null || msgs.isEmpty()) {
        return;
    }
    Mailbox mbox = folder.getMailbox();
    DbConnection conn = mbox.getOperationConnection();
    PreparedStatement stmt = null;
    try {
        // commented out because at present messages cannot have names (and thus can't have naming conflicts)
        // if (!Db.supports(Db.Capability.UNIQUE_NAME_INDEX) || Db.supports(Db.Capability.CASE_SENSITIVE_COMPARISON)) {
        //     stmt = conn.prepareStatement("SELECT mi.name" +
        //             " FROM " + getMailItemTableName(mbox, "mi") + ", " + getMailItemTableName(mbox, "m2") +
        //             " WHERE mi.id IN " + DbUtil.suitableNumberOfVariables(itemIDs) +
        //             " AND mi.name IS NOT NULL and m2.name IS NOT NULL" +
        //             " AND m2.folder_id = ? AND mi.id <> m2.id" +
        //             " AND " + (Db.supports(Db.Capability.CASE_SENSITIVE_COMPARISON) ? "UPPER(mi.name) = UPPER(m2.name)" : "mi.name = m2.name") +
        //             " AND mi.mailbox_id = ? AND m2.mailbox_id = ?");
        //     int pos = 1;
        //     for (Message msg : msgs)
        //         stmt.setInt(pos++, msg.getId());
        //     stmt.setInt(pos++, folder.getId());
        //     stmt.setInt(pos++, mbox.getId());
        //     stmt.setInt(pos++, mbox.getId());
        //     rs = stmt.executeQuery();
        //     if (rs.next())
        //         throw MailServiceException.ALREADY_EXISTS(rs.getString(1));
        //     rs.close();
        //     stmt.close();
        // }
        int count = 0;
        int batchSize = 500;
        String imapRenumber = mbox.isTrackingImap()
                ? ", imap_id = CASE WHEN imap_id IS NULL THEN NULL ELSE 0 END"
                : "";
        stmt = conn.prepareStatement("UPDATE " + getMailItemTableName(folder)
                + " SET folder_id = ?, prev_folders=?, mod_metadata = ?, change_date = ?" + imapRenumber
                + " WHERE " + IN_THIS_MAILBOX_AND + "id = ?");
        int modseq = mbox.getOperationChangeID();
        for (int j = 0; j < msgs.size(); j++) {
            int pos = 1;
            stmt.setInt(pos++, folder.getId());
            UnderlyingData ud = msgs.get(j).getUnderlyingData();
            // prev folders ordered by modseq ascending, e.g. 100:2;200:101;300:5
            if (msgs.get(j).getFolderId() != folder.getId()) {
                String prevFolders = ud.getPrevFolders();
                if (!StringUtil.isNullOrEmpty(prevFolders)) {
                    String[] modseq2FolderId = prevFolders.split(";");
                    int maxCount = mbox.getAccount().getServer().getPrevFoldersToTrackMax();
                    if (modseq2FolderId.length < maxCount) {
                        prevFolders += ";" + modseq + ":" + ud.folderId;
                    } else {
                        // reached max, get rid of the oldest one
                        String[] tmp = new String[maxCount];
                        System.arraycopy(modseq2FolderId, 1, tmp, 0, maxCount - 1);
                        tmp[maxCount - 1] = modseq + ":" + ud.folderId;
                        prevFolders = StringUtil.join(";", tmp);
                    }
                } else {
                    prevFolders = modseq + ":" + ud.folderId;
                }
                stmt.setString(pos++, prevFolders);
                ud.setPrevFolders(prevFolders);
            } else {
                stmt.setString(pos++, msgs.get(j).getUnderlyingData().getPrevFolders());
            }
            stmt.setInt(pos++, modseq);
            stmt.setInt(pos++, mbox.getOperationTimestamp());
            pos = setMailboxId(stmt, mbox, pos);
            stmt.setInt(pos++, msgs.get(j).getId());
            stmt.addBatch();
            if (++count % batchSize == 0) {
                stmt.executeBatch();
            }
        }
        stmt.executeBatch();
        stmt.close();
        stmt = null;
    } catch (SQLException e) {
        // catch item_id uniqueness constraint violation and return failure
        // if (Db.errorMatches(e, Db.Error.DUPLICATE_ROW))
        //     throw MailServiceException.ALREADY_EXISTS(msgs.toString(), e);
        // else
        throw ServiceException.FAILURE("writing new folder data for messages", e);
    } finally {
        DbPool.closeStatement(stmt);
    }
}
From source file:com.pari.nm.utils.db.InventoryDBHelper.java
public static void insertSnmpScalarsInBatch(int deviceId, Map<String, ISnmpScalar> snmpScalars, String type) {
    String insQuery = DBHelperConstants.INSERT_SNMP_MIB_QUERY;
    Connection con = null;
    PreparedStatement ps = null;
    try {
        con = DBHelper.getConnection();
        ps = con.prepareStatement(insQuery);
        con.setAutoCommit(false);
        for (String oid : snmpScalars.keySet()) {
            ISnmpScalar value = snmpScalars.get(oid);
            String xmlValue = value.toXml();
            ps.setInt(1, deviceId);
            ps.setString(2, oid);
            // There's no table oid for scalar values
            ps.setString(3, null);
            ps.setString(6, null);
            // TODO: Do we need to compress data before storing?
            if (ps instanceof OraclePreparedStatement) {
                ((OraclePreparedStatement) ps).setStringForClob(4, xmlValue);
            } else {
                logger.debug("PS is not OraclePreparedStatement, inserting as regular string");
                ps.setString(4, xmlValue);
            }
            ps.setString(5, type);
            ps.addBatch();
        }
        ps.executeBatch();
        con.commit();
    } catch (SQLException sqlex) {
        logger.error("Error while inserting rows to database", sqlex);
        try {
            if (con != null) {
                con.rollback();
            }
        } catch (SQLException ex) {
            logger.error("Error while calling rollback on db conn", ex);
        }
    } catch (Exception ex) {
        logger.error("Error while inserting snmp data in batch", ex);
    } finally {
        try {
            if (con != null) {
                con.setAutoCommit(true);
            }
        } catch (SQLException sqlex) {
            logger.error("Error while calling setAutoCommit", sqlex);
        }
        try {
            ps.close();
        } catch (SQLException sqlex) {
            logger.error("Error while closing ps", sqlex);
        }
        DBHelper.releaseConnection(con);
    }
}
From source file:com.viettel.logistic.wms.dao.StockGoodsSerialDAO.java
public ResultDTO importStockGoodsSerialBatch(StockTransDTO stockTrans, StockTransDetailDTO stockTransDetail, List<StockTransSerialDTO> lstStockTransSerial, Connection connection, String serialStatus) { ResultDTO resultDTO = new ResultDTO(); //connection. //THONG TIN SO LUONG NHAP Double amount = 0D;//from w ww .j ava 2s. co m Double amountIssue = 0D; //PREPARE STATEMENTS PreparedStatement prstmtInsertStockTransSerial; PreparedStatement prstmtInsertStockGoodsSerial; //SQL StringBuilder sqlStockGoodsSerial = new StringBuilder(); StringBuilder sqlStockTransSerial = new StringBuilder(); String serial; String prefixSerial = ""; String suffixFromSerial; String suffixToSerial; String fromSerial; String toSerial; int numberNeedToCommit = 0; int numberOfSuccess = 0; int numberOfFail = 0; // List paramsStockTransSerial; List paramsStockGoodsSerial; try { //1.KHOI TAO SESSION //2.1 TAO STATEMENTS STOCK_GOODS_SERIAL sqlStockGoodsSerial .append(" INSERT INTO stock_goods_serial (id, cust_id, owner_id, owner_type, goods_id,"); sqlStockGoodsSerial.append(" goods_state, status,sale_type, change_user,"); sqlStockGoodsSerial.append(" price,channel_type_id, barcode, change_date,"); sqlStockGoodsSerial.append( " import_date, sale_date, bincode, add_infor, cell_code,serial,partner_id,import_stock_trans_id,order_id) "); sqlStockGoodsSerial.append( " VALUES (STOCK_GOODS_SERIAL_SEQ.nextval,?,?,?,?,?,?,?,?,TO_NUMBER(?),TO_NUMBER(?),?,to_date(?,'dd/MM/yyyy hh24:mi:ss'),to_date(?,'dd/MM/yyyy hh24:mi:ss'),?,?,?,?,?,TO_NUMBER(?),TO_NUMBER(?),TO_NUMBER(?)) "); sqlStockGoodsSerial.append(" LOG ERRORS REJECT LIMIT UNLIMITED "); //2.2 TAO STATEMENTS STOCK_TRANS_SERIAL sqlStockTransSerial.append("INSERT INTO stock_trans_serial "); sqlStockTransSerial.append(" ( stock_trans_serial_id, stock_trans_id," + " stock_trans_detail_id, stock_trans_date, goods_id," + " goods_code, goods_name, goods_state, goods_unit_type," + " from_serial, to_serial," + " amount_order, amount_real, bincode, barcode, " + " create_datetime," + " cell_code ) "); sqlStockTransSerial.append( " VALUES (STOCK_TRANS_SERIAL_SEQ.nextval,?,?,to_date(?,'dd/MM/yyyy hh24:mi:ss'),?,?,?,?,?,?,?,?,?,?,?,to_date(?,'dd/MM/yyyy hh24:mi:ss'),?) LOG ERRORS REJECT LIMIT UNLIMITED "); //3. 
TAO PREPARE STATEMENT prstmtInsertStockTransSerial = connection.prepareStatement(sqlStockTransSerial.toString()); prstmtInsertStockGoodsSerial = connection.prepareStatement(sqlStockGoodsSerial.toString()); //Chi tiet serial for (StockTransSerialDTO stockTransSerial : lstStockTransSerial) { fromSerial = stockTransSerial.getFromSerial(); toSerial = stockTransSerial.getToSerial(); numberNeedToCommit++; //SET PARAMS FOR STOCK_TRANS_SERIAL paramsStockTransSerial = setParamsStockTransSerial(stockTransSerial); //SET PARAMS AND ADD TO BATCH for (int idx = 0; idx < paramsStockTransSerial.size(); idx++) { prstmtInsertStockTransSerial.setString(idx + 1, DataUtil.nvl(paramsStockTransSerial.get(idx), "").toString()); } prstmtInsertStockTransSerial.addBatch(); //Insert chi tiet serial if (!StringUtils.isInteger(fromSerial) || !StringUtils.isInteger(toSerial)) {//Serial la ky tu serial = fromSerial; paramsStockGoodsSerial = setParamsStockGoodsSerial(stockTrans, stockTransDetail, stockTransSerial, serial, serialStatus); //SET PARAMS AND ADD TO BATCH for (int idx = 0; idx < paramsStockGoodsSerial.size(); idx++) { try { prstmtInsertStockGoodsSerial.setString(idx + 1, DataUtil.nvl(paramsStockGoodsSerial.get(idx), "").toString()); } catch (Exception e) { System.out.println(idx); } } prstmtInsertStockGoodsSerial.addBatch(); //bo sung them amount issue } else {//Serial la so //Kiem tra do dai serial kneu >19 thi cat do kieu Long chi co do dai toi da 19 int iLengthSuffixSerial = 0; if (fromSerial.length() > Constants.SERIAL_LIMIT) { prefixSerial = fromSerial.substring(0, fromSerial.length() - Constants.SERIAL_LIMIT); suffixFromSerial = fromSerial.substring(fromSerial.length() - Constants.SERIAL_LIMIT, fromSerial.length()); suffixToSerial = toSerial.substring(toSerial.length() - Constants.SERIAL_LIMIT, toSerial.length()); iLengthSuffixSerial = suffixFromSerial.length(); } else { suffixFromSerial = fromSerial; suffixToSerial = toSerial; iLengthSuffixSerial = fromSerial.length(); } // String tmpSuffixSerial; for (Long lSerial = Long.parseLong(suffixFromSerial); lSerial <= Long .parseLong(suffixToSerial); lSerial++) { tmpSuffixSerial = DataUtil.lPad(lSerial.toString(), "0", iLengthSuffixSerial); serial = prefixSerial + tmpSuffixSerial; paramsStockGoodsSerial = setParamsStockGoodsSerial(stockTrans, stockTransDetail, stockTransSerial, serial, serialStatus); //SET PARAMS AND ADD TO BATCH for (int idx = 0; idx < paramsStockGoodsSerial.size(); idx++) { prstmtInsertStockGoodsSerial.setString(idx + 1, DataUtil.nvl(paramsStockGoodsSerial.get(idx), "").toString()); } prstmtInsertStockGoodsSerial.addBatch(); //Bo sung them thong tin so luong amount issue //amountIssue++; } } //END IF if (numberNeedToCommit >= Constants.COMMIT_NUM) { try { prstmtInsertStockGoodsSerial.executeBatch(); prstmtInsertStockTransSerial.executeBatch(); numberOfSuccess = numberOfSuccess + numberNeedToCommit; } catch (Exception ex) { numberOfFail = numberOfFail + numberNeedToCommit; } numberNeedToCommit = 0; } } //END FOR if (numberNeedToCommit > 0) { try { prstmtInsertStockTransSerial.executeBatch(); prstmtInsertStockGoodsSerial.executeBatch(); numberOfSuccess += numberNeedToCommit; } catch (Exception ex) { // connection.rollback(); numberOfFail += numberNeedToCommit; } } prstmtInsertStockTransSerial.close(); prstmtInsertStockGoodsSerial.close(); } catch (SQLException | NumberFormatException e) { Logger.getLogger(StockGoodsSerialDAO.class.getName()).log(Level.SEVERE, null, e); } //lay so luong hang hoa insert vao ban err$_ 
List<StockGoodsSerialInforDTO> lstError = getListErrorImportRevoke(stockTrans.getStockTransId()); int amountError = 0; if (lstError != null) { amountError = lstError.size(); } Double strAmount = Double.parseDouble(stockTransDetail.getAmountReal() + ""); numberOfSuccess = Integer.parseInt(String.format("%.0f", strAmount)) - amountError;//tru so luong hang insert loi => so luong hang insert thanh cong numberOfFail = amountError;//so luong hang loi do ta ton tai serial cua khach hang trong kho amountIssue = (double) numberOfSuccess; // resultDTO.setMessage(ParamUtils.SUCCESS); resultDTO.setQuantityFail(numberOfFail); resultDTO.setQuantitySucc(numberOfSuccess); resultDTO.setAmount(amount); resultDTO.setAmountIssue(amountIssue); // tra ve list serial loi resultDTO.setLstStockGoodsSerialInforDTO(lstError); return resultDTO; }
From source file:org.wso2.carbon.appmgt.impl.dao.AppMDAO.java
/**
 * Save application-wise policy groups
 *
 * @param connection     SQL connection
 * @param applicationId  application id
 * @param policyGroupIds policy group id list
 * @throws AppManagementException if an error occurs while saving data to the DB
 */
public void saveApplicationPolicyGroupsMappings(Connection connection, int applicationId, Object[] policyGroupIds)
        throws AppManagementException {
    PreparedStatement preparedStatement = null;
    String query = "INSERT INTO APM_POLICY_GROUP_MAPPING(APP_ID, POLICY_GRP_ID) VALUES(?,?)";
    try {
        preparedStatement = connection.prepareStatement(query);
        for (Object policyGroupId : policyGroupIds) {
            preparedStatement.setInt(1, applicationId);
            preparedStatement.setInt(2, Integer.parseInt(policyGroupId.toString()));
            preparedStatement.addBatch();
        }
        preparedStatement.executeBatch();
    } catch (SQLException e) {
        String strDataContext = "(applicationId:" + applicationId + ", policyGroupIds:" + policyGroupIds + ")";
        handleException("SQL Error while executing the query to save Policy Group mappings : " + query + " : "
                + strDataContext, e);
    } finally {
        APIMgtDBUtil.closeAllConnections(preparedStatement, null, null);
    }
}