List of usage examples for java.sql PreparedStatement setBinaryStream
void setBinaryStream(int parameterIndex, java.io.InputStream x, long length) throws SQLException;
From source file:org.wso2.carbon.registry.core.jdbc.dao.JDBCResourceDAO.java
/**
 * Inserts the given content stream as a new row in REG_CONTENT and returns the
 * generated REG_CONTENT_ID.
 *
 * @param contentStream the raw resource content to store as a BLOB
 * @return the REG_CONTENT_ID of the inserted row, or -1 if no id could be read back
 * @throws RegistryException if reading the stream or writing to the database fails
 */
public int addContentBytes(InputStream contentStream) throws RegistryException {
    JDBCDatabaseTransaction.ManagedRegistryConnection conn = JDBCDatabaseTransaction.getConnection();
    int contentID = -1;
    try {
        String sql = "INSERT INTO REG_CONTENT (REG_CONTENT_DATA, REG_TENANT_ID) VALUES (?, ?)";
        String sql1 = "SELECT MAX(REG_CONTENT_ID) FROM REG_CONTENT";
        // NOTE(review): available() is only an estimate of the bytes readable without
        // blocking, not the total stream length — this is only safe if callers always
        // pass a fully-buffered stream (e.g. ByteArrayInputStream). TODO confirm.
        int size = contentStream.available();
        PreparedStatement ps, ps1 = null;
        String dbProductName = conn.getMetaData().getDatabaseProductName();
        boolean returnsGeneratedKeys = DBUtils.canReturnGeneratedKeys(dbProductName);
        if (returnsGeneratedKeys) {
            // Ask the driver to report the auto-generated id column back to us.
            ps = conn.prepareStatement(sql, new String[] {
                    DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "REG_CONTENT_ID") });
        } else {
            ps = conn.prepareStatement(sql);
        }
        try {
            ps.setBinaryStream(1, contentStream, size);
            ps.setInt(2, CurrentSession.getTenantId());
            ResultSet result;
            if (returnsGeneratedKeys) {
                ps.executeUpdate();
                result = ps.getGeneratedKeys();
            } else {
                // Driver cannot return generated keys: serialize insert + MAX(id)
                // lookup so a concurrent insert cannot hand us the wrong id.
                synchronized (ADD_CONTENT_LOCK) {
                    ps.executeUpdate();
                    if (dbProductName.equals("OpenEdge RDBMS")) {
                        // OpenEdge workaround: the row is inserted with id 0 and must
                        // be assigned its sequence value explicitly.
                        String sql2 = "UPDATE REG_CONTENT SET REG_CONTENT_ID = "
                                + "PUB.REG_CONTENT_SEQUENCE.NEXTVAL WHERE REG_CONTENT_ID = 0";
                        PreparedStatement ps2 = null;
                        try {
                            ps2 = conn.prepareStatement(sql2);
                            ps2.executeUpdate();
                        } finally {
                            if (ps2 != null) {
                                ps2.close();
                            }
                        }
                    }
                    ps1 = conn.prepareStatement(sql1);
                    result = ps1.executeQuery();
                }
            }
            try {
                if (result.next()) {
                    contentID = result.getInt(1);
                }
            } finally {
                if (result != null) {
                    result.close();
                }
            }
        } finally {
            // Nested finally so ps is closed even if closing ps1 throws.
            try {
                if (ps1 != null) {
                    ps1.close();
                }
            } finally {
                if (ps != null) {
                    ps.close();
                }
            }
        }
    } catch (IOException e) {
        String msg = "An error occurred while processing content stream.";
        log.error(msg, e);
        throw new RegistryException(msg, e);
    } catch (SQLException e) {
        String msg = "Failed to write resource content to the database.";
        log.error(msg, e);
        throw new RegistryException(msg, e);
    }
    return contentID;
}
From source file:org.sakaiproject.search.index.impl.JDBCClusterIndexStore.java
/** * updat this save this local segment into the db * /* w ww .j a v a 2 s .c om*/ * @param connection * @param addsi */ protected void updateDBPatchBLOB(Connection connection) throws SQLException, IOException { PreparedStatement segmentUpdate = null; PreparedStatement segmentInsert = null; InputStream packetStream = null; File packetFile = null; long newVersion = System.currentTimeMillis(); try { segmentUpdate = connection.prepareStatement( "update search_segments set packet_ = ?, version_ = ?, size_ = ? where name_ = ?"); segmentInsert = connection.prepareStatement( "insert into search_segments (packet_, name_, version_, size_ ) values ( ?,?,?,?)"); packetFile = clusterStorage.packPatch(); if (packetFile.exists()) { packetStream = new FileInputStream(packetFile); segmentUpdate.clearParameters(); segmentUpdate.setBinaryStream(1, packetStream, (int) packetFile.length()); segmentUpdate.setLong(2, newVersion); segmentUpdate.setLong(3, packetFile.length()); segmentUpdate.setString(4, INDEX_PATCHNAME); if (segmentUpdate.executeUpdate() != 1) { segmentInsert.clearParameters(); segmentInsert.setBinaryStream(1, packetStream, (int) packetFile.length()); segmentInsert.setString(2, INDEX_PATCHNAME); segmentInsert.setLong(3, newVersion); segmentInsert.setLong(4, packetFile.length()); if (segmentInsert.executeUpdate() != 1) { throw new SQLException(" Failed to insert patch "); } } if (log.isDebugEnabled()) log.debug("DB Updated Patch "); } else { log.warn(" Packed Patch does not exist " + packetFile.getPath()); } } finally { try { if (packetStream != null) { packetStream.close(); } } catch (Exception ex) { log.debug(ex); } try { packetFile.delete(); } catch (Exception ex) { log.debug(ex); } try { segmentUpdate.close(); } catch (Exception ex) { log.debug(ex); } try { segmentInsert.close(); } catch (Exception ex) { log.debug(ex); } } }
From source file:org.wso2.carbon.repository.core.jdbc.dao.JDBCResourceDAO.java
/**
 * Inserts the given content stream as a new row in REG_CONTENT and returns the
 * generated REG_CONTENT_ID.
 *
 * @param contentStream the raw resource content to store as a BLOB
 * @return the REG_CONTENT_ID of the inserted row, or -1 if no id could be read back
 * @throws RepositoryException if reading the stream or writing to the database fails
 */
public int addContentBytes(InputStream contentStream) throws RepositoryException {
    JDBCDatabaseTransaction.ManagedRegistryConnection conn = JDBCDatabaseTransaction.getConnection();
    int contentID = -1;
    try {
        String sql = "INSERT INTO REG_CONTENT (REG_CONTENT_DATA, REG_TENANT_ID) VALUES (?, ?)";
        String sql1 = "SELECT MAX(REG_CONTENT_ID) FROM REG_CONTENT";
        // NOTE(review): available() is only an estimate of the bytes readable without
        // blocking, not the total stream length — this is only safe if callers always
        // pass a fully-buffered stream (e.g. ByteArrayInputStream). TODO confirm.
        int size = contentStream.available();
        PreparedStatement ps, ps1 = null;
        String dbProductName = conn.getMetaData().getDatabaseProductName();
        boolean returnsGeneratedKeys = DBUtils.canReturnGeneratedKeys(dbProductName);
        if (returnsGeneratedKeys) {
            // Ask the driver to report the auto-generated id column back to us.
            ps = conn.prepareStatement(sql, new String[] {
                    DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "REG_CONTENT_ID") });
        } else {
            ps = conn.prepareStatement(sql);
        }
        try {
            ps.setBinaryStream(1, contentStream, size);
            ps.setInt(2, CurrentContext.getTenantId());
            ResultSet result;
            if (returnsGeneratedKeys) {
                ps.executeUpdate();
                result = ps.getGeneratedKeys();
            } else {
                // Driver cannot return generated keys: serialize insert + MAX(id)
                // lookup so a concurrent insert cannot hand us the wrong id.
                synchronized (ADD_CONTENT_LOCK) {
                    ps.executeUpdate();
                    if (dbProductName.equals("OpenEdge RDBMS")) {
                        // OpenEdge workaround: the row is inserted with id 0 and must
                        // be assigned its sequence value explicitly.
                        String sql2 = "UPDATE REG_CONTENT SET REG_CONTENT_ID = "
                                + "PUB.REG_CONTENT_SEQUENCE.NEXTVAL WHERE REG_CONTENT_ID = 0";
                        PreparedStatement ps2 = null;
                        try {
                            ps2 = conn.prepareStatement(sql2);
                            ps2.executeUpdate();
                        } finally {
                            if (ps2 != null) {
                                ps2.close();
                            }
                        }
                    }
                    ps1 = conn.prepareStatement(sql1);
                    result = ps1.executeQuery();
                }
            }
            try {
                if (result.next()) {
                    contentID = result.getInt(1);
                }
            } finally {
                if (result != null) {
                    result.close();
                }
            }
        } finally {
            // Nested finally so ps is closed even if closing ps1 throws.
            try {
                if (ps1 != null) {
                    ps1.close();
                }
            } finally {
                if (ps != null) {
                    ps.close();
                }
            }
        }
    } catch (IOException e) {
        String msg = "An error occurred while processing content stream.";
        log.error(msg, e);
        throw new RepositoryServerContentException(msg, e);
    } catch (SQLException e) {
        String msg = "Failed to write resource content to the database.";
        log.error(msg, e);
        throw new RepositoryDBException(msg, e);
    }
    return contentID;
}
From source file:org.sakaiproject.search.index.impl.JDBCClusterIndexStore.java
/**
 * Saves the given local segment into the search_segments table as a BLOB.
 * Segments already present in the DB are updated (guarded by their current
 * version for optimistic concurrency); new segments are inserted. On success
 * the in-memory segment version is advanced and the packed file is deleted.
 *
 * @param connection open JDBC connection (commit/rollback is the caller's job)
 * @param addsi the segment to persist
 * @throws SQLException if the expected row could not be updated or inserted
 * @throws IOException if the packed segment file cannot be read
 */
protected void updateDBSegmentBLOB(Connection connection, SegmentInfo addsi) throws SQLException, IOException {
    PreparedStatement segmentUpdate = null;
    PreparedStatement segmentInsert = null;
    InputStream packetStream = null;
    File packetFile = null;
    long newVersion = System.currentTimeMillis();
    try {
        segmentUpdate = connection.prepareStatement(
                "update search_segments set packet_ = ?, version_ = ?, size_ = ? where name_ = ? and version_ = ?");
        segmentInsert = connection.prepareStatement(
                "insert into search_segments (packet_, name_, version_, size_ ) values ( ?,?,?,?)");
        packetFile = clusterStorage.packSegment(addsi, newVersion);
        if (packetFile.exists()) {
            packetStream = new FileInputStream(packetFile);
            if (addsi.isInDb()) {
                // Update is guarded by the old version; 0 rows means someone else
                // changed the segment under us.
                segmentUpdate.clearParameters();
                segmentUpdate.setBinaryStream(1, packetStream, (int) packetFile.length());
                segmentUpdate.setLong(2, newVersion);
                segmentUpdate.setLong(3, packetFile.length());
                segmentUpdate.setString(4, addsi.getName());
                segmentUpdate.setLong(5, addsi.getVersion());
                if (segmentUpdate.executeUpdate() != 1) {
                    // FIX: message was garbled (" ant Find packet to update ").
                    throw new SQLException(" Can't find packet to update " + addsi);
                }
            } else {
                segmentInsert.clearParameters();
                segmentInsert.setBinaryStream(1, packetStream, (int) packetFile.length());
                segmentInsert.setString(2, addsi.getName());
                segmentInsert.setLong(3, newVersion);
                segmentInsert.setLong(4, packetFile.length());
                if (segmentInsert.executeUpdate() != 1) {
                    throw new SQLException(" Failed to insert packet " + addsi);
                }
            }
            addsi.setVersion(newVersion);
            if (log.isDebugEnabled())
                log.debug("DB Updated " + addsi);
            // FIX: stream close and file delete used to be duplicated here AND in
            // the finally block (the finally also NPE'd on packetStream when the
            // packet file never existed). Cleanup now happens exactly once, below.
        } else {
            log.warn("Packet file does not exist " + packetFile.getPath());
        }
    } finally {
        // Best-effort cleanup; each step isolated so one failure does not block the rest.
        try {
            if (packetStream != null) {
                packetStream.close();
            }
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            if (packetFile != null) {
                packetFile.delete();
            }
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            if (segmentUpdate != null) {
                segmentUpdate.close();
            }
        } catch (Exception ex) {
            log.debug(ex);
        }
        try {
            if (segmentInsert != null) {
                segmentInsert.close();
            }
        } catch (Exception ex) {
            log.debug(ex);
        }
    }
}
From source file:helma.objectmodel.db.NodeManager.java
/**
 * Binds a Helma Property value to the given parameter slot of a prepared
 * statement, converting according to the target column's java.sql.Types code.
 * Null property values are bound with setNull using the column type.
 *
 * @param stmt the statement to bind into
 * @param stmtNumber 1-based parameter index
 * @param p the property supplying the value
 * @param columnType the java.sql.Types code of the target column
 * @throws SQLException if binding fails or a binary column gets a non-byte[] value
 */
private void setStatementValue(PreparedStatement stmt, int stmtNumber, Property p, int columnType)
        throws SQLException {
    if (p.getValue() == null) {
        stmt.setNull(stmtNumber, columnType);
    } else {
        switch (columnType) {
        case Types.BIT:
        case Types.BOOLEAN:
            stmt.setBoolean(stmtNumber, p.getBooleanValue());
            break;

        case Types.TINYINT:
        case Types.BIGINT:
        case Types.SMALLINT:
        case Types.INTEGER:
            // All integer widths are bound via setLong; the driver narrows as needed.
            stmt.setLong(stmtNumber, p.getIntegerValue());
            break;

        case Types.REAL:
        case Types.FLOAT:
        case Types.DOUBLE:
        case Types.NUMERIC:
        case Types.DECIMAL:
            stmt.setDouble(stmtNumber, p.getFloatValue());
            break;

        case Types.LONGVARBINARY:
        case Types.VARBINARY:
        case Types.BINARY:
        case Types.BLOB:
            Object b = p.getJavaObjectValue();
            if (b instanceof byte[]) {
                byte[] buf = (byte[]) b;
                try {
                    stmt.setBytes(stmtNumber, buf);
                } catch (SQLException x) {
                    // Fallback for drivers that reject setBytes on large/stream
                    // columns: bind as a binary stream instead.
                    ByteArrayInputStream bout = new ByteArrayInputStream(buf);
                    stmt.setBinaryStream(stmtNumber, bout, buf.length);
                }
            } else {
                throw new SQLException(
                        "expected byte[] for binary column '" + p.getName() + "', found " + b.getClass());
            }
            break;

        case Types.LONGVARCHAR:
            try {
                stmt.setString(stmtNumber, p.getStringValue());
            } catch (SQLException x) {
                // Fallback for drivers that cap setString length on LONGVARCHAR:
                // bind as a character stream instead.
                String str = p.getStringValue();
                Reader r = new StringReader(str);
                stmt.setCharacterStream(stmtNumber, r, str.length());
            }
            break;

        case Types.CLOB:
            String val = p.getStringValue();
            Reader isr = new StringReader(val);
            stmt.setCharacterStream(stmtNumber, isr, val.length());
            break;

        case Types.CHAR:
        case Types.VARCHAR:
        case Types.OTHER:
            stmt.setString(stmtNumber, p.getStringValue());
            break;

        case Types.DATE:
        case Types.TIME:
        case Types.TIMESTAMP:
            stmt.setTimestamp(stmtNumber, p.getTimestampValue());
            break;

        case Types.NULL:
            // 0 == java.sql.Types.NULL
            stmt.setNull(stmtNumber, 0);
            break;

        default:
            // Unknown column type: fall back to the string representation.
            stmt.setString(stmtNumber, p.getStringValue());
            break;
        }
    }
}
From source file:org.sakaiproject.db.impl.BasicSqlService.java
/** * Execute the "write" sql - no response. a long binary field is set to "?" - fill it in with var * /* ww w. j av a2 s . c om*/ * @param sql * The sql statement. * @param fields * The array of fields for parameters. * @param var * The value to bind to the last parameter in the sql statement. * @param offset * The start within the var to write * @param len * The number of bytes of var, starting with index, to write * @return true if successful, false if not. */ public boolean dbWriteBinary(String sql, Object[] fields, byte[] var, int offset, int len) { // Note: does not support TRANSACTION_CONNECTION -ggolden if (LOG.isDebugEnabled()) { LOG.debug("dbWriteBinary(String " + sql + ", Object[] " + Arrays.toString(fields) + ", byte[] " + Arrays.toString(var) + ", int " + offset + ", int " + len + ")"); } // for DEBUG long start = 0; long connectionTime = 0; if (LOG.isDebugEnabled()) { String userId = usageSessionService().getSessionId(); LOG.debug("Sql.dbWriteBinary(): " + userId + "\n" + sql + " size:" + var.length); } Connection conn = null; PreparedStatement pstmt = null; boolean autoCommit = false; boolean resetAutoCommit = false; // stream from the var InputStream varStream = new ByteArrayInputStream(var, offset, len); boolean success = false; try { if (m_showSql) start = System.currentTimeMillis(); conn = borrowConnection(); if (m_showSql) connectionTime = System.currentTimeMillis() - start; // make sure we do not have auto commit - will change and reset if needed autoCommit = conn.getAutoCommit(); if (autoCommit) { conn.setAutoCommit(false); resetAutoCommit = true; } if (m_showSql) start = System.currentTimeMillis(); pstmt = conn.prepareStatement(sql); // put in all the fields int pos = prepareStatement(pstmt, fields); // last, put in the binary pstmt.setBinaryStream(pos, varStream, len); //int result = pstmt.executeUpdate(); // commit and indicate success conn.commit(); success = true; } catch (SQLException e) { // this is likely due to a key constraint 
problem... return false; } catch (Exception e) { LOG.warn("Sql.dbWriteBinary(): " + e); return false; } finally { //try //{ if (null != pstmt) { try { pstmt.close(); } catch (SQLException e) { LOG.warn("Sql.dbWriteBinary(): " + e); } } if (null != varStream) { try { varStream.close(); } catch (IOException e) { LOG.warn("Sql.dbWriteBinary(): " + e); } } if (null != conn) { // rollback on failure if (!success) { try { conn.rollback(); } catch (SQLException e) { LOG.warn("Sql.dbWriteBinary(): " + e); } } // if we changed the auto commit, reset here if (resetAutoCommit) { try { conn.setAutoCommit(autoCommit); } catch (SQLException e) { LOG.warn("Sql.dbWriteBinary(): " + e); } } returnConnection(conn); } } if (m_showSql) debug("sql write binary: len: " + len + " time: " + connectionTime + " / " + (System.currentTimeMillis() - start), sql, fields); return true; }
From source file:org.opencms.db.generic.CmsProjectDriver.java
/**
 * Stores a publish report for the given publish job.
 *
 * @see org.opencms.db.I_CmsProjectDriver#writePublishReport(org.opencms.db.CmsDbContext, org.opencms.util.CmsUUID, byte[])
 */
public void writePublishReport(CmsDbContext dbc, CmsUUID publishId, byte[] content)
        throws CmsDataAccessException {

    Connection connection = null;
    PreparedStatement statement = null;
    try {
        connection = m_sqlManager.getConnection(dbc);
        statement = m_sqlManager.getPreparedStatement(connection, "C_PUBLISHJOB_WRITE_REPORT");

        // Large reports are streamed into the BLOB column; small ones
        // (< 2000 bytes) are bound directly as a byte array.
        boolean streamed = content.length >= 2000;
        if (streamed) {
            statement.setBinaryStream(1, new ByteArrayInputStream(content), content.length);
        } else {
            statement.setBytes(1, content);
        }
        statement.setString(2, publishId.toString());
        statement.executeUpdate();
    } catch (SQLException e) {
        throw new CmsDbSqlException(
                Messages.get().container(Messages.ERR_GENERIC_SQL_1, CmsDbSqlException.getErrorQuery(statement)),
                e);
    } finally {
        m_sqlManager.closeAll(dbc, connection, statement, null);
    }
}
From source file:org.sakaiproject.db.impl.BasicSqlService.java
/** * Execute the "insert" sql, returning a possible auto-update field Long value * /*from w w w . ja v a 2 s. c om*/ * @param sql * The sql statement. * @param fields * The array of fields for parameters. * @param callerConnection * The connection to use. * @param autoColumn * The name of the db column that will have auto-update - we will return the value used (leave null to disable this feature). * @param last * A stream to set as the last field. * @return The auto-update value, or null */ public Long dbInsert(Connection callerConnection, String sql, Object[] fields, String autoColumn, InputStream last, int lastLength) { boolean connFromThreadLocal = false; // check for a transaction conncetion if (callerConnection == null) { callerConnection = (Connection) threadLocalManager().get(TRANSACTION_CONNECTION); if (callerConnection != null) { // KNL-492 We set this so we can avoid returning a connection that is being managed elsewhere connFromThreadLocal = true; } } if (LOG.isDebugEnabled()) { LOG.debug("dbInsert(String " + sql + ", Object[] " + Arrays.toString(fields) + ", Connection " + callerConnection + ")"); } // for DEBUG long start = 0; long connectionTime = 0; if (LOG.isDebugEnabled()) { String userId = usageSessionService().getSessionId(); StringBuilder buf = new StringBuilder(); if (fields != null) { buf.append(fields[0]); for (int i = 1; i < fields.length; i++) { buf.append(", "); buf.append(fields[i]); } } LOG.debug("Sql.dbInsert(): " + userId + "\n" + sql + "\n" + buf); } Connection conn = null; PreparedStatement pstmt = null; boolean autoCommit = false; boolean resetAutoCommit = false; boolean success = false; Long rv = null; try { if (callerConnection != null) { conn = callerConnection; } else { if (m_showSql) start = System.currentTimeMillis(); conn = borrowConnection(); if (m_showSql) connectionTime = System.currentTimeMillis() - start; // make sure we have do not have auto commit - will change and reset if needed autoCommit = conn.getAutoCommit(); if 
(autoCommit) { conn.setAutoCommit(false); resetAutoCommit = true; } } if (m_showSql) start = System.currentTimeMillis(); pstmt = sqlServiceSql.prepareAutoColumn(conn, sql, autoColumn); // put in all the fields int pos = prepareStatement(pstmt, fields); // and the last one if (last != null) { pstmt.setBinaryStream(pos, last, lastLength); } int result = pstmt.executeUpdate(); rv = sqlServiceSql.getGeneratedKey(pstmt, sql); // commit unless we are in a transaction (provided with a connection) if (callerConnection == null) { conn.commit(); } // indicate success success = true; } catch (SQLException e) { // is this due to a key constraint problem... check each vendor's error codes boolean recordAlreadyExists = sqlServiceSql.getRecordAlreadyExists(e); if (m_showSql) { LOG.warn("Sql.dbInsert(): error code: " + e.getErrorCode() + " sql: " + sql + " binds: " + debugFields(fields) + " " + e); } if (recordAlreadyExists) return null; // perhaps due to a mysql deadlock? if (("mysql".equals(m_vendor)) && (e.getErrorCode() == 1213)) { // just a little fuss LOG.warn("Sql.dbInsert(): deadlock: error code: " + e.getErrorCode() + " sql: " + sql + " binds: " + debugFields(fields) + " " + e.toString()); throw new SqlServiceDeadlockException(e); } else if (recordAlreadyExists) { // just a little fuss LOG.warn("Sql.dbInsert(): unique violation: error code: " + e.getErrorCode() + " sql: " + sql + " binds: " + debugFields(fields) + " " + e.toString()); throw new SqlServiceUniqueViolationException(e); } else { // something ELSE went wrong, so lest make a fuss LOG.warn("Sql.dbInsert(): error code: " + e.getErrorCode() + " sql: " + sql + " binds: " + debugFields(fields) + " ", e); throw new RuntimeException("SqlService.dbInsert failure", e); } } catch (Exception e) { LOG.warn("Sql.dbInsert(): " + e); throw new RuntimeException("SqlService.dbInsert failure", e); } finally { try { if (null != pstmt) pstmt.close(); if ((null != conn) && (callerConnection == null)) { // rollback on failure if 
(!success) { conn.rollback(); } // if we changed the auto commit, reset here if (resetAutoCommit) { conn.setAutoCommit(autoCommit); } } } catch (Exception e) { LOG.warn("Sql.dbInsert(): " + e); throw new RuntimeException("SqlService.dbInsert failure", e); } //make sure we return the connection even if the rollback etc above // KNL-492 connFromThreadLocal is tested so we can avoid returning a // connection that is being managed elsewhere if (conn != null && !connFromThreadLocal) { returnConnection(conn); } } if (m_showSql) debug("Sql.dbWrite(): len: " + " time: " + connectionTime + " / " + (System.currentTimeMillis() - start), sql, fields); return rv; }
From source file:pt.iflow.flows.FlowHolderBean.java
/** * Upload a flow template to database/*from w ww.j av a 2s . c o m*/ * * @param userInfo * @param name * @param description * @param data * @return */ public boolean uploadFlowTemplate(UserInfoInterface userInfo, String name, String description, byte[] data) { if (!userInfo.isSysAdmin()) { Logger.error(userInfo.getUtilizador(), this, "uploadFlowTemplate", "User is not System Admin."); return false; } if (null == data) { Logger.error(userInfo.getUtilizador(), this, "uploadFlowTemplate", "Data is null"); return false; } Connection db = null; PreparedStatement st = null; ResultSet rs = null; boolean insert = true; boolean result = false; try { db = Utils.getDataSource().getConnection(); db.setAutoCommit(false); // copy from template st = db.prepareStatement("select count(*) FROM flow_template where name=?"); st.setString(1, name); rs = st.executeQuery(); if (rs.next()) { insert = (rs.getInt(1) == 0); } rs.close(); st.close(); if (insert) { st = db.prepareStatement("insert into flow_template (description,data,name) values (?,?,?)"); } else { st = db.prepareStatement("update flow_template set description = ?, data = ? where name = ?"); } st.setString(1, description); ByteArrayInputStream bin = new ByteArrayInputStream(data); st.setBinaryStream(2, bin, data.length); st.setString(3, name); result = (st.executeUpdate() == 1); db.commit(); } catch (Exception e) { try { db.rollback(); } catch (SQLException e1) { e1.printStackTrace(); } e.printStackTrace(); result = false; } finally { DatabaseInterface.closeResources(db, st, rs); } return result; }
From source file:pt.iflow.flows.FlowHolderBean.java
private synchronized State insertOrUpdateSubFlow(UserInfoInterface userInfo, String file, String name, byte[] data, boolean forceCreate, boolean makeVersion, String comment) { // recorrer a um metodo privado para efectuar a actualizacao // propriamente dita. // Esse mesmo metodo sera usado pelo deploy no caso de ser necessario // actualizar o catalogo. State result = new State(); Connection db = null;/*from w w w . ja va 2s . c o m*/ PreparedStatement pst = null; ResultSet rs = null; boolean flowFound = false; int flowid = -1; try { db = Utils.getDataSource().getConnection(); db.setAutoCommit(false); String query = "select flowid,flowversion from sub_flow where flowfile=? and organizationid=?"; Logger.debug(userInfo.getUtilizador(), this, "insertOrUpdateSubFlow", "Query1: " + query); pst = db.prepareStatement(query); pst.setString(1, file); pst.setString(2, userInfo.getOrganization()); rs = pst.executeQuery(); if (rs.next()) { flowFound = true; flowid = rs.getInt("flowid"); result.version = rs.getInt("flowversion"); } rs.close(); pst.close(); boolean copyToHistory = false; if (flowFound) { query = "update sub_flow set flowdata=?," + (makeVersion ? "flowversion=flowversion+1," : "") + "modified=? 
where flowid=?"; Logger.debug(userInfo.getUtilizador(), this, "insertOrUpdateSubFlow", "Query2a: " + query); pst = db.prepareStatement(query); pst.setBinaryStream(1, new ByteArrayInputStream(data), data.length); pst.setTimestamp(2, new Timestamp(System.currentTimeMillis())); pst.setInt(3, flowid); int upd = pst.executeUpdate(); pst.close(); result.created = false; copyToHistory = (upd != 0); } else if (forceCreate) { if (null == name) name = file; Timestamp now = new Timestamp(System.currentTimeMillis()); query = DBQueryManager.getQuery("FlowHolder.INSERT_SUBFLOW"); Logger.debug(userInfo.getUtilizador(), this, "insertOrUpdateSubFlow", "Query2b: " + query); pst = db.prepareStatement(query, new String[] { "flowid" }); pst.setString(1, name); pst.setString(2, file); pst.setTimestamp(3, now); pst.setString(4, userInfo.getOrganization()); pst.setBinaryStream(5, new ByteArrayInputStream(data), data.length); pst.setTimestamp(6, now); pst.executeUpdate(); rs = pst.getGeneratedKeys(); if (rs.next()) { result.created = true; flowid = rs.getInt(1); copyToHistory = true; } rs.close(); pst.close(); } else { throw new Exception("Cannot create sub flow."); } // Copy to flow history. if (copyToHistory && makeVersion) { if (null != comment && comment.length() > MAX_COMMENT_SIZE) comment = comment.substring(0, MAX_COMMENT_SIZE); query = DBQueryManager.getQuery("FlowHolder.COPY_SUB_FLOW_TO_HISTORY"); Logger.debug(userInfo.getUtilizador(), this, "insertOrUpdateSubFlow", "Query3: " + query); pst = db.prepareStatement(query); pst.setString(1, comment); pst.setInt(2, flowid); pst.executeUpdate(); pst.close(); result.version++; } db.commit(); result.success = true; } catch (Exception e) { try { db.rollback(); } catch (SQLException e1) { e1.printStackTrace(); } e.printStackTrace(); result.success = false; } finally { DatabaseInterface.closeResources(db, pst, rs); } result.flowid = flowid; return result; }