Usage examples for java.sql.PreparedStatement.setBinaryStream
void setBinaryStream(int parameterIndex, java.io.InputStream x, long length) throws SQLException;
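PreparedStatement.setBinaryStream binds an InputStream to a binary parameter (typically a BLOB, VARBINARY or LONGVARBINARY column) and streams its bytes to the database when the statement executes, so large values never have to be fully materialized in memory. JDBC 4.0 (Java 6) added overloads taking a long length or no length at all; older drivers only accept an int length. Before the real-world examples below, here is a minimal, self-contained sketch; the in-memory H2 database, the DOCUMENT table and its columns are illustrative assumptions, not taken from any example on this page.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Statement;

public class SetBinaryStreamExample {
    public static void main(String[] args) throws Exception {
        byte[] payload = "example binary payload".getBytes(StandardCharsets.UTF_8);
        // Assumes the H2 driver is on the classpath; any JDBC driver works the same way.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
            try (Statement ddl = conn.createStatement()) {
                ddl.execute("CREATE TABLE DOCUMENT (ID BIGINT, CONTENT BLOB)");
            }
            try (PreparedStatement ps = conn.prepareStatement(
                    "INSERT INTO DOCUMENT (ID, CONTENT) VALUES (?, ?)")) {
                InputStream in = new ByteArrayInputStream(payload);
                ps.setLong(1, 1L);
                // Pass the exact length when it is known; the driver uses it to size the transfer.
                ps.setBinaryStream(2, in, (long) payload.length);
                ps.executeUpdate();
            }
        }
    }
}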
From source file: org.sakaiproject.nakamura.lite.storage.jdbc.JDBCStorageClient.java
public void insert(String keySpace, String columnFamily, String key, Map<String, Object> values,
        boolean probablyNew) throws StorageClientException {
    checkClosed();
    Map<String, PreparedStatement> statementCache = Maps.newHashMap();
    boolean autoCommit = true;
    try {
        autoCommit = startBlock();
        String rid = rowHash(keySpace, columnFamily, key);
        for (Entry<String, Object> e : values.entrySet()) {
            String k = e.getKey();
            Object o = e.getValue();
            if (o instanceof byte[]) {
                throw new RuntimeException(
                        "Invalid content in " + k + ", storing byte[] rather than streaming it");
            }
        }
        Map<String, Object> m = get(keySpace, columnFamily, key);
        for (Entry<String, Object> e : values.entrySet()) {
            String k = e.getKey();
            Object o = e.getValue();
            if (o instanceof RemoveProperty || o == null) {
                m.remove(k);
            } else {
                m.put(k, o);
            }
        }
        LOGGER.debug("Saving {} {} {} ", new Object[] { key, rid, m });
        if (probablyNew && !UPDATE_FIRST_SEQUENCE.equals(getSql(SQL_STATEMENT_SEQUENCE))) {
            PreparedStatement insertBlockRow = getStatement(keySpace, columnFamily, SQL_BLOCK_INSERT_ROW,
                    rid, statementCache);
            insertBlockRow.clearWarnings();
            insertBlockRow.clearParameters();
            insertBlockRow.setString(1, rid);
            InputStream insertStream = null;
            try {
                insertStream = Types.storeMapToStream(rid, m, columnFamily);
            } catch (UTFDataFormatException e) {
                throw new DataFormatException(INVALID_DATA_ERROR, e);
            }
            if ("1.5".equals(getSql(JDBC_SUPPORT_LEVEL))) {
                insertBlockRow.setBinaryStream(2, insertStream, insertStream.available());
            } else {
                insertBlockRow.setBinaryStream(2, insertStream);
            }
            int rowsInserted = 0;
            try {
                rowsInserted = insertBlockRow.executeUpdate();
            } catch (SQLException e) {
                LOGGER.debug(e.getMessage(), e);
            }
            if (rowsInserted == 0) {
                PreparedStatement updateBlockRow = getStatement(keySpace, columnFamily, SQL_BLOCK_UPDATE_ROW,
                        rid, statementCache);
                updateBlockRow.clearWarnings();
                updateBlockRow.clearParameters();
                updateBlockRow.setString(2, rid);
                try {
                    insertStream = Types.storeMapToStream(rid, m, columnFamily);
                } catch (UTFDataFormatException e) {
                    throw new DataFormatException(INVALID_DATA_ERROR, e);
                }
                if ("1.5".equals(getSql(JDBC_SUPPORT_LEVEL))) {
                    updateBlockRow.setBinaryStream(1, insertStream, insertStream.available());
                } else {
                    updateBlockRow.setBinaryStream(1, insertStream);
                }
                if (updateBlockRow.executeUpdate() == 0) {
                    throw new StorageClientException("Failed to save " + rid);
                } else {
                    LOGGER.debug("Updated {} ", rid);
                }
            } else {
                LOGGER.debug("Inserted {} ", rid);
            }
        } else {
            PreparedStatement updateBlockRow = getStatement(keySpace, columnFamily, SQL_BLOCK_UPDATE_ROW,
                    rid, statementCache);
            updateBlockRow.clearWarnings();
            updateBlockRow.clearParameters();
            updateBlockRow.setString(2, rid);
            InputStream updateStream = null;
            try {
                updateStream = Types.storeMapToStream(rid, m, columnFamily);
            } catch (UTFDataFormatException e) {
                throw new DataFormatException(INVALID_DATA_ERROR, e);
            }
            if ("1.5".equals(getSql(JDBC_SUPPORT_LEVEL))) {
                updateBlockRow.setBinaryStream(1, updateStream, updateStream.available());
            } else {
                updateBlockRow.setBinaryStream(1, updateStream);
            }
            if (updateBlockRow.executeUpdate() == 0) {
                PreparedStatement insertBlockRow = getStatement(keySpace, columnFamily, SQL_BLOCK_INSERT_ROW,
                        rid, statementCache);
                insertBlockRow.clearWarnings();
                insertBlockRow.clearParameters();
                insertBlockRow.setString(1, rid);
                try {
                    updateStream = Types.storeMapToStream(rid, m, columnFamily);
                } catch (UTFDataFormatException e) {
                    throw new DataFormatException(INVALID_DATA_ERROR, e);
                }
                if ("1.5".equals(getSql(JDBC_SUPPORT_LEVEL))) {
                    insertBlockRow.setBinaryStream(2, updateStream, updateStream.available());
                } else {
                    insertBlockRow.setBinaryStream(2, updateStream);
                }
                if (insertBlockRow.executeUpdate() == 0) {
                    throw new StorageClientException("Failed to save " + rid);
                } else {
                    LOGGER.debug("Inserted {} ", rid);
                }
            } else {
                LOGGER.debug("Updated {} ", rid);
            }
        }
        if ("1".equals(getSql(USE_BATCH_INSERTS))) {
            Set<PreparedStatement> removeSet = Sets.newHashSet();
            // execute the updates and add the necessary inserts.
            Map<PreparedStatement, List<Entry<String, Object>>> insertSequence = Maps.newHashMap();
            Set<PreparedStatement> insertSet = Sets.newHashSet();
            for (Entry<String, Object> e : values.entrySet()) {
                String k = e.getKey();
                Object o = e.getValue();
                if (shouldIndex(keySpace, columnFamily, k)) {
                    if (o instanceof RemoveProperty || o == null) {
                        PreparedStatement removeStringColumn = getStatement(keySpace, columnFamily,
                                SQL_REMOVE_STRING_COLUMN, rid, statementCache);
                        removeStringColumn.setString(1, rid);
                        removeStringColumn.setString(2, k);
                        removeStringColumn.addBatch();
                        removeSet.add(removeStringColumn);
                    } else {
                        // remove all previous values
                        PreparedStatement removeStringColumn = getStatement(keySpace, columnFamily,
                                SQL_REMOVE_STRING_COLUMN, rid, statementCache);
                        removeStringColumn.setString(1, rid);
                        removeStringColumn.setString(2, k);
                        removeStringColumn.addBatch();
                        removeSet.add(removeStringColumn);
                        // insert new values, as we just removed them we know we can insert, no need to attempt update
                        // the only thing that we know is the colum value changes so we have to re-index the whole
                        // property
                        Object[] valueMembers = (o instanceof Object[]) ? (Object[]) o : new Object[] { o };
                        for (Object ov : valueMembers) {
                            String valueMember = ov.toString();
                            PreparedStatement insertStringColumn = getStatement(keySpace, columnFamily,
                                    SQL_INSERT_STRING_COLUMN, rid, statementCache);
                            insertStringColumn.setString(1, valueMember);
                            insertStringColumn.setString(2, rid);
                            insertStringColumn.setString(3, k);
                            insertStringColumn.addBatch();
                            LOGGER.debug("Insert Index {} {}", k, valueMember);
                            insertSet.add(insertStringColumn);
                            List<Entry<String, Object>> insertSeq = insertSequence.get(insertStringColumn);
                            if (insertSeq == null) {
                                insertSeq = Lists.newArrayList();
                                insertSequence.put(insertStringColumn, insertSeq);
                            }
                            insertSeq.add(e);
                        }
                    }
                }
            }
            if (!StorageClientUtils.isRoot(key)) {
                // create a holding map containing a rowhash of the parent and then process the entry to generate a update operation.
                Map<String, Object> autoIndexMap = ImmutableMap.of(InternalContent.PARENT_HASH_FIELD,
                        (Object) rowHash(keySpace, columnFamily, StorageClientUtils.getParentObjectPath(key)));
                for (Entry<String, Object> e : autoIndexMap.entrySet()) {
                    // remove all previous values
                    PreparedStatement removeStringColumn = getStatement(keySpace, columnFamily,
                            SQL_REMOVE_STRING_COLUMN, rid, statementCache);
                    removeStringColumn.setString(1, rid);
                    removeStringColumn.setString(2, e.getKey());
                    removeStringColumn.addBatch();
                    removeSet.add(removeStringColumn);
                    PreparedStatement insertStringColumn = getStatement(keySpace, columnFamily,
                            SQL_INSERT_STRING_COLUMN, rid, statementCache);
                    insertStringColumn.setString(1, (String) e.getValue());
                    insertStringColumn.setString(2, rid);
                    insertStringColumn.setString(3, e.getKey());
                    insertStringColumn.addBatch();
                    LOGGER.debug("Insert {} {}", e.getKey(), e.getValue());
                    insertSet.add(insertStringColumn);
                    List<Entry<String, Object>> insertSeq = insertSequence.get(insertStringColumn);
                    if (insertSeq == null) {
                        insertSeq = Lists.newArrayList();
                        insertSequence.put(insertStringColumn, insertSeq);
                    }
                    insertSeq.add(e);
                }
            }
            LOGGER.debug("Remove set {}", removeSet);
            for (PreparedStatement pst : removeSet) {
                pst.executeBatch();
            }
            LOGGER.debug("Insert set {}", insertSet);
            for (PreparedStatement pst : insertSet) {
                int[] res = pst.executeBatch();
                List<Entry<String, Object>> insertSeq = insertSequence.get(pst);
                for (int i = 0; i < res.length; i++) {
                    Entry<String, Object> e = insertSeq.get(i);
                    if (res[i] <= 0 && res[i] != -2) {
                        // Oracle drivers respond with -2 on a successful insert when the number is not known
                        // http://download.oracle.com/javase/1.3/docs/guide/jdbc/spec2/jdbc2.1.frame6.html
                        LOGGER.warn("Index failed for {} {} ", new Object[] { rid, e.getKey(), e.getValue() });
                    } else {
                        LOGGER.debug("Index inserted for {} {} ", new Object[] { rid, e.getKey(), e.getValue() });
                    }
                }
            }
        } else {
            for (Entry<String, Object> e : values.entrySet()) {
                String k = e.getKey();
                Object o = e.getValue();
                if (shouldIndex(keySpace, columnFamily, k)) {
                    if (o instanceof RemoveProperty || o == null) {
                        PreparedStatement removeStringColumn = getStatement(keySpace, columnFamily,
                                SQL_REMOVE_STRING_COLUMN, rid, statementCache);
                        removeStringColumn.clearWarnings();
                        removeStringColumn.clearParameters();
                        removeStringColumn.setString(1, rid);
                        removeStringColumn.setString(2, k);
                        int nrows = removeStringColumn.executeUpdate();
                        if (nrows == 0) {
                            m = get(keySpace, columnFamily, key);
                            LOGGER.debug("Column Not present did not remove {} {} Current Column:{} ",
                                    new Object[] { getRowId(keySpace, columnFamily, key), k, m });
                        } else {
                            LOGGER.debug("Removed Index {} {} {} ",
                                    new Object[] { getRowId(keySpace, columnFamily, key), k, nrows });
                        }
                    } else {
                        PreparedStatement removeStringColumn = getStatement(keySpace, columnFamily,
                                SQL_REMOVE_STRING_COLUMN, rid, statementCache);
                        removeStringColumn.clearWarnings();
                        removeStringColumn.clearParameters();
                        removeStringColumn.setString(1, rid);
                        removeStringColumn.setString(2, k);
                        int nrows = removeStringColumn.executeUpdate();
                        if (nrows == 0) {
                            m = get(keySpace, columnFamily, key);
                            LOGGER.debug("Column Not present did not remove {} {} Current Column:{} ",
                                    new Object[] { getRowId(keySpace, columnFamily, key), k, m });
                        } else {
                            LOGGER.debug("Removed Index {} {} {} ",
                                    new Object[] { getRowId(keySpace, columnFamily, key), k, nrows });
                        }
                        Object[] os = (o instanceof Object[]) ? (Object[]) o : new Object[] { o };
                        for (Object ov : os) {
                            String v = ov.toString();
                            PreparedStatement insertStringColumn = getStatement(keySpace, columnFamily,
                                    SQL_INSERT_STRING_COLUMN, rid, statementCache);
                            insertStringColumn.clearWarnings();
                            insertStringColumn.clearParameters();
                            insertStringColumn.setString(1, v);
                            insertStringColumn.setString(2, rid);
                            insertStringColumn.setString(3, k);
                            LOGGER.debug("Non Batch Insert Index {} {}", k, v);
                            if (insertStringColumn.executeUpdate() == 0) {
                                throw new StorageClientException("Failed to save "
                                        + getRowId(keySpace, columnFamily, key) + " column:[" + k + "] ");
                            } else {
                                LOGGER.debug("Inserted Index {} {} [{}]",
                                        new Object[] { getRowId(keySpace, columnFamily, key), k, v });
                            }
                        }
                    }
                }
            }
            if (!StorageClientUtils.isRoot(key)) {
                String parent = StorageClientUtils.getParentObjectPath(key);
                String hash = rowHash(keySpace, columnFamily, parent);
                LOGGER.debug("Hash of {}:{}:{} is {} ", new Object[] { keySpace, columnFamily, parent, hash });
                Map<String, Object> autoIndexMap = ImmutableMap.of(InternalContent.PARENT_HASH_FIELD, (Object) hash);
                for (Entry<String, Object> e : autoIndexMap.entrySet()) {
                    String k = e.getKey();
                    Object v = e.getValue();
                    PreparedStatement removeStringColumn = getStatement(keySpace, columnFamily,
                            SQL_REMOVE_STRING_COLUMN, rid, statementCache);
                    removeStringColumn.clearWarnings();
                    removeStringColumn.clearParameters();
                    removeStringColumn.setString(1, rid);
                    removeStringColumn.setString(2, k);
                    int nrows = removeStringColumn.executeUpdate();
                    if (nrows == 0) {
                        m = get(keySpace, columnFamily, key);
                        LOGGER.debug("Column Not present did not remove {} {} Current Column:{} ",
                                new Object[] { getRowId(keySpace, columnFamily, key), k, m });
                    } else {
                        LOGGER.debug("Removed Index {} {} {} ",
                                new Object[] { getRowId(keySpace, columnFamily, key), k, nrows });
                    }
                    PreparedStatement insertStringColumn = getStatement(keySpace, columnFamily,
                            SQL_INSERT_STRING_COLUMN, rid, statementCache);
                    insertStringColumn.clearWarnings();
                    insertStringColumn.clearParameters();
                    insertStringColumn.setString(1, v.toString());
                    insertStringColumn.setString(2, rid);
                    insertStringColumn.setString(3, k);
                    LOGGER.debug("Non Batch Insert Index {} {}", k, v);
                    if (insertStringColumn.executeUpdate() == 0) {
                        throw new StorageClientException("Failed to save "
                                + getRowId(keySpace, columnFamily, key) + " column:[" + k + "] ");
                    } else {
                        LOGGER.debug("Inserted Index {} {} [{}]",
                                new Object[] { getRowId(keySpace, columnFamily, key), k, v });
                    }
                }
            }
        }
        endBlock(autoCommit);
    } catch (SQLException e) {
        abandonBlock(autoCommit);
        LOGGER.warn("Failed to perform insert/update operation on {}:{}:{} ",
                new Object[] { keySpace, columnFamily, key }, e);
        throw new StorageClientException(e.getMessage(), e);
    } catch (IOException e) {
        abandonBlock(autoCommit);
        LOGGER.warn("Failed to perform insert/update operation on {}:{}:{} ",
                new Object[] { keySpace, columnFamily, key }, e);
        throw new StorageClientException(e.getMessage(), e);
    } finally {
        close(statementCache);
    }
}
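The branches above call either setBinaryStream(index, stream, length) or setBinaryStream(index, stream) depending on a configured JDBC support level: the length-less overload (and the long-length one) only exist from JDBC 4.0 / Java 6 onwards, while older drivers require an explicit int length. A hedged sketch of that pattern in isolation; the jdbc4 flag is a hypothetical stand-in for whatever capability check a store derives from its configuration (the Sakai code above uses its JDBC_SUPPORT_LEVEL setting):

// Sketch only: bind a stream using whichever overload the driver supports.
static void bindBinary(java.sql.PreparedStatement ps, int index, java.io.InputStream in, int length,
        boolean jdbc4) throws java.sql.SQLException {
    if (jdbc4) {
        ps.setBinaryStream(index, in);          // length-less overload, added in JDBC 4.0
    } else {
        ps.setBinaryStream(index, in, length);  // pre-4.0 drivers need an explicit length
    }
}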
From source file: org.wso2.carbon.registry.core.jdbc.dao.JDBCResourceVersionDAO.java
public long createSnapshot(int pathId, String name, InputStream versionsStream) throws RegistryException {
    JDBCDatabaseTransaction.ManagedRegistryConnection conn = JDBCDatabaseTransaction.getConnection();
    PreparedStatement ps = null;
    PreparedStatement ps1 = null;
    ResultSet result = null;
    try {
        String sql = "INSERT INTO REG_SNAPSHOT (REG_PATH_ID, REG_RESOURCE_NAME, "
                + "REG_RESOURCE_VIDS, REG_TENANT_ID) VALUES (?, ?, ?, ?)";
        String sql1 = "SELECT MAX(REG_SNAPSHOT_ID) FROM REG_SNAPSHOT";
        int size = versionsStream.available();
        String dbProductName = conn.getMetaData().getDatabaseProductName();
        boolean returnsGeneratedKeys = DBUtils.canReturnGeneratedKeys(dbProductName);
        if (returnsGeneratedKeys) {
            ps = conn.prepareStatement(sql, new String[] {
                    DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "REG_SNAPSHOT_ID") });
        } else {
            ps = conn.prepareStatement(sql);
        }
        ps.setInt(1, pathId);
        ps.setString(2, name);
        ps.setBinaryStream(3, versionsStream, size);
        ps.setInt(4, CurrentSession.getTenantId());
        if (returnsGeneratedKeys) {
            ps.executeUpdate();
            result = ps.getGeneratedKeys();
        } else {
            synchronized (ADD_SNAPSHOT_LOCK) {
                ps.executeUpdate();
                if (dbProductName.equals("OpenEdge RDBMS")) {
                    String sql2 = "UPDATE REG_SNAPSHOT SET REG_SNAPSHOT_ID = "
                            + "PUB.REG_SNAPSHOT_SEQUENCE.NEXTVAL WHERE REG_SNAPSHOT_ID = 0";
                    PreparedStatement ps2 = null;
                    try {
                        ps2 = conn.prepareStatement(sql2);
                        ps2.executeUpdate();
                    } finally {
                        if (ps2 != null) {
                            ps2.close();
                        }
                    }
                }
                ps1 = conn.prepareStatement(sql1);
                result = ps1.executeQuery();
            }
        }
        long snapshotID = -1;
        if (result.next()) {
            snapshotID = result.getLong(1);
        }
        return snapshotID;
    } catch (Exception e) {
        String msg = "Failed to write resource content to the database. " + e.getMessage();
        log.error(msg, e);
        throw new RegistryException(msg, e);
    } finally {
        try {
            try {
                if (result != null) {
                    result.close();
                }
            } finally {
                try {
                    if (ps1 != null) {
                        ps1.close();
                    }
                } finally {
                    if (ps != null) {
                        ps.close();
                    }
                }
            }
        } catch (SQLException ex) {
            String msg = RegistryConstants.RESULT_SET_PREPARED_STATEMENT_CLOSE_ERROR;
            log.error(msg, ex);
        }
    }
}
From source file: org.wso2.carbon.repository.core.jdbc.dao.JDBCResourceVersionDAO.java
public long createSnapshot(int pathId, String name, InputStream versionsStream) throws RepositoryException {
    JDBCDatabaseTransaction.ManagedRegistryConnection conn = JDBCDatabaseTransaction.getConnection();
    PreparedStatement ps = null;
    PreparedStatement ps1 = null;
    ResultSet result = null;
    try {
        String sql = "INSERT INTO REG_SNAPSHOT (REG_PATH_ID, REG_RESOURCE_NAME, "
                + "REG_RESOURCE_VIDS, REG_TENANT_ID) VALUES (?, ?, ?, ?)";
        String sql1 = "SELECT MAX(REG_SNAPSHOT_ID) FROM REG_SNAPSHOT";
        int size = versionsStream.available();
        String dbProductName = conn.getMetaData().getDatabaseProductName();
        boolean returnsGeneratedKeys = DBUtils.canReturnGeneratedKeys(dbProductName);
        if (returnsGeneratedKeys) {
            ps = conn.prepareStatement(sql, new String[] {
                    DBUtils.getConvertedAutoGeneratedColumnName(dbProductName, "REG_SNAPSHOT_ID") });
        } else {
            ps = conn.prepareStatement(sql);
        }
        ps.setInt(1, pathId);
        ps.setString(2, name);
        ps.setBinaryStream(3, versionsStream, size);
        ps.setInt(4, CurrentContext.getTenantId());
        if (returnsGeneratedKeys) {
            ps.executeUpdate();
            result = ps.getGeneratedKeys();
        } else {
            synchronized (ADD_SNAPSHOT_LOCK) {
                ps.executeUpdate();
                if (dbProductName.equals("OpenEdge RDBMS")) {
                    String sql2 = "UPDATE REG_SNAPSHOT SET REG_SNAPSHOT_ID = "
                            + "PUB.REG_SNAPSHOT_SEQUENCE.NEXTVAL WHERE REG_SNAPSHOT_ID = 0";
                    PreparedStatement ps2 = null;
                    try {
                        ps2 = conn.prepareStatement(sql2);
                        ps2.executeUpdate();
                    } finally {
                        if (ps2 != null) {
                            ps2.close();
                        }
                    }
                }
                ps1 = conn.prepareStatement(sql1);
                result = ps1.executeQuery();
            }
        }
        long snapshotID = -1;
        if (result.next()) {
            snapshotID = result.getLong(1);
        }
        return snapshotID;
    } catch (Exception e) {
        String msg = "Failed to write resource content to the database. " + e.getMessage();
        log.error(msg, e);
        throw new RepositoryDBException(msg, e);
    } finally {
        try {
            try {
                if (result != null) {
                    result.close();
                }
            } finally {
                try {
                    if (ps1 != null) {
                        ps1.close();
                    }
                } finally {
                    if (ps != null) {
                        ps.close();
                    }
                }
            }
        } catch (SQLException ex) {
            String msg = InternalConstants.RESULT_SET_PREPARED_STATEMENT_CLOSE_ERROR;
            log.error(msg, ex);
        }
    }
}
From source file: org.apache.qpid.server.store.derby.DerbyMessageStore.java
private void storeMetaData(Connection conn, long messageId, StorableMessageMetaData metaData) throws SQLException {
    if (_logger.isDebugEnabled()) {
        _logger.debug("Adding metadata for message " + messageId);
    }
    PreparedStatement stmt = conn.prepareStatement(INSERT_INTO_META_DATA);
    try {
        stmt.setLong(1, messageId);
        final int bodySize = 1 + metaData.getStorableSize();
        byte[] underlying = new byte[bodySize];
        underlying[0] = (byte) metaData.getType().ordinal();
        java.nio.ByteBuffer buf = java.nio.ByteBuffer.wrap(underlying);
        buf.position(1);
        buf = buf.slice();
        metaData.writeToBuffer(0, buf);
        ByteArrayInputStream bis = new ByteArrayInputStream(underlying);
        try {
            stmt.setBinaryStream(2, bis, underlying.length);
            int result = stmt.executeUpdate();
            if (result == 0) {
                throw new RuntimeException("Unable to add meta data for message " + messageId);
            }
        } finally {
            try {
                bis.close();
            } catch (IOException e) {
                throw new SQLException(e);
            }
        }
    } finally {
        stmt.close();
    }
}
From source file: org.quartz.impl.jdbcjobstore.oracle.OracleDelegate.java
public int insertTrigger(Connection conn, Trigger trigger, String state, JobDetail jobDetail)
        throws SQLException, IOException {
    byte[] data = null;
    if (trigger.getJobDataMap().size() > 0) {
        data = serializeJobData(trigger.getJobDataMap()).toByteArray();
    }
    PreparedStatement ps = null;
    ResultSet rs = null;
    int insertResult = 0;
    try {
        ps = conn.prepareStatement(rtp(INSERT_TRIGGER));
        ps.setString(1, trigger.getName());
        ps.setString(2, trigger.getGroup());
        ps.setString(3, trigger.getJobName());
        ps.setString(4, trigger.getJobGroup());
        setBoolean(ps, 5, trigger.isVolatile());
        ps.setString(6, trigger.getDescription());
        ps.setBigDecimal(7, new BigDecimal(String.valueOf(trigger.getNextFireTime().getTime())));
        long prevFireTime = -1;
        if (trigger.getPreviousFireTime() != null) {
            prevFireTime = trigger.getPreviousFireTime().getTime();
        }
        ps.setBigDecimal(8, new BigDecimal(String.valueOf(prevFireTime)));
        ps.setString(9, state);
        if (trigger instanceof SimpleTrigger && ((SimpleTrigger) trigger).hasAdditionalProperties() == false) {
            ps.setString(10, TTYPE_SIMPLE);
        } else if (trigger instanceof CronTrigger && ((CronTrigger) trigger).hasAdditionalProperties() == false) {
            ps.setString(10, TTYPE_CRON);
        } else {
            ps.setString(10, TTYPE_BLOB);
        }
        ps.setBigDecimal(11, new BigDecimal(String.valueOf(trigger.getStartTime().getTime())));
        long endTime = 0;
        if (trigger.getEndTime() != null) {
            endTime = trigger.getEndTime().getTime();
        }
        ps.setBigDecimal(12, new BigDecimal(String.valueOf(endTime)));
        ps.setString(13, trigger.getCalendarName());
        ps.setInt(14, trigger.getMisfireInstruction());
        ps.setBinaryStream(15, null, 0);
        ps.setInt(16, trigger.getPriority());
        insertResult = ps.executeUpdate();
        if (data != null) {
            ps.close();
            ps = conn.prepareStatement(rtp(UPDATE_ORACLE_TRIGGER_JOB_DETAIL_EMPTY_BLOB));
            ps.setString(1, trigger.getName());
            ps.setString(2, trigger.getGroup());
            ps.executeUpdate();
            ps.close();
            ps = conn.prepareStatement(rtp(SELECT_ORACLE_TRIGGER_JOB_DETAIL_BLOB));
            ps.setString(1, trigger.getName());
            ps.setString(2, trigger.getGroup());
            rs = ps.executeQuery();
            int res = 0;
            Blob dbBlob = null;
            if (rs.next()) {
                dbBlob = writeDataToBlob(rs, 1, data);
            } else {
                return res;
            }
            rs.close();
            ps.close();
            ps = conn.prepareStatement(rtp(UPDATE_ORACLE_TRIGGER_JOB_DETAIL_BLOB));
            ps.setBlob(1, dbBlob);
            ps.setString(2, trigger.getName());
            ps.setString(3, trigger.getGroup());
            res = ps.executeUpdate();
        }
    } finally {
        closeResultSet(rs);
        closeStatement(ps);
    }
    if (insertResult > 0) {
        String[] trigListeners = trigger.getTriggerListenerNames();
        for (int i = 0; trigListeners != null && i < trigListeners.length; i++) {
            insertTriggerListener(conn, trigger, trigListeners[i]);
        }
    }
    return insertResult;
}
From source file: org.apache.qpid.server.store.derby.DerbyMessageStore.java
private void updateConfiguredObject(final ConfiguredObjectRecord configuredObject) throws AMQStoreException {
    if (_stateManager.isInState(State.ACTIVE)) {
        try {
            Connection conn = newAutoCommitConnection();
            try {
                PreparedStatement stmt = conn.prepareStatement(FIND_CONFIGURED_OBJECT);
                try {
                    stmt.setString(1, configuredObject.getId().toString());
                    ResultSet rs = stmt.executeQuery();
                    try {
                        if (rs.next()) {
                            PreparedStatement stmt2 = conn.prepareStatement(UPDATE_CONFIGURED_OBJECTS);
                            try {
                                stmt2.setString(1, configuredObject.getType());
                                if (configuredObject.getAttributes() != null) {
                                    byte[] attributesAsBytes = configuredObject.getAttributes()
                                            .getBytes(UTF8_CHARSET);
                                    ByteArrayInputStream bis = new ByteArrayInputStream(attributesAsBytes);
                                    stmt2.setBinaryStream(2, bis, attributesAsBytes.length);
                                } else {
                                    stmt2.setNull(2, Types.BLOB);
                                }
                                stmt2.setString(3, configuredObject.getId().toString());
                                stmt2.execute();
                            } finally {
                                stmt2.close();
                            }
                        }
                    } finally {
                        rs.close();
                    }
                } finally {
                    stmt.close();
                }
            } finally {
                conn.close();
            }
        } catch (SQLException e) {
            throw new AMQStoreException(
                    "Error updating configured object " + configuredObject + " in database: " + e.getMessage(), e);
        }
    }
}
From source file: org.wso2.carbon.dataservices.core.description.query.SQLQuery.java
private void setBinaryValue(int queryType, String paramName, String value, String paramType,
        PreparedStatement sqlQuery, int i) throws SQLException, DataServiceFault {
    if ("IN".equals(paramType)) {
        if (value == null) {
            sqlQuery.setNull(i + 1, java.sql.Types.BINARY);
        } else {
            byte[] data = this.getBytesFromBase64String(value);
            sqlQuery.setBinaryStream(i + 1, new ByteArrayInputStream(data), data.length);
        }
    } else if ("INOUT".equals(paramType)) {
        if (value == null) {
            ((CallableStatement) sqlQuery).setNull(i + 1, java.sql.Types.BINARY);
        } else {
            byte[] data = this.getBytesFromBase64String(value);
            ((CallableStatement) sqlQuery).setBinaryStream(i + 1, new ByteArrayInputStream(data), data.length);
        }
        ((CallableStatement) sqlQuery).registerOutParameter(i + 1, java.sql.Types.BINARY);
    } else {
        ((CallableStatement) sqlQuery).registerOutParameter(i + 1, java.sql.Types.BINARY);
    }
}
From source file: org.quartz.impl.jdbcjobstore.PointbaseDelegate.java
public int updateTrigger(Connection conn, Trigger trigger, String state, JobDetail jobDetail)
        throws SQLException, IOException {
    ByteArrayOutputStream baos = serializeJobData(trigger.getJobDataMap());
    int len = baos.toByteArray().length;
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    PreparedStatement ps = null;
    int insertResult = 0;
    try {
        ps = conn.prepareStatement(rtp(UPDATE_TRIGGER));
        ps.setString(1, trigger.getJobName());
        ps.setString(2, trigger.getJobGroup());
        setBoolean(ps, 3, trigger.isVolatile());
        ps.setString(4, trigger.getDescription());
        long nextFireTime = -1;
        if (trigger.getNextFireTime() != null) {
            nextFireTime = trigger.getNextFireTime().getTime();
        }
        ps.setBigDecimal(5, new BigDecimal(String.valueOf(nextFireTime)));
        long prevFireTime = -1;
        if (trigger.getPreviousFireTime() != null) {
            prevFireTime = trigger.getPreviousFireTime().getTime();
        }
        ps.setBigDecimal(6, new BigDecimal(String.valueOf(prevFireTime)));
        ps.setString(7, state);
        if (trigger instanceof SimpleTrigger && ((SimpleTrigger) trigger).hasAdditionalProperties() == false) {
            // updateSimpleTrigger(conn, (SimpleTrigger)trigger);
            ps.setString(8, TTYPE_SIMPLE);
        } else if (trigger instanceof CronTrigger && ((CronTrigger) trigger).hasAdditionalProperties() == false) {
            // updateCronTrigger(conn, (CronTrigger)trigger);
            ps.setString(8, TTYPE_CRON);
        } else {
            // updateBlobTrigger(conn, trigger);
            ps.setString(8, TTYPE_BLOB);
        }
        ps.setBigDecimal(9, new BigDecimal(String.valueOf(trigger.getStartTime().getTime())));
        long endTime = 0;
        if (trigger.getEndTime() != null) {
            endTime = trigger.getEndTime().getTime();
        }
        ps.setBigDecimal(10, new BigDecimal(String.valueOf(endTime)));
        ps.setString(11, trigger.getCalendarName());
        ps.setInt(12, trigger.getMisfireInstruction());
        ps.setInt(13, trigger.getPriority());
        ps.setBinaryStream(14, bais, len);
        ps.setString(15, trigger.getName());
        ps.setString(16, trigger.getGroup());
        insertResult = ps.executeUpdate();
    } finally {
        closeStatement(ps);
    }
    if (insertResult > 0) {
        deleteTriggerListeners(conn, trigger.getName(), trigger.getGroup());
        String[] trigListeners = trigger.getTriggerListenerNames();
        for (int i = 0; trigListeners != null && i < trigListeners.length; i++) {
            insertTriggerListener(conn, trigger, trigListeners[i]);
        }
    }
    return insertResult;
}
From source file: org.apache.qpid.server.store.derby.DerbyMessageStore.java
private void insertConfiguredObject(ConfiguredObjectRecord configuredObject) throws AMQStoreException {
    if (_stateManager.isInState(State.ACTIVE)) {
        try {
            Connection conn = newAutoCommitConnection();
            try {
                PreparedStatement stmt = conn.prepareStatement(FIND_CONFIGURED_OBJECT);
                try {
                    stmt.setString(1, configuredObject.getId().toString());
                    ResultSet rs = stmt.executeQuery();
                    try {
                        // If we don't have any data in the result set then we can add this configured object
                        if (!rs.next()) {
                            PreparedStatement insertStmt = conn.prepareStatement(INSERT_INTO_CONFIGURED_OBJECTS);
                            try {
                                insertStmt.setString(1, configuredObject.getId().toString());
                                insertStmt.setString(2, configuredObject.getType());
                                if (configuredObject.getAttributes() == null) {
                                    insertStmt.setNull(3, Types.BLOB);
                                } else {
                                    byte[] attributesAsBytes = configuredObject.getAttributes()
                                            .getBytes(UTF8_CHARSET);
                                    ByteArrayInputStream bis = new ByteArrayInputStream(attributesAsBytes);
                                    insertStmt.setBinaryStream(3, bis, attributesAsBytes.length);
                                }
                                insertStmt.execute();
                            } finally {
                                insertStmt.close();
                            }
                        }
                    } finally {
                        rs.close();
                    }
                } finally {
                    stmt.close();
                }
            } finally {
                conn.close();
            }
        } catch (SQLException e) {
            throw new AMQStoreException("Error inserting of configured object " + configuredObject
                    + " into database: " + e.getMessage(), e);
        }
    }
}
From source file: org.wso2.carbon.idp.mgt.dao.IdPManagementDAO.java
private void setBlobValue(String value, PreparedStatement prepStmt, int index) throws SQLException, IOException {
    if (value != null) {
        InputStream inputStream = new ByteArrayInputStream(CharacterEncoder.getSafeText(value).getBytes());
        if (inputStream != null) {
            prepStmt.setBinaryStream(index, inputStream, inputStream.available());
        } else {
            prepStmt.setBinaryStream(index,
                    new ByteArrayInputStream(CharacterEncoder.getSafeText("").getBytes()), 0);
        }
    } else {
        prepStmt.setBinaryStream(index,
                new ByteArrayInputStream(CharacterEncoder.getSafeText("").getBytes()), 0);
    }
}
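This last helper passes inputStream.available() as the length, which is safe here only because the stream is a ByteArrayInputStream, whose available() reports exactly the remaining bytes; for arbitrary streams available() is merely an estimate and not a reliable length (and the inner inputStream != null check can never be false, since new never returns null). A hedged alternative sketch that derives the length from the byte array itself; CharacterEncoder.getSafeText is carried over from the example above and assumed to behave as it does there:

// Sketch only: same behaviour as setBlobValue above, but the length comes from the array, not available().
private void setBlobValue(String value, PreparedStatement prepStmt, int index) throws SQLException {
    String safeText = CharacterEncoder.getSafeText(value == null ? "" : value);
    byte[] bytes = safeText.getBytes();
    prepStmt.setBinaryStream(index, new java.io.ByteArrayInputStream(bytes), bytes.length);
}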