List of usage examples for java.sql.ResultSet.getBytes

byte[] getBytes(String columnLabel) throws SQLException;

Retrieves the value of the designated column in the current row of this ResultSet object as a byte array in the Java programming language.
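Before the full-length examples below, here is a minimal sketch of the common pattern: select a binary column, read it with getBytes using the column label, and make the SQL NULL case explicit with wasNull(). The PHOTOS table, DATA column, and readPhoto method are illustrative assumptions, not taken from any of the source files listed here; any JDBC DataSource will do.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;

public class GetBytesExample {

    // Returns the binary contents of the hypothetical DATA column for the given id,
    // or null when no row matches or the column is SQL NULL.
    public static byte[] readPhoto(DataSource dataSource, long id) throws SQLException {
        String sql = "SELECT DATA FROM PHOTOS WHERE ID = ?";
        try (Connection con = dataSource.getConnection();
                PreparedStatement ps = con.prepareStatement(sql)) {
            ps.setLong(1, id);
            try (ResultSet rs = ps.executeQuery()) {
                if (!rs.next()) {
                    return null; // no matching row
                }
                byte[] data = rs.getBytes("DATA"); // column-label overload of getBytes
                return rs.wasNull() ? null : data; // getBytes already yields null for SQL NULL; wasNull() makes it explicit
            }
        }
    }
}

The examples that follow use the same call to copy BLOB-style columns between tables, to deserialize stored objects, and to map binary columns onto byte arrays.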
From source file:org.springframework.jdbc.support.JdbcUtils.java
/**
 * Retrieve a JDBC column value from a ResultSet, using the specified value type.
 * <p>Uses the specifically typed ResultSet accessor methods, falling back to
 * {@link #getResultSetValue(java.sql.ResultSet, int)} for unknown types.
 * <p>Note that the returned value may not be assignable to the specified
 * required type, in case of an unknown type. Calling code needs to deal
 * with this case appropriately, e.g. throwing a corresponding exception.
 * @param rs is the ResultSet holding the data
 * @param index is the column index
 * @param requiredType the required value type (may be {@code null})
 * @return the value object (possibly not of the specified required type,
 * with further conversion steps necessary)
 * @throws SQLException if thrown by the JDBC API
 * @see #getResultSetValue(ResultSet, int)
 */
@Nullable
public static Object getResultSetValue(ResultSet rs, int index, @Nullable Class<?> requiredType) throws SQLException {
    if (requiredType == null) {
        return getResultSetValue(rs, index);
    }

    Object value;

    // Explicitly extract typed value, as far as possible.
    if (String.class == requiredType) {
        return rs.getString(index);
    } else if (boolean.class == requiredType || Boolean.class == requiredType) {
        value = rs.getBoolean(index);
    } else if (byte.class == requiredType || Byte.class == requiredType) {
        value = rs.getByte(index);
    } else if (short.class == requiredType || Short.class == requiredType) {
        value = rs.getShort(index);
    } else if (int.class == requiredType || Integer.class == requiredType) {
        value = rs.getInt(index);
    } else if (long.class == requiredType || Long.class == requiredType) {
        value = rs.getLong(index);
    } else if (float.class == requiredType || Float.class == requiredType) {
        value = rs.getFloat(index);
    } else if (double.class == requiredType || Double.class == requiredType || Number.class == requiredType) {
        value = rs.getDouble(index);
    } else if (BigDecimal.class == requiredType) {
        return rs.getBigDecimal(index);
    } else if (java.sql.Date.class == requiredType) {
        return rs.getDate(index);
    } else if (java.sql.Time.class == requiredType) {
        return rs.getTime(index);
    } else if (java.sql.Timestamp.class == requiredType || java.util.Date.class == requiredType) {
        return rs.getTimestamp(index);
    } else if (byte[].class == requiredType) {
        return rs.getBytes(index);
    } else if (Blob.class == requiredType) {
        return rs.getBlob(index);
    } else if (Clob.class == requiredType) {
        return rs.getClob(index);
    } else if (requiredType.isEnum()) {
        // Enums can either be represented through a String or an enum index value:
        // leave enum type conversion up to the caller (e.g. a ConversionService)
        // but make sure that we return nothing other than a String or an Integer.
        Object obj = rs.getObject(index);
        if (obj instanceof String) {
            return obj;
        } else if (obj instanceof Number) {
            // Defensively convert any Number to an Integer (as needed by our
            // ConversionService's IntegerToEnumConverterFactory) for use as index
            return NumberUtils.convertNumberToTargetClass((Number) obj, Integer.class);
        } else {
            // e.g. on Postgres: getObject returns a PGObject but we need a String
            return rs.getString(index);
        }
    } else {
        // Some unknown type desired -> rely on getObject.
        try {
            return rs.getObject(index, requiredType);
        } catch (AbstractMethodError err) {
            logger.debug("JDBC driver does not implement JDBC 4.1 'getObject(int, Class)' method", err);
        } catch (SQLFeatureNotSupportedException ex) {
            logger.debug("JDBC driver does not support JDBC 4.1 'getObject(int, Class)' method", ex);
        } catch (SQLException ex) {
            logger.debug("JDBC driver has limited support for JDBC 4.1 'getObject(int, Class)' method", ex);
        }

        // Corresponding SQL types for JSR-310 / Joda-Time types, left up
        // to the caller to convert them (e.g. through a ConversionService).
        String typeName = requiredType.getSimpleName();
        if ("LocalDate".equals(typeName)) {
            return rs.getDate(index);
        } else if ("LocalTime".equals(typeName)) {
            return rs.getTime(index);
        } else if ("LocalDateTime".equals(typeName)) {
            return rs.getTimestamp(index);
        }

        // Fall back to getObject without type specification, again
        // left up to the caller to convert the value if necessary.
        return getResultSetValue(rs, index);
    }

    // Perform was-null check if necessary (for results that the JDBC driver returns as primitives).
    return (rs.wasNull() ? null : value);
}
From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java
@Override
public void moveConfigOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_CONFIG_OPERATION WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);
        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();
        sql = "INSERT INTO DM_CONFIG_OPERATION_ARCH VALUES(?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);
        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("OPERATION_ID"));
            stmt2.setBytes(2, rs.getBytes("OPERATION_CONFIG"));
            stmt2.setInt(3, rs.getInt("ENABLED"));
            stmt2.setTimestamp(4, this.currentTimestamp);
            stmt2.addBatch();
            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [CONFIG_OPERATION] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_CONFIG_OPERATION"
                + " WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving config operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}
From source file:org.apache.synapse.message.store.impl.jdbc.JDBCMessageStore.java
/**
 * Process a given Statement object
 *
 * @param stmt - Statement to process
 * @return - Results as a List of MessageContexts
 */
private MessageContext processResultingStatement(Statement stmt) throws SynapseException {
    MessageContext resultMsg = null;
    // Execute the prepared statement, and return list of messages as an ArrayList
    Connection con = null;
    ResultSet rs = null;
    PreparedStatement ps = null;
    try {
        con = jdbcConfiguration.getConnection();
        ps = con.prepareStatement(stmt.getRawStatement());
        int index = 1;
        for (Object param : stmt.getParameters()) {
            if (param instanceof String) {
                ps.setString(index, (String) param);
            } else if (param instanceof Integer) {
                ps.setInt(index, (Integer) param);
            }
            index++;
        }
        rs = ps.executeQuery();
        while (rs.next()) {
            byte[] msgObj;
            try {
                msgObj = rs.getBytes("message");
            } catch (SQLException e) {
                throw new SynapseException("Error executing statement : " + stmt.getRawStatement()
                        + " against DataSource : " + jdbcConfiguration.getDSName(), e);
            }
            if (msgObj != null) {
                ObjectInputStream ios = null;
                try {
                    // Convert back to MessageContext and add to list
                    ios = new ObjectInputStream(new ByteArrayInputStream(msgObj));
                    Object msg = ios.readObject();
                    if (msg instanceof StorableMessage) {
                        StorableMessage jdbcMsg = (StorableMessage) msg;
                        org.apache.axis2.context.MessageContext axis2Mc = this.newAxis2Mc();
                        MessageContext synapseMc = this.newSynapseMc(axis2Mc);
                        resultMsg = MessageConverter.toMessageContext(jdbcMsg, axis2Mc, synapseMc);
                    }
                } catch (Exception e) {
                    throw new SynapseException("Error reading object input stream", e);
                } finally {
                    try {
                        ios.close();
                    } catch (IOException e) {
                        logger.error("Error while closing object input stream", e);
                    }
                }
            } else {
                throw new SynapseException("Retrieved Object is null");
            }
        }
    } catch (SQLException e) {
        throw new SynapseException("Processing Statement failed : " + stmt.getRawStatement()
                + " against DataSource : " + jdbcConfiguration.getDSName(), e);
    } finally {
        close(con, ps, rs);
    }
    return resultMsg;
}
From source file:org.wso2.carbon.device.mgt.core.archival.dao.impl.ArchivalDAOImpl.java
@Override
public void moveProfileOperations() throws ArchivalDAOException {
    Statement stmt = null;
    PreparedStatement stmt2 = null;
    Statement stmt3 = null;
    ResultSet rs = null;
    try {
        Connection conn = ArchivalSourceDAOFactory.getConnection();
        String sql = "SELECT * FROM DM_PROFILE_OPERATION WHERE OPERATION_ID IN "
                + "(SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt = this.createMemoryEfficientStatement(conn);
        rs = stmt.executeQuery(sql);
        Connection conn2 = ArchivalDestinationDAOFactory.getConnection();
        sql = "INSERT INTO DM_PROFILE_OPERATION_ARCH VALUES(?, ?, ?, ?)";
        stmt2 = conn2.prepareStatement(sql);
        int count = 0;
        while (rs.next()) {
            stmt2.setInt(1, rs.getInt("OPERATION_ID"));
            stmt2.setInt(2, rs.getInt("ENABLED"));
            stmt2.setBytes(3, rs.getBytes("OPERATION_DETAILS"));
            stmt2.setTimestamp(4, this.currentTimestamp);
            stmt2.addBatch();
            if (++count % batchSize == 0) {
                stmt2.executeBatch();
            }
        }
        stmt2.executeBatch();
        if (log.isDebugEnabled()) {
            log.debug(count + " [PROFILE_OPERATION] Records copied to the archival table. Starting deletion");
        }
        sql = "DELETE FROM DM_PROFILE_OPERATION"
                + " WHERE OPERATION_ID IN (SELECT ID FROM DM_ARCHIVED_OPERATIONS)";
        stmt3 = conn.createStatement();
        int affected = stmt3.executeUpdate(sql);
        if (log.isDebugEnabled()) {
            log.debug(affected + " Rows deleted");
        }
    } catch (SQLException e) {
        throw new ArchivalDAOException("Error occurred while moving profile operations", e);
    } finally {
        ArchivalDAOUtil.cleanupResources(stmt, rs);
        ArchivalDAOUtil.cleanupResources(stmt2);
        ArchivalDAOUtil.cleanupResources(stmt3);
    }
}
From source file:org.sakaiproject.util.BaseDbBinarySingleStorage.java
protected List loadResources(String sql, Object[] fields) {
    List all = m_sql.dbRead(sql, fields, new SqlReader() {
        public Object readSqlResultRecord(ResultSet result) {
            try {
                // create the Resource from the db xml
                return readResource(result.getBytes(1));
            } catch (SQLException ignore) {
                return null;
            }
        }
    });
    return all;
}
From source file:org.apache.nifi.processors.standard.TestPutSQL.java
@Test
public void testBinaryColumnTypes() throws InitializationException, ProcessException, SQLException, IOException, ParseException {
    final TestRunner runner = TestRunners.newTestRunner(PutSQL.class);
    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            stmt.executeUpdate("CREATE TABLE BINARYTESTS (id integer primary key, bn1 CHAR(8) FOR BIT DATA, bn2 VARCHAR(100) FOR BIT DATA, "
                    + "bn3 LONG VARCHAR FOR BIT DATA)");
        }
    }
    runner.addControllerService("dbcp", service);
    runner.enableControllerService(service);
    runner.setProperty(PutSQL.CONNECTION_POOL, "dbcp");

    final byte[] insertStatement = "INSERT INTO BINARYTESTS (ID, bn1, bn2, bn3) VALUES (?, ?, ?, ?)".getBytes();

    final String arg2BIN = fixedSizeByteArrayAsASCIIString(8);
    final String art3VARBIN = fixedSizeByteArrayAsASCIIString(50);
    final String art4LongBin = fixedSizeByteArrayAsASCIIString(32700); //max size supported by Derby

    //ASCII (default) binary format
    Map<String, String> attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "1");
    attributes.put("sql.args.2.type", String.valueOf(Types.BINARY));
    attributes.put("sql.args.2.value", arg2BIN);
    attributes.put("sql.args.3.type", String.valueOf(Types.VARBINARY));
    attributes.put("sql.args.3.value", art3VARBIN);
    attributes.put("sql.args.4.type", String.valueOf(Types.LONGVARBINARY));
    attributes.put("sql.args.4.value", art4LongBin);
    runner.enqueue(insertStatement, attributes);

    //ASCII with specified format
    attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "2");
    attributes.put("sql.args.2.type", String.valueOf(Types.BINARY));
    attributes.put("sql.args.2.value", arg2BIN);
    attributes.put("sql.args.2.format", "ascii");
    attributes.put("sql.args.3.type", String.valueOf(Types.VARBINARY));
    attributes.put("sql.args.3.value", art3VARBIN);
    attributes.put("sql.args.3.format", "ascii");
    attributes.put("sql.args.4.type", String.valueOf(Types.LONGVARBINARY));
    attributes.put("sql.args.4.value", art4LongBin);
    attributes.put("sql.args.4.format", "ascii");
    runner.enqueue(insertStatement, attributes);

    //Hex
    final String arg2HexBIN = fixedSizeByteArrayAsHexString(8);
    final String art3HexVARBIN = fixedSizeByteArrayAsHexString(50);
    final String art4HexLongBin = fixedSizeByteArrayAsHexString(32700);

    attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "3");
    attributes.put("sql.args.2.type", String.valueOf(Types.BINARY));
    attributes.put("sql.args.2.value", arg2HexBIN);
    attributes.put("sql.args.2.format", "hex");
    attributes.put("sql.args.3.type", String.valueOf(Types.VARBINARY));
    attributes.put("sql.args.3.value", art3HexVARBIN);
    attributes.put("sql.args.3.format", "hex");
    attributes.put("sql.args.4.type", String.valueOf(Types.LONGVARBINARY));
    attributes.put("sql.args.4.value", art4HexLongBin);
    attributes.put("sql.args.4.format", "hex");
    runner.enqueue(insertStatement, attributes);

    //Base64
    final String arg2Base64BIN = fixedSizeByteArrayAsBase64String(8);
    final String art3Base64VARBIN = fixedSizeByteArrayAsBase64String(50);
    final String art4Base64LongBin = fixedSizeByteArrayAsBase64String(32700);

    attributes = new HashMap<>();
    attributes.put("sql.args.1.type", String.valueOf(Types.INTEGER));
    attributes.put("sql.args.1.value", "4");
    attributes.put("sql.args.2.type", String.valueOf(Types.BINARY));
    attributes.put("sql.args.2.value", arg2Base64BIN);
    attributes.put("sql.args.2.format", "base64");
    attributes.put("sql.args.3.type", String.valueOf(Types.VARBINARY));
    attributes.put("sql.args.3.value", art3Base64VARBIN);
    attributes.put("sql.args.3.format", "base64");
    attributes.put("sql.args.4.type", String.valueOf(Types.LONGVARBINARY));
    attributes.put("sql.args.4.value", art4Base64LongBin);
    attributes.put("sql.args.4.format", "base64");
    runner.enqueue(insertStatement, attributes);

    runner.run();

    runner.assertAllFlowFilesTransferred(PutSQL.REL_SUCCESS, 4);

    try (final Connection conn = service.getConnection()) {
        try (final Statement stmt = conn.createStatement()) {
            final ResultSet rs = stmt.executeQuery("SELECT * FROM BINARYTESTS");

            //First Batch
            assertTrue(rs.next());
            assertEquals(1, rs.getInt(1));
            assertTrue(Arrays.equals(arg2BIN.getBytes("ASCII"), rs.getBytes(2)));
            assertTrue(Arrays.equals(art3VARBIN.getBytes("ASCII"), rs.getBytes(3)));
            assertTrue(Arrays.equals(art4LongBin.getBytes("ASCII"), rs.getBytes(4)));

            //Second batch
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            assertTrue(Arrays.equals(arg2BIN.getBytes("ASCII"), rs.getBytes(2)));
            assertTrue(Arrays.equals(art3VARBIN.getBytes("ASCII"), rs.getBytes(3)));
            assertTrue(Arrays.equals(art4LongBin.getBytes("ASCII"), rs.getBytes(4)));

            //Third Batch (Hex)
            assertTrue(rs.next());
            assertEquals(3, rs.getInt(1));
            assertTrue(Arrays.equals(DatatypeConverter.parseHexBinary(arg2HexBIN), rs.getBytes(2)));
            assertTrue(Arrays.equals(DatatypeConverter.parseHexBinary(art3HexVARBIN), rs.getBytes(3)));
            assertTrue(Arrays.equals(DatatypeConverter.parseHexBinary(art4HexLongBin), rs.getBytes(4)));

            //Fourth Batch (Base64)
            assertTrue(rs.next());
            assertEquals(4, rs.getInt(1));
            assertTrue(Arrays.equals(DatatypeConverter.parseBase64Binary(arg2Base64BIN), rs.getBytes(2)));
            assertTrue(Arrays.equals(DatatypeConverter.parseBase64Binary(art3Base64VARBIN), rs.getBytes(3)));
            assertTrue(Arrays.equals(DatatypeConverter.parseBase64Binary(art4Base64LongBin), rs.getBytes(4)));

            assertFalse(rs.next());
        }
    }
}
From source file:org.sakaiproject.util.BaseDbDualSingleStorage.java
protected List loadResources(String sql, Object[] fields) {
    List all = m_sql.dbRead(sql, fields, new SqlReader() {
        public Object readSqlResultRecord(ResultSet result) {
            try {
                // create the Resource from the db xml
                return readResource(result.getString(1), result.getBytes(2));
            } catch (SQLException ignore) {
                return null;
            }
        }
    });
    return all;
}
From source file:com.cnd.greencube.server.dao.jdbc.JdbcDAO.java
@SuppressWarnings("rawtypes") private Object getColumnValue(ResultSet rs, ResultSetMetaData meta, int index, Class clazz) throws Exception { Object value = null;/*from ww w . j av a 2s . c o m*/ int type = meta.getColumnType(index); if (clazz == String.class) { value = rs.getString(index); } else if (clazz == Integer.class) { value = rs.getInt(index); } else if (clazz == Boolean.class) { value = rs.getBoolean(index); } else if (clazz == byte[].class) { if (type == Types.BLOB) value = rs.getBlob(index); else value = rs.getBytes(index); } else if (clazz == Long.class) { value = rs.getLong(index); } else if (clazz == BigInteger.class) { value = rs.getBigDecimal(index); } else if (clazz == Float.class) { value = rs.getFloat(index); } else if (clazz == Double.class) { value = rs.getDouble(index); } else if (clazz == java.util.Date.class) { Timestamp time = rs.getTimestamp(index); if (time == null) value = null; else { value = new java.util.Date(time.getTime()); } } else if (clazz == java.sql.Date.class) { value = rs.getDate(index); } else if (clazz == java.sql.Time.class) { value = rs.getTime(index); } else if (clazz == java.sql.Timestamp.class) { value = rs.getTimestamp(index); } else { throw new Exception("Cannote determin this column type:" + meta.getColumnName(index)); } return value; }
From source file:org.sakaiproject.util.BaseDbBinarySingleStorage.java
/**
 * Get selected Resources, filtered by a test on the id field
 *
 * @param filter
 *        A filter to select what gets returned.
 * @return The list of selected Resources.
 */
public List getSelectedResources(final Filter filter) {
    List all = new Vector();

    // read all users from the db
    String sql = singleStorageSql.getXmlAndFieldSql(m_resourceTableIdField, m_resourceTableName);
    // %%% + "order by " + m_resourceTableOrderField + " asc";

    List xml = m_sql.dbRead(sql, null, new SqlReader() {
        public Object readSqlResultRecord(ResultSet result) {
            try {
                // read the id m_resourceTableIdField
                String id = result.getString(1);
                byte[] blob = result.getBytes(2);
                if (!filter.accept(caseId(id)))
                    return null;
                return readResource(blob);
            } catch (SQLException ignore) {
                return null;
            }
        }
    });

    // process all result xml into user objects
    if (!xml.isEmpty()) {
        for (int i = 0; i < xml.size(); i++) {
            Entity entry = (Entity) xml.get(i);
            if (entry != null)
                all.add(entry);
        }
    }

    return all;
}
From source file:org.sakaiproject.util.BaseDbBinarySingleStorage.java
/**
 * Get a lock on the Resource with this id, or null if a lock cannot be
 * gotten.
 *
 * @param id
 *        The user id.
 * @return The locked Resource with this id, or null if this records cannot
 *         be locked.
 */
public Edit editResource(String id) {
    Edit edit = null;

    if (m_locksAreInDb) {
        if ("oracle".equals(m_sql.getVendor())) {
            final List<Entity> l = new ArrayList<Entity>();
            Connection lock = null;
            if (m_user instanceof EntityReaderHandler) {
                // read the record and get a lock on it (non blocking)
                String statement = "select XML from " + m_resourceTableName + " where ( " + m_resourceTableIdField
                        + " = '" + StorageUtils.escapeSql(caseId(id)) + "' )" + " for update nowait";
                lock = m_sql.dbReadLock(statement, new SqlReader() {
                    public Object readSqlResultRecord(ResultSet result) {
                        try {
                            l.add(readResource(result.getBytes(1)));
                        } catch (SQLException e) {
                            M_log.warn("Failed to retrieve record ", e);
                        }
                        return null;
                    }
                });
            } else {
                // read the record and get a lock on it (non blocking)
                String statement = "select BENTRY, XML from " + m_resourceTableName + " where ( " + m_resourceTableIdField
                        + " = '" + StorageUtils.escapeSql(caseId(id)) + "' )" + " for update nowait";
                lock = m_sql.dbReadLock(statement, new SqlReader() {
                    public Object readSqlResultRecord(ResultSet result) {
                        try {
                            l.add(readResource(result.getBytes(1)));
                        } catch (SQLException e) {
                            M_log.warn("Failed to retrieve record ", e);
                        }
                        return null;
                    }
                });
            }

            // for missing or already locked...
            if ((lock == null) || (l.size() == 0))
                return null;

            // make first a Resource, then an Edit
            Entity entry = l.get(0);
            edit = m_user.newResourceEdit(null, entry);

            // store the lock for this object
            m_locks.put(entry.getReference(), lock);
        } else {
            throw new UnsupportedOperationException(
                    "Record locking only available when configured with Oracle database");
        }
    }

    // if the locks are in a separate table in the db
    else if (m_locksAreInTable) {
        // read the record - fail if not there
        Entity entry = getResource(id);
        if (entry == null)
            return null;

        // write a lock to the lock table - if we can do it, we get the lock
        String statement = singleStorageSql.getInsertLocks();

        // we need session id and user id
        String sessionId = UsageSessionService.getSessionId();
        if (sessionId == null) {
            sessionId = "";
        }

        // collect the fields
        Object fields[] = new Object[4];
        fields[0] = m_resourceTableName;
        fields[1] = internalRecordId(caseId(id));
        fields[2] = TimeService.newTime();
        fields[3] = sessionId;

        // add the lock - if fails, someone else has the lock
        boolean ok = m_sql.dbWriteFailQuiet(null, statement, fields);
        if (!ok) {
            return null;
        }

        // we got the lock! - make the edit from the Resource
        edit = m_user.newResourceEdit(null, entry);
    }

    // otherwise, get the lock locally
    else {
        // get the entry, and check for existence
        Entity entry = getResource(id);
        if (entry == null)
            return null;

        // we only sync this getting - someone may release a lock out of
        // sync
        synchronized (m_locks) {
            // if already locked
            if (m_locks.containsKey(entry.getReference()))
                return null;

            // make the edit from the Resource
            edit = m_user.newResourceEdit(null, entry);

            // store the edit in the locks by reference
            m_locks.put(entry.getReference(), edit);
        }
    }

    return edit;
}