Example usage for java.sql ResultSet getBlob

List of usage examples for java.sql ResultSet getBlob

Introduction

On this page you can find example usage for java.sql ResultSet getBlob.

Prototype

Blob getBlob(String columnLabel) throws SQLException;

Document

Retrieves the value of the designated column in the current row of this ResultSet object as a Blob object in the Java programming language.
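A companion overload, Blob getBlob(int columnIndex), retrieves the column by its 1-based index; both variants return null when the column value is SQL NULL. The minimal sketch below is illustrative only (the table name, column label, and the connection and id variables are hypothetical): it fetches the Blob by label, materializes it as a byte array, and frees the driver resources afterwards.

try (PreparedStatement ps = connection.prepareStatement("SELECT doc FROM files WHERE id = ?")) {
    ps.setLong(1, id);
    try (ResultSet rs = ps.executeQuery()) {
        if (rs.next()) {
            Blob blob = rs.getBlob("doc"); // null if the column holds SQL NULL
            if (blob != null) {
                try {
                    // Blob byte positions are 1-based
                    byte[] bytes = blob.getBytes(1, (int) blob.length());
                    // ... use bytes ...
                } finally {
                    blob.free(); // release any resources the driver holds for this Blob
                }
            }
        }
    }
}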

Usage

From source file:org.springframework.jdbc.support.lob.DefaultLobHandler.java

@Override
@Nullable
public byte[] getBlobAsBytes(ResultSet rs, int columnIndex) throws SQLException {
    logger.debug("Returning BLOB as bytes");
    if (this.wrapAsLob) {
        Blob blob = rs.getBlob(columnIndex);
        return blob.getBytes(1, (int) blob.length());
    } else {
        return rs.getBytes(columnIndex);
    }
}

From source file:nl.nn.adapterframework.jdbc.dbms.GenericDbmsSupport.java

public Object getBlobUpdateHandle(ResultSet rs, int column) throws SQLException, JdbcException {
    Blob blob = rs.getBlob(column);
    if (blob == null) {
        throw new JdbcException("no blob found in column [" + column + "]");
    }
    return blob;
}

From source file:org.springframework.jdbc.support.lob.DefaultLobHandler.java

@Override
@Nullable
public InputStream getBlobAsBinaryStream(ResultSet rs, int columnIndex) throws SQLException {
    logger.debug("Returning BLOB as binary stream");
    if (this.wrapAsLob) {
        Blob blob = rs.getBlob(columnIndex);
        return blob.getBinaryStream();
    } else {
        return rs.getBinaryStream(columnIndex);
    }
}

From source file:nl.nn.adapterframework.jdbc.dbms.GenericDbmsSupport.java

public Object getBlobUpdateHandle(ResultSet rs, String column) throws SQLException, JdbcException {
    Blob blob = rs.getBlob(column);
    if (blob == null) {
        throw new JdbcException("no blob found in column [" + column + "]");
    }
    return blob;
}

From source file:org.apache.sqoop.lib.LargeObjectLoader.java

/**
 * Actually read a BlobRef instance from the ResultSet and materialize
 * the data either inline or to a file.
 *
 * @param colNum the column of the ResultSet's current row to read.
 * @param r the ResultSet to read from.
 * @return a BlobRef encapsulating the data in this field.
 * @throws IOException if an error occurs writing to the FileSystem.
 * @throws SQLException if an error occurs reading from the database.
 */
public com.cloudera.sqoop.lib.BlobRef readBlobRef(int colNum, ResultSet r)
        throws IOException, InterruptedException, SQLException {

    long maxInlineLobLen = conf.getLong(MAX_INLINE_LOB_LEN_KEY, DEFAULT_MAX_LOB_LENGTH);

    Blob b = r.getBlob(colNum);
    if (null == b) {
        return null;
    } else if (b.length() > maxInlineLobLen) {
        // Deserialize very large BLOBs into separate files.
        long len = b.length();
        LobFile.Writer lobWriter = getBlobWriter();

        long recordOffset = lobWriter.tell();
        InputStream is = null;
        OutputStream os = lobWriter.writeBlobRecord(len);
        try {
            is = b.getBinaryStream();
            copyAll(is, os);
        } finally {
            if (null != os) {
                os.close();
            }

            if (null != is) {
                is.close();
            }

            // Mark the record as finished.
            lobWriter.finishRecord();
        }

        return new com.cloudera.sqoop.lib.BlobRef(getRelativePath(curBlobWriter), recordOffset, len);
    } else {
        // This is a 1-based array.
        return new com.cloudera.sqoop.lib.BlobRef(b.getBytes(1, (int) b.length()));
    }
}

From source file:com.orangeandbronze.jblubble.jdbc.PgJdbcBlobstoreService.java

@Override
public BlobKey createBlob(BlobstoreWriteCallback callback, String name, String contentType)
        throws IOException, BlobstoreException {
    boolean resetCommitMode = false;
    try (Connection connection = dataSource.getConnection()) {
        if (connection.getAutoCommit()) {
            connection.setAutoCommit(false);
            resetCommitMode = true;
        }
        try {
            int rowCount;
            try (PreparedStatement ps = connection.prepareStatement(getInsertSql(),
                    Statement.RETURN_GENERATED_KEYS)) {
                ps.setString(1, name);
                ps.setString(2, contentType);
                ps.setTimestamp(3, new java.sql.Timestamp(new java.util.Date().getTime()));
                rowCount = ps.executeUpdate();
                if (rowCount == 0) {
                    throw new BlobstoreException("Creating blob failed, no rows created.");
                }
                final long generatedId = getGeneratedKey(ps);
                long size;
                String md5Hash = null;
                try (PreparedStatement ps2 = connection.prepareStatement(getSelectContentByIdSql())) {
                    ps2.setLong(1, generatedId);
                    ResultSet rs = ps2.executeQuery();
                    if (!rs.next()) {
                        throw new BlobstoreException("Creating blob failed, no rows created.");
                    }
                    Blob contentBlob = rs.getBlob(1);
                    try {
                        OutputStream out = new BufferedOutputStream(contentBlob.setBinaryStream(1L),
                                getBufferSize());
                        try {
                            CountingOutputStream countingOutputStream = new CountingOutputStream(out);
                            try {
                                MessageDigest md5;
                                try {
                                    md5 = MessageDigest.getInstance(MD5_ALGORITHM_NAME);
                                    try (DigestOutputStream digestOutputStream = new DigestOutputStream(
                                            countingOutputStream, md5)) {
                                        size = callback.writeToOutputStream(digestOutputStream);
                                        if (size == -1L) {
                                            size = countingOutputStream.getByteCount();
                                        }
                                        md5Hash = new String(encodeHex(md5.digest()));
                                    }
                                } catch (NoSuchAlgorithmException e) {
                                    throw new BlobstoreException(e);
                                }
                            } finally {
                                try {
                                    countingOutputStream.close();
                                } catch (IOException e) {
                                    // Since digestOutputStream gets closed,
                                    // the wrapped countingOutputStream does
                                    // not really need to get closed again.
                                }
                            }
                        } finally {
                            try {
                                out.close();
                            } catch (IOException e) {
                                // Since digestOutputStream gets closed,
                                // the wrapped buffered OutputStream does
                                // not really need to get closed again.
                            }
                        }
                    } finally {
                        contentBlob.free();
                    }
                }
                try (PreparedStatement ps3 = connection.prepareStatement(getUpdateSizeSql())) {
                    ps3.setLong(1, size);
                    ps3.setString(2, md5Hash);
                    ps3.setLong(3, generatedId);
                    rowCount = ps3.executeUpdate();
                    if (rowCount == 0) {
                        throw new BlobstoreException("Creating blob failed, no rows created.");
                    }
                }
                if (resetCommitMode) {
                    connection.commit();
                }
                return new BlobKey(String.valueOf(generatedId));
            }
        } catch (Exception e) {
            connection.rollback();
            throw e;
        } finally {
            if (resetCommitMode) {
                connection.setAutoCommit(true);
            }
        }
    } catch (SQLException e) {
        throw new BlobstoreException("Error when creating blob", e);
    }
}

From source file:org.georepublic.db.utils.ResultSetConverter.java

public static JSONArray convertGeoJson(ResultSet rs) throws SQLException, JSONException {

    JSONArray json = new JSONArray();
    ResultSetMetaData rsmd = rs.getMetaData();

    while (rs.next()) {
        int numColumns = rsmd.getColumnCount();
        JSONObject obj = new JSONObject();
        JSONObject feat = new JSONObject();

        feat.put("type", "Feature");

        for (int i = 1; i < numColumns + 1; i++) {
            String column_name = rsmd.getColumnName(i);

            if (StringUtils.equals(column_name, "the_geom")) {
                continue;
            }
            if (StringUtils.equals(column_name, "geojson")) {
                continue;
            }
            if (rsmd.getColumnType(i) == java.sql.Types.ARRAY) {
                obj.put(column_name, rs.getArray(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.BIGINT) {
                obj.put(column_name, rs.getInt(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.BOOLEAN) {
                obj.put(column_name, rs.getBoolean(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.BLOB) {
                obj.put(column_name, rs.getBlob(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.DOUBLE) {
                obj.put(column_name, rs.getDouble(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.FLOAT) {
                obj.put(column_name, rs.getFloat(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.INTEGER) {
                obj.put(column_name, rs.getInt(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.NVARCHAR) {
                obj.put(column_name, rs.getNString(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.VARCHAR) {
                obj.put(column_name, rs.getString(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.TINYINT) {
                obj.put(column_name, rs.getInt(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.SMALLINT) {
                obj.put(column_name, rs.getInt(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.DATE) {
                obj.put(column_name, rs.getDate(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.TIMESTAMP) {
                obj.put(column_name, rs.getTimestamp(column_name));
            } else {
                obj.put(column_name, rs.getObject(column_name));
            }
        }

        feat.put("properties", obj);

        try {
            rs.findColumn("lon");
            rs.findColumn("lat");

            JSONObject geo = new JSONObject();
            JSONArray coord = new JSONArray();

            coord.put(rs.getDouble("lon"));
            coord.put(rs.getDouble("lat"));

            geo.put("type", "point");
            geo.put("coordinates", coord);

            feat.put("geometry", geo);
        } catch (Exception ex1) {
            ;
        }

        json.put(feat);
    }

    return json;

}

From source file:biz.taoconsulting.dominodav.resource.DAVResourceJDBC.java

/**
 * @see biz.taoconsulting.dominodav.resource.DAVAbstractResource#getStream()
 */
public InputStream getStream() {

    Connection conn = null;
    Statement stmt = null;

    InputStream blobStream = null;

    try {
        Context initCtx = new InitialContext();
        Context envCtx = (Context) initCtx.lookup("java:comp/env");
        // The internal address of a JDBC source is the data source
        DataSource ds = (DataSource) envCtx.lookup(this.repositoryMeta.getInternalAddress());

        conn = ds.getConnection();
        stmt = conn.createStatement();
        // XXX: That is plain wrong -- need to rework the JDBC data source
        // query
        ResultSet rs = stmt.executeQuery(
                "select f.fil_blocksize,f.fil_contents_blob from ibkuis_pp_files f where  f.fil_id="
                        + this.getDBFileID());
        if (rs.next()) {
            Blob blob = rs.getBlob(2);
            blobStream = blob.getBinaryStream();
        }
    } catch (NamingException e) {
        e.printStackTrace();
    } catch (SQLException e) {
        e.printStackTrace();
    } finally {
        try {
            if (stmt != null)
                stmt.close();

            if (conn != null)
                conn.close();
        } catch (SQLException e) {
            /** Exception handling **/
        }
    }
    return blobStream;
}

From source file:de.unidue.inf.is.ezdl.dlservices.repository.store.repositories.DBRepository.java

@Override
public StoredDocument getDocument(String oid) {
    Connection con = null;
    PreparedStatement st = null;
    ResultSet res = null;
    StoredDocument out = null;
    final String databaseIdForOid = databaseIdForOid(oid);
    try {
        con = provider.connection();
        st = con.prepareStatement(GET);
        st.setString(1, databaseIdForOid);
        res = st.executeQuery();
        if (res.next()) {
            Blob blob = res.getBlob(1);
            out = decode(blob.getBinaryStream());
        }
        con.commit();
    } catch (SQLException e) {
        rollback(con);
        getLogger().error("Error selecting " + databaseIdForOid, e);
    } finally {
        ClosingUtils.close(res);
        ClosingUtils.close(st);
        ClosingUtils.close(con);
    }
    return out;
}

From source file:com.splicemachine.derby.impl.sql.execute.operations.InsertOperationIT.java

@Test
public void testInsertBlob() throws Exception {
    InputStream fin = new FileInputStream(getResourceDirectory() + "order_line_500K.csv");
    PreparedStatement ps = methodWatcher.prepareStatement("insert into FILES (name, doc) values (?,?)");
    ps.setString(1, "csv_file");
    ps.setBinaryStream(2, fin);
    ps.execute();
    ResultSet rs = methodWatcher.executeQuery("SELECT doc FROM FILES WHERE name = 'csv_file'");
    byte buff[] = new byte[1024];
    while (rs.next()) {
        Blob ablob = rs.getBlob(1);
        File newFile = new File(getBaseDirectory() + "/target/order_line_500K.csv");
        if (newFile.exists()) {
            newFile.delete();
        }
        newFile.createNewFile();
        InputStream is = ablob.getBinaryStream();
        FileOutputStream fos = new FileOutputStream(newFile);
        for (int b = is.read(buff); b != -1; b = is.read(buff)) {
            fos.write(buff, 0, b);
        }
        is.close();
        fos.close();
    }
    File file1 = new File(getResourceDirectory() + "order_line_500K.csv");
    File file2 = new File(getBaseDirectory() + "/target/order_line_500K.csv");
    Assert.assertTrue("The files contents are not equivalent", FileUtils.contentEquals(file1, file2));
}