Example usage for java.sql Blob getBinaryStream

List of usage examples for java.sql Blob getBinaryStream

Introduction

On this page you can find example usage for java.sql Blob getBinaryStream.

Prototype

java.io.InputStream getBinaryStream() throws SQLException;

Document

Retrieves the BLOB value designated by this Blob instance as a stream.
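
The method returns a java.io.InputStream that delivers the BLOB's bytes. A minimal sketch of typical use follows; the "documents" table and "content" column are hypothetical names used only for illustration, and the try-with-resources form assumes Java 7 or later.

import java.io.IOException;
import java.io.InputStream;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class BlobStreamExample {

    // Reads a BLOB column into a byte array, or returns null if no row matches.
    static byte[] readBlob(Connection con, long id) throws SQLException, IOException {
        try (PreparedStatement ps = con.prepareStatement("SELECT content FROM documents WHERE id = ?")) {
            ps.setLong(1, id);
            try (ResultSet rs = ps.executeQuery()) {
                if (!rs.next()) {
                    return null;
                }
                Blob blob = rs.getBlob(1);
                byte[] bytes = new byte[(int) blob.length()];
                try (InputStream in = blob.getBinaryStream()) {
                    // read() may return fewer bytes than requested, so loop until full
                    int read, pos = 0;
                    while ((read = in.read(bytes, pos, bytes.length - pos)) > 0) {
                        pos += read;
                    }
                }
                blob.free(); // release the driver's resources (JDBC 4.0+)
                return bytes;
            }
        }
    }
}

How long the stream remains valid once the ResultSet advances or closes is driver-dependent, which is why several of the examples below buffer the bytes eagerly (see the JCR-1039 comments).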

Usage

From source file:org.apache.jackrabbit.core.persistence.bundle.BundleDbPersistenceManager.java

/**
 * Loads a bundle from the underlying system and optionally performs
 * a check on the bundle first.
 *
 * @param id the node id of the bundle
 * @param checkBeforeLoading check the bundle before loading it and log
 *                           detailed information about it (slower)
 * @return the loaded bundle or <code>null</code> if the bundle does not
 *         exist.
 * @throws ItemStateException if an error while loading occurs.
 */
protected synchronized NodePropBundle loadBundle(NodeId id, boolean checkBeforeLoading)
        throws ItemStateException {
    ResultSet rs = null;
    InputStream in = null;
    byte[] bytes = null;
    try {
        Statement stmt = connectionManager.executeStmt(bundleSelectSQL, getKey(id.getUUID()));
        rs = stmt.getResultSet();
        if (!rs.next()) {
            return null;
        }
        Blob b = rs.getBlob(1);
        // JCR-1039: pre-fetch/buffer blob data
        long length = b.length();
        bytes = new byte[(int) length];
        in = b.getBinaryStream();
        int read, pos = 0;
        while ((read = in.read(bytes, pos, bytes.length - pos)) > 0) {
            pos += read;
        }
        DataInputStream din = new DataInputStream(new ByteArrayInputStream(bytes));

        if (checkBeforeLoading) {
            if (binding.checkBundle(din)) {
                // reset stream for readBundle()
                din = new DataInputStream(new ByteArrayInputStream(bytes));
            } else {
                // gets wrapped as proper ItemStateException below
                throw new Exception("invalid bundle, see previous BundleBinding error log entry");
            }
        }

        NodePropBundle bundle = binding.readBundle(din, id);
        bundle.setSize(length);
        return bundle;
    } catch (Exception e) {
        String msg = "failed to read bundle: " + id + ": " + e;
        log.error(msg);
        throw new ItemStateException(msg, e);
    } finally {
        IOUtils.closeQuietly(in);
        closeResultSet(rs);
    }
}

From source file:org.apache.jackrabbit.core.persistence.bundle.BundleDbPersistenceManager.java

/**
 * Performs a consistency check.
 */
private void checkConsistency() {
    int count = 0;
    int total = 0;
    log.info("{}: checking workspace consistency...", name);

    Collection modifications = new ArrayList();
    ResultSet rs = null;
    DataInputStream din = null;
    try {
        String sql;
        if (getStorageModel() == SM_BINARY_KEYS) {
            sql = "select NODE_ID, BUNDLE_DATA from " + schemaObjectPrefix + "BUNDLE";
        } else {
            sql = "select NODE_ID_HI, NODE_ID_LO, BUNDLE_DATA from " + schemaObjectPrefix + "BUNDLE";
        }
        Statement stmt = connectionManager.executeStmt(sql, new Object[0]);
        rs = stmt.getResultSet();
        while (rs.next()) {
            NodeId id;
            Blob blob;
            if (getStorageModel() == SM_BINARY_KEYS) {
                id = new NodeId(new UUID(rs.getBytes(1)));
                blob = rs.getBlob(2);
            } else {
                id = new NodeId(new UUID(rs.getLong(1), rs.getLong(2)));
                blob = rs.getBlob(3);
            }
            din = new DataInputStream(blob.getBinaryStream());
            try {
                NodePropBundle bundle = binding.readBundle(din, id);
                Collection missingChildren = new ArrayList();
                Iterator iter = bundle.getChildNodeEntries().iterator();
                while (iter.hasNext()) {
                    NodePropBundle.ChildNodeEntry entry = (NodePropBundle.ChildNodeEntry) iter.next();
                    if (entry.getId().toString().endsWith("babecafebabe")) {
                        continue;
                    }
                    if (id.toString().endsWith("babecafebabe")) {
                        continue;
                    }
                    try {
                        NodePropBundle child = loadBundle(entry.getId());
                        if (child == null) {
                            log.error("NodeState " + id.getUUID() + " references inexistent child "
                                    + entry.getName() + " with id " + entry.getId().getUUID());
                            missingChildren.add(entry);
                        } else {
                            NodeId cp = child.getParentId();
                            if (cp == null) {
                                log.error("ChildNode has invalid parent uuid: null");
                            } else if (!cp.equals(id)) {
                                log.error("ChildNode has invalid parent uuid: " + cp + " (instead of "
                                        + id.getUUID() + ")");
                            }
                        }
                    } catch (ItemStateException e) {
                        log.error("Error while loading child node: " + e);
                    }
                }
                if (consistencyFix && !missingChildren.isEmpty()) {
                    Iterator iterator = missingChildren.iterator();
                    while (iterator.hasNext()) {
                        bundle.getChildNodeEntries().remove(iterator.next());
                    }
                    modifications.add(bundle);
                }

                NodeId parentId = bundle.getParentId();
                if (parentId != null) {
                    if (!exists(parentId)) {
                        log.error("NodeState " + id + " references inexistent parent id " + parentId);
                    }
                }
            } catch (IOException e) {
                log.error("Error in bundle " + id + ": " + e);
                din = new DataInputStream(blob.getBinaryStream());
                binding.checkBundle(din);
            }
            count++;
            if (count % 1000 == 0) {
                log.info(name + ": checked " + count + "/" + total + " bundles...");
            }
        }
    } catch (Exception e) {
        log.error("Error in bundle", e);
    } finally {
        IOUtils.closeQuietly(din);
        closeResultSet(rs);
    }

    if (consistencyFix && !modifications.isEmpty()) {
        log.info(name + ": Fixing " + modifications.size() + " inconsistent bundle(s)...");
        Iterator iterator = modifications.iterator();
        while (iterator.hasNext()) {
            NodePropBundle bundle = (NodePropBundle) iterator.next();
            try {
                log.info(name + ": Fixing bundle " + bundle.getId());
                bundle.markOld(); // use UPDATE instead of INSERT
                storeBundle(bundle);
            } catch (ItemStateException e) {
                log.error(name + ": Error storing fixed bundle: " + e);
            }
        }
    }

    log.info(name + ": checked " + count + "/" + total + " bundles.");
}

From source file:org.apache.jackrabbit.core.persistence.bundle.BundleDbPersistenceManager.java

/**
 * {@inheritDoc}
 */
protected synchronized NodePropBundle loadBundle(NodeId id) throws ItemStateException {
    ResultSet rs = null;
    InputStream in = null;
    try {
        Statement stmt = connectionManager.executeStmt(bundleSelectSQL, getKey(id.getUUID()));
        rs = stmt.getResultSet();
        if (!rs.next()) {
            return null;
        }
        Blob b = rs.getBlob(1);
        // JCR-1039: pre-fetch/buffer blob data
        long length = b.length();
        byte[] bytes = new byte[(int) length];
        in = b.getBinaryStream();
        int read, pos = 0;
        while ((read = in.read(bytes, pos, bytes.length - pos)) > 0) {
            pos += read;
        }
        DataInputStream din = new DataInputStream(new ByteArrayInputStream(bytes));
        NodePropBundle bundle = binding.readBundle(din, id);
        bundle.setSize(length);
        return bundle;
    } catch (Exception e) {
        String msg = "failed to read bundle: " + id + ": " + e;
        log.error(msg);
        throw new ItemStateException(msg, e);
    } finally {
        IOUtils.closeQuietly(in);
        closeResultSet(rs);
    }
}

From source file:org.apache.jackrabbit.core.persistence.bundle.BundleDbPersistenceManager.java

/**
 * Reads the blob's bytes and returns them. This is a helper method to
 * circumvent issues JCR-1039 and JCR-1474.
 * @param blob blob to read
 * @return bytes of the blob
 * @throws SQLException if an SQL error occurs
 * @throws IOException if an I/O error occurs
 */
private byte[] getBytes(Blob blob) throws SQLException, IOException {
    InputStream in = null;
    try {
        long length = blob.length();
        byte[] bytes = new byte[(int) length];
        in = blob.getBinaryStream();
        int read, pos = 0;
        while ((read = in.read(bytes, pos, bytes.length - pos)) > 0) {
            pos += read;
        }
        return bytes;
    } finally {
        IOUtils.closeQuietly(in);
    }
}
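
The manual read loop in this helper (and in the loadBundle methods above) is needed because InputStream.read may return fewer bytes than requested. As a more compact alternative (a sketch, not part of the original source), the JDBC API can also return the bytes directly:

private static byte[] getBytesCompact(java.sql.Blob blob) throws java.sql.SQLException {
    // Blob.getBytes takes a 1-based starting position per the JDBC specification.
    return blob.getBytes(1, (int) blob.length());
}

The Sqoop example further below uses this form for BLOBs small enough to materialize inline.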

From source file:org.apache.nifi.processors.standard.util.JdbcCommon.java

public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, final int maxRows, boolean convertNames)
        throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName, convertNames);
    final GenericRecord rec = new GenericData.Record(schema);

    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);

        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);

                // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement
                if (javaSqlType == CLOB) {
                    Clob clob = rs.getClob(i);
                    if (clob != null) {
                        long numChars = clob.length();
                        char[] buffer = new char[(int) numChars];
                        InputStream is = clob.getAsciiStream();
                        int index = 0;
                        int c = is.read();
                        // read() returns -1 at end of stream; a byte value of 0 is valid data
                        while (c >= 0) {
                            buffer[index++] = (char) c;
                            c = is.read();
                        }
                        rec.put(i - 1, new String(buffer));
                        clob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                if (javaSqlType == BLOB) {
                    Blob blob = rs.getBlob(i);
                    if (blob != null) {
                        long numChars = blob.length();
                        byte[] buffer = new byte[(int) numChars];
                        InputStream is = blob.getBinaryStream();
                        int index = 0;
                        int c = is.read();
                        // read() returns -1 at end of stream; a byte value of 0 is valid data
                        while (c >= 0) {
                            buffer[index++] = (byte) c;
                            c = is.read();
                        }
                        ByteBuffer bb = ByteBuffer.wrap(buffer);
                        rec.put(i - 1, bb);
                        blob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                final Object value = rs.getObject(i);

                if (value == null) {
                    rec.put(i - 1, null);

                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == ARRAY) {
                    // bytes requires little bit different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);

                } else if (value instanceof Byte) {
                    // tinyint(1) type is returned by JDBC driver as java.sql.Types.TINYINT
                    // But value is returned by JDBC as java.lang.Byte
                    // (at least H2 JDBC works this way)
                    // direct put to avro record results:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());
                } else if (value instanceof Short) {
                    //MS SQL returns TINYINT as a Java Short, which Avro doesn't understand.
                    rec.put(i - 1, ((Short) value).intValue());
                } else if (value instanceof BigDecimal) {
                    // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38"
                    rec.put(i - 1, value.toString());

                } else if (value instanceof BigInteger) {
                    // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that.
                    // If the SQL type is BIGINT and the precision is between 0 and 19 (inclusive), the BigInteger is likely a
                    // long (and the schema says it will be), so try to get its value as a long.
                    // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException
                    // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string.
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            try {
                                rec.put(i - 1, ((BigInteger) value).longValueExact());
                            } catch (ArithmeticException ae) {
                                // Since the value won't fit in a long, convert it to a string
                                rec.put(i - 1, value.toString());
                            }
                        }
                    } else {
                        rec.put(i - 1, value.toString());
                    }

                } else if (value instanceof Number || value instanceof Boolean) {
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            rec.put(i - 1, value);
                        }
                    } else {
                        rec.put(i - 1, value);
                    }

                } else {
                    // The different types that we support are numbers (int, long, double, float),
                    // as well as boolean values and Strings. Since Avro doesn't provide
                    // timestamp types, we want to convert those to Strings. So we will cast anything other
                    // than numbers or booleans to strings by using the toString() method.
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;

            if (maxRows > 0 && nrOfRows == maxRows)
                break;
        }

        return nrOfRows;
    }
}

From source file:org.apache.openjpa.jdbc.sql.DBDictionary.java

/**
 * Convert the specified column of the SQL ResultSet to the proper
 * java type.
 */
public Object getBlobObject(ResultSet rs, int column, JDBCStore store) throws SQLException {
    InputStream in = null;
    if (useGetBytesForBlobs || useGetObjectForBlobs) {
        byte[] bytes = getBytes(rs, column);
        if (bytes != null && bytes.length > 0)
            in = new ByteArrayInputStream(bytes);
    } else {
        Blob blob = getBlob(rs, column);
        if (blob != null && blob.length() > 0)
            in = blob.getBinaryStream();
    }
    if (in == null)
        return null;

    try {
        if (store == null)
            return Serialization.deserialize(in, null);
        return Serialization.deserialize(in, store.getContext());
    } finally {
        try {
            in.close();
        } catch (IOException ioe) {
            // ignore failures while closing the stream
        }
    }
}

From source file:org.apache.sqoop.lib.LargeObjectLoader.java

/**
 * Actually read a BlobRef instance from the ResultSet and materialize
 * the data either inline or to a file.
 *
 * @param colNum the column of the ResultSet's current row to read.
 * @param r the ResultSet to read from.
 * @return a BlobRef encapsulating the data in this field.
 * @throws IOException if an error occurs writing to the FileSystem.
 * @throws SQLException if an error occurs reading from the database.
 */
public com.cloudera.sqoop.lib.BlobRef readBlobRef(int colNum, ResultSet r)
        throws IOException, InterruptedException, SQLException {

    long maxInlineLobLen = conf.getLong(MAX_INLINE_LOB_LEN_KEY, DEFAULT_MAX_LOB_LENGTH);

    Blob b = r.getBlob(colNum);
    if (null == b) {
        return null;
    } else if (b.length() > maxInlineLobLen) {
        // Deserialize very large BLOBs into separate files.
        long len = b.length();
        LobFile.Writer lobWriter = getBlobWriter();

        long recordOffset = lobWriter.tell();
        InputStream is = null;
        OutputStream os = lobWriter.writeBlobRecord(len);
        try {
            is = b.getBinaryStream();
            copyAll(is, os);
        } finally {
            if (null != os) {
                os.close();
            }

            if (null != is) {
                is.close();
            }

            // Mark the record as finished.
            lobWriter.finishRecord();
        }

        return new com.cloudera.sqoop.lib.BlobRef(getRelativePath(curBlobWriter), recordOffset, len);
    } else {
        // Blob.getBytes takes a 1-based starting position.
        return new com.cloudera.sqoop.lib.BlobRef(b.getBytes(1, (int) b.length()));
    }
}

From source file:org.apache.syncope.core.persistence.jpa.content.XMLContentExporter.java

private String getValues(final ResultSet rs, final String columnName, final Integer columnType)
        throws SQLException {

    String res = null;

    try {
        switch (columnType) {
        case Types.BINARY:
        case Types.VARBINARY:
        case Types.LONGVARBINARY:
            final InputStream is = rs.getBinaryStream(columnName);
            if (is != null) {
                res = DatatypeConverter.printHexBinary(IOUtils.toByteArray(is));
            }
            break;

        case Types.BLOB:
            final Blob blob = rs.getBlob(columnName);
            if (blob != null) {
                res = DatatypeConverter.printHexBinary(IOUtils.toByteArray(blob.getBinaryStream()));
            }
            break;

        case Types.BIT:
        case Types.BOOLEAN:
            if (rs.getBoolean(columnName)) {
                res = "1";
            } else {
                res = "0";
            }
            break;

        case Types.DATE:
        case Types.TIME:
        case Types.TIMESTAMP:
            final Timestamp timestamp = rs.getTimestamp(columnName);
            if (timestamp != null) {
                res = FormatUtils.format(new Date(timestamp.getTime()));
            }
            break;

        default:
            res = rs.getString(columnName);
        }
    } catch (IOException e) {
        LOG.error("Error retrieving hexadecimal string", e);
    }

    return res;
}

From source file:org.apache.syncope.core.util.ContentExporter.java

private String getValues(final ResultSet rs, final String columnName, final Integer columnType)
        throws SQLException {

    String res = null;

    try {
        switch (columnType) {
        case Types.BINARY:
        case Types.VARBINARY:
        case Types.LONGVARBINARY:
            final InputStream is = rs.getBinaryStream(columnName);
            if (is != null) {
                res = new String(Hex.encode(IOUtils.toByteArray(is)));
            }
            break;

        case Types.BLOB:
            final Blob blob = rs.getBlob(columnName);
            if (blob != null) {
                res = new String(Hex.encode(IOUtils.toByteArray(blob.getBinaryStream())));
            }
            break;

        case Types.BIT:
        case Types.BOOLEAN:
            if (rs.getBoolean(columnName)) {
                res = "1";
            } else {
                res = "0";
            }
            break;

        case Types.DATE:
        case Types.TIME:
        case Types.TIMESTAMP:
            final Timestamp timestamp = rs.getTimestamp(columnName);
            if (timestamp != null) {
                res = DataFormat.format(new Date(timestamp.getTime()));
            }
            break;

        default:
            res = rs.getString(columnName);
        }
    } catch (IOException e) {
        LOG.error("Error retrieving hexadecimal string", e);
    }

    return res;
}

From source file:org.batoo.jpa.jdbc.AbstractColumn.java

private Object readLob(Object value) {
    try {
        if (value instanceof Clob) {
            final Clob clob = (Clob) value;

            if (this.javaType == String.class) {
                final StringWriter w = new StringWriter();
                IOUtils.copy(clob.getAsciiStream(), w);
                value = w.toString();
            } else {
                final CharArrayWriter w = new CharArrayWriter((int) clob.length());
                IOUtils.copy(clob.getCharacterStream(), w);
                value = w.toCharArray();
            }
        } else if (value instanceof byte[]) {
            if (this.javaType == String.class) {
                final StringWriter w = new StringWriter();
                IOUtils.copy(new ByteArrayInputStream((byte[]) value), w);
                value = w.toString();
            } else if (this.javaType == char[].class) {
                final byte[] byteArray = (byte[]) value;

                final char[] charArray = new char[byteArray.length];

                for (int i = 0; i < charArray.length; i++) {
                    charArray[i] = (char) byteArray[i];
                }

                value = charArray;
            } else if (this.javaType != byte[].class) {
                final ObjectInputStream is = new ObjectInputStream(new ByteArrayInputStream((byte[]) value));
                try {
                    return is.readObject();
                } finally {
                    is.close();
                }
            }
        } else if (value instanceof String) {
            return value;
        } else {
            final Blob blob = (Blob) value;

            if (this.javaType == byte[].class) {
                final ByteArrayOutputStream os = new ByteArrayOutputStream();

                IOUtils.copy(blob.getBinaryStream(), os);

                value = os.toByteArray();
            } else {

                final ObjectInputStream is = new ObjectInputStream(blob.getBinaryStream());
                try {
                    value = is.readObject();
                } finally {
                    is.close();
                }
            }
        }
        return value;
    } catch (final Exception e) {
        throw new PersistenceException("Cannot read sql data", e);
    }
}
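
Most of the examples above buffer the whole BLOB in memory before using it; the Sqoop example streams large values to a file for exactly that reason. Where a sink is available, the stream can also be copied directly; a minimal sketch, assuming Java 9+ for InputStream.transferTo:

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.Blob;
import java.sql.SQLException;

public final class BlobStreaming {

    // Copies the BLOB's bytes to the given sink without buffering the whole
    // value in memory; returns the number of bytes copied.
    public static long copyBlobTo(Blob blob, OutputStream out) throws SQLException, IOException {
        try (InputStream in = blob.getBinaryStream()) {
            return in.transferTo(out);
        }
    }
}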