Example usage for java.sql Blob free

Introduction

This page collects example usages of the java.sql.Blob free() method from open-source projects.

Prototype

void free() throws SQLException;

Document

This method frees the Blob object and releases the resources that it holds.
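
The typical pattern is to consume the Blob's contents first and then call free() in a finally block, so the driver can release any temporary or server-side resources even if reading fails. A minimal sketch, assuming a ResultSet rs positioned on a row with a BLOB column named "data" (the column name is hypothetical):

Blob blob = rs.getBlob("data");
if (blob != null) {
    try {
        byte[] bytes = blob.getBytes(1, (int) blob.length()); // Blob positions are 1-based
        // ... use bytes ...
    } finally {
        blob.free(); // the Blob object is no longer valid after this call
    }
}

Note that some drivers do not implement free() and throw SQLFeatureNotSupportedException or UnsupportedOperationException; several of the examples below guard against this.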

Usage

From source file:org.apache.nifi.processors.standard.util.JdbcCommon.java

public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, final int maxRows, boolean convertNames)
        throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName, convertNames);
    final GenericRecord rec = new GenericData.Record(schema);

    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);

        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);

                // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement
                if (javaSqlType == CLOB) {
                    Clob clob = rs.getClob(i);
                    if (clob != null) {
                        long numChars = clob.length();
                        char[] buffer = new char[(int) numChars];
                        InputStream is = clob.getAsciiStream();
                        int index = 0;
                        int c = is.read();
                        // read() returns -1 at end of stream; 0 is a valid byte value
                        while (c != -1) {
                            buffer[index++] = (char) c;
                            c = is.read();
                        }
                        rec.put(i - 1, new String(buffer));
                        clob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                if (javaSqlType == BLOB) {
                    Blob blob = rs.getBlob(i);
                    if (blob != null) {
                        long numChars = blob.length();
                        byte[] buffer = new byte[(int) numChars];
                        InputStream is = blob.getBinaryStream();
                        int index = 0;
                        int c = is.read();
                        // read() returns -1 at end of stream; 0 is a valid byte value
                        while (c != -1) {
                            buffer[index++] = (byte) c;
                            c = is.read();
                        }
                        ByteBuffer bb = ByteBuffer.wrap(buffer);
                        rec.put(i - 1, bb);
                        blob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                final Object value = rs.getObject(i);

                if (value == null) {
                    rec.put(i - 1, null);

                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == ARRAY) {
                    // byte arrays require slightly different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);

                } else if (value instanceof Byte) {
                    // tinyint(1) type is returned by JDBC driver as java.sql.Types.TINYINT
                    // But value is returned by JDBC as java.lang.Byte
                    // (at least H2 JDBC works this way)
                    // direct put to avro record results:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());
                } else if (value instanceof Short) {
                    //MS SQL returns TINYINT as a Java Short, which Avro doesn't understand.
                    rec.put(i - 1, ((Short) value).intValue());
                } else if (value instanceof BigDecimal) {
                    // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38"
                    rec.put(i - 1, value.toString());

                } else if (value instanceof BigInteger) {
                    // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that.
                    // If the SQL type is BIGINT and the precision is between 0 and 19 (inclusive), the BigInteger is likely a
                    // long (and the schema says it will be), so try to get its value as a long.
                    // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException
                    // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string.
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            try {
                                rec.put(i - 1, ((BigInteger) value).longValueExact());
                            } catch (ArithmeticException ae) {
                                // Since the value won't fit in a long, convert it to a string
                                rec.put(i - 1, value.toString());
                            }
                        }
                    } else {
                        rec.put(i - 1, value.toString());
                    }

                } else if (value instanceof Number || value instanceof Boolean) {
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            rec.put(i - 1, value);
                        }
                    } else {
                        rec.put(i - 1, value);
                    }

                } else {
                    // The different types that we support are numbers (int, long, double, float),
                    // as well as boolean values and Strings. Since Avro doesn't provide
                    // timestamp types, we want to convert those to Strings. So we will cast anything other
                    // than numbers or booleans to strings by using the toString() method.
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;

            if (maxRows > 0 && nrOfRows == maxRows)
                break;
        }

        return nrOfRows;
    }
}
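
As a side note, the byte-at-a-time read loops in the CLOB and BLOB branches above can usually be replaced by a single call to Blob.getBytes (or Clob.getSubString), which most drivers implement more efficiently. A sketch of the BLOB branch under the same fits-in-memory assumption the original code makes:

Blob blob = rs.getBlob(i);
if (blob != null) {
    // getBytes uses 1-based positions and assumes the BLOB fits in an int-sized array
    byte[] buffer = blob.getBytes(1, (int) blob.length());
    rec.put(i - 1, ByteBuffer.wrap(buffer));
    blob.free();
} else {
    rec.put(i - 1, null);
}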

From source file:org.apache.tika.parser.jdbc.JDBCTableReader.java

protected void handleBlob(String tableName, String columnName, int rowNum, ResultSet resultSet, int columnIndex,
        ContentHandler handler, ParseContext context) throws SQLException, IOException, SAXException {
    Metadata m = new Metadata();
    m.set(Database.TABLE_NAME, tableName);
    m.set(Database.COLUMN_NAME, columnName);
    m.set(Database.PREFIX + "ROW_NUM", Integer.toString(rowNum));
    m.set(Database.PREFIX + "IS_BLOB", "true");
    Blob blob = null;
    TikaInputStream is = null;
    try {
        blob = getBlob(resultSet, columnIndex, m);
        if (blob == null) {
            return;
        }
        is = TikaInputStream.get(blob, m);
        Attributes attrs = new AttributesImpl();
        ((AttributesImpl) attrs).addAttribute("", "type", "type", "CDATA", "blob");
        ((AttributesImpl) attrs).addAttribute("", "column_name", "column_name", "CDATA", columnName);
        ((AttributesImpl) attrs).addAttribute("", "row_number", "row_number", "CDATA",
                Integer.toString(rowNum));
        handler.startElement("", "span", "span", attrs);
        String extension = embeddedDocumentUtil.getExtension(is, m);

        m.set(TikaMetadataKeys.RESOURCE_NAME_KEY,
                //just in case something screwy is going on with the column name
                FilenameUtils.normalize(FilenameUtils.getName(columnName + "_" + rowNum + extension)));
        if (embeddedDocumentUtil.shouldParseEmbedded(m)) {
            embeddedDocumentUtil.parseEmbedded(is, handler, m, true);
        }

    } finally {
        if (blob != null) {
            try {
                blob.free();
            } catch (SQLException | UnsupportedOperationException e) {
                //swallow
            }
        }
        IOUtils.closeQuietly(is);
    }
    handler.endElement("", "span", "span");
}

From source file:org.deegree.tools.rendering.dem.builder.DEMDatasetGenerator.java

private static void init(CommandLine line) throws ParseException, IOException, SQLException {

    String t = line.getOptionValue(OPT_OUTPUT_LEVELS, "-1");
    int levels = Integer.parseInt(t);
    t = line.getOptionValue(OPT_OUTPUT_ROWS, "128");
    int rows = Integer.parseInt(t);
    t = line.getOptionValue(OPT_MAX_HEIGHT);
    float maxZ = Float.NaN;
    if (t != null) {
        maxZ = Float.parseFloat(t);
    }

    RasterIOOptions rasterIOOptions = RasterOptionsParser.parseRasterIOOptions(line);
    AbstractCoverage raster = RasterOptionsParser.loadCoverage(line, rasterIOOptions);
    if (!(raster instanceof AbstractRaster)) {
        throw new IllegalArgumentException(
                "Given raster location is a multiresolution raster, this is not supported.");
    }

    DEMDatasetGenerator builder = new DEMDatasetGenerator((AbstractRaster) raster, rasterIOOptions, levels,
            rows, maxZ);

    t = line.getOptionValue(OPT_OUTPUT_DIR);
    File outputDir = new File(t);
    if (outputDir.getFreeSpace() < builder.fileSize) {
        System.err.println("Not enough space (" + outputDir.getFreeSpace() + " bytes ca: "
                + (Math.round((outputDir.getFreeSpace() / (1024 * 1024d)) * 100d) / 100d)
                + " Mb.) free in the directory: " + outputDir + " please specify a location where at least: "
                + builder.fileSize + " bytes (ca. "
                + (Math.round((builder.fileSize / (1024 * 1024d)) * 100d) / 100d) + " Mb) are available.");
        System.exit(2);
    }
    Blob patchesBlob = new FileBlob(new File(outputDir, MultiresolutionMesh.FRAGMENTS_FILE_NAME));

    PatchManager triangleManager = new PatchManager(builder.getLevels(), patchesBlob);
    System.out.println(triangleManager);

    // generate macro triangle blob
    double sampleSizeX = Math.abs(builder.sampleSizeX);
    double sampleSizeY = Math.abs(builder.sampleSizeY);
    float outputExtentX = (float) (builder.outputX * sampleSizeX);
    float outputExtentY = (float) (builder.outputY * sampleSizeY);
    PatchManager manager = builder.generateMacroTriangles(triangleManager, 0, 0, outputExtentX, outputExtentY);

    // write mrindex blob
    Blob mrIndexBlob = new FileBlob(new File(outputDir, MultiresolutionMesh.INDEX_FILE_NAME));
    DAGBuilder dagBuilder = new DAGBuilder(manager.getLevels(), manager);
    dagBuilder.writeBlob(mrIndexBlob, (short) 0, (short) rows);
    dagBuilder.printStats();
    mrIndexBlob.free();
}

From source file:org.kawanfw.test.api.client.InsertAndUpdateBlobTest.java

/**
 * Test that the blob was correctly inserted
 *
 * @param connection
 */
public void selectBlobTestAlternateSyntax(Connection connection, String originalFileName, String shaHexa)
        throws Exception {
    int customer_id;
    int item_id;
    String description;
    BigDecimal cost_price;
    Date date_placed;
    Timestamp date_shipped;
    Blob blob;
    boolean is_delivered;
    int quantity;

    String sql = "select * from orderlog where  customer_id >= ? and item_id >= ? ";

    PreparedStatement prepStatement = connection.prepareStatement(sql);

    int i = 1;
    prepStatement.setInt(i++, 1);
    prepStatement.setInt(i++, 1);

    ResultSet rs = prepStatement.executeQuery();

    MessageDisplayer.display("");

    InputStream in = null;
    OutputStream out = null;

    SqlUtil sqlUtil = new SqlUtil(connection);

    while (rs.next()) {

        customer_id = rs.getInt("customer_id");
        item_id = rs.getInt("item_id");
        description = rs.getString("description");
        cost_price = rs.getBigDecimal("cost_price");
        date_placed = rs.getDate("date_placed");
        date_shipped = rs.getTimestamp("date_shipped");
        blob = rs.getBlob("jpeg_image");

        if (sqlUtil.isIngres()) {
            is_delivered = (rs.getInt("is_delivered") == 1) ? true : false;
        } else {
            is_delivered = rs.getBoolean("is_delivered");
        }

        quantity = rs.getInt("quantity");

        i = 1;
        customer_id = rs.getInt(i++);
        item_id = rs.getInt(i++);
        description = rs.getString(i++);
        cost_price = rs.getBigDecimal(i++);
        date_placed = rs.getDate(i++);
        date_shipped = rs.getTimestamp(i++);

        File originalBlobFile = SqlTestParms.getFileFromUserHome(originalFileName);
        // String extension = "."
        // + StringUtils.substringAfterLast(
        // originalBlobFile.toString(), ".");

        File file = createTempFile(originalBlobFile.toString());

        try {
            in = blob.getBinaryStream();

            if (in != null) {
                out = new BufferedOutputStream(new FileOutputStream(file));
                IOUtils.copy(in, out);
            } else {
                MessageDisplayer.display("jpeg_image column is null!");
            }

        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
            try {
                blob.free();
            } catch (Throwable e) {
                MessageDisplayer.display("blob.free() not done: " + e.toString());
            }
        }

        i++;
        if (sqlUtil.isIngres()) {
            is_delivered = rs.getInt(i++) == 1;
        } else {
            is_delivered = rs.getBoolean(i++);
        }

        quantity = rs.getInt(i++);

        MessageDisplayer.display("");
        MessageDisplayer.display("customer_id : " + customer_id);
        MessageDisplayer.display("item_id     : " + item_id);
        MessageDisplayer.display("description : " + description);
        MessageDisplayer.display("cost_price  : " + cost_price);
        MessageDisplayer.display("date_placed : " + date_placed);
        MessageDisplayer.display("date_shipped: " + date_shipped);
        MessageDisplayer.display("jpeg_image  : " + "content stored in file: " + file);
        MessageDisplayer.display("is_delivered: " + is_delivered);
        MessageDisplayer.display("quantity    : " + quantity);

        // Compute the hash of the file
        Sha1Util sha1 = new Sha1Util();
        String shaHexaNew = sha1.getHexFileHash(file);

        Assert.assertEquals(shaHexa, shaHexaNew);

        file.delete();

        MessageDisplayer.display("");
        MessageDisplayer.display("Ok, SHA-1 value of read file " + file + " is same as inserted file "
                + SqlTestParms.getFileFromUserHome(originalFileName));

    }

    prepStatement.close();
    rs.close();

    MessageDisplayer.display("Select done!");

}

From source file:org.kawanfw.test.api.client.InsertAndUpdateBlobTestNew.java

public void updateBlob(Connection connection, File blobFile) throws Exception {
    PreparedStatement prepStatement = null;

    String sql = "update orderlog set " + "   jpeg_image  = ? "
            + "     where  customer_id >= ? and item_id >= ?";

    prepStatement = connection.prepareStatement(sql);

    Blob blob = connection.createBlob();

    InputStream in = null;
    OutputStream out = null;

    try {
        in = new BufferedInputStream(new FileInputStream(blobFile));
        out = blob.setBinaryStream(1);
        IOUtils.copy(in, out);

        int i = 1;
        prepStatement.setBlob(i++, blob);
        prepStatement.setInt(i++, 1);
        prepStatement.setInt(i++, 1);

        prepStatement.executeUpdate();

        // Close and free are important to delete temp files
        prepStatement.close();
        blob.free();
    } finally {
        IOUtils.closeQuietly(in);
        IOUtils.closeQuietly(out);
    }

}

From source file:org.kawanfw.test.api.client.InsertAndUpdateBlobTestNew.java

/**
 * Test that the blob was correctly inserted
 *
 * @param connection
 */
public void selectGetBlobSyntax(Connection connection, String originalFileName, String shaHexa)
        throws Exception {
    int customer_id;
    int item_id;
    String description;
    BigDecimal cost_price;
    Date date_placed;
    Timestamp date_shipped;
    Blob blob;
    boolean is_delivered;
    int quantity;

    String sql = "select * from orderlog where  customer_id >= ? and item_id >= ? ";

    PreparedStatement prepStatement = connection.prepareStatement(sql);

    int i = 1;
    prepStatement.setInt(i++, 1);
    prepStatement.setInt(i++, 1);

    ResultSet rs = prepStatement.executeQuery();

    MessageDisplayer.display("");

    InputStream in = null;
    OutputStream out = null;

    SqlUtil sqlUtil = new SqlUtil(connection);

    while (rs.next()) {

        customer_id = rs.getInt("customer_id");
        item_id = rs.getInt("item_id");
        description = rs.getString("description");
        cost_price = rs.getBigDecimal("cost_price");
        date_placed = rs.getDate("date_placed");
        date_shipped = rs.getTimestamp("date_shipped");

        blob = rs.getBlob("jpeg_image");

        if (sqlUtil.isIngres()) {
            is_delivered = (rs.getInt("is_delivered") == 1) ? true : false;
        } else {
            is_delivered = rs.getBoolean("is_delivered");
        }

        quantity = rs.getInt("quantity");

        i = 1;
        customer_id = rs.getInt(i++);
        item_id = rs.getInt(i++);
        description = rs.getString(i++);
        cost_price = rs.getBigDecimal(i++);
        date_placed = rs.getDate(i++);
        date_shipped = rs.getTimestamp(i++);

        File originalBlobFile = SqlTestParms.getFileFromUserHome(originalFileName);

        File file = createTempFile(originalBlobFile.toString());

        try {
            in = blob.getBinaryStream();

            if (in != null) {
                out = new BufferedOutputStream(new FileOutputStream(file));
                IOUtils.copy(in, out);
            } else {
                MessageDisplayer.display("jpeg_image column is null!");
            }

        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
            try {
                blob.free();
            } catch (Throwable e) {
                MessageDisplayer.display("blob.free() not done: " + e.toString());
            }
        }

        i++;
        if (sqlUtil.isIngres()) {
            is_delivered = rs.getInt(i++) == 1;
        } else {
            is_delivered = rs.getBoolean(i++);
        }

        quantity = rs.getInt(i++);

        MessageDisplayer.display("");
        MessageDisplayer.display("customer_id : " + customer_id);
        MessageDisplayer.display("item_id     : " + item_id);
        MessageDisplayer.display("description : " + description);
        MessageDisplayer.display("cost_price  : " + cost_price);
        MessageDisplayer.display("date_placed : " + date_placed);
        MessageDisplayer.display("date_shipped: " + date_shipped);
        MessageDisplayer.display("jpeg_image  : " + "content stored in file: " + file);
        MessageDisplayer.display("is_delivered: " + is_delivered);
        MessageDisplayer.display("quantity    : " + quantity);

        // Compute the hash of the file
        Sha1Util sha1 = new Sha1Util();
        String shaHexaNew = sha1.getHexFileHash(file);

        Assert.assertEquals(shaHexa, shaHexaNew);

        file.delete();

        MessageDisplayer.display("");
        MessageDisplayer.display("Ok, SHA-1 value of read file " + file + " is same as inserted file "
                + SqlTestParms.getFileFromUserHome(originalFileName));

    }

    prepStatement.close();
    rs.close();

    MessageDisplayer.display("Select done!");

}

From source file:org.kawanfw.test.api.client.InsertAndUpdateBlobTestNew.java

/**
 * Insert a blob
 * 
 * @throws Exception
 *             if any Exception occurs
 */
public void insertLoopPrepStatement(Connection connection, int numberToInsert, File blobFile) throws Exception {

    // We can now use our Remote JDBC Connection as a regular Connection!
    connection.setAutoCommit(false);

    // We will do all our remote insert in a SQL Transaction
    try {

        String sql = "insert into orderlog values ( ?, ?, ?, ?, ?, ?, ?, ?, ? )";

        // Create a new Prepared Statement
        PreparedStatement prepStatement = null;

        MessageDisplayer.display("");
        MessageDisplayer.display("Inserting " + numberToInsert + " orderlog...");

        for (int customerId = 1; customerId < numberToInsert + 1; customerId++) {
            int i = 1;
            long theTime = new java.util.Date().getTime();

            // We will insert a Blob (the image of the product).
            // The transfer will be done in streaming both on the client
            // and on the Servlet Server: we can upload/download very big
            // files.

            InputStream in = null;
            OutputStream out = null;

            try {
                in = new FileInputStream(blobFile);
                Blob blob = connection.createBlob();
                out = blob.setBinaryStream(1);
                IOUtils.copy(in, out);

                prepStatement = connection.prepareStatement(sql);

                prepStatement.setInt(i++, customerId);
                prepStatement.setInt(i++, customerId);
                prepStatement.setString(i++, "Item Description No " + customerId);
                prepStatement.setBigDecimal(i++, new BigDecimal(customerId));
                prepStatement.setDate(i++, new java.sql.Date(theTime));
                prepStatement.setTimestamp(i++, new Timestamp(theTime));
                prepStatement.setBlob(i++, blob);

                SqlUtil sqlUtil = new SqlUtil(connection);
                if (sqlUtil.isIngres()) {
                    prepStatement.setInt(i++, 0);
                } else {
                    prepStatement.setBoolean(i++, false);
                }

                prepStatement.setInt(i++, customerId);

                // SystemOutHandle.display("Before executeUpdate...");
                prepStatement.executeUpdate();

                // Close and free are important to delete temp files
                prepStatement.close();
                blob.free();
            } finally {
                IOUtils.closeQuietly(in);
                IOUtils.closeQuietly(out);
            }

        }

        // We do either everything in a single transaction or nothing
        connection.commit(); // Commit is propagated on Server
        MessageDisplayer.display("Remote Commit Done on AceQL Server!");
    } catch (Exception e) {
        connection.rollback();
        throw e;
    } finally {
        connection.setAutoCommit(true);
    }

}

From source file:org.kawanfw.test.api.client.InsertAndUpdateBlobTestPsqlOID.java

/**
 * Test that the blob was correctly inserted
 *
 * @param connection
 */
public void selectBlobTestAlternateSyntax(Connection connection, String originalFileName, String shaHexa)
        throws Exception {
    int customer_id;
    int item_id;
    String description;
    BigDecimal cost_price;
    Date date_placed;
    Timestamp date_shipped;
    Blob blob;
    boolean is_delivered;
    int quantity;

    String sql = "select * from orderlog_2 where  customer_id >= ? and item_id >= ? ";

    PreparedStatement prepStatement = connection.prepareStatement(sql);

    int i = 1;
    prepStatement.setInt(i++, 1);
    prepStatement.setInt(i++, 1);

    ResultSet rs = prepStatement.executeQuery();

    MessageDisplayer.display("");

    InputStream in = null;
    OutputStream out = null;

    SqlUtil sqlUtil = new SqlUtil(connection);

    while (rs.next()) {

        customer_id = rs.getInt("customer_id");
        item_id = rs.getInt("item_id");
        description = rs.getString("description");
        cost_price = rs.getBigDecimal("cost_price");
        date_placed = rs.getDate("date_placed");
        date_shipped = rs.getTimestamp("date_shipped");
        blob = rs.getBlob("jpeg_image");

        if (sqlUtil.isIngres()) {
            is_delivered = (rs.getInt("is_delivered") == 1) ? true : false;
        } else {
            is_delivered = rs.getBoolean("is_delivered");
        }

        quantity = rs.getInt("quantity");

        i = 1;
        customer_id = rs.getInt(i++);
        item_id = rs.getInt(i++);
        description = rs.getString(i++);
        cost_price = rs.getBigDecimal(i++);
        date_placed = rs.getDate(i++);
        date_shipped = rs.getTimestamp(i++);

        File originalBlobFile = SqlTestParms.getFileFromUserHome(originalFileName);
        // String extension = "."
        // + StringUtils.substringAfterLast(
        // originalBlobFile.toString(), ".");

        File file = createTempFile(originalBlobFile.toString());

        try {
            in = blob.getBinaryStream();

            if (in != null) {
                out = new BufferedOutputStream(new FileOutputStream(file));
                IOUtils.copy(in, out);
            } else {
                MessageDisplayer.display("jpeg_image column is null!");
            }

        } finally {
            IOUtils.closeQuietly(in);
            IOUtils.closeQuietly(out);
            try {
                blob.free();
            } catch (Throwable e) {
                MessageDisplayer.display("blob.free() not done: " + e.toString());
            }
        }

        i++;
        if (sqlUtil.isIngres()) {
            is_delivered = rs.getInt(i++) == 1;
        } else {
            is_delivered = rs.getBoolean(i++);
        }

        quantity = rs.getInt(i++);

        MessageDisplayer.display("");
        MessageDisplayer.display("customer_id : " + customer_id);
        MessageDisplayer.display("item_id     : " + item_id);
        MessageDisplayer.display("description : " + description);
        MessageDisplayer.display("cost_price  : " + cost_price);
        MessageDisplayer.display("date_placed : " + date_placed);
        MessageDisplayer.display("date_shipped: " + date_shipped);
        MessageDisplayer.display("jpeg_image  : " + "content stored in file: " + file);
        MessageDisplayer.display("is_delivered: " + is_delivered);
        MessageDisplayer.display("quantity    : " + quantity);

        // Compute the hash of the file
        Sha1Util sha1 = new Sha1Util();
        String shaHexaNew = sha1.getHexFileHash(file);

        Assert.assertEquals(shaHexa, shaHexaNew);

        file.delete();

        MessageDisplayer.display("");
        MessageDisplayer.display("Ok, SHA-1 value of read file " + file + " is same as inserted file "
                + SqlTestParms.getFileFromUserHome(originalFileName));

    }

    prepStatement.close();
    rs.close();

    MessageDisplayer.display("Select done!");

}

From source file:org.springframework.jdbc.support.lob.TemporaryLobCreator.java

@Override
public void close() {
    try {
        for (Blob blob : this.temporaryBlobs) {
            blob.free();
        }
        for (Clob clob : this.temporaryClobs) {
            clob.free();
        }
    } catch (SQLException ex) {
        logger.error("Could not free LOB", ex);
    }
}
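
For context, close() above runs when a Spring LobCreator is released after its statement has executed. A sketch of a typical caller, assuming a hypothetical documents table and contentBytes array (setCreateTemporaryLob(true) makes DefaultLobHandler.getLobCreator() return a TemporaryLobCreator):

DefaultLobHandler lobHandler = new DefaultLobHandler();
lobHandler.setCreateTemporaryLob(true);
try (LobCreator lobCreator = lobHandler.getLobCreator();
        PreparedStatement ps = connection.prepareStatement("insert into documents (data) values (?)")) {
    lobCreator.setBlobAsBytes(ps, 1, contentBytes);
    ps.executeUpdate();
} // closing the LobCreator invokes close(), freeing the temporary LOBs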

From source file:org.xenei.bloomgraph.bloom.sql.DBIO.java

/**
 * Free the blob, ignoring any errors.
 * 
 * @param blob
 *            The blob to free.
 */
public static void freeQuietly(final Blob blob) {
    if (blob != null) {
        try {
            blob.free();
        } catch (final SQLException e) {
            LOG.warn("Ignoring error freeing blob: " + e.getMessage(), e);
        }
    }
}
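
A typical call site pairs freeQuietly with the read in a finally block, so the Blob is released even when reading fails. A hypothetical sketch:

Blob blob = null;
try {
    blob = resultSet.getBlob(1);
    if (blob != null) {
        byte[] data = blob.getBytes(1, (int) blob.length());
        // ... use data ...
    }
} finally {
    DBIO.freeQuietly(blob); // null-safe; logs and ignores SQLException
}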