List of usage examples for the java.sql.Blob.free() method
void free() throws SQLException;
From source file:EmployeeInit.java
public static void main(String[] args) throws Exception { Connection con;//from ww w .ja va2 s . c o m con = DriverManager.getConnection("jdbc:derby://localhost:1527/" + "c:\\db\\employee"); PreparedStatement ps; ps = con.prepareStatement("insert into employee(name,photo) " + "values(?,?)"); ps.setString(1, "Duke"); Blob blob = con.createBlob(); ImageIcon ii = new ImageIcon("duke.png"); ObjectOutputStream oos; oos = new ObjectOutputStream(blob.setBinaryStream(1)); oos.writeObject(ii); oos.close(); ps.setBlob(2, blob); ps.execute(); blob.free(); ps.close(); }
From source file:be.dataminded.nifi.plugins.util.JdbcCommon.java
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName, ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException { final Schema schema = createSchema(rs, recordName, convertNames); final GenericRecord rec = new GenericData.Record(schema); final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema); try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) { dataFileWriter.create(schema, outStream); final ResultSetMetaData meta = rs.getMetaData(); final int nrOfColumns = meta.getColumnCount(); long nrOfRows = 0; while (rs.next()) { if (callback != null) { callback.processRow(rs); }/* w w w. j a v a 2 s. c om*/ for (int i = 1; i <= nrOfColumns; i++) { final int javaSqlType = meta.getColumnType(i); // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement if (javaSqlType == CLOB) { Clob clob = rs.getClob(i); if (clob != null) { long numChars = clob.length(); char[] buffer = new char[(int) numChars]; InputStream is = clob.getAsciiStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (char) c; c = is.read(); } rec.put(i - 1, new String(buffer)); clob.free(); } else { rec.put(i - 1, null); } continue; } if (javaSqlType == BLOB) { Blob blob = rs.getBlob(i); if (blob != null) { long numChars = blob.length(); byte[] buffer = new byte[(int) numChars]; InputStream is = blob.getBinaryStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (byte) c; c = is.read(); } ByteBuffer bb = ByteBuffer.wrap(buffer); rec.put(i - 1, bb); blob.free(); } else { rec.put(i - 1, null); } continue; } final Object value = rs.getObject(i); if (value == null) { rec.put(i - 1, null); } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY || javaSqlType == ARRAY) { // bytes requires little bit different handling 
byte[] bytes = rs.getBytes(i); ByteBuffer bb = ByteBuffer.wrap(bytes); rec.put(i - 1, bb); } else if (value instanceof Byte) { // tinyint(1) type is returned by JDBC driver as java.sql.Types.TINYINT // But value is returned by JDBC as java.lang.Byte // (at least H2 JDBC works this way) // direct put to avro record results: // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte rec.put(i - 1, ((Byte) value).intValue()); } else if (value instanceof Short) { //MS SQL returns TINYINT as a Java Short, which Avro doesn't understand. rec.put(i - 1, ((Short) value).intValue()); } else if (value instanceof BigDecimal) { // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38" try { int scale = meta.getScale(i); BigDecimal bigDecimal = ((BigDecimal) value); if (scale == 0) { if (meta.getPrecision(i) < 10) { rec.put(i - 1, bigDecimal.intValue()); } else { rec.put(i - 1, bigDecimal.longValue()); } } else { rec.put(i - 1, bigDecimal.doubleValue()); } } catch (Exception e) { rec.put(i - 1, value.toString()); } } else if (value instanceof BigInteger) { // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that. // It the SQL type is BIGINT and the precision is between 0 and 19 (inclusive); if so, the BigInteger is likely a // long (and the schema says it will be), so try to get its value as a long. // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string. 
if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { try { rec.put(i - 1, ((BigInteger) value).longValueExact()); } catch (ArithmeticException ae) { // Since the value won't fit in a long, convert it to a string rec.put(i - 1, value.toString()); } } } else { rec.put(i - 1, value.toString()); } } else if (value instanceof Number || value instanceof Boolean) { if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { rec.put(i - 1, value); } } else { rec.put(i - 1, value); } } else { // The different types that we support are numbers (int, long, double, float), // as well as boolean values and Strings. Since Avro doesn't provide // timestamp types, we want to convert those to Strings. So we will cast anything other // than numbers or booleans to strings by using the toString() method. rec.put(i - 1, value.toString()); } } dataFileWriter.append(rec); nrOfRows += 1; if (maxRows > 0 && nrOfRows == maxRows) break; } return nrOfRows; } }
From source file:com.orangeandbronze.jblubble.jdbc.PgJdbcBlobstoreService.java
@Override protected void readBlobInternal(BlobKey blobKey, BlobHandler blobHandler) throws IOException, BlobstoreException { try {// www . j a v a 2s . c o m boolean resetCommitMode = false; try (Connection connection = dataSource.getConnection()) { if (connection.getAutoCommit()) { connection.setAutoCommit(false); resetCommitMode = true; } try (PreparedStatement ps = connection.prepareStatement(getSelectContentByIdSql())) { ps.setLong(1, Long.valueOf(blobKey.stringValue())); try (ResultSet rs = ps.executeQuery()) { if (!rs.next()) { throw new BlobstoreException("Blob not found: " + blobKey); } Blob blob = rs.getBlob(1); try { blobHandler.handleBlob(blob); } finally { blob.free(); } } if (resetCommitMode) { connection.commit(); } } } } catch (SQLException e) { throw new BlobstoreException("Error when retrieving blob", e); } }
From source file:com.orangeandbronze.jblubble.jdbc.JdbcBlobstoreService.java
/**
 * Looks up the blob row whose id matches {@code blobKey} and passes the
 * driver-managed {@link Blob} to {@code blobHandler}, freeing it afterwards.
 *
 * @throws BlobstoreException if no row matches the key or a SQL error occurs
 */
protected void readBlobInternal(BlobKey blobKey, BlobHandler blobHandler) throws IOException, BlobstoreException {
    final long id = Long.valueOf(blobKey.stringValue());
    try (Connection connection = dataSource.getConnection();
            PreparedStatement ps = connection.prepareStatement(getSelectContentByIdSql())) {
        ps.setLong(1, id);
        try (ResultSet rs = ps.executeQuery()) {
            if (!rs.next()) {
                throw new BlobstoreException("Blob not found: " + blobKey);
            }
            final Blob content = rs.getBlob(1);
            try {
                blobHandler.handleBlob(content);
            } finally {
                // Release driver resources eagerly (JDBC 4.0+).
                content.free();
            }
        }
    } catch (SQLException e) {
        throw new BlobstoreException("Error when retrieving blob", e);
    }
}
From source file:com.orangeandbronze.jblubble.jdbc.springframework.SpringJdbcBlobstoreService.java
/**
 * Streams the "content" column of the row matching {@code blobKey} to the
 * caller through {@code callback}; Spring's DataAccessException is translated
 * into BlobstoreException.
 */
@Override
public void readBlob(BlobKey blobKey, BlobstoreReadCallback callback) throws IOException, BlobstoreException {
    try {
        jdbcTemplate.query(getSelectContentByIdSql(), (rs) -> {
            if (!rs.next()) {
                throw new BlobstoreException("Blob not found: " + blobKey);
            }
            final Blob content = rs.getBlob("content");
            try (InputStream in = content.getBinaryStream()) {
                callback.readInputStream(in);
                return true;
            } catch (IOException ioe) {
                throw new BlobstoreException("Error while reading blob", ioe);
            } finally {
                // Release driver resources eagerly (JDBC 4.0+).
                content.free();
            }
        }, Long.valueOf(blobKey.stringValue()));
    } catch (DataAccessException e) {
        throw new BlobstoreException(e);
    }
}
From source file:com.algomedica.service.LicenseManagerServiceImpl.java
@Override public Map<String, Object> getLicenseDetail(String id) { Map<String, Object> map; if (StringUtils.isEmpty(id)) { throw new ApplicationException(ErrorCodesConstants.INVALID_LICENSE_ID_CODE, ErrorCodesConstants.INVALID_LICENSE_ID); }/*from ww w. ja v a 2s . c o m*/ LicenseDetail licenseDetail = licenseDao.getLicenseById(Long.parseLong(id)); if (licenseDetail == null || licenseDetail.getBlobkey() == null) { throw new ApplicationException(ErrorCodesConstants.INVALID_LICENSE_ID_CODE, ErrorCodesConstants.INVALID_LICENSE_ID); } // release the blob and free up memory. (since JDBC 4.0) try { java.sql.Blob blob = licenseDetail.getBlobkey(); int blobLength = (int) blob.length(); byte[] blobAsBytes = blob.getBytes(1, blobLength); blobAsBytes = zipBytes(licenseDetail.getLsMacAddress() + ".key", blobAsBytes); map = new HashMap<>(); map.put("data", blobAsBytes); map.put("filename", licenseDetail.getLsMacAddress()); blob.free(); } catch (SQLException | IOException e) { LOGGER.error("During getting License detail exception occured "); throw new ApplicationException(ErrorCodesConstants.INTERNAL_SYSTEM_ERROR_CODE, ErrorCodesConstants.INTERNAL_SYSTEM_ERROR, e); } return map; }
From source file:com.orangeandbronze.jblubble.jdbc.springframework.SpringJdbcBlobstoreService.java
/**
 * Writes a byte range of the blob matching {@code blobKey} to {@code out}.
 *
 * @param blobKey key whose "content" column is served
 * @param out     destination stream; not closed by this method
 * @param start   zero-based offset of the first byte to serve
 * @param end     zero-based offset of the last byte to serve (inclusive); only
 *                used when {@code useEnd} is true
 * @param useEnd  when false, serves from {@code start} to the end of the blob
 */
protected void serveBlobInternal(BlobKey blobKey, OutputStream out, long start, long end, boolean useEnd) {
    try {
        jdbcTemplate.query(getSelectContentByIdSql(), (rs) -> {
            if (!rs.next()) {
                throw new BlobstoreException("Blob not found: " + blobKey);
            }
            Blob blob = rs.getBlob("content");
            try {
                // Blob positions are 1-based per the JDBC spec, hence start + 1.
                long pos = start + 1;
                // Inclusive range: (end - start + 1) bytes; otherwise the whole blob.
                long length = useEnd ? (end - start + 1) : blob.length();
                try (InputStream in = new BufferedInputStream(blob.getBinaryStream(pos, length),
                        getBufferSize())) {
                    copy(in, out);
                } catch (IOException ioe) {
                    throw new BlobstoreException("Error while reading blob", ioe);
                }
                // Extractor result: total blob size (not the served range length).
                return blob.length();
            } finally {
                // Release driver resources eagerly (JDBC 4.0+).
                blob.free();
            }
        }, Long.valueOf(blobKey.stringValue()));
    } catch (DataAccessException e) {
        throw new BlobstoreException(e);
    }
}
From source file:com.orangeandbronze.jblubble.jdbc.JdbcBlobstoreService.java
/**
 * Creates a new blob row by streaming the caller's bytes into a
 * connection-created {@link Blob}, recording size and MD5 hash alongside it.
 *
 * @param callback    writes the blob's bytes to the stream it is given
 * @param name        stored in column 1
 * @param contentType stored in column 2
 * @return key wrapping the database-generated row id
 * @throws BlobstoreException if MD5 is unavailable, no row is created, or a
 *                            SQL error occurs
 */
@Override
public BlobKey createBlob(BlobstoreWriteCallback callback, String name, String contentType)
        throws IOException, BlobstoreException {
    try {
        try (Connection connection = dataSource.getConnection();
                PreparedStatement ps = connection.prepareStatement(getInsertSql(),
                        Statement.RETURN_GENERATED_KEYS);) {
            ps.setString(1, name);
            ps.setString(2, contentType);
            Blob content = connection.createBlob();
            try {
                long size;
                String md5Hash = null;
                // Write path: caller -> digest -> byte counter -> buffer -> blob stream.
                OutputStream out = new BufferedOutputStream(content.setBinaryStream(1L), getBufferSize());
                try {
                    CountingOutputStream countingOutputStream = new CountingOutputStream(out);
                    try {
                        MessageDigest md5;
                        try {
                            md5 = MessageDigest.getInstance(MD5_ALGORITHM_NAME);
                            try (DigestOutputStream digestOutputStream = new DigestOutputStream(
                                    countingOutputStream, md5)) {
                                size = callback.writeToOutputStream(digestOutputStream);
                                // Callback may return -1 meaning "size unknown";
                                // fall back to the counted byte total.
                                if (size == -1L) {
                                    size = countingOutputStream.getByteCount();
                                }
                                md5Hash = new String(encodeHex(md5.digest()));
                            }
                        } catch (NoSuchAlgorithmException e) {
                            throw new BlobstoreException(e);
                        }
                    } finally {
                        countingOutputStream.close();
                    }
                } finally {
                    out.close();
                }
                ps.setBlob(3, content);
                ps.setLong(4, size);
                ps.setTimestamp(5, new java.sql.Timestamp(new java.util.Date().getTime()));
                ps.setString(6, md5Hash);
                int rowCount = ps.executeUpdate();
                if (rowCount == 0) {
                    throw new BlobstoreException("Creating blob failed, no rows created.");
                }
                long generatedId = getGeneratedKey(ps);
                return new BlobKey(String.valueOf(generatedId));
            } finally {
                // Release the temporary Blob's resources eagerly (JDBC 4.0+).
                content.free();
            }
        }
    } catch (SQLException e) {
        throw new BlobstoreException("Error when creating blob", e);
    }
}
From source file:com.orangeandbronze.jblubble.jdbc.springframework.SpringJdbcBlobstoreService.java
/**
 * Creates a new blob row via Spring's {@link ConnectionCallback}, streaming
 * the caller's bytes into a connection-created {@link Blob} and recording
 * size and MD5 hash alongside it.
 *
 * @param callback    writes the blob's bytes to the stream it is given
 * @param name        stored in column 1
 * @param contentType stored in column 2
 * @return key wrapping the database-generated row id
 * @throws BlobstoreException if MD5 is unavailable, no row is created, or a
 *                            data-access error occurs
 */
@Override
public BlobKey createBlob(BlobstoreWriteCallback callback, String name, String contentType)
        throws IOException, BlobstoreException {
    try {
        return jdbcTemplate.execute(new ConnectionCallback<BlobKey>() {
            @Override
            public BlobKey doInConnection(Connection connection) throws SQLException, DataAccessException {
                try (PreparedStatement ps = connection.prepareStatement(getInsertSql(),
                        Statement.RETURN_GENERATED_KEYS)) {
                    ps.setString(1, name);
                    ps.setString(2, contentType);
                    Blob content = connection.createBlob();
                    try {
                        long size;
                        String md5Hash = null;
                        // Write path: caller -> digest -> byte counter -> buffer -> blob stream.
                        OutputStream out = new BufferedOutputStream(content.setBinaryStream(1L),
                                getBufferSize());
                        try {
                            CountingOutputStream countingOutputStream = new CountingOutputStream(out);
                            try {
                                MessageDigest md5;
                                try {
                                    md5 = MessageDigest.getInstance(MD5_ALGORITHM_NAME);
                                    try (DigestOutputStream digestOutputStream = new DigestOutputStream(
                                            countingOutputStream, md5)) {
                                        size = callback.writeToOutputStream(digestOutputStream);
                                        // Callback may return -1 meaning "size unknown";
                                        // fall back to the counted byte total.
                                        if (size == -1L) {
                                            size = countingOutputStream.getByteCount();
                                        }
                                        md5Hash = new String(encodeHex(md5.digest()));
                                    }
                                } catch (NoSuchAlgorithmException e) {
                                    throw new BlobstoreException(e);
                                }
                            } finally {
                                countingOutputStream.close();
                            }
                        } finally {
                            out.close();
                        }
                        ps.setBlob(3, content);
                        ps.setLong(4, size);
                        ps.setTimestamp(5, new java.sql.Timestamp(new java.util.Date().getTime()));
                        ps.setString(6, md5Hash);
                        int rowCount = ps.executeUpdate();
                        if (rowCount == 0) {
                            throw new BlobstoreException("Creating blob failed, no rows created.");
                        }
                        long generatedId = getGeneratedKey(ps);
                        return new BlobKey(String.valueOf(generatedId));
                    } finally {
                        // Release the temporary Blob's resources eagerly (JDBC 4.0+).
                        content.free();
                    }
                } catch (IOException e) {
                    throw new BlobstoreException("Error when creating blob", e);
                }
            }
        });
    } catch (DataAccessException e) {
        throw new BlobstoreException(e);
    }
}
From source file:com.orangeandbronze.jblubble.jdbc.PgJdbcBlobstoreService.java
/**
 * PostgreSQL variant of blob creation: inserts the metadata row first, then
 * re-selects the row to stream bytes into its large object, then updates the
 * stored size/hash. The whole sequence runs in a single transaction because
 * PostgreSQL large objects cannot be streamed with auto-commit on.
 *
 * @param callback    writes the blob's bytes to the stream it is given
 * @param name        stored in column 1
 * @param contentType stored in column 2
 * @return key wrapping the database-generated row id
 * @throws BlobstoreException if MD5 is unavailable, any statement affects no
 *                            rows, or a SQL error occurs
 */
@Override
public BlobKey createBlob(BlobstoreWriteCallback callback, String name, String contentType)
        throws IOException, BlobstoreException {
    boolean resetCommitMode = false;
    try (Connection connection = dataSource.getConnection()) {
        // Force transactional mode; remember to restore auto-commit afterwards.
        if (connection.getAutoCommit()) {
            connection.setAutoCommit(false);
            resetCommitMode = true;
        }
        try {
            int rowCount;
            // Step 1: insert the metadata row to obtain a generated id.
            try (PreparedStatement ps = connection.prepareStatement(getInsertSql(),
                    Statement.RETURN_GENERATED_KEYS)) {
                ps.setString(1, name);
                ps.setString(2, contentType);
                ps.setTimestamp(3, new java.sql.Timestamp(new java.util.Date().getTime()));
                rowCount = ps.executeUpdate();
                if (rowCount == 0) {
                    throw new BlobstoreException("Creating blob failed, no rows created.");
                }
                final long generatedId = getGeneratedKey(ps);
                long size;
                String md5Hash = null;
                // Step 2: re-select the row and stream the content into its blob.
                try (PreparedStatement ps2 = connection.prepareStatement(getSelectContentByIdSql())) {
                    ps2.setLong(1, generatedId);
                    ResultSet rs = ps2.executeQuery();
                    if (!rs.next()) {
                        throw new BlobstoreException("Creating blob failed, no rows created.");
                    }
                    Blob contentBlob = rs.getBlob(1);
                    try {
                        // Write path: caller -> digest -> byte counter -> buffer -> blob stream.
                        OutputStream out = new BufferedOutputStream(contentBlob.setBinaryStream(1L),
                                getBufferSize());
                        try {
                            CountingOutputStream countingOutputStream = new CountingOutputStream(out);
                            try {
                                MessageDigest md5;
                                try {
                                    md5 = MessageDigest.getInstance(MD5_ALGORITHM_NAME);
                                    try (DigestOutputStream digestOutputStream = new DigestOutputStream(
                                            countingOutputStream, md5)) {
                                        size = callback.writeToOutputStream(digestOutputStream);
                                        // Callback may return -1 meaning "size unknown";
                                        // fall back to the counted byte total.
                                        if (size == -1L) {
                                            size = countingOutputStream.getByteCount();
                                        }
                                        md5Hash = new String(encodeHex(md5.digest()));
                                    }
                                } catch (NoSuchAlgorithmException e) {
                                    throw new BlobstoreException(e);
                                }
                            } finally {
                                try {
                                    countingOutputStream.close();
                                } catch (IOException e) {
                                    // Since digestOutputStream gets closed,
                                    // the wrapped countingOutputStream does
                                    // not really need to get closed again.
                                }
                            }
                        } finally {
                            try {
                                out.close();
                            } catch (IOException e) {
                                // Since digestOutputStream gets closed,
                                // the wrapped buffered OutputStream does
                                // not really need to get closed again.
                            }
                        }
                    } finally {
                        // Release large-object resources eagerly (JDBC 4.0+).
                        contentBlob.free();
                    }
                }
                // Step 3: persist the now-known size and MD5 hash.
                try (PreparedStatement ps3 = connection.prepareStatement(getUpdateSizeSql())) {
                    ps3.setLong(1, size);
                    ps3.setString(2, md5Hash);
                    ps3.setLong(3, generatedId);
                    rowCount = ps3.executeUpdate();
                    if (rowCount == 0) {
                        throw new BlobstoreException("Creating blob failed, no rows created.");
                    }
                }
                if (resetCommitMode) {
                    connection.commit();
                }
                return new BlobKey(String.valueOf(generatedId));
            }
        } catch (Exception e) {
            // Any failure rolls back all three statements before rethrowing.
            connection.rollback();
            throw e;
        } finally {
            if (resetCommitMode) {
                connection.setAutoCommit(true);
            }
        }
    } catch (SQLException e) {
        throw new BlobstoreException("Error when creating blob", e);
    }
}