List of usage examples for java.sql Blob length
long length() throws SQLException;

Returns the number of bytes in the BLOB value designated by this Blob object. The examples below typically cast the returned long to int and pass it to getBytes(1, length) to materialize the whole BLOB, or use it to size a buffer for the binary stream.
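Before the project examples, a minimal self-contained sketch of that pattern. The connection URL, table, and column names are hypothetical, and the (int) cast assumes the BLOB fits in 2 GB:

import java.sql.*;

public class BlobLengthExample {
    public static void main(String[] args) throws SQLException {
        // Hypothetical in-memory database and table; adjust for your environment.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             PreparedStatement ps = conn.prepareStatement("SELECT data FROM files WHERE id = ?")) {
            ps.setLong(1, 42L);
            try (ResultSet rs = ps.executeQuery()) {
                if (rs.next()) {
                    Blob blob = rs.getBlob(1);
                    if (blob != null) {
                        long length = blob.length();            // total bytes in the BLOB
                        // LOB positions are 1-based; the cast assumes length <= Integer.MAX_VALUE
                        byte[] bytes = blob.getBytes(1L, (int) length);
                        System.out.println("Read " + bytes.length + " bytes");
                        blob.free();                            // release LOB resources early
                    }
                }
            }
        }
    }
}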
From source file:au.com.ish.derbydump.derbydump.metadata.Column.java
/**
 * This is a tricky one. According to
 * <ul>
 * <li>http://db.apache.org/derby/docs/10.2/ref/rrefjdbc96386.html</li>
 * <li>http://stackoverflow.com/questions/7510112/how-to-make-java-ignore-escape-sequences-in-a-string</li>
 * <li>http://dba.stackexchange.com/questions/10642/mysql-mysqldump-uses-n-instead-of-null</li>
 * <li>http://stackoverflow.com/questions/12038814/import-hex-binary-data-into-mysql</li>
 * <li>http://stackoverflow.com/questions/3126210/insert-hex-values-into-mysql</li>
 * <li>http://www.xaprb.com/blog/2009/02/12/5-ways-to-make-hexadecimal-identifiers-perform-better-on-mysql/</li>
 * </ul>
 * and many others, there is no safer way of exporting blobs than separate data files or hex format.
 * Tested: MySQL detects and imports hex-encoded fields automatically.
 *
 * @param blob Blob which we will convert to a hex-encoded string
 * @return String representation of the binary data
 */
public static String processBinaryData(Blob blob) throws SQLException {
    if (blob == null) {
        return "NULL";
    }
    int blobLength = (int) blob.length();
    if (blobLength == 0) {
        return "NULL";
    }
    byte[] bytes = blob.getBytes(1L, blobLength);
    return "0x" + new String(Hex.encodeHex(bytes)).toUpperCase();
}
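As a usage note, a hedged sketch of how the returned literal might be spliced into a MySQL-compatible dump line. The table and column names are invented, and dumpRow is assumed to live in the same class as processBinaryData:

// Hypothetical helper: builds one INSERT statement for a dump file.
// MySQL parses unquoted 0x... literals as binary, so the hex value
// (or NULL) can be embedded directly, with no quoting or escaping.
static String dumpRow(java.sql.ResultSet rs) throws java.sql.SQLException {
    long id = rs.getLong("ID");                     // invented column
    java.sql.Blob content = rs.getBlob("CONTENT");  // invented column
    return "INSERT INTO exported_table (id, content) VALUES ("
            + id + ", " + processBinaryData(content) + ");";
}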
From source file:io.confluent.connect.jdbc.EmbeddedDerby.java
private static String formatLiteral(Object value) throws SQLException {
    if (value == null) {
        return "NULL";
    } else if (value instanceof CharSequence) {
        return "'" + value + "'";
    } else if (value instanceof Blob) {
        Blob blob = (Blob) value;
        byte[] blobData = blob.getBytes(1, (int) blob.length());
        return "CAST(X'" + DatatypeConverter.printHexBinary(blobData) + "' AS BLOB)";
    } else if (value instanceof byte[]) {
        return "X'" + DatatypeConverter.printHexBinary((byte[]) value) + "'";
    } else {
        return value.toString();
    }
}
From source file:com.xqdev.sql.MLSQL.java
private static void addResultSet(Element root, ResultSet rs) throws SQLException {
    Namespace sql = root.getNamespace();
    ResultSetMetaData rsmd = rs.getMetaData();
    int columnCount = rsmd.getColumnCount();
    while (rs.next()) {
        Element tuple = new Element("tuple", sql);
        for (int i = 1; i <= columnCount; i++) {
            String colName = rsmd.getColumnName(i); // names aren't guaranteed OK in XML
            String colTypeName = rsmd.getColumnTypeName(i);
            // Decode a BLOB if one is found and place it into the result as a Base64-encoded string
            String colValue = "";
            if ("BLOB".equalsIgnoreCase(colTypeName)) {
                Blob b = rs.getBlob(i);
                if (b != null && b.length() > 0) {
                    colValue = Base64.encodeBase64String(b.getBytes(1, (int) b.length()));
                } else {
                    colValue = "";
                }
            } else {
                colValue = rs.getString(i);
            }
            boolean wasNull = rs.wasNull();
            Element elt = new Element(colName);
            if (wasNull) {
                elt.setAttribute("null", "true");
            }
            if ("UNKNOWN".equalsIgnoreCase(colTypeName)) {
                tuple.addContent(elt.setText("UNKNOWN TYPE")); // XXX ugly
            } else {
                tuple.addContent(elt.setText(colValue));
            }
        }
        root.addContent(tuple);
    }
}
From source file:br.com.itfox.utils.Utils.java
public static String blobToString(Blob blob) {
    try {
        if (blob != null) {
            byte[] bdata = blob.getBytes(1, (int) blob.length());
            String text = new String(bdata); // note: decodes with the platform default charset
            text = text.replaceAll("\r\n", "<br>");
            return text;
        }
    } catch (SQLException ex) {
        Logger.getLogger(Utils.class.getName()).log(Level.SEVERE, null, ex);
    }
    return "";
}
From source file:org.springframework.jdbc.support.JdbcUtils.java
/**
 * Retrieve a JDBC column value from a ResultSet, using the most appropriate
 * value type. The returned value should be a detached value object, not having
 * any ties to the active ResultSet: in particular, it should not be a Blob or
 * Clob object but rather a byte array or String representation, respectively.
 * <p>Uses the {@code getObject(index)} method, but includes additional "hacks"
 * to get around Oracle 10g returning a non-standard object for its TIMESTAMP
 * datatype and a {@code java.sql.Date} for DATE columns leaving out the
 * time portion: these columns will explicitly be extracted as standard
 * {@code java.sql.Timestamp} objects.
 * @param rs the ResultSet holding the data
 * @param index the column index
 * @return the value object
 * @throws SQLException if thrown by the JDBC API
 * @see java.sql.Blob
 * @see java.sql.Clob
 * @see java.sql.Timestamp
 */
@Nullable
public static Object getResultSetValue(ResultSet rs, int index) throws SQLException {
    Object obj = rs.getObject(index);
    String className = null;
    if (obj != null) {
        className = obj.getClass().getName();
    }
    if (obj instanceof Blob) {
        Blob blob = (Blob) obj;
        obj = blob.getBytes(1, (int) blob.length());
    } else if (obj instanceof Clob) {
        Clob clob = (Clob) obj;
        obj = clob.getSubString(1, (int) clob.length());
    } else if ("oracle.sql.TIMESTAMP".equals(className) || "oracle.sql.TIMESTAMPTZ".equals(className)) {
        obj = rs.getTimestamp(index);
    } else if (className != null && className.startsWith("oracle.sql.DATE")) {
        String metaDataClassName = rs.getMetaData().getColumnClassName(index);
        if ("java.sql.Timestamp".equals(metaDataClassName)
                || "oracle.sql.TIMESTAMP".equals(metaDataClassName)) {
            obj = rs.getTimestamp(index);
        } else {
            obj = rs.getDate(index);
        }
    } else if (obj instanceof java.sql.Date) {
        if ("java.sql.Timestamp".equals(rs.getMetaData().getColumnClassName(index))) {
            obj = rs.getTimestamp(index);
        }
    }
    return obj;
}
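A minimal sketch of how getResultSetValue can be driven from a plain JDBC loop to obtain fully detached rows; the class name and query wiring here are hypothetical:

import java.sql.*;
import java.util.*;
import org.springframework.jdbc.support.JdbcUtils;

// Hypothetical usage: materialize each row as a List of detached values, so
// BLOB columns come back as byte[] and CLOB columns as String.
public class DetachedRowFetcher {
    public static List<List<Object>> fetchAll(Connection conn, String sql) throws SQLException {
        List<List<Object>> rows = new ArrayList<>();
        try (Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery(sql)) {
            int columnCount = rs.getMetaData().getColumnCount();
            while (rs.next()) {
                List<Object> row = new ArrayList<>(columnCount);
                for (int i = 1; i <= columnCount; i++) {
                    row.add(JdbcUtils.getResultSetValue(rs, i));
                }
                rows.add(row);
            }
        }
        return rows;
    }
}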
From source file:org.agnitas.backend.StringOps.java
/**
 * Converts a DB blob into a string.
 *
 * @param blob     the source
 * @param encoding the encoding of the blob
 * @return the extracted string, or null if the encoding is unsupported
 */
public static String blob2string(Blob blob, String encoding) throws SQLException {
    String rc;
    try {
        rc = blob == null ? "" : new String(blob.getBytes(1, (int) blob.length()), encoding);
    } catch (UnsupportedEncodingException e) {
        rc = null;
    }
    return rc;
}
From source file:com.trackplus.ddl.DataReader.java
private static int getBlobTableData(BufferedWriter writer, Connection connection) throws DDLException {
    try {
        Statement st = connection.createStatement();
        ResultSet rs = st.executeQuery("SELECT * FROM TBLOB");
        int idx = 0;
        while (rs.next()) {
            StringBuilder line = new StringBuilder();
            // OBJECTID
            String value = rs.getString("OBJECTID");
            line.append(value).append(",");
            // BLOBVALUE
            Blob blobValue = rs.getBlob("BLOBVALUE");
            if (blobValue != null) {
                String str = new String(Base64.encodeBase64(blobValue.getBytes(1L, (int) blobValue.length())));
                if (str.length() == 0) {
                    str = " ";
                }
                line.append(str);
            } else {
                line.append("null");
            }
            line.append(",");
            // TPUUID
            value = rs.getString("TPUUID");
            line.append(value);
            writer.write(line.toString());
            writer.newLine();
            idx++;
        }
        rs.close();
        return idx;
    } catch (SQLException ex) {
        throw new DDLException(ex.getMessage(), ex);
    } catch (IOException ex) {
        throw new DDLException(ex.getMessage(), ex);
    }
}
From source file:org.apache.nifi.processors.standard.util.JdbcCommon.java
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName, convertNames);
    final GenericRecord rec = new GenericData.Record(schema);

    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);

        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);

                // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement
                if (javaSqlType == CLOB) {
                    Clob clob = rs.getClob(i);
                    if (clob != null) {
                        long numChars = clob.length();
                        char[] buffer = new char[(int) numChars];
                        InputStream is = clob.getAsciiStream();
                        int index = 0;
                        int c = is.read();
                        while (c != -1) { // read until end of stream; a 0 char is valid data
                            buffer[index++] = (char) c;
                            c = is.read();
                        }
                        rec.put(i - 1, new String(buffer));
                        clob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                if (javaSqlType == BLOB) {
                    Blob blob = rs.getBlob(i);
                    if (blob != null) {
                        long numBytes = blob.length();
                        byte[] buffer = new byte[(int) numBytes];
                        InputStream is = blob.getBinaryStream();
                        int index = 0;
                        int c = is.read();
                        while (c != -1) { // read until end of stream; a 0 byte is valid data
                            buffer[index++] = (byte) c;
                            c = is.read();
                        }
                        ByteBuffer bb = ByteBuffer.wrap(buffer);
                        rec.put(i - 1, bb);
                        blob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                final Object value = rs.getObject(i);
                if (value == null) {
                    rec.put(i - 1, null);
                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY
                        || javaSqlType == LONGVARBINARY || javaSqlType == ARRAY) {
                    // bytes require slightly different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);
                } else if (value instanceof Byte) {
                    // tinyint(1) is reported by the JDBC driver as java.sql.Types.TINYINT,
                    // but the value is returned as java.lang.Byte (at least H2 works this way).
                    // A direct put to the Avro record results in:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());
                } else if (value instanceof Short) {
                    // MS SQL returns TINYINT as a Java Short, which Avro doesn't understand.
                    rec.put(i - 1, ((Short) value).intValue());
                } else if (value instanceof BigDecimal) {
                    // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException
                    // such as: "Unknown datum type: java.math.BigDecimal: 38"
                    rec.put(i - 1, value.toString());
                } else if (value instanceof BigInteger) {
                    // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19),
                    // but Avro won't handle that. If the SQL type is BIGINT and the precision is between
                    // 0 and 19 (inclusive), the BigInteger is likely a long (and the schema says it will be),
                    // so try to get its value as a long. Otherwise, Avro can't handle BigInteger as a
                    // number - it will throw an AvroRuntimeException such as:
                    // "Unknown datum type: java.math.BigInteger: 38". In this case the schema expects a string.
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            try {
                                rec.put(i - 1, ((BigInteger) value).longValueExact());
                            } catch (ArithmeticException ae) {
                                // Since the value won't fit in a long, convert it to a string
                                rec.put(i - 1, value.toString());
                            }
                        }
                    } else {
                        rec.put(i - 1, value.toString());
                    }
                } else if (value instanceof Number || value instanceof Boolean) {
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            rec.put(i - 1, value);
                        }
                    } else {
                        rec.put(i - 1, value);
                    }
                } else {
                    // The types we support are numbers (int, long, double, float), booleans and Strings.
                    // Since Avro doesn't provide timestamp types, anything other than numbers or booleans
                    // is converted to a String via toString().
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;

            if (maxRows > 0 && nrOfRows == maxRows) {
                break;
            }
        }

        return nrOfRows;
    }
}
From source file:be.dataminded.nifi.plugins.util.JdbcCommon.java
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName, convertNames);
    final GenericRecord rec = new GenericData.Record(schema);

    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);

        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);

                // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement
                if (javaSqlType == CLOB) {
                    Clob clob = rs.getClob(i);
                    if (clob != null) {
                        long numChars = clob.length();
                        char[] buffer = new char[(int) numChars];
                        InputStream is = clob.getAsciiStream();
                        int index = 0;
                        int c = is.read();
                        while (c != -1) { // read until end of stream; a 0 char is valid data
                            buffer[index++] = (char) c;
                            c = is.read();
                        }
                        rec.put(i - 1, new String(buffer));
                        clob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                if (javaSqlType == BLOB) {
                    Blob blob = rs.getBlob(i);
                    if (blob != null) {
                        long numBytes = blob.length();
                        byte[] buffer = new byte[(int) numBytes];
                        InputStream is = blob.getBinaryStream();
                        int index = 0;
                        int c = is.read();
                        while (c != -1) { // read until end of stream; a 0 byte is valid data
                            buffer[index++] = (byte) c;
                            c = is.read();
                        }
                        ByteBuffer bb = ByteBuffer.wrap(buffer);
                        rec.put(i - 1, bb);
                        blob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                final Object value = rs.getObject(i);
                if (value == null) {
                    rec.put(i - 1, null);
                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY
                        || javaSqlType == LONGVARBINARY || javaSqlType == ARRAY) {
                    // bytes require slightly different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);
                } else if (value instanceof Byte) {
                    // tinyint(1) is reported by the JDBC driver as java.sql.Types.TINYINT,
                    // but the value is returned as java.lang.Byte (at least H2 works this way).
                    // A direct put to the Avro record results in:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());
                } else if (value instanceof Short) {
                    // MS SQL returns TINYINT as a Java Short, which Avro doesn't understand.
                    rec.put(i - 1, ((Short) value).intValue());
                } else if (value instanceof BigDecimal) {
                    // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException
                    // such as: "Unknown datum type: java.math.BigDecimal: 38"
                    try {
                        int scale = meta.getScale(i);
                        BigDecimal bigDecimal = (BigDecimal) value;
                        if (scale == 0) {
                            if (meta.getPrecision(i) < 10) {
                                rec.put(i - 1, bigDecimal.intValue());
                            } else {
                                rec.put(i - 1, bigDecimal.longValue());
                            }
                        } else {
                            rec.put(i - 1, bigDecimal.doubleValue());
                        }
                    } catch (Exception e) {
                        rec.put(i - 1, value.toString());
                    }
                } else if (value instanceof BigInteger) {
                    // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19),
                    // but Avro won't handle that. If the SQL type is BIGINT and the precision is between
                    // 0 and 19 (inclusive), the BigInteger is likely a long (and the schema says it will be),
                    // so try to get its value as a long. Otherwise, Avro can't handle BigInteger as a
                    // number - it will throw an AvroRuntimeException such as:
                    // "Unknown datum type: java.math.BigInteger: 38". In this case the schema expects a string.
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            try {
                                rec.put(i - 1, ((BigInteger) value).longValueExact());
                            } catch (ArithmeticException ae) {
                                // Since the value won't fit in a long, convert it to a string
                                rec.put(i - 1, value.toString());
                            }
                        }
                    } else {
                        rec.put(i - 1, value.toString());
                    }
                } else if (value instanceof Number || value instanceof Boolean) {
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            rec.put(i - 1, value);
                        }
                    } else {
                        rec.put(i - 1, value);
                    }
                } else {
                    // The types we support are numbers (int, long, double, float), booleans and Strings.
                    // Since Avro doesn't provide timestamp types, anything other than numbers or booleans
                    // is converted to a String via toString().
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;

            if (maxRows > 0 && nrOfRows == maxRows) {
                break;
            }
        }

        return nrOfRows;
    }
}
From source file:it.greenvulcano.gvesb.utils.ResultSetUtils.java
/**
 * Returns all values from the ResultSet as an XML document.
 * For instance, if the ResultSet has 3 columns, the returned XML will have the following structure:
 *
 * <RowSet>
 *   <data>
 *     <row>
 *       <col>value1</col>
 *       <col>value2</col>
 *       <col>value3</col>
 *     </row>
 *     <row>
 *       <col>value4</col>
 *       <col>value5</col>
 *       <col>value6</col>
 *     </row>
 *     ...
 *     <row>
 *       <col>valuex</col>
 *       <col>valuey</col>
 *       <col>valuez</col>
 *     </row>
 *   </data>
 * </RowSet>
 *
 * @param rs the ResultSet to convert
 * @return the resulting RowSet Document
 * @throws Exception
 */
public static Document getResultSetAsDOM(ResultSet rs) throws Exception {
    XMLUtils xml = XMLUtils.getParserInstance();
    try {
        Document doc = xml.newDocument("RowSet");
        Element docRoot = doc.getDocumentElement();
        if (rs != null) {
            try {
                ResultSetMetaData metadata = rs.getMetaData();
                Element data = null;
                Element row = null;
                Element col = null;
                Text text = null;
                String textVal = null;
                while (rs.next()) {
                    boolean restartResultset = false;
                    for (int j = 1; j <= metadata.getColumnCount() && !restartResultset; j++) {
                        col = xml.createElement(doc, "col");
                        restartResultset = false;
                        switch (metadata.getColumnType(j)) {
                        case Types.CLOB: {
                            Clob clob = rs.getClob(j);
                            if (clob != null) {
                                Reader is = clob.getCharacterStream();
                                StringWriter strW = new StringWriter();
                                IOUtils.copy(is, strW);
                                is.close();
                                textVal = strW.toString();
                            } else {
                                textVal = "";
                            }
                        }
                            break;
                        case Types.BLOB: {
                            Blob blob = rs.getBlob(j);
                            if (blob != null) {
                                InputStream is = blob.getBinaryStream();
                                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                                IOUtils.copy(is, baos);
                                is.close();
                                try {
                                    byte[] buffer = Arrays.copyOf(baos.toByteArray(), (int) blob.length());
                                    textVal = new String(Base64.getEncoder().encode(buffer));
                                } catch (SQLFeatureNotSupportedException exc) {
                                    textVal = new String(Base64.getEncoder().encode(baos.toByteArray()));
                                }
                            } else {
                                textVal = "";
                            }
                        }
                            break;
                        case -10: { // OracleTypes.CURSOR
                            Object obj = rs.getObject(j);
                            if (obj instanceof ResultSet) {
                                rs = (ResultSet) obj;
                                metadata = rs.getMetaData();
                            }
                            restartResultset = true;
                        }
                            break;
                        default: {
                            textVal = rs.getString(j);
                            if (textVal == null) {
                                textVal = "";
                            }
                        }
                        }
                        if (restartResultset) {
                            continue;
                        }
                        if (row == null || j == 1) {
                            row = xml.createElement(doc, "row");
                        }
                        if (textVal != null) {
                            text = doc.createTextNode(textVal);
                            col.appendChild(text);
                        }
                        row.appendChild(col);
                    }
                    if (row != null) {
                        if (data == null) {
                            data = xml.createElement(doc, "data");
                        }
                        data.appendChild(row);
                    }
                }
                if (data != null) {
                    docRoot.appendChild(data);
                }
            } finally {
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (Exception exc) {
                        // do nothing
                    }
                    rs = null;
                }
            }
        }
        return doc;
    } finally {
        XMLUtils.releaseParserInstance(xml);
    }
}