List of usage examples for the java.sql.Clob.length() method
long length() throws SQLException;
From source file:org.apache.ddlutils.platform.PlatformImplBase.java
/** * This is the core method to retrieve a value for a column from a result set. Its primary * purpose is to call the appropriate method on the result set, and to provide an extension * point where database-specific implementations can change this behavior. * /* w ww . jav a 2 s . c o m*/ * @param resultSet The result set to extract the value from * @param columnName The name of the column; can be <code>null</code> in which case the * <code>columnIdx</code> will be used instead * @param columnIdx The index of the column's value in the result set; is only used if * <code>columnName</code> is <code>null</code> * @param jdbcType The jdbc type to extract * @return The value * @throws SQLException If an error occurred while accessing the result set */ protected Object extractColumnValue(ResultSet resultSet, String columnName, int columnIdx, int jdbcType) throws SQLException { boolean useIdx = (columnName == null); Object value; switch (jdbcType) { case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: value = useIdx ? resultSet.getString(columnIdx) : resultSet.getString(columnName); break; case Types.NUMERIC: case Types.DECIMAL: value = useIdx ? resultSet.getBigDecimal(columnIdx) : resultSet.getBigDecimal(columnName); break; case Types.BIT: case Types.BOOLEAN: value = new Boolean(useIdx ? resultSet.getBoolean(columnIdx) : resultSet.getBoolean(columnName)); break; case Types.TINYINT: case Types.SMALLINT: case Types.INTEGER: value = new Integer(useIdx ? resultSet.getInt(columnIdx) : resultSet.getInt(columnName)); break; case Types.BIGINT: value = new Long(useIdx ? resultSet.getLong(columnIdx) : resultSet.getLong(columnName)); break; case Types.REAL: value = new Float(useIdx ? resultSet.getFloat(columnIdx) : resultSet.getFloat(columnName)); break; case Types.FLOAT: case Types.DOUBLE: value = new Double(useIdx ? resultSet.getDouble(columnIdx) : resultSet.getDouble(columnName)); break; case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: value = useIdx ? 
resultSet.getBytes(columnIdx) : resultSet.getBytes(columnName); break; case Types.DATE: value = useIdx ? resultSet.getDate(columnIdx) : resultSet.getDate(columnName); break; case Types.TIME: value = useIdx ? resultSet.getTime(columnIdx) : resultSet.getTime(columnName); break; case Types.TIMESTAMP: value = useIdx ? resultSet.getTimestamp(columnIdx) : resultSet.getTimestamp(columnName); break; case Types.CLOB: Clob clob = useIdx ? resultSet.getClob(columnIdx) : resultSet.getClob(columnName); if (clob == null) { value = null; } else { long length = clob.length(); if (length > Integer.MAX_VALUE) { value = clob; } else if (length == 0) { // the javadoc is not clear about whether Clob.getSubString // can be used with a substring length of 0 // thus we do the safe thing and handle it ourselves value = ""; } else { value = clob.getSubString(1l, (int) length); } } break; case Types.BLOB: Blob blob = useIdx ? resultSet.getBlob(columnIdx) : resultSet.getBlob(columnName); if (blob == null) { value = null; } else { long length = blob.length(); if (length > Integer.MAX_VALUE) { value = blob; } else if (length == 0) { // the javadoc is not clear about whether Blob.getBytes // can be used with for 0 bytes to be copied // thus we do the safe thing and handle it ourselves value = new byte[0]; } else { value = blob.getBytes(1l, (int) length); } } break; case Types.ARRAY: value = useIdx ? resultSet.getArray(columnIdx) : resultSet.getArray(columnName); break; case Types.REF: value = useIdx ? resultSet.getRef(columnIdx) : resultSet.getRef(columnName); break; default: value = useIdx ? resultSet.getObject(columnIdx) : resultSet.getObject(columnName); break; } return resultSet.wasNull() ? null : value; }
From source file:org.apache.gobblin.source.jdbc.JdbcExtractor.java
/**
 * Materializes the full contents of a CLOB as a String.
 *
 * @param logClob the CLOB to read; may be {@code null}
 * @return the CLOB contents, or the empty String when {@code logClob} is {@code null}
 * @throws SQLException if the CLOB cannot be read, or if it is longer than
 *         {@link Integer#MAX_VALUE} characters (too large for a single String)
 */
private String readClobAsString(Clob logClob) throws SQLException {
    if (logClob == null) {
        return StringUtils.EMPTY;
    }
    long length = logClob.length();
    // Guard the narrowing cast: a CLOB longer than Integer.MAX_VALUE characters
    // would previously overflow to a negative int and corrupt the read.
    if (length > Integer.MAX_VALUE) {
        throw new SQLException("CLOB too large to read into a String: " + length + " characters");
    }
    return logClob.getSubString(1, (int) length);
}
From source file:org.apache.nifi.processors.standard.util.JdbcCommon.java
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName, ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException { final Schema schema = createSchema(rs, recordName, convertNames); final GenericRecord rec = new GenericData.Record(schema); final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema); try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) { dataFileWriter.create(schema, outStream); final ResultSetMetaData meta = rs.getMetaData(); final int nrOfColumns = meta.getColumnCount(); long nrOfRows = 0; while (rs.next()) { if (callback != null) { callback.processRow(rs); }//from w w w . j a v a2 s . c o m for (int i = 1; i <= nrOfColumns; i++) { final int javaSqlType = meta.getColumnType(i); // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement if (javaSqlType == CLOB) { Clob clob = rs.getClob(i); if (clob != null) { long numChars = clob.length(); char[] buffer = new char[(int) numChars]; InputStream is = clob.getAsciiStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (char) c; c = is.read(); } rec.put(i - 1, new String(buffer)); clob.free(); } else { rec.put(i - 1, null); } continue; } if (javaSqlType == BLOB) { Blob blob = rs.getBlob(i); if (blob != null) { long numChars = blob.length(); byte[] buffer = new byte[(int) numChars]; InputStream is = blob.getBinaryStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (byte) c; c = is.read(); } ByteBuffer bb = ByteBuffer.wrap(buffer); rec.put(i - 1, bb); blob.free(); } else { rec.put(i - 1, null); } continue; } final Object value = rs.getObject(i); if (value == null) { rec.put(i - 1, null); } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY || javaSqlType == ARRAY) { // bytes requires little bit different 
handling byte[] bytes = rs.getBytes(i); ByteBuffer bb = ByteBuffer.wrap(bytes); rec.put(i - 1, bb); } else if (value instanceof Byte) { // tinyint(1) type is returned by JDBC driver as java.sql.Types.TINYINT // But value is returned by JDBC as java.lang.Byte // (at least H2 JDBC works this way) // direct put to avro record results: // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte rec.put(i - 1, ((Byte) value).intValue()); } else if (value instanceof Short) { //MS SQL returns TINYINT as a Java Short, which Avro doesn't understand. rec.put(i - 1, ((Short) value).intValue()); } else if (value instanceof BigDecimal) { // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38" rec.put(i - 1, value.toString()); } else if (value instanceof BigInteger) { // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that. // It the SQL type is BIGINT and the precision is between 0 and 19 (inclusive); if so, the BigInteger is likely a // long (and the schema says it will be), so try to get its value as a long. // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string. 
if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { try { rec.put(i - 1, ((BigInteger) value).longValueExact()); } catch (ArithmeticException ae) { // Since the value won't fit in a long, convert it to a string rec.put(i - 1, value.toString()); } } } else { rec.put(i - 1, value.toString()); } } else if (value instanceof Number || value instanceof Boolean) { if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { rec.put(i - 1, value); } } else { rec.put(i - 1, value); } } else { // The different types that we support are numbers (int, long, double, float), // as well as boolean values and Strings. Since Avro doesn't provide // timestamp types, we want to convert those to Strings. So we will cast anything other // than numbers or booleans to strings by using the toString() method. rec.put(i - 1, value.toString()); } } dataFileWriter.append(rec); nrOfRows += 1; if (maxRows > 0 && nrOfRows == maxRows) break; } return nrOfRows; } }
From source file:org.apache.oozie.tools.OozieDBCLI.java
private void replaceForDerby(String oldStr, String newStr) throws Exception { Connection connRead = createConnection(); try {/*from www. j a v a 2 s . c om*/ connRead.setAutoCommit(false); Statement st = connRead.createStatement(); // set fetch size to limit number of rows into memory for large table st.setFetchSize(100); ResultSet rs = st.executeQuery(COORD_ACTION_ID_DEPS); while (rs.next()) { String id = rs.getString(1); Clob clob = rs.getClob(2); String clobStr = clob.getSubString(1, (int) clob.length()); clob.setString(1, clobStr.replace(oldStr, newStr)); PreparedStatement prepStmt = connRead .prepareStatement("UPDATE COORD_ACTIONS SET MISSING_DEPENDENCIES=? WHERE ID=?"); prepStmt.setString(1, clob.getSubString(1, (int) clob.length())); prepStmt.setString(2, id); prepStmt.execute(); prepStmt.close(); } } finally { connRead.commit(); connRead.close(); } }
From source file:org.apache.oozie.tools.OozieDBCLI.java
/**
 * Converts every CLOB column of the known tables to a BLOB column, using Oracle's
 * {@code dbms_lob} package. For each table: add a temporary BLOB column, copy each
 * row's CLOB through {@code dbms_lob.CONVERTTOBLOB} into the temp column, then drop
 * the CLOB column and rename the temp column into its place.
 *
 * <p>NOTE(review): Oracle-specific; the positional parameter layout of the
 * CONVERTTOBLOB call below must match the dbms_lob signature exactly — verify
 * against the Oracle docs before touching the bindings.</p>
 *
 * @param conn an open connection to the Oracle database; a {@code null} connection
 *             is treated as a no-op
 * @throws Exception if any statement or LOB conversion fails
 */
private void convertClobToBlobInOracle(Connection conn) throws Exception {
    if (conn == null) {
        return;
    }
    System.out.println("Converting clob columns to blob for all tables");
    Statement statement = conn.createStatement();
    // Allocates a session-temporary BLOB (TRUE => cached) used as the conversion target.
    CallableStatement tempBlobCall = conn.prepareCall("{call dbms_lob.CREATETEMPORARY(?, TRUE)}");
    tempBlobCall.registerOutParameter(1, java.sql.Types.BLOB);
    // dbms_lob.CONVERTTOBLOB(dest_lob, src_clob, amount, dest_offset, src_offset,
    //                        blob_csid(=0 literal), lang_context, warning)
    CallableStatement dbmsLobCallStmt = conn
            .prepareCall("{call dbms_lob.CONVERTTOBLOB(?, ?, ?, ?, ?, 0, ?, ?)}");
    dbmsLobCallStmt.registerOutParameter(1, java.sql.Types.BLOB);
    // Lob max size (amount to convert)
    dbmsLobCallStmt.setInt(3, Integer.MAX_VALUE);
    dbmsLobCallStmt.registerOutParameter(4, java.sql.Types.INTEGER);
    // dest_offset — start writing at position 1
    dbmsLobCallStmt.setInt(4, 1);
    // src_offset — start reading at position 1
    dbmsLobCallStmt.registerOutParameter(5, java.sql.Types.INTEGER);
    dbmsLobCallStmt.setInt(5, 1);
    // blob_csid / lang_context — 0 selects the default character set context
    dbmsLobCallStmt.registerOutParameter(6, java.sql.Types.INTEGER);
    dbmsLobCallStmt.setInt(6, 0);
    // warning out-parameter
    dbmsLobCallStmt.registerOutParameter(7, java.sql.Types.INTEGER);
    dbmsLobCallStmt.setInt(7, 1);
    for (Map.Entry<String, List<String>> tableClobColumnMap : getTableClobColumnMap().entrySet()) {
        String tableName = tableClobColumnMap.getKey();
        List<String> columnNames = tableClobColumnMap.getValue();
        // Stage: one temporary BLOB column per CLOB column being converted.
        for (String column : columnNames) {
            statement.executeUpdate(getAddColumnQuery(tableName, TEMP_COLUMN_PREFIX + column, "blob"));
        }
        ResultSet rs = statement.executeQuery(getSelectQuery(tableName, columnNames));
        while (rs.next()) {
            for (int i = 0; i < columnNames.size(); i++) {
                Clob srcClob = rs.getClob(columnNames.get(i));
                // Empty / NULL CLOBs need no conversion; leave the temp column NULL.
                if (srcClob == null || srcClob.length() < 1) {
                    continue;
                }
                tempBlobCall.execute();
                Blob destLob = tempBlobCall.getBlob(1);
                dbmsLobCallStmt.setBlob(1, destLob);
                dbmsLobCallStmt.setClob(2, srcClob);
                dbmsLobCallStmt.execute();
                Blob blob = dbmsLobCallStmt.getBlob(1);
                // Assumes the first selected column is the row id — see getSelectQuery.
                PreparedStatement ps = conn.prepareStatement("update " + tableName + " set "
                        + TEMP_COLUMN_PREFIX + columnNames.get(i) + "=? where id = ?");
                ps.setBlob(1, blob);
                ps.setString(2, rs.getString(1));
                ps.executeUpdate();
                ps.close();
            }
        }
        rs.close();
        // Swap: drop the original CLOB column, rename the temp BLOB column over it.
        for (String column : columnNames) {
            statement.executeUpdate(getDropColumnQuery(tableName, column));
            statement.executeUpdate(getRenameColumnQuery(tableName, TEMP_COLUMN_PREFIX + column, column));
        }
    }
    dbmsLobCallStmt.close();
    tempBlobCall.close();
    System.out.println("Done");
}
From source file:org.apache.openjpa.jdbc.sql.DBDictionary.java
/** * Convert the specified column of the SQL ResultSet to the proper * java type./*from w w w . j ava2 s. c o m*/ */ public String getClobString(ResultSet rs, int column) throws SQLException { if (useGetStringForClobs) return rs.getString(column); Clob clob = getClob(rs, column); if (clob == null) return null; if (clob.length() == 0) return ""; // unlikely that we'll have strings over Integer.MAX_VALUE chars return clob.getSubString(1, (int) clob.length()); }
From source file:org.apache.sqoop.lib.LargeObjectLoader.java
/** * Actually read a ClobRef instance from the ResultSet and materialize * the data either inline or to a file.//from w w w. j ava 2s . co m * * @param colNum the column of the ResultSet's current row to read. * @param r the ResultSet to read from. * @return a ClobRef encapsulating the data in this field. * @throws IOException if an error occurs writing to the FileSystem. * @throws SQLException if an error occurs reading from the database. */ public com.cloudera.sqoop.lib.ClobRef readClobRef(int colNum, ResultSet r) throws IOException, InterruptedException, SQLException { long maxInlineLobLen = conf.getLong(MAX_INLINE_LOB_LEN_KEY, DEFAULT_MAX_LOB_LENGTH); Clob c = r.getClob(colNum); if (null == c) { return null; } else if (c.length() > maxInlineLobLen) { // Deserialize large CLOB into separate file. long len = c.length(); LobFile.Writer lobWriter = getClobWriter(); long recordOffset = lobWriter.tell(); Reader reader = null; Writer w = lobWriter.writeClobRecord(len); try { reader = c.getCharacterStream(); copyAll(reader, w); } finally { if (null != w) { w.close(); } if (null != reader) { reader.close(); } // Mark the record as finished. lobWriter.finishRecord(); } return new com.cloudera.sqoop.lib.ClobRef(getRelativePath(lobWriter), recordOffset, len); } else { // This is a 1-based array. return new com.cloudera.sqoop.lib.ClobRef(c.getSubString(1, (int) c.length())); } }
From source file:org.apache.tika.parser.jdbc.JDBCTableReader.java
protected void handleClob(String tableName, String columnName, int rowNum, ResultSet resultSet, int columnIndex, ContentHandler handler, ParseContext context) throws SQLException, IOException, SAXException { Clob clob = resultSet.getClob(columnIndex); if (resultSet.wasNull()) { return;//from w w w . j a v a 2 s . c om } boolean truncated = clob.length() > Integer.MAX_VALUE || clob.length() > maxClobLength; int readSize = (clob.length() < maxClobLength ? (int) clob.length() : maxClobLength); Metadata m = new Metadata(); m.set(Database.TABLE_NAME, tableName); m.set(Database.COLUMN_NAME, columnName); m.set(Database.PREFIX + "ROW_NUM", Integer.toString(rowNum)); m.set(Database.PREFIX + "IS_CLOB", "true"); m.set(Database.PREFIX + "CLOB_LENGTH", Long.toString(clob.length())); m.set(Database.PREFIX + "IS_CLOB_TRUNCATED", Boolean.toString(truncated)); m.set(Metadata.CONTENT_TYPE, "text/plain; charset=UTF-8"); m.set(Metadata.CONTENT_LENGTH, Integer.toString(readSize)); m.set(TikaMetadataKeys.RESOURCE_NAME_KEY, //just in case something screwy is going on with the column name FilenameUtils.normalize(FilenameUtils.getName(columnName + "_" + rowNum + ".txt"))); //is there a more efficient way to go from a Reader to an InputStream? String s = clob.getSubString(0, readSize); if (embeddedDocumentUtil.shouldParseEmbedded(m)) { embeddedDocumentUtil.parseEmbedded(new ByteArrayInputStream(s.getBytes(UTF_8)), handler, m, true); } }
From source file:org.batoo.jpa.jdbc.AbstractColumn.java
/**
 * Materializes a raw JDBC LOB value into the Java type this column maps to
 * ({@code this.javaType}): Clob -> String/char[], byte[] -> String/char[]/byte[]/
 * deserialized Object, Blob -> byte[]/deserialized Object; plain Strings pass through.
 *
 * @param value the raw value read from the driver (Clob, byte[], String, or Blob)
 * @return the converted value
 * @throws PersistenceException wrapping any read/deserialization failure
 */
private Object readLob(Object value) {
    try {
        if (value instanceof Clob) {
            final Clob clob = (Clob) value;

            if (this.javaType == String.class) {
                final StringWriter w = new StringWriter();
                // NOTE(review): getAsciiStream drops non-ASCII data — confirm the
                // column is guaranteed ASCII, else getCharacterStream is safer.
                IOUtils.copy(clob.getAsciiStream(), w);
                value = w.toString();
            } else {
                // Pre-size the writer with the CLOB's character count.
                final CharArrayWriter w = new CharArrayWriter((int) clob.length());
                IOUtils.copy(clob.getCharacterStream(), w);
                value = w.toCharArray();
            }
        } else if (value instanceof byte[]) {
            if (this.javaType == String.class) {
                final StringWriter w = new StringWriter();
                IOUtils.copy(new ByteArrayInputStream((byte[]) value), w);
                value = w.toString();
            } else if (this.javaType == char[].class) {
                // Widen each byte to a char (no charset decoding involved).
                final byte[] byteArray = (byte[]) value;
                final char[] charArray = new char[byteArray.length];
                for (int i = 0; i < charArray.length; i++) {
                    charArray[i] = (char) byteArray[i];
                }
                value = charArray;
            } else if (this.javaType != byte[].class) {
                // Anything else was stored as a serialized Java object.
                final ObjectInputStream is = new ObjectInputStream(new ByteArrayInputStream((byte[]) value));
                try {
                    return is.readObject();
                } finally {
                    is.close();
                }
            }
        } else if (value instanceof String) {
            return value;
        } else {
            // Remaining case: the driver handed us a Blob.
            final Blob blob = (Blob) value;

            if (this.javaType == byte[].class) {
                final ByteArrayOutputStream os = new ByteArrayOutputStream();
                IOUtils.copy(blob.getBinaryStream(), os);
                value = os.toByteArray();
            } else {
                // Non-byte[] targets are deserialized straight from the BLOB stream.
                final ObjectInputStream is = new ObjectInputStream(blob.getBinaryStream());
                try {
                    value = is.readObject();
                } finally {
                    is.close();
                }
            }
        }
        return value;
    } catch (final Exception e) {
        throw new PersistenceException("Cannot read sql data", e);
    }
}
From source file:org.batoo.jpa.jdbc.ValueConverter.java
/**
 * Materializes a raw JDBC LOB value into the requested Java type: Clob ->
 * String/char[], byte[] -> String/char[]/byte[]/deserialized Object, Blob ->
 * byte[]/deserialized Object; plain Strings pass through unchanged.
 *
 * @param value    the raw value read from the driver (Clob, byte[], String, or Blob)
 * @param javaType the Java type the column maps to
 * @return the converted value
 * @throws PersistenceException wrapping any read/deserialization failure
 */
private static Object readLob(Object value, Class<?> javaType) {
    try {
        if (value instanceof Clob) {
            final Clob clob = (Clob) value;

            if (javaType == String.class) {
                final StringWriter w = new StringWriter();
                // BUGFIX: read via the character stream. getAsciiStream() yields one
                // byte per character and corrupts any non-ASCII CLOB content.
                IOUtils.copy(clob.getCharacterStream(), w);
                value = w.toString();
            } else {
                // Pre-size the writer with the CLOB's character count.
                final CharArrayWriter w = new CharArrayWriter((int) clob.length());
                IOUtils.copy(clob.getCharacterStream(), w);
                value = w.toCharArray();
            }
        } else if (value instanceof byte[]) {
            if (javaType == String.class) {
                final StringWriter w = new StringWriter();
                IOUtils.copy(new ByteArrayInputStream((byte[]) value), w);
                value = w.toString();
            } else if (javaType == char[].class) {
                // Widen each byte to a char (no charset decoding involved).
                final byte[] byteArray = (byte[]) value;
                final char[] charArray = new char[byteArray.length];
                for (int i = 0; i < charArray.length; i++) {
                    charArray[i] = (char) byteArray[i];
                }
                value = charArray;
            } else if (javaType != byte[].class) {
                // Anything else was stored as a serialized Java object.
                final ObjectInputStream is = new ObjectInputStream(new ByteArrayInputStream((byte[]) value));
                try {
                    return is.readObject();
                } finally {
                    is.close();
                }
            }
        } else if (value instanceof String) {
            return value;
        } else {
            // Remaining case: the driver handed us a Blob.
            final Blob blob = (Blob) value;

            if (javaType == byte[].class) {
                final ByteArrayOutputStream os = new ByteArrayOutputStream();
                IOUtils.copy(blob.getBinaryStream(), os);
                value = os.toByteArray();
            } else {
                // Non-byte[] targets are deserialized straight from the BLOB stream.
                final ObjectInputStream is = new ObjectInputStream(blob.getBinaryStream());
                try {
                    value = is.readObject();
                } finally {
                    is.close();
                }
            }
        }
        return value;
    } catch (final Exception e) {
        throw new PersistenceException("Cannot read sql data", e);
    }
}