List of usage examples for java.sql.Clob.free()
void free() throws SQLException;
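Before the project examples below, here is a minimal, self-contained sketch of the usual pattern: read a CLOB column, copy its contents out, and release the driver's resources with free() in a finally block. The table and column names (notes, body) are hypothetical, and the sketch assumes a JDBC 4.0+ driver, since free() was only added in JDBC 4.0.

import java.io.IOException;
import java.io.Reader;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ClobFreeExample {

    // Reads the first CLOB from a hypothetical "notes" table and frees it afterwards.
    static String readFirstNote(Connection connection) throws SQLException, IOException {
        try (Statement stmt = connection.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT body FROM notes")) {
            if (!rs.next()) {
                return null;
            }
            Clob clob = rs.getClob(1);
            if (clob == null) {
                return null;
            }
            try {
                // Copy the CLOB content into a String before freeing it;
                // the Clob must not be used after free() is called.
                StringBuilder sb = new StringBuilder();
                try (Reader reader = clob.getCharacterStream()) {
                    char[] buffer = new char[8192];
                    int read;
                    while ((read = reader.read(buffer)) != -1) {
                        sb.append(buffer, 0, read);
                    }
                }
                return sb.toString();
            } finally {
                // free() releases the resources the driver holds for this CLOB
                // (e.g. temporary files or server-side locators).
                clob.free();
            }
        }
    }
}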
From source file:org.sonar.db.AbstractDbTester.java
private static void doClobFree(Clob clob) throws SQLException {
    try {
        clob.free();
    } catch (AbstractMethodError e) {
        // The JTS driver does not implement free(), as it is still on JDBC 3.0
    }
}
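Catching AbstractMethodError here is a deliberate design choice: free() was only introduced in JDBC 4.0, so a driver compiled against the JDBC 3.0 interfaces can throw this error at call time. Swallowing it keeps the helper usable across driver generations.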
From source file:org.b3log.latke.repository.jdbc.util.JdbcUtil.java
/**
 * resultSetToJsonObject.
 *
 * @param resultSet resultSet
 * @param ifOnlyOne ifOnlyOne
 * @param tableName tableName
 * @return JSONObject
 * @throws SQLException SQLException
 * @throws JSONException JSONException
 * @throws RepositoryException RepositoryException
 */
private static JSONObject resultSetToJsonObject(final ResultSet resultSet, final boolean ifOnlyOne,
        final String tableName) throws SQLException, JSONException, RepositoryException {
    final ResultSetMetaData resultSetMetaData = resultSet.getMetaData();

    final List<FieldDefinition> definitionList = JdbcRepositories.getRepositoriesMap().get(tableName);
    if (definitionList == null) {
        LOGGER.log(Level.SEVERE, "resultSetToJsonObject: null definition list found for table {0}", tableName);
        throw new RepositoryException("resultSetToJsonObject: null definition list found for table " + tableName);
    }

    final Map<String, FieldDefinition> dMap = new HashMap<String, FieldDefinition>();
    for (FieldDefinition fieldDefinition : definitionList) {
        if (RuntimeDatabase.H2 == Latkes.getRuntimeDatabase()) {
            dMap.put(fieldDefinition.getName().toUpperCase(), fieldDefinition);
        } else {
            dMap.put(fieldDefinition.getName(), fieldDefinition);
        }
    }

    final int numColumns = resultSetMetaData.getColumnCount();

    final JSONArray jsonArray = new JSONArray();
    JSONObject jsonObject;
    String columnName;
    while (resultSet.next()) {
        jsonObject = new JSONObject();

        for (int i = 1; i < numColumns + 1; i++) {
            columnName = resultSetMetaData.getColumnName(i);

            final FieldDefinition definition = dMap.get(columnName);
            if (definition == null) { // COUNT(OID)
                jsonObject.put(columnName, resultSet.getObject(columnName));
            } else {
                if ("boolean".equals(definition.getType())) {
                    jsonObject.put(definition.getName(), resultSet.getBoolean(columnName));
                } else {
                    final Object v = resultSet.getObject(columnName);

                    while (true) {
                        if (RuntimeDatabase.H2 != Latkes.getRuntimeDatabase()) {
                            jsonObject.put(definition.getName(), v);
                            break;
                        }

                        // H2
                        if ("String".equals(definition.getType()) && v instanceof Clob) { // H2 CLOB
                            final Clob clob = (Clob) v;
                            String str = null;
                            try {
                                str = IOUtils.toString(clob.getCharacterStream());
                            } catch (final IOException e) {
                                LOGGER.log(Level.SEVERE, "Cannot read column[name=" + columnName
                                        + "] in table[name=" + tableName + "] on H2", e);
                            } finally {
                                clob.free();
                            }

                            jsonObject.put(definition.getName(), str);
                            break;
                        }

                        // H2 other types
                        jsonObject.put(definition.getName(), v);
                        break;
                    }
                }
            }
        }

        jsonArray.put(jsonObject);
    }

    if (ifOnlyOne) {
        if (jsonArray.length() > 0) {
            jsonObject = jsonArray.getJSONObject(0);
            return jsonObject;
        }
        return null;
    }

    jsonObject = new JSONObject();
    jsonObject.put(Keys.RESULTS, jsonArray);
    return jsonObject;
}
From source file:org.apache.nifi.processors.standard.util.JdbcCommon.java
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName, convertNames);
    final GenericRecord rec = new GenericData.Record(schema);

    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);

        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);

                // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement
                if (javaSqlType == CLOB) {
                    Clob clob = rs.getClob(i);
                    if (clob != null) {
                        long numChars = clob.length();
                        char[] buffer = new char[(int) numChars];
                        InputStream is = clob.getAsciiStream();
                        int index = 0;
                        int c = is.read();
                        while (c > 0) {
                            buffer[index++] = (char) c;
                            c = is.read();
                        }
                        rec.put(i - 1, new String(buffer));
                        clob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                if (javaSqlType == BLOB) {
                    Blob blob = rs.getBlob(i);
                    if (blob != null) {
                        long numChars = blob.length();
                        byte[] buffer = new byte[(int) numChars];
                        InputStream is = blob.getBinaryStream();
                        int index = 0;
                        int c = is.read();
                        while (c > 0) {
                            buffer[index++] = (byte) c;
                            c = is.read();
                        }
                        ByteBuffer bb = ByteBuffer.wrap(buffer);
                        rec.put(i - 1, bb);
                        blob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                final Object value = rs.getObject(i);

                if (value == null) {
                    rec.put(i - 1, null);

                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == ARRAY) {
                    // bytes require slightly different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);

                } else if (value instanceof Byte) {
                    // The tinyint(1) type is reported by the JDBC driver as java.sql.Types.TINYINT,
                    // but the value is returned by JDBC as java.lang.Byte
                    // (at least the H2 JDBC driver works this way).
                    // A direct put to the Avro record results in:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());

                } else if (value instanceof Short) {
                    // MS SQL returns TINYINT as a Java Short, which Avro doesn't understand.
                    rec.put(i - 1, ((Short) value).intValue());

                } else if (value instanceof BigDecimal) {
                    // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException
                    // such as: "Unknown datum type: java.math.BigDecimal: 38"
                    rec.put(i - 1, value.toString());

                } else if (value instanceof BigInteger) {
                    // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that.
                    // If the SQL type is BIGINT and the precision is between 0 and 19 (inclusive), the BigInteger is likely a
                    // long (and the schema says it will be), so try to get its value as a long.
                    // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException
                    // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string.
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            try {
                                rec.put(i - 1, ((BigInteger) value).longValueExact());
                            } catch (ArithmeticException ae) {
                                // Since the value won't fit in a long, convert it to a string
                                rec.put(i - 1, value.toString());
                            }
                        }
                    } else {
                        rec.put(i - 1, value.toString());
                    }

                } else if (value instanceof Number || value instanceof Boolean) {
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            rec.put(i - 1, value);
                        }
                    } else {
                        rec.put(i - 1, value);
                    }

                } else {
                    // The different types that we support are numbers (int, long, double, float),
                    // as well as boolean values and Strings. Since Avro doesn't provide
                    // timestamp types, we want to convert those to Strings. So we will cast anything other
                    // than numbers or booleans to strings by using the toString() method.
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;

            if (maxRows > 0 && nrOfRows == maxRows)
                break;
        }

        return nrOfRows;
    }
}
From source file:be.dataminded.nifi.plugins.util.JdbcCommon.java
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName, convertNames);
    final GenericRecord rec = new GenericData.Record(schema);

    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);

        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);

                // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement
                if (javaSqlType == CLOB) {
                    Clob clob = rs.getClob(i);
                    if (clob != null) {
                        long numChars = clob.length();
                        char[] buffer = new char[(int) numChars];
                        InputStream is = clob.getAsciiStream();
                        int index = 0;
                        int c = is.read();
                        while (c > 0) {
                            buffer[index++] = (char) c;
                            c = is.read();
                        }
                        rec.put(i - 1, new String(buffer));
                        clob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                if (javaSqlType == BLOB) {
                    Blob blob = rs.getBlob(i);
                    if (blob != null) {
                        long numChars = blob.length();
                        byte[] buffer = new byte[(int) numChars];
                        InputStream is = blob.getBinaryStream();
                        int index = 0;
                        int c = is.read();
                        while (c > 0) {
                            buffer[index++] = (byte) c;
                            c = is.read();
                        }
                        ByteBuffer bb = ByteBuffer.wrap(buffer);
                        rec.put(i - 1, bb);
                        blob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }

                final Object value = rs.getObject(i);

                if (value == null) {
                    rec.put(i - 1, null);

                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == ARRAY) {
                    // bytes require slightly different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);

                } else if (value instanceof Byte) {
                    // The tinyint(1) type is reported by the JDBC driver as java.sql.Types.TINYINT,
                    // but the value is returned by JDBC as java.lang.Byte
                    // (at least the H2 JDBC driver works this way).
                    // A direct put to the Avro record results in:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());

                } else if (value instanceof Short) {
                    // MS SQL returns TINYINT as a Java Short, which Avro doesn't understand.
                    rec.put(i - 1, ((Short) value).intValue());

                } else if (value instanceof BigDecimal) {
                    // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException
                    // such as: "Unknown datum type: java.math.BigDecimal: 38"
                    try {
                        int scale = meta.getScale(i);
                        BigDecimal bigDecimal = ((BigDecimal) value);
                        if (scale == 0) {
                            if (meta.getPrecision(i) < 10) {
                                rec.put(i - 1, bigDecimal.intValue());
                            } else {
                                rec.put(i - 1, bigDecimal.longValue());
                            }
                        } else {
                            rec.put(i - 1, bigDecimal.doubleValue());
                        }
                    } catch (Exception e) {
                        rec.put(i - 1, value.toString());
                    }

                } else if (value instanceof BigInteger) {
                    // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that.
                    // If the SQL type is BIGINT and the precision is between 0 and 19 (inclusive), the BigInteger is likely a
                    // long (and the schema says it will be), so try to get its value as a long.
                    // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException
                    // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string.
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            try {
                                rec.put(i - 1, ((BigInteger) value).longValueExact());
                            } catch (ArithmeticException ae) {
                                // Since the value won't fit in a long, convert it to a string
                                rec.put(i - 1, value.toString());
                            }
                        }
                    } else {
                        rec.put(i - 1, value.toString());
                    }

                } else if (value instanceof Number || value instanceof Boolean) {
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            rec.put(i - 1, value);
                        }
                    } else {
                        rec.put(i - 1, value);
                    }

                } else {
                    // The different types that we support are numbers (int, long, double, float),
                    // as well as boolean values and Strings. Since Avro doesn't provide
                    // timestamp types, we want to convert those to Strings. So we will cast anything other
                    // than numbers or booleans to strings by using the toString() method.
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;

            if (maxRows > 0 && nrOfRows == maxRows)
                break;
        }

        return nrOfRows;
    }
}
From source file:org.springframework.jdbc.support.lob.TemporaryLobCreator.java
@Override
public void close() {
    try {
        for (Blob blob : this.temporaryBlobs) {
            blob.free();
        }
        for (Clob clob : this.temporaryClobs) {
            clob.free();
        }
    } catch (SQLException ex) {
        logger.error("Could not free LOB", ex);
    }
}
From source file:com.textocat.textokit.commons.cpe.JdbcCollectionReader.java
private DbTuple toTuple(ResultSet rs) throws SQLException, IOException {
    String url = rs.getString(documentUrlColumn);
    String text = null;
    Clob textClob = rs.getClob(textColumn);
    if (textClob != null) {
        Reader textReader = null;
        try {
            textReader = textClob.getCharacterStream();
            text = IOUtils.toString(textReader);
        } finally {
            IOUtils.closeQuietly(textReader);
            textClob.free();
        }
    }
    return new DbTuple(url, text);
}
From source file:org.kawanfw.test.api.client.InsertAndUpdateClobTestNew.java
public void updateclob(Connection connection, File file) throws Exception {
    PreparedStatement prepStatement = null;

    String sql = "update documentation set " + " item_doc = ? " + " where item_id >= ?";
    prepStatement = connection.prepareStatement(sql);

    Clob clob = connection.createClob();

    Writer writer = null;
    Reader reader = null;

    try {
        reader = new BufferedReader(new FileReader(file));
        writer = clob.setCharacterStream(1);

        IOUtils.copy(reader, writer);

        int i = 1;
        prepStatement.setClob(i++, clob);
        prepStatement.setInt(i++, 1);

        prepStatement.executeUpdate();

        // Close and free are important to delete temp files
        prepStatement.close();
        clob.free();
    } finally {
        IOUtils.closeQuietly(reader);
        IOUtils.closeQuietly(writer);
    }
}
From source file:org.zaproxy.zap.extension.sse.db.TableEventStream.java
/**
 * @param rs
 * @param interpretLiteralBytes
 * @param previewLength
 * @return
 * @throws DatabaseException
 */
private List<ServerSentEvent> buildEvents(ResultSet rs, boolean interpretLiteralBytes, int previewLength)
        throws SQLException {
    List<ServerSentEvent> events = new ArrayList<>();
    try {
        while (rs.next()) {
            ServerSentEvent event;
            int streamId = rs.getInt("stream_id");
            // ServerSentEventStream stream = getStream(streamId);

            event = new ServerSentEvent(); // TODO should I set stream?
            event.setId(rs.getInt("event_id"));
            event.setEventType(rs.getString("event_type"));
            event.setLastEventId(rs.getString("last_event_id"));
            event.setStreamId(streamId);
            event.setTime(rs.getTimestamp("timestamp"));

            if (previewLength == -1) {
                // load all characters
                event.setData(rs.getString("data"));
            } else {
                Clob clob = rs.getClob("data");
                int length = Math.min(previewLength, (int) clob.length());
                event.setData(clob.getSubString(1, length));
                clob.free();
            }

            Clob clob = rs.getClob("raw_event");
            event.setRawEventLength(clob.length());

            if (previewLength == -1) {
                // load all characters
                event.setRawEvent(rs.getString("raw_event"));
            } else {
                int length = Math.min(previewLength, (int) clob.length());
                event.setRawEvent(clob.getSubString(1, length));
            }
            clob.free();

            events.add(event);
        }
    } finally {
        rs.close();
    }
    return events;
}
From source file:org.zaproxy.zap.extension.websocket.db.TableWebSocket.java
/**
 * Filter out and count messages according to payloadFilter.
 *
 * @param criteria
 * @param opcodes Null when all opcodes should be retrieved.
 * @param inScopeChannelIds
 * @param payloadFilter Null when all payloads should be retrieved.
 * @param payloadLength
 * @return number of messages that fulfill the given template
 * @throws DatabaseException
 */
private int countMessageWithPayloadFilter(WebSocketMessageDTO criteria, List<Integer> opcodes,
        List<Integer> inScopeChannelIds, WebSocketMessagesPayloadFilter payloadFilter, int payloadLength)
        throws DatabaseException {
    String query = "SELECT m.opcode, m.payload_utf8 FROM websocket_message AS m "
            + "LEFT OUTER JOIN websocket_message_fuzz f "
            + "ON m.message_id = f.message_id AND m.channel_id = f.channel_id "
            + "<where> ";
    int count = 0;

    try {
        PreparedStatement stmt = buildMessageCriteriaStatement(query, criteria, opcodes, inScopeChannelIds);
        stmt.execute();
        ResultSet resultSet = stmt.getResultSet();
        try {
            while (resultSet.next()) {
                String payload;
                // read payload
                if (resultSet.getInt("opcode") != WebSocketMessage.OPCODE_BINARY) {
                    if (payloadLength == -1) {
                        // load all characters
                        payload = resultSet.getString("payload_utf8");
                    } else {
                        Clob clob = resultSet.getClob("payload_utf8");
                        int length = Math.min(payloadLength, (int) clob.length());
                        payload = clob.getSubString(1, length);
                        clob.free();
                    }
                    if (payloadFilter.isStringValidWithPattern(payload)) {
                        count++;
                    }
                }
            }
        } finally {
            resultSet.close();
            stmt.close();
        }
    } catch (SQLException e) {
        throw new DatabaseException(e);
    }
    return count;
}
From source file:org.zaproxy.zap.extension.websocket.db.TableWebSocket.java
/**
 * @param rs
 * @param interpretLiteralBytes
 * @param payloadLength
 * @return
 * @throws HttpMalformedHeaderException
 * @throws SQLException
 * @throws DatabaseException
 */
private List<WebSocketMessageDTO> buildMessageDTOs(ResultSet rs, boolean interpretLiteralBytes, int payloadLength)
        throws SQLException, DatabaseException {
    ArrayList<WebSocketMessageDTO> messages = new ArrayList<>();
    try {
        while (rs.next()) {
            WebSocketMessageDTO message;

            int channelId = rs.getInt("channel_id");
            WebSocketChannelDTO channel = getChannel(channelId);

            if (rs.getInt("fuzz_id") != 0) {
                WebSocketFuzzMessageDTO fuzzMessage = new WebSocketFuzzMessageDTO(channel);
                fuzzMessage.fuzzId = rs.getInt("fuzz_id");
                fuzzMessage.state = WebSocketFuzzMessageDTO.State.valueOf(rs.getString("state"));
                fuzzMessage.fuzz = rs.getString("fuzz");
                message = fuzzMessage;
            } else {
                message = new WebSocketMessageDTO(channel);
            }

            message.id = rs.getInt("message_id");
            message.setTime(rs.getTimestamp("timestamp"));
            message.opcode = rs.getInt("opcode");
            message.readableOpcode = WebSocketMessage.opcode2string(message.opcode);

            // read payload
            if (message.opcode == WebSocketMessage.OPCODE_BINARY) {
                if (payloadLength == -1) {
                    // load all bytes
                    message.payload = rs.getBytes("payload_bytes");
                } else {
                    Blob blob = rs.getBlob("payload_bytes");
                    int length = Math.min(payloadLength, (int) blob.length());
                    message.payload = blob.getBytes(1, length);
                    blob.free();
                }

                if (message.payload == null) {
                    message.payload = new byte[0];
                }
            } else {
                if (payloadLength == -1) {
                    // load all characters
                    message.payload = rs.getString("payload_utf8");
                } else {
                    Clob clob = rs.getClob("payload_utf8");
                    int length = Math.min(payloadLength, (int) clob.length());
                    message.payload = clob.getSubString(1, length);
                    clob.free();
                }

                if (message.payload == null) {
                    message.payload = "";
                }
            }

            message.isOutgoing = rs.getBoolean("is_outgoing");
            message.payloadLength = rs.getInt("payload_length");

            messages.add(message);
        }
    } finally {
        rs.close();
    }
    messages.trimToSize();
    return messages;
}