List of usage examples for java.sql.ResultSet#getBlob.

Method signature: Blob getBlob(String columnLabel) throws SQLException;

Retrieves the value of the designated column in the current row of this ResultSet
object as a Blob object in the Java programming language.

From source file: org.sakaiproject.webservices.SakaiReport.java
/**
 * Serializes every row of the given ResultSet into a JSON array string: one
 * JSON object per row, keyed by column label, with the accessor chosen from
 * the column's declared java.sql.Types code.
 *
 * @param rs the result set to convert; it is consumed to the end
 * @return the JSON array rendered as a string
 * @throws SQLException if a column cannot be read
 * @throws JSONException if a value cannot be stored in a JSON object
 */
protected String toJsonString(ResultSet rs) throws SQLException, JSONException {
    ResultSetMetaData rsmd = rs.getMetaData();
    JSONArray array = new JSONArray();
    int numColumns = rsmd.getColumnCount();
    while (rs.next()) {
        JSONObject obj = new JSONObject();
        for (int i = 1; i <= numColumns; i++) {
            String column_label = rsmd.getColumnLabel(i);
            LOG.debug("Column Name=" + column_label + ",type=" + rsmd.getColumnType(i));
            switch (rsmd.getColumnType(i)) {
            case Types.ARRAY:
                obj.put(column_label, rs.getArray(i));
                break;
            case Types.BIGINT:
                // BIGINT is a 64-bit type; the previous getInt(i) silently
                // truncated values outside the int range.
                obj.put(column_label, rs.getLong(i));
                break;
            case Types.BOOLEAN:
                obj.put(column_label, rs.getBoolean(i));
                break;
            case Types.BLOB:
                // NOTE(review): storing a java.sql.Blob in a JSONObject relies
                // on the library's default rendering of the object; confirm
                // this is the intended representation for binary columns.
                obj.put(column_label, rs.getBlob(i));
                break;
            case Types.DOUBLE:
                obj.put(column_label, rs.getDouble(i));
                break;
            case Types.FLOAT:
                obj.put(column_label, rs.getFloat(i));
                break;
            case Types.INTEGER:
                obj.put(column_label, rs.getInt(i));
                break;
            case Types.NVARCHAR:
                obj.put(column_label, rs.getNString(i));
                break;
            case Types.VARCHAR:
                obj.put(column_label, rs.getString(i));
                break;
            case Types.TINYINT:
            case Types.SMALLINT:
                // Both fit in an int; merged the previously duplicated branches.
                obj.put(column_label, rs.getInt(i));
                break;
            case Types.DATE:
                obj.put(column_label, rs.getDate(i));
                break;
            case Types.TIMESTAMP:
                obj.put(column_label, rs.getTimestamp(i));
                break;
            default:
                obj.put(column_label, rs.getObject(i));
                break;
            }
        }
        array.put(obj);
    }
    return array.toString();
}
From source file:be.dataminded.nifi.plugins.util.JdbcCommon.java
public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName, ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException { final Schema schema = createSchema(rs, recordName, convertNames); final GenericRecord rec = new GenericData.Record(schema); final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema); try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) { dataFileWriter.create(schema, outStream); final ResultSetMetaData meta = rs.getMetaData(); final int nrOfColumns = meta.getColumnCount(); long nrOfRows = 0; while (rs.next()) { if (callback != null) { callback.processRow(rs); }/*from w ww .j a v a 2 s .co m*/ for (int i = 1; i <= nrOfColumns; i++) { final int javaSqlType = meta.getColumnType(i); // Need to handle CLOB and BLOB before getObject() is called, due to ResultSet's maximum portability statement if (javaSqlType == CLOB) { Clob clob = rs.getClob(i); if (clob != null) { long numChars = clob.length(); char[] buffer = new char[(int) numChars]; InputStream is = clob.getAsciiStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (char) c; c = is.read(); } rec.put(i - 1, new String(buffer)); clob.free(); } else { rec.put(i - 1, null); } continue; } if (javaSqlType == BLOB) { Blob blob = rs.getBlob(i); if (blob != null) { long numChars = blob.length(); byte[] buffer = new byte[(int) numChars]; InputStream is = blob.getBinaryStream(); int index = 0; int c = is.read(); while (c > 0) { buffer[index++] = (byte) c; c = is.read(); } ByteBuffer bb = ByteBuffer.wrap(buffer); rec.put(i - 1, bb); blob.free(); } else { rec.put(i - 1, null); } continue; } final Object value = rs.getObject(i); if (value == null) { rec.put(i - 1, null); } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY || javaSqlType == ARRAY) { // bytes requires little bit different 
handling byte[] bytes = rs.getBytes(i); ByteBuffer bb = ByteBuffer.wrap(bytes); rec.put(i - 1, bb); } else if (value instanceof Byte) { // tinyint(1) type is returned by JDBC driver as java.sql.Types.TINYINT // But value is returned by JDBC as java.lang.Byte // (at least H2 JDBC works this way) // direct put to avro record results: // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte rec.put(i - 1, ((Byte) value).intValue()); } else if (value instanceof Short) { //MS SQL returns TINYINT as a Java Short, which Avro doesn't understand. rec.put(i - 1, ((Short) value).intValue()); } else if (value instanceof BigDecimal) { // Avro can't handle BigDecimal as a number - it will throw an AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38" try { int scale = meta.getScale(i); BigDecimal bigDecimal = ((BigDecimal) value); if (scale == 0) { if (meta.getPrecision(i) < 10) { rec.put(i - 1, bigDecimal.intValue()); } else { rec.put(i - 1, bigDecimal.longValue()); } } else { rec.put(i - 1, bigDecimal.doubleValue()); } } catch (Exception e) { rec.put(i - 1, value.toString()); } } else if (value instanceof BigInteger) { // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19), but Avro won't handle that. // It the SQL type is BIGINT and the precision is between 0 and 19 (inclusive); if so, the BigInteger is likely a // long (and the schema says it will be), so try to get its value as a long. // Otherwise, Avro can't handle BigInteger as a number - it will throw an AvroRuntimeException // such as: "Unknown datum type: java.math.BigInteger: 38". In this case the schema is expecting a string. 
if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { try { rec.put(i - 1, ((BigInteger) value).longValueExact()); } catch (ArithmeticException ae) { // Since the value won't fit in a long, convert it to a string rec.put(i - 1, value.toString()); } } } else { rec.put(i - 1, value.toString()); } } else if (value instanceof Number || value instanceof Boolean) { if (javaSqlType == BIGINT) { int precision = meta.getPrecision(i); if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) { rec.put(i - 1, value.toString()); } else { rec.put(i - 1, value); } } else { rec.put(i - 1, value); } } else { // The different types that we support are numbers (int, long, double, float), // as well as boolean values and Strings. Since Avro doesn't provide // timestamp types, we want to convert those to Strings. So we will cast anything other // than numbers or booleans to strings by using the toString() method. rec.put(i - 1, value.toString()); } } dataFileWriter.append(rec); nrOfRows += 1; if (maxRows > 0 && nrOfRows == maxRows) break; } return nrOfRows; } }
From source file:ubic.gemma.externalDb.GoldenPathSequenceAnalysis.java
/**
 * Uses a query that can retrieve BlatResults from GoldenPath. The query must
 * have the appropriate form: columns are read positionally, so the SELECT
 * list must yield, in order: 1 chromosome name (BLAT format), 2 blockSizes
 * (blob), 3 targetStarts (blob), 4 queryStarts (blob), 5 strand, 6 query
 * size, 7 matches, 8 mismatches, 9 query gap count, 10 target gap count,
 * 11 query start, 12 query end, 13 target start, 14 target end, 15 repMatches.
 *
 * @param query the SQL to execute
 * @param params positional query parameters; params[0] must be the query
 *        sequence name — it is copied onto each result's query sequence
 * @return the BLAT hits found, one per result-set row
 */
private Collection<BlatResult> findLocationsByQuery(final String query, final Object[] params) {
    return this.getJdbcTemplate().query(query, params, new ResultSetExtractor<Collection<BlatResult>>() {
        @Override
        public Collection<BlatResult> extractData(ResultSet rs) throws SQLException, DataAccessException {
            Collection<BlatResult> r = new HashSet<BlatResult>();
            while (rs.next()) {
                BlatResult blatResult = BlatResult.Factory.newInstance();
                // Column 1: chromosome name, stored in BLAT ("chrN") format.
                Chromosome c = Chromosome.Factory.newInstance();
                c.setName(SequenceManipulation.deBlatFormatChromosomeName(rs.getString(1)));
                Taxon taxon = getTaxon();
                assert taxon != null;
                c.setTaxon(taxon);
                blatResult.setTargetChromosome(c);
                // Columns 2-4: alignment block lists stored as blobs,
                // decoded via SQLUtils.blobToString.
                Blob blockSizes = rs.getBlob(2);
                Blob targetStarts = rs.getBlob(3);
                Blob queryStarts = rs.getBlob(4);
                blatResult.setBlockSizes(SQLUtils.blobToString(blockSizes));
                blatResult.setTargetStarts(SQLUtils.blobToString(targetStarts));
                blatResult.setQueryStarts(SQLUtils.blobToString(queryStarts));
                blatResult.setStrand(rs.getString(5));
                // need the query size to compute scores.
                blatResult.setQuerySequence(BioSequence.Factory.newInstance());
                blatResult.getQuerySequence().setLength(rs.getLong(6));
                blatResult.getQuerySequence().setName((String) params[0]);
                blatResult.setMatches(rs.getInt(7));
                blatResult.setMismatches(rs.getInt(8));
                blatResult.setQueryGapCount(rs.getInt(9));
                blatResult.setTargetGapCount(rs.getInt(10));
                blatResult.setQueryStart(rs.getInt(11));
                blatResult.setQueryEnd(rs.getInt(12));
                blatResult.setTargetStart(rs.getLong(13));
                blatResult.setTargetEnd(rs.getLong(14));
                blatResult.setRepMatches(rs.getInt(15));
                r.add(blatResult);
            }
            return r;
        }
    });
}
From source file:org.wso2.carbon.cluster.coordinator.rdbms.RDBMSCommunicationBusContextImpl.java
@Override public List<NodeDetail> getAllNodeData(String groupId) throws ClusterCoordinationException { Connection connection = null; PreparedStatement preparedStatement = null; ResultSet resultSet = null; String coordinatorNodeId = getCoordinatorNodeId(groupId); ArrayList<NodeDetail> nodeDataList = new ArrayList<NodeDetail>(); try {/*ww w . j a v a 2 s . c o m*/ connection = getConnection(); preparedStatement = connection.prepareStatement(RDBMSConstants.PS_GET_ALL_NODE_HEARTBEAT); preparedStatement.setString(1, groupId); resultSet = preparedStatement.executeQuery(); while (resultSet.next()) { String nodeId = resultSet.getString(2); boolean isCoordinatorNode = false; if (coordinatorNodeId != null) { isCoordinatorNode = coordinatorNodeId.equals(nodeId); } Map<String, Object> propertiesMap = null; if (resultSet.getBlob(3) != null) { int blobLength = (int) resultSet.getBlob(3).length(); byte[] bytes = resultSet.getBlob(3).getBytes(0L, blobLength); ByteArrayInputStream bis = new ByteArrayInputStream(bytes); ObjectInputStream ois = new ObjectInputStream(bis); Object blobObject = ois.readObject(); if (blobObject instanceof Map) { propertiesMap = (Map) blobObject; } } long lastHeartbeat = resultSet.getLong(4); boolean isNewNode = convertIntToBoolean(resultSet.getInt(5)); NodeDetail heartBeatData = new NodeDetail(nodeId, groupId, isCoordinatorNode, lastHeartbeat, isNewNode, propertiesMap); nodeDataList.add(heartBeatData); } } catch (SQLException e) { String errMsg = RDBMSConstants.TASK_GET_ALL_QUEUES; throw new ClusterCoordinationException("Error occurred while " + errMsg, e); } catch (IOException e) { throw new ClusterCoordinationException("Error retrieving the property map. ", e); } catch (ClassNotFoundException e) { throw new ClusterCoordinationException("Error retrieving the property map. 
", e); } finally { close(resultSet, RDBMSConstants.TASK_GET_ALL_QUEUES); close(preparedStatement, RDBMSConstants.TASK_GET_ALL_QUEUES); close(connection, RDBMSConstants.TASK_GET_ALL_QUEUES); } return nodeDataList; }
From source file:cn.clickvalue.cv2.model.rowmapper.BeanPropertyRowMapper.java
/** * Retrieve a JDBC column value from a ResultSet, using the specified value type. * <p>Uses the specifically typed ResultSet accessor methods, falling back to * {@link #getResultSetValue(java.sql.ResultSet, int)} for unknown types. * <p>Note that the returned value may not be assignable to the specified * required type, in case of an unknown type. Calling code needs to deal * with this case appropriately, e.g. throwing a corresponding exception. * @param rs is the ResultSet holding the data * @param index is the column index/*from ww w.j a v a 2s . co m*/ * @param requiredType the required value type (may be <code>null</code>) * @return the value object * @throws SQLException if thrown by the JDBC API */ public static Object getResultSetValue(ResultSet rs, int index, Class requiredType) throws SQLException { if (requiredType == null) { return getResultSetValue(rs, index); } Object value = null; boolean wasNullCheck = false; // Explicitly extract typed value, as far as possible. if (String.class.equals(requiredType)) { value = rs.getString(index); } else if (boolean.class.equals(requiredType) || Boolean.class.equals(requiredType)) { value = Boolean.valueOf(rs.getBoolean(index)); wasNullCheck = true; } else if (byte.class.equals(requiredType) || Byte.class.equals(requiredType)) { value = Byte.valueOf(rs.getByte(index)); wasNullCheck = true; } else if (short.class.equals(requiredType) || Short.class.equals(requiredType)) { value = Short.valueOf(rs.getShort(index)); wasNullCheck = true; } else if (int.class.equals(requiredType) || Integer.class.equals(requiredType)) { value = Integer.valueOf(rs.getInt(index)); wasNullCheck = true; } else if (long.class.equals(requiredType) || Long.class.equals(requiredType)) { value = Long.valueOf(rs.getLong(index)); wasNullCheck = true; } else if (float.class.equals(requiredType) || Float.class.equals(requiredType)) { value = Float.valueOf(rs.getFloat(index)); wasNullCheck = true; } else if (double.class.equals(requiredType) || 
Double.class.equals(requiredType) || Number.class.equals(requiredType)) { value = Double.valueOf(rs.getDouble(index)); wasNullCheck = true; } else if (byte[].class.equals(requiredType)) { value = rs.getBytes(index); } else if (java.sql.Date.class.equals(requiredType)) { value = rs.getDate(index); } else if (java.sql.Time.class.equals(requiredType)) { value = rs.getTime(index); } else if (java.sql.Timestamp.class.equals(requiredType) || java.util.Date.class.equals(requiredType)) { value = rs.getTimestamp(index); } else if (BigDecimal.class.equals(requiredType)) { value = rs.getBigDecimal(index); } else if (Blob.class.equals(requiredType)) { value = rs.getBlob(index); } else if (Clob.class.equals(requiredType)) { value = rs.getClob(index); } else { // Some unknown type desired -> rely on getObject. value = getResultSetValue(rs, index); } // Perform was-null check if demanded (for results that the // JDBC driver returns as primitives). if (wasNullCheck && value != null && rs.wasNull()) { value = null; } return value; }
From source file:com.trackplus.ddl.GenericStringValueConverter.java
/**
 * Renders a single column of the current row as a SQL literal string for DDL
 * export: numeric types pass through unquoted, character/temporal types are
 * single-quoted with embedded quotes doubled, and binary/BLOB types are
 * Base64-encoded then quoted.
 *
 * @param resultSet the row being exported
 * @param columnIdx 1-based column index
 * @param jdbcType  the java.sql.Types code of the column
 * @return the literal text, or null if the column value was SQL NULL
 * @throws SQLException on JDBC access errors
 * @throws DDLException if a DATE/TIMESTAMP column holds a year before 1900
 */
protected String extractColumnValue(ResultSet resultSet, int columnIdx, int jdbcType)
        throws SQLException, DDLException {
    String value = resultSet.getString(columnIdx);
    if (value != null) {
        switch (jdbcType) {
        // Numeric types: emitted unquoted, exactly as the driver rendered them.
        case Types.NUMERIC:
        case Types.DECIMAL:
            break;
        case Types.BIT:
        case Types.BOOLEAN:
        case Types.TINYINT:
        case Types.SMALLINT:
        case Types.INTEGER:
        case Types.BIGINT:
        case Types.REAL:
        case Types.FLOAT:
        case Types.DOUBLE: {
            break;
        }
        // Quotable types: wrap in single quotes and double embedded quotes.
        case Types.CHAR:
        case Types.VARCHAR:
        case Types.LONGVARCHAR:
        case Types.BINARY:
        case Types.VARBINARY:
        case Types.TIME:
        case Types.CLOB:
        case Types.ARRAY:
        case Types.REF: {
            value = "'" + value.replaceAll("'", "''") + "'";
            break;
        }
        case Types.DATE:
        case Types.TIMESTAMP: {
            // Validate the year via getDate(); pre-1900 dates are rejected,
            // otherwise the driver's string form is quoted as-is.
            // NOTE(review): for TIMESTAMP, getDate() drops the time portion —
            // it is only used for the year check here, not for the output.
            Date d = resultSet.getDate(columnIdx);
            Calendar cal = Calendar.getInstance();
            cal.setTime(d);
            int year = cal.get(Calendar.YEAR);
            if (year < 1900) {
                throw new DDLException("Invalid date:" + d);
            } else {
                value = "'" + value + "'";
            }
            break;
        }
        case Types.BLOB:
        case Types.LONGVARBINARY: {
            // Binary payload: Base64-encode the full blob (getBytes is 1-based).
            Blob blobValue = resultSet.getBlob(columnIdx);
            String str = new String(Base64.encodeBase64(blobValue.getBytes(1l, (int) blobValue.length())));
            value = "'" + str + "'";
            break;
        }
        default:
            break;
        }
    }
    return value;
}
From source file:org.apache.jackrabbit.core.persistence.bundle.BundleDbPersistenceManager.java
/**
 * {@inheritDoc}
 *
 * Loads the serialized node bundle for the given id from the bundle table.
 * The blob is buffered fully into memory before parsing (JCR-1039), then
 * deserialized through the bundle binding.
 */
protected synchronized NodePropBundle loadBundle(NodeId id) throws ItemStateException {
    ResultSet rs = null;
    InputStream in = null;
    try {
        Statement stmt = connectionManager.executeStmt(bundleSelectSQL, getKey(id.getUUID()));
        rs = stmt.getResultSet();
        if (!rs.next()) {
            // No row for this id: the bundle does not exist.
            return null;
        }
        Blob b = rs.getBlob(1);
        // JCR-1039: pre-fetch/buffer blob data
        long length = b.length();
        byte[] bytes = new byte[(int) length];
        in = b.getBinaryStream();
        // Read until the buffer is full or the stream is exhausted.
        int read, pos = 0;
        while ((read = in.read(bytes, pos, bytes.length - pos)) > 0) {
            pos += read;
        }
        DataInputStream din = new DataInputStream(new ByteArrayInputStream(bytes));
        NodePropBundle bundle = binding.readBundle(din, id);
        bundle.setSize(length);
        return bundle;
    } catch (Exception e) {
        // Any failure (SQL, I/O, parse) is wrapped as an ItemStateException.
        String msg = "failed to read bundle: " + id + ": " + e;
        log.error(msg);
        throw new ItemStateException(msg, e);
    } finally {
        IOUtils.closeQuietly(in);
        closeResultSet(rs);
    }
}
From source file:org.apache.james.mailrepository.jdbc.JDBCMailRepository.java
/**
 * Retrieves a stored mail by key: loads the message row, optionally loads and
 * deserializes its attribute map, and wires the body up as a lazy JDBC-backed
 * MIME message. Attribute-loading failures are logged and tolerated (the mail
 * is returned without attributes); message-row failures are rethrown.
 *
 * @see org.apache.james.mailrepository.api.MailRepository#retrieve(String)
 */
@SuppressWarnings("unchecked")
public Mail retrieve(String key) throws MessagingException {
    if (DEEP_DEBUG) {
        System.err.println("retrieving " + key);
    }
    Connection conn = null;
    PreparedStatement retrieveMessage = null;
    ResultSet rsMessage = null;
    try {
        conn = datasource.getConnection();
        if (DEEP_DEBUG) {
            System.err.println("got a conn " + key);
        }
        retrieveMessage = conn.prepareStatement(sqlQueries.getSqlString("retrieveMessageSQL", true));
        retrieveMessage.setString(1, key);
        retrieveMessage.setString(2, repositoryName);
        rsMessage = retrieveMessage.executeQuery();
        if (DEEP_DEBUG) {
            System.err.println("ran the query " + key);
        }
        if (!rsMessage.next()) {
            // Unknown key: not an error, just absent.
            if (getLogger().isDebugEnabled()) {
                String debugBuffer = "Did not find a record " + key + " in " + repositoryName;
                getLogger().debug(debugBuffer);
            }
            return null;
        }
        // Determine whether attributes are used and retrieve them
        PreparedStatement retrieveMessageAttr = null;
        HashMap<String, Object> attributes = null;
        if (jdbcMailAttributesReady) {
            String retrieveMessageAttrSql = sqlQueries.getSqlString("retrieveMessageAttributesSQL", false);
            ResultSet rsMessageAttr = null;
            try {
                retrieveMessageAttr = conn.prepareStatement(retrieveMessageAttrSql);
                retrieveMessageAttr.setString(1, key);
                retrieveMessageAttr.setString(2, repositoryName);
                rsMessageAttr = retrieveMessageAttr.executeQuery();
                if (rsMessageAttr.next()) {
                    try {
                        byte[] serialized_attr;
                        // The "getAttributes" DB option selects how the attribute
                        // column is read: via a Blob handle or via raw bytes.
                        String getAttributesOption = sqlQueries.getDbOption("getAttributes");
                        if (getAttributesOption != null
                                && (getAttributesOption.equalsIgnoreCase("useBlob")
                                        || getAttributesOption.equalsIgnoreCase("useBinaryStream"))) {
                            Blob b = rsMessageAttr.getBlob(1);
                            serialized_attr = b.getBytes(1, (int) b.length());
                        } else {
                            serialized_attr = rsMessageAttr.getBytes(1);
                        }
                        // this check is for better backwards compatibility
                        if (serialized_attr != null) {
                            ByteArrayInputStream bais = new ByteArrayInputStream(serialized_attr);
                            ObjectInputStream ois = new ObjectInputStream(bais);
                            attributes = (HashMap<String, Object>) ois.readObject();
                            ois.close();
                        }
                    } catch (IOException ioe) {
                        // Best-effort: corrupt attributes are logged, mail still returned.
                        if (getLogger().isDebugEnabled()) {
                            String debugBuffer = "Exception reading attributes " + key + " in " + repositoryName;
                            getLogger().debug(debugBuffer, ioe);
                        }
                    }
                } else {
                    if (getLogger().isDebugEnabled()) {
                        String debugBuffer = "Did not find a record (attributes) " + key + " in "
                                + repositoryName;
                        getLogger().debug(debugBuffer);
                    }
                }
            } catch (SQLException sqle) {
                // Best-effort: attribute-query failures are logged, not rethrown.
                String errorBuffer = "Error retrieving message" + sqle.getMessage() + sqle.getErrorCode()
                        + sqle.getSQLState() + sqle.getNextException();
                getLogger().error(errorBuffer);
            } finally {
                theJDBCUtil.closeJDBCResultSet(rsMessageAttr);
                theJDBCUtil.closeJDBCStatement(retrieveMessageAttr);
            }
        }
        // Populate the mail object from the message row (columns are positional:
        // 1 state, 2 error message, 3 sender, 4 recipients, 5 remote host,
        // 6 remote addr, 7 last updated).
        MailImpl mc = new MailImpl();
        mc.setAttributesRaw(attributes);
        mc.setName(key);
        mc.setState(rsMessage.getString(1));
        mc.setErrorMessage(rsMessage.getString(2));
        String sender = rsMessage.getString(3);
        if (sender == null) {
            mc.setSender(null);
        } else {
            mc.setSender(new MailAddress(sender));
        }
        StringTokenizer st = new StringTokenizer(rsMessage.getString(4), "\r\n", false);
        Set<MailAddress> recipients = new HashSet<MailAddress>();
        while (st.hasMoreTokens()) {
            recipients.add(new MailAddress(st.nextToken()));
        }
        mc.setRecipients(recipients);
        mc.setRemoteHost(rsMessage.getString(5));
        mc.setRemoteAddr(rsMessage.getString(6));
        mc.setLastUpdated(rsMessage.getTimestamp(7));
        // Message body is loaded lazily through a JDBC-backed source.
        MimeMessageJDBCSource source = new MimeMessageJDBCSource(this, key, sr);
        MimeMessageCopyOnWriteProxy message = new MimeMessageCopyOnWriteProxy(source);
        mc.setMessage(message);
        return mc;
    } catch (SQLException sqle) {
        String errorBuffer = "Error retrieving message" + sqle.getMessage() + sqle.getErrorCode()
                + sqle.getSQLState() + sqle.getNextException();
        getLogger().error(errorBuffer);
        getLogger().debug("Failed to retrieve mail", sqle);
        throw new MessagingException("Exception while retrieving mail: " + sqle.getMessage(), sqle);
    } catch (Exception me) {
        throw new MessagingException("Exception while retrieving mail: " + me.getMessage(), me);
    } finally {
        theJDBCUtil.closeJDBCResultSet(rsMessage);
        theJDBCUtil.closeJDBCStatement(retrieveMessage);
        theJDBCUtil.closeJDBCConnection(conn);
    }
}
From source file:com.erbjuder.logger.server.rest.util.ResultSetConverter.java
public List<String> toStringList(ResultSet rs) throws Exception { List<String> list = new ArrayList<String>(); try {/* w w w .j a v a 2 s . c o m*/ // we will need the column names, this will save the table meta-data like column nmae. java.sql.ResultSetMetaData rsmd = rs.getMetaData(); //loop through the ResultSet while (rs.next()) { //figure out how many columns there are int numColumns = rsmd.getColumnCount(); //each row in the ResultSet will be converted to a JSON Object StringBuilder builder = new StringBuilder(); // loop through all the columns and place them into the JSON Object for (int i = 1; i < numColumns + 1; i++) { String column_name = rsmd.getColumnName(i); if (rsmd.getColumnType(i) == java.sql.Types.ARRAY) { builder.append(rs.getArray(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.BIGINT) { builder.append(rs.getInt(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.BOOLEAN) { builder.append(rs.getBoolean(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.BLOB) { builder.append(rs.getBlob(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.DOUBLE) { builder.append(rs.getDouble(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.FLOAT) { builder.append(rs.getFloat(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.INTEGER) { builder.append(rs.getInt(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.NVARCHAR) { builder.append(rs.getNString(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.VARCHAR) { // temp = rs.getString(column_name); //saving column data to temp variable // temp = ESAPI.encoder().canonicalize(temp); //decoding data to base state // temp = ESAPI.encoder().encodeForHTML(temp); //encoding to be browser safe // obj.put(column_name, temp); //putting data into JSON object // builder.append(rs.getNString(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.TINYINT) { builder.append(rs.getInt(column_name)); } 
else if (rsmd.getColumnType(i) == java.sql.Types.SMALLINT) { builder.append(rs.getInt(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.DATE) { builder.append(rs.getDate(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.TIME) { builder.append(rs.getTime(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.TIMESTAMP) { builder.append(rs.getTimestamp(column_name)); } else if (rsmd.getColumnType(i) == java.sql.Types.NUMERIC) { builder.append(rs.getBigDecimal(column_name)); } else { builder.append(rs.getObject(column_name)); } } //end foreach list.add(builder.toString()); } //end while } catch (Exception e) { e.printStackTrace(); } return list; //return String list }
From source file:org.apache.jackrabbit.core.persistence.bundle.BundleDbPersistenceManager.java
/**
 * Loads a bundle from the underlying system and optionally performs
 * a check on the bundle first. The blob is buffered fully into memory
 * before parsing (JCR-1039); when checking is enabled, the buffered bytes
 * are re-wrapped in a fresh stream after validation so readBundle() starts
 * from the beginning.
 *
 * @param id the node id of the bundle
 * @param checkBeforeLoading check the bundle before loading it and log
 *        detailed informations about it (slower)
 * @return the loaded bundle or <code>null</code> if the bundle does not
 *         exist.
 * @throws ItemStateException if an error while loading occurs.
 */
protected synchronized NodePropBundle loadBundle(NodeId id, boolean checkBeforeLoading)
        throws ItemStateException {
    ResultSet rs = null;
    InputStream in = null;
    byte[] bytes = null;
    try {
        Statement stmt = connectionManager.executeStmt(bundleSelectSQL, getKey(id.getUUID()));
        rs = stmt.getResultSet();
        if (!rs.next()) {
            // No row for this id: the bundle does not exist.
            return null;
        }
        Blob b = rs.getBlob(1);
        // JCR-1039: pre-fetch/buffer blob data
        long length = b.length();
        bytes = new byte[(int) length];
        in = b.getBinaryStream();
        // Read until the buffer is full or the stream is exhausted.
        int read, pos = 0;
        while ((read = in.read(bytes, pos, bytes.length - pos)) > 0) {
            pos += read;
        }
        DataInputStream din = new DataInputStream(new ByteArrayInputStream(bytes));
        if (checkBeforeLoading) {
            if (binding.checkBundle(din)) {
                // reset stream for readBundle()
                din = new DataInputStream(new ByteArrayInputStream(bytes));
            } else {
                // gets wrapped as proper ItemStateException below
                throw new Exception("invalid bundle, see previous BundleBinding error log entry");
            }
        }
        NodePropBundle bundle = binding.readBundle(din, id);
        bundle.setSize(length);
        return bundle;
    } catch (Exception e) {
        // Any failure (SQL, I/O, validation, parse) is wrapped uniformly.
        String msg = "failed to read bundle: " + id + ": " + e;
        log.error(msg);
        throw new ItemStateException(msg, e);
    } finally {
        IOUtils.closeQuietly(in);
        closeResultSet(rs);
    }
}