List of usage examples for java.sql Blob getBinaryStream
java.io.InputStream getBinaryStream() throws SQLException;
From source file:com.cosmosource.common.action.CALicenseAction.java
public String getImg() throws Exception { String caid = getParameter("caid"); TAcCaapply entitys = camgrManager.findById(Long.parseLong(caid)); Blob blob = entitys.getHandlerstamp(); if (blob != null) { getResponse().setContentType("image/jpg"); OutputStream outs = getResponse().getOutputStream(); InputStream pi = blob.getBinaryStream(); int blobsize = (int) blob.length(); byte[] blobbytes = new byte[blobsize]; int bytesRead = 0; while ((bytesRead = pi.read(blobbytes)) != -1) { outs.write(blobbytes, 0, bytesRead); }// www . j ava 2 s . co m pi.close(); outs.flush(); } return null; }
From source file:com.taobao.tdhs.jdbc.TDHSPreparedStatement.java
/**
 * Sets the designated parameter to the given {@link Blob} by delegating to the
 * stream-based overload.
 *
 * <p>Per the JDBC contract, passing a {@code null} Blob must set the parameter
 * to SQL {@code NULL} rather than throwing a {@link NullPointerException}, so
 * the null case is forwarded as a null stream.
 *
 * @param parameterIndex the first parameter is 1, the second is 2, ...
 * @param x the Blob value, or {@code null} for SQL NULL
 * @throws SQLException if a database access error occurs
 */
public void setBlob(int parameterIndex, Blob x) throws SQLException {
    if (x == null) {
        setBlob(parameterIndex, (InputStream) null);
    } else {
        setBlob(parameterIndex, x.getBinaryStream());
    }
}
From source file:joachimeichborn.geotag.io.database.DerbyDatabase.java
/**
 * Returns any stored preview image for the given file path, regardless of its
 * dimensions, or {@code null} when no preview exists or reading fails.
 *
 * @param aFile the image file path used as the lookup key
 * @return the decoded preview, or {@code null}
 */
@Override
public BufferedImage getPreviewAnySize(final String aFile) {
    try {
        // readConnection is shared; serialize access to it.
        synchronized (readConnection) {
            try (final PreparedStatement statement =
                    readConnection.prepareStatement(GET_PREVIEW_ANY_SIZE_QUERY)) {
                statement.setString(1, aFile);
                try (final ResultSet result = statement.executeQuery()) {
                    if (!result.next()) {
                        return null;
                    }
                    final Blob blob = result.getBlob(Preview.IMAGE_COLUMN);
                    // ImageIO.read does not close its input; close the blob
                    // stream explicitly to avoid leaking the LOB handle.
                    try (final InputStream blobStream = blob.getBinaryStream()) {
                        return ImageIO.read(blobStream);
                    }
                }
            }
        }
    } catch (final SQLException | IOException aEx) {
        logger.log(Level.SEVERE, "Could not get preview for " + aFile, aEx);
        return null;
    }
}
From source file:joachimeichborn.geotag.io.database.DerbyDatabase.java
/**
 * Returns the stored preview image matching the exact key (file, width,
 * height), or {@code null} when no such preview exists or reading fails.
 *
 * @param aKey file path plus requested width/height
 * @return the decoded preview, or {@code null}
 */
@Override
public BufferedImage getPreview(final PreviewKey aKey) {
    try {
        // readConnection is shared; serialize access to it.
        synchronized (readConnection) {
            try (final PreparedStatement statement = readConnection.prepareStatement(GET_PREVIEW_QUERY)) {
                statement.setString(1, aKey.getFile());
                statement.setInt(2, aKey.getWidth());
                statement.setInt(3, aKey.getHeight());
                try (final ResultSet result = statement.executeQuery()) {
                    if (!result.next()) {
                        return null;
                    }
                    final Blob blob = result.getBlob(Preview.IMAGE_COLUMN);
                    // ImageIO.read does not close its input; close the blob
                    // stream explicitly to avoid leaking the LOB handle.
                    try (final InputStream blobStream = blob.getBinaryStream()) {
                        return ImageIO.read(blobStream);
                    }
                }
            }
        }
    } catch (final SQLException | IOException aEx) {
        logger.log(Level.SEVERE, "Could not get preview for " + aKey, aEx);
        return null;
    }
}
From source file:com.p5solutions.core.jpa.orm.oracle.ConversionUtilityImpl.java
/** * If blob./*from w w w .ja v a2 s . com*/ * * @param pb * the pb * @param value * the value * @return the object */ protected Object toSqlBlob(ParameterBinder pb, Object value) { if (value == null) { return null; } int type = getSqlType(pb); // if the sql target type is blob if (Types.BLOB == type) { Class<?> clazz = value.getClass(); boolean isByteArray = ReflectionUtility.isByteArray(clazz); boolean isBlob = isByteArray ? false : ReflectionUtility.isBlob(clazz); boolean isString = isByteArray | isBlob ? false : ReflectionUtility.isBlob(clazz); boolean isInputStream = isByteArray | isBlob | isString ? false : ReflectionUtility.isStringClass(clazz); // if the datasource is not set, then throw an error if (dataSource == null) { logger.error("Required datasource has not been set for " // + getClass() + ", when dealing with Lob values, datasource " // + "is required for creation of lob space in DB."); return null; } // scope variables BLOB blob = null; OutputStream os = null; // get a database connection Connection conn = DataSourceUtils.getConnection(dataSource); try { // activate the connection and create an empty blob pointer blob = BLOB.createTemporary(conn, false, BLOB.DURATION_SESSION); blob.open(BLOB.MODE_READWRITE); os = blob.setBinaryStream(0); } catch (Exception e) { logger.error("Unable to create temporary blob when accessing entity " + pb.getEntityClass() + " on paramater " + pb.getBindingPath() + " and column " + pb.getColumnName()); blob = null; os = null; return null; } InputStream is = null; // if the source is of type byte[] if (isByteArray) { blob.setBytes((byte[]) value); } else if (isBlob) { Blob sourceBlob = (Blob) value; try { is = sourceBlob.getBinaryStream(); } catch (Exception e) { logger.error( "Unable to copy input stream to output when accessing entity " + pb.getEntityClass() + " on paramater " + pb.getBindingPath() + " and column " + pb.getColumnName()); is = null; } } else if (isString) { String v = (String) value; 
blob.setBytes(v.getBytes()); } else if (isInputStream) { is = (InputStream) value; } // if the input stream is set if (is != null) { try { IOUtils.copy(is, os); } catch (Exception e) { logger.error( "Unable to copy input stream to output when accessing entity " + pb.getEntityClass() + " on paramater " + pb.getBindingPath() + " and column " + pb.getColumnName()); } } if (os != null) { try { os.close(); blob.close(); } catch (Exception e) { logger.error("Unable to close stream properly when accessing entity " + pb.getEntityClass() + " on paramater " + pb.getBindingPath() + " and column " + pb.getColumnName()); } } return blob; } return null; }
From source file:com.jaspersoft.jasperserver.api.metadata.data.snapshot.hibernate.HibernateDataSnapshotContentsService.java
@Transactional(propagation = Propagation.MANDATORY, readOnly = true) public DataSnapshot loadDataSnapshot(ExecutionContext context, final long id) { return getHibernateTemplate().execute(new HibernateCallback<DataSnapshot>() { public DataSnapshot doInHibernate(Session session) throws HibernateException, SQLException { if (log.isDebugEnabled()) { log.debug("loading data snapshot " + id); }//from w w w. j a v a 2 s. c o m Blob dataBlob = loadSnapshotDataBlob(id, session); if (dataBlob == null) { return null; } DataSnapshot snapshot; InputStream dataStream = dataBlob.getBinaryStream(); try { snapshot = getSnapshotSerializer().readSnapshot(dataStream); } catch (IOException e) { throw new JSExceptionWrapper("Failed to read data snapshot", e); } finally { try { dataStream.close(); } catch (IOException e) { log.warn("Failed to close blob stream for data snapshot " + id, e); } } return snapshot; } }); }
From source file:com.jaspersoft.jasperserver.api.metadata.data.snapshot.hibernate.HibernateDataSnapshotContentsService.java
@Transactional(propagation = Propagation.MANDATORY, readOnly = true) public DataContainer loadDataSnapshotData(ExecutionContext context, final long id) { return getHibernateTemplate().execute(new HibernateCallback<DataContainer>() { public DataContainer doInHibernate(Session session) throws HibernateException, SQLException { if (log.isDebugEnabled()) { log.debug("loading snapshot data " + id); }/*from w ww.j ava 2s . c o m*/ Blob dataBlob = loadSnapshotDataBlob(id, session); if (dataBlob == null) { return null; } FileBufferedDataContainer dataContainer; InputStream dataStream = dataBlob.getBinaryStream(); try { dataContainer = new FileBufferedDataContainer(); OutputStream dataOut = dataContainer.getOutputStream(); try { DataContainerStreamUtil.pipeData(dataStream, dataOut); } finally { dataOut.close();// fail on close exception } } catch (IOException e) { throw new JSExceptionWrapper("Failed to read data snapshot", e); } finally { try { dataStream.close(); } catch (IOException e) { log.warn("Failed to close blob stream for data snapshot " + id, e); } } return dataContainer; } }); }
From source file:cz.zcu.kiv.eegdatabase.logic.controller.experiment.ExperimentMultiController.java
public ModelAndView detail(HttpServletRequest request, HttpServletResponse response) { ModelAndView mav = new ModelAndView("experiments/detail"); VhdrReader vhdr = new VhdrReader(); List<ChannelInfo> channels = null; setPermissionsToView(mav);//from www . j a va 2 s . co m int id = 0; try { id = Integer.parseInt(request.getParameter("experimentId")); } catch (Exception e) { } Experiment m = experimentDao.getExperimentForDetail(id); mav.addObject("userIsOwnerOrCoexperimenter", (auth.userIsOwnerOrCoexperimenter(id)) || (auth.isAdmin())); int subjectPersonId = m.getPersonBySubjectPersonId().getPersonId(); Boolean filesIn = new Boolean(false); ArrayList<double[]> signalData = new ArrayList<double[]>(); for (DataFile file : m.getDataFiles()) { if (file.getFilename().endsWith(".vhdr")) { Blob b = file.getFileContent(); int index = file.getFilename().lastIndexOf("."); String fileName = file.getFilename().substring(0, index); try { vhdr.readVhdr(IOUtils.toByteArray(b.getBinaryStream())); } catch (IOException ex) { Logger.getLogger(ExperimentMultiController.class.getName()).log(Level.SEVERE, null, ex); throw new RuntimeException(ex); } catch (SQLException ex) { Logger.getLogger(ExperimentMultiController.class.getName()).log(Level.SEVERE, null, ex); throw new RuntimeException(ex); } channels = vhdr.getChannels(); mav.addObject("channels", channels); for (DataFile file2 : m.getDataFiles()) { if ((file2.getFilename().endsWith(".eeg")) || (file2.getFilename().endsWith(".avg"))) { filesIn = true; Blob b2 = file.getFileContent(); EegReader eeg = new EegReader(vhdr); for (ChannelInfo ch : channels) { try { signalData.add( eeg.readFile(IOUtils.toByteArray(b2.getBinaryStream()), ch.getNumber())); } catch (IOException ex) { Logger.getLogger(ExperimentMultiController.class.getName()).log(Level.SEVERE, null, ex); throw new RuntimeException(ex); } catch (SQLException ex) { Logger.getLogger(ExperimentMultiController.class.getName()).log(Level.SEVERE, null, ex); throw new 
RuntimeException(ex); } } mav.addObject("signalData", signalData); } } } } mav.addObject("filesAvailable", filesIn); mav.addObject("userCanViewPersonDetails", auth.userCanViewPersonDetails(subjectPersonId)); mav.addObject("experimentDetail", m); return mav; }
From source file:gov.nih.nci.ncicb.tcga.dcc.dam.bean.QuartzQueueJobDetails.java
/** * Extract estimatedUncompressedSize and jobWSSubmissionDate from the given <code>Blob</code> * (which is expected to be a serialized <code>FilePackagerBean</code>) and populates this bean's properties * * @param jobData the serialized <code>FilePackagerBean</code> *///from w w w .j a v a2s. co m private void extractData(final Blob jobData) { // Default values that will be used in case an exception is raised Long estimatedUncompressedSize = null; Date jobWSSubmissionDate = null; if (jobData != null) { ObjectInputStream objectInputStream = null; try { //noinspection IOResourceOpenedButNotSafelyClosed objectInputStream = new ObjectInputStream(jobData.getBinaryStream()); final JobDataMap jobDataMap = (JobDataMap) objectInputStream.readObject(); final FilePackagerBean filePackagerBean = (FilePackagerBean) jobDataMap.get(JobDelegate.DATA_BEAN); ; estimatedUncompressedSize = filePackagerBean.getEstimatedUncompressedSize(); jobWSSubmissionDate = filePackagerBean.getJobWSSubmissionDate(); } catch (final IOException e) { } catch (final SQLException e) { } catch (final ClassNotFoundException e) { } finally { IOUtils.closeQuietly(objectInputStream); } } // Set this bean's fields setEstimatedUncompressedSize(estimatedUncompressedSize); setJobWSSubmissionDate(jobWSSubmissionDate); }
From source file:gobblin.metastore.MysqlStateStore.java
/**
 * Reads every serialized state stored under the given store/table names.
 *
 * <p>Each row's blob may hold multiple concatenated (key, state) records and
 * may be GZIP-compressed; compression is detected from the blob's first two
 * bytes before choosing the stream wrapper.
 *
 * @param storeName the state store name (first SQL parameter)
 * @param tableName the table name, or a LIKE pattern when {@code useLike} is true
 * @param useLike   whether to match {@code tableName} with SQL LIKE
 * @return all deserialized states, in result-set order; empty list when none
 * @throws IOException wrapping any SQL/reflection/deserialization failure
 */
protected List<T> getAll(String storeName, String tableName, boolean useLike) throws IOException {
    List<T> states = Lists.newArrayList();

    try (Connection connection = dataSource.getConnection();
            PreparedStatement queryStatement = connection
                    .prepareStatement(useLike ? SELECT_JOB_STATE_WITH_LIKE_SQL : SELECT_JOB_STATE_SQL)) {
        queryStatement.setString(1, storeName);
        queryStatement.setString(2, tableName);
        try (ResultSet rs = queryStatement.executeQuery()) {
            while (rs.next()) {
                Blob blob = rs.getBlob(1);
                Text key = new Text();
                // Peek at the first two bytes to detect a GZIP magic header,
                // then open the stream (compressed or raw) accordingly.
                try (InputStream is = StreamUtils.isCompressed(blob.getBytes(1, 2))
                        ? new GZIPInputStream(blob.getBinaryStream())
                        : blob.getBinaryStream(); DataInputStream dis = new DataInputStream(is)) {
                    // keep deserializing while we have data
                    while (dis.available() > 0) {
                        T state = this.stateClass.newInstance();
                        key.readString(dis);
                        state.readFields(dis);
                        states.add(state);
                    }
                } catch (EOFException e) {
                    // no more data. GZIPInputStream.available() doesn't return 0 until after EOF.
                }
            }
        }
    } catch (RuntimeException re) {
        // Propagate programming errors unchanged.
        throw re;
    } catch (Exception e) {
        // Wrap SQL/instantiation failures with context about what was being read.
        throw new IOException(
                "failure retrieving state from storeName " + storeName + " tableName " + tableName, e);
    }

    return states;
}