List of usage examples for the java.security.DigestOutputStream constructor:

public DigestOutputStream(OutputStream stream, MessageDigest digest)
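DigestOutputStream wraps another OutputStream and feeds every byte written through it into the supplied MessageDigest, so the hash is computed as a side effect of writing. Before the real-world examples below, here is a minimal self-contained sketch of the constructor; the file name, algorithm choice, and class name are illustrative only and not taken from any of the source files that follow.

import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class DigestOutputStreamDemo {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        // The digest is updated with every byte written through the stream.
        MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
        // "example.bin" is just an illustrative output file name.
        try (DigestOutputStream out = new DigestOutputStream(
                new BufferedOutputStream(new FileOutputStream("example.bin")), sha256)) {
            out.write("hello world".getBytes(StandardCharsets.UTF_8));
        }
        // digest() returns the hash of everything written and resets the MessageDigest.
        StringBuilder hex = new StringBuilder();
        for (byte b : sha256.digest()) {
            hex.append(String.format("%02x", b));
        }
        System.out.println("SHA-256: " + hex);
    }
}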
From source file:alluxio.underfs.oss.OSSOutputStream.java
/**
 * Creates a new instance of {@link OSSOutputStream}.
 *
 * @param bucketName the name of the bucket
 * @param key the key of the file
 * @param client the client for OSS
 * @throws IOException if an I/O error occurs
 */
public OSSOutputStream(String bucketName, String key, OSSClient client) throws IOException {
    Preconditions.checkArgument(bucketName != null && !bucketName.isEmpty(),
        "Bucket name must not be null or empty.");
    Preconditions.checkArgument(key != null && !key.isEmpty(), "OSS path must not be null or empty.");
    Preconditions.checkArgument(client != null, "OSSClient must not be null.");
    mBucketName = bucketName;
    mKey = key;
    mOssClient = client;
    mFile = new File(PathUtils.concatPath("/tmp", UUID.randomUUID()));
    try {
        mHash = MessageDigest.getInstance("MD5");
        mLocalOutputStream = new BufferedOutputStream(
            new DigestOutputStream(new FileOutputStream(mFile), mHash));
    } catch (NoSuchAlgorithmException e) {
        LOG.warn("Algorithm not available for MD5 hash.", e);
        mHash = null;
        mLocalOutputStream = new BufferedOutputStream(new FileOutputStream(mFile));
    }
}
From source file:alluxio.underfs.cos.COSOutputStream.java
/**
 * Creates a new instance of {@link COSOutputStream}.
 *
 * @param bucketName the name of the bucket
 * @param key the key of the file
 * @param client the client for COS
 */
public COSOutputStream(String bucketName, String key, COSClient client) throws IOException {
    Preconditions.checkArgument(bucketName != null && !bucketName.isEmpty(),
        "Bucket name must not be null or empty.");
    Preconditions.checkArgument(key != null && !key.isEmpty(), "COS path must not be null or empty.");
    Preconditions.checkArgument(client != null, "COSClient must not be null.");
    mBucketName = bucketName;
    mKey = key;
    mCosClient = client;
    mFile = new File(PathUtils.concatPath(CommonUtils.getTmpDir(), UUID.randomUUID()));
    try {
        mHash = MessageDigest.getInstance("MD5");
        mLocalOutputStream = new BufferedOutputStream(
            new DigestOutputStream(new FileOutputStream(mFile), mHash));
    } catch (NoSuchAlgorithmException e) {
        LOG.warn("Algorithm not available for MD5 hash.", e);
        mHash = null;
        mLocalOutputStream = new BufferedOutputStream(new FileOutputStream(mFile));
    }
}
From source file:com.github.neio.filesystem.paths.FilePath.java
@Override
public BigInteger sha1Hash() throws NeIOException, FilesystemException {
    try {
        MessageDigest hash = MessageDigest.getInstance("SHA1");
        DigestOutputStream digestOutputStream = new DigestOutputStream(new NullOutputStream(), hash);
        IOUtils.copy(new AutoCloseInputStream(new FileInputStream(new java.io.File(super.path))),
            digestOutputStream);
        return new BigInteger(hash.digest());
    } catch (NoSuchAlgorithmException e) {
        throw new NeIOException("Unable to calculate hash due to SHA1 algorithm not being found", e);
    } catch (FileNotFoundException e) {
        throw new FilesystemException("File pointed at by this path [" + super.path + "] does not exist", e);
    } catch (IOException e) {
        throw new NeIOException(e);
    }
}
From source file:com.orangeandbronze.jblubble.jdbc.JdbcBlobstoreService.java
@Override
public BlobKey createBlob(BlobstoreWriteCallback callback, String name, String contentType)
        throws IOException, BlobstoreException {
    try {
        try (Connection connection = dataSource.getConnection();
                PreparedStatement ps = connection.prepareStatement(getInsertSql(),
                    Statement.RETURN_GENERATED_KEYS)) {
            ps.setString(1, name);
            ps.setString(2, contentType);
            Blob content = connection.createBlob();
            try {
                long size;
                String md5Hash = null;
                OutputStream out = new BufferedOutputStream(content.setBinaryStream(1L), getBufferSize());
                try {
                    CountingOutputStream countingOutputStream = new CountingOutputStream(out);
                    try {
                        MessageDigest md5;
                        try {
                            md5 = MessageDigest.getInstance(MD5_ALGORITHM_NAME);
                            try (DigestOutputStream digestOutputStream = new DigestOutputStream(
                                    countingOutputStream, md5)) {
                                size = callback.writeToOutputStream(digestOutputStream);
                                if (size == -1L) {
                                    size = countingOutputStream.getByteCount();
                                }
                                md5Hash = new String(encodeHex(md5.digest()));
                            }
                        } catch (NoSuchAlgorithmException e) {
                            throw new BlobstoreException(e);
                        }
                    } finally {
                        countingOutputStream.close();
                    }
                } finally {
                    out.close();
                }
                ps.setBlob(3, content);
                ps.setLong(4, size);
                ps.setTimestamp(5, new java.sql.Timestamp(new java.util.Date().getTime()));
                ps.setString(6, md5Hash);
                int rowCount = ps.executeUpdate();
                if (rowCount == 0) {
                    throw new BlobstoreException("Creating blob failed, no rows created.");
                }
                long generatedId = getGeneratedKey(ps);
                return new BlobKey(String.valueOf(generatedId));
            } finally {
                content.free();
            }
        }
    } catch (SQLException e) {
        throw new BlobstoreException("Error when creating blob", e);
    }
}
From source file:com.thoughtworks.go.plugin.infra.commons.PluginsZip.java
public void create() {
    checkFilesAccessibility(bundledPlugins, externalPlugins);
    reset();
    MessageDigest md5Digest = DigestUtils.getMd5Digest();
    try (ZipOutputStream zos = new ZipOutputStream(
            new DigestOutputStream(new BufferedOutputStream(new FileOutputStream(destZipFile)), md5Digest))) {
        for (GoPluginDescriptor agentPlugins : agentPlugins()) {
            String zipEntryPrefix = "external/";
            if (agentPlugins.isBundledPlugin()) {
                zipEntryPrefix = "bundled/";
            }
            zos.putNextEntry(
                new ZipEntry(zipEntryPrefix + new File(agentPlugins.pluginFileLocation()).getName()));
            Files.copy(new File(agentPlugins.pluginFileLocation()).toPath(), zos);
            zos.closeEntry();
        }
    } catch (Exception e) {
        LOG.error("Could not create zip of plugins for agent to download.", e);
    }
    md5DigestOfPlugins = Hex.encodeHexString(md5Digest.digest());
}
From source file:eu.delving.metadata.Hasher.java
public DigestOutputStream createDigestOutputStream(OutputStream outputStream) {
    messageDigest.reset();
    return new DigestOutputStream(outputStream, messageDigest);
}
From source file:org.apache.hadoop.hdfs.qjournal.server.UploadImageServlet.java
private static synchronized SessionDescriptor startImageUpload(UploadImageParam params,
        ServletContext context) throws IOException {
    // get and validate storage
    Journal journal = getStorage(context, params);
    JNStorage storage = journal.getImageStorage();

    // get tmp image file
    File outputFile = storage.getCheckpointImageFile(params.txId);

    // starting a new upload
    long sessionId = sessionIds.incrementAndGet();
    MessageDigest digester = MD5Hash.getDigester();

    // open the stream that will be used throughout the upload
    FileOutputStream fos = new FileOutputStream(outputFile);
    OutputStream os = new BufferedOutputStream(new DigestOutputStream(fos, digester));

    SessionDescriptor sd = new SessionDescriptor(journal, params.journalId, sessionId, os, params.txId,
        digester);
    sessions.put(sessionId, sd);

    InjectionHandler.processEventIO(InjectionEvent.UPLOADIMAGESERVLET_START, context);
    return sd;
}
From source file:info.magnolia.module.files.MD5CheckingFileExtractorOperation.java
@Override
protected OutputStream openOutput(File outFile) throws IOException {
    final OutputStream outputStream = super.openOutput(outFile);
    final MessageDigest md5 = getMessageDigest();
    return new DigestOutputStream(outputStream, md5);
}
From source file:com.orangeandbronze.jblubble.jdbc.springframework.SpringJdbcBlobstoreService.java
@Override
public BlobKey createBlob(BlobstoreWriteCallback callback, String name, String contentType)
        throws IOException, BlobstoreException {
    try {
        return jdbcTemplate.execute(new ConnectionCallback<BlobKey>() {
            @Override
            public BlobKey doInConnection(Connection connection) throws SQLException, DataAccessException {
                try (PreparedStatement ps = connection.prepareStatement(getInsertSql(),
                        Statement.RETURN_GENERATED_KEYS)) {
                    ps.setString(1, name);
                    ps.setString(2, contentType);
                    Blob content = connection.createBlob();
                    try {
                        long size;
                        String md5Hash = null;
                        OutputStream out = new BufferedOutputStream(content.setBinaryStream(1L),
                            getBufferSize());
                        try {
                            CountingOutputStream countingOutputStream = new CountingOutputStream(out);
                            try {
                                MessageDigest md5;
                                try {
                                    md5 = MessageDigest.getInstance(MD5_ALGORITHM_NAME);
                                    try (DigestOutputStream digestOutputStream = new DigestOutputStream(
                                            countingOutputStream, md5)) {
                                        size = callback.writeToOutputStream(digestOutputStream);
                                        if (size == -1L) {
                                            size = countingOutputStream.getByteCount();
                                        }
                                        md5Hash = new String(encodeHex(md5.digest()));
                                    }
                                } catch (NoSuchAlgorithmException e) {
                                    throw new BlobstoreException(e);
                                }
                            } finally {
                                countingOutputStream.close();
                            }
                        } finally {
                            out.close();
                        }
                        ps.setBlob(3, content);
                        ps.setLong(4, size);
                        ps.setTimestamp(5, new java.sql.Timestamp(new java.util.Date().getTime()));
                        ps.setString(6, md5Hash);
                        int rowCount = ps.executeUpdate();
                        if (rowCount == 0) {
                            throw new BlobstoreException("Creating blob failed, no rows created.");
                        }
                        long generatedId = getGeneratedKey(ps);
                        return new BlobKey(String.valueOf(generatedId));
                    } finally {
                        content.free();
                    }
                } catch (IOException e) {
                    throw new BlobstoreException("Error when creating blob", e);
                }
            }
        });
    } catch (DataAccessException e) {
        throw new BlobstoreException(e);
    }
}
From source file:com.orangeandbronze.jblubble.jdbc.PgJdbcBlobstoreService.java
@Override
public BlobKey createBlob(BlobstoreWriteCallback callback, String name, String contentType)
        throws IOException, BlobstoreException {
    boolean resetCommitMode = false;
    try (Connection connection = dataSource.getConnection()) {
        if (connection.getAutoCommit()) {
            connection.setAutoCommit(false);
            resetCommitMode = true;
        }
        try {
            int rowCount;
            try (PreparedStatement ps = connection.prepareStatement(getInsertSql(),
                    Statement.RETURN_GENERATED_KEYS)) {
                ps.setString(1, name);
                ps.setString(2, contentType);
                ps.setTimestamp(3, new java.sql.Timestamp(new java.util.Date().getTime()));
                rowCount = ps.executeUpdate();
                if (rowCount == 0) {
                    throw new BlobstoreException("Creating blob failed, no rows created.");
                }
                final long generatedId = getGeneratedKey(ps);
                long size;
                String md5Hash = null;
                try (PreparedStatement ps2 = connection.prepareStatement(getSelectContentByIdSql())) {
                    ps2.setLong(1, generatedId);
                    ResultSet rs = ps2.executeQuery();
                    if (!rs.next()) {
                        throw new BlobstoreException("Creating blob failed, no rows created.");
                    }
                    Blob contentBlob = rs.getBlob(1);
                    try {
                        OutputStream out = new BufferedOutputStream(contentBlob.setBinaryStream(1L),
                            getBufferSize());
                        try {
                            CountingOutputStream countingOutputStream = new CountingOutputStream(out);
                            try {
                                MessageDigest md5;
                                try {
                                    md5 = MessageDigest.getInstance(MD5_ALGORITHM_NAME);
                                    try (DigestOutputStream digestOutputStream = new DigestOutputStream(
                                            countingOutputStream, md5)) {
                                        size = callback.writeToOutputStream(digestOutputStream);
                                        if (size == -1L) {
                                            size = countingOutputStream.getByteCount();
                                        }
                                        md5Hash = new String(encodeHex(md5.digest()));
                                    }
                                } catch (NoSuchAlgorithmException e) {
                                    throw new BlobstoreException(e);
                                }
                            } finally {
                                try {
                                    countingOutputStream.close();
                                } catch (IOException e) {
                                    // Since digestOutputStream gets closed, the wrapped
                                    // countingOutputStream does not really need to get closed again.
                                }
                            }
                        } finally {
                            try {
                                out.close();
                            } catch (IOException e) {
                                // Since digestOutputStream gets closed, the wrapped buffered
                                // OutputStream does not really need to get closed again.
                            }
                        }
                    } finally {
                        contentBlob.free();
                    }
                }
                try (PreparedStatement ps3 = connection.prepareStatement(getUpdateSizeSql())) {
                    ps3.setLong(1, size);
                    ps3.setString(2, md5Hash);
                    ps3.setLong(3, generatedId);
                    rowCount = ps3.executeUpdate();
                    if (rowCount == 0) {
                        throw new BlobstoreException("Creating blob failed, no rows created.");
                    }
                }
                if (resetCommitMode) {
                    connection.commit();
                }
                return new BlobKey(String.valueOf(generatedId));
            }
        } catch (Exception e) {
            connection.rollback();
            throw e;
        } finally {
            if (resetCommitMode) {
                connection.setAutoCommit(true);
            }
        }
    } catch (SQLException e) {
        throw new BlobstoreException("Error when creating blob", e);
    }
}