List of usage examples for the java.security DigestInputStream constructor
public DigestInputStream(InputStream stream, MessageDigest digest)
From source file:org.dspace.services.impl.storage.DSpaceStorageService.java
@Override public Bitstream store(InputStream input) throws StorageException { this.init();/* w w w .j a v a2s .com*/ // Create internal ID String id = Utils.generateKey(); Bitstream bitstream = new Bitstream(); bitstream.setDeleted(true); bitstream.setInternalId(id); bitstream.setStoreNumber(incoming); bitstreamDao.save(bitstream); try { GeneralFile file = this.getFile(bitstream); if (file != null && file.getParentFile() != null) file.getParentFile().mkdirs(); file.createNewFile(); GeneralFileOutputStream fos = FileFactory.newFileOutputStream(file); // Read through a digest input stream that will work out the MD5 DigestInputStream dis = null; try { dis = new DigestInputStream(input, MessageDigest.getInstance("MD5")); } catch (NoSuchAlgorithmException nsae) // Should never happen { log.warn("Caught NoSuchAlgorithmException", nsae); } IOUtils.copy(dis, fos); fos.close(); input.close(); bitstream.setSize(file.length()); if (dis != null) { bitstream.setChecksum(Utils.toHex(dis.getMessageDigest().digest())); bitstream.setChecksumAlgorithm("MD5"); } bitstream.setDeleted(false); bitstreamDao.save(bitstream); if (log.isDebugEnabled()) { log.debug("Stored bitstream " + bitstream.getID() + " in file " + file.getAbsolutePath()); } return bitstream; } catch (IOException e) { throw new StorageException(e); } }
From source file:io.barracks.ota.client.PackageDownloadService.java
/**
 * This method checks the package's <code>file</code> integrity.<br>
 * It uses the md5 provided in the <code>details</code> parameter.
 *
 * @param details The {@link UpdateDetails} retrieved from the Barracks platform.
 * @param file    The file which was downloaded.
 * @throws IOException              If an exception is raised while accessing the file.
 * @throws GeneralSecurityException If the hash verification fails.
 */
protected void checkPackageIntegrity(UpdateDetails details, File file)
        throws IOException, GeneralSecurityException {
    // NoSuchAlgorithmException is a GeneralSecurityException, so it propagates as declared.
    MessageDigest md = MessageDigest.getInstance("MD5");
    // Drain the file through a DigestInputStream: we only need the side effect of
    // feeding every byte into the digest. try-with-resources replaces the old manual
    // close (which swallowed close failures via printStackTrace) and the pointless
    // catch-and-rethrow block is gone.
    try (InputStream is = new DigestInputStream(new FileInputStream(file), md)) {
        byte[] buffer = new byte[8192];
        while (is.read(buffer) != -1) {
            // keep reading until EOF; the digest accumulates as a side effect
        }
    }
    // Hex-encode the computed MD5 and compare against the expected value.
    byte[] digest = md.digest();
    StringBuilder sb = new StringBuilder();
    for (byte b : digest) {
        sb.append(String.format("%02x", b));
    }
    if (!sb.toString().equals(details.getPackageInfo().getMd5())) {
        throw new DigestException(
                "Wrong file signature " + sb.toString() + " - " + details.getPackageInfo().getMd5());
    }
}
From source file:org.opencastproject.workingfilerepository.impl.WorkingFileRepositoryImpl.java
/**
 * {@inheritDoc}
 *
 * Stores {@code in} as the single file for the given media package element, replacing any
 * previously stored files, and writes a sidecar file containing the MD5 of the content.
 *
 * @see org.opencastproject.workingfilerepository.api.WorkingFileRepository#put(java.lang.String, java.lang.String,
 *      java.lang.String, java.io.InputStream)
 */
public URI put(String mediaPackageID, String mediaPackageElementID, String filename, InputStream in)
        throws IOException {
    checkPathSafe(mediaPackageID);
    checkPathSafe(mediaPackageElementID);
    File dir = getElementDirectory(mediaPackageID, mediaPackageElementID);
    if (dir.exists()) {
        // Clear the directory so only the newly stored file (and its md5 sidecar) remain.
        File[] filesToDelete = dir.listFiles();
        if (filesToDelete != null && filesToDelete.length > 0) {
            for (File fileToDelete : filesToDelete) {
                if (!fileToDelete.delete()) {
                    throw new IllegalStateException("Unable to delete file: " + fileToDelete.getAbsolutePath());
                }
            }
        }
    } else {
        logger.debug("Attempting to create a new directory at {}", dir.getAbsolutePath());
        FileUtils.forceMkdir(dir);
    }
    File f = new File(dir, PathSupport.toSafeName(filename));
    logger.debug("Attempting to write a file to {}", f.getAbsolutePath());
    FileOutputStream out = null;
    try {
        if (!f.exists()) {
            f.createNewFile();
        } else {
            logger.debug("Attempting to overwrite the file at {}", f.getAbsolutePath());
        }
        out = new FileOutputStream(f);
        // Resolve the digest before copying. MD5 is mandatory in every JRE; failing fast
        // (with the cause attached) avoids the NPE the old code hit when it merely logged
        // the error and then dereferenced a null DigestInputStream.
        MessageDigest messageDigest;
        try {
            messageDigest = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e1) {
            throw new IllegalStateException("Unable to create md5 message digest", e1);
        }
        // Wrap the input stream so the MD5 is computed while copying to disk.
        DigestInputStream dis = new DigestInputStream(in, messageDigest);
        String md5;
        try {
            IOUtils.copy(dis, out);
            md5 = Checksum.convertToHex(dis.getMessageDigest().digest());
        } finally {
            IOUtils.closeQuietly(dis);
        }
        // Store the hash in a sidecar file; remove a partial sidecar on failure.
        File md5File = null;
        try {
            md5File = getMd5File(f);
            FileUtils.writeStringToFile(md5File, md5);
        } catch (IOException e) {
            FileUtils.deleteQuietly(md5File);
            throw e;
        }
    } catch (IOException e) {
        // A failed store must not leave a half-written element directory behind.
        FileUtils.deleteDirectory(dir);
        throw e;
    } finally {
        IOUtils.closeQuietly(out);
        IOUtils.closeQuietly(in);
    }
    return getURI(mediaPackageID, mediaPackageElementID, filename);
}
From source file:dk.netarkivet.common.distribute.FTPRemoteFile.java
/**
 * An implementation of the getInputStream operation that works with FTP. Notice that most of the special work
 * (logging out and checking MD5) happens in the close() method of the returned InputStream, since that is the only
 * place where we can know we're done.
 *
 * @return An InputStream that will deliver the data transferred by FTP. Holding on to this for long periods without
 * reading any data might cause a timeout.
 */
@Override
public InputStream getInputStream() {
    // A zero-length remote file needs no FTP round-trip at all.
    if (filesize == 0) {
        return new ByteArrayInputStream(new byte[] {});
    }
    try {
        cm.logOn();
        InputStream in = cm.getFTPClient().retrieveFileStream(ftpFileName);
        if (in == null) {
            throw new IOFailure("Unable to retrieve input stream:" + cm.getFtpErrorMessage());
        }
        if (useChecksums) {
            // Wrap so an MD5 accumulates as the caller reads; it is verified in close().
            // NOTE(review): `in` is reassigned here and then captured by the anonymous
            // class below, which requires the variable to be (effectively) final —
            // confirm this compiles on the project's target Java version.
            in = new DigestInputStream(in, ChecksumCalculator.getMessageDigest(ChecksumCalculator.MD5));
        }
        return new FilterInputStream(in) {
            public void close() throws IOException {
                try {
                    super.close();
                    if (useChecksums) {
                        // Compare the digest of everything read against the expected checksum.
                        String newChecksum = ChecksumCalculator
                                .toHex(((DigestInputStream) in).getMessageDigest().digest());
                        if (!newChecksum.equals(checksum)) {
                            final String msg = "Checksums of '" + ftpFileName + "' do not match! " + "Should be "
                                    + checksum + " but was " + newChecksum;
                            log.warn(msg);
                            throw new IOFailure(msg);
                        }
                    }
                } finally {
                    // Always release the FTP session; remove the remote file unless
                    // it may be downloaded again.
                    cm.logOut();
                    if (!multipleDownloads) {
                        cleanup();
                    }
                }
            }
        };
    } catch (IOException e) {
        String msg = "Creating inputstream from '" + ftpFileName + "' failed ";
        if (e instanceof CopyStreamException) {
            // Surface the underlying cause of FTP copy-stream failures in the message.
            CopyStreamException realException = (CopyStreamException) e;
            msg += "(real cause = " + realException.getIOException() + ")";
        }
        log.warn(msg, e);
        throw new IOFailure(msg, e);
    }
}
From source file:org.dcm4chex.archive.hsm.TarRetrieverService.java
// Extracts every regular entry of tarFile into cacheDir, optionally verifying each entry
// against an MD5SUM manifest (expected as the first entry of the archive). Entries are
// written to "<name>.tmp" first and renamed into place only after a successful write
// (and MD5 check, when enabled). Disk space is freed before and after extraction to
// honor the configured minimum/preferred free-space thresholds.
// Throws IOException on read/write failure and VerifyTarException on manifest/MD5 mismatch.
private void extractTar(File tarFile, File cacheDir) throws IOException, VerifyTarException {
    int count = 0;
    long totalSize = 0;
    long free = FileSystemUtils.freeSpace(journal.getDataRootDir().getPath());
    long fsize = tarFile.length();
    // Free enough space up-front to hold the whole tar plus the configured headroom.
    long toDelete = fsize + minFreeDiskSpace - free;
    if (toDelete > 0)
        free += free(toDelete);
    byte[] buf = new byte[bufferSize];
    TarInputStream tar = new TarInputStream(new FileInputStream(tarFile));
    // `in` is the stream actually copied to disk; it is swapped for a DigestInputStream
    // per entry when MD5 checking is active.
    InputStream in = tar;
    try {
        TarEntry entry = skipDirectoryEntries(tar);
        if (entry == null)
            throw new IOException("No entries in " + tarFile);
        String entryName = entry.getName();
        Map<String, byte[]> md5sums = null;
        MessageDigest digest = null;
        if ("MD5SUM".equals(entryName)) {
            if (checkMD5) {
                try {
                    digest = MessageDigest.getInstance("MD5");
                } catch (NoSuchAlgorithmException e) {
                    // MD5 is mandatory in every JRE, so this should never happen.
                    throw new RuntimeException(e);
                }
                // Parse the manifest: each line is "<32 hex chars><2 separator chars><path>".
                md5sums = new HashMap<String, byte[]>();
                BufferedReader lineReader = new BufferedReader(new InputStreamReader(tar));
                String line;
                while ((line = lineReader.readLine()) != null) {
                    md5sums.put(line.substring(34), MD5Utils.toBytes(line.substring(0, 32)));
                }
            }
            entry = skipDirectoryEntries(tar);
        } else if (checkMD5) {
            getLog().warn("Missing MD5SUM entry in " + tarFile);
        }
        for (; entry != null; entry = skipDirectoryEntries(tar)) {
            entryName = entry.getName();
            // Retrieve saved MD5 checksum
            byte[] md5sum = null;
            if (md5sums != null && digest != null) {
                md5sum = md5sums.remove(entryName);
                if (md5sum == null)
                    throw new VerifyTarException("Unexpected TAR entry: " + entryName + " in " + tarFile);
                digest.reset();
                in = new DigestInputStream(tar, digest);
            }
            File fOri = new File(cacheDir, entryName.replace('/', File.separatorChar));
            // Write to a .tmp sibling first; renamed into place after success below.
            File f = new File(fOri.getAbsolutePath() + ".tmp");
            File dir = f.getParentFile();
            if (dir.mkdirs()) {
                log.info("M-WRITE " + dir);
            }
            log.info("M-WRITE " + f);
            // Write the stream to file
            FileOutputStream out = new FileOutputStream(f);
            boolean cleanup = true;
            try {
                int len;
                while ((len = in.read(buf)) > 0) {
                    out.write(buf, 0, len);
                }
                cleanup = false;
            } finally {
                try {
                    out.close();
                } catch (Exception ignore) {
                }
                // Remove the partial .tmp file if the copy did not complete.
                if (cleanup) {
                    log.info("M-DELETE " + f);
                    f.delete();
                }
            }
            // Verify MD5
            if (md5sums != null && digest != null) {
                if (!Arrays.equals(digest.digest(), md5sum)) {
                    log.info("M-DELETE " + f);
                    f.delete();
                    throw new VerifyTarException(
                            "Failed MD5 check of TAR entry: " + entryName + " in " + tarFile);
                } else
                    log.info("MD5 check is successful for " + entryName + " in " + tarFile);
            }
            free -= f.length();
            count++;
            totalSize += f.length();
            // NOTE(review): the renameTo result is unchecked; on some platforms the
            // rename can fail silently (e.g. if fOri already exists) — worth verifying.
            if (f.exists())
                f.renameTo(fOri);
        }
    } finally {
        tar.close();
    }
    // Restore the preferred free-space headroom asynchronously after extraction.
    toDelete = prefFreeDiskSpace - free;
    if (toDelete > 0) {
        freeNonBlocking(toDelete);
    }
}
From source file:com.hortonworks.streamline.streams.service.UDFCatalogResource.java
/**
 * Registers the UDF's jar: hashes it, validates/annotates the UDF metadata from the
 * classes in the jar, and records the jar's storage path (re-using an existing upload
 * with the same digest when possible).
 *
 * @param inputStream    jar bytes for a user-supplied UDF; ignored for builtins
 * @param udf            the UDF record to populate (digest, type info, storage path)
 * @param checkDuplicate whether to reject a UDF that already exists in the catalog
 * @param builtin        true for product-shipped UDFs, which have no jar to process
 * @throws Exception on digest, load, validation, or storage failures
 */
private void processUdf(InputStream inputStream, UDF udf, boolean checkDuplicate, boolean builtin)
        throws Exception {
    // Built-in UDFs ship with the product: nothing to hash or upload.
    if (builtin) {
        udf.setDigest("builtin");
        udf.setJarStoragePath("builtin");
        checkDuplicate(udf);
        return;
    }
    // Hash the uploaded jar while spooling it to a temp file in a single pass.
    MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
    File jarFile;
    try (DigestInputStream digestStream = new DigestInputStream(inputStream, sha256)) {
        jarFile = FileUtil.writeInputStreamToTempFile(digestStream, ".jar");
    }
    // Load the classes from the jar, then validate and annotate the UDF metadata.
    Map<String, Class<?>> loadedUdfs = catalogService.loadUdfsFromJar(jarFile);
    validateUDF(new HashSet<>(ProxyUtil.canonicalNames(loadedUdfs.values())), udf, checkDuplicate);
    updateTypeInfo(udf, loadedUdfs.get(udf.getClassName()));
    String digest = Hex.encodeHexString(sha256.digest());
    LOG.debug("Digest: {}", digest);
    udf.setDigest(digest);
    // Reuse an already-stored jar with the same digest, otherwise upload this one.
    String jarPath = getExistingJarPath(digest).orElseGet(() -> uploadJar(jarFile, udf.getName()));
    if (!fileStorage.exists(jarPath)) {
        String msg = String.format("The jar path '%s' does not exist. "
                + "You may have to reset the db and run bootstrap again.", jarPath);
        LOG.error(msg);
        throw new RuntimeException(msg);
    }
    udf.setJarStoragePath(jarPath);
}
From source file:de.tudarmstadt.ukp.dkpro.core.api.datasets.DatasetFactory.java
/**
 * Computes the lowercase hex digest of a file's contents.
 *
 * @param aFile   path of the file to digest
 * @param aDigest MessageDigest algorithm name (e.g. "MD5", "SHA-1")
 * @return the digest as a lowercase hexadecimal string
 * @throws IOException if the file cannot be read, or the algorithm name is unknown
 */
private String getDigest(Path aFile, String aDigest) throws IOException {
    // Resolve the algorithm first so a bad name fails before any I/O happens.
    MessageDigest digest;
    try {
        digest = MessageDigest.getInstance(aDigest);
    } catch (NoSuchAlgorithmException e) {
        // Surface an unknown algorithm as an I/O failure, per the method contract.
        throw new IOException(e);
    }
    // Drain the file through the digest stream; the bytes themselves are discarded.
    try (InputStream is = new DigestInputStream(Files.newInputStream(aFile), digest)) {
        byte[] scratch = new byte[8192];
        while (is.read(scratch) != -1) {
            // reading drives the digest as a side effect
        }
    }
    // Lowercase hex, matching commons-codec's Hex.encodeHex output.
    StringBuilder hex = new StringBuilder();
    for (byte b : digest.digest()) {
        hex.append(String.format("%02x", b));
    }
    return hex.toString();
}
From source file:org.apache.jackrabbit.core.data.db.DbDataStore.java
/**
 * Stores the given stream as a new data record and returns its metadata.
 *
 * The record is first written under a random temporary id, then renamed to its
 * content-addressed id (hex of the digest). The retry loops below deliberately use
 * exception handling and timestamp comparisons to cope with concurrent writers and
 * the garbage collector — do not simplify without understanding those interactions.
 *
 * @param stream the content to store; consumed (and possibly spooled to a temp file)
 * @return a record whose identifier is the content digest
 * @throws DataStoreException on any database or I/O failure
 */
public DataRecord addRecord(InputStream stream) throws DataStoreException {
    // NOTE(review): fileInput is never assigned in this method, so the close block in
    // the finally clause is dead code — candidate for removal.
    InputStream fileInput = null;
    String tempId = null;
    ResultSet rs = null;
    try {
        long tempModified;
        // Reserve a unique temporary row to stream the data into.
        while (true) {
            try {
                tempModified = System.currentTimeMillis();
                String id = UUID.randomUUID().toString();
                tempId = TEMP_PREFIX + id;
                temporaryInUse.add(tempId);
                // SELECT LENGTH, LAST_MODIFIED FROM DATASTORE WHERE ID=?
                rs = conHelper.query(selectMetaSQL, tempId);
                boolean hasNext = rs.next();
                DbUtility.close(rs);
                rs = null;
                if (hasNext) {
                    // re-try in the very, very unlikely event that the row already exists
                    continue;
                }
                // INSERT INTO DATASTORE VALUES(?, 0, ?, NULL)
                conHelper.exec(insertTempSQL, tempId, tempModified);
                break;
            } catch (Exception e) {
                throw convert("Can not insert new record", e);
            } finally {
                DbUtility.close(rs);
                // prevent that rs.close() is called again
                rs = null;
            }
        }
        // Digest and count bytes while streaming; the digest becomes the record id.
        MessageDigest digest = getDigest();
        DigestInputStream dIn = new DigestInputStream(stream, digest);
        CountingInputStream in = new CountingInputStream(dIn);
        // The configured store algorithm decides how the stream length is presented
        // to the database driver (unknown, max, or exact via a temp file).
        StreamWrapper wrapper;
        if (STORE_SIZE_MINUS_ONE.equals(storeStream)) {
            wrapper = new StreamWrapper(in, -1);
        } else if (STORE_SIZE_MAX.equals(storeStream)) {
            wrapper = new StreamWrapper(in, Integer.MAX_VALUE);
        } else if (STORE_TEMP_FILE.equals(storeStream)) {
            File temp = moveToTempFile(in);
            long length = temp.length();
            wrapper = new StreamWrapper(new ResettableTempFileInputStream(temp), length);
        } else {
            throw new DataStoreException("Unsupported stream store algorithm: " + storeStream);
        }
        // UPDATE DATASTORE SET DATA=? WHERE ID=?
        conHelper.exec(updateDataSQL, wrapper, tempId);
        long length = in.getByteCount();
        DataIdentifier identifier = new DataIdentifier(encodeHexString(digest.digest()));
        usesIdentifier(identifier);
        String id = identifier.toString();
        long newModified;
        // Promote the temporary row to its content-addressed id, or, if an identical
        // record already exists, touch it and drop the temporary row.
        while (true) {
            newModified = System.currentTimeMillis();
            if (checkExisting(tempId, length, identifier)) {
                touch(identifier, newModified);
                conHelper.exec(deleteSQL, tempId);
                break;
            }
            try {
                // UPDATE DATASTORE SET ID=?, LENGTH=?, LAST_MODIFIED=?
                // WHERE ID=? AND LAST_MODIFIED=?
                int count = conHelper.update(updateSQL, id, length, newModified, tempId, tempModified);
                // If update count is 0, the last modified time of the
                // temporary row was changed - which means we need to
                // re-try using a new last modified date (a later one)
                // because we need to ensure the new last modified date
                // is _newer_ than the old (otherwise the garbage
                // collection could delete rows)
                if (count != 0) {
                    // update was successful
                    break;
                }
            } catch (SQLException e) {
                // duplicate key (the row already exists) - repeat
                // we use exception handling for flow control here, which is bad,
                // but the alternative is to use UPDATE ... WHERE ... (SELECT ...)
                // which could cause a deadlock in some databases - also,
                // duplicate key will only occur if somebody else concurrently
                // added the same record (which is very unlikely)
            }
            // SELECT LENGTH, LAST_MODIFIED FROM DATASTORE WHERE ID=?
            rs = conHelper.query(selectMetaSQL, tempId);
            if (!rs.next()) {
                // the row was deleted, which is unexpected / not allowed
                String msg = DIGEST + " temporary entry deleted: " + " id=" + tempId + " length=" + length;
                log.error(msg);
                throw new DataStoreException(msg);
            }
            tempModified = rs.getLong(2);
            DbUtility.close(rs);
            rs = null;
        }
        usesIdentifier(identifier);
        DbDataRecord record = new DbDataRecord(this, identifier, length, newModified);
        return record;
    } catch (Exception e) {
        throw convert("Can not insert new record", e);
    } finally {
        // Release the temporary-id reservation and any open result set.
        if (tempId != null) {
            temporaryInUse.remove(tempId);
        }
        DbUtility.close(rs);
        if (fileInput != null) {
            try {
                fileInput.close();
            } catch (IOException e) {
                throw convert("Can not close temporary file", e);
            }
        }
    }
}
From source file:edu.mit.lib.bagit.Loader.java
private String checksum(File file, String csAlg) throws IOException { byte[] buf = new byte[2048]; int num = 0;/*from ww w . j a v a2 s .co m*/ // wrap stream in digest stream try (FileInputStream is = new FileInputStream(file); DigestInputStream dis = new DigestInputStream(is, MessageDigest.getInstance(csAlg))) { while (num != -1) { num = dis.read(buf); } return toHex(dis.getMessageDigest().digest()); } catch (NoSuchAlgorithmException nsaE) { throw new IOException("no algorithm: " + csAlg); } }