List of usage examples for java.security.DigestInputStream
public DigestInputStream(InputStream stream, MessageDigest digest)
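This constructor wraps an existing InputStream so that every byte read through it also updates the supplied MessageDigest. Before the project examples below, here is a minimal, self-contained sketch of that contract; the file name "data.bin" and the SHA-256 algorithm choice are illustrative assumptions, not taken from any of the projects on this page.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class BasicDigestInputStreamExample {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
        // "data.bin" is a placeholder path; any InputStream works the same way
        try (InputStream in = new DigestInputStream(new FileInputStream("data.bin"), sha256)) {
            byte[] buffer = new byte[8192];
            while (in.read(buffer) != -1) {
                // process the bytes as usual; the digest updates as a side effect of reading
            }
        }
        // after the stream has been fully read, the digest holds the hash of its contents
        byte[] hash = sha256.digest();
        StringBuilder hex = new StringBuilder();
        for (byte b : hash) {
            hex.append(String.format("%02x", b));
        }
        System.out.println("SHA-256: " + hex);
    }
}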
From source file:org.apache.nifi.registry.service.extension.StandardExtensionService.java
@Override
public ExtensionBundleVersion createExtensionBundleVersion(final String bucketIdentifier,
        final ExtensionBundleType bundleType, final InputStream inputStream, final String clientSha256)
        throws IOException {
    if (StringUtils.isBlank(bucketIdentifier)) {
        throw new IllegalArgumentException("Bucket identifier cannot be null or blank");
    }
    if (bundleType == null) {
        throw new IllegalArgumentException("Bundle type cannot be null");
    }
    if (inputStream == null) {
        throw new IllegalArgumentException("Extension bundle input stream cannot be null");
    }
    if (!extractors.containsKey(bundleType)) {
        throw new IllegalArgumentException("No metadata extractor is registered for bundle-type: " + bundleType);
    }

    // ensure the bucket exists
    final BucketEntity existingBucket = metadataService.getBucketById(bucketIdentifier);
    if (existingBucket == null) {
        LOGGER.warn("The specified bucket id [{}] does not exist.", bucketIdentifier);
        throw new ResourceNotFoundException("The specified bucket ID does not exist in this registry.");
    }

    // ensure the extensions directory exists and we can read and write to it
    FileUtils.ensureDirectoryExistAndCanReadAndWrite(extensionsWorkingDir);

    final String extensionWorkingFilename = UUID.randomUUID().toString();
    final File extensionWorkingFile = new File(extensionsWorkingDir, extensionWorkingFilename);
    LOGGER.debug("Writing bundle contents to working directory at {}",
            new Object[] { extensionWorkingFile.getAbsolutePath() });

    try {
        // write the contents of the input stream to a temporary file in the extensions working directory
        final MessageDigest sha256Digest = DigestUtils.getSha256Digest();
        try (final DigestInputStream digestInputStream = new DigestInputStream(inputStream, sha256Digest);
                final OutputStream out = new FileOutputStream(extensionWorkingFile)) {
            IOUtils.copy(digestInputStream, out);
        }

        // get the hex of the SHA-256 computed by the server and compare to the client provided SHA-256, if one was provided
        final String sha256Hex = Hex.encodeHexString(sha256Digest.digest());
        final boolean sha256Supplied = !StringUtils.isBlank(clientSha256);
        if (sha256Supplied && !sha256Hex.equalsIgnoreCase(clientSha256)) {
            LOGGER.error("Client provided SHA-256 of '{}', but server calculated '{}'",
                    new Object[] { clientSha256, sha256Hex });
            throw new IllegalStateException(
                    "The SHA-256 of the received extension bundle does not match the SHA-256 provided by the client");
        }

        // extract the details of the bundle from the temp file in the working directory
        final BundleDetails bundleDetails;
        try (final InputStream in = new FileInputStream(extensionWorkingFile)) {
            final BundleExtractor extractor = extractors.get(bundleType);
            bundleDetails = extractor.extract(in);
        }

        final BundleCoordinate bundleCoordinate = bundleDetails.getBundleCoordinate();
        final Set<BundleCoordinate> dependencyCoordinates = bundleDetails.getDependencyBundleCoordinates();

        final String groupId = bundleCoordinate.getGroupId();
        final String artifactId = bundleCoordinate.getArtifactId();
        final String version = bundleCoordinate.getVersion();

        final boolean isSnapshotVersion = version.endsWith(SNAPSHOT_VERSION_SUFFIX);
        final boolean overwriteBundleVersion = isSnapshotVersion || existingBucket.isAllowExtensionBundleRedeploy();

        LOGGER.debug("Extracted bundle details - '{}' - '{}' - '{}'",
                new Object[] { groupId, artifactId, version });

        // a bundle with the same group, artifact, and version can exist in multiple buckets, but only if it
        // contains the same binary content, or if it's a snapshot version; we can determine that by comparing
        // the SHA-256 digest of the incoming bundle against existing bundles with the same group, artifact, version
        final List<ExtensionBundleVersionEntity> allExistingVersions = metadataService
                .getExtensionBundleVersionsGlobal(groupId, artifactId, version);
        for (final ExtensionBundleVersionEntity existingVersionEntity : allExistingVersions) {
            if (!existingVersionEntity.getSha256Hex().equals(sha256Hex) && !isSnapshotVersion) {
                throw new IllegalStateException(
                        "Found existing extension bundle with same group, artifact, and version, but different SHA-256 checksums");
            }
        }

        // get the existing extension bundle entity, or create a new one if one does not exist in the bucket with the group + artifact
        final long currentTime = System.currentTimeMillis();
        final ExtensionBundleEntity extensionBundle = getOrCreateExtensionBundle(bucketIdentifier, groupId,
                artifactId, bundleType, currentTime);

        // check if the version of the incoming bundle already exists in the bucket.
        // if it exists and it is a snapshot version or the bucket allows redeploying, then first delete the row
        // in the extension_bundle_version table so we can create a new one;
        // otherwise we throw an exception because we don't allow the same version in the same bucket
        final ExtensionBundleVersionEntity existingVersion = metadataService
                .getExtensionBundleVersion(bucketIdentifier, groupId, artifactId, version);
        if (existingVersion != null) {
            if (overwriteBundleVersion) {
                metadataService.deleteExtensionBundleVersion(existingVersion);
            } else {
                LOGGER.warn("The specified version [{}] already exists for extension bundle [{}].",
                        new Object[] { version, extensionBundle.getId() });
                throw new IllegalStateException("The specified version already exists for the given extension bundle");
            }
        }

        // create the version metadata instance and validate it has all the required fields
        final String userIdentity = NiFiUserUtils.getNiFiUserIdentity();
        final ExtensionBundleVersionMetadata versionMetadata = new ExtensionBundleVersionMetadata();
        versionMetadata.setId(UUID.randomUUID().toString());
        versionMetadata.setExtensionBundleId(extensionBundle.getId());
        versionMetadata.setBucketId(bucketIdentifier);
        versionMetadata.setVersion(version);
        versionMetadata.setTimestamp(currentTime);
        versionMetadata.setAuthor(userIdentity);
        versionMetadata.setSha256(sha256Hex);
        versionMetadata.setSha256Supplied(sha256Supplied);
        versionMetadata.setContentSize(extensionWorkingFile.length());

        validate(versionMetadata, "Cannot create extension bundle version");

        // create the version dependency instances and validate they have the required fields
        final Set<ExtensionBundleVersionDependency> versionDependencies = new HashSet<>();
        for (final BundleCoordinate dependencyCoordinate : dependencyCoordinates) {
            final ExtensionBundleVersionDependency versionDependency = new ExtensionBundleVersionDependency();
            versionDependency.setGroupId(dependencyCoordinate.getGroupId());
            versionDependency.setArtifactId(dependencyCoordinate.getArtifactId());
            versionDependency.setVersion(dependencyCoordinate.getVersion());
            validate(versionDependency, "Cannot create extension bundle version dependency");
            versionDependencies.add(versionDependency);
        }

        // create the bundle version in the metadata db
        final ExtensionBundleVersionEntity versionEntity = DataModelMapper.map(versionMetadata);
        metadataService.createExtensionBundleVersion(versionEntity);

        // create the bundle version dependencies in the metadata db
        for (final ExtensionBundleVersionDependency versionDependency : versionDependencies) {
            final ExtensionBundleVersionDependencyEntity versionDependencyEntity = DataModelMapper.map(versionDependency);
            versionDependencyEntity.setId(UUID.randomUUID().toString());
            versionDependencyEntity.setExtensionBundleVersionId(versionEntity.getId());
            metadataService.createDependency(versionDependencyEntity);
        }

        // persist the content of the bundle to the persistence provider
        final ExtensionBundleContext context = new StandardExtensionBundleContext.Builder()
                .bundleType(getProviderBundleType(bundleType))
                .bucketId(existingBucket.getId())
                .bucketName(existingBucket.getName())
                .bundleId(extensionBundle.getId())
                .bundleGroupId(extensionBundle.getGroupId())
                .bundleArtifactId(extensionBundle.getArtifactId())
                .bundleVersion(versionMetadata.getVersion())
                .author(versionMetadata.getAuthor())
                .timestamp(versionMetadata.getTimestamp())
                .build();

        try (final InputStream in = new FileInputStream(extensionWorkingFile);
                final InputStream bufIn = new BufferedInputStream(in)) {
            bundlePersistenceProvider.saveBundleVersion(context, bufIn, overwriteBundleVersion);
            LOGGER.debug("Bundle saved to persistence provider - '{}' - '{}' - '{}'",
                    new Object[] { groupId, artifactId, version });
        }

        // get the updated extension bundle so it contains the correct version count
        final ExtensionBundleEntity updatedBundle = metadataService.getExtensionBundle(bucketIdentifier, groupId, artifactId);

        // create the full ExtensionBundleVersion instance to return
        final ExtensionBundleVersion extensionBundleVersion = new ExtensionBundleVersion();
        extensionBundleVersion.setVersionMetadata(versionMetadata);
        extensionBundleVersion.setExtensionBundle(DataModelMapper.map(existingBucket, updatedBundle));
        extensionBundleVersion.setBucket(DataModelMapper.map(existingBucket));
        extensionBundleVersion.setDependencies(versionDependencies);
        return extensionBundleVersion;
    } finally {
        if (extensionWorkingFile.exists()) {
            try {
                extensionWorkingFile.delete();
            } catch (Exception e) {
                LOGGER.warn("Error removing temporary extension bundle file at {}",
                        new Object[] { extensionWorkingFile.getAbsolutePath() });
            }
        }
    }
}
From source file:com.microsoftopentechnologies.windowsazurestorage.WAStorageClient.java
/**
 * @param listener
 * @param blob
 * @param src
 * @return MD5 hash of the uploaded file in hexadecimal encoding
 * @throws StorageException
 * @throws IOException
 * @throws InterruptedException
 */
protected static String upload(TaskListener listener, CloudBlockBlob blob, FilePath src)
        throws StorageException, IOException, InterruptedException {
    MessageDigest md = DigestUtils.getMd5Digest();
    long startTime = System.currentTimeMillis();
    try (InputStream inputStream = src.read();
            DigestInputStream digestInputStream = new DigestInputStream(inputStream, md)) {
        blob.upload(digestInputStream, src.length(), null, getBlobRequestOptions(), Utils.updateUserAgent());
    }
    long endTime = System.currentTimeMillis();
    listener.getLogger()
            .println("Uploaded blob with uri " + blob.getUri() + " in " + getTime(endTime - startTime));
    return DatatypeConverter.printHexBinary(md.digest());
}
From source file:org.dataconservancy.dcs.ingest.client.impl.DualManagerDeposit.java
void uploadFile(DcsFile file, String path) {
    try {
        File physicalFile = new File(path);
        DigestInputStream digestStream = new DigestInputStream(new FileInputStream(path),
                MessageDigest.getInstance(checksumAlgorithm));
        String mime = MimeUtil.getMostSpecificMimeType(MimeUtil.getMimeTypes(new File(path))).toString();

        /*
         * Proceed to the end of the input stream if we haven't got there
         * already. We need to visit every byte in order to have calculated
         * the digest.
         */
        if (digestStream.read() != -1) {
            byte[] buf = new byte[1024];
            while (digestStream.read(buf) != -1)
                ;
        }
        byte[] digest = digestStream.getMessageDigest().digest();

        /* Set the file name */
        file.setName(physicalFile.getName());

        /* Set the calculated fixity */
        DcsFixity fixity = new DcsFixity();
        fixity.setAlgorithm(checksumAlgorithm);
        fixity.setValue(new String(Hex.encodeHex(digest)));
        file.addFixity(fixity);

        /* Set the format */
        DcsFormat format = new DcsFormat();
        format.setSchemeUri("http://www.iana.org/assignments/media-types/");
        format.setFormat(mime);
        file.addFormat(format);

        long length = physicalFile.length();
        file.setSource(doUpload(path, mime, digest, length));
        file.setSizeBytes(length);
        file.setExtant(true);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
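As the comment in the example above notes, getMessageDigest() only reflects bytes that have actually been read through the stream, which is why the code drains it to the end. DigestInputStream also has an on(boolean) method that temporarily suspends digest updates, which can be used to exclude part of a stream from the hash. The following sketch illustrates both points; the 16-byte header layout is a made-up assumption for the example, not part of any project on this page.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class SelectiveDigestExample {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        byte[] data = new byte[1024]; // pretend payload: hypothetical 16-byte header followed by a body
        try (DigestInputStream dis = new DigestInputStream(
                new ByteArrayInputStream(data), MessageDigest.getInstance("SHA-256"))) {
            dis.on(false);                 // suspend hashing while reading the header
            byte[] header = new byte[16];
            dis.read(header);              // consume the header without digesting it
            dis.on(true);                  // resume hashing for the body

            byte[] buf = new byte[256];
            while (dis.read(buf) != -1) {
                // drain the rest of the stream; only these bytes contribute to the digest
            }
            byte[] bodyHash = dis.getMessageDigest().digest();
            System.out.println("body hash is " + bodyHash.length + " bytes");
        }
    }
}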
From source file:org.eclipse.smarthome.core.thing.binding.firmware.Firmware.java
/**
 * Returns the binary content of the firmware using the firmware's input stream. If the firmware provides an
 * MD5 hash value then this operation will also validate the MD5 checksum of the firmware.
 *
 * @return the binary content of the firmware (can be null)
 *
 * @throws IllegalStateException if the MD5 hash value of the firmware is invalid
 */
public synchronized byte[] getBytes() {
    if (inputStream == null) {
        return null;
    }

    if (bytes == null) {
        try {
            MessageDigest md = MessageDigest.getInstance("MD5");

            try (DigestInputStream dis = new DigestInputStream(inputStream, md)) {
                bytes = IOUtils.toByteArray(dis);
            } catch (IOException ioEx) {
                logger.error(String.format("Cannot read firmware with UID %s.", uid), ioEx);
                return null;
            }

            byte[] digest = md.digest();

            if (md5Hash != null && digest != null) {
                StringBuilder digestString = new StringBuilder();
                for (byte b : digest) {
                    digestString.append(String.format("%02x", b));
                }
                if (!md5Hash.equals(digestString.toString())) {
                    bytes = null;
                    throw new IllegalStateException(String.format(
                            "Invalid MD5 checksum. Expected %s, but was %s.", md5Hash, digestString));
                }
            }
        } catch (NoSuchAlgorithmException e) {
            logger.error("Cannot calculate MD5 checksum.", e);
            bytes = null;
            return null;
        }
    }

    return bytes;
}
From source file:de.escidoc.bwelabs.deposit.DepositServiceSpec.java
private String calculateCheckSum(Configuration conf, InputStream is) throws NoSuchAlgorithmException, IOException {
    MessageDigest instance = MessageDigest
            .getInstance(conf.getProperty(Configuration.PROPERTY_CHECKSUM_ALGORITHM));
    DigestInputStream digest = new DigestInputStream(is, instance);
    // drain the stream so every byte passes through the digest
    // (a buffered read would be faster; this reads one byte at a time)
    int read = digest.read();
    while (read != -1) {
        read = digest.read();
    }
    return Utility.byteArraytoHexString(instance.digest());
}
From source file:dk.netarkivet.common.distribute.HTTPRemoteFile.java
/**
 * Get an input stream representing the remote file.
 * If the file resides on the current machine, the input stream is to the
 * local file. Otherwise, the remote file is transferred over HTTP.
 * The close method of the input stream will clean up this handle, and if
 * checksums are requested, will check the checksums on close.
 * If the file is not set to be transferable multiple times, it is
 * cleaned up after the transfer.
 *
 * @return An input stream for the remote file.
 * @throws IOFailure on I/O trouble generating an input stream for the remote file.
 * Also, the returned remote file will throw IOFailure on close, if
 * checksums are requested, but do not match.
 */
public InputStream getInputStream() {
    if (filesize == 0) {
        return new ByteArrayInputStream(new byte[] {});
    }
    try {
        InputStream is = null;
        if (isLocal()) {
            is = new FileInputStream(file);
        } else {
            URLConnection urlConnection = getRegistry().openConnection(url);
            // ensure we are not getting some cached version
            urlConnection.setUseCaches(false);
            is = urlConnection.getInputStream();
        }
        if (useChecksums) {
            is = new DigestInputStream(is, ChecksumCalculator.getMessageDigest(ChecksumCalculator.MD5));
        }
        return new FilterInputStream(is) {
            public void close() {
                if (useChecksums) {
                    String newChecksum = ChecksumCalculator
                            .toHex(((DigestInputStream) in).getMessageDigest().digest());
                    if (!newChecksum.equals(checksum)) {
                        throw new IOFailure("Checksum mismatch! Expected '" + checksum + "' but was '"
                                + newChecksum + "'");
                    }
                }
                if (!multipleDownloads) {
                    cleanup();
                }
            }
        };
    } catch (IOException e) {
        throw new IOFailure("Unable to get inputstream for '" + file + "' from '" + url + "'", e);
    }
}
From source file:edu.stanford.muse.datacache.BlobStore.java
/**
 * Add a new piece of data with content copied from is.
 * If owner is not set on the data, we will set it to this data store's owner.
 * Computes the content hash and sets it.
 * Will close the stream when done.
 * Performance critical!
 * Returns the # of bytes in the input stream.
 */
public long add(Blob blob, InputStream is) throws IOException {
    long nBytes = -1;

    /* we no longer assume that if 2 blobs have the same name and file size, their contents must be the same!
    synchronized (this) {
        if (this.contains(blob)) {
            log.info("Item is already present: " + blob);
            return nBytes;
        }
        add(blob);
        Util.ASSERT(this.contains(blob));
    }
    */

    // release the lock here because we don't want a long op like reading the object's stream
    // and storing the file to be serialized across threads.

    // copy the stream; if it throws IOException, then we cancel the item
    try {
        // this code is slightly tricky because blob equality (and therefore its membership in this blob store)
        // depends on the hash, which we don't have yet.
        // so we first read the stream into a temp file. As we read the temp file, we get its checksum. We use
        // the checksum to initialize the blob object completely and check if it already exists in the blob store.
        // if not, we assign it in the id_map etc. and then rename the temp file to the proper location in the blob store.
        DigestInputStream din = new DigestInputStream(is, MessageDigest.getInstance("SHA-1"));
        log.info(" adding file to blob store = " + blob.filename);
        Path tmpPath = Files.createTempFile(new File(System.getProperty("java.io.tmpdir")).toPath(), "epadd.", ".temp");
        File tmpFile = tmpPath.toFile();
        nBytes = Util.copy_stream_to_file(din, tmpFile.getAbsolutePath());

        // now set the blob size and digest
        blob.size = nBytes; // overwrite this -- earlier we had the part size stored in blob size

        // be careful -- add the blob only after its size and SHA-1 have been updated
        byte[] byteArray = din.getMessageDigest().digest();
        blob.setContentHash(byteArray);

        // ok, now the blob is finally set up and can be compared.
        // if the blob is already present, fetching the bytes was a waste, but there would be no other way to
        // know its checksum. we'll delete the file that is in the tmp dir
        if (uniqueBlobs.contains(blob)) {
            tmpFile.delete();
            return nBytes;
        }

        // blob doesn't already exist; add it, and move it from the temp dir to its actual place in the blob store
        add(blob);

        // move it from the temp file to the blobs dir. don't do this before add(blob), because full_filename
        // will not be set up correctly until the blob object can be looked up
        String destination = dir + File.separatorChar + full_filename(blob);
        Files.move(tmpPath, new File(destination).toPath());
    } catch (IOException ioe) {
        // we couldn't copy the stream to the data store, so undo everything
        Util.print_exception("IO Error copying blob to blobstore", ioe, log);
        remove(blob);
        Util.ASSERT(!this.contains(blob));
        throw ioe;
    } catch (NoSuchAlgorithmException nsae) {
        // we couldn't copy the stream to the data store, so undo everything
        remove(blob);
        Util.ASSERT(!this.contains(blob));
        throw new RuntimeException(nsae);
    }

    Util.ASSERT(this.contains(blob));

    // packing needs to be done more efficiently (batch mode or incremental)
    if ((uniqueBlobs.size() % 100) == 0)
        pack();

    return nBytes;
}
From source file:org.fcrepo.apix.integration.StreamingIT.java
/**
 * Verify the binary can be retrieved through the API-X proxy. The request should be intercepted and proxied
 * by API-X.
 *
 * @throws Exception if unexpected things go wrong
 */
@Test
public void testRetrieveLargeBinaryFromApix() throws Exception {
    // Record 'true' if the intercepting route is triggered
    final AtomicBoolean intercepted = new AtomicBoolean(false);
    ctx.getRouteDefinition(INTERCEPT_ROUTE_ID).adviceWith((ModelCamelContext) ctx, new AdviceWithRouteBuilder() {
        @Override
        public void configure() throws Exception {
            weaveAddFirst().process((ex) -> intercepted.set(true));
        }
    });

    final long expectedSize = (2 * 1024 * 1024) + 1;
    final long actualSize;
    final String actualDigest;
    final URI proxiedResource = proxied(binaryResource);

    try (FcrepoResponse r = KarafIT.attempt(30, () -> client.get(proxiedResource).perform());
            DigestInputStream body = new DigestInputStream(r.getBody(), sha1)) {
        actualSize = drain(body);
        actualDigest = asHex(body.getMessageDigest().digest());
    }

    // The request _was_ proxied by API-X
    assertTrue(String.format("Expected the retrieval of %s to be proxied by API-X, route id %s",
            proxiedResource, INTERCEPT_ROUTE_ID), intercepted.get());

    // And the resource can be retrieved intact
    assertEquals(expectedSize, actualSize);
    assertEquals(binaryResourceSha, actualDigest);
}
From source file:org.digitalcampus.oppia.service.DownloadService.java
private void downloadFile(String fileUrl, String filename, String fileDigest) {
    File downloadedFile = null;
    try {
        URL url = new URL(fileUrl);
        // If no filename was passed, we set the filename based on the URL
        if (filename == null) {
            filename = url.getPath().substring(url.getPath().lastIndexOf("/") + 1);
        }
        downloadedFile = new File(Storage.getMediaPath(this), filename);

        OkHttpClient client = HTTPClientUtils.getClient(this);
        Request request = new Request.Builder().url(fileUrl).build();
        Response response = client.newCall(request).execute();
        long fileLength = response.body().contentLength();
        long availableStorage = Storage.getAvailableStorageSize(this);

        if (fileLength >= availableStorage) {
            sendBroadcast(fileUrl, ACTION_FAILED, this.getString(R.string.error_insufficient_storage_available));
            removeDownloading(fileUrl);
            return;
        }

        FileOutputStream f = new FileOutputStream(downloadedFile);
        InputStream in = response.body().byteStream();

        MessageDigest mDigest = MessageDigest.getInstance("MD5");
        in = new DigestInputStream(in, mDigest);

        byte[] buffer = new byte[8192];
        int len1;
        long total = 0;
        int previousProgress = 0, progress = 0;
        while ((len1 = in.read(buffer)) > 0) {
            // If we received a cancel action while downloading, stop
            if (isCancelled(fileUrl)) {
                Log.d(TAG, "Media " + filename + " cancelled while downloading. Deleting temp file...");
                f.close();
                in.close();
                deleteFile(downloadedFile);
                removeCancelled(fileUrl);
                removeDownloading(fileUrl);
                return;
            }
            total += len1;
            progress = (int) ((total * 100) / fileLength);
            if ((progress > 0) && (progress > previousProgress)) {
                sendBroadcast(fileUrl, ACTION_DOWNLOAD, "" + progress);
                previousProgress = progress;
            }
            f.write(buffer, 0, len1);
        }
        f.close();
        in.close();

        if (fileDigest != null) {
            // check that the file digest matches, otherwise delete the file
            // (it's either been a corrupted download or it's the wrong file)
            byte[] digest = mDigest.digest();
            String resultMD5 = "";
            for (byte aDigest : digest) {
                resultMD5 += Integer.toString((aDigest & 0xff) + 0x100, 16).substring(1);
            }
            if (!resultMD5.contains(fileDigest)) {
                this.deleteFile(downloadedFile);
                sendBroadcast(fileUrl, ACTION_FAILED, this.getString(R.string.error_media_download));
                removeDownloading(fileUrl);
                return;
            }
        }
    } catch (MalformedURLException e) {
        logAndNotifyError(fileUrl, e);
        return;
    } catch (IOException e) {
        this.deleteFile(downloadedFile);
        logAndNotifyError(fileUrl, e);
        return;
    } catch (NoSuchAlgorithmException e) {
        Mint.logException(e);
        logAndNotifyError(fileUrl, e);
        return;
    }

    Log.d(TAG, fileUrl + " successfully downloaded");
    removeDownloading(fileUrl);
    sendBroadcast(fileUrl, ACTION_COMPLETE, null);
}
From source file:com.smartmarmot.dbforbix.config.Config.java
/**
 * Calculates the hash for the config file.
 *
 * @throws NullPointerException if the hash is null
 */
private void calculateFileConfigHash() throws NullPointerException {
    MessageDigest md = null;
    byte[] b = new byte[2048];
    try {
        md = MessageDigest.getInstance("MD5");
    } catch (NoSuchAlgorithmException e) {
        LOG.error("Wrong algorithm provided while getting instance of MessageDigest: " + e.getMessage());
    }

    // try-with-resources: the streams are closed automatically after exiting the try block
    try (InputStream is = Files.newInputStream(Paths.get(getConfigFile()));
            DigestInputStream dis = new DigestInputStream(is, md)) {
        // drain the stream so the digest sees every byte
        while (dis.read(b) >= 0)
            ;
    } catch (IOException e) {
        LOG.error("Something has happened reading file: " + e.getLocalizedMessage());
    }

    try {
        setFileConfigHash((new HexBinaryAdapter()).marshal(md.digest()));
    } catch (Exception e) {
        LOG.error("Something has happened converting md5 sum to string: " + e.getLocalizedMessage());
    }

    if (null == getFileConfigHash())
        throw new NullPointerException("Hash for config file is null!");
}