List of usage examples for java.security.MessageDigest.getAlgorithm()
public final String getAlgorithm()
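getAlgorithm() returns the name of the algorithm this MessageDigest implements, i.e. the name it was requested with, such as "SHA-256" or "MD5". A minimal sketch, independent of the projects listed below:

// Minimal sketch (not from any of the projects below): getAlgorithm() reports the
// name the digest was requested with via MessageDigest.getInstance().
public static String reportAlgorithm() throws NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    return md.getAlgorithm(); // "SHA-256"
}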
From source file:com.alfaariss.oa.util.saml2.crypto.SAML2CryptoUtils.java
/**
 * Returns the digest method algorithm URI for the supplied message digest.
 *
 * @param messageDigest The message digest.
 * @return The XML digest method URI to use when generating signatures.
 * @throws OAException if the algorithm is not supported.
 */
public static String getXMLDigestMethodURI(MessageDigest messageDigest) throws OAException {
    String sDigestAlgorithm = messageDigest.getAlgorithm();
    sDigestAlgorithm = sDigestAlgorithm.replace("-", "");
    if (sDigestAlgorithm.equalsIgnoreCase("SHA1"))
        return SignatureConstants.ALGO_ID_DIGEST_SHA1;
    else if (sDigestAlgorithm.equalsIgnoreCase("SHA256"))
        return EncryptionConstants.ALGO_ID_DIGEST_SHA256;
    else if (sDigestAlgorithm.equalsIgnoreCase("SHA384"))
        return SignatureConstants.ALGO_ID_DIGEST_SHA384;
    else if (sDigestAlgorithm.equalsIgnoreCase("SHA512"))
        return EncryptionConstants.ALGO_ID_DIGEST_SHA512;
    else if (sDigestAlgorithm.equalsIgnoreCase("MD5"))
        return SignatureConstants.ALGO_ID_DIGEST_NOT_RECOMMENDED_MD5;
    else {
        // DD Only a limited number of digest algorithms are supported in OA SAML2
        _logger.error("Unsupported message digest algorithm: " + sDigestAlgorithm);
        throw new OAException(SystemErrors.ERROR_INTERNAL);
    }
}
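A hypothetical call site for the helper above (not part of the original project). It also shows why the hyphen is stripped: getAlgorithm() returns standard names such as "SHA-256", while the comparisons above expect "SHA256".

// Hypothetical call site, for illustration only:
public static String sha256DigestMethodUri() throws NoSuchAlgorithmException, OAException {
    MessageDigest md = MessageDigest.getInstance("SHA-256"); // getAlgorithm() will return "SHA-256"
    return SAML2CryptoUtils.getXMLDigestMethodURI(md);       // "SHA-256" is normalised to "SHA256" internally
}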
From source file:de.ks.flatadocdb.defaults.DefaultIdGenerator.java
public String getSha1Hash(Path repository, Path targetPath) {
    //for debugging and readability the string is used instead of the byte array. Also avoids possible programming errors using the byte array in equals/hashcode.
    String relative = getRelativePath(repository, targetPath);
    MessageDigest digest = sha1.get();
    byte[] checksum = digest.digest(relative.getBytes(Charsets.UTF_16));
    String hexString = Hex.encodeHexString(checksum);
    log.trace("Generated {} \"{}\" for {}", digest.getAlgorithm(), hexString, relative);
    return hexString;
}
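The sha1 field used above is not part of this excerpt. A plausible sketch, assuming it is a per-thread holder (MessageDigest instances are stateful and not thread-safe):

// Assumed declaration backing sha1.get() above (not shown in the listing):
// one MessageDigest per thread, created lazily.
private static final ThreadLocal<MessageDigest> sha1 = ThreadLocal.withInitial(() -> {
    try {
        return MessageDigest.getInstance("SHA-1"); // getAlgorithm() will later report "SHA-1"
    } catch (NoSuchAlgorithmException e) {
        throw new IllegalStateException("SHA-1 is required on every JRE", e);
    }
});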
From source file:eu.peppol.document.PayloadDigestCalculator.java
public static MessageDigestResult calcDigest(String algorithm, StandardBusinessDocumentHeader sbdh, InputStream inputStream) {
    MessageDigest messageDigest;
    try {
        messageDigest = MessageDigest.getInstance(algorithm);
    } catch (NoSuchAlgorithmException e) {
        throw new IllegalStateException("Unknown digest algorithm " + algorithm + " " + e.getMessage(), e);
    }

    InputStream inputStreamToCalculateDigestFrom;
    ManifestItem manifestItem = SbdhFastParser.searchForAsicManifestItem(sbdh);
    if (manifestItem != null) {
        // Creates a FilterInputStream, which will extract the ASiC in binary format.
        inputStreamToCalculateDigestFrom = new Base64InputStream(new AsicFilterInputStream(inputStream));
    } else {
        inputStreamToCalculateDigestFrom = inputStream;
    }

    DigestInputStream digestInputStream = new DigestInputStream(
            new BufferedInputStream(inputStreamToCalculateDigestFrom), messageDigest);
    try {
        IOUtils.copy(digestInputStream, new NullOutputStream());
    } catch (IOException e) {
        throw new IllegalStateException("Unable to calculate digest for payload", e);
    }
    return new MessageDigestResult(messageDigest.digest(), messageDigest.getAlgorithm());
}
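Stripped of the Oxalis-specific types (SBDH parsing, ASiC extraction, MessageDigestResult), the core pattern is to drain the stream through a DigestInputStream and then pair the digest bytes with getAlgorithm(). A self-contained sketch, with a method name of our own choosing:

// Sketch of the same pattern without the Oxalis-specific types (method name is ours):
public static byte[] digestStream(String algorithm, InputStream in) throws IOException, NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance(algorithm);
    try (DigestInputStream dis = new DigestInputStream(new BufferedInputStream(in), md)) {
        byte[] buffer = new byte[8192];
        while (dis.read(buffer) != -1) {
            // Reading is enough: the DigestInputStream updates the digest as a side effect.
        }
    }
    System.out.println("Computed " + md.getAlgorithm() + " over the stream");
    return md.digest();
}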
From source file:fr.pilato.elasticsearch.crawler.fs.tika.TikaDocParser.java
public static void generate(FsSettings fsSettings, InputStream inputStream, String filename, Doc doc,
                            MessageDigest messageDigest, long filesize) throws IOException {
    logger.trace("Generating document [{}]", filename);
    // Extracting content with Tika
    // See #38: https://github.com/dadoonet/fscrawler/issues/38
    int indexedChars = 100000;
    if (fsSettings.getFs().getIndexedChars() != null) {
        if (fsSettings.getFs().getIndexedChars().percentage()) {
            indexedChars = (int) Math.round(filesize * fsSettings.getFs().getIndexedChars().asDouble());
            logger.trace("using percentage [{}] to define indexed chars: [{}]",
                    fsSettings.getFs().getIndexedChars(), indexedChars);
        } else {
            indexedChars = (int) fsSettings.getFs().getIndexedChars().value();
            logger.trace("indexed chars [{}]",
                    indexedChars == -1 ? "has been disabled. All text will be extracted" : indexedChars);
        }
    }

    Metadata metadata = new Metadata();
    String parsedContent = null;

    if (messageDigest != null) {
        logger.trace("Generating hash with [{}]", messageDigest.getAlgorithm());
        inputStream = new DigestInputStream(inputStream, messageDigest);
    }

    ByteArrayOutputStream bos = null;
    if (fsSettings.getFs().isStoreSource()) {
        logger.debug("Using a TeeInputStream as we need to store the source");
        bos = new ByteArrayOutputStream();
        inputStream = new TeeInputStream(inputStream, bos);
    }

    try {
        // Set the maximum length of strings returned by the parseToString method, -1 sets no limit
        logger.trace("Beginning Tika extraction");
        parsedContent = tika().parseToString(inputStream, metadata, indexedChars);
        logger.trace("End of Tika extraction");
    } catch (Throwable e) {
        logger.debug("Failed to extract [" + indexedChars + "] characters of text for [" + filename + "]", e);
    }

    // Adding what we found to the document we want to index

    // File
    doc.getFile().setContentType(metadata.get(Metadata.CONTENT_TYPE));
    doc.getFile().setExtension(FilenameUtils.getExtension(filename));

    // We only add `indexed_chars` if we have other value than default or -1
    if (fsSettings.getFs().getIndexedChars() != null && fsSettings.getFs().getIndexedChars().value() != -1) {
        doc.getFile().setIndexedChars(indexedChars);
    }

    if (fsSettings.getFs().isAddFilesize()) {
        if (metadata.get(Metadata.CONTENT_LENGTH) != null) {
            // We try to get CONTENT_LENGTH from Tika first
            doc.getFile().setFilesize(Long.parseLong(metadata.get(Metadata.CONTENT_LENGTH)));
        }
    }
    if (messageDigest != null) {
        byte[] digest = messageDigest.digest();
        String result = "";
        // Convert to Hexa
        for (int i = 0; i < digest.length; i++) {
            result += Integer.toString((digest[i] & 0xff) + 0x100, 16).substring(1);
        }
        doc.getFile().setChecksum(result);
    }
    // File

    // Meta
    doc.getMeta().setAuthor(metadata.get(TikaCoreProperties.CREATOR));
    doc.getMeta().setTitle(metadata.get(TikaCoreProperties.TITLE));
    String sDate = metadata.get(TikaCoreProperties.MODIFIED);
    if (sDate != null) {
        try {
            LocalDateTime date = LocalDateTime.parse(sDate, DateTimeFormatter.ISO_DATE_TIME);
            doc.getMeta().setDate(date);
        } catch (DateTimeParseException e) {
            logger.warn("Can not parse date [{}] for [{}]. Skipping date field...", sDate, filename);
        }
    }
    doc.getMeta().setKeywords(commaDelimitedListToStringArray(metadata.get(TikaCoreProperties.KEYWORDS)));

    if (fsSettings.getFs().isRawMetadata()) {
        logger.trace("Listing all available metadata:");
        for (String metadataName : metadata.names()) {
            String value = metadata.get(metadataName);
            // This is a logger trick which helps to generate our unit tests
            // You need to change test/resources/log4j2.xml fr.pilato.elasticsearch.crawler.fs.tika level to trace
            logger.trace(" assertThat(raw, hasEntry(\"{}\", \"{}\"));", metadataName, value);
            doc.getMeta().addRaw(metadataName, value);
        }
    }
    // Meta

    // Doc content
    doc.setContent(parsedContent);

    // Doc as binary attachment
    if (fsSettings.getFs().isStoreSource()) {
        doc.setAttachment(Base64.getEncoder().encodeToString(bos.toByteArray()));
    }
    logger.trace("End document generation");
    // End of our document
}
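The checksum loop above adds 0x100 before converting to base 16 and then drops the leading "1", which is simply a way of keeping leading zeros. An equivalent helper, shown only for comparison (the method name is ours):

// Equivalent hex encoding, for comparison with the loop above:
static String toHex(byte[] digest) {
    StringBuilder sb = new StringBuilder(digest.length * 2);
    for (byte b : digest) {
        // (b & 0xff) + 0x100 forces a three-digit value and substring(1) drops the leading "1";
        // String.format("%02x", b) achieves the same zero padding.
        sb.append(String.format("%02x", b));
    }
    return sb.toString();
}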
From source file:com.alfaariss.oa.profile.aselect.processor.handler.AbstractAPIHandler.java
/**
 * Resolves the value for the uid parameter of the A-Select protocol.
 *
 * @param oUser The User object
 * @param oASRequestorPool Requestor Pool object or <code>null</code>
 * @param oRequestorPool OA Requestorpool
 * @param oRequestor OA Requestor
 * @return the resolved uid value
 * @throws ASelectException if no uid can be resolved
 * @throws OAException if conversion to hexadecimal fails
 */
protected String getUid(IUser oUser, ASelectRequestorPool oASRequestorPool, RequestorPool oRequestorPool,
        IRequestor oRequestor) throws ASelectException, OAException {
    String sUid = oUser.getID();
    String sUidAttribute = (String) oRequestor.getProperty(_sProfileID + PROPERTY_UID_ATTRIBUTE);
    if (sUidAttribute == null) {
        if (oASRequestorPool != null)
            sUidAttribute = oASRequestorPool.getUidAttribute();
        if (sUidAttribute == null)
            sUidAttribute = (String) oRequestorPool.getProperty(_sProfileID + PROPERTY_UID_ATTRIBUTE);
    }

    if (sUidAttribute != null) {
        IAttributes oAttributes = oUser.getAttributes();
        sUid = (String) oAttributes.get(sUidAttribute);
        if (sUid == null) {
            StringBuffer sbError = new StringBuffer("Missing required attribute (");
            sbError.append(sUidAttribute);
            sbError.append(") to resolve uid for user with ID: ");
            sbError.append(oUser.getID());
            _logger.warn(sbError.toString());
            throw new ASelectException(ASelectErrors.ERROR_MISSING_REQUIRED_ATTRIBUTE);
        }
        // DD Remove the used attribute from the user attributes, so it will not be released to the application
        oAttributes.remove(sUidAttribute);
    }

    boolean bOpaqueUID = false;
    String sUIDOpaque = (String) oRequestor.getProperty(_sProfileID + PROPERTY_UID_OPAQUE_ENABLED);
    if (sUIDOpaque != null) {
        if ("TRUE".equalsIgnoreCase(sUIDOpaque))
            bOpaqueUID = true;
        else if (!"FALSE".equalsIgnoreCase(sUIDOpaque)) {
            StringBuffer sbError = new StringBuffer("Invalid value for requestor property '");
            sbError.append(_sProfileID);
            sbError.append(PROPERTY_UID_OPAQUE_ENABLED);
            sbError.append("': ");
            sbError.append(sUIDOpaque);
            _logger.error(sbError.toString());
            throw new OAException(SystemErrors.ERROR_INTERNAL);
        }
    } else {
        if (oASRequestorPool != null)
            bOpaqueUID = oASRequestorPool.isUidOpaque();
        if (!bOpaqueUID) {
            sUIDOpaque = (String) oRequestorPool.getProperty(_sProfileID + PROPERTY_UID_OPAQUE_ENABLED);
            if (sUIDOpaque != null) {
                if ("TRUE".equalsIgnoreCase(sUIDOpaque))
                    bOpaqueUID = true;
                else if (!"FALSE".equalsIgnoreCase(sUIDOpaque)) {
                    StringBuffer sbError = new StringBuffer("Invalid value for requestorpool property '");
                    sbError.append(_sProfileID);
                    sbError.append(PROPERTY_UID_OPAQUE_ENABLED);
                    sbError.append("': ");
                    sbError.append(sUIDOpaque);
                    _logger.error(sbError.toString());
                    throw new OAException(SystemErrors.ERROR_INTERNAL);
                }
            }
        }
    }

    if (bOpaqueUID) {
        String sSalt = (String) oRequestor.getProperty(_sProfileID + PROPERTY_UID_OPAQUE_SALT);
        if (sSalt == null) {
            if (oASRequestorPool != null)
                sSalt = oASRequestorPool.getUidOpaqueSalt();
            if (sSalt == null)
                sSalt = (String) oRequestorPool.getProperty(_sProfileID + PROPERTY_UID_OPAQUE_SALT);
        }
        if (sSalt != null)
            sUid = sUid + sSalt;

        // the returned user ID must contain an opaque value
        MessageDigest oMessageDigest = _cryptoManager.getMessageDigest();
        try {
            oMessageDigest.update(sUid.getBytes(ASelectProcessor.CHARSET));
            sUid = toHexString(oMessageDigest.digest());
        } catch (Exception e) {
            _logger.warn("Unable to generate '" + oMessageDigest.getAlgorithm() + "' hash from user ID: " + sUid, e);
            throw new OAException(SystemErrors.ERROR_INTERNAL);
        }
    }
    return sUid;
}
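The opaque-uid branch reduces to hashing the (optionally salted) user ID, with getAlgorithm() used when reporting failures. A minimal standalone sketch of that idea; the SHA-256 algorithm and UTF-8 charset are assumptions, since the real digest comes from _cryptoManager:

// Minimal sketch of the opaque-uid idea above (names, algorithm and charset are assumptions, not A-Select code):
public static String opaqueUid(String userId, String salt) throws NoSuchAlgorithmException {
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    String input = (salt == null) ? userId : userId + salt;
    md.update(input.getBytes(StandardCharsets.UTF_8));
    StringBuilder hex = new StringBuilder();
    for (byte b : md.digest()) {
        hex.append(String.format("%02x", b));
    }
    // As in the catch block above, getAlgorithm() is useful when logging hash failures.
    return hex.toString();
}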
From source file:org.abstracthorizon.proximity.maven.MavenProximityLogic.java
/**
 * Stores the digest as a temporary checksum file.
 *
 * @param req the proximity request
 * @param dig the message digest to store
 * @throws IOException Signals that an I/O exception has occurred.
 */
protected void storeDigest(ProximityRequest req, MessageDigest dig) throws IOException {
    File tmpFile = new File(System.getProperty("java.io.tmpdir"),
            req.getPath().replace(ItemProperties.PATH_SEPARATOR.charAt(0), '_') + "."
                    + dig.getAlgorithm().toLowerCase());
    tmpFile.deleteOnExit();
    FileWriter fw = new FileWriter(tmpFile);
    try {
        fw.write(new String(Hex.encodeHex(dig.digest())) + "\n");
        fw.flush();
    } finally {
        fw.close();
    }
}
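One detail worth noting: getAlgorithm() echoes whatever name was passed to getInstance(), so the extension of the temporary file above ("sha1" versus "sha-1") depends on how the caller created the digest. A small sketch illustrating the difference (observation, not Proximity-specific code):

public static void printExtensions() throws NoSuchAlgorithmException {
    MessageDigest a = MessageDigest.getInstance("SHA1");   // provider alias
    MessageDigest b = MessageDigest.getInstance("SHA-1");  // standard name
    System.out.println(a.getAlgorithm().toLowerCase());    // "sha1"  -> file would end in ".sha1"
    System.out.println(b.getAlgorithm().toLowerCase());    // "sha-1" -> file would end in ".sha-1"
}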
From source file:org.apache.cloudstack.utils.security.DigestHelper.java
public static ChecksumValue digest(String algorithm, InputStream is) throws NoSuchAlgorithmException, IOException {
    MessageDigest digest = MessageDigest.getInstance(algorithm);
    ChecksumValue checksum = null;
    byte[] buffer = new byte[8192];
    int read = 0;
    while ((read = is.read(buffer)) > 0) {
        digest.update(buffer, 0, read);
    }
    byte[] md5sum = digest.digest();
    // TODO make sure this is valid for all types of checksums !?!
    BigInteger bigInt = new BigInteger(1, md5sum);
    checksum = new ChecksumValue(digest.getAlgorithm(), getPaddedDigestString(digest, bigInt));
    return checksum;
}
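getPaddedDigestString is not included in this excerpt. A plausible sketch of what it needs to do, assuming its job is to restore the leading zeros that BigInteger.toString(16) drops:

// Assumed shape of getPaddedDigestString (not the CloudStack source): left-pad the hex form
// of the BigInteger to two characters per digest byte, since toString(16) drops leading zeros.
static String getPaddedDigestString(MessageDigest digest, BigInteger bigInt) {
    int hexChars = digest.getDigestLength() * 2;   // e.g. 32 for MD5, 64 for SHA-256
    String hex = bigInt.toString(16);
    StringBuilder padded = new StringBuilder(hexChars);
    for (int i = hex.length(); i < hexChars; i++) {
        padded.append('0');
    }
    return padded.append(hex).toString();
}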
From source file:org.cdmckay.coffeep.Program.java
private static ClassFile getClassFile(JavaFileObject fileObject, CoffeepSystemInfo systemInfo)
        throws IOException, ConstantPoolException {
    if (fileObject == null) {
        throw new IllegalArgumentException("fileObject cannot be null");
    }
    if (systemInfo == null) {
        throw new IllegalArgumentException("systemInfo cannot be null");
    }

    InputStream inputStream = fileObject.openInputStream();
    try {
        MessageDigest messageDigest = null;
        try {
            messageDigest = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException e) {
            logger.warn("Exception while getting MD5 MessageDigest", e);
        }

        DigestInputStream digestInputStream = new DigestInputStream(inputStream, messageDigest);
        CountingInputStream countingInputStream = new CountingInputStream(digestInputStream);

        Attribute.Factory attributeFactory = new Attribute.Factory();
        ClassFile classFile = ClassFile.read(countingInputStream, attributeFactory);

        systemInfo.classFileUri = fileObject.toUri();
        systemInfo.classFileSize = countingInputStream.getSize();
        systemInfo.lastModifiedTimestamp = fileObject.getLastModified();
        if (messageDigest != null) {
            systemInfo.digestAlgorithm = messageDigest.getAlgorithm();
            systemInfo.digest = new BigInteger(1, messageDigest.digest()).toString(16);
        }

        return classFile;
    } finally {
        inputStream.close();
    }
}
From source file:org.dataconservancy.ui.dcpmap.DataSetMapper.java
/**
 * Calculates fixity over an InputStream according to the supplied algorithm.
 *
 * @param in the InputStream
 * @param digestAlgo the algorithm used to calculate fixity
 * @return a DcsFixity object encapsulating the calculated fixity and algorithm
 * @throws IOException if there is an error calculating the fixity
 */
private DcsFixity calculateFixity(InputStream in, MessageDigest digestAlgo) throws IOException {
    final HexEncodingDigestListener digestListener = new HexEncodingDigestListener();
    final NullOutputStream devNull = new NullOutputStream();
    final DigestNotificationStream digestIn = new DigestNotificationStream(in, digestAlgo, digestListener);

    IOUtils.copy(digestIn, devNull);

    final String digest = digestListener.getDigest();
    if (digest == null || digest.isEmpty()) {
        throw new IOException("Error calculating fixity for stream: the digest was empty or null.");
    }

    DcsFixity fixity = new DcsFixity();
    fixity.setAlgorithm(digestAlgo.getAlgorithm());
    fixity.setValue(digest);

    return fixity;
}