Example usage for java.security DigestInputStream DigestInputStream

Introduction

On this page you can find example usage of the java.security DigestInputStream constructor DigestInputStream(InputStream, MessageDigest).

Prototype

public DigestInputStream(InputStream stream, MessageDigest digest) 

Document

Creates a digest input stream, using the specified input stream and message digest.
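
The constructor wraps an existing InputStream so that every byte read through the stream also updates the supplied MessageDigest. As a minimal sketch of the pattern the examples below all follow (the class name and the file name "example.bin" are placeholders):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;

public class Sha256Example {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        MessageDigest sha = MessageDigest.getInstance("SHA-256");
        // Reads pass through to the underlying stream and update the digest.
        try (InputStream in = Files.newInputStream(Paths.get("example.bin"));
             DigestInputStream dis = new DigestInputStream(in, sha)) {
            byte[] buffer = new byte[8192];
            while (dis.read(buffer) != -1) {
                // Drain the stream; each read updates the MessageDigest.
            }
        }
        // digest() returns the final hash and resets the MessageDigest.
        System.out.println(Base64.getEncoder().encodeToString(sha.digest()));
    }
}

Once the stream has been fully consumed, calling digest() on the MessageDigest (or on dis.getMessageDigest()) yields the hash of everything that was read.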

Usage

From source file: cz.muni.fi.xklinec.zipstream.Utils.java

/**
 * Computes the SHA-256 hash of the given byte array.
 * @param b the data to hash
 * @return the Base64-encoded SHA-256 digest
 */
public static String sha256(byte[] b) {
    try {
        MessageDigest sha = MessageDigest.getInstance("SHA-256");
        InputStream fis = new ByteArrayInputStream(b);
        DigestInputStream dis = new DigestInputStream(fis, sha);

        byte[] buffer = new byte[65536]; // 64 KB buffer
        while (dis.read(buffer) != -1) {
            // drain the stream; each read updates the digest
        }

        byte[] hash = sha.digest();
        return new String(Base64.encode(hash));

    } catch (Exception e) {
        throw new IllegalArgumentException("Cannot compute SHA256 digest of the file", e);
    }
}

From source file: fr.pilato.elasticsearch.crawler.fs.tika.TikaDocParser.java

public static void generate(FsSettings fsSettings, InputStream inputStream, String filename, Doc doc,
        MessageDigest messageDigest, long filesize) throws IOException {
    logger.trace("Generating document [{}]", filename);
    // Extracting content with Tika
    // See #38: https://github.com/dadoonet/fscrawler/issues/38
    int indexedChars = 100000;
    if (fsSettings.getFs().getIndexedChars() != null) {
        if (fsSettings.getFs().getIndexedChars().percentage()) {
            indexedChars = (int) Math.round(filesize * fsSettings.getFs().getIndexedChars().asDouble());
            logger.trace("using percentage [{}] to define indexed chars: [{}]",
                    fsSettings.getFs().getIndexedChars(), indexedChars);
        } else {
            indexedChars = (int) fsSettings.getFs().getIndexedChars().value();
            logger.trace("indexed chars [{}]",
                    indexedChars == -1 ? "has been disabled. All text will be extracted" : indexedChars);
        }
    }
    Metadata metadata = new Metadata();

    String parsedContent = null;

    if (messageDigest != null) {
        logger.trace("Generating hash with [{}]", messageDigest.getAlgorithm());
        inputStream = new DigestInputStream(inputStream, messageDigest);
    }

    ByteArrayOutputStream bos = null;
    if (fsSettings.getFs().isStoreSource()) {
        logger.debug("Using a TeeInputStream as we need to store the source");
        bos = new ByteArrayOutputStream();
        inputStream = new TeeInputStream(inputStream, bos);
    }

    try {
        // Set the maximum length of strings returned by the parseToString method, -1 sets no limit
        logger.trace("Beginning Tika extraction");
        parsedContent = tika().parseToString(inputStream, metadata, indexedChars);
        logger.trace("End of Tika extraction");
    } catch (Throwable e) {
        logger.debug("Failed to extract [" + indexedChars + "] characters of text for [" + filename + "]", e);
    }

    // Adding what we found to the document we want to index

    // File
    doc.getFile().setContentType(metadata.get(Metadata.CONTENT_TYPE));
    doc.getFile().setExtension(FilenameUtils.getExtension(filename));

    // We only add `indexed_chars` if the value is something other than the default or -1
    if (fsSettings.getFs().getIndexedChars() != null && fsSettings.getFs().getIndexedChars().value() != -1) {
        doc.getFile().setIndexedChars(indexedChars);
    }

    if (fsSettings.getFs().isAddFilesize()) {
        if (metadata.get(Metadata.CONTENT_LENGTH) != null) {
            // We try to get CONTENT_LENGTH from Tika first
            doc.getFile().setFilesize(Long.parseLong(metadata.get(Metadata.CONTENT_LENGTH)));
        }
    }
    if (messageDigest != null) {
        byte[] digest = messageDigest.digest();
        String result = "";
        // Convert the digest bytes to a hex string
        for (int i = 0; i < digest.length; i++) {
            result += Integer.toString((digest[i] & 0xff) + 0x100, 16).substring(1);
        }
        doc.getFile().setChecksum(result);
    }
    // File

    // Meta
    doc.getMeta().setAuthor(metadata.get(TikaCoreProperties.CREATOR));
    doc.getMeta().setTitle(metadata.get(TikaCoreProperties.TITLE));
    String sDate = metadata.get(TikaCoreProperties.MODIFIED);
    if (sDate != null) {
        try {
            LocalDateTime date = LocalDateTime.parse(sDate, DateTimeFormatter.ISO_DATE_TIME);
            doc.getMeta().setDate(date);
        } catch (DateTimeParseException e) {
            logger.warn("Can not parse date [{}] for [{}]. Skipping date field...", sDate, filename);
        }
    }
    doc.getMeta().setKeywords(commaDelimitedListToStringArray(metadata.get(TikaCoreProperties.KEYWORDS)));

    if (fsSettings.getFs().isRawMetadata()) {
        logger.trace("Listing all available metadata:");
        for (String metadataName : metadata.names()) {
            String value = metadata.get(metadataName);
            // This is a logger trick which helps to generate our unit tests
            // You need to change test/resources/log4j2.xml fr.pilato.elasticsearch.crawler.fs.tika level to trace
            logger.trace("  assertThat(raw, hasEntry(\"{}\", \"{}\"));", metadataName, value);
            doc.getMeta().addRaw(metadataName, value);
        }
    }
    // Meta

    // Doc content
    doc.setContent(parsedContent);

    // Doc as binary attachment
    if (fsSettings.getFs().isStoreSource()) {
        doc.setAttachment(Base64.getEncoder().encodeToString(bos.toByteArray()));
    }
    logger.trace("End document generation");
    // End of our document
}

From source file: uk.co.saiman.webmodule.commonjs.repository.TarGzInputStream.java

private TarGzInputStream(InputStream inputStream, String expectedSha1, MessageDigest digest)
        throws IOException {
    this(expectedSha1, digest, digest != null ? new DigestInputStream(inputStream, digest) : inputStream);
}

From source file: cascading.tap.hadoop.FSDigestInputStream.java

/**
 * Constructor FSDigestInputStream creates a new FSDigestInputStream instance.
 *
 * @param inputStream   of type InputStream
 * @param messageDigest of type MessageDigest
 * @param digestHex     of type String
 */
public FSDigestInputStream(InputStream inputStream, MessageDigest messageDigest, String digestHex) {
    this.inputStream = digestHex == null ? inputStream : new DigestInputStream(inputStream, messageDigest);
    this.digestHex = digestHex;
}

From source file: com.zimbra.cs.store.http.HttpStoreManager.java

@Override
public String writeStreamToStore(InputStream in, long actualSize, Mailbox mbox)
        throws IOException, ServiceException {
    MessageDigest digest;
    try {
        digest = MessageDigest.getInstance("SHA-256");
    } catch (NoSuchAlgorithmException e) {
        throw ServiceException.FAILURE("SHA-256 digest not found", e);
    }
    ByteUtil.PositionInputStream pin = new ByteUtil.PositionInputStream(new DigestInputStream(in, digest));

    HttpClient client = ZimbraHttpConnectionManager.getInternalHttpConnMgr().newHttpClient();
    PostMethod post = new PostMethod(getPostUrl(mbox));
    try {
        HttpClientUtil.addInputStreamToHttpMethod(post, pin, actualSize, "application/octet-stream");
        int statusCode = HttpClientUtil.executeMethod(client, post);
        if (statusCode == HttpStatus.SC_OK || statusCode == HttpStatus.SC_CREATED
                || statusCode == HttpStatus.SC_NO_CONTENT) {
            return getLocator(post, ByteUtil.encodeFSSafeBase64(digest.digest()), pin.getPosition(), mbox);
        } else {
            throw ServiceException.FAILURE("error POSTing blob: " + post.getStatusText(), null);
        }
    } finally {
        post.releaseConnection();
    }
}

From source file: com.galenframework.storage.repository.LocalFileStorage.java

@Override
public FileInfo saveImageToStorage(InputStream inputStream) {
    try {
        MessageDigest md = MessageDigest.getInstance("MD5");

        String dirsPath = generateImageDirsPath();

        File dirs = new File(root.getPath() + File.separator + dirsPath);
        dirs.mkdirs();

        String fileName = UUID.randomUUID().toString();
        File file = new File(dirs.getPath() + File.separator + fileName);
        file.createNewFile();
        FileOutputStream fos = new FileOutputStream(file);

        DigestInputStream dis = new DigestInputStream(inputStream, md);

        IOUtils.copy(dis, fos);
        fos.flush();
        fos.close();

        byte[] digest = md.digest();
        String hash = Base64.encodeBase64String(digest);
        return new FileInfo(hash, dirsPath + File.separator + fileName);
    } catch (Exception ex) {
        throw new RuntimeException("Cannot save image to storage", ex);
    }
}

From source file: fr.gouv.culture.vitam.digest.DigestCompute.java

/**
 * Computes the hash of the given stream contents using the specified algorithm.
 * @param fis the input stream to hash
 * @param algorithm the digest algorithm name
 * @return the Hashcode according to the algorithm
 * @throws Exception
 */
public final static String getHashCode(FileInputStream fis, String algorithm) throws Exception {
    MessageDigest md = MessageDigest.getInstance(algorithm);
    DigestInputStream dis = null;
    try {
        dis = new DigestInputStream(fis, md);
        byte[] buffer = new byte[8192];
        while (dis.read(buffer) != -1)
            ;
    } finally {
        if (dis != null) {
            dis.close();
        }
    }
    byte[] bDigest = md.digest();
    return Base64.encode(bDigest, false);
}

From source file: org.apache.archiva.checksum.Checksum.java

public Checksum update(InputStream stream) throws IOException {
    try (DigestInputStream dig = new DigestInputStream(stream, md)) {
        IOUtils.copy(dig, new NullOutputStream());
    }
    return this;
}

From source file: org.apache.jackrabbit.oak.spi.blob.FileBlobStore.java

@Override
public String writeBlob(String tempFilePath) throws IOException {
    File file = new File(tempFilePath);
    InputStream in = new FileInputStream(file);
    MessageDigest messageDigest;
    try {
        messageDigest = MessageDigest.getInstance(HASH_ALGORITHM);
    } catch (NoSuchAlgorithmException e) {
        throw new IOException(e);
    }
    DigestInputStream din = new DigestInputStream(in, messageDigest);
    long length = file.length();
    try {
        // Read to the end of the stream; each read updates the message digest
        while (true) {
            int len = din.read(buffer, 0, buffer.length);
            if (len < 0) {
                break;
            }
        }
    } finally {
        din.close();
    }
    ByteArrayOutputStream idStream = new ByteArrayOutputStream();
    idStream.write(TYPE_HASH);
    IOUtils.writeVarInt(idStream, 0);
    IOUtils.writeVarLong(idStream, length);
    byte[] digest = messageDigest.digest();
    File f = getFile(digest, false);
    if (f.exists()) {
        file.delete();
    } else {
        File parent = f.getParentFile();
        if (!parent.exists()) {
            parent.mkdirs();
        }
        file.renameTo(f);
    }
    IOUtils.writeVarInt(idStream, digest.length);
    idStream.write(digest);
    byte[] id = idStream.toByteArray();
    String blobId = StringUtils.convertBytesToHex(id);
    usesBlobId(blobId);
    return blobId;
}

From source file: org.seadva.dataone.DataOneUtil.java

@GET
@Path("addObject")
@Produces(MediaType.APPLICATION_JSON)
public String addObject(@QueryParam("filePath") String filePath, @QueryParam("id") String identifier,
        @QueryParam("schema") String metaFormat) throws IndexServiceException {

    ResearchObject researchObject = new ResearchObject();

    String filename = filePath.split("/")[filePath.split("/").length - 1].replace(".xml", "");
    if (metaFormat == null)
        metaFormat = "http://www.fgdc.gov/schemas/metadata/fgdc-std-001-1998.xsd";
    if (identifier == null)
        identifier = filename;

    SeadFile metadataFile = new SeadFile();
    metadataFile.setId(filename);
    metadataFile.setName(filename);
    metadataFile.setSource("file://" + filePath);

    try {
        DigestInputStream digestStream = new DigestInputStream(new FileInputStream(filePath),
                MessageDigest.getInstance("SHA-1"));
        // Drain the stream so the digest covers the whole file
        if (digestStream.read() != -1) {
            byte[] buf = new byte[1024];
            while (digestStream.read(buf) != -1)
                ;
        }
        byte[] digest = digestStream.getMessageDigest().digest();
        DcsFixity fixity = new DcsFixity();
        fixity.setAlgorithm("SHA-1");
        fixity.setValue(new String(Hex.encodeHex(digest)));
        metadataFile.addFixity(fixity);
    } catch (IOException e) {
        e.printStackTrace();
    } catch (NoSuchAlgorithmException e) {
        e.printStackTrace();
    }

    DcsFormat metadataFormat = new DcsFormat();
    metadataFormat.setFormat(metaFormat);
    metadataFile.addFormat(metadataFormat);

    DcsResourceIdentifier dcsResourceIdentifier = new DcsResourceIdentifier();
    dcsResourceIdentifier.setIdValue(identifier);
    dcsResourceIdentifier.setTypeId("dataone");
    metadataFile.addAlternateId(dcsResourceIdentifier);

    File metaFile = new File(filePath);
    metadataFile.setSizeBytes(metaFile.length());

    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    Date now = new Date();
    String strDate = sdfDate.format(now);
    metadataFile.setMetadataUpdateDate(strDate);

    researchObject.addFile(metadataFile);

    BatchIndexer<ResearchObject> indexer = new ROBatchIndexer(SeadQueryService.solr, null);
    indexer.add(researchObject);
    indexer.close();

    return "{\n" + "  \"response\" : \"Successfully added object - " + identifier + "\"" + "}";
}