List of usage examples for the java.security.DigestOutputStream constructor
public DigestOutputStream(OutputStream stream, MessageDigest digest)
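Before the project examples below, here is a minimal, self-contained sketch of the constructor in use. The SHA-256 algorithm and the in-memory sink are illustrative choices, not taken from any of the projects listed:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class DigestOutputStreamSketch {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        // every byte written to out is forwarded to sink and fed to the digest
        try (OutputStream out = new DigestOutputStream(sink, digest)) {
            out.write("hello".getBytes(StandardCharsets.UTF_8));
        }
        byte[] hash = digest.digest(); // SHA-256 of everything written through the stream
        System.out.println(hash.length); // 32 bytes for SHA-256
    }
}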
From source file:no.trank.openpipe.wikipedia.download.HttpDownloader.java
protected void writeFile(InputStream in, long size) throws IOException {
    OutputStream out;
    try {
        out = new DigestOutputStream(new FileOutputStream(targetFile), MessageDigest.getInstance("MD5"));
    } catch (NoSuchAlgorithmException e) {
        log.error("Could not make md5. MD5 not supported", e);
        out = new FileOutputStream(targetFile);
    }
    reportSize(size);
    try {
        long totalReadBytes = 0;
        final byte[] buf = new byte[1024 * 32];
        int numBytes = in.read(buf);
        while (numBytes >= 0) {
            totalReadBytes += numBytes;
            reportProgress(totalReadBytes);
            out.write(buf, 0, numBytes);
            numBytes = in.read(buf);
        }
    } finally {
        try {
            out.close();
        } catch (Exception e) {
            // Do nothing
        }
        reportDone();
        if (out instanceof DigestOutputStream) {
            writeMd5File(((DigestOutputStream) out).getMessageDigest());
        }
    }
}
From source file:no.trank.openpipe.wikipedia.producer.HttpDownloader.java
protected void writeFile(InputStream in, long size) throws IOException {
    OutputStream out;
    try {
        out = new DigestOutputStream(new FileOutputStream(targetFile), MessageDigest.getInstance("MD5"));
    } catch (NoSuchAlgorithmException e) {
        log.error("Could not make md5. MD5 not supported", e);
        out = new FileOutputStream(targetFile);
    }
    try {
        long totalReadBytes = 0;
        byte[] buf = new byte[16384];
        int numBytes = in.read(buf);
        while (numBytes >= 0) {
            totalReadBytes += numBytes;
            progress(size, totalReadBytes);
            out.write(buf, 0, numBytes);
            numBytes = in.read(buf);
        }
    } finally {
        try {
            out.close();
        } catch (Exception e) {
            // Do nothing
        }
        progress(size, size);
        if (out instanceof DigestOutputStream) {
            writeMd5File(((DigestOutputStream) out).getMessageDigest());
        }
    }
}
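Both downloader variants above share a graceful-fallback pattern: if the MD5 algorithm is unavailable, the code degrades to a plain FileOutputStream, and the final instanceof check decides whether a checksum can be recovered. A compact sketch of just that pattern, with an in-memory sink standing in for the target file:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class OptionalDigestSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        OutputStream out;
        try {
            out = new DigestOutputStream(sink, MessageDigest.getInstance("MD5"));
        } catch (NoSuchAlgorithmException e) {
            out = sink; // degrade gracefully: keep writing, just without a digest
        }
        out.write(new byte[] { 1, 2, 3 });
        out.close();
        if (out instanceof DigestOutputStream) {
            byte[] md5 = ((DigestOutputStream) out).getMessageDigest().digest();
            System.out.println("digest available, " + md5.length + " bytes");
        }
    }
}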
From source file:nu.kelvin.jfileshare.ajax.FileReceiverServlet.java
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    HttpSession session = req.getSession();
    UserItem currentUser = (UserItem) session.getAttribute("user");
    if (currentUser != null && ServletFileUpload.isMultipartContent(req)) {
        Conf conf = (Conf) getServletContext().getAttribute("conf");
        // keep files of up to 10 MiB (10485760 bytes) in memory
        FileItemFactory factory = new DiskFileItemFactory(10485760, new File(conf.getPathTemp()));
        ServletFileUpload upload = new ServletFileUpload(factory);
        upload.setSizeMax(conf.getFileSizeMax());

        // set file upload progress listener
        FileUploadListener listener = new FileUploadListener();
        session.setAttribute("uploadListener", listener);
        upload.setProgressListener(listener);

        File tempFile = File.createTempFile(String.format("%05d-", currentUser.getUid()), null,
                new File(conf.getPathTemp()));
        tempFile.deleteOnExit();
        try {
            FileItem file = new FileItem();
            /* iterate over all uploaded items */
            FileItemIterator it = upload.getItemIterator(req);
            FileOutputStream filestream = null;
            while (it.hasNext()) {
                FileItemStream item = it.next();
                String name = item.getFieldName();
                InputStream instream = item.openStream();
                DigestOutputStream outstream = null;
                if (item.isFormField()) {
                    String value = Streams.asString(instream);
                    // logger.info(name + " : " + value);
                    /* not the file upload. Maybe the password field? */
                    if (name.equals("password") && !value.equals("")) {
                        logger.info("Uploaded file has password set");
                        file.setPwPlainText(value);
                    }
                    instream.close();
                } else {
                    // This is the file you're looking for
                    file.setName(item.getName());
                    file.setType(item.getContentType() == null ? "application/octet-stream"
                            : item.getContentType());
                    file.setUid(currentUser.getUid());
                    try {
                        filestream = new FileOutputStream(tempFile);
                        MessageDigest md = MessageDigest.getInstance("MD5");
                        outstream = new DigestOutputStream(filestream, md);
                        long filesize = IOUtils.copyLarge(instream, outstream);
                        if (filesize == 0) {
                            throw new Exception("File is empty.");
                        }
                        md = outstream.getMessageDigest();
                        file.setMd5sum(toHex(md.digest()));
                        file.setSize(filesize);
                    } finally {
                        if (outstream != null) {
                            try {
                                outstream.close();
                            } catch (IOException ignored) {
                            }
                        }
                        if (filestream != null) {
                            try {
                                filestream.close();
                            } catch (IOException ignored) {
                            }
                        }
                        if (instream != null) {
                            try {
                                instream.close();
                            } catch (IOException ignored) {
                            }
                        }
                    }
                }
            }

            /* All done. Save the new file */
            if (conf.getDaysFileExpiration() != 0) {
                file.setDaysToKeep(conf.getDaysFileExpiration());
            }
            if (file.create(ds, req.getRemoteAddr())) {
                File finalFile = new File(conf.getPathStore(), Integer.toString(file.getFid()));
                tempFile.renameTo(finalFile);
                logger.log(Level.INFO, "User {0} storing file \"{1}\" in the filestore",
                        new Object[] { currentUser.getUid(), file.getName() });
                req.setAttribute("msg", "File <strong>\"" + Helpers.htmlSafe(file.getName())
                        + "\"</strong> uploaded successfully. <a href='" + req.getContextPath()
                        + "/file/edit/" + file.getFid() + "'>Click here to edit file</a>");
                req.setAttribute("javascript", "parent.uploadComplete('info');");
            } else {
                req.setAttribute("msg", "Unable to contact the database");
                req.setAttribute("javascript", "parent.uploadComplete('critical');");
            }
        } catch (SizeLimitExceededException e) {
            tempFile.delete();
            req.setAttribute("msg", "File is too large.\nThe maximum size of file uploads is "
                    + FileItem.humanReadable(conf.getFileSizeMax()));
            req.setAttribute("javascript", "parent.uploadComplete('warning');");
        } catch (FileUploadException e) {
            tempFile.delete();
            req.setAttribute("msg", "Unable to upload file");
            req.setAttribute("javascript", "parent.uploadComplete('warning');");
        } catch (Exception e) {
            tempFile.delete();
            req.setAttribute("msg",
                    "Unable to upload file. ".concat(e.getMessage() == null ? "" : e.getMessage()));
            req.setAttribute("javascript", "parent.uploadComplete('warning');");
        } finally {
            session.setAttribute("uploadListener", null);
        }
        ServletContext app = getServletContext();
        RequestDispatcher disp = app.getRequestDispatcher("/templates/AjaxDummy.jsp");
        disp.forward(req, resp);
    }
}
From source file:org.abstracthorizon.proximity.maven.MavenProximityLogic.java
/**
 * This postprocessing simply merges the fetched list of metadatas.
 *
 * @param request the request
 * @param groupRequest the group request
 * @param listOfProxiedItems the list of proxied items
 *
 * @return the merged metadata.
 *
 * @throws IOException Signals that an I/O exception has occurred.
 */
public Item postprocessItemList(ProximityRequest request, ProximityRequest groupRequest,
        List listOfProxiedItems) throws IOException {
    if (listOfProxiedItems.size() == 0) {
        throw new IllegalArgumentException("The listOfProxiedItems list cannot be 0 length!");
    }
    Item item = (Item) listOfProxiedItems.get(0);
    ItemProperties itemProps = item.getProperties();
    if (listOfProxiedItems.size() > 1) {
        if (MavenArtifactRecognizer.isChecksum(request.getPath())) {
            File tmpFile = new File(System.getProperty("java.io.tmpdir"),
                    request.getPath().replace(ItemProperties.PATH_SEPARATOR.charAt(0), '_'));
            if (tmpFile.exists()) {
                logger.info("Item for path " + request.getPath() + " SPOOFED with merged metadata checksum.");
                item.setStream(new DeleteOnCloseFileInputStream(tmpFile));
                itemProps.setSize(tmpFile.length());
            } else {
                logger.debug("Item for path " + request.getPath() + " SPOOFED with first got from repo group.");
            }
        } else {
            logger.debug("Item for path " + request.getPath() + " found in total of "
                    + listOfProxiedItems.size() + " repositories, will merge them.");
            MetadataXpp3Reader metadataReader = new MetadataXpp3Reader();
            MetadataXpp3Writer metadataWriter = new MetadataXpp3Writer();
            InputStreamReader isr;
            Metadata mergedMetadata = null;
            for (int i = 0; i < listOfProxiedItems.size(); i++) {
                Item currentItem = (Item) listOfProxiedItems.get(i);
                try {
                    isr = new InputStreamReader(currentItem.getStream());
                    Metadata imd = metadataReader.read(isr);
                    if (mergedMetadata == null) {
                        mergedMetadata = imd;
                    } else {
                        mergedMetadata.merge(imd);
                    }
                    isr.close();
                } catch (XmlPullParserException ex) {
                    logger.warn("Could not merge M2 metadata: " + currentItem.getProperties().getDirectoryPath()
                            + " from repository " + currentItem.getProperties().getRepositoryId(), ex);
                } catch (IOException ex) {
                    logger.warn("Got IOException during merge of M2 metadata: "
                            + currentItem.getProperties().getDirectoryPath() + " from repository "
                            + currentItem.getProperties().getRepositoryId(), ex);
                }
            }
            try {
                // we know that maven-metadata.xml is relatively small (few KB)
                MessageDigest md5alg = MessageDigest.getInstance("md5");
                MessageDigest sha1alg = MessageDigest.getInstance("sha1");
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                DigestOutputStream md5os = new DigestOutputStream(bos, md5alg);
                DigestOutputStream sha1os = new DigestOutputStream(md5os, sha1alg);
                OutputStreamWriter osw = new OutputStreamWriter(sha1os);
                metadataWriter.write(osw, mergedMetadata);
                osw.flush();
                osw.close();
                storeDigest(request, md5alg);
                storeDigest(request, sha1alg);
                ByteArrayInputStream is = new ByteArrayInputStream(bos.toByteArray());
                item.setStream(is);
                itemProps.setSize(bos.size());
            } catch (NoSuchAlgorithmException ex) {
                throw new IllegalArgumentException("No MD5 or SHA1 algorithm?");
            }
        }
    }
    return item;
}
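The example above chains two DigestOutputStreams so that a single write pass feeds both the MD5 and the SHA-1 digest on the way to the underlying buffer. A stripped-down sketch of that chaining pattern, with illustrative data and an in-memory sink:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class ChainedDigests {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        MessageDigest sha1 = MessageDigest.getInstance("SHA-1");
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        // each DigestOutputStream forwards bytes to the stream it wraps,
        // so one write() updates both digests and still reaches the sink
        try (DigestOutputStream out = new DigestOutputStream(new DigestOutputStream(sink, md5), sha1)) {
            out.write(new byte[] { 1, 2, 3 });
        }
        System.out.printf("md5: %d bytes, sha1: %d bytes%n", md5.digest().length, sha1.digest().length);
    }
}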
From source file:org.apache.hadoop.hdfs.qjournal.client.TestImageUploadStream.java
private MD5Hash writeDataAndAssertContents(TestImageUploadStreamInjectionHandler h, int iteration)
        throws IOException {
    // check write digest
    MessageDigest digester = MD5Hash.getDigester();

    // create stream
    HttpImageUploadStream ius = new HttpImageUploadStream(httpAddrs, JID, FAKE_NSINFO,
            startTxId + iteration, 1, bufferSize, maxNumChunks);
    DigestOutputStream ds = new DigestOutputStream(ius, digester);
    DataOutputStream dos = new DataOutputStream(ds);

    // write actual data
    byte[] written = writeData(dos, 10240);

    // flush
    dos.flush();

    // get written hash
    MD5Hash hash = new MD5Hash(digester.digest());

    // close the stream
    dos.close();

    assertContents(cluster, written, startTxId + iteration, hash, h);

    // roll image
    qjm.saveDigestAndRenameCheckpointImage(startTxId + iteration, hash);

    // final assert of the contents
    // get contents using input stream obtained from qjm
    InputStream is = qjm.getImageInputStream(startTxId + iteration).getInputStream();
    byte[] contents = new byte[written.length];
    is.read(contents);
    assertTrue(Arrays.equals(written, contents));
    return hash;
}
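The Hadoop test above layers a DataOutputStream over the DigestOutputStream and flushes the chain before taking the digest. A minimal sketch of the same layering using only JDK classes; the values written are illustrative:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class DigestOverDataOutput {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        MessageDigest digester = MessageDigest.getInstance("MD5");
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(new DigestOutputStream(sink, digester));
        dos.writeLong(42L);
        dos.writeUTF("image data");
        dos.flush(); // flush the chain; the digest was updated as each write passed through
        byte[] hash = digester.digest();
        dos.close();
        System.out.println(hash.length); // 16 bytes for MD5
    }
}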
From source file:org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageReconstructor.java
/**
 * Run the OfflineImageReconstructor.
 *
 * @param inputPath The input path to use.
 * @param outputPath The output path to use.
 *
 * @throws Exception On error.
 */
public static void run(String inputPath, String outputPath) throws Exception {
    MessageDigest digester = MD5Hash.getDigester();
    FileOutputStream fout = null;
    File foutHash = new File(outputPath + ".md5");
    Files.deleteIfExists(foutHash.toPath()); // delete any .md5 file that exists
    CountingOutputStream out = null;
    FileInputStream fis = null;
    InputStreamReader reader = null;
    try {
        Files.deleteIfExists(Paths.get(outputPath));
        fout = new FileOutputStream(outputPath);
        fis = new FileInputStream(inputPath);
        reader = new InputStreamReader(fis, Charset.forName("UTF-8"));
        out = new CountingOutputStream(new DigestOutputStream(new BufferedOutputStream(fout), digester));
        OfflineImageReconstructor oir = new OfflineImageReconstructor(out, reader);
        oir.processXml();
    } finally {
        IOUtils.cleanup(LOG, reader, fis, out, fout);
    }
    // Write the md5 file
    MD5FileUtils.saveMD5File(new File(outputPath), new MD5Hash(digester.digest()));
}
From source file:org.apache.jackrabbit.core.data.FileDataStore.java
/**
 * Creates a new data record.
 * The stream is first consumed and the contents are saved in a temporary file
 * and the SHA-1 message digest of the stream is calculated. If a
 * record with the same SHA-1 digest (and length) is found then it is
 * returned. Otherwise the temporary file is moved in place to become
 * the new data record that gets returned.
 *
 * @param input binary stream
 * @return data record that contains the given stream
 * @throws DataStoreException if the record could not be created
 */
public DataRecord addRecord(InputStream input) throws DataStoreException {
    File temporary = null;
    try {
        temporary = newTemporaryFile();
        DataIdentifier tempId = new DataIdentifier(temporary.getName());
        usesIdentifier(tempId);
        // Copy the stream to the temporary file and calculate the
        // stream length and the message digest of the stream
        long length = 0;
        MessageDigest digest = MessageDigest.getInstance(DIGEST);
        OutputStream output = new DigestOutputStream(new FileOutputStream(temporary), digest);
        try {
            length = IOUtils.copyLarge(input, output);
        } finally {
            output.close();
        }
        DataIdentifier identifier = new DataIdentifier(digest.digest());
        File file;
        synchronized (this) {
            // Check if the same record already exists, or
            // move the temporary file in place if needed
            usesIdentifier(identifier);
            file = getFile(identifier);
            File parent = file.getParentFile();
            if (!parent.isDirectory()) {
                parent.mkdirs();
            }
            if (!file.exists()) {
                temporary.renameTo(file);
                if (!file.exists()) {
                    throw new IOException("Can not rename " + temporary.getAbsolutePath() + " to "
                            + file.getAbsolutePath() + " (media read only?)");
                }
            } else {
                long now = System.currentTimeMillis();
                if (file.lastModified() < now) {
                    file.setLastModified(now);
                }
            }
            // Sanity checks on the record file. These should never fail,
            // but better safe than sorry...
            if (!file.isFile()) {
                throw new IOException("Not a file: " + file);
            }
            if (file.length() != length) {
                throw new IOException(DIGEST + " collision: " + file);
            }
        }
        // this will also make sure that
        // tempId is not garbage collected until here
        inUse.remove(tempId);
        return new FileDataRecord(identifier, file);
    } catch (NoSuchAlgorithmException e) {
        throw new DataStoreException(DIGEST + " not available", e);
    } catch (IOException e) {
        throw new DataStoreException("Could not add record", e);
    } finally {
        if (temporary != null) {
            temporary.delete();
        }
    }
}
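The Jackrabbit data stores (here and in the two entries that follow) use DigestOutputStream for content addressing: the stream is spooled to a temporary file while its digest is computed, and the hash then names the final file. A minimal sketch of that idea using only JDK classes; the SHA-1 choice matches the javadoc above, but the helper name and the Java 9+ InputStream.transferTo call are assumptions of this sketch:

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class ContentAddressedWrite {
    static Path store(InputStream input, Path storeDir) throws IOException, NoSuchAlgorithmException {
        Path temp = Files.createTempFile(storeDir, "upload-", ".tmp");
        MessageDigest digest = MessageDigest.getInstance("SHA-1");
        // spool to a temp file and digest in one pass
        try (OutputStream out = new DigestOutputStream(Files.newOutputStream(temp), digest)) {
            input.transferTo(out); // Java 9+
        }
        StringBuilder name = new StringBuilder();
        for (byte b : digest.digest()) {
            name.append(String.format("%02x", b));
        }
        Path target = storeDir.resolve(name.toString());
        if (Files.exists(target)) {
            Files.delete(temp); // identical content already stored
        } else {
            Files.move(temp, target, StandardCopyOption.ATOMIC_MOVE);
        }
        return target;
    }
}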
From source file:org.apache.jackrabbit.core.data.CachingDataStore.java
/**
 * Creates a new data record in {@link Backend}. The stream is first
 * consumed and the contents are saved in a temporary file and the SHA-1
 * message digest of the stream is calculated. If a record with the same
 * SHA-1 digest (and length) is found then it is returned. Otherwise new
 * record is created in {@link Backend} and the temporary file is moved in
 * place to {@link LocalCache}.
 *
 * @param input
 *            binary stream
 * @return {@link CachingDataRecord}
 * @throws DataStoreException
 *             if the record could not be created.
 */
@Override
public DataRecord addRecord(InputStream input) throws DataStoreException {
    File temporary = null;
    long startTime = System.currentTimeMillis();
    long length = 0;
    try {
        temporary = newTemporaryFile();
        DataIdentifier tempId = new DataIdentifier(temporary.getName());
        usesIdentifier(tempId);
        // Copy the stream to the temporary file and calculate the
        // stream length and the message digest of the stream
        MessageDigest digest = MessageDigest.getInstance(DIGEST);
        OutputStream output = new DigestOutputStream(new FileOutputStream(temporary), digest);
        try {
            length = IOUtils.copyLarge(input, output);
        } finally {
            output.close();
        }
        long currTime = System.currentTimeMillis();
        DataIdentifier identifier = new DataIdentifier(encodeHexString(digest.digest()));
        LOG.debug("SHA1 of [{}], length =[{}] took [{}]ms ",
                new Object[] { identifier, length, (currTime - startTime) });
        String fileName = getFileName(identifier);
        AsyncUploadCacheResult result = null;
        synchronized (this) {
            usesIdentifier(identifier);
            // check if async upload is already in progress
            if (!asyncWriteCache.hasEntry(fileName, true)) {
                result = cache.store(fileName, temporary, true);
            }
        }
        LOG.debug("storing [{}] in localCache took [{}] ms", identifier,
                (System.currentTimeMillis() - currTime));
        if (result != null) {
            if (result.canAsyncUpload()) {
                backend.writeAsync(identifier, result.getFile(), this);
            } else {
                backend.write(identifier, result.getFile());
            }
        }
        // this will also make sure that
        // tempId is not garbage collected until here
        inUse.remove(tempId);
        LOG.debug("addRecord [{}] of length [{}] took [{}]ms.",
                new Object[] { identifier, length, (System.currentTimeMillis() - startTime) });
        return new CachingDataRecord(this, identifier);
    } catch (NoSuchAlgorithmException e) {
        throw new DataStoreException(DIGEST + " not available", e);
    } catch (IOException e) {
        throw new DataStoreException("Could not add record", e);
    } finally {
        if (temporary != null) {
            // try to delete - but it's not a big deal if we can't
            temporary.delete();
        }
    }
}
From source file:org.apache.jackrabbit.core.data.FileDataStore.java
/**
 * Creates a new data record.
 * The stream is first consumed and the contents are saved in a temporary file
 * and the SHA-1 message digest of the stream is calculated. If a
 * record with the same SHA-1 digest (and length) is found then it is
 * returned. Otherwise the temporary file is moved in place to become
 * the new data record that gets returned.
 *
 * @param input binary stream
 * @return data record that contains the given stream
 * @throws DataStoreException if the record could not be created
 */
public DataRecord addRecord(InputStream input) throws DataStoreException {
    File temporary = null;
    try {
        temporary = newTemporaryFile();
        DataIdentifier tempId = new DataIdentifier(temporary.getName());
        usesIdentifier(tempId);
        // Copy the stream to the temporary file and calculate the
        // stream length and the message digest of the stream
        long length = 0;
        MessageDigest digest = MessageDigest.getInstance(DIGEST);
        OutputStream output = new DigestOutputStream(new FileOutputStream(temporary), digest);
        try {
            length = IOUtils.copyLarge(input, output);
        } finally {
            output.close();
        }
        DataIdentifier identifier = new DataIdentifier(encodeHexString(digest.digest()));
        File file;
        synchronized (this) {
            // Check if the same record already exists, or
            // move the temporary file in place if needed
            usesIdentifier(identifier);
            file = getFile(identifier);
            if (!file.exists()) {
                File parent = file.getParentFile();
                parent.mkdirs();
                if (temporary.renameTo(file)) {
                    // no longer need to delete the temporary file
                    temporary = null;
                } else {
                    throw new IOException("Can not rename " + temporary.getAbsolutePath() + " to "
                            + file.getAbsolutePath() + " (media read only?)");
                }
            } else {
                long now = System.currentTimeMillis();
                if (getLastModified(file) < now + ACCESS_TIME_RESOLUTION) {
                    setLastModified(file, now + ACCESS_TIME_RESOLUTION);
                }
            }
            if (file.length() != length) {
                // Sanity checks on the record file. These should never fail,
                // but better safe than sorry...
                if (!file.isFile()) {
                    throw new IOException("Not a file: " + file);
                }
                throw new IOException(DIGEST + " collision: " + file);
            }
        }
        // this will also make sure that
        // tempId is not garbage collected until here
        inUse.remove(tempId);
        return new FileDataRecord(this, identifier, file);
    } catch (NoSuchAlgorithmException e) {
        throw new DataStoreException(DIGEST + " not available", e);
    } catch (IOException e) {
        throw new DataStoreException("Could not add record", e);
    } finally {
        if (temporary != null) {
            temporary.delete();
        }
    }
}
From source file:org.apache.sling.distribution.util.impl.DigestUtils.java
public static DigestOutputStream openDigestOutputStream(OutputStream mainOutputStream, String digestAlgorithm) {
    return new DigestOutputStream(mainOutputStream, getDigest(digestAlgorithm));
}
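The Sling helper above delegates to an internal getDigest method that is not shown. A hedged, self-contained sketch of how such a helper might look and be used; the stand-in getDigest implementation and the hex-encoding step are assumptions of this sketch, not the Sling source:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class DigestHelperSketch {
    // stand-in for an internal getDigest: wraps the checked exception
    static MessageDigest getDigest(String algorithm) {
        try {
            return MessageDigest.getInstance(algorithm);
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalArgumentException("Unsupported digest algorithm: " + algorithm, e);
        }
    }

    static DigestOutputStream openDigestOutputStream(OutputStream main, String algorithm) {
        return new DigestOutputStream(main, getDigest(algorithm));
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        DigestOutputStream out = openDigestOutputStream(sink, "MD5");
        out.write("payload".getBytes(StandardCharsets.UTF_8));
        out.close();
        // hex-encode the finished digest
        StringBuilder hex = new StringBuilder();
        for (byte b : out.getMessageDigest().digest()) {
            hex.append(String.format("%02x", b));
        }
        System.out.println(hex); // 32 hex chars for MD5
    }
}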