List of usage examples for java.util.zip CRC32 getValue
@Override public long getValue()
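All of the examples below follow the same basic pattern: feed bytes into a CRC32 with update(), read the checksum with getValue() (an unsigned 32-bit value returned as a long), and call reset() before reusing the instance. A minimal, self-contained sketch of that pattern; the class name and sample input are illustrative only, not taken from the sources below:

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

// Minimal sketch: update() accumulates bytes, getValue() returns the CRC-32 so far.
public class Crc32Demo {
    public static void main(String[] args) {
        CRC32 crc = new CRC32();
        byte[] data = "hello crc32".getBytes(StandardCharsets.UTF_8);
        crc.update(data, 0, data.length);
        System.out.printf("crc32 = %08x%n", crc.getValue());
        crc.reset(); // reuse the same instance for the next byte stream
    }
}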
From source file:org.apache.hadoop.raid.TestRaidNode.java
private void validateFile(FileSystem fileSys, Path name1, Path name2, long crc) throws IOException {
    FileStatus stat1 = fileSys.getFileStatus(name1);
    FileStatus stat2 = fileSys.getFileStatus(name2);
    assertTrue(" Length of file " + name1 + " is " + stat1.getLen() + " is different from length of file "
            + name1 + " " + stat2.getLen(), stat1.getLen() == stat2.getLen());

    CRC32 newcrc = new CRC32();
    FSDataInputStream stm = fileSys.open(name2);
    final byte[] b = new byte[4192];
    int num = 0;
    while (num >= 0) {
        num = stm.read(b);
        if (num < 0) {
            break;
        }
        newcrc.update(b, 0, num);
    }
    stm.close();
    if (newcrc.getValue() != crc) {
        fail("CRC mismatch of files " + name1 + " with file " + name2);
    }
}
From source file:org.apache.hadoop.raid.TestBlockCopier.java
private long[] createRandomFileDispersed(Path file, int numBlocks, DatanodeDescriptor primaryNode,
        DatanodeDescriptor altNode) throws IOException, InterruptedException {

    BlockPlacementPolicyFakeData bp = BlockPlacementPolicyFakeData.lastInstance;
    DatanodeDescriptor tmp = bp.overridingDatanode;

    final int repl = 1;
    long[] crcs = new long[numBlocks];
    CRC32 crc = new CRC32();
    Random rand = new Random();

    FSDataOutputStream stm = fileSys.create(file, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, BLOCK_SIZE);

    // Create the first block on the alt node
    bp.overridingDatanode = altNode;

    // fill random data into file
    final byte[] b = new byte[(int) BLOCK_SIZE];
    LOG.info("Writing first block (alt. host)");
    rand.nextBytes(b);
    stm.write(b);
    crc.update(b);
    crcs[0] = crc.getValue();

    stm.flush();
    Thread.sleep(1000); // What a hack. Le sigh.

    // Now we want to write on the altNode
    bp.overridingDatanode = primaryNode;

    // Write the rest of the blocks on primaryNode
    for (int i = 1; i < numBlocks; i++) {
        LOG.info("Writing block number " + i + " (primary host)");
        rand.nextBytes(b);
        stm.write(b);
        crc.reset();
        crc.update(b);
        crcs[i] = crc.getValue();
    }

    stm.close();
    Thread.sleep(1000);

    // Reset this guy
    bp.overridingDatanode = tmp;

    return crcs;
}
From source file:com.zimbra.cs.zimlet.ZimletUtil.java
private static long computeCRC32(File file) throws IOException {
    byte buf[] = new byte[32 * 1024];
    CRC32 crc = new CRC32();
    crc.reset();
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(file);
        int bytesRead;
        while ((bytesRead = fis.read(buf)) != -1) {
            crc.update(buf, 0, bytesRead);
        }
        return crc.getValue();
    } finally {
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException e) {
            }
        }
    }
}
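The same file checksum can be written more compactly on Java 7+ with try-with-resources and java.util.zip.CheckedInputStream, which updates the wrapped CRC32 as bytes are read; getValue() is then read from the stream's checksum. A sketch of that alternative, not taken from the Zimbra source; the class and method names are illustrative:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;

// Sketch: CheckedInputStream updates the CRC32 as a side effect of reading.
public final class Crc32FileSketch {
    public static long computeCrc32(File file) throws IOException {
        byte[] buf = new byte[32 * 1024];
        try (CheckedInputStream cis = new CheckedInputStream(new FileInputStream(file), new CRC32())) {
            while (cis.read(buf) != -1) {
                // nothing to do here; the checksum is maintained by the stream
            }
            return cis.getChecksum().getValue();
        }
    }
}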
From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java
/**
 * Tries to read an existing index from the given tar file. The index is
 * returned if it is found and looks valid (correct checksum, passes
 * sanity checks).
 *
 * @param file tar file
 * @param name name of the tar file, for logging purposes
 * @return tar index, or {@code null} if not found or not valid
 * @throws IOException if the tar file could not be read
 */
private static ByteBuffer loadAndValidateIndex(RandomAccessFile file, String name) throws IOException {
    long length = file.length();
    if (length % BLOCK_SIZE != 0 || length < 6 * BLOCK_SIZE || length > Integer.MAX_VALUE) {
        log.warn("Unexpected size {} of tar file {}", length, name);
        return null; // unexpected file size
    }

    // read the index metadata just before the two final zero blocks
    ByteBuffer meta = ByteBuffer.allocate(16);
    file.seek(length - 2 * BLOCK_SIZE - 16);
    file.readFully(meta.array());
    int crc32 = meta.getInt();
    int count = meta.getInt();
    int bytes = meta.getInt();
    int magic = meta.getInt();

    if (magic != INDEX_MAGIC) {
        return null; // magic byte mismatch
    }
    if (count < 1 || bytes < count * 24 + 16 || bytes % BLOCK_SIZE != 0) {
        log.warn("Invalid index metadata in tar file {}", name);
        return null; // impossible entry and/or byte counts
    }

    // this involves seeking backwards in the file, which might not
    // perform well, but that's OK since we only do this once per file
    ByteBuffer index = ByteBuffer.allocate(count * 24);
    file.seek(length - 2 * BLOCK_SIZE - 16 - count * 24);
    file.readFully(index.array());
    index.mark();

    CRC32 checksum = new CRC32();
    long limit = length - 2 * BLOCK_SIZE - bytes - BLOCK_SIZE;
    long lastmsb = Long.MIN_VALUE;
    long lastlsb = Long.MIN_VALUE;
    byte[] entry = new byte[24];
    for (int i = 0; i < count; i++) {
        index.get(entry);
        checksum.update(entry);

        ByteBuffer buffer = ByteBuffer.wrap(entry);
        long msb = buffer.getLong();
        long lsb = buffer.getLong();
        int offset = buffer.getInt();
        int size = buffer.getInt();

        if (lastmsb > msb || (lastmsb == msb && lastlsb > lsb)) {
            log.warn("Incorrect index ordering in tar file {}", name);
            return null;
        } else if (lastmsb == msb && lastlsb == lsb && i > 0) {
            log.warn("Duplicate index entry in tar file {}", name);
            return null;
        } else if (offset < 0 || offset % BLOCK_SIZE != 0) {
            log.warn("Invalid index entry offset in tar file {}", name);
            return null;
        } else if (size < 1 || offset + size > limit) {
            log.warn("Invalid index entry size in tar file {}", name);
            return null;
        }

        lastmsb = msb;
        lastlsb = lsb;
    }

    if (crc32 != (int) checksum.getValue()) {
        log.warn("Invalid index checksum in tar file {}", name);
        return null; // checksum mismatch
    }

    index.reset();
    return index;
}
From source file:de.mpg.escidoc.services.dataacquisition.DataHandlerBean.java
/**
 * Fetch data from a given url.
 *
 * @param url
 * @return byte[]
 * @throws SourceNotAvailableException
 * @throws RuntimeException
 * @throws AccessException
 */
public byte[] fetchMetadatafromURL(URL url)
        throws SourceNotAvailableException, RuntimeException, AccessException {
    byte[] input = null;
    URLConnection conn = null;
    Date retryAfter = null;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);
    try {
        conn = ProxyHelper.openConnection(url);
        HttpURLConnection httpConn = (HttpURLConnection) conn;
        int responseCode = httpConn.getResponseCode();
        switch (responseCode) {
        case 503:
            String retryAfterHeader = conn.getHeaderField("Retry-After");
            if (retryAfterHeader != null) {
                SimpleDateFormat dateFormat = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
                retryAfter = dateFormat.parse(retryAfterHeader);
                this.logger.debug("Source responded with 503, retry after " + retryAfter + ".");
                throw new SourceNotAvailableException(retryAfter);
            }
            break;
        case 302:
            String alternativeLocation = conn.getHeaderField("Location");
            return fetchMetadatafromURL(new URL(alternativeLocation));
        case 200:
            this.logger.info("Source responded with 200.");
            // Fetch file
            GetMethod method = new GetMethod(url.toString());
            HttpClient client = new HttpClient();
            ProxyHelper.executeMethod(client, method);
            input = method.getResponseBody();
            httpConn.disconnect();
            // Create zip file with fetched file
            ZipEntry ze = new ZipEntry("unapi");
            ze.setSize(input.length);
            ze.setTime(this.currentDate());
            CRC32 crc321 = new CRC32();
            crc321.update(input);
            ze.setCrc(crc321.getValue());
            zos.putNextEntry(ze);
            zos.write(input);
            zos.flush();
            zos.closeEntry();
            zos.close();
            this.setContentType("application/zip");
            this.setFileEnding(".zip");
            break;
        case 403:
            throw new AccessException("Access to url " + url + " is restricted.");
        default:
            throw new RuntimeException("An error occurred during importing from external system: "
                    + responseCode + ": " + httpConn.getResponseMessage() + ".");
        }
    } catch (AccessException e) {
        this.logger.error("Access denied.", e);
        throw new AccessException(url.toString());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return baos.toByteArray();
}
From source file:com.jivesoftware.os.amza.service.storage.WALStorage.java
private long[] loadEndOfMergeMarker(long deltaWALId, byte[] row) {
    long[] marker = UIO.bytesLongs(row);
    if (marker[EOM_VERSION_INDEX] != 1) {
        return null;
    }
    CRC32 crC32 = new CRC32();
    byte[] hintsAsBytes = UIO.longsBytes(marker);
    crC32.update(hintsAsBytes, 16, hintsAsBytes.length - 16); // 16 skips the version and checksum
    if (marker[EOM_CHECKSUM_INDEX] != crC32.getValue()) {
        return null;
    }
    if (deltaWALId > -1 && marker[EOM_DELTA_WAL_ID_INDEX] >= deltaWALId) {
        return null;
    }
    return marker;
}
From source file:de.mpg.escidoc.services.dataacquisition.DataHandlerBean.java
/**
 * Operation for fetching data of type FILE.
 *
 * @param importSource
 * @param identifier
 * @param listOfFormats
 * @return byte[] of the fetched file, zip file if more than one record was
 *         fetched
 * @throws RuntimeException
 * @throws SourceNotAvailableException
 */
private byte[] fetchData(String identifier, Format[] formats)
        throws SourceNotAvailableException, RuntimeException, FormatNotAvailableException {
    byte[] in = null;
    FullTextVO fulltext = new FullTextVO();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ZipOutputStream zos = new ZipOutputStream(baos);

    try {
        // Call fetch file for every given format
        for (int i = 0; i < formats.length; i++) {
            Format format = formats[i];
            fulltext = this.util.getFtObjectToFetch(this.currentSource, format.getName(), format.getType(),
                    format.getEncoding());

            // Replace regex with identifier
            String decoded = java.net.URLDecoder.decode(fulltext.getFtUrl().toString(),
                    this.currentSource.getEncoding());
            fulltext.setFtUrl(new URL(decoded));
            fulltext.setFtUrl(
                    new URL(fulltext.getFtUrl().toString().replaceAll(this.regex, identifier.trim())));
            this.logger.debug("Fetch file from URL: " + fulltext.getFtUrl());

            // escidoc file
            if (this.currentSource.getHarvestProtocol().equalsIgnoreCase("ejb")) {
                in = this.fetchEjbFile(fulltext, identifier);
            }
            // other file
            else {
                in = this.fetchFile(fulltext);
            }

            this.setFileProperties(fulltext);

            // If only one file => return it in fetched format
            if (formats.length == 1) {
                return in;
            }
            // If more than one file => add it to zip
            else {
                // If cone service is not available (we do not get a fileEnding)
                // we have to make sure that the zip entries differ in name.
                String fileName = identifier;
                if (this.getFileEnding().equals("")) {
                    fileName = fileName + "_" + i;
                }
                ZipEntry ze = new ZipEntry(fileName + this.getFileEnding());
                ze.setSize(in.length);
                ze.setTime(this.currentDate());
                CRC32 crc321 = new CRC32();
                crc321.update(in);
                ze.setCrc(crc321.getValue());
                zos.putNextEntry(ze);
                zos.write(in);
                zos.flush();
                zos.closeEntry();
            }
        }
        this.setContentType("application/zip");
        this.setFileEnding(".zip");
        zos.close();
    } catch (SourceNotAvailableException e) {
        this.logger.error("Import Source " + this.currentSource + " not available.", e);
        throw new SourceNotAvailableException(e);
    } catch (FormatNotAvailableException e) {
        throw new FormatNotAvailableException(e.getMessage());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return baos.toByteArray();
}
From source file:org.kuali.kfs.module.ar.document.service.impl.DunningLetterServiceImpl.java
/**
 * This method generates the actual pdf files to print.
 *
 * @param mapping
 * @param form
 * @param list
 * @return
 */
@Override
public boolean createZipOfPDFs(byte[] report, ByteArrayOutputStream baos) throws IOException {

    ZipOutputStream zos = new ZipOutputStream(baos);
    int bytesRead;
    byte[] buffer = new byte[1024];
    CRC32 crc = new CRC32();

    if (ObjectUtils.isNotNull(report)) {
        BufferedInputStream bis = new BufferedInputStream(new ByteArrayInputStream(report));
        crc.reset();
        while ((bytesRead = bis.read(buffer)) != -1) {
            crc.update(buffer, 0, bytesRead);
        }
        bis.close();
        // Reset to beginning of input stream
        bis = new BufferedInputStream(new ByteArrayInputStream(report));
        ZipEntry entry = new ZipEntry("DunningLetters&Invoices-"
                + getDateTimeService().toDateStringForFilename(getDateTimeService().getCurrentDate()) + ".pdf");
        entry.setMethod(ZipEntry.STORED);
        entry.setCompressedSize(report.length);
        entry.setSize(report.length);
        entry.setCrc(crc.getValue());
        zos.putNextEntry(entry);
        while ((bytesRead = bis.read(buffer)) != -1) {
            zos.write(buffer, 0, bytesRead);
        }
        bis.close();
    }
    zos.close();
    return true;
}
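The dunning-letter example above stores the PDF uncompressed (ZipEntry.STORED), which is why it streams the bytes through the CRC32 first: ZipOutputStream requires the size and CRC-32 of a STORED entry to be set before putNextEntry is called. A minimal sketch of that requirement, separate from the example above; the class and method names here are illustrative only:

import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

// Sketch only: for a STORED (uncompressed) entry the CRC-32 and sizes must be
// known up front, so getValue() is read before the entry is written.
public final class StoredEntrySketch {
    public static void writeStoredEntry(ZipOutputStream zos, String name, byte[] data) throws IOException {
        CRC32 crc = new CRC32();
        crc.update(data, 0, data.length);
        ZipEntry entry = new ZipEntry(name);
        entry.setMethod(ZipEntry.STORED);
        entry.setSize(data.length);
        entry.setCompressedSize(data.length);
        entry.setCrc(crc.getValue()); // without this, putNextEntry throws ZipException
        zos.putNextEntry(entry);
        zos.write(data);
        zos.closeEntry();
    }
}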
From source file:org.apache.jackrabbit.oak.segment.file.TarReader.java
/**
 * Tries to read an existing index from the given tar file. The index is
 * returned if it is found and looks valid (correct checksum, passes
 * sanity checks).
 *
 * @param file tar file
 * @param name name of the tar file, for logging purposes
 * @return tar index, or {@code null} if not found or not valid
 * @throws IOException if the tar file could not be read
 */
private static ByteBuffer loadAndValidateIndex(RandomAccessFile file, String name) throws IOException {
    long length = file.length();
    if (length % BLOCK_SIZE != 0 || length < 6 * BLOCK_SIZE || length > Integer.MAX_VALUE) {
        log.warn("Unexpected size {} of tar file {}", length, name);
        return null; // unexpected file size
    }

    // read the index metadata just before the two final zero blocks
    ByteBuffer meta = ByteBuffer.allocate(16);
    file.seek(length - 2 * BLOCK_SIZE - 16);
    file.readFully(meta.array());
    int crc32 = meta.getInt();
    int count = meta.getInt();
    int bytes = meta.getInt();
    int magic = meta.getInt();

    if (magic != INDEX_MAGIC) {
        return null; // magic byte mismatch
    }
    if (count < 1 || bytes < count * TarEntry.SIZE + 16 || bytes % BLOCK_SIZE != 0) {
        log.warn("Invalid index metadata in tar file {}", name);
        return null; // impossible entry and/or byte counts
    }

    // this involves seeking backwards in the file, which might not
    // perform well, but that's OK since we only do this once per file
    ByteBuffer index = ByteBuffer.allocate(count * TarEntry.SIZE);
    file.seek(length - 2 * BLOCK_SIZE - 16 - count * TarEntry.SIZE);
    file.readFully(index.array());
    index.mark();

    CRC32 checksum = new CRC32();
    long limit = length - 2 * BLOCK_SIZE - bytes - BLOCK_SIZE;
    long lastmsb = Long.MIN_VALUE;
    long lastlsb = Long.MIN_VALUE;
    byte[] entry = new byte[TarEntry.SIZE];
    for (int i = 0; i < count; i++) {
        index.get(entry);
        checksum.update(entry);

        ByteBuffer buffer = wrap(entry);
        long msb = buffer.getLong();
        long lsb = buffer.getLong();
        int offset = buffer.getInt();
        int size = buffer.getInt();

        if (lastmsb > msb || (lastmsb == msb && lastlsb > lsb)) {
            log.warn("Incorrect index ordering in tar file {}", name);
            return null;
        } else if (lastmsb == msb && lastlsb == lsb && i > 0) {
            log.warn("Duplicate index entry in tar file {}", name);
            return null;
        } else if (offset < 0 || offset % BLOCK_SIZE != 0) {
            log.warn("Invalid index entry offset in tar file {}", name);
            return null;
        } else if (size < 1 || offset + size > limit) {
            log.warn("Invalid index entry size in tar file {}", name);
            return null;
        }

        lastmsb = msb;
        lastlsb = lsb;
    }

    if (crc32 != (int) checksum.getValue()) {
        log.warn("Invalid index checksum in tar file {}", name);
        return null; // checksum mismatch
    }

    index.reset();
    return index;
}
From source file:org.smartfrog.services.www.bulkio.client.SunJavaBulkIOClient.java
@Override
public long doUpload(String method, long ioSize) throws IOException, InterruptedException {
    validateURL();
    CRC32 checksum = new CRC32();
    URL targetUrl = getUrl();
    getLog().info("Uploading " + ioSize + " bytes to " + targetUrl);
    HttpURLConnection connection = openConnection();
    connection.setRequestMethod(method);
    connection.setRequestProperty(HttpHeaders.CONTENT_LENGTH, Long.toString(ioSize));
    connection.setDoOutput(true);
    maybeSetChunking(connection);
    connection.connect();
    OutputStream stream = connection.getOutputStream();
    long bytes = 0;
    try {
        for (bytes = 0; bytes < ioSize; bytes++) {
            int octet = (AbstractBulkioServlet.getByteFromCounter(bytes));
            stream.write(octet);
            checksum.update(octet);
            if (interrupted) {
                throw new InterruptedException(
                        "Interrupted after sending " + bytes + " bytes" + " to " + targetUrl);
            }
        }
    } finally {
        closeQuietly(stream);
    }
    getLog().info("Upload complete, checking results");
    checkStatusCode(targetUrl, connection, HttpURLConnection.HTTP_OK);
    long expectedChecksum = checksum.getValue();
    getLog().info("Uploaded " + bytes + " bytes to " + targetUrl + " checksum=" + expectedChecksum);
    if (bytes != ioSize) {
        throw new IOException(
                "Wrong content length uploaded to " + targetUrl + " : put " + ioSize + " but got " + bytes);
    }
    if (parseResults) {
        InputStream inStream = null;
        Properties props = new Properties();
        try {
            inStream = connection.getInputStream();
            props.load(inStream);
        } finally {
            closeQuietly(inStream);
        }
        long actualChecksum = getLongPropValue(props, IoAttributes.CHECKSUM);
        if (actualChecksum != expectedChecksum) {
            throw new IOException("Expected the checksum from upload of " + ioSize + " bytes " + " to "
                    + targetUrl + "to be " + expectedChecksum + " but got " + actualChecksum
                    + "\n Properties: " + props.toString());
        }
    }
    return ioSize;
}