List of usage examples for java.util.zip.CRC32 CRC32()
public CRC32()
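A minimal, self-contained sketch of the constructor in use: create a CRC32, feed it bytes with update(), and read the 32-bit checksum from getValue(), which is returned as a long. The class name and sample string below are illustrative only.

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class Crc32ConstructorExample {
    public static void main(String[] args) {
        // create an empty CRC32; its initial value is 0
        CRC32 crc = new CRC32();
        byte[] data = "hello world".getBytes(StandardCharsets.UTF_8);
        // accumulate the checksum over the byte array
        crc.update(data, 0, data.length);
        // getValue() returns the unsigned 32-bit CRC as a long
        System.out.printf("CRC32 = %08x%n", crc.getValue());
    }
}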
From source file:org.anarres.lzo.LzopInputStream.java
/**
 * Read and verify an lzo header, setting relevant block checksum options
 * and ignoring most everything else.
 */
protected int readHeader() throws IOException {
    byte[] buf = new byte[9];
    readBytes(buf, 0, 9);
    if (!Arrays.equals(buf, LzopConstants.LZOP_MAGIC))
        throw new IOException("Invalid LZO header");
    Arrays.fill(buf, (byte) 0);
    Adler32 adler = new Adler32();
    CRC32 crc32 = new CRC32();
    int hitem = readHeaderItem(buf, 2, adler, crc32); // lzop version
    if (hitem > LzopConstants.LZOP_VERSION) {
        LOG.debug("Compressed with later version of lzop: " + Integer.toHexString(hitem)
                + " (expected 0x" + Integer.toHexString(LzopConstants.LZOP_VERSION) + ")");
    }
    hitem = readHeaderItem(buf, 2, adler, crc32); // lzo library version
    if (hitem > LzoVersion.LZO_LIBRARY_VERSION) {
        throw new IOException("Compressed with incompatible lzo version: 0x" + Integer.toHexString(hitem)
                + " (expected 0x" + Integer.toHexString(LzoVersion.LZO_LIBRARY_VERSION) + ")");
    }
    hitem = readHeaderItem(buf, 2, adler, crc32); // lzop extract version
    if (hitem > LzopConstants.LZOP_VERSION) {
        throw new IOException("Compressed with incompatible lzop version: 0x" + Integer.toHexString(hitem)
                + " (expected 0x" + Integer.toHexString(LzopConstants.LZOP_VERSION) + ")");
    }
    hitem = readHeaderItem(buf, 1, adler, crc32); // method
    switch (hitem) {
    case LzopConstants.M_LZO1X_1:
    case LzopConstants.M_LZO1X_1_15:
    case LzopConstants.M_LZO1X_999:
        break;
    default:
        throw new IOException("Invalid strategy " + Integer.toHexString(hitem));
    }
    readHeaderItem(buf, 1, adler, crc32); // ignore level

    // flags
    int flags = readHeaderItem(buf, 4, adler, crc32);
    boolean useCRC32 = (flags & LzopConstants.F_H_CRC32) != 0;
    boolean extraField = (flags & LzopConstants.F_H_EXTRA_FIELD) != 0;
    if ((flags & LzopConstants.F_MULTIPART) != 0)
        throw new IOException("Multipart lzop not supported");
    if ((flags & LzopConstants.F_H_FILTER) != 0)
        throw new IOException("lzop filter not supported");
    if ((flags & LzopConstants.F_RESERVED) != 0)
        throw new IOException("Unknown flags in header");
    // known !F_H_FILTER, so no optional block

    readHeaderItem(buf, 4, adler, crc32); // ignore mode
    readHeaderItem(buf, 4, adler, crc32); // ignore mtime
    readHeaderItem(buf, 4, adler, crc32); // ignore gmtdiff
    hitem = readHeaderItem(buf, 1, adler, crc32); // fn len
    if (hitem > 0) {
        byte[] tmp = (hitem > buf.length) ? new byte[hitem] : buf;
        readHeaderItem(tmp, hitem, adler, crc32); // skip filename
    }
    int checksum = (int) (useCRC32 ? crc32.getValue() : adler.getValue());
    hitem = readHeaderItem(buf, 4, adler, crc32); // read checksum
    if (hitem != checksum) {
        throw new IOException("Invalid header checksum: " + Long.toHexString(checksum)
                + " (expected 0x" + Integer.toHexString(hitem) + ")");
    }
    if (extraField) { // lzop 1.08 ultimately ignores this
        LOG.debug("Extra header field not processed");
        adler.reset();
        crc32.reset();
        hitem = readHeaderItem(buf, 4, adler, crc32);
        readHeaderItem(new byte[hitem], hitem, adler, crc32);
        checksum = (int) (useCRC32 ? crc32.getValue() : adler.getValue());
        if (checksum != readHeaderItem(buf, 4, adler, crc32)) {
            throw new IOException("Invalid checksum for extra header field");
        }
    }
    return flags;
}
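The readHeaderItem() helper is not shown here, but the pattern it relies on is simply feeding the same header bytes into both an Adler32 and a CRC32, so that either checksum can be verified later depending on the F_H_CRC32 flag. A standalone sketch of that dual-update pattern (the helper name and sample bytes are illustrative, not from the original source):

import java.nio.charset.StandardCharsets;
import java.util.zip.Adler32;
import java.util.zip.CRC32;

public class DualChecksumSketch {
    // update both checksums with the same slice of header bytes
    static void updateBoth(byte[] buf, int len, Adler32 adler, CRC32 crc32) {
        adler.update(buf, 0, len);
        crc32.update(buf, 0, len);
    }

    public static void main(String[] args) {
        Adler32 adler = new Adler32();
        CRC32 crc32 = new CRC32();
        byte[] header = "example header bytes".getBytes(StandardCharsets.UTF_8);
        updateBoth(header, header.length, adler, crc32);
        // the caller later picks one of the two values based on a flag
        boolean useCRC32 = true; // e.g. (flags & F_H_CRC32) != 0
        long checksum = useCRC32 ? crc32.getValue() : adler.getValue();
        System.out.printf("header checksum = %08x%n", checksum);
    }
}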
From source file:org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.java
private void verifyDir(DistributedFileSystem dfs, Path dir) throws IOException {
    FileStatus[] fileArr = dfs.listStatus(dir);
    TreeMap<Path, Boolean> fileMap = new TreeMap<Path, Boolean>();
    for (FileStatus file : fileArr) {
        fileMap.put(file.getPath(), Boolean.valueOf(file.isDir()));
    }
    for (Iterator<Path> it = fileMap.keySet().iterator(); it.hasNext();) {
        Path path = it.next();
        boolean isDir = fileMap.get(path);
        String pathName = path.toUri().getPath();
        overallChecksum.update(pathName.getBytes());
        if (isDir) {
            verifyDir(dfs, path);
        } else {
            // this is not a directory. Checksum the file data.
            CRC32 fileCRC = new CRC32();
            FSInputStream in = dfs.dfs.open(pathName);
            byte[] buf = new byte[4096];
            int nRead = 0;
            while ((nRead = in.read(buf, 0, buf.length)) > 0) {
                fileCRC.update(buf, 0, nRead);
            }
            verifyChecksum(pathName, fileCRC.getValue());
        }
    }
}
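The overallChecksum field above accumulates state across many update() calls; a CRC over several chunks equals the CRC over their concatenation, and the order of the chunks matters. A small standalone sketch of that behavior (class name and strings are illustrative):

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class IncrementalCrcSketch {
    public static void main(String[] args) {
        byte[] a = "/dir/".getBytes(StandardCharsets.UTF_8);
        byte[] b = "file.txt".getBytes(StandardCharsets.UTF_8);

        // checksum fed in two pieces
        CRC32 incremental = new CRC32();
        incremental.update(a, 0, a.length);
        incremental.update(b, 0, b.length);

        // checksum fed as one concatenated piece
        CRC32 whole = new CRC32();
        byte[] joined = "/dir/file.txt".getBytes(StandardCharsets.UTF_8);
        whole.update(joined, 0, joined.length);

        // both produce the same value; swapping a and b would not
        System.out.println(incremental.getValue() == whole.getValue()); // true
    }
}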
From source file:me.j360.dubbo.modules.util.text.HashUtil.java
/**
 * crc32 returned as an int.
 * (Guava also offers a crc32 hash that returns a long; this uses the JDK implementation.)
 */
public static int crc32AsInt(@NotNull byte[] input) {
    CRC32 crc32 = new CRC32();
    crc32.update(input);
    // CRC32 is a 32-bit checksum; getValue() returns a long, so narrowing to int loses nothing
    return (int) crc32.getValue();
}
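Because a CRC-32 fits in 32 bits, the (int) cast above drops no information: the original unsigned value can be recovered by masking the int back to a long. A quick illustrative check (assumed, not from the original project):

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class CrcNarrowingSketch {
    public static void main(String[] args) {
        CRC32 crc32 = new CRC32();
        crc32.update("abc".getBytes(StandardCharsets.UTF_8));
        long asLong = crc32.getValue();   // unsigned 32-bit value held in a long
        int asInt = (int) asLong;         // may print as negative, but loses nothing
        // mask the int back to an unsigned long and compare
        System.out.println(asLong == (asInt & 0xFFFFFFFFL)); // true
    }
}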
From source file:JarUtil.java
/**
 * Adds the given file to the specified JAR file.
 *
 * @param file
 *            the file that should be added
 * @param jarFile
 *            the JAR to which the file should be added
 * @param parentDir
 *            the parent directory of the file; this is used to calculate
 *            the path within the JAR file. When null is given, the file will
 *            be added into the root of the JAR.
 * @param compress
 *            true when the jar file should be compressed
 * @throws FileNotFoundException
 *             when the jarFile does not exist
 * @throws IOException
 *             when a file could not be written or the jar-file could not be read
 */
public static void addToJar(File file, File jarFile, File parentDir, boolean compress)
        throws FileNotFoundException, IOException {
    File tmpJarFile = File.createTempFile("tmp", ".jar", jarFile.getParentFile());
    JarOutputStream out = new JarOutputStream(new FileOutputStream(tmpJarFile));
    if (compress) {
        out.setLevel(ZipOutputStream.DEFLATED);
    } else {
        out.setLevel(ZipOutputStream.STORED);
    }
    // copy contents of old jar to new jar:
    JarFile inputFile = new JarFile(jarFile);
    JarInputStream in = new JarInputStream(new FileInputStream(jarFile));
    CRC32 crc = new CRC32();
    byte[] buffer = new byte[512 * 1024];
    JarEntry entry = (JarEntry) in.getNextEntry();
    while (entry != null) {
        InputStream entryIn = inputFile.getInputStream(entry);
        add(entry, entryIn, out, crc, buffer);
        entryIn.close();
        entry = (JarEntry) in.getNextEntry();
    }
    in.close();
    inputFile.close();
    int sourceDirLength;
    if (parentDir == null) {
        sourceDirLength = file.getAbsolutePath().lastIndexOf(File.separatorChar) + 1;
    } else {
        sourceDirLength = file.getAbsolutePath().lastIndexOf(File.separatorChar) + 1
                - parentDir.getAbsolutePath().length();
    }
    addFile(file, out, crc, sourceDirLength, buffer);
    out.close();
    // remove old jar file and rename temp file to old one:
    if (jarFile.delete()) {
        if (!tmpJarFile.renameTo(jarFile)) {
            throw new IOException(
                    "Unable to rename temporary JAR file to [" + jarFile.getAbsolutePath() + "].");
        }
    } else {
        throw new IOException("Unable to delete old JAR file [" + jarFile.getAbsolutePath() + "].");
    }
}
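The single CRC32 instance above is passed into add() and addFile() for every entry; a helper written that way has to call reset() before each new payload, otherwise state carries over from the previous entry. A minimal sketch of that reuse pattern (the helper below is hypothetical, not the add() from JarUtil):

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class CrcReuseSketch {
    // hypothetical helper: compute the CRC of one payload with a shared instance
    static long crcOf(CRC32 crc, byte[] payload) {
        crc.reset();                     // clear state left over from the previous entry
        crc.update(payload, 0, payload.length);
        return crc.getValue();
    }

    public static void main(String[] args) {
        CRC32 shared = new CRC32();
        long first = crcOf(shared, "entry-1".getBytes(StandardCharsets.UTF_8));
        long second = crcOf(shared, "entry-2".getBytes(StandardCharsets.UTF_8));
        System.out.printf("%08x %08x%n", first, second);
    }
}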
From source file:org.mariotaku.twidere.util.TwitterContentUtils.java
public static String getOfficialKeyName(final Context context, final String consumerKey,
        final String consumerSecret) {
    if (context == null || consumerKey == null || consumerSecret == null) return null;
    final String[] keySecrets = context.getResources()
            .getStringArray(R.array.values_official_consumer_secret_crc32);
    final String[] keyNames = context.getResources().getStringArray(R.array.names_official_consumer_secret);
    final CRC32 crc32 = new CRC32();
    final byte[] consumerSecretBytes = consumerSecret.getBytes(Charset.forName("UTF-8"));
    crc32.update(consumerSecretBytes, 0, consumerSecretBytes.length);
    final long value = crc32.getValue();
    crc32.reset();
    for (int i = 0, j = keySecrets.length; i < j; i++) {
        if (Long.parseLong(keySecrets[i], 16) == value) return keyNames[i];
    }
    return null;
}
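The lookup above compares the CRC32 of the consumer secret against hex strings stored in a resource array; values for such an array could be produced with a snippet like the following (illustrative only, not part of the Twidere sources):

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class SecretCrcSketch {
    public static void main(String[] args) {
        String consumerSecret = "example-consumer-secret"; // placeholder value
        CRC32 crc32 = new CRC32();
        byte[] bytes = consumerSecret.getBytes(StandardCharsets.UTF_8);
        crc32.update(bytes, 0, bytes.length);
        // hex string in the same form that Long.parseLong(s, 16) expects
        System.out.println(Long.toHexString(crc32.getValue()));
    }
}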
From source file:com.asakusafw.runtime.util.cache.HadoopFileCacheRepository.java
private long computeChecksum(FileSystem fs, Path file) throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Computing checksum: {0}", //$NON-NLS-1$
                file));
    }
    Checksum checksum = new CRC32();
    byte[] buf = byteBuffers.get();
    try (FSDataInputStream input = fs.open(file)) {
        while (true) {
            int read = input.read(buf);
            if (read < 0) {
                break;
            }
            checksum.update(buf, 0, read);
        }
    }
    return checksum.getValue();
}
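Since the method only needs the Checksum interface, the same result can be obtained by wrapping the stream in a java.util.zip.CheckedInputStream, which updates the checksum as a side effect of reading. A sketch against a local file (the HDFS specifics are left out; the file name is a placeholder):

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.CheckedInputStream;

public class CheckedStreamCrcSketch {
    static long crcOfFile(String path) throws IOException {
        byte[] buf = new byte[8192];
        try (CheckedInputStream in = new CheckedInputStream(
                new BufferedInputStream(new FileInputStream(path)), new CRC32())) {
            // the stream updates the CRC32 while we drain it
            while (in.read(buf) >= 0) {
                // nothing to do; reading is enough
            }
            return in.getChecksum().getValue();
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.printf("%08x%n", crcOfFile("some-local-file.bin")); // path is a placeholder
    }
}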
From source file:org.kepler.objectmanager.cache.DataCacheManager.java
/**
 * Return a temporary local LSID based on a string.
 *
 * @param magicstring
 * @throws Exception
 */
private KeplerLSID getDataLSID(String magicstring) throws Exception {
    CRC32 c = new CRC32();
    c.update(magicstring.getBytes());
    String hexValue = Long.toHexString(c.getValue());
    KeplerLSID lsid = new KeplerLSID("localdata", hexValue, 0L, 0L);
    return lsid;
}
From source file:com.dcits.govsbu.southernbase.baseproject2.modules.utils.Digests.java
/**
 * crc32.
 */
public static int crc32(byte[] input) {
    CRC32 crc32 = new CRC32();
    crc32.update(input);
    return (int) crc32.getValue();
}
From source file:org.digidoc4j.impl.bdoc.asic.AsicContainerCreator.java
private ZipEntry getAsicMimeTypeZipEntry(byte[] mimeTypeBytes) {
    ZipEntry entryMimetype = new ZipEntry(ZIP_ENTRY_MIMETYPE);
    entryMimetype.setMethod(ZipEntry.STORED);
    entryMimetype.setSize(mimeTypeBytes.length);
    entryMimetype.setCompressedSize(mimeTypeBytes.length);
    CRC32 crc = new CRC32();
    crc.update(mimeTypeBytes);
    entryMimetype.setCrc(crc.getValue());
    return entryMimetype;
}
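For a STORED (uncompressed) entry, ZipOutputStream requires the size and CRC to be set up front, exactly as done above; otherwise putNextEntry() throws a ZipException. A self-contained sketch that writes such a mimetype entry to a throwaway archive (file name and content below are illustrative):

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class StoredEntrySketch {
    public static void main(String[] args) throws IOException {
        byte[] content = "application/vnd.etsi.asic-e+zip".getBytes(StandardCharsets.US_ASCII);

        ZipEntry entry = new ZipEntry("mimetype");
        entry.setMethod(ZipEntry.STORED);
        entry.setSize(content.length);
        entry.setCompressedSize(content.length);
        CRC32 crc = new CRC32();
        crc.update(content);
        entry.setCrc(crc.getValue());    // STORED entries need size + CRC before writing

        try (ZipOutputStream zip = new ZipOutputStream(new FileOutputStream("example.zip"))) {
            zip.putNextEntry(entry);
            zip.write(content);
            zip.closeEntry();
        }
    }
}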
From source file:edu.hm.cs.fs.scriptinat0r7.model.ScriptDocument.java
/**
 * Computes the hash value of a script document.
 *
 * @return the 32 bit hash value as a long.
 */
public long computeHashvalue() {
    final CRC32 crc = new CRC32();
    crc.update(getFile());
    return crc.getValue();
}