Example usage for java.util.zip CRC32 update

Introduction

On this page you can find usage examples for the java.util.zip CRC32.update(byte[], int, int) method.

Prototype

@Override
public void update(byte[] b, int off, int len) 

Document

Updates the CRC-32 checksum with the specified array of bytes.
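
A minimal, self-contained sketch of calling this method directly (the buffer contents and the four-byte length are illustrative):

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class Crc32UpdateDemo {
    public static void main(String[] args) {
        byte[] data = "hello world".getBytes(StandardCharsets.UTF_8);
        CRC32 crc = new CRC32();
        // Checksum only the first four bytes of the buffer.
        crc.update(data, 0, 4);
        // getValue() returns the running CRC-32 as an unsigned 32-bit value in a long.
        System.out.printf("crc32 = %08x%n", crc.getValue());
    }
}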

Usage

From source file:org.apache.hadoop.hdfs.TestRaidDfs.java

public static boolean validateFile(FileSystem fileSys, Path name, long length, long crc) throws IOException {

    long numRead = 0;
    CRC32 newcrc = new CRC32();
    FSDataInputStream stm = fileSys.open(name);
    final byte[] b = new byte[4192];
    int num = 0;
    while (num >= 0) {
        num = stm.read(b);
        if (num < 0) {
            break;
        }
        numRead += num;
        newcrc.update(b, 0, num);
    }
    stm.close();

    if (numRead != length) {
        LOG.info("Number of bytes read " + numRead + " does not match file size " + length);
        return false;
    }

    LOG.info(" Newcrc " + newcrc.getValue() + " old crc " + crc);
    if (newcrc.getValue() != crc) {
        LOG.info("CRC mismatch of file " + name + ": " + newcrc.getValue() + " vs. " + crc);
        return false;
    }
    return true;
}

From source file:org.anarres.lzo.LzopInputStream.java

/**
 * Read bytes, update checksums, return first four bytes as an int, first
 * byte read in the MSB.
 */
private int readHeaderItem(byte[] buf, int len, Adler32 adler, CRC32 crc32) throws IOException {
    int ret = readInt(buf, len);
    adler.update(buf, 0, len);
    crc32.update(buf, 0, len);
    Arrays.fill(buf, (byte) 0);
    return ret;
}
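
Adler32 and CRC32 both implement java.util.zip.Checksum, so the pair of update calls above can be generalized over the shared interface. A minimal sketch (the updateAll helper is hypothetical, not part of any library):

import java.util.zip.Adler32;
import java.util.zip.CRC32;
import java.util.zip.Checksum;

public class ChecksumFanout {
    // Hypothetical helper: feed one buffer region to several checksums at once.
    static void updateAll(byte[] buf, int off, int len, Checksum... sums) {
        for (Checksum sum : sums) {
            sum.update(buf, off, len);
        }
    }

    public static void main(String[] args) {
        byte[] buf = {1, 2, 3, 4};
        Adler32 adler = new Adler32();
        CRC32 crc32 = new CRC32();
        updateAll(buf, 0, buf.length, adler, crc32);
        System.out.println("adler32=" + adler.getValue() + " crc32=" + crc32.getValue());
    }
}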

From source file:nl.nn.adapterframework.compression.ZipWriter.java

public void writeEntryWithCompletedHeader(String filename, Object contents, boolean close, String charset)
        throws CompressionException, IOException {
    if (StringUtils.isEmpty(filename)) {
        throw new CompressionException("filename cannot be empty");
    }

    byte[] contentBytes = null;
    BufferedInputStream bis = null;
    long size = 0;
    if (contents != null) {
        if (contents instanceof byte[]) {
            contentBytes = (byte[]) contents;
        } else if (contents instanceof InputStream) {
            contentBytes = Misc.streamToBytes((InputStream) contents);
        } else {
            contentBytes = contents.toString().getBytes(charset);
        }
        bis = new BufferedInputStream(new ByteArrayInputStream(contentBytes));
        size = bis.available();
    } else {
        log.warn("contents of zip entry [" + filename + "] is null");
    }

    int bytesRead;
    byte[] buffer = new byte[1024];
    CRC32 crc = new CRC32();
    crc.reset();
    if (bis != null) {
        while ((bytesRead = bis.read(buffer)) != -1) {
            crc.update(buffer, 0, bytesRead);
        }
        bis.close();
    }
    if (contents != null) {
        bis = new BufferedInputStream(new ByteArrayInputStream(contentBytes));
    }
    ZipEntry entry = new ZipEntry(filename);
    entry.setMethod(ZipEntry.STORED);
    entry.setCompressedSize(size);
    entry.setSize(size);
    entry.setCrc(crc.getValue());
    getZipoutput().putNextEntry(entry);
    if (bis != null) {
        while ((bytesRead = bis.read(buffer)) != -1) {
            getZipoutput().write(buffer, 0, bytesRead);
        }
        bis.close();
    }
    getZipoutput().closeEntry();
}
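
The content is read twice above because STORED (uncompressed) entries must carry their size and CRC-32 before putNextEntry is called; unlike DEFLATED entries, ZipOutputStream cannot fill them in afterwards. A minimal sketch of the same pattern over an in-memory buffer (file names and payload are illustrative):

import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class StoredEntryDemo {
    public static void main(String[] args) throws Exception {
        byte[] content = "zip entry payload".getBytes(StandardCharsets.UTF_8);

        // Pass 1: compute the CRC, since STORED entries need it up front.
        CRC32 crc = new CRC32();
        crc.update(content, 0, content.length);

        ZipEntry entry = new ZipEntry("payload.txt");
        entry.setMethod(ZipEntry.STORED);
        entry.setSize(content.length);
        entry.setCompressedSize(content.length);
        entry.setCrc(crc.getValue());

        // Pass 2: write the bytes themselves.
        try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream("demo.zip"))) {
            zos.putNextEntry(entry);
            zos.write(content);
            zos.closeEntry();
        }
    }
}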

From source file:io.fabric8.maven.generator.springboot.SpringBootGenerator.java

private ZipEntry createZipEntry(File file, String fullPath) throws IOException {
    ZipEntry entry = new ZipEntry(fullPath);

    byte[] buffer = new byte[8192];
    int bytesRead = -1;
    try (InputStream is = new FileInputStream(file)) {
        CRC32 crc = new CRC32();
        int size = 0;
        while ((bytesRead = is.read(buffer)) != -1) {
            crc.update(buffer, 0, bytesRead);
            size += bytesRead;
        }
        entry.setSize(size);
        entry.setCompressedSize(size);
        entry.setCrc(crc.getValue());
        entry.setMethod(ZipEntry.STORED);
        return entry;
    }
}

From source file:org.sleuthkit.autopsy.experimental.configuration.SharedConfiguration.java

/**
 * Calculate the CRC of a file to use to determine if it has changed.
 *
 * @param filePath File to get the CRC for
 *
 * @return String containing the CRC
 *
 * @throws SharedConfigurationException
 */
private static String calculateCRC(String filePath) throws SharedConfigurationException {
    File file = new File(filePath);
    try {
        FileInputStream fileStream = new FileInputStream(file);
        CRC32 crc = new CRC32();
        byte[] buffer = new byte[65536];
        int bytesRead = fileStream.read(buffer);
        while (-1 != bytesRead) {
            crc.update(buffer, 0, bytesRead);
            bytesRead = fileStream.read(buffer);
        }
        return String.valueOf(crc.getValue());
    } catch (IOException ex) {
        throw new SharedConfigurationException(
                String.format("Failed to calculate CRC for %s", file.getAbsolutePath()), ex);
    }
}
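
Note that the FileInputStream above is never closed on the success path. A minimal variant of the same loop using try-with-resources (same CRC-to-String contract, plain IOException in place of the project-specific exception type):

import java.io.FileInputStream;
import java.io.IOException;
import java.util.zip.CRC32;

public class CrcFile {
    static String calculateCRC(String filePath) throws IOException {
        CRC32 crc = new CRC32();
        byte[] buffer = new byte[65536];
        try (FileInputStream fileStream = new FileInputStream(filePath)) {
            int bytesRead;
            while ((bytesRead = fileStream.read(buffer)) != -1) {
                crc.update(buffer, 0, bytesRead);
            }
        }
        return String.valueOf(crc.getValue());
    }
}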

From source file:org.apache.hadoop.dfs.TestDFSUpgradeFromImage.java

private void verifyDir(DFSClient client, String dir) throws IOException {

    DFSFileInfo[] fileArr = client.listPaths(dir);
    TreeMap<String, Boolean> fileMap = new TreeMap<String, Boolean>();

    for (DFSFileInfo file : fileArr) {
        String path = file.getPath().toString();
        fileMap.put(path, Boolean.valueOf(file.isDir()));
    }

    for (Iterator<String> it = fileMap.keySet().iterator(); it.hasNext();) {
        String path = it.next();
        boolean isDir = fileMap.get(path);

        overallChecksum.update(path.getBytes());

        if (isDir) {
            verifyDir(client, path);
        } else {
            // this is not a directory. Checksum the file data.
            CRC32 fileCRC = new CRC32();
            FSInputStream in = client.open(path);
            byte[] buf = new byte[4096];
            int nRead = 0;
            while ((nRead = in.read(buf, 0, buf.length)) > 0) {
                fileCRC.update(buf, 0, nRead);
            }

            verifyChecksum(path, fileCRC.getValue());
        }
    }
}

From source file:org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.java

private void verifyDir(DistributedFileSystem dfs, Path dir) throws IOException {

    FileStatus[] fileArr = dfs.listStatus(dir);
    TreeMap<Path, Boolean> fileMap = new TreeMap<Path, Boolean>();

    for (FileStatus file : fileArr) {
        fileMap.put(file.getPath(), Boolean.valueOf(file.isDir()));
    }

    for (Iterator<Path> it = fileMap.keySet().iterator(); it.hasNext();) {
        Path path = it.next();
        boolean isDir = fileMap.get(path);

        String pathName = path.toUri().getPath();
        overallChecksum.update(pathName.getBytes());

        if (isDir) {
            verifyDir(dfs, path);
        } else {
            // this is not a directory. Checksum the file data.
            CRC32 fileCRC = new CRC32();
            FSInputStream in = dfs.dfs.open(pathName);
            byte[] buf = new byte[4096];
            int nRead = 0;
            while ((nRead = in.read(buf, 0, buf.length)) > 0) {
                fileCRC.update(buf, 0, nRead);
            }

            verifyChecksum(pathName, fileCRC.getValue());
        }
    }
}
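
Both verifyDir variants use the same idiom: recurse into directories and CRC the bytes of each regular file. A minimal local-filesystem analogue using java.nio.file instead of the Hadoop API (the root path is illustrative):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.CRC32;

public class DirCrcDemo {
    static long fileCrc(Path file) throws IOException {
        CRC32 fileCRC = new CRC32();
        byte[] buf = new byte[4096];
        try (InputStream in = Files.newInputStream(file)) {
            int nRead;
            while ((nRead = in.read(buf, 0, buf.length)) > 0) {
                fileCRC.update(buf, 0, nRead);
            }
        }
        return fileCRC.getValue();
    }

    public static void main(String[] args) throws IOException {
        List<Path> files;
        try (Stream<Path> walk = Files.walk(Paths.get("/tmp/data"))) {
            files = walk.filter(Files::isRegularFile).collect(Collectors.toList());
        }
        for (Path p : files) {
            System.out.println(p + " -> " + fileCrc(p));
        }
    }
}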

From source file:org.apache.hadoop.raid.TestRaidDfs.java

private void validateFile(FileSystem fileSys, Path name1, Path name2, long crc) throws IOException {

    FileStatus stat1 = fileSys.getFileStatus(name1);
    FileStatus stat2 = fileSys.getFileStatus(name2);
    assertTrue(" Length of file " + name1 + " is " + stat1.getLen() + " is different from length of file "
            + name1 + " " + stat2.getLen(), stat1.getLen() == stat2.getLen());

    CRC32 newcrc = new CRC32();
    FSDataInputStream stm = fileSys.open(name2);
    final byte[] b = new byte[4192];
    int num = 0;
    while (num >= 0) {
        num = stm.read(b);
        if (num < 0) {
            break;
        }
        newcrc.update(b, 0, num);
    }
    stm.close();
    LOG.info(" Newcrc " + newcrc.getValue() + " old crc " + crc);
    if (newcrc.getValue() != crc) {
        fail("CRC mismatch of files " + name1 + " with file " + name2);
    }
}

From source file:org.apache.hadoop.raid.SmokeTestThread.java

@Override
public Boolean call() throws Exception {
    Path testPath = null;
    try {
        fileSys = FileSystem.get(distRaidNode.getConf());
        // Create a small file with 3 blocks
        String testFile = testFileBase + rand.nextLong();
        testPath = new Path(testFile);
        if (fileSys.exists(testPath)) {
            fileSys.delete(testPath, true);
        }
        long blockSize = BLOCK_SIZE;
        FSDataOutputStream stm = fileSys.create(testPath, true,
                fileSys.getConf().getInt("io.file.buffer.size", 4096), (short) 3, blockSize);
        // Write 3 blocks.
        byte[] b = new byte[(int) blockSize];
        for (int i = 0; i < NUM_SOURCE_BLOCKS; i++) {
            rand.nextBytes(b);
            stm.write(b);
            checksum.update(b);
        }
        stm.close();
        LOG.info(
                "[SMOKETEST] Created a test file: " + testFile + " with CRC32 checksum " + checksum.getValue());
        PolicyInfo info = new PolicyInfo(testFile, distRaidNode.getConf());
        info.setCodecId(TEST_CODEC);
        info.setSrcPath(testFileDirectory);
        info.setShouldRaid(true);
        info.setProperty("modTimePeriod", "0");
        info.setProperty("targetReplication", "1");
        info.setProperty("metaReplication", "1");
        FileStatus stat = fileSys.getFileStatus(testPath);
        ArrayList<FileStatus> fstats = new ArrayList<FileStatus>();
        fstats.add(stat);
        // Raid it using rs
        DistRaid dr = DistRaidNode.raidFiles(distRaidNode.getConf(), distRaidNode.jobMonitor, fstats, info);
        LOG.info("[SMOKETEST] RS Raid test file: " + testFile);
        if (dr == null) {
            throw new IOException("Failed to sart a raiding job");
        }
        long startTime = System.currentTimeMillis();
        while (!dr.checkComplete() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        if (!dr.checkComplete()) {
            throw new IOException("Failed to finish the raiding job in " + (timeOut / 1000) + " seconds");
        }
        if (!dr.successful()) {
            throw new IOException("Failed to raid the file " + testFile);
        }
        LOG.info("[SMOKETEST] Finish raiding test file: " + testFile);
        // Verify parity file exists
        Codec codec = Codec.getCodec(TEST_CODEC);
        Path parityPath = new Path(codec.getParityPrefix(), RaidNode.makeRelative(testPath));
        FileStatus parityStat = fileSys.getFileStatus(parityPath);
        long numParityBlocks = RaidNode.numBlocks(parityStat);
        long expectedNumParityBlocks = RaidNode.numStripes(NUM_SOURCE_BLOCKS, codec.stripeLength)
                * codec.parityLength;
        if (numParityBlocks != expectedNumParityBlocks
                || parityStat.getLen() != expectedNumParityBlocks * BLOCK_SIZE) {
            throw new IOException("[SMOKETEST] Parity file " + parityPath + " has " + numParityBlocks
                    + " blocks and " + parityStat.getLen() + " bytes, but we expect " + expectedNumParityBlocks
                    + " blocks and " + (expectedNumParityBlocks * BLOCK_SIZE) + " bytes");
        }
        LOG.info("[SMOKETEST] Verification of parity file " + parityPath + " succeeded");
        LocatedBlock[] blocks = new LocatedBlock[1];
        LocatedBlocks lbs = ((DistributedFileSystem) fileSys).getLocatedBlocks(testPath, 0, Integer.MAX_VALUE);
        // Corrupt the first block
        blocks[0] = lbs.get(0);
        ((DistributedFileSystem) fileSys).getClient().reportBadBlocks(blocks);
        LOG.info("[SMOKETEST] Finish corrupting the first block " + lbs.get(0).getBlock());
        // submit a job to "fix" it
        Set<String> jobFiles = new HashSet<String>();
        jobFiles.add(testFile);
        Job job = DistBlockIntegrityMonitor.startOneJob(
                (DistBlockIntegrityMonitor.Worker) distRaidNode.blockIntegrityMonitor.getCorruptionMonitor(),
                Priority.HIGH, jobFiles, System.currentTimeMillis(), new AtomicLong(0),
                new AtomicLong(System.currentTimeMillis()), Integer.MAX_VALUE);
        startTime = System.currentTimeMillis();
        while (!job.isComplete() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        if (!job.isComplete()) {
            throw new IOException("Failed to finish the blockfixing job in " + (timeOut / 1000) + " seconds");
        }
        if (!job.isSuccessful()) {
            throw new IOException("Failed to fix the file " + testFile);
        }
        LOG.info("[SMOKETEST] Finish blockfixing test file: " + testFile);
        // wait for block is reported
        startTime = System.currentTimeMillis();
        while (((DistributedFileSystem) fileSys).getLocatedBlocks(testPath, 0, Integer.MAX_VALUE).get(0)
                .isCorrupt() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        CRC32 newChk = new CRC32();
        FSDataInputStream readStm = fileSys.open(testPath);
        int num = 0;
        while (num >= 0) {
            num = readStm.read(b);
            if (num < 0) {
                break;
            }
            newChk.update(b, 0, num);
        }
        readStm.close();
        if (newChk.getValue() != checksum.getValue()) {
            throw new IOException(
                    "Fixed file's checksum " + newChk.getValue() + " != original one " + checksum.getValue());
        }
        LOG.info("[SMOKETEST] Verification of fixed test file: " + testFile);
        return true;
    } catch (IOException ex) {
        LOG.error("Get IOException in SmokeTestThread", ex);
        ioe = ex;
        return false;
    } catch (Throwable ex) {
        LOG.error("Get Error in SmokeTestThread", ex);
        ioe = new IOException(ex);
        return false;
    } finally {
        try {
            if (fileSys != null) {
                fileSys.delete(testPath, true);
            }
        } catch (IOException ioe) {
            LOG.error("Get error during deletion", ioe);
        }
    }
}

From source file:com.jkoolcloud.tnt4j.streams.configure.state.AbstractFileStreamStateHandler.java

/**
 * Save current file access state. Actually takes the current streamed file line, and calculates CRC of that line.
 *
 * @param line
 *            line currently streamed
 * @param streamName
 *            stream name
 */
public void saveState(AbstractFileLineStream.Line line, String streamName) {
    AbstractFileLineStream.Line procLine = prevLine;
    prevLine = line;
    if (procLine == null) {
        return;
    }

    String lineStr = procLine.getText();
    int lineNr = procLine.getLineNumber();

    try {
        fileAccessState.currentLineNumber = lineNr;
        fileAccessState.lastReadTime = System.currentTimeMillis();

        CRC32 crc = new CRC32();
        final byte[] bytes4Line = lineStr.getBytes(Utils.UTF8);
        crc.update(bytes4Line, 0, bytes4Line.length);
        fileAccessState.currentLineCrc = crc.getValue();
    } catch (IOException exc) {
        logger().log(OpLevel.ERROR, StreamsResources.getString(StreamsResources.RESOURCE_BUNDLE_NAME,
                "FileStreamStateHandler.file.error"), exc);
    }
}
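
Assuming Utils.UTF8 above is a charset-name String, the checked exception being caught comes from String.getBytes(String) (UnsupportedEncodingException is a subclass of IOException). The overload taking a java.nio.charset.Charset needs no handler; a minimal sketch:

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class LineCrc {
    static long crcOfLine(String line) {
        byte[] bytes4Line = line.getBytes(StandardCharsets.UTF_8); // no checked exception
        CRC32 crc = new CRC32();
        crc.update(bytes4Line, 0, bytes4Line.length);
        return crc.getValue();
    }
}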