Example usage for java.util.zip CRC32 update

List of usage examples for java.util.zip CRC32 update

Introduction

This page lists example usages of the java.util.zip CRC32 update method.

Prototype

@Override
public void update(ByteBuffer buffer) 

Document

Updates the CRC-32 checksum with the bytes from the specified buffer.
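
For orientation, here is a minimal, self-contained sketch of this overload (the class name and the "data.bin" path are placeholders for illustration). It reads a file through a FileChannel into a ByteBuffer and passes each filled chunk to update(ByteBuffer), which consumes the bytes between the buffer's position and limit and leaves the position at the limit.

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.zip.CRC32;

public class Crc32ByteBufferSketch {

    public static long checksum(Path path) throws IOException {
        CRC32 crc = new CRC32();
        ByteBuffer buffer = ByteBuffer.allocate(8192);
        try (FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) {
            while (channel.read(buffer) != -1) {
                buffer.flip();      // expose the bytes just read
                crc.update(buffer); // consumes position..limit and advances the position to the limit
                buffer.clear();     // ready the buffer for the next read
            }
        }
        return crc.getValue();
    }

    public static void main(String[] args) throws IOException {
        // "data.bin" is a placeholder file name
        System.out.println(Long.toHexString(checksum(Paths.get("data.bin"))));
    }
}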

Usage

From source file:com.taobao.android.builder.tools.zip.ZipUtils.java

public static void rezip(File output, File srcDir, Map<String, ZipEntry> zipEntryMethodMap) throws Exception {
    if (output.isDirectory()) {
        throw new IOException("This is a directory!");
    }
    if (!output.getParentFile().exists()) {
        output.getParentFile().mkdirs();
    }

    if (!output.exists()) {
        output.createNewFile();
    }
    List fileList = getSubFiles(srcDir);
    ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(output));
    ZipEntry ze = null;
    byte[] buf = new byte[1024];
    int readLen = 0;
    for (int i = 0; i < fileList.size(); i++) {
        File f = (File) fileList.get(i);
        ze = new ZipEntry(getAbsFileName(srcDir.getPath(), f));
        ze.setSize(f.length());
        ze.setTime(f.lastModified());
        if (zipEntryMethodMap != null) {
            ZipEntry originEntry = zipEntryMethodMap.get(ze.getName());
            if (originEntry != null) {
                if (originEntry.getMethod() == STORED) {
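                    // STORED entries must have the size, compressed size, and CRC-32 set before putNextEntry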
                    ze.setCompressedSize(f.length());
                    InputStream in = new BufferedInputStream(new FileInputStream(f));
                    try {
                        CRC32 crc = new CRC32();
                        int c;
                        while ((c = in.read()) != -1) {
                            crc.update(c);
                        }
                        ze.setCrc(crc.getValue());
                    } finally {
                        in.close();
                    }
                }
                ze.setMethod(originEntry.getMethod());
            }
        }
        zos.putNextEntry(ze);
        InputStream is = new BufferedInputStream(new FileInputStream(f));
        while ((readLen = is.read(buf, 0, 1024)) != -1) {
            zos.write(buf, 0, readLen);
        }
        is.close();
    }
    zos.close();
}

From source file:org.apache.hadoop.raid.TestStatisticsCollector.java

private static long createFile(FileSystem fileSys, Path name, int repl, int numBlocks, long blocksize)
        throws IOException {
    CRC32 crc = new CRC32();
    int bufSize = fileSys.getConf().getInt("io.file.buffer.size", 4096);
    FSDataOutputStream stm = fileSys.create(name, true, bufSize, (short) repl, blocksize);
    // fill random data into file
    byte[] b = new byte[(int) blocksize];
    for (int i = 0; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    stm.close();
    return crc.getValue();
}

From source file:io.blobkeeper.file.util.FileUtils.java

public static long getCrc(@NotNull File file) {
    CRC32 crc = new CRC32();

    while (true) {
        ByteBuffer buffer = ByteBuffer.allocate(CHUNK_SIZE);
        while (buffer.hasRemaining()) {
            int bytes = 0;
            try {
                bytes = file.getFileChannel().read(buffer);
            } catch (IOException e) {
                log.error("Can't read blob file " + file, e);
                throw new IllegalArgumentException(e);
            }
            if (bytes < 0) {
                break;
            }
        }
        buffer.flip();
        if (buffer.remaining() == 0) {
            break;
        } else {
            crc.update(buffer); // checksum only the bytes read (position..limit); buffer.array() would also include unread trailing bytes
        }
    }

    return crc.getValue();
}

From source file:org.apache.hadoop.raid.TestBlockCopier.java

static void validateFileCopy(FileSystem fs, Path path, long size, long[] blockCrcs, boolean twiceThrough)
        throws IOException {

    final int timesThrough = (twiceThrough ? 2 : 1);
    final int numBlocks = (int) Math.ceil((double) size / BLOCK_SIZE);

    // Check all the blocks timesThrough times
    FSDataInputStream in = fs.open(path);
    CRC32 crc = new CRC32();

    for (int i = 0; i < timesThrough; i++) {
        for (int b = 0; b < numBlocks; b++) {
            int chunkSize = (int) Math.min(BLOCK_SIZE, (size - (b * BLOCK_SIZE)));
            byte[] buf = new byte[chunkSize];

        in.readFully(buf); // read the full chunk; a plain read(buf) may return fewer bytes than requested
            crc.reset();
            crc.update(buf);

            assertEquals(("Block crc " + b + " did not match on iteration " + i), blockCrcs[b], crc.getValue());
        }
        assert in.getPos() == size : "Did not read to end of file";
        if (i < (timesThrough - 1)) {
            in.seekToNewSource(0);
        }
    }
}

From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java

/**
 * Scans through the tar file, looking for all segment entries.
 *
 * @throws IOException if the tar file could not be read
 */
private static void recoverEntries(File file, RandomAccessFile access, LinkedHashMap<UUID, byte[]> entries)
        throws IOException {
    byte[] header = new byte[BLOCK_SIZE];
    while (access.getFilePointer() + BLOCK_SIZE <= access.length()) {
        // read the tar header block
        access.readFully(header);

        // compute the header checksum
        int sum = 0;
        for (int i = 0; i < BLOCK_SIZE; i++) {
            sum += header[i] & 0xff;
        }

        // identify possible zero block
        if (sum == 0 && access.getFilePointer() + 2 * BLOCK_SIZE == access.length()) {
            return; // found the zero blocks at the end of the file
        }

        // replace the actual stored checksum with spaces for comparison
        for (int i = 148; i < 148 + 8; i++) {
            sum -= header[i] & 0xff;
            sum += ' ';
        }

        byte[] checkbytes = String.format("%06o\0 ", sum).getBytes(UTF_8);
        for (int i = 0; i < checkbytes.length; i++) {
            if (checkbytes[i] != header[148 + i]) {
                log.warn("Invalid entry checksum at offset {} in tar file {}, skipping...",
                        access.getFilePointer() - BLOCK_SIZE, file);
            }
        }

        // The header checksum passes, so read the entry name and size
        ByteBuffer buffer = ByteBuffer.wrap(header);
        String name = readString(buffer, 100);
        buffer.position(124);
        int size = readNumber(buffer, 12);
        if (access.getFilePointer() + size > access.length()) {
            // checksum was correct, so the size field should be accurate
            log.warn("Partial entry {} in tar file {}, ignoring...", name, file);
            return;
        }

        Matcher matcher = NAME_PATTERN.matcher(name);
        if (matcher.matches()) {
            UUID id = UUID.fromString(matcher.group(1));

            String checksum = matcher.group(3);
            if (checksum != null || !entries.containsKey(id)) {
                byte[] data = new byte[size];
                access.readFully(data);

                // skip possible padding to stay at block boundaries
                long position = access.getFilePointer();
                long remainder = position % BLOCK_SIZE;
                if (remainder != 0) {
                    access.seek(position + (BLOCK_SIZE - remainder));
                }

                if (checksum != null) {
                    CRC32 crc = new CRC32();
                    crc.update(data);
                    if (crc.getValue() != Long.parseLong(checksum, 16)) {
                        log.warn("Checksum mismatch in entry {} of tar file {}, skipping...", name, file);
                        continue;
                    }
                }

                entries.put(id, data);
            }
        } else if (!name.equals(file.getName() + ".idx")) {
            log.warn("Unexpected entry {} in tar file {}, skipping...", name, file);
            long position = access.getFilePointer() + size;
            long remainder = position % BLOCK_SIZE;
            if (remainder != 0) {
                position += BLOCK_SIZE - remainder;
            }
            access.seek(position);
        }
    }
}

From source file:org.apache.jackrabbit.oak.segment.file.TarReader.java

/**
 * Scans through the tar file, looking for all segment entries.
 *
 * @throws IOException if the tar file could not be read
 */
private static void recoverEntries(File file, RandomAccessFile access, LinkedHashMap<UUID, byte[]> entries)
        throws IOException {
    byte[] header = new byte[BLOCK_SIZE];
    while (access.getFilePointer() + BLOCK_SIZE <= access.length()) {
        // read the tar header block
        access.readFully(header);

        // compute the header checksum
        int sum = 0;
        for (int i = 0; i < BLOCK_SIZE; i++) {
            sum += header[i] & 0xff;
        }

        // identify possible zero block
        if (sum == 0 && access.getFilePointer() + 2 * BLOCK_SIZE == access.length()) {
            return; // found the zero blocks at the end of the file
        }

        // replace the actual stored checksum with spaces for comparison
        for (int i = 148; i < 148 + 8; i++) {
            sum -= header[i] & 0xff;
            sum += ' ';
        }

        byte[] checkbytes = String.format("%06o\0 ", sum).getBytes(UTF_8);
        for (int i = 0; i < checkbytes.length; i++) {
            if (checkbytes[i] != header[148 + i]) {
                log.warn("Invalid entry checksum at offset {} in tar file {}, skipping...",
                        access.getFilePointer() - BLOCK_SIZE, file);
            }
        }

        // The header checksum passes, so read the entry name and size
        ByteBuffer buffer = wrap(header);
        String name = readString(buffer, 100);
        buffer.position(124);
        int size = readNumber(buffer, 12);
        if (access.getFilePointer() + size > access.length()) {
            // checksum was correct, so the size field should be accurate
            log.warn("Partial entry {} in tar file {}, ignoring...", name, file);
            return;
        }

        Matcher matcher = NAME_PATTERN.matcher(name);
        if (matcher.matches()) {
            UUID id = UUID.fromString(matcher.group(1));

            String checksum = matcher.group(3);
            if (checksum != null || !entries.containsKey(id)) {
                byte[] data = new byte[size];
                access.readFully(data);

                // skip possible padding to stay at block boundaries
                long position = access.getFilePointer();
                long remainder = position % BLOCK_SIZE;
                if (remainder != 0) {
                    access.seek(position + (BLOCK_SIZE - remainder));
                }

                if (checksum != null) {
                    CRC32 crc = new CRC32();
                    crc.update(data);
                    if (crc.getValue() != Long.parseLong(checksum, 16)) {
                        log.warn("Checksum mismatch in entry {} of tar file {}, skipping...", name, file);
                        continue;
                    }
                }

                entries.put(id, data);
            }
        } else if (!name.equals(file.getName() + ".idx")) {
            log.warn("Unexpected entry {} in tar file {}, skipping...", name, file);
            long position = access.getFilePointer() + size;
            long remainder = position % BLOCK_SIZE;
            if (remainder != 0) {
                position += BLOCK_SIZE - remainder;
            }
            access.seek(position);
        }
    }
}

From source file:com.espertech.esper.core.context.mgr.ContextControllerHashedGetterCRC32Serialized.java

public Object get(EventBean eventBean) throws PropertyAccessException {
    EventBean[] events = new EventBean[] { eventBean };

    Object[] parameters = new Object[evaluators.length];
    for (int i = 0; i < serializers.length; i++) {
        parameters[i] = evaluators[i].evaluate(events, true, null);
    }

    byte[] bytes;
    try {
        bytes = SerializerFactory.serialize(serializers, parameters);
    } catch (IOException e) {
        log.error("Exception serializing parameters for computing consistent hash for statement '"
                + statementName + "': " + e.getMessage(), e);
        bytes = new byte[0];
    }

    CRC32 crc = new CRC32();
    crc.update(bytes);
    long value = crc.getValue() % granularity;

    int result = (int) value;
    if (result >= 0) {
        return result;
    }
    return -result;
}

From source file:com.googlecode.flyway.core.migration.sql.SqlMigration.java

/**
 * Calculates the checksum of this sql script.
 *
 * @return The crc-32 checksum of the script.
 */
private int calculateChecksum(String sql) {
    final CRC32 crc32 = new CRC32();
    crc32.update(sql.getBytes());
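    // getValue() returns the CRC-32 in the low 32 bits of a long, so the int cast keeps the full checksum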
    return (int) crc32.getValue();
}

From source file:eionet.webq.service.RemoteFileServiceImpl.java

/**
 * Calculates crc32 checksum.
 *
 * @param bytes bytes to calculate checksum.
 * @return checksum
 * @see java.util.zip.CRC32
 */
private long crc32Checksum(byte[] bytes) {
    CRC32 crc32 = new CRC32();
    crc32.update(bytes);
    return crc32.getValue();
}

From source file:com.eventattend.portal.bl.FaceBookBL.java

public static String generateEmailHash(String email) {
    email = email.trim().toLowerCase();
    CRC32 crc = new CRC32();
    crc.update(email.getBytes());
    String md5 = MD5(email);
    return crc.getValue() + "_" + md5;
}