Example usage for java.util.zip CRC32 getValue


Introduction

On this page you can find example usage for java.util.zip CRC32 getValue.

Prototype

@Override
public long getValue() 

Document

Returns the CRC-32 value.
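
As a quick reference before the project examples, here is a minimal, self-contained sketch of reading getValue() after one or more update() calls. The class name Crc32GetValueExample and the sample input string are illustrative only and are not taken from the examples below.

import java.nio.charset.StandardCharsets;
import java.util.zip.CRC32;

public class Crc32GetValueExample {
    public static void main(String[] args) {
        CRC32 crc32 = new CRC32();
        // update() can be called repeatedly to feed the checksum more bytes.
        crc32.update("hello world".getBytes(StandardCharsets.UTF_8));
        // getValue() returns the current CRC-32 as an unsigned 32-bit value stored in a long.
        long value = crc32.getValue();
        System.out.println(Long.toHexString(value));
    }
}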

Usage

From source file:com.sastix.cms.server.services.content.impl.HashedDirectoryServiceImpl.java

/**
 * Returns the CRC-32 hash for the input String.
 *
 * @param text a String with the text to hash
 * @return a String with the hexadecimal CRC-32 hash
 */
@Override
public String hashText(final String text) {
    final CRC32 crc32 = new CRC32();
    crc32.reset();
    crc32.update(text.getBytes());
    return Long.toHexString(crc32.getValue());
}

From source file:de.catma.ui.repository.wizard.FileTypePanel.java

private boolean loadSourceDocumentAndContent(SourceDocumentResult sdr) {
    try {
        SourceDocumentHandler sourceDocumentHandler = new SourceDocumentHandler();
        SourceDocument sourceDocument = sourceDocumentHandler.loadSourceDocument(sdr.getSourceDocumentID(),
                sdr.getSourceDocumentInfo());
        sdr.setSourceDocument(sourceDocument);

        TechInfoSet techInfoSet = sdr.getSourceDocumentInfo().getTechInfoSet();
        String documentId = sdr.getSourceDocumentID();

        ProtocolHandler protocolHandler = getProtocolHandlerForUri(techInfoSet.getURI(), documentId,
                techInfoSet.getMimeType());

        byte[] currentByteContent = protocolHandler.getByteContent();

        sourceDocument.getSourceContentHandler().load(new ByteArrayInputStream(currentByteContent));

        FileOSType fileOSType = FileOSType.getFileOSType(sourceDocument.getContent());

        sdr.getSourceDocumentInfo().getTechInfoSet().setFileOSType(fileOSType);
        CRC32 checksum = new CRC32();
        checksum.update(currentByteContent);
        sdr.getSourceDocumentInfo().getTechInfoSet().setChecksum(checksum.getValue());
        return true;
    } catch (Exception e) {
        TechInfoSet techInfoSet = sdr.getSourceDocumentInfo().getTechInfoSet();
        Notification.show(
                "Information",
                "Sorry, CATMA wasn't able to process the file as "
                        + techInfoSet.getFileType()
                        + (techInfoSet.getFileType().isCharsetSupported()
                                ? " with " + ((techInfoSet.getCharset() == null) ? "unknown charset"
                                        : " charset " + techInfoSet.getCharset())
                                : "")
                        + "\n\nThe original error message is: " + e.getLocalizedMessage(),
                Notification.Type.WARNING_MESSAGE);
        return false;
    }
}

From source file:org.apache.hadoop.hdfs.TestRaidDfs.java

public static long createTestFilePartialLastBlock(FileSystem fileSys, Path name, int repl, int numBlocks,
        long blocksize) throws IOException {
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blocksize);
    // Write whole blocks.
    byte[] b = new byte[(int) blocksize];
    for (int i = 1; i < numBlocks; i++) {
        rand.nextBytes(b);
        stm.write(b);
        crc.update(b);
    }
    // Write partial block.
    b = new byte[(int) blocksize / 2 - 1];
    rand.nextBytes(b);
    stm.write(b);
    crc.update(b);

    stm.close();
    return crc.getValue();
}

From source file:ch.cyberduck.core.io.CRC32ChecksumCompute.java

@Override
public Checksum compute(final InputStream in, final TransferStatus status) throws ChecksumException {
    final CRC32 crc32 = new CRC32();
    try {
        byte[] buffer = new byte[16384];
        int bytesRead;
        while ((bytesRead = in.read(buffer, 0, buffer.length)) != -1) {
            crc32.update(buffer, 0, bytesRead);
        }
    } catch (IOException e) {
        throw new ChecksumException(LocaleFactory.localizedString("Checksum failure", "Error"), e.getMessage(),
                e);
    } finally {
        IOUtils.closeQuietly(in);
    }
    return new Checksum(HashAlgorithm.crc32, Long.toHexString(crc32.getValue()));
}

From source file:io.fabric8.maven.generator.springboot.SpringBootGenerator.java

private ZipEntry createZipEntry(File file, String fullPath) throws IOException {
    ZipEntry entry = new ZipEntry(fullPath);

    byte[] buffer = new byte[8192];
    int bytesRead = -1;
    try (InputStream is = new FileInputStream(file)) {
        CRC32 crc = new CRC32();
        int size = 0;
        while ((bytesRead = is.read(buffer)) != -1) {
            crc.update(buffer, 0, bytesRead);
            size += bytesRead;
        }
        entry.setSize(size);
        entry.setCompressedSize(size);
        entry.setCrc(crc.getValue());
        entry.setMethod(ZipEntry.STORED);
        return entry;
    }
}

From source file:org.apache.hadoop.raid.TestRaidDfs.java

private void validateFile(FileSystem fileSys, Path name1, Path name2, long crc) throws IOException {

    FileStatus stat1 = fileSys.getFileStatus(name1);
    FileStatus stat2 = fileSys.getFileStatus(name2);
    assertTrue(" Length of file " + name1 + " is " + stat1.getLen() + " is different from length of file "
            + name1 + " " + stat2.getLen(), stat1.getLen() == stat2.getLen());

    CRC32 newcrc = new CRC32();
    FSDataInputStream stm = fileSys.open(name2);
    final byte[] b = new byte[4192];
    int num = 0;
    while (num >= 0) {
        num = stm.read(b);
        if (num < 0) {
            break;
        }
        newcrc.update(b, 0, num);
    }
    stm.close();
    LOG.info(" Newcrc " + newcrc.getValue() + " old crc " + crc);
    if (newcrc.getValue() != crc) {
        fail("CRC mismatch of files " + name1 + " with file " + name2);
    }
}

From source file:com.netflix.spinnaker.halyard.config.model.v1.node.Node.java

public void stageLocalFiles(Path outputPath) {
    if (!GlobalApplicationOptions.getInstance().isUseRemoteDaemon()) {
        return;
    }
    localFiles().forEach(f -> {
        try {
            f.setAccessible(true);
            String fContent = (String) f.get(this);
            if (fContent != null) {
                CRC32 crc = new CRC32();
                crc.update(fContent.getBytes());
                String fPath = Paths
                        .get(outputPath.toAbsolutePath().toString(), Long.toHexString(crc.getValue()))
                        .toString();
                FileUtils.writeStringToFile(new File(fPath), fContent);
                f.set(this, fPath);
            }
        } catch (IllegalAccessException | IOException e) {
            throw new RuntimeException("Failed to get local files for node " + this.getNodeName(), e);
        } finally {
            f.setAccessible(false);
        }
    });
}

From source file:org.apache.hadoop.raid.SmokeTestThread.java

@Override
public Boolean call() throws Exception {
    Path testPath = null;
    try {
        fileSys = FileSystem.get(distRaidNode.getConf());
        // Create a small file with 3 blocks
        String testFile = testFileBase + rand.nextLong();
        testPath = new Path(testFile);
        if (fileSys.exists(testPath)) {
            fileSys.delete(testPath, true);
        }
        long blockSize = BLOCK_SIZE;
        FSDataOutputStream stm = fileSys.create(testPath, true,
                fileSys.getConf().getInt("io.file.buffer.size", 4096), (short) 3, blockSize);
        // Write 3 blocks.
        byte[] b = new byte[(int) blockSize];
        for (int i = 0; i < NUM_SOURCE_BLOCKS; i++) {
            rand.nextBytes(b);
            stm.write(b);
            checksum.update(b);
        }
        stm.close();
        LOG.info(
                "[SMOKETEST] Created a test file: " + testFile + " with CRC32 checksum " + checksum.getValue());
        PolicyInfo info = new PolicyInfo(testFile, distRaidNode.getConf());
        info.setCodecId(TEST_CODEC);
        info.setSrcPath(testFileDirectory);
        info.setShouldRaid(true);
        info.setProperty("modTimePeriod", "0");
        info.setProperty("targetReplication", "1");
        info.setProperty("metaReplication", "1");
        FileStatus stat = fileSys.getFileStatus(testPath);
        ArrayList<FileStatus> fstats = new ArrayList<FileStatus>();
        fstats.add(stat);
        // Raid it using rs
        DistRaid dr = DistRaidNode.raidFiles(distRaidNode.getConf(), distRaidNode.jobMonitor, fstats, info);
        LOG.info("[SMOKETEST] RS Raid test file: " + testFile);
        if (dr == null) {
            throw new IOException("Failed to sart a raiding job");
        }
        long startTime = System.currentTimeMillis();
        while (!dr.checkComplete() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        if (!dr.checkComplete()) {
            throw new IOException("Failed to finish the raiding job in " + (timeOut / 1000) + " seconds");
        }
        if (!dr.successful()) {
            throw new IOException("Failed to raid the file " + testFile);
        }
        LOG.info("[SMOKETEST] Finish raiding test file: " + testFile);
        // Verify parity file exists
        Codec codec = Codec.getCodec(TEST_CODEC);
        Path parityPath = new Path(codec.getParityPrefix(), RaidNode.makeRelative(testPath));
        FileStatus parityStat = fileSys.getFileStatus(parityPath);
        long numParityBlocks = RaidNode.numBlocks(parityStat);
        long expectedNumParityBlocks = RaidNode.numStripes(NUM_SOURCE_BLOCKS, codec.stripeLength)
                * codec.parityLength;
        if (numParityBlocks != expectedNumParityBlocks
                || parityStat.getLen() != expectedNumParityBlocks * BLOCK_SIZE) {
            throw new IOException("[SMOKETEST] Parity file " + parityPath + " has " + numParityBlocks
                    + " blocks and " + parityStat.getLen() + " bytes, but we expect " + expectedNumParityBlocks
                    + " blocks and " + (expectedNumParityBlocks * BLOCK_SIZE) + " bytes");
        }
        LOG.info("[SMOKETEST] Verification of parity file " + parityPath + " succeeded");
        LocatedBlock[] blocks = new LocatedBlock[1];
        LocatedBlocks lbs = ((DistributedFileSystem) fileSys).getLocatedBlocks(testPath, 0, Integer.MAX_VALUE);
        // Corrupt the first block
        blocks[0] = lbs.get(0);
        ((DistributedFileSystem) fileSys).getClient().reportBadBlocks(blocks);
        LOG.info("[SMOKETEST] Finish corrupting the first block " + lbs.get(0).getBlock());
        // submit a job to "fix" it
        Set<String> jobFiles = new HashSet<String>();
        jobFiles.add(testFile);
        Job job = DistBlockIntegrityMonitor.startOneJob(
                (DistBlockIntegrityMonitor.Worker) distRaidNode.blockIntegrityMonitor.getCorruptionMonitor(),
                Priority.HIGH, jobFiles, System.currentTimeMillis(), new AtomicLong(0),
                new AtomicLong(System.currentTimeMillis()), Integer.MAX_VALUE);
        startTime = System.currentTimeMillis();
        while (!job.isComplete() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        if (!job.isComplete()) {
            throw new IOException("Failed to finish the blockfixing job in " + (timeOut / 1000) + " seconds");
        }
        if (!job.isSuccessful()) {
            throw new IOException("Failed to fix the file " + testFile);
        }
        LOG.info("[SMOKETEST] Finish blockfixing test file: " + testFile);
        // wait until the fixed block is no longer reported as corrupt
        startTime = System.currentTimeMillis();
        while (((DistributedFileSystem) fileSys).getLocatedBlocks(testPath, 0, Integer.MAX_VALUE).get(0)
                .isCorrupt() && System.currentTimeMillis() - startTime < timeOut) {
            Thread.sleep(SLEEP_TIME);
        }
        CRC32 newChk = new CRC32();
        FSDataInputStream readStm = fileSys.open(testPath);
        int num = 0;
        while (num >= 0) {
            num = readStm.read(b);
            if (num < 0) {
                break;
            }
            newChk.update(b, 0, num);
        }
        readStm.close();
        if (newChk.getValue() != checksum.getValue()) {
            throw new IOException(
                    "Fixed file's checksum " + newChk.getValue() + " != original one " + checksum.getValue());
        }
        LOG.info("[SMOKETEST] Verification of fixed test file: " + testFile);
        return true;
    } catch (IOException ex) {
        LOG.error("Get IOException in SmokeTestThread", ex);
        ioe = ex;
        return false;
    } catch (Throwable ex) {
        LOG.error("Get Error in SmokeTestThread", ex);
        ioe = new IOException(ex);
        return false;
    } finally {
        try {
            if (fileSys != null) {
                fileSys.delete(testPath, true);
            }
        } catch (IOException ioe) {
            LOG.error("Get error during deletion", ioe);
        }
    }
}

From source file:uk.ac.cam.cl.dtg.isaac.dos.eventbookings.PgEventBookings.java

/**
 * Acquire a globally unique database lock.
 * This method will block until the lock is released.
 * Any locks must be released manually.
 *
 * @param resourceId - the unique id for the object to be locked.
 */
@Override
public void acquireDistributedLock(final String resourceId) throws SegueDatabaseException {
    // generate a 32-bit CRC based on the table id and resource id so that it is more likely to be unique globally.
    CRC32 crc = new CRC32();
    crc.update((TABLE_NAME + resourceId).getBytes());

    // acquire lock
    try (Connection conn = ds.getDatabaseConnection()) {
        PreparedStatement pst;
        pst = conn.prepareStatement("SELECT pg_advisory_lock(?)");
        pst.setLong(1, crc.getValue());
        log.debug(String.format("Acquiring advisory lock on %s (%s)", TABLE_NAME + resourceId, crc.getValue()));
        pst.executeQuery();
    } catch (SQLException e) {
        String msg = String.format("Unable to acquire lock for event (%s).", resourceId);
        log.error(msg);
        throw new SegueDatabaseException(msg);
    }
    log.debug(String.format("Acquired advisory lock on %s (%s)", TABLE_NAME + resourceId, crc.getValue()));
}

From source file:org.cryptomator.crypto.aes256.Aes256Cryptor.java

private long crc32Sum(byte[] source) {
    final CRC32 crc32 = new CRC32();
    crc32.update(source);
    return crc32.getValue();
}