List of usage examples for java.util.zip.CRC32.getValue()
@Override public long getValue()
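Before the project examples below, a minimal sketch of the basic pattern (class and variable names here are illustrative, not taken from any of the sources that follow): bytes are fed into a CRC32 instance with update(), and getValue() then returns the checksum as an unsigned 32-bit value stored in a long.

    import java.nio.charset.StandardCharsets;
    import java.util.zip.CRC32;

    public class Crc32GetValueExample {
        public static void main(String[] args) {
            CRC32 crc = new CRC32();
            byte[] data = "hello world".getBytes(StandardCharsets.UTF_8);
            // update() may be called repeatedly to checksum data in chunks.
            crc.update(data, 0, data.length);
            // getValue() returns the checksum as an unsigned 32-bit value in a long,
            // so it is always in the range [0, 0xFFFFFFFF]; call reset() before reuse.
            System.out.println(Long.toHexString(crc.getValue()));
        }
    }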
From source file: org.apache.hadoop.raid.Decoder.java

/**
 * Having buffers of the right size is extremely important. If the
 * buffer size is not a divisor of the block size, we may end up reading
 * across block boundaries.
 *
 * If codec's simulateBlockFix is true, we use the old code to fix blocks
 * and verify the new code's result is the same as the old one.
 */
CRC32 fixErasedBlock(FileSystem srcFs, FileStatus srcStat, FileSystem parityFs, Path parityFile,
        boolean fixSource, long blockSize, long errorOffset, long limit, boolean partial, OutputStream out,
        StripeInfo si, Context context, boolean skipVerify) throws IOException, InterruptedException {
    configureBuffers(blockSize);
    Progressable reporter = context;
    if (reporter == null) {
        reporter = RaidUtils.NULL_PROGRESSABLE;
    }
    Path srcFile = srcStat.getPath();
    LOG.info("Code: " + this.codec.id + " simulation: " + this.codec.simulateBlockFix);
    if (this.codec.simulateBlockFix) {
        String oldId = getOldCodeId(srcStat);
        if (oldId == null) {
            // Couldn't find old codec for block fixing, throw exception instead
            throw new IOException("Couldn't find old parity files for " + srcFile
                    + ". Won't reconstruct the block since code " + this.codec.id + " is still under test");
        }
        if (partial) {
            throw new IOException("Couldn't reconstruct the partial data because "
                    + "old decoders don't support it");
        }
        Decoder decoder = (oldId.equals("xor")) ? new XORDecoder(conf) : new ReedSolomonDecoder(conf);
        CRC32 newCRC = null;
        long newLen = 0;
        if (!skipVerify) {
            newCRC = new CRC32();
            newLen = this.fixErasedBlockImpl(srcFs, srcFile, parityFs, parityFile, fixSource, blockSize,
                    errorOffset, limit, partial, null, context, newCRC, null, false, null);
        }
        CRC32 oldCRC = (skipVerify && checksumStore == null) ? null : new CRC32();
        long oldLen = decoder.fixErasedBlockImpl(srcFs, srcFile, parityFs, parityFile, fixSource, blockSize,
                errorOffset, limit, partial, out, context, oldCRC, si, false, null);
        if (!skipVerify) {
            if (newCRC.getValue() != oldCRC.getValue() || newLen != oldLen) {
                LOG.error(" New code " + codec.id + " produces different data from old code " + oldId
                        + " during fixing "
                        + (fixSource ? srcFile.toString() : parityFile.toString())
                        + " (offset=" + errorOffset + ", limit=" + limit + ")"
                        + " checksum:" + newCRC.getValue() + ", " + oldCRC.getValue()
                        + " len:" + newLen + ", " + oldLen);
                LogUtils.logRaidReconstructionMetrics(LOGRESULTS.FAILURE, 0, codec, -1, -1, -1, numReadBytes,
                        numReadBytesRemoteRack, (fixSource ? srcFile : parityFile), errorOffset,
                        LOGTYPES.OFFLINE_RECONSTRUCTION_SIMULATION, (fixSource ? srcFs : parityFs), null,
                        context, -1);
                if (context != null) {
                    context.getCounter(RaidCounter.BLOCK_FIX_SIMULATION_FAILED).increment(1L);
                    // The key includes the file path and simulation failure state
                    String outkey = DistBlockIntegrityMonitor.SIMULATION_FAILED_FILE + ",";
                    if (fixSource) {
                        outkey += srcFile.toUri().getPath();
                    } else {
                        outkey += parityFile.toUri().getPath();
                    }
                    // The value is the task id
                    String outval = context.getConfiguration().get("mapred.task.id");
                    context.write(new Text(outkey), new Text(outval));
                }
            } else {
                LOG.info(" New code " + codec.id + " produces the same data with old code " + oldId
                        + " during fixing "
                        + (fixSource ? srcFile.toString() : parityFile.toString())
                        + " (offset=" + errorOffset + ", limit=" + limit + ")");
                if (context != null) {
                    context.getCounter(RaidCounter.BLOCK_FIX_SIMULATION_SUCCEEDED).increment(1L);
                }
            }
        }
        return oldCRC;
    } else {
        CRC32 crc = null;
        if (checksumStore != null) {
            crc = new CRC32();
        }
        fixErasedBlockImpl(srcFs, srcFile, parityFs, parityFile, fixSource, blockSize, errorOffset, limit,
                partial, out, context, crc, si, false, null);
        return crc;
    }
}
From source file: jef.tools.StringUtils.java

/**
 * Computes the CRC32 checksum of the stream and returns it as a hex string
 * (at most 8 hex digits).
 */
public static String getCRC(InputStream in) {
    CRC32 crc32 = new CRC32();
    byte[] b = new byte[65536];
    int len = 0;
    try {
        while ((len = in.read(b)) != -1) {
            crc32.update(b, 0, len);
        }
        return Long.toHexString(crc32.getValue());
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(in);
    }
}
From source file: com.nridge.core.base.field.data.DataBag.java

/**
 * Convenience method that calculates a unique type id property for
 * the bag based on each field name using a CRC32 algorithm.
 */
public void setTypeIdByNames() {
    CRC32 crc32 = new CRC32();
    crc32.reset();
    if (StringUtils.isNotEmpty(mName))
        crc32.update(mName.getBytes());
    else {
        for (DataField dataField : mFields)
            crc32.update(dataField.getName().getBytes());
    }
    setTypeId(crc32.getValue());
}
From source file: org.openbravo.erpCommon.obps.ActivationKey.java

public String getOpsLogId() {
    CRC32 crc = new CRC32();
    crc.update(getPublicKey().getBytes());
    return Long.toHexString(crc.getValue());
}
From source file: org.apache.isis.objectstore.nosql.db.file.server.FileServer.java

private void syncConnection(final Socket connection, final int readTimeout) {
    try {
        final CRC32 crc32 = new CRC32();
        final DataOutput output = new DataOutputStream(connection.getOutputStream());
        final DataInput input = new DataInputStream(new CheckedInputStream(connection.getInputStream(), crc32));
        if (input.readByte() != INIT) {
            return;
        }
        final LogRange logFileRange = Util.logFileRange();
        final long lastId = logFileRange.noLogFile() ? -1 : logFileRange.getLast();
        output.writeLong(lastId);
        do {
            if (input.readByte() != RECOVERY_LOG) {
                return;
            }
            crc32.reset();
            final long logId = input.readLong();
            final File file = Util.tmpLogFile(logId);
            LOG.info("syncing recovery file: " + file.getName());
            final BufferedOutputStream fileOutput = new BufferedOutputStream(new FileOutputStream(file));
            final byte[] buffer = new byte[8092];
            int length;
            while ((length = input.readInt()) > 0) {
                input.readFully(buffer, 0, length);
                fileOutput.write(buffer, 0, length);
            }
            fileOutput.close();
            final long calculatedChecksum = crc32.getValue();
            final long sentChecksum = input.readLong();
            if (calculatedChecksum != sentChecksum) {
                throw new NoSqlStoreException("Checksum didn't match during download of " + file.getName());
            }
            recover(file);
            final File renameTo = Util.logFile(logId);
            file.renameTo(renameTo);
        } while (true);
    } catch (final NoSqlStoreException e) {
        LOG.error("file server failure", e);
    } catch (final IOException e) {
        LOG.error("networking failure", e);
    } catch (final RuntimeException e) {
        LOG.error("request failure", e);
    } finally {
        try {
            connection.close();
        } catch (final IOException e) {
            LOG.warn("failure to close connection", e);
        }
    }
    // TODO restart
}
From source file: org.exist.xquery.modules.compression.AbstractCompressFunction.java

/**
 * Adds an element to an archive
 *
 * @param os
 *            The Output Stream to add the element to
 * @param element
 *            The element to add to the archive
 * @param useHierarchy
 *            Whether to use a folder hierarchy in the archive file that
 *            reflects the collection hierarchy
 */
private void compressElement(OutputStream os, Element element, boolean useHierarchy, String stripOffset)
        throws XPathException {
    if (!(element.getNodeName().equals("entry") || element.getNamespaceURI().length() > 0))
        throw new XPathException(this, "Item must be type of xs:anyURI or element entry.");
    if (element.getChildNodes().getLength() > 1)
        throw new XPathException(this, "Entry content is not valid XML fragment.");
    String name = element.getAttribute("name");
    // if(name == null)
    //     throw new XPathException(this, "Entry must have name attribute.");
    String type = element.getAttribute("type");
    if ("uri".equals(type)) {
        compressFromUri(os, URI.create(element.getFirstChild().getNodeValue()), useHierarchy, stripOffset,
                element.getAttribute("method"), name);
        return;
    }
    if (useHierarchy) {
        name = removeLeadingOffset(name, stripOffset);
    } else {
        name = name.substring(name.lastIndexOf("/") + 1);
    }
    if ("collection".equals(type))
        name += "/";
    Object entry = null;
    try {
        entry = newEntry(name);
        if (!"collection".equals(type)) {
            byte[] value;
            CRC32 chksum = new CRC32();
            Node content = element.getFirstChild();
            if (content == null) {
                value = new byte[0];
            } else {
                if (content.getNodeType() == Node.TEXT_NODE) {
                    String text = content.getNodeValue();
                    Base64Decoder dec = new Base64Decoder();
                    if ("binary".equals(type)) {
                        // base64 binary
                        dec.translate(text);
                        value = dec.getByteArray();
                    } else {
                        // text
                        value = text.getBytes();
                    }
                } else {
                    // xml
                    Serializer serializer = context.getBroker().getSerializer();
                    serializer.setUser(context.getUser());
                    serializer.setProperty("omit-xml-declaration", "no");
                    getDynamicSerializerOptions(serializer);
                    value = serializer.serialize((NodeValue) content).getBytes();
                }
            }
            if (entry instanceof ZipEntry && "store".equals(element.getAttribute("method"))) {
                ((ZipEntry) entry).setMethod(ZipOutputStream.STORED);
                chksum.update(value);
                ((ZipEntry) entry).setCrc(chksum.getValue());
                ((ZipEntry) entry).setSize(value.length);
            }
            putEntry(os, entry);
            os.write(value);
        }
    } catch (IOException ioe) {
        throw new XPathException(this, ioe.getMessage(), ioe);
    } catch (SAXException saxe) {
        throw new XPathException(this, saxe.getMessage(), saxe);
    } finally {
        if (entry != null)
            try {
                closeEntry(os);
            } catch (IOException ioe) {
                throw new XPathException(this, ioe.getMessage(), ioe);
            }
    }
}
From source file: com.redskyit.scriptDriver.RunTests.java

private boolean compareStrings(String s1, String s2, boolean checksum) {
    if (checksum) {
        CRC32 crc = new CRC32();
        crc.update(s1.getBytes());
        return ("crc32:" + crc.getValue()).equals(s2);
    }
    return s1.equals(s2);
}
From source file: org.talend.core.model.metadata.builder.database.ExtractMetaDataUtils.java

public boolean checkFileCRCCode(File targetFile, File sourceFile) throws Exception {
    // Cyclic Redundancy Check (CRC)
    if (!targetFile.exists() || !sourceFile.exists()) {
        return true;
    }
    FileInputStream tagetFilestream = new FileInputStream(targetFile);
    CRC32 targertCrc32 = new CRC32();
    for (CheckedInputStream checkedinputstream = new CheckedInputStream(tagetFilestream, targertCrc32);
            checkedinputstream.read() != -1;) {
        //
    }
    FileInputStream sourceFilestream = new FileInputStream(sourceFile);
    CRC32 sourceCrc32 = new CRC32();
    for (CheckedInputStream checkedinputstream = new CheckedInputStream(sourceFilestream, sourceCrc32);
            checkedinputstream.read() != -1;) {
        //
    }
    tagetFilestream.close();
    sourceFilestream.close();
    return Long.toHexString(targertCrc32.getValue()).equals(Long.toHexString(sourceCrc32.getValue()));
}
From source file: org.apache.isis.objectstore.nosql.db.file.server.FileServer.java

private void startSyncing() {
    final String syncHost = config.getString("fileserver.sync-host", DEFAULT_HOST);
    final int syncPort = config.getInt("fileserver.sync-port", DEFAULT_SYNC_PORT);
    final int connectionTimeout = config.getInt("fileserver.connection.timeout", 5000);
    LOG.info("preparing to sync to secondary server on " + syncHost + " port " + syncPort);
    final InetAddress address;
    try {
        address = InetAddress.getByName(syncHost);
    } catch (final UnknownHostException e) {
        LOG.error("Unknown host " + syncHost, e);
        System.exit(0);
        return;
    }
    while (awaitConnections) {
        Socket socket = null;
        try {
            socket = new Socket(address, syncPort);
            LOG.info("sync connected to " + socket.getInetAddress().getHostAddress() + " port "
                    + socket.getLocalPort());
            final CRC32 crc32 = new CRC32();
            final DataOutput output = new DataOutputStream(
                    new CheckedOutputStream(socket.getOutputStream(), crc32));
            final DataInput input = new DataInputStream(socket.getInputStream());
            output.writeByte(INIT);
            long logId = input.readLong();
            do {
                final long nextLogId = logId + 1;
                final File file = Util.logFile(nextLogId);
                if (file.exists() && server.getLogger().isWritten(nextLogId)) {
                    logId++;
                    output.writeByte(RECOVERY_LOG);
                    crc32.reset();
                    output.writeLong(logId);
                    LOG.info("sending recovery file: " + file.getName());
                    final BufferedInputStream fileInput = new BufferedInputStream(new FileInputStream(file));
                    final byte[] buffer = new byte[8092];
                    int read;
                    while ((read = fileInput.read(buffer)) > 0) {
                        output.writeInt(read);
                        output.write(buffer, 0, read);
                    }
                    output.writeInt(0);
                    output.writeLong(crc32.getValue());
                }
                try {
                    Thread.sleep(300);
                } catch (final InterruptedException ignore) {
                }
                while (isQuiescent) {
                    try {
                        Thread.sleep(300);
                    } catch (final InterruptedException ignore) {
                    }
                }
            } while (awaitConnections);
        } catch (final ConnectException e) {
            LOG.warn("not yet connected to secondary server at " + syncHost + " port " + syncPort);
            try {
                Thread.sleep(connectionTimeout);
            } catch (final InterruptedException ignore) {
            }
        } catch (final IOException e) {
            LOG.error("start failure - networking not set up for " + syncHost, e);
            try {
                Thread.sleep(300);
            } catch (final InterruptedException ignore) {
            }
        } catch (final RuntimeException e) {
            LOG.error("start failure", e);
            try {
                Thread.sleep(300);
            } catch (final InterruptedException ignore) {
            }
        }
    }
}
From source file: org.apache.hadoop.raid.TestDirectoryRaidEncoder.java

private long createDirectoryFile(FileSystem fileSys, Path name, int repl, long[] fileSizes, long[] blockSizes,
        int[] seeds, long blockSize) throws IOException {
    CRC32 crc = new CRC32();
    assert fileSizes.length == blockSizes.length;
    assert fileSizes.length == seeds.length;
    FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
            (short) repl, blockSize);
    byte[] zeros = new byte[(int) (blockSize)];
    for (int j = 0; j < zeros.length; j++) {
        zeros[j] = 0;
    }
    // fill random data into file
    for (int i = 0; i < fileSizes.length; i++) {
        assert blockSizes[i] <= blockSize;
        byte[] b = new byte[(int) blockSizes[i]];
        long numBlocks = fileSizes[i] / blockSizes[i];
        Random rand = new Random(seeds[i]);
        for (int j = 0; j < numBlocks; j++) {
            rand.nextBytes(b);
            stm.write(b);
            crc.update(b);
            int zeroLen = (int) (blockSize - blockSizes[i]);
            stm.write(zeros, 0, zeroLen);
            crc.update(zeros, 0, zeroLen);
        }
        long lastBlock = fileSizes[i] - numBlocks * blockSizes[i];
        if (lastBlock > 0) {
            b = new byte[(int) lastBlock];
            rand.nextBytes(b);
            stm.write(b);
            crc.update(b);
            if (i + 1 < fileSizes.length) {
                // Not last block of file, write zero
                int zeroLen = (int) (blockSize - lastBlock);
                stm.write(zeros, 0, zeroLen);
                crc.update(zeros, 0, zeroLen);
            }
        }
    }
    stm.close();
    return crc.getValue();
}