List of usage examples for java.io.FileInputStream.getChannel()
public FileChannel getChannel()
From source file:org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.MappableBlock.java
/** * Verifies the block's checksum. This is an I/O intensive operation. * @return if the block was successfully checksummed. *///from www. j a v a2 s. c om private static void verifyChecksum(long length, FileInputStream metaIn, FileChannel blockChannel, String blockFileName) throws IOException, ChecksumException { // Verify the checksum from the block's meta file // Get the DataChecksum from the meta file header BlockMetadataHeader header = BlockMetadataHeader.readHeader( new DataInputStream(new BufferedInputStream(metaIn, BlockMetadataHeader.getHeaderSize()))); FileChannel metaChannel = null; try { metaChannel = metaIn.getChannel(); if (metaChannel == null) { throw new IOException("Block InputStream meta file has no FileChannel."); } DataChecksum checksum = header.getChecksum(); final int bytesPerChecksum = checksum.getBytesPerChecksum(); final int checksumSize = checksum.getChecksumSize(); final int numChunks = (8 * 1024 * 1024) / bytesPerChecksum; ByteBuffer blockBuf = ByteBuffer.allocate(numChunks * bytesPerChecksum); ByteBuffer checksumBuf = ByteBuffer.allocate(numChunks * checksumSize); // Verify the checksum int bytesVerified = 0; while (bytesVerified < length) { Preconditions.checkState(bytesVerified % bytesPerChecksum == 0, "Unexpected partial chunk before EOF"); assert bytesVerified % bytesPerChecksum == 0; int bytesRead = fillBuffer(blockChannel, blockBuf); if (bytesRead == -1) { throw new IOException("checksum verification failed: premature EOF"); } blockBuf.flip(); // Number of read chunks, including partial chunk at end int chunks = (bytesRead + bytesPerChecksum - 1) / bytesPerChecksum; checksumBuf.limit(chunks * checksumSize); fillBuffer(metaChannel, checksumBuf); checksumBuf.flip(); checksum.verifyChunkedSums(blockBuf, checksumBuf, blockFileName, bytesVerified); // Success bytesVerified += bytesRead; blockBuf.clear(); checksumBuf.clear(); } } finally { IOUtils.closeQuietly(metaChannel); } }
From source file:org.yamj.core.service.file.tools.FileTools.java
/** * Copy the source file to the destination * * @param src//from ww w . j a v a2 s .co m * @param dst * @return */ public static boolean copyFile(File src, File dst) { boolean returnValue = Boolean.FALSE; if (!src.exists()) { LOG.error("The file '{}' does not exist", src); return returnValue; } if (dst.isDirectory()) { makeDirectories(dst); returnValue = copyFile(src, new File(dst + File.separator + src.getName())); } else { FileInputStream inSource = null; FileOutputStream outSource = null; FileChannel inChannel = null; FileChannel outChannel = null; try { // gc: copy using file channels, potentially much faster inSource = new FileInputStream(src); outSource = new FileOutputStream(dst); inChannel = inSource.getChannel(); outChannel = outSource.getChannel(); long p = 0, s = inChannel.size(); while (p < s) { p += inChannel.transferTo(p, 1024 * 1024, outChannel); } return Boolean.TRUE; } catch (IOException error) { LOG.error("Failed copying file '{}' to '{}'", src, dst); LOG.error("File copying error", error); returnValue = Boolean.FALSE; } finally { if (inChannel != null) { try { inChannel.close(); } catch (IOException ex) { // Ignore } } if (inSource != null) { try { inSource.close(); } catch (IOException ex) { // Ignore } } if (outChannel != null) { try { outChannel.close(); } catch (IOException ex) { // Ignore } } if (outSource != null) { try { outSource.close(); } catch (IOException ex) { // Ignore } } } } return returnValue; }
From source file:com.igormaznitsa.jcp.utils.PreprocessorUtils.java
/**
 * Copies a regular file to a destination path, creating parent directories
 * as needed.
 *
 * @param source             the file to copy, must not be null
 * @param dest               the destination file, must not be null
 * @param copyFileAttributes if true, also copy the source's file attributes
 * @throws IOException              if directories can't be created or I/O fails
 * @throws IllegalArgumentException if the source is a directory
 */
public static void copyFile(@Nonnull final File source, @Nonnull final File dest,
        final boolean copyFileAttributes) throws IOException {
    assertNotNull("Source is null", source);
    assertNotNull("Destination file is null", dest);
    if (source.isDirectory()) {
        throw new IllegalArgumentException("Source file is directory");
    }
    if (!dest.getParentFile().exists() && !dest.getParentFile().mkdirs()) {
        throw new IOException("Can't make directory [" + getFilePath(dest.getParentFile()) + ']');
    }
    // FIX: try-with-resources instead of closeQuietly — the original silently
    // discarded failures while closing the output stream/channel, which can
    // hide data that never reached disk. Close failures now surface through
    // the already-declared IOException.
    try (FileInputStream fileSrcInput = new FileInputStream(source);
            FileChannel fileSrc = fileSrcInput.getChannel();
            FileOutputStream fileOutput = new FileOutputStream(dest);
            FileChannel fileDst = fileOutput.getChannel()) {
        long size = fileSrc.size();
        long pos = 0L;
        // transferTo may move fewer bytes than requested; loop to completion.
        while (size > 0) {
            final long written = fileSrc.transferTo(pos, size, fileDst);
            pos += written;
            size -= written;
        }
    }
    if (copyFileAttributes) {
        copyFileAttributes(source, dest);
    }
}
From source file:maspack.fileutil.SafeFileUtils.java
/**
 * Internal copy file method.
 *
 * @param srcFile
 *            the validated source file, must not be {@code null}
 * @param destFile
 *            the validated destination file, must not be {@code null}
 * @param options
 *            bit flags: LOCK_FILE creates a persistent lock file for the
 *            duration of the copy, CLEAN_LOCK allows overwriting a stale
 *            lock, PRESERVE_DATE copies the last-modified time
 * @throws IOException
 *             if an error occurs, the destination is a directory, the copy
 *             is incomplete, or a lock file blocks the write
 */
private static void doCopyFile(File srcFile, File destFile, int options) throws IOException {
    if (destFile.exists() && destFile.isDirectory()) {
        throw new IOException("Destination '" + destFile + "' exists but is a directory");
    }
    // Lock file lives next to the destination with LOCK_EXTENSION appended.
    File lockFile = new File(destFile.getAbsolutePath() + LOCK_EXTENSION);
    FileInputStream fis = null;
    FileOutputStream fos = null;
    FileChannel input = null;
    FileChannel output = null;
    try {
        fis = new FileInputStream(srcFile);
        fos = new FileOutputStream(destFile);
        input = fis.getChannel();
        output = fos.getChannel();
        long size = input.size();
        long pos = 0;
        long count = 0;
        // Create lock before starting transfer.
        // NOTE: we are purposely not using the Java NIO FileLock, because that
        // is automatically removed when the JVM exits. We want this file to
        // persist to inform the system the transfer was never completed.
        if ((options & LOCK_FILE) != 0) {
            if (lockFile.exists()) {
                // if we are not cleaning old locks, throw error
                if ((options & CLEAN_LOCK) == 0) {
                    throw new IOException(
                            "Lock file exists, preventing a write to " + destFile.getAbsolutePath()
                                    + ". Delete " + lockFile.getName() + " or set the CLEAN_LOCK option flag");
                }
            } else {
                lockFile.createNewFile(); // will always return true or throw
                // error
            }
        }
        // Copy in FILE_COPY_BUFFER_SIZE slices; transferFrom may move fewer
        // bytes than requested, so advance by what it actually transferred.
        while (pos < size) {
            count = size - pos > FILE_COPY_BUFFER_SIZE ? FILE_COPY_BUFFER_SIZE : size - pos;
            pos += output.transferFrom(input, pos, count);
        }
    } finally {
        // Channels before their owning streams.
        closeQuietly(output);
        closeQuietly(fos);
        closeQuietly(input);
        closeQuietly(fis);
    }
    // Length check catches silently truncated copies.
    if (srcFile.length() != destFile.length()) {
        throw new IOException("Failed to copy full contents from '" + srcFile + "' to '" + destFile + "'");
    }
    if ((options & PRESERVE_DATE) != 0) {
        destFile.setLastModified(srcFile.lastModified());
    }
    // successful copy, delete lock file
    deleteQuietly(lockFile);
}
From source file:com.arc.embeddedcdt.gui.jtag.ConfigJTAGTab.java
public static byte[] read(File file) { try {//from w ww . java 2s.c o m FileInputStream fis = new FileInputStream(file); FileChannel fc = fis.getChannel(); byte[] data = new byte[(int) fc.size()]; // fc.size returns the // size of the file // which backs the // channel ByteBuffer bb = ByteBuffer.wrap(data); fc.read(bb); return data; } catch (Exception e) { throw new RuntimeException(e); } }
From source file:pt.ubi.di.pdm.swipe.CollectionDemoActivity.java
/**
 * Reads the entire contents of a private application file into a String.
 * Each byte is widened to a char exactly as the original did, so decoding
 * behavior is unchanged.
 *
 * @param filename name of the file, resolved via {@code ctx.openFileInput}
 * @param ctx      the Android context used to open the file
 * @return the file contents, or "" if the file could not be read
 */
public static String readFromFile(String filename, Context ctx) {
    FileInputStream fis = null;
    String str = "";
    try {
        fis = ctx.openFileInput(filename);
        // Size the buffer from the channel; assumes the file fits in memory.
        byte[] buffer = new byte[(int) fis.getChannel().size()];
        // FIX: a single read() is not guaranteed to fill the buffer; loop
        // until the whole file is read (or EOF is hit unexpectedly).
        int off = 0;
        while (off < buffer.length) {
            int n = fis.read(buffer, off, buffer.length - off);
            if (n < 0) {
                break; // unexpected EOF: keep what we have
            }
            off += n;
        }
        // FIX: O(n^2) string concatenation replaced with StringBuilder; the
        // original per-byte (char) cast is kept so the result is identical.
        StringBuilder sb = new StringBuilder(off);
        for (int i = 0; i < off; i++) {
            sb.append((char) buffer[i]);
        }
        str = sb.toString();
        Log.i("TAG", String.format("GOT: [%s]", str));
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        try {
            if (fis != null) {
                fis.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // FIX: return moved out of finally — a return inside a finally block
    // silently discards any exception propagating from the try block.
    return str;
}
From source file:org.apache.hadoop.hdfs.server.datanode.TestFsDatasetCache.java
/**
 * Returns the on-disk size of each block referenced by the given locations,
 * measured through the dataset's block input stream channel.
 */
private static long[] getBlockSizes(HdfsBlockLocation[] locs) throws Exception {
    final long[] result = new long[locs.length];
    int idx = 0;
    for (HdfsBlockLocation location : locs) {
        // Rebuild the ExtendedBlock from the location's pool id and local block.
        ExtendedBlock extBlock = new ExtendedBlock(
                location.getLocatedBlock().getBlock().getBlockPoolId(),
                location.getLocatedBlock().getBlock().getLocalBlock());
        FileInputStream stream = null;
        FileChannel channel = null;
        try {
            stream = (FileInputStream) fsd.getBlockInputStream(extBlock, 0);
            channel = stream.getChannel();
            result[idx++] = channel.size();
        } finally {
            // Close channel then stream, swallowing errors as before.
            IOUtils.cleanup(LOG, channel, stream);
        }
    }
    return result;
}
From source file:com.hipu.bdb.util.FileUtils.java
/** * Retrieve a number of lines from the file around the given * position, as when paging forward or backward through a file. * // ww w. ja v a 2 s . co m * @param file File to retrieve lines * @param position offset to anchor lines * @param signedDesiredLineCount lines requested; if negative, * want this number of lines ending with a line containing * the position; if positive, want this number of lines, * all starting at or after position. * @param lines List<String> to insert found lines * @param lineEstimate int estimate of line size, 0 means use default * of 128 * @return LongRange indicating the file offsets corresponding to * the beginning of the first line returned, and the point * after the end of the last line returned * @throws IOException */ @SuppressWarnings("unchecked") public static LongRange pagedLines(File file, long position, int signedDesiredLineCount, List<String> lines, int lineEstimate) throws IOException { // consider negative positions as from end of file; -1 = last byte if (position < 0) { position = file.length() + position; } // calculate a reasonably sized chunk likely to have all desired lines if (lineEstimate == 0) { lineEstimate = 128; } int desiredLineCount = Math.abs(signedDesiredLineCount); long startPosition; long fileEnd = file.length(); int bufferSize = (desiredLineCount + 5) * lineEstimate; if (signedDesiredLineCount > 0) { // reading forward; include previous char in case line-end startPosition = position - 1; } else { // reading backward startPosition = position - bufferSize + (2 * lineEstimate); } if (startPosition < 0) { startPosition = 0; } if (startPosition + bufferSize > fileEnd) { bufferSize = (int) (fileEnd - startPosition); } // read that reasonable chunk FileInputStream fis = new FileInputStream(file); fis.getChannel().position(startPosition); byte[] buf = new byte[bufferSize]; IOUtils.closeQuietly(fis); // find all line starts fully in buffer // (positions after a line-end, per line-end definition in // 
BufferedReader.readLine) LinkedList<Integer> lineStarts = new LinkedList<Integer>(); if (startPosition == 0) { lineStarts.add(0); } boolean atLineEnd = false; boolean eatLF = false; int i; for (i = 0; i < bufferSize; i++) { if ((char) buf[i] == '\n' && eatLF) { eatLF = false; continue; } if (atLineEnd) { atLineEnd = false; lineStarts.add(i); if (signedDesiredLineCount < 0 && startPosition + i > position) { // reached next line past position, read no more break; } } if ((char) buf[i] == '\r') { atLineEnd = true; eatLF = true; continue; } if ((char) buf[i] == '\n') { atLineEnd = true; } } if (startPosition + i == fileEnd) { // add phantom lineStart after end lineStarts.add(bufferSize); } int foundFullLines = lineStarts.size() - 1; // if found no lines if (foundFullLines < 1) { if (signedDesiredLineCount > 0) { if (startPosition + bufferSize == fileEnd) { // nothing more to read: return nothing return new LongRange(fileEnd, fileEnd); } else { // retry with larger lineEstimate return pagedLines(file, position, signedDesiredLineCount, lines, Math.max(bufferSize, lineEstimate)); } } else { // try again with much larger line estimate // TODO: fail gracefully before growing to multi-MB buffers return pagedLines(file, position, signedDesiredLineCount, lines, bufferSize); } } // trim unneeded lines while (signedDesiredLineCount > 0 && startPosition + lineStarts.getFirst() < position) { // discard lines starting before desired position lineStarts.removeFirst(); } while (lineStarts.size() > desiredLineCount + 1) { if (signedDesiredLineCount < 0 && (startPosition + lineStarts.get(1) <= position)) { // discard from front until reach line containing target position lineStarts.removeFirst(); } else { lineStarts.removeLast(); } } int firstLine = lineStarts.getFirst(); int partialLine = lineStarts.getLast(); LongRange range = new LongRange(startPosition + firstLine, startPosition + partialLine); List<String> foundLines = IOUtils .readLines(new ByteArrayInputStream(buf, firstLine, 
partialLine - firstLine)); if (foundFullLines < desiredLineCount && signedDesiredLineCount < 0 && startPosition > 0) { // if needed and reading backward, read more lines from earlier range = expandRange(range, pagedLines(file, range.getMinimumLong() - 1, signedDesiredLineCount + foundFullLines, lines, bufferSize / foundFullLines)); } lines.addAll(foundLines); if (signedDesiredLineCount < 0 && range.getMaximumLong() < position) { // did not get line containining start position range = expandRange(range, pagedLines(file, partialLine, 1, lines, bufferSize / foundFullLines)); } if (signedDesiredLineCount > 0 && foundFullLines < desiredLineCount && range.getMaximumLong() < fileEnd) { // need more forward lines range = expandRange(range, pagedLines(file, range.getMaximumLong(), desiredLineCount - foundFullLines, lines, bufferSize / foundFullLines)); } return range; }
From source file:com.mods.grx.settings.utils.Utils.java
/**
 * Copies ori_file to dest_file using NIO file channels. I/O errors are
 * printed and otherwise swallowed, matching the original contract.
 *
 * @param ori_file  the source file
 * @param dest_file the destination file (overwritten if it exists)
 */
public static void file_copy(File ori_file, File dest_file) {
    // FIX: try-with-resources — the original leaked both streams whenever
    // the copy threw, and never closed the channels explicitly.
    try (FileInputStream i_s = new FileInputStream(ori_file);
            FileOutputStream o_s = new FileOutputStream(dest_file);
            FileChannel inChannel = i_s.getChannel();
            FileChannel outChannel = o_s.getChannel()) {
        // FIX: transferTo may transfer fewer bytes than requested; loop
        // until the whole file has been copied.
        long size = inChannel.size();
        long pos = 0;
        while (pos < size) {
            pos += inChannel.transferTo(pos, size - pos, outChannel);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:org.solmix.commons.util.Files.java
/**
 * Computes the hex-encoded MD5 digest of a file's contents.
 *
 * @param file the file to digest
 * @return the 32-character lowercase hex MD5, or null if the file could
 *         not be read
 */
public static String getMd5ByFile(File file) {
    FileInputStream in = null;
    try {
        in = new FileInputStream(file);
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        // FIX: stream the file in fixed-size chunks instead of memory-mapping
        // it — a MappedByteBuffer stays mapped (and the file effectively
        // pinned) until GC, and mapping fails for files over 2 GB.
        byte[] buffer = new byte[8192];
        int read;
        while ((read = in.read(buffer)) != -1) {
            md5.update(buffer, 0, read);
        }
        BigInteger bi = new BigInteger(1, md5.digest());
        // FIX: %032x zero-pads the result — BigInteger.toString(16) drops
        // leading zero bytes, yielding digests shorter than 32 characters
        // for roughly 1 in 16 files.
        return String.format("%032x", bi);
    } catch (Exception e) {
        // Preserve original contract: report and return null on any failure.
        e.printStackTrace();
        return null;
    } finally {
        if (null != in) {
            try {
                in.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}