List of usage examples for java.nio.channels FileChannel size
public abstract long size() throws IOException;
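size() returns the current size of this channel's file, in bytes; it throws ClosedChannelException once the channel has been closed. A minimal self-contained sketch of typical usage (the file name example.txt is a placeholder, not taken from any source below):

import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class FileChannelSizeDemo {
    public static void main(String[] args) throws IOException {
        // Placeholder path; substitute a real file
        try (FileChannel channel = FileChannel.open(Paths.get("example.txt"), StandardOpenOption.READ)) {
            // size() reports the file's current length in bytes
            System.out.println("File size: " + channel.size() + " bytes");
        }
    }
}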
From source file:org.apache.hadoop.hdfs.tools.offlineEditsViewer.TestOfflineEditsViewer.java
@Test
public void testRecoveryMode() throws IOException {
    // edits generated by nnHelper (MiniDFSCluster), should have all op codes
    // binary, XML, reparsed binary
    String edits = nnHelper.generateEdits();
    FileOutputStream os = new FileOutputStream(edits, true);

    // Corrupt the file by truncating the end
    FileChannel editsFile = os.getChannel();
    editsFile.truncate(editsFile.size() - 5);

    String editsParsedXml = folder.newFile("editsRecoveredParsed.xml").getAbsolutePath();
    String editsReparsed = folder.newFile("editsRecoveredReparsed").getAbsolutePath();
    String editsParsedXml2 = folder.newFile("editsRecoveredParsed2.xml").getAbsolutePath();

    // Can't read the corrupted file without recovery mode
    assertEquals(-1, runOev(edits, editsParsedXml, "xml", false));

    // parse to XML then back to binary
    assertEquals(0, runOev(edits, editsParsedXml, "xml", true));
    assertEquals(0, runOev(editsParsedXml, editsReparsed, "binary", false));
    assertEquals(0, runOev(editsReparsed, editsParsedXml2, "xml", false));

    // judgment time
    assertTrue("Test round trip",
        FileUtils.contentEqualsIgnoreEOL(new File(editsParsedXml), new File(editsParsedXml2), "UTF-8"));

    os.close();
}
From source file:voldemort.store.cachestore.voldeimpl.StoreIterator.java
private boolean checkSignature(FileChannel channel) throws IOException {
    ByteBuffer intBytes = ByteBuffer.allocate(OFFSET);
    if (channel.size() == 0) {
        throw new StoreException("File size is 0");
    } else {
        channel.read(intBytes);
        intBytes.rewind();
        // Read the header once; calling getInt() a second time would consume
        // the next four bytes and report the wrong value in the message.
        int magic = intBytes.getInt();
        if (magic != MAGIC) {
            throw new StoreException("Header mismatch expect " + MAGIC + " read " + magic);
        }
    }
    return true;
}
From source file:forge.gui.ImportDialog.java
private static void _copyFile(final File srcFile, final File destFile, final boolean deleteSrcAfter)
        throws IOException {
    destFile.getParentFile().mkdirs();

    // if this is a move, try a simple rename first
    if (deleteSrcAfter) {
        if (srcFile.renameTo(destFile)) {
            return;
        }
    }

    if (!destFile.exists()) {
        destFile.createNewFile();
    }

    FileChannel src = null;
    FileChannel dest = null;
    try {
        src = new FileInputStream(srcFile).getChannel();
        dest = new FileOutputStream(destFile).getChannel();
        dest.transferFrom(src, 0, src.size());
    } finally {
        if (src != null) {
            src.close();
        }
        if (dest != null) {
            dest.close();
        }
    }

    if (deleteSrcAfter) {
        srcFile.delete();
    }
}
From source file:net.praqma.jenkins.memorymap.parser.AbstractMemoryMapParser.java
protected CharSequence createCharSequenceFromFile(String charset, File f) throws IOException {
    String chosenCharset = charset;
    CharBuffer cbuf = null;
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(f.getAbsolutePath());
        FileChannel fc = fis.getChannel();
        // fc.size() is a long; casting it to int would silently truncate
        // for files larger than 2 GB, and map() takes a long anyway
        ByteBuffer bbuf = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
        if (!Charset.isSupported(chosenCharset)) {
            logger.warning(String.format("The charset %s is not supported", chosenCharset));
            cbuf = Charset.defaultCharset().newDecoder().decode(bbuf);
        } else {
            cbuf = Charset.forName(chosenCharset).newDecoder().decode(bbuf);
        }
    } finally {
        if (fis != null) {
            fis.close();
        }
    }
    return cbuf;
}
From source file:com.bittorrent.mpetazzoni.common.Torrent.java
private static String hashFiles(List<File> files) throws InterruptedException, IOException {
    int threads = getHashingThreadsCount();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    ByteBuffer buffer = ByteBuffer.allocate(Torrent.PIECE_LENGTH);
    List<Future<String>> results = new LinkedList<Future<String>>();
    StringBuilder hashes = new StringBuilder();

    long length = 0L;
    int pieces = 0;

    long start = System.nanoTime();
    for (File file : files) {
        logger.info("Hashing data from {} with {} threads ({} pieces)...",
            new Object[] { file.getName(), threads,
                (int) (Math.ceil((double) file.length() / Torrent.PIECE_LENGTH)) });

        length += file.length();

        FileInputStream fis = new FileInputStream(file);
        FileChannel channel = fis.getChannel();
        int step = 10;

        try {
            while (channel.read(buffer) > 0) {
                if (buffer.remaining() == 0) {
                    buffer.clear();
                    results.add(executor.submit(new CallableChunkHasher(buffer)));
                }

                if (results.size() >= threads) {
                    pieces += accumulateHashes(hashes, results);
                }

                if (channel.position() / (double) channel.size() * 100f > step) {
                    logger.info("  ... {}% complete", step);
                    step += 10;
                }
            }
        } finally {
            channel.close();
            fis.close();
        }
    }

    // Hash the last bit, if any
    if (buffer.position() > 0) {
        buffer.limit(buffer.position());
        buffer.position(0);
        results.add(executor.submit(new CallableChunkHasher(buffer)));
    }

    pieces += accumulateHashes(hashes, results);

    // Request orderly executor shutdown and wait for hashing tasks to
    // complete.
    executor.shutdown();
    while (!executor.isTerminated()) {
        Thread.sleep(10);
    }
    long elapsed = System.nanoTime() - start;

    int expectedPieces = (int) (Math.ceil((double) length / Torrent.PIECE_LENGTH));
    logger.info("Hashed {} file(s) ({} bytes) in {} pieces ({} expected) in {}ms.",
        new Object[] { files.size(), length, pieces, expectedPieces,
            String.format("%.1f", elapsed / 1e6) });

    return hashes.toString();
}
From source file:com.turn.ttorrent.common.Torrent.java
private static String hashFiles(List<File> files, int pieceLength)
        throws InterruptedException, IOException, NoSuchAlgorithmException {
    int threads = getHashingThreadsCount();
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    ByteBuffer buffer = ByteBuffer.allocate(pieceLength);
    List<Future<String>> results = new LinkedList<Future<String>>();
    StringBuilder hashes = new StringBuilder();

    long length = 0L;
    int pieces = 0;

    long start = System.nanoTime();
    for (File file : files) {
        logger.info("Hashing data from {} with {} threads ({} pieces)...",
            new Object[] { file.getName(), threads,
                (int) (Math.ceil((double) file.length() / pieceLength)) });

        length += file.length();

        FileInputStream fis = new FileInputStream(file);
        FileChannel channel = fis.getChannel();
        int step = 10;

        try {
            while (channel.read(buffer) > 0) {
                if (buffer.remaining() == 0) {
                    buffer.clear();
                    results.add(executor.submit(new CallableChunkHasher(buffer)));
                }

                if (results.size() >= threads) {
                    pieces += accumulateHashes(hashes, results);
                }

                if (channel.position() / (double) channel.size() * 100f > step) {
                    logger.info("  ... {}% complete", step);
                    step += 10;
                }
            }
        } finally {
            channel.close();
            fis.close();
        }
    }

    // Hash the last bit, if any
    if (buffer.position() > 0) {
        buffer.limit(buffer.position());
        buffer.position(0);
        results.add(executor.submit(new CallableChunkHasher(buffer)));
    }

    pieces += accumulateHashes(hashes, results);

    // Request orderly executor shutdown and wait for hashing tasks to
    // complete.
    executor.shutdown();
    while (!executor.isTerminated()) {
        Thread.sleep(10);
    }
    long elapsed = System.nanoTime() - start;

    int expectedPieces = (int) (Math.ceil((double) length / pieceLength));
    logger.info("Hashed {} file(s) ({} bytes) in {} pieces ({} expected) in {}ms.",
        new Object[] { files.size(), length, pieces, expectedPieces,
            String.format("%.1f", elapsed / 1e6) });

    return hashes.toString();
}
From source file:org.kevinferrare.solarSystemDataRetriever.jplhorizons.webfetcher.JplHorizonRawDataRetriever.java
/**
 * Loads data from the folder that was set using setFolderName.<br />
 * The loaded keys are the file names and the associated value is the file content.
 *
 * @return a Map with the data loaded from the folder
 * @throws IOException
 */
public Map<String, String> loadFromFolder() throws IOException {
    Map<String, String> rawDataMap = new HashMap<String, String>();
    File folder = new File(folderName);
    if (!folder.exists()) {
        log.error("Invalid folder " + folderName);
        return null;
    }
    File[] listOfFiles = folder.listFiles(new OnlyExtensionFilter(RAW_DATA_FILE_EXTENSION));
    for (File file : listOfFiles) {
        String name = file.getName().replace("." + RAW_DATA_FILE_EXTENSION, "");
        FileInputStream stream = new FileInputStream(file);
        FileChannel fileChannel = stream.getChannel();
        MappedByteBuffer mappedByteBuffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, 0, fileChannel.size());
        String content = Charset.forName("ASCII").decode(mappedByteBuffer).toString();
        stream.close();
        rawDataMap.put(name, content);
    }
    return rawDataMap;
}
From source file:org.meerkat.util.FileUtil.java
/**
 * readFileContents
 *
 * @param filePath
 * @return file contents
 */
public final String readFileContents(String filePath) {
    FileInputStream stream = null;
    String contents = "";
    try {
        stream = new FileInputStream(new File(filePath));
    } catch (Exception e) {
        log.error("File not found: " + filePath, e);
        // Bail out here: falling through would dereference the null stream
        // below, turning a missing file into a NullPointerException.
        return contents;
    }
    try {
        FileChannel fc = stream.getChannel();
        MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
        /* Instead of using default, pass in a decoder. */
        contents = Charset.defaultCharset().decode(bb).toString();
    } catch (IOException e) {
        log.error("Error streaming file contents: " + filePath, e);
    } finally {
        try {
            stream.close();
        } catch (IOException e) {
            log.error("Error closing file stream: " + filePath, e);
        }
    }
    return contents;
}
From source file:fi.vtt.nubomedia.armodule.Ar3DHandler.java
private String getFile(String path) throws IOException {
    RandomAccessFile in = new RandomAccessFile(new File(path), "r");
    FileChannel ch = in.getChannel();
    long size = ch.size();
    byte[] buf = new byte[(int) size];
    // readFully() loops until the buffer is filled; a plain read() may
    // return fewer bytes than requested in a single call.
    in.readFully(buf);
    in.close();
    return new String(buf);
}
From source file:org.grouplens.lenskit.data.dao.packed.BinaryIndexTableTest.java
@Test
public void testSingleEntry() throws IOException {
    File file = folder.newFile();
    FileChannel chan = new RandomAccessFile(file, "rw").getChannel();
    BinaryIndexTableWriter w = BinaryIndexTableWriter.create(BinaryFormat.create(), chan, 1);

    w.writeEntry(42, new int[] { 0 });

    MappedByteBuffer buf = chan.map(FileChannel.MapMode.READ_ONLY, 0, chan.size());
    BinaryIndexTable tbl = BinaryIndexTable.fromBuffer(1, buf);
    assertThat(tbl.getKeys(), contains(42L));
    assertThat(tbl.getEntry(42), contains(0));
    assertThat(tbl.getEntry(43), nullValue());
}