List of usage examples for java.nio.channels.FileChannel#size()
public abstract long size() throws IOException;
From source file:org.gnucash.android.export.ExporterAsyncTask.java
/** * Copies a file from <code>src</code> to <code>dst</code> * @param src Absolute path to the source file * @param dst Absolute path to the destination file * @throws IOException if the file could not be copied *///w ww . jav a2 s .c o m public void copyFile(File src, File dst) throws IOException { //TODO: Make this asynchronous at some time, t in the future. if (mExportParams.getExportFormat() == ExportFormat.QIF) { splitQIF(src, dst); } else { FileChannel inChannel = new FileInputStream(src).getChannel(); FileChannel outChannel = new FileOutputStream(dst).getChannel(); try { inChannel.transferTo(0, inChannel.size(), outChannel); } finally { if (inChannel != null) inChannel.close(); if (outChannel != null) outChannel.close(); } } }
From source file:org.apache.camel.component.file.FileOperations.java
/** * Creates and prepares the output file channel. Will position itself in correct position if the file is writable * eg. it should append or override any existing content. *//*from w w w . j a v a2 s . c om*/ private FileChannel prepareOutputFileChannel(File target, FileChannel out) throws IOException { if (endpoint.getFileExist() == GenericFileExist.Append) { out = new RandomAccessFile(target, "rw").getChannel(); out = out.position(out.size()); } else { // will override out = new FileOutputStream(target).getChannel(); } return out; }
From source file:com.sds.acube.ndisc.mts.xserver.util.XNDiscUtils.java
/**
 * Copies each NFile between its temporary path and its storage path.
 *
 * @param nFile the files to copy
 * @param bForwardMedia true: copy temp path to storage path and delete the
 *                      temp file afterwards; false: copy storage path back
 *                      to the temp path
 * @param bForce when false, entries whose stat type equals
 *               {@code XNDiscConfig.STAT_NONE} are skipped
 * @throws Exception if any copy fails (the exception is logged and rethrown)
 */
public static void copyNFile(NFile[] nFile, boolean bForwardMedia, boolean bForce) throws Exception {
    FileChannel srcChannel = null;
    FileChannel dstChannel = null;
    String srcFile = null;
    String dstFile = null;
    try {
        for (int i = 0; i < nFile.length; i++) {
            if (false == bForce && XNDiscConfig.STAT_NONE.equals(nFile[i].getStatType())) {
                continue;
            }
            if (bForwardMedia) {
                srcFile = nFile[i].getTmpPath();
                dstFile = nFile[i].getStoragePath();
            } else {
                srcFile = nFile[i].getStoragePath();
                dstFile = nFile[i].getTmpPath();
            }
            srcChannel = new FileInputStream(srcFile).getChannel();
            dstChannel = new FileOutputStream(dstFile).getChannel();
            dstChannel.transferFrom(srcChannel, 0, srcChannel.size());
            // Close this entry's channels before the next iteration; the
            // original reassigned the variables each pass and leaked every
            // channel pair except the last one (only the final pair was
            // closed in the finally block).
            srcChannel.close();
            srcChannel = null;
            dstChannel.close();
            dstChannel = null;
            if (bForwardMedia) {
                new File(srcFile).delete();
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw e;
    } finally {
        // Safety net for the pair open when an exception interrupted the loop.
        if (null != srcChannel && srcChannel.isOpen()) {
            srcChannel.close();
        }
        if (null != dstChannel && dstChannel.isOpen()) {
            dstChannel.close();
        }
    }
}
From source file:org.grouplens.lenskit.data.dao.packed.BinaryIndexTableTest.java
@Test public void testLimitedView() throws IOException { File file = folder.newFile(); FileChannel chan = new RandomAccessFile(file, "rw").getChannel(); BinaryIndexTableWriter writer = BinaryIndexTableWriter.create(BinaryFormat.create(), chan, 3); writer.writeEntry(12, new int[] { 0 }); writer.writeEntry(17, new int[] { 1, 3 }); writer.writeEntry(19, new int[] { 4, 5 }); MappedByteBuffer buffer = chan.map(FileChannel.MapMode.READ_ONLY, 0, chan.size()); BinaryIndexTable tbl = BinaryIndexTable.fromBuffer(3, buffer); tbl = tbl.createLimitedView(2);//from w w w . j ava2s .c om assertThat(tbl.getKeys(), hasSize(2)); assertThat(tbl.getKeys(), contains(12L, 17L)); assertThat(tbl.getEntry(12), contains(0)); assertThat(tbl.getEntry(17), contains(1)); assertThat(tbl.getEntry(19), nullValue()); assertThat(tbl.getEntry(-1), nullValue()); BinaryIndexTable serializedTbl = SerializationUtils.clone(tbl); assertThat(serializedTbl.getKeys(), hasSize(2)); assertThat(serializedTbl.getKeys(), contains(12L, 17L)); assertThat(serializedTbl.getEntry(12), contains(0)); assertThat(serializedTbl.getEntry(17), contains(1)); assertThat(serializedTbl.getEntry(19), nullValue()); assertThat(serializedTbl.getEntry(-1), nullValue()); }
From source file:org.apache.hadoop.dfs.TestFsck.java
/**
 * End-to-end check that fsck reports a block as CORRUPT after all of its
 * on-disk replicas have been overwritten with garbage.
 */
public void testCorruptBlock() throws Exception {
    Configuration conf = new Configuration();
    // short block-report interval so the namenode learns about corruption quickly
    conf.setLong("dfs.blockreport.intervalMsec", 1000);
    FileSystem fs = null;
    DFSClient dfsClient = null;
    LocatedBlocks blocks = null;
    int replicaCount = 0;
    Random random = new Random();
    String outStr = null;
    // 3-datanode mini cluster so the file can reach replication factor 3
    MiniDFSCluster cluster = new MiniDFSCluster(conf, 3, true, null);
    cluster.waitActive();
    fs = cluster.getFileSystem();
    Path file1 = new Path("/testCorruptBlock");
    DFSTestUtil.createFile(fs, file1, 1024, (short) 3, 0);
    // Wait until file replication has completed
    DFSTestUtil.waitReplication(fs, file1, (short) 3);
    String block = DFSTestUtil.getFirstBlock(fs, file1).getBlockName();
    // Make sure filesystem is in healthy state before corrupting anything
    outStr = runFsck(conf, 0, true, "/");
    System.out.println(outStr);
    assertTrue(outStr.contains("HEALTHY"));
    // Corrupt the replicas: scan all six possible datanode storage dirs and
    // overwrite part of every copy of the block file with "BADBAD".
    File baseDir = new File(System.getProperty("test.build.data"), "dfs/data");
    for (int i = 0; i < 6; i++) {
        File blockFile = new File(baseDir, "data" + (i + 1) + "/current/" + block);
        if (blockFile.exists()) {
            RandomAccessFile raFile = new RandomAccessFile(blockFile, "rw");
            FileChannel channel = raFile.getChannel();
            String badString = "BADBAD";
            // write somewhere in the first half so the bad bytes land inside the block
            int rand = random.nextInt((int) channel.size() / 2);
            raFile.seek(rand);
            raFile.write(badString.getBytes());
            raFile.close();
        }
    }
    // Read the file to trigger reportBadBlocks
    try {
        IOUtils.copyBytes(fs.open(file1), new IOUtils.NullOutputStream(), conf, true);
    } catch (IOException ie) {
        // Ignore exception: a read failure is expected once replicas are corrupt
    }
    dfsClient = new DFSClient(new InetSocketAddress("localhost", cluster.getNameNodePort()), conf);
    blocks = dfsClient.namenode.getBlockLocations(file1.toString(), 0, Long.MAX_VALUE);
    replicaCount = blocks.get(0).getLocations().length;
    // Poll until the namenode again reports 3 locations for the block
    // (i.e. the corruption has been fully registered).
    while (replicaCount != 3) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException ignore) {
        }
        blocks = dfsClient.namenode.getBlockLocations(file1.toString(), 0, Long.MAX_VALUE);
        replicaCount = blocks.get(0).getLocations().length;
    }
    assertTrue(blocks.get(0).isCorrupt());
    // Check if fsck reports the same corruption (expected exit status 1)
    outStr = runFsck(conf, 1, true, "/");
    System.out.println(outStr);
    assertTrue(outStr.contains("CORRUPT"));
    assertTrue(outStr.contains("testCorruptBlock"));
    cluster.shutdown();
}
From source file:com.l2jfree.gameserver.geodata.pathfinding.geonodes.GeoPathFinding.java
/**
 * Loads one pathnode region file ("rx_ry.pn") into a read-only memory-mapped
 * buffer and builds an index recording the byte offset at which each of the
 * 65536 node blocks starts, then stores both into the region tables.
 * Failures are logged and swallowed (the region is simply left unloaded).
 */
private void LoadPathNodeFile(byte rx, byte ry) {
    String fname = "./data/pathnode/" + rx + "_" + ry + ".pn";
    short regionoffset = getRegionOffset(rx, ry);
    _log.info("PathFinding Engine: - Loading: " + fname + " -> region offset: " + regionoffset + "X: " + rx + " Y: " + ry);
    File Pn = new File(fname);
    int node = 0, size, index = 0;
    FileChannel roChannel = null;
    try {
        // Create a read-only memory-mapped file
        roChannel = new RandomAccessFile(Pn, "r").getChannel();
        size = (int) roChannel.size();
        MappedByteBuffer nodes;
        if (Config.FORCE_GEODATA)
            // Force the O/S to load this buffer's content into physical memory.
            // Not guaranteed: the operating system may later page out some of
            // the buffer's data anyway.
            nodes = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, size).load();
        else
            nodes = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, size);
        // Index the pathnode file so we know where each block starts.
        // File layout per block (from this walk): 1 layer-count byte followed
        // by layer * 10 bytes of node data — TODO confirm against the format spec.
        IntBuffer indexs = IntBuffer.allocate(65536);
        while (node < 65536) {
            byte layer = nodes.get(index);
            indexs.put(node++, index);
            index += layer * 10 + 1;
        }
        _pathNodesIndex.set(regionoffset, indexs);
        _pathNodes.set(regionoffset, nodes);
    } catch (Exception e) {
        _log.warn("Failed to Load PathNode File: " + fname + "\n", e);
    } finally {
        try {
            // Closing the channel is safe: the mapping remains valid afterwards.
            if (roChannel != null)
                roChannel.close();
        } catch (Exception e) {
        }
    }
}
From source file:org.opennms.systemreport.AbstractSystemReportPlugin.java
/**
 * Reads the entire contents of a file into a String (decoded with the
 * platform default charset), stripping any trailing CR/LF characters.
 *
 * @param lsb the file to read; may be null or nonexistent
 * @return the file contents, or null if the file is null, missing, or unreadable
 */
protected String slurp(final File lsb) {
    if (lsb != null && lsb.exists()) {
        FileInputStream stream = null;
        try {
            stream = new FileInputStream(lsb);
            FileChannel fc = stream.getChannel();
            MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
            // Must be replaceAll(), which treats the argument as a regex.
            // The original used replace(), which searches for the LITERAL
            // text "[\r\n]*$" and therefore never stripped anything.
            return Charset.defaultCharset().decode(bb).toString().replaceAll("[\\r\\n]*$", "");
        } catch (final Exception e) {
            LOG.debug("Unable to read from file '{}'", lsb.getPath(), e);
        } finally {
            // Closing the stream also closes the channel it spawned.
            IOUtils.closeQuietly(stream);
        }
    }
    return null;
}
From source file:com.filemanager.free.filesystem.FileUtil.java
/**
 * Copy a file. The target file may even be on external SD card for Kitkat.
 *
 * Strategy: if the target is directly writable, a FileChannel transfer is
 * used; otherwise the copy falls back to the Storage Access Framework
 * (Lollipop+) or a MediaStore workaround (Kitkat only).
 *
 * @param source The source file
 * @param target The target file
 * @return true if the copying was successful.
 */
@SuppressWarnings("null")
public static boolean copyFile(final File source, final File target, Context context) {
    FileInputStream inStream = null;
    OutputStream outStream = null;
    FileChannel inChannel = null;
    FileChannel outChannel = null;
    try {
        inStream = new FileInputStream(source);
        // First try the normal way
        if (isWritable(target)) {
            // standard way: channel-to-channel transfer
            // NOTE(review): transferTo() may copy fewer bytes than requested
            // for very large files — confirm whether callers rely on this.
            outStream = new FileOutputStream(target);
            inChannel = inStream.getChannel();
            outChannel = ((FileOutputStream) outStream).getChannel();
            inChannel.transferTo(0, inChannel.size(), outChannel);
        } else {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                // Storage Access Framework
                DocumentFile targetDocument = getDocumentFile(target, false, context);
                outStream = context.getContentResolver().openOutputStream(targetDocument.getUri());
            } else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.KITKAT) {
                // Workaround for Kitkat ext SD card
                Uri uri = MediaStoreHack.getUriFromFile(target.getAbsolutePath(), context);
                outStream = context.getContentResolver().openOutputStream(uri);
            } else {
                // pre-Kitkat with a non-writable target: give up
                return false;
            }
            if (outStream != null) {
                // Both for SAF and for Kitkat, write to output stream.
                byte[] buffer = new byte[16384]; // MAGIC_NUMBER
                int bytesRead;
                while ((bytesRead = inStream.read(buffer)) != -1) {
                    outStream.write(buffer, 0, bytesRead);
                }
            }
        }
    } catch (Exception e) {
        Log.e("AmazeFileUtils",
                "Error when copying file from " + source.getAbsolutePath() + " to " + target.getAbsolutePath(), e);
        return false;
    } finally {
        // Each close is wrapped separately so one failure (or a null handle
        // from an untaken branch) cannot prevent the others from closing.
        try {
            inStream.close();
        } catch (Exception e) {
            // ignore exception
        }
        try {
            outStream.close();
        } catch (Exception e) {
            // ignore exception
        }
        try {
            inChannel.close();
        } catch (Exception e) {
            // ignore exception
        }
        try {
            outChannel.close();
        } catch (Exception e) {
            // ignore exception
        }
    }
    return true;
}
From source file:pyromaniac.IO.MMFastaImporter.java
/**
 * Scans the sequence FASTA file and records where every record header
 * (the {@code BEGINNING_FASTA_HEADER} character) occurs. The file is
 * memory-mapped in half-gigabyte windows; each header position is stored in
 * {@code seqStartsLL} as a (buffer index, offset-within-buffer) pair.
 *
 * @throws Exception if the file cannot be opened, mapped or decoded
 */
private void _initSeq() throws Exception {
    FileInputStream tempStream = new FileInputStream(new File(this.seqFile));
    try {
        FileChannel fcSeq = tempStream.getChannel();
        this.seqSizeLong = fcSeq.size();
        this.seqStartsLL = new ArrayList<Pair<Integer, Long>>();
        // Map the file half a gigabyte at a time (MappedByteBuffer is int-indexed).
        for (long startPosition = 0L; startPosition < this.seqSizeLong; startPosition += HALF_GIGA) {
            MappedByteBuffer seqBuffer = fcSeq.map(FileChannel.MapMode.READ_ONLY, startPosition,
                    Math.min(this.seqSizeLong - startPosition, HALF_GIGA));
            this.seqBuffers.add(seqBuffer);
            int sbf_pos = seqBuffers.size() - 1;
            // Decode the mapped window in 2 KB slices by sliding the buffer's
            // limit forward; positions recorded are relative to this buffer.
            int maxBuffer = 2048;
            int bufferSize = (seqBuffer.capacity() > maxBuffer) ? maxBuffer : seqBuffer.capacity();
            seqBuffer.limit(bufferSize);
            seqBuffer.position(0);
            while (seqBuffer.position() != seqBuffer.capacity()) {
                int prevPos = seqBuffer.position();
                CharBuffer result = decoder.decode(seqBuffer);
                // decode() advanced the position to the limit; rewind so
                // posInFile below is computed from the slice start.
                seqBuffer.position(prevPos);
                for (int i = 0; i < result.capacity(); i++) {
                    char curr = result.charAt(i);
                    int posInFile = prevPos + i;
                    if (curr == BEGINNING_FASTA_HEADER) {
                        seqStartsLL.add(new Pair<Integer, Long>(sbf_pos, new Long(posInFile)));
                    }
                }
                // Slide the window: new position = old limit, new limit = +bufferSize
                // (clamped to capacity).
                int newPos = seqBuffer.limit();
                if (seqBuffer.limit() + bufferSize > seqBuffer.capacity())
                    seqBuffer.limit(seqBuffer.capacity());
                else
                    seqBuffer.limit(seqBuffer.limit() + bufferSize);
                seqBuffer.position(newPos);
            }
            seqBuffer.rewind();
        }
    } finally {
        // The mapped buffers remain valid after the channel is closed, so the
        // file descriptor (leaked by the original) can be released here.
        tempStream.close();
    }
}
From source file:pyromaniac.IO.MMFastaImporter.java
/** * _init qual./*from w w w . j ava 2 s. c o m*/ * * @throws Exception the exception */ private void _initQual() throws Exception { FileInputStream tempStream = new FileInputStream(new File(this.qualFile)); FileChannel fcQual = tempStream.getChannel(); this.qualSizeLong = fcQual.size(); //qual starts LL contains pairs, marking file #no (in qualBuffers) and position #no (in the buffer). this.qualStartsLL = new ArrayList<Pair<Integer, Long>>(); for (long startPosition = 0L; startPosition < this.qualSizeLong; startPosition += HALF_GIGA) { MappedByteBuffer qualBuffer = fcQual.map(FileChannel.MapMode.READ_ONLY, startPosition, Math.min(this.qualSizeLong - startPosition, HALF_GIGA)); //map half a gig to this channel. this.qualBuffers.add(qualBuffer); int qbf_pos = qualBuffers.size() - 1; int maxBuffer = 2048; int bufferSize = (qualBuffer.capacity() > maxBuffer) ? maxBuffer : qualBuffer.capacity(); qualBuffer.limit(bufferSize); qualBuffer.position(0); while (qualBuffer.position() != qualBuffer.capacity()) { int prevPos = qualBuffer.position(); CharBuffer result = decoder.decode(qualBuffer); qualBuffer.position(prevPos); for (int i = 0; i < result.capacity(); i++) { char curr = result.charAt(i); int posInFile = prevPos + i; if (curr == BEGINNING_FASTA_HEADER) { qualStartsLL.add(new Pair<Integer, Long>(qbf_pos, new Long(posInFile))); } } int newPos = qualBuffer.limit(); if (qualBuffer.limit() + bufferSize > qualBuffer.capacity()) qualBuffer.limit(qualBuffer.capacity()); else qualBuffer.limit(qualBuffer.limit() + bufferSize); qualBuffer.position(newPos); } qualBuffer.rewind(); } }