List of usage examples for java.nio.channels.FileChannel.map
public abstract MappedByteBuffer map(MapMode mode, long position, long size) throws IOException;
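All of the examples below follow the same pattern: obtain a FileChannel for an existing file, then call map() with a MapMode, a starting position, and a region size no larger than Integer.MAX_VALUE. As a minimal, self-contained sketch (the file name "example.dat" and the 4096-byte cap are placeholders, not taken from any example below), a read-only mapping can be created and read like this:

import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class MapExample {
    public static void main(String[] args) throws IOException {
        Path path = Path.of("example.dat"); // placeholder file name
        try (FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) {
            // Map at most the first 4096 bytes of the file read-only.
            long size = Math.min(channel.size(), 4096);
            MappedByteBuffer buffer = channel.map(FileChannel.MapMode.READ_ONLY, 0, size);
            int sum = 0;
            while (buffer.hasRemaining()) {
                sum += buffer.get() & 0xFF; // reads come straight from the mapped region
            }
            System.out.println("mapped " + size + " bytes, byte sum = " + sum);
        } // closing the channel does not invalidate the mapping itself
    }
}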
From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.sav.SAVFileReaderSpi.java
@Override
public boolean canDecodeInput(File file) throws IOException {
    if (file == null) {
        throw new IllegalArgumentException("file == null!");
    }
    if (!file.canRead()) {
        throw new IOException("cannot read the input file");
    }
    dbgLog.fine("applying the sav test\n");

    // set up a FileChannel instance for the given file object
    FileChannel srcChannel = new FileInputStream(file).getChannel();

    // create a read-only MappedByteBuffer covering the SAV header
    MappedByteBuffer buff = srcChannel.map(FileChannel.MapMode.READ_ONLY, 0, SAV_HEADER_SIZE);

    //printHexDump(buff, "hex dump of the byte-buffer");
    dbgLog.info("hex dump of the 1st 4 bytes[$FL2 == 24 46 4C 32]="
            + new String(Hex.encodeHex(buff.array())));
    buff.rewind();

    boolean DEBUG = false;

    byte[] hdr4 = new byte[4];
    buff.get(hdr4, 0, 4);
    String hdr4sav = new String(hdr4);
    dbgLog.fine("from string[hdr4]=" + new String(Hex.encodeHex(hdr4)).toUpperCase());

    if (hdr4sav.equals("$FL2")) {
        dbgLog.fine("this file is spss-sav type");
        return true;
    } else {
        dbgLog.fine("this file is NOT spss-sav type");
    }
    return false;
}
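Note that the buffer returned by map() is a direct buffer and in practice has no accessible backing array, so the buff.array() call in the hex-dump line above (and in the near-identical variant below) would throw UnsupportedOperationException. A minimal sketch of the same "$FL2" magic-number check that only uses relative get() calls; the helper name and structure are illustrative, not from the original reader:

// Illustrative helper, not part of the original SAVFileReaderSpi: checks the
// "$FL2" magic of an SPSS .sav file via the first four bytes of a read-only mapping.
private static boolean looksLikeSav(File file) throws IOException {
    try (FileInputStream in = new FileInputStream(file);
         FileChannel channel = in.getChannel()) {
        if (channel.size() < 4) {
            return false;
        }
        MappedByteBuffer header = channel.map(FileChannel.MapMode.READ_ONLY, 0, 4);
        byte[] magic = new byte[4];
        header.get(magic); // copy the bytes out of the mapped region
        return "$FL2".equals(new String(magic, java.nio.charset.StandardCharsets.US_ASCII));
    }
}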
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.sav.SAVFileReaderSpi.java
@Override
public boolean canDecodeInput(File file) throws IOException {
    if (file == null) {
        throw new IllegalArgumentException("file == null!");
    }
    if (!file.canRead()) {
        throw new IOException("cannot read the input file");
    }
    dbgLog.fine("applying the sav test\n");

    // set up a FileChannel instance for the given file object
    FileChannel srcChannel = new FileInputStream(file).getChannel();

    // create a read-only MappedByteBuffer covering the SAV header
    MappedByteBuffer buff = srcChannel.map(FileChannel.MapMode.READ_ONLY, 0, SAV_HEADER_SIZE);

    //printHexDump(buff, "hex dump of the byte-buffer");
    dbgLog.fine("hex dump of the 1st 4 bytes[$FL2 == 24 46 4C 32]="
            + new String(Hex.encodeHex(buff.array())));
    buff.rewind();

    boolean DEBUG = false;

    byte[] hdr4 = new byte[4];
    buff.get(hdr4, 0, 4);
    String hdr4sav = new String(hdr4);
    dbgLog.fine("from string[hdr4]=" + new String(Hex.encodeHex(hdr4)).toUpperCase());

    if (hdr4sav.equals("$FL2")) {
        dbgLog.fine("this file is spss-sav type");
        return true;
    } else {
        dbgLog.fine("this file is NOT spss-sav type");
    }
    return false;
}
From source file:de.cosmocode.palava.store.FileSystemStore.java
@Override
public ByteBuffer view(String identifier) throws IOException {
    Preconditions.checkNotNull(identifier, "Identifier");
    final File file = getFile(identifier);
    Preconditions.checkState(file.exists(), "%s does not exist", file);
    LOG.trace("Reading file from {}", file);
    final FileChannel channel = new RandomAccessFile(file, "r").getChannel();
    return channel.map(MapMode.READ_ONLY, 0, channel.size());
}
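Because a mapping, once established, does not depend on the channel that created it, a variant of this method could release the file descriptor immediately with try-with-resources. A sketch under the assumption that only the mapped view is needed afterwards (this is not the library's actual implementation):

// Sketch of the same idea with the channel closed eagerly; the returned
// MappedByteBuffer remains readable after the channel is closed.
public ByteBuffer view(String identifier) throws IOException {
    final File file = getFile(identifier);
    try (RandomAccessFile raf = new RandomAccessFile(file, "r");
         FileChannel channel = raf.getChannel()) {
        return channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
    }
}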
From source file:edu.harvard.iq.dataverse.ingest.IngestableDataCheckerTest.java
private MappedByteBuffer createTempFileAndGetBuffer(String filename, String fileContents) throws IOException {
    File fh = this.createTempFile(filename, fileContents);
    FileChannel srcChannel = new FileInputStream(fh).getChannel();

    // create a read-only MappedByteBuffer covering the whole file
    MappedByteBuffer buff = srcChannel.map(FileChannel.MapMode.READ_ONLY, 0, fh.length());
    return buff;
}
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.dta.DTAFileReaderSpi.java
@Override
public boolean canDecodeInput(File file) throws IOException {
    if (file == null) {
        throw new IllegalArgumentException("file == null!");
    }
    if (!file.canRead()) {
        throw new IIOException("cannot read the input file");
    }

    // set up a FileChannel instance for the given file object
    FileChannel srcChannel = new FileInputStream(file).getChannel();

    // create a read-only MappedByteBuffer covering the DTA header
    MappedByteBuffer buff = srcChannel.map(FileChannel.MapMode.READ_ONLY, 0, DTA_HEADER_SIZE);

    //printHexDump(buff, "hex dump of the byte-buffer");
    buff.rewind();
    dbgLog.fine("applying the dta test\n");

    byte[] hdr4 = new byte[4];
    buff.get(hdr4, 0, 4);
    dbgLog.fine("hex dump: 1st 4bytes =>" + new String(Hex.encodeHex(hdr4)) + "<-");

    if (hdr4[2] != 1) {
        dbgLog.fine("3rd byte is not 1: given file is not stata-dta type");
        return false;
    } else if ((hdr4[1] != 1) && (hdr4[1] != 2)) {
        dbgLog.fine("2nd byte is neither 1 nor 2: this file is not stata-dta type");
        return false;
    } else if (!stataReleaseNumber.containsKey(hdr4[0])) {
        dbgLog.fine("1st byte (" + hdr4[0]
                + ") is not within the ingestable range [rel. 3-10]: this file is NOT stata-dta type");
        return false;
    } else {
        dbgLog.fine("this file is stata-dta type: " + stataReleaseNumber.get(hdr4[0])
                + "(No in HEX=" + hdr4[0] + ")");
        return true;
    }
}
From source file:edu.harvard.iq.dvn.ingest.statdataio.impl.plugins.dta.DTAFileReaderSpi.java
@Override
public boolean canDecodeInput(File file) throws IOException {
    if (file == null) {
        throw new IllegalArgumentException("file == null!");
    }
    if (!file.canRead()) {
        throw new IIOException("cannot read the input file");
    }

    // set up a FileChannel instance for the given file object
    FileChannel srcChannel = new FileInputStream(file).getChannel();

    // create a read-only MappedByteBuffer covering the DTA header
    MappedByteBuffer buff = srcChannel.map(FileChannel.MapMode.READ_ONLY, 0, DTA_HEADER_SIZE);

    //printHexDump(buff, "hex dump of the byte-buffer");
    buff.rewind();

    boolean result = false;
    dbgLog.fine("applying the dta test\n");

    byte[] hdr4 = new byte[4];
    buff.get(hdr4, 0, 4);
    dbgLog.info("hex dump: 1st 4bytes =>" + new String(Hex.encodeHex(hdr4)) + "<-");

    if (hdr4[2] != 1) {
        dbgLog.fine("3rd byte is not 1: given file is not stata-dta type");
        return false;
    } else if ((hdr4[1] != 1) && (hdr4[1] != 2)) {
        dbgLog.fine("2nd byte is neither 1 nor 2: this file is not stata-dta type");
        return false;
    } else if (!stataReleaseNumber.containsKey(hdr4[0])) {
        dbgLog.fine("1st byte (" + hdr4[0]
                + ") is not within the ingestable range [rel. 3-10]: this file is NOT stata-dta type");
        return false;
    } else {
        dbgLog.fine("this file is stata-dta type: " + stataReleaseNumber.get(hdr4[0])
                + "(No in HEX=" + hdr4[0] + ")");
        return true;
    }
}
From source file:org.apache.hadoop.hdfs.client.ShortCircuitReplica.java
MappedByteBuffer loadMmapInternal() {
    try {
        FileChannel channel = dataStream.getChannel();
        MappedByteBuffer mmap = channel.map(MapMode.READ_ONLY, 0,
                Math.min(Integer.MAX_VALUE, channel.size()));
        if (LOG.isTraceEnabled()) {
            LOG.trace(this + ": created mmap of size " + channel.size());
        }
        return mmap;
    } catch (IOException e) {
        LOG.warn(this + ": mmap error", e);
        return null;
    } catch (RuntimeException e) {
        LOG.warn(this + ": mmap error", e);
        return null;
    }
}
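The Math.min(Integer.MAX_VALUE, channel.size()) guard reflects a limit of the API: a single mapping cannot exceed Integer.MAX_VALUE bytes. A minimal sketch (the chunk size and the idea of walking the file in pieces are illustrative choices, not from the HDFS source) of processing a larger file as a sequence of smaller mappings:

// Sketch: process a file of arbitrary size as consecutive read-only mappings,
// each no larger than CHUNK bytes.
static void mapInChunks(java.nio.file.Path path) throws IOException {
    final long CHUNK = 64L * 1024 * 1024; // 64 MiB per mapping (arbitrary choice)
    try (FileChannel channel = FileChannel.open(path, java.nio.file.StandardOpenOption.READ)) {
        long size = channel.size();
        for (long pos = 0; pos < size; pos += CHUNK) {
            long len = Math.min(CHUNK, size - pos);
            MappedByteBuffer region = channel.map(FileChannel.MapMode.READ_ONLY, pos, len);
            // ... read from 'region' here ...
        }
    }
}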
From source file:org.grouplens.lenskit.data.dao.packed.BinaryIndexTableTest.java
@Test
public void testMultipleEntries() throws IOException {
    File file = folder.newFile();
    FileChannel chan = new RandomAccessFile(file, "rw").getChannel();
    BinaryIndexTableWriter w = BinaryIndexTableWriter.create(BinaryFormat.create(), chan, 3);

    w.writeEntry(42, new int[] { 0 });
    w.writeEntry(49, new int[] { 1, 3 });
    w.writeEntry(67, new int[] { 2, 4 });

    MappedByteBuffer buf = chan.map(FileChannel.MapMode.READ_ONLY, 0, chan.size());
    BinaryIndexTable tbl = BinaryIndexTable.fromBuffer(3, buf);
    assertThat(tbl.getKeys(), contains(42L, 49L, 67L));
    assertThat(tbl.getEntry(42), contains(0));
    assertThat(tbl.getEntry(49), contains(1, 3));
    assertThat(tbl.getEntry(67), contains(2, 4));
    assertThat(tbl.getEntry(-1), nullValue());
}
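This test opens the file as "rw" but maps it read-only; map() also supports writable mappings. A minimal sketch, separate from the LensKit test, of writing through a READ_WRITE mapping and reading the bytes back (the temporary file and the 16-byte region are arbitrary choices for illustration):

// Sketch: write through a READ_WRITE mapping, then re-read the value.
static void readWriteMappingDemo() throws IOException {
    java.nio.file.Path path = java.nio.file.Files.createTempFile("map-demo", ".bin");
    try (FileChannel chan = FileChannel.open(path,
            java.nio.file.StandardOpenOption.READ, java.nio.file.StandardOpenOption.WRITE)) {
        MappedByteBuffer rw = chan.map(FileChannel.MapMode.READ_WRITE, 0, 16);
        rw.putLong(0, 42L); // writes go straight into the mapped file region
        rw.force();         // flush the change to the underlying file

        MappedByteBuffer ro = chan.map(FileChannel.MapMode.READ_ONLY, 0, 16);
        System.out.println("read back: " + ro.getLong(0)); // prints 42
    }
}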
From source file:MyFormApp.java
void pdfToimage(File filename) throws FileNotFoundException, IOException {
    // render the first page of the given PDF file to a 100x100 PNG thumbnail
    File pdfFile = new File(filename.toString());
    RandomAccessFile raf = new RandomAccessFile(pdfFile, "r");
    FileChannel channel = raf.getChannel();
    ByteBuffer buf = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
    PDFFile pdf = new PDFFile(buf);

    int i = 0; // page index (first page)
    String fileNameWithOutExt = FilenameUtils.removeExtension(filename.getName());
    Rectangle rect = new Rectangle(0, 0,
            (int) pdf.getPage(i).getBBox().getWidth(),
            (int) pdf.getPage(i).getBBox().getHeight());
    BufferedImage bufferedImage = new BufferedImage(100, 100, BufferedImage.TYPE_INT_RGB);
    Image image = pdf.getPage(i).getImage(rect.width, rect.height, // width & height
            rect, // clip rect
            null, // null for the ImageObserver
            true, // fill background with white
            true  // block until drawing is done
    );
    Graphics2D bufImageGraphics = bufferedImage.createGraphics();
    bufImageGraphics.drawImage(image.getScaledInstance(100, 100, Image.SCALE_AREA_AVERAGING), 0, 0, null);
    ImageIO.write(bufferedImage, "PNG", new File(PATH + fileNameWithOutExt + ".png"));
}
From source file:org.granite.grails.web.GrailsWebSWFServlet.java
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setAttribute(GrailsApplicationAttributes.REQUEST_SCOPE_ID, grailsAttributes);

    // Get the name of the Groovy script (intern the name so that we can lock on it)
    String pageName = "/swf" + request.getServletPath();
    Resource requestedFile = getResourceForUri(pageName);
    File swfFile = requestedFile.getFile();

    if (swfFile == null || !swfFile.exists()) {
        response.sendError(404, "\"" + pageName + "\" not found.");
        return;
    }

    response.setContentType("application/x-shockwave-flash");
    response.setContentLength((int) swfFile.length());
    response.setBufferSize((int) swfFile.length());
    response.setDateHeader("Expires", 0);

    FileInputStream is = null;
    FileChannel inChan = null;
    try {
        is = new FileInputStream(swfFile);
        OutputStream os = response.getOutputStream();
        inChan = is.getChannel();
        long fSize = inChan.size();
        MappedByteBuffer mBuf = inChan.map(FileChannel.MapMode.READ_ONLY, 0, fSize);
        byte[] buf = new byte[(int) fSize];
        mBuf.get(buf);
        os.write(buf);
    } finally {
        if (is != null) {
            IOUtils.closeQuietly(is);
        }
        if (inChan != null) {
            try {
                inChan.close();
            } catch (IOException ignored) {
            }
        }
    }
}
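Here the entire mapped file is copied into a heap array before being written to the response. A sketch of an alternative that streams the channel straight to an OutputStream with FileChannel.transferTo, skipping both the mapping and the intermediate copy (the helper name and the assumption that the response stream is already open are illustrative, not part of the servlet above):

// Sketch: copy a file to an OutputStream without an intermediate heap copy.
// 'swfFile' and 'os' stand in for the servlet's file and response stream.
static void streamFile(File swfFile, OutputStream os) throws IOException {
    try (FileInputStream is = new FileInputStream(swfFile);
         FileChannel inChan = is.getChannel()) {
        java.nio.channels.WritableByteChannel out = java.nio.channels.Channels.newChannel(os);
        long position = 0;
        long size = inChan.size();
        while (position < size) {
            // transferTo may move fewer bytes than requested, so loop until done
            position += inChan.transferTo(position, size - position, out);
        }
    }
}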