List of usage examples for java.nio.channels.FileChannel.map
public abstract MappedByteBuffer map(MapMode mode, long position, long size) throws IOException;
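The method maps a region of the channel's file directly into memory and returns a MappedByteBuffer over that region. Before the project examples below, here is a minimal sketch of a typical call, mapping a whole file read-only and decoding it as UTF-8; the file name "example.txt" is a placeholder, not taken from any of the sources listed here.

import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class MapExample {
    public static void main(String[] args) throws IOException {
        try (FileChannel channel = FileChannel.open(Paths.get("example.txt"),
                StandardOpenOption.READ)) {
            // Map the whole file into memory; the mapping stays valid
            // even after the channel is closed.
            MappedByteBuffer buffer =
                    channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
            System.out.println(StandardCharsets.UTF_8.decode(buffer));
        }
    }
}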
From source file:com.alibaba.otter.shared.common.utils.NioUtilsPerformance.java
public static void mappedTest(File source, File target) throws Exception {
    FileInputStream fis = null;
    FileOutputStream fos = null;
    MappedByteBuffer mapbuffer = null;
    try {
        long fileSize = source.length();
        final byte[] outputData = new byte[(int) fileSize];
        fis = new FileInputStream(source);
        fos = new FileOutputStream(target);
        FileChannel sChannel = fis.getChannel();
        target.createNewFile();
        mapbuffer = sChannel.map(FileChannel.MapMode.READ_ONLY, 0, fileSize);
        for (int i = 0; i < fileSize; i++) {
            outputData[i] = mapbuffer.get();
        }
        mapbuffer.clear();
        fos.write(outputData);
        fos.flush();
    } finally {
        IOUtils.closeQuietly(fis);
        IOUtils.closeQuietly(fos);
        if (mapbuffer == null) {
            return;
        }
        final Object buffer = mapbuffer;
        AccessController.doPrivileged(new PrivilegedAction() {
            public Object run() {
                try {
                    Method clean = buffer.getClass().getMethod("cleaner", new Class[0]);
                    if (clean == null) {
                        return null;
                    }
                    clean.setAccessible(true);
                    sun.misc.Cleaner cleaner = (sun.misc.Cleaner) clean.invoke(buffer, new Object[0]);
                    cleaner.clean();
                } catch (Throwable ex) {
                }
                return null;
            }
        });
    }
}
From source file:edu.umich.flowfence.common.ParceledPayload.java
public static ParceledPayload fromParcel(Parcel p) {
    byte[] data = p.createByteArray();
    if (data == null) {
        // Null data means the payload is stored in an ashmem region.
        try (ParcelFileDescriptor pfd = p.readFileDescriptor()) {
            FileDescriptor fd = pfd.getFileDescriptor();
            int size = MemoryFile.getSize(fd);
            if (size == -1) {
                throw new ParcelFormatException("ParceledPayload blob is not ashmem");
            }
            data = new byte[size];
            FileInputStream fis = new FileInputStream(fd);
            FileChannel chan = fis.getChannel();
            MappedByteBuffer mapping = chan.map(FileChannel.MapMode.READ_ONLY, 0, size);
            mapping.get(data);
        } catch (IOException e) {
            Log.e(TAG, "Couldn't unparcel - not an ashmem region?", e);
            ParcelFormatException pfe = new ParcelFormatException("Exception reading blob for ParceledPayload");
            pfe.initCause(e);
            throw pfe;
        }
    }
    return new ParceledPayload(data);
}
From source file:nl.b3p.applet.local.Shapefiles.java
/**
 * Returns a JSON object with shapefile and DBF metadata.
 *
 * @param file the shapefile to read
 * @return a JSON object, read the code to find out which properties
 */
public static String getMetadata(String file) throws IOException, JSONException {
    if (!file.toLowerCase().endsWith(".shp")) {
        throw new IllegalArgumentException("File does not end with .shp: " + file);
    }
    FileChannel channel = new FileInputStream(file).getChannel();
    ShapefileHeader header = new ShapefileHeader();
    ByteBuffer bb = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
    header.read(bb, true);
    channel.close();
    channel = null;

    file = file.substring(0, file.length() - 4) + ".dbf";

    JSONObject j = new JSONObject();
    j.put("type", header.getShapeType().name);
    j.put("version", header.getVersion());
    j.put("minX", header.minX());
    j.put("minY", header.minY());
    j.put("maxX", header.maxX());
    j.put("maxY", header.maxY());

    JSONObject dbf = new JSONObject();
    j.put("dbf", dbf);
    try {
        channel = new FileInputStream(file).getChannel();
        DbaseFileHeader dheader = new DbaseFileHeader();
        bb = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
        dheader.readHeader(bb);
        dbf.put("numRecords", dheader.getNumRecords());
        JSONArray fields = new JSONArray();
        dbf.put("fields", fields);
        for (int i = 0; i < dheader.getNumFields(); i++) {
            JSONObject field = new JSONObject();
            fields.put(field);
            field.put("name", dheader.getFieldName(i));
            field.put("length", dheader.getFieldLength(i));
            field.put("decimalCount", dheader.getFieldDecimalCount(i));
            field.put("class", dheader.getFieldClass(i).getName().toString());
            field.put("type", dheader.getFieldType(i) + "");
        }
    } catch (Exception e) {
        dbf.put("error", e.toString());
    } finally {
        if (channel != null) {
            channel.close();
        }
    }

    file = file.substring(0, file.length() - 4) + ".prj";
    File f = new File(file);
    String prj = null;
    if (f.exists()) {
        Scanner s = new Scanner(f);
        prj = "";
        try {
            while (s.hasNextLine()) {
                if (prj.length() > 0) {
                    prj += "\n";
                }
                prj += s.nextLine();
            }
        } finally {
            s.close();
        }
    }
    j.put("prj", prj);

    return j.toString();
}
From source file:org.sleuthkit.autopsy.recentactivity.Util.java
public static String readFile(String path) throws IOException {
    FileInputStream stream = new FileInputStream(new File(path));
    try {
        FileChannel fc = stream.getChannel();
        MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
        /* Instead of using default, pass in a decoder. */
        return Charset.defaultCharset().decode(bb).toString();
    } finally {
        stream.close();
    }
}
From source file:com.project.utilities.Utilities.java
public static ArrayList<ViolationDataModel> readViolationJSONFileToArray() {
    ArrayList<ViolationDataModel> violation = new ArrayList<ViolationDataModel>();
    Gson gson = new Gson();
    String jsonStr = "";
    try {
        File jsonFile = new File(Constants.EXTERNAL_DIRECTORY + "/" + Constants.FILENAME_VIOLATION_JSON);
        if (!jsonFile.exists()) {
            Log.e("VIOLATION_JSON_LIST", "Error reading file");
        } else {
            Log.e("readJsonFile", "File Found");
        }
        FileInputStream stream = new FileInputStream(jsonFile);
        try {
            FileChannel fc = stream.getChannel();
            MappedByteBuffer byteBuffer = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
            jsonStr = Charset.defaultCharset().decode(byteBuffer).toString();
        } finally {
            stream.close();
        }
    } catch (Exception e) {
        Log.e("readJsonFile", e.getMessage());
    }
    try {
        JSONObject jsonContent = new JSONObject(jsonStr);
        violation = gson.fromJson(jsonContent.getString("data"),
                new TypeToken<ArrayList<ViolationDataModel>>() {
                }.getType());
    } catch (JSONException e) {
        e.printStackTrace();
        Log.e("Error GSON", e.getMessage().toString());
    }
    return violation;
}
From source file:com.bazaarvoice.seo.sdk.util.BVUtility.java
public static String readFile(String path) throws IOException {
    FileInputStream stream = new FileInputStream(new File(path));
    try {
        FileChannel fc = stream.getChannel();
        MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
        return Charset.forName("UTF-8").decode(bb).toString();
    } finally {
        stream.close();
    }
}
From source file:nl.b3p.catalog.arcgis.Shapefiles.java
/**
 * Returns a JSON object with shapefile and DBF metadata.
 *
 * @param file the shapefile to read
 * @return a JSON object, read the code to find out which properties
 */
public static String getMetadata(String file) throws IOException, JSONException {
    if (!file.toLowerCase().endsWith(".shp")) {
        throw new IllegalArgumentException("File does not end with .shp: " + file);
    }
    JSONObject j = new JSONObject();

    String localFilename = new File(file).getName();
    String title = "";
    int dotIndex = localFilename.lastIndexOf(".");
    if (dotIndex > 0) {
        title = localFilename.substring(0, dotIndex);
    } else if (dotIndex == 0) {
        title = localFilename.substring(1);
    }
    j.put("title", title);

    FileChannel channel = new FileInputStream(file).getChannel();
    ShapefileHeader header = new ShapefileHeader();
    ByteBuffer bb = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
    header.read(bb, true);
    channel.close();
    channel = null;

    file = file.substring(0, file.length() - 4) + ".dbf";

    j.put("type", header.getShapeType().name);
    j.put("version", header.getVersion());
    j.put("minX", header.minX());
    j.put("minY", header.minY());
    j.put("maxX", header.maxX());
    j.put("maxY", header.maxY());

    JSONObject dbf = new JSONObject();
    j.put("dbf", dbf);
    try {
        channel = new FileInputStream(file).getChannel();
        DbaseFileHeader dheader = new DbaseFileHeader();
        bb = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
        dheader.readHeader(bb);
        dbf.put("numRecords", dheader.getNumRecords());
        JSONArray fields = new JSONArray();
        dbf.put("fields", fields);
        for (int i = 0; i < dheader.getNumFields(); i++) {
            JSONObject field = new JSONObject();
            fields.put(field);
            field.put("name", dheader.getFieldName(i));
            field.put("length", dheader.getFieldLength(i));
            field.put("decimalCount", dheader.getFieldDecimalCount(i));
            field.put("class", dheader.getFieldClass(i).getName().toString());
            field.put("type", dheader.getFieldType(i) + "");
        }
    } catch (Exception e) {
        dbf.put("error", e.toString());
    } finally {
        if (channel != null) {
            channel.close();
        }
    }

    file = file.substring(0, file.length() - 4) + ".prj";
    File f = new File(file);
    String prj = null;
    if (f.exists()) {
        Scanner s = new Scanner(f);
        prj = "";
        try {
            while (s.hasNextLine()) {
                if (prj.length() > 0) {
                    prj += "\n";
                }
                prj += s.nextLine();
            }
        } finally {
            s.close();
        }
    }
    j.put("prj", prj);

    return j.toString();
}
From source file:Grep.java
public static void setFile(File f) throws IOException {
    FileInputStream fis = new FileInputStream(f);
    FileChannel fc = fis.getChannel();

    // Get the file's size and then map it into memory
    int sz = (int) fc.size();
    MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, sz);

    // Decode the file into a char buffer
    indexFile = decoder.decode(bb);

    fc.close();
}
From source file:org.uva.itast.blended.omr.pages.PDFPageImage.java
/**
 * @param inputpath the PDF file to load
 * @throws IOException
 */
public static PDFFile loadPDFFile(File inputpath) throws IOException {
    RandomAccessFile raf = new RandomAccessFile(inputpath, "r");
    // Map the PDF file into memory so it can be read.
    FileChannel channel = raf.getChannel();
    ByteBuffer buf = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size());
    PDFFile pdffile = new PDFFile(buf);
    return pdffile;
}
From source file:yui.classes.utils.IOUtils.java
public static void fastCopy(File source, File dest) throws IOException {
    FileInputStream fi = new FileInputStream(source);
    FileChannel fic = fi.getChannel();
    MappedByteBuffer mbuf = fic.map(FileChannel.MapMode.READ_ONLY, 0, source.length());
    // The mapping remains valid even after the source channel is closed.
    fic.close();
    fi.close();

    FileOutputStream fo = new FileOutputStream(dest);
    FileChannel foc = fo.getChannel();
    foc.write(mbuf);
    foc.close();
    fo.close();
}