List of usage examples for android.graphics ImageFormat NV21
int NV21
To view the source code for android.graphics ImageFormat NV21, click the Source Link below.
From source file:Main.java
public static String savetoJPEG(byte[] data, int width, int height, String file) { Rect frame = new Rect(0, 0, width, height); YuvImage img = new YuvImage(data, ImageFormat.NV21, width, height, null); OutputStream os = null;//from w ww . j a va2s . c o m File jpgfile = new File(file); try { os = new FileOutputStream(jpgfile); img.compressToJpeg(frame, 100, os); os.flush(); os.close(); } catch (Exception e) { e.printStackTrace(); } return jpgfile.getPath(); }
From source file:Main.java
/**
 * Converts an NV21 camera frame to an ARGB_8888 Bitmap.
 * <p>
 * Fast path: RenderScript's {@code ScriptIntrinsicYuvToRGB} (API 17+, probed
 * via reflection). The RenderScript context, allocations and bitmap are cached
 * in static fields and reused across calls, so every frame must share the same
 * width/height — TODO confirm callers guarantee this.
 * Fallback path: YuvImage -&gt; in-memory JPEG -&gt; BitmapFactory decode.
 *
 * @param nv21    raw NV21 bytes
 * @param width   frame width in pixels
 * @param height  frame height in pixels
 * @param context used once to create the RenderScript context
 * @return the decoded bitmap (shared static field; not thread-safe)
 */
@SuppressLint("NewApi")
public static Bitmap NV21ToRGBABitmap(byte[] nv21, int width, int height, Context context) {
    TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap");
    Rect rect = new Rect(0, 0, width, height);
    try {
        // Probe for the API-17 YUV intrinsic; on older platforms this throws
        // and drops us into the YuvImage fallback below.
        Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV");
        Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB");
        byte[] imageData = nv21;
        if (mRS == null) {
            mRS = RenderScript.create(context);
            mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS));
            Type.Builder tb = new Type.Builder(mRS,
                    Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV));
            tb.setX(width);
            tb.setY(height);
            tb.setMipmaps(false);
            tb.setYuvFormat(ImageFormat.NV21);
            ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT);
            timings.addSplit("Prepare for ain");
            Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS));
            tb2.setX(width);
            tb2.setY(height);
            tb2.setMipmaps(false);
            // BUGFIX: usage flags form a bitmask and must be OR-ed together;
            // the original used '&', which ANDs two distinct bits to 0.
            aOut = Allocation.createTyped(mRS, tb2.create(),
                    Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED);
            timings.addSplit("Prepare for aOut");
            bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            timings.addSplit("Create Bitmap");
        }
        ain.copyFrom(imageData);
        timings.addSplit("ain copyFrom");
        mYuvToRgb.setInput(ain);
        timings.addSplit("setInput ain");
        mYuvToRgb.forEach(aOut);
        timings.addSplit("NV21 to ARGB forEach");
        aOut.copyTo(bitmap);
        timings.addSplit("Allocation to Bitmap");
    } catch (Exception e) {
        // Reflection probe or RenderScript failed: compress to JPEG in memory
        // and decode with BitmapFactory instead.
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        timings.addSplit("NV21 bytes to YuvImage");
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 90, baos);
        byte[] cur = baos.toByteArray();
        timings.addSplit("YuvImage crop and compress to Jpeg Bytes");
        bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length);
        timings.addSplit("Jpeg Bytes to Bitmap");
    }
    timings.dumpToLog();
    return bitmap;
}
From source file:Main.java
/**
 * Converts an NV21 camera frame to an ARGB_8888 Bitmap.
 * <p>
 * Fast path: RenderScript's {@code ScriptIntrinsicYuvToRGB} (API 17+, probed
 * via reflection). The RenderScript context, allocations and bitmap are cached
 * in static fields and reused across calls, so every frame must share the same
 * width/height — TODO confirm callers guarantee this.
 * Fallback path: YuvImage -&gt; in-memory JPEG -&gt; BitmapFactory decode.
 *
 * @param nv21    raw NV21 bytes
 * @param width   frame width in pixels
 * @param height  frame height in pixels
 * @param context used once to create the RenderScript context
 * @return the decoded bitmap (shared static field; not thread-safe)
 */
@SuppressLint("NewApi")
public static Bitmap NV21ToRGBABitmap(byte[] nv21, int width, int height, Context context) {
    TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap");
    Rect rect = new Rect(0, 0, width, height);
    try {
        // Probe for the API-17 YUV intrinsic; on older platforms this throws
        // and drops us into the YuvImage fallback below.
        Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV");
        Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB");
        byte[] imageData = nv21;
        if (mRS == null) {
            mRS = RenderScript.create(context);
            mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS));
            Type.Builder tb = new Type.Builder(mRS,
                    Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV));
            tb.setX(width);
            tb.setY(height);
            tb.setMipmaps(false);
            tb.setYuvFormat(ImageFormat.NV21);
            ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT);
            timings.addSplit("Prepare for ain");
            Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS));
            tb2.setX(width);
            tb2.setY(height);
            tb2.setMipmaps(false);
            // BUGFIX: usage flags form a bitmask and must be OR-ed together;
            // the original used '&', which ANDs two distinct bits to 0.
            aOut = Allocation.createTyped(mRS, tb2.create(),
                    Allocation.USAGE_SCRIPT | Allocation.USAGE_SHARED);
            timings.addSplit("Prepare for aOut");
            bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
            timings.addSplit("Create Bitmap");
        }
        ain.copyFrom(imageData);
        timings.addSplit("ain copyFrom");
        mYuvToRgb.setInput(ain);
        timings.addSplit("setInput ain");
        mYuvToRgb.forEach(aOut);
        timings.addSplit("NV21 to ARGB forEach");
        aOut.copyTo(bitmap);
        timings.addSplit("Allocation to Bitmap");
    } catch (Exception e) {
        // Reflection probe or RenderScript failed: compress to JPEG in memory
        // and decode with BitmapFactory instead.
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        timings.addSplit("NV21 bytes to YuvImage");
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(rect, 90, baos);
        byte[] cur = baos.toByteArray();
        timings.addSplit("YuvImage crop and compress to Jpeg Bytes");
        bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length);
        timings.addSplit("Jpeg Bytes to Bitmap");
    }
    timings.dumpToLog();
    return bitmap;
}
From source file:Main.java
/**
 * Decodes an NV21 frame into a Bitmap by round-tripping through an
 * in-memory JPEG at quality 100.
 */
static public Bitmap NV21ToRGBABitmap(byte[] nv21, int width, int height) {
    final Rect fullFrame = new Rect(0, 0, width, height);
    final ByteArrayOutputStream jpegOut = new ByteArrayOutputStream();
    new YuvImage(nv21, ImageFormat.NV21, width, height, null)
            .compressToJpeg(fullFrame, 100, jpegOut);
    final byte[] jpegBytes = jpegOut.toByteArray();
    return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);
}
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.detect.DetectStripTask.java
/**
 * Background pass of the strip-detection task.
 * <p>
 * For each patch of the strip test identified by the UUID extra: reads the
 * stored NV21 preview bytes from internal storage, converts them to a CIELab
 * Mat ({@code makeLab}), perspective-warps the image, splits it into
 * calibration and strip areas, color-calibrates it, and tries to detect the
 * strip. The detected (or, on failure, red-cross-marked) Lab strip is packed
 * as raw bytes with its row/column dimensions appended (little-endian, last
 * 8 bytes) and written back to internal storage under
 * {@code Constant.STRIP + imageNo} (plus {@code Constant.ERROR} on failure).
 * Returns null in all cases; errors are logged via Timber.
 * NOTE(review): intermediate Mats are cached in fields (warpMat,
 * roiStripArea) set by the helper calls — statement order matters here.
 */
@Nullable @Override/*from w w w. ja va 2s . c om*/ protected Void doInBackground(Intent... params) { Intent intent = params[0]; if (intent == null) { return null; } String uuid = intent.getStringExtra(Constant.UUID); StripTest stripTest = new StripTest(); int numPatches = stripTest.getPatchCount(uuid); format = intent.getIntExtra(Constant.FORMAT, ImageFormat.NV21); width = intent.getIntExtra(Constant.WIDTH, 0); height = intent.getIntExtra(Constant.HEIGHT, 0); if (width == 0 || height == 0) { return null; } JSONArray imagePatchArray = null; int imageCount = -1; Mat labImg; // Mat for image from NV21 data Mat labStrip; // Mat for detected strip try { String json = FileUtil.readFromInternalStorage(context, Constant.IMAGE_PATCH); imagePatchArray = new JSONArray(json); } catch (Exception e) { Timber.e(e); } for (int i = 0; i < numPatches; i++) { try { if (imagePatchArray != null) { // sub-array for each patch JSONArray array = imagePatchArray.getJSONArray(i); // get the image number from the json array int imageNo = array.getInt(0); if (imageNo > imageCount) { // Set imageCount to current number imageCount = imageNo; byte[] data = FileUtil.readByteArray(context, Constant.DATA + imageNo); if (data == null) { throw new IOException(); } //make a L,A,B Mat object from data try { labImg = makeLab(data); } catch (Exception e) { if (context != null) { Timber.e(e); } continue; } //perspectiveTransform try { if (labImg != null) { warp(labImg, imageNo); } } catch (Exception e) { if (context != null) { Timber.e(e); } continue; } //divide into calibration and strip areas try { if (context != null) { divideIntoCalibrationAndStripArea(); } } catch (Exception e) { Timber.e(e); continue; } //save warped image to external storage // if (DEVELOP_MODE) { // Mat rgb = new Mat(); // Imgproc.cvtColor(warpMat, rgb, Imgproc.COLOR_Lab2RGB); // Bitmap bitmap = Bitmap.createBitmap(rgb.width(), rgb.height(), Bitmap.Config.ARGB_8888); // Utils.matToBitmap(rgb, bitmap); // // //if 
(FileUtil.isExternalStorageWritable()) { // FileUtil.writeBitmapToExternalStorage(bitmap, "/warp", UUID.randomUUID().toString() + ".png"); //} // //Bitmap.createScaledBitmap(bitmap, BITMAP_SCALED_WIDTH, BITMAP_SCALED_HEIGHT, false); // } //calibrate Mat calibrationMat; try { CalibrationResultData calResult = getCalibratedImage(warpMat); if (calResult == null) { return null; } else { calibrationMat = calResult.getCalibratedImage(); } // Log.d(this.getClass().getSimpleName(), "E94 error mean: " + String.format(Locale.US, "%.2f", calResult.meanE94) // + ", max: " + String.format(Locale.US, "%.2f", calResult.maxE94) // + ", total: " + String.format(Locale.US, "%.2f", calResult.totalE94)); // if (AppPreferences.isDiagnosticMode()) { // listener.showError("E94 mean: " + String.format(Locale.US, "%.2f", calResult.meanE94) // + ", max: " + String.format(Locale.US, "%.2f", calResult.maxE94) // + ", total: " + String.format(Locale.US, "%.2f", calResult.totalE94)); // } } catch (Exception e) { Timber.e(e); return null; } //show calibrated image // if (DEVELOP_MODE) { // Mat rgb = new Mat(); // Imgproc.cvtColor(calibrationMat, rgb, Imgproc.COLOR_Lab2RGB); // Bitmap bitmap = Bitmap.createBitmap(rgb.width(), rgb.height(), Bitmap.Config.ARGB_8888); // Utils.matToBitmap(rgb, bitmap); // if (FileUtil.isExternalStorageWritable()) { // FileUtil.writeBitmapToExternalStorage(bitmap, "/warp", UUID.randomUUID().toString() + "_cal.png"); // } // //Bitmap.createScaledBitmap(bitmap, BITMAP_SCALED_WIDTH, BITMAP_SCALED_HEIGHT, false); // } // cut out black area that contains the strip Mat stripArea = null; if (roiStripArea != null) { stripArea = calibrationMat.submat(roiStripArea); } if (stripArea != null) { Mat strip = null; try { StripTest.Brand brand = stripTest.getBrand(uuid); strip = OpenCVUtil.detectStrip(stripArea, brand, ratioW, ratioH); } catch (Exception e) { Timber.e(e); } String error = ""; if (strip != null) { labStrip = strip.clone(); } else { if (context != null) { 
Timber.e(context.getString(R.string.error_calibrating)); } labStrip = stripArea.clone(); error = Constant.ERROR; //draw a red cross over the image Scalar red = RED_LAB_COLOR; // Lab color Imgproc.line(labStrip, new Point(0, 0), new Point(labStrip.cols(), labStrip.rows()), red, 2); Imgproc.line(labStrip, new Point(0, labStrip.rows()), new Point(labStrip.cols(), 0), red, 2); } try { // create byte[] from Mat and store it in internal storage // In order to restore the byte array, we also need the rows and columns dimensions // these are stored in the last 8 bytes int dataSize = labStrip.cols() * labStrip.rows() * 3; byte[] payload = new byte[dataSize + 8]; byte[] matByteArray = new byte[dataSize]; labStrip.get(0, 0, matByteArray); // pack cols and rows into byte arrays byte[] rows = FileUtil.leIntToByteArray(labStrip.rows()); byte[] cols = FileUtil.leIntToByteArray(labStrip.cols()); // append them to the end of the array, in order rows, cols System.arraycopy(matByteArray, 0, payload, 0, dataSize); System.arraycopy(rows, 0, payload, dataSize, 4); System.arraycopy(cols, 0, payload, dataSize + 4, 4); FileUtil.writeByteArray(context, payload, Constant.STRIP + imageNo + error); } catch (Exception e) { Timber.e(e); } } } } } catch (@NonNull JSONException | IOException e) { if (context != null) { Timber.e(context.getString(R.string.error_cut_out_strip)); } } } return null; }
From source file:com.example.android.camera.CameraActivity.java
/**
 * Opens the rear camera on resume and installs a preview callback that, once
 * face detection has found at least one face in an NV21 frame, crops each
 * detected face from the YuvImage, scales it to 100x100 JPEG, and POSTs it to
 * a face-matching server (":8080/match"). The JSON response's "dist" field is
 * turned into a match percentage shown in the UI; matching retries up to 100
 * frames (stopping early at >= 74%), after which face detection is stopped
 * and the UI reset. {@code notRequesting} gates the callback so only one
 * request is in flight at a time.
 * NOTE(review): mutates many activity fields (pWidth, pHeight, matrix,
 * trialCounter, previousMatchLevel, fdButtonClicked, imageFormat,
 * PreviewSize) — statement order and the async handler's state are intricate;
 * left byte-identical.
 */
@Override protected void onResume() { super.onResume(); Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() { @Override// w ww . j a v a 2 s .c o m public void onPreviewFrame(byte[] data, Camera camera) { if (notRequesting && mPreview.faces.size() >= 1 && imageFormat == ImageFormat.NV21) { // Block Request. notRequesting = false; try { Camera.Parameters parameters = camera.getParameters(); Size size = parameters.getPreviewSize(); textServerView.setText("Preparing Image to send"); YuvImage previewImg = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null); pWidth = previewImg.getWidth(); pHeight = previewImg.getHeight(); Log.d("face View", "Width: " + pWidth + " x Height: " + pHeight); prepareMatrix(matrix, 0, pWidth, pHeight); List<Rect> foundFaces = getFaces(); for (Rect cRect : foundFaces) { // Cropping ByteArrayOutputStream bao = new ByteArrayOutputStream(); previewImg.compressToJpeg(cRect, 100, bao); byte[] mydata = bao.toByteArray(); // Resizing ByteArrayOutputStream sbao = new ByteArrayOutputStream(); Bitmap bm = BitmapFactory.decodeByteArray(mydata, 0, mydata.length); Bitmap sbm = Bitmap.createScaledBitmap(bm, 100, 100, true); bm.recycle(); sbm.compress(Bitmap.CompressFormat.JPEG, 100, sbao); byte[] mysdata = sbao.toByteArray(); RequestParams params = new RequestParams(); params.put("upload", new ByteArrayInputStream(mysdata), "tmp.jpg"); textServerView.setText("Sending Image to the Server"); FaceMatchClient.post(":8080/match", params, new JsonHttpResponseHandler() { @Override public void onSuccess(JSONArray result) { Log.d("face onSuccess", result.toString()); try { JSONObject myJson = (JSONObject) result.get(0); float dist = (float) Double.parseDouble(myJson.getString("dist")); Log.d("distance", "" + dist); int level = (int) ((1 - dist) * 100); if (level > previousMatchLevel) { textView.setText("Match " + level + "% with " + myJson.getString("name") + " <" + myJson.getString("email") + "> "); 
loadImage(myJson.getString("classes"), myJson.getString("username")); } previousMatchLevel = level; trialCounter++; if (trialCounter < 100 && level < 74) { textServerView.setText("Retrying..."); notRequesting = true; } else if (trialCounter == 100) { textServerView.setText("Fail..."); } else { textServerView.setText("Found Good Match? If not try again!"); fdButtonClicked = false; trialCounter = 0; previousMatchLevel = 0; mCamera.stopFaceDetection(); button.setText("StartFaceDetection"); } } catch (JSONException e) { // TODO Auto-generated catch block e.printStackTrace(); } // informationView.showInfo(myJson); } }); } textServerView.setText("POST Sent"); textServerView.setText("Awaiting for response"); } catch (Exception e) { e.printStackTrace(); textServerView.setText("Error AsyncPOST"); } } } }; // Open the default i.e. the first rear facing camera. mCamera = Camera.open(); mCamera.setPreviewCallback(previewCallback); // To use front camera // mCamera = Camera.open(CameraActivity.getFrontCameraId()); mPreview.setCamera(mCamera); parameters = mCamera.getParameters(); imageFormat = parameters.getPreviewFormat(); PreviewSize = parameters.getPreviewSize();
}
From source file:com.wlanjie.streaming.camera.Camera1.java
private void adjustCameraParameters() { SortedSet<Size> sizes = mPreviewSizes.sizes(mAspectRatio); if (sizes == null) { // Not supported mAspectRatio = chooseAspectRatio(); sizes = mPreviewSizes.sizes(mAspectRatio); }/*from w ww.j a v a2s . co m*/ Size size = chooseOptimalSize(sizes); // Largest picture size in this ratio if (mShowingPreview) { mCamera.stopPreview(); } mCameraParameters.setRotation(calcCameraRotation(mDisplayOrientation)); int[] fps = chooseFpsRange(); mCameraParameters.setPreviewFpsRange(fps[0], fps[1]); mCameraParameters.setPreviewFormat(ImageFormat.NV21); // Largest picture size in this ratio if (mShowingPreview) { mCamera.stopPreview(); } mCameraParameters.setPreviewSize(size.getWidth(), size.getHeight()); mCamera.setParameters(mCameraParameters); mCamera.setDisplayOrientation(calcCameraRotation(mDisplayOrientation)); // mCallback.onPreview(size.getWidth(), size.getHeight()); final byte[] previewBuffer = new byte[size.getWidth() * size.getHeight() * 3 / 2]; mCamera.addCallbackBuffer(previewBuffer); mCamera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() { @Override public void onPreviewFrame(byte[] bytes, Camera camera) { mCallback.onPreviewFrame(bytes); mCamera.addCallbackBuffer(previewBuffer); } }); if (mShowingPreview) { mCamera.startPreview(); } }
From source file:com.jasompeter.openalpr.CameraActivity.java
public void startPreview() { try {// w w w. j a v a2s . c o m mCamera.setPreviewDisplay(mSurfaceHolder); } catch (IOException e) { e.printStackTrace(); Log.d(TAG, "Cannot set preview display."); } setCorrectOrientation(mCamera); setCorrectSize(mCamera, mSurfaceHolder.getSurfaceFrame().width(), mSurfaceHolder.getSurfaceFrame().height()); mCamera.startPreview(); mCamera.setPreviewCallback(new Camera.PreviewCallback() { @Override public void onPreviewFrame(byte[] data, Camera camera) { if (!mEnableRecognition) { return; } if (camera.getParameters().getPreviewFormat() == ImageFormat.NV21) { Camera.Size previewSize = camera.getParameters().getPreviewSize(); YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null); ByteArrayOutputStream baos = new ByteArrayOutputStream(); yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 50, baos); recognize(baos.toByteArray()); } } }); }
From source file:org.akvo.caddisfly.sensor.colorimetry.strip.detect.DetectStripTask.java
private Mat makeLab(byte[] data) { if (format == ImageFormat.NV21) { //convert preview data to Mat object in CIELab format Mat rgb = new Mat(height, width, CvType.CV_8UC3); Mat labImg = new Mat(height, width, CvType.CV_8UC3); Mat previewMat = new Mat(height + height / 2, width, CvType.CV_8UC1); previewMat.put(0, 0, data);//from ww w .ja v a 2 s . c o m Imgproc.cvtColor(previewMat, rgb, Imgproc.COLOR_YUV2RGB_NV21, rgb.channels()); Imgproc.cvtColor(rgb, labImg, Imgproc.COLOR_RGB2Lab, rgb.channels()); return labImg; } return null; }
From source file:com.almalence.opencam.SavingService.java
/**
 * Saves the processed result frame(s) of a capture session to storage and
 * registers them with the MediaStore.
 * <p>
 * Reads per-frame metadata (format, orientation, mirroring, size, crop) from
 * the shared-memory key/value store keyed by {@code sessionID}. For each
 * frame: builds the target filename from the mode name, opens an output
 * stream (falling back to the alternate save dir on failure), then writes the
 * frame — raw JPEG bytes from SwapHeap, a DNG via {@code saveDNGPicture}, or
 * an NV21 buffer compressed to JPEG (height cropped to a multiple of 16 to
 * avoid SKIA issues). Afterwards it computes the EXIF orientation (honoring
 * mirroring and {@code additionalRotationValue}), optionally rotates pixels
 * when EXIF orientation tags are disabled, optionally adds GPS values,
 * rewrites EXIF tags via {@code saveExifTags}, inserts a MediaStore record,
 * and broadcasts the new picture. Completion or failure is signaled through
 * {@code ApplicationScreen.getMessageHandler()}; the forced filename is
 * cleared in the finally block.
 * NOTE(review): heavily order-dependent shared-memory reads and file
 * renames — code left byte-identical.
 */
public void saveResultPicture(long sessionID) { initSavingPrefs();/* w w w .ja v a2 s . c o m*/ SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getApplicationContext()); // save fused result try { File saveDir = getSaveDir(false); Calendar d = Calendar.getInstance(); int imagesAmount = Integer .parseInt(getFromSharedMem("amountofresultframes" + Long.toString(sessionID))); if (imagesAmount == 0) imagesAmount = 1; int imageIndex = 0; String sImageIndex = getFromSharedMem("resultframeindex" + Long.toString(sessionID)); if (sImageIndex != null) imageIndex = Integer.parseInt(getFromSharedMem("resultframeindex" + Long.toString(sessionID))); if (imageIndex != 0) imagesAmount = 1; ContentValues values = null; boolean hasDNGResult = false; for (int i = 1; i <= imagesAmount; i++) { hasDNGResult = false; String format = getFromSharedMem("resultframeformat" + i + Long.toString(sessionID)); if (format != null && format.equalsIgnoreCase("dng")) hasDNGResult = true; String idx = ""; if (imagesAmount != 1) idx += "_" + ((format != null && !format.equalsIgnoreCase("dng") && hasDNGResult) ? i - imagesAmount / 2 : i); String modeName = getFromSharedMem("modeSaveName" + Long.toString(sessionID)); // define file name format. from settings! String fileFormat = getExportFileName(modeName); fileFormat += idx + ((format != null && format.equalsIgnoreCase("dng")) ? 
".dng" : ".jpg"); File file; if (ApplicationScreen.getForceFilename() == null) { file = new File(saveDir, fileFormat); } else { file = ApplicationScreen.getForceFilename(); } OutputStream os = null; if (ApplicationScreen.getForceFilename() != null) { os = getApplicationContext().getContentResolver() .openOutputStream(ApplicationScreen.getForceFilenameURI()); } else { try { os = new FileOutputStream(file); } catch (Exception e) { // save always if not working saving to sdcard e.printStackTrace(); saveDir = getSaveDir(true); if (ApplicationScreen.getForceFilename() == null) { file = new File(saveDir, fileFormat); } else { file = ApplicationScreen.getForceFilename(); } os = new FileOutputStream(file); } } // Take only one result frame from several results // Used for PreShot plugin that may decide which result to save if (imagesAmount == 1 && imageIndex != 0) i = imageIndex; String resultOrientation = getFromSharedMem( "resultframeorientation" + i + Long.toString(sessionID)); int orientation = 0; if (resultOrientation != null) orientation = Integer.parseInt(resultOrientation); String resultMirrored = getFromSharedMem("resultframemirrored" + i + Long.toString(sessionID)); Boolean cameraMirrored = false; if (resultMirrored != null) cameraMirrored = Boolean.parseBoolean(resultMirrored); int x = Integer.parseInt(getFromSharedMem("saveImageHeight" + Long.toString(sessionID))); int y = Integer.parseInt(getFromSharedMem("saveImageWidth" + Long.toString(sessionID))); if (orientation == 0 || orientation == 180 || (format != null && format.equalsIgnoreCase("dng"))) { x = Integer.valueOf(getFromSharedMem("saveImageWidth" + Long.toString(sessionID))); y = Integer.valueOf(getFromSharedMem("saveImageHeight" + Long.toString(sessionID))); } Boolean writeOrientationTag = true; String writeOrientTag = getFromSharedMem("writeorientationtag" + Long.toString(sessionID)); if (writeOrientTag != null) writeOrientationTag = Boolean.parseBoolean(writeOrientTag); if (format != null && 
format.equalsIgnoreCase("jpeg")) {// if result in jpeg format if (os != null) { byte[] frame = SwapHeap.SwapFromHeap( Integer.parseInt(getFromSharedMem("resultframe" + i + Long.toString(sessionID))), Integer.parseInt( getFromSharedMem("resultframelen" + i + Long.toString(sessionID)))); os.write(frame); try { os.close(); } catch (Exception e) { e.printStackTrace(); } } } else if (format != null && format.equalsIgnoreCase("dng")) { saveDNGPicture(i, sessionID, os, x, y, orientation, cameraMirrored); } else {// if result in nv21 format int yuv = Integer.parseInt(getFromSharedMem("resultframe" + i + Long.toString(sessionID))); com.almalence.YuvImage out = new com.almalence.YuvImage(yuv, ImageFormat.NV21, x, y, null); Rect r; String res = getFromSharedMem("resultfromshared" + Long.toString(sessionID)); if ((null == res) || "".equals(res) || "true".equals(res)) { // to avoid problems with SKIA int cropHeight = out.getHeight() - out.getHeight() % 16; r = new Rect(0, 0, out.getWidth(), cropHeight); } else { if (null == getFromSharedMem("resultcrop0" + Long.toString(sessionID))) { // to avoid problems with SKIA int cropHeight = out.getHeight() - out.getHeight() % 16; r = new Rect(0, 0, out.getWidth(), cropHeight); } else { int crop0 = Integer .parseInt(getFromSharedMem("resultcrop0" + Long.toString(sessionID))); int crop1 = Integer .parseInt(getFromSharedMem("resultcrop1" + Long.toString(sessionID))); int crop2 = Integer .parseInt(getFromSharedMem("resultcrop2" + Long.toString(sessionID))); int crop3 = Integer .parseInt(getFromSharedMem("resultcrop3" + Long.toString(sessionID))); r = new Rect(crop0, crop1, crop0 + crop2, crop1 + crop3); } } jpegQuality = Integer.parseInt(prefs.getString(ApplicationScreen.sJPEGQualityPref, "95")); if (!out.compressToJpeg(r, jpegQuality, os)) { if (ApplicationScreen.instance != null && ApplicationScreen.getMessageHandler() != null) { ApplicationScreen.getMessageHandler() .sendEmptyMessage(ApplicationInterface.MSG_EXPORT_FINISHED_IOEXCEPTION); 
} return; } SwapHeap.FreeFromHeap(yuv); } String orientation_tag = String.valueOf(0); // int sensorOrientation = CameraController.getSensorOrientation(); // int displayOrientation = CameraController.getDisplayOrientation(); // sensorOrientation = (360 + sensorOrientation + (cameraMirrored ? -displayOrientation // : displayOrientation)) % 360; // if (CameraController.isFlippedSensorDevice() && cameraMirrored) // orientation = (orientation + 180) % 360; switch (orientation) { default: case 0: orientation_tag = String.valueOf(0); break; case 90: orientation_tag = cameraMirrored ? String.valueOf(270) : String.valueOf(90); break; case 180: orientation_tag = String.valueOf(180); break; case 270: orientation_tag = cameraMirrored ? String.valueOf(90) : String.valueOf(270); break; } int exif_orientation = ExifInterface.ORIENTATION_NORMAL; if (writeOrientationTag) { switch ((orientation + 360) % 360) { default: case 0: exif_orientation = ExifInterface.ORIENTATION_NORMAL; break; case 90: exif_orientation = cameraMirrored ? ExifInterface.ORIENTATION_ROTATE_270 : ExifInterface.ORIENTATION_ROTATE_90; break; case 180: exif_orientation = ExifInterface.ORIENTATION_ROTATE_180; break; case 270: exif_orientation = cameraMirrored ? ExifInterface.ORIENTATION_ROTATE_90 : ExifInterface.ORIENTATION_ROTATE_270; break; } } else { switch ((additionalRotationValue + 360) % 360) { default: case 0: exif_orientation = ExifInterface.ORIENTATION_NORMAL; break; case 90: exif_orientation = cameraMirrored ? ExifInterface.ORIENTATION_ROTATE_270 : ExifInterface.ORIENTATION_ROTATE_90; break; case 180: exif_orientation = ExifInterface.ORIENTATION_ROTATE_180; break; case 270: exif_orientation = cameraMirrored ? 
ExifInterface.ORIENTATION_ROTATE_90 : ExifInterface.ORIENTATION_ROTATE_270; break; } } if (!enableExifTagOrientation) exif_orientation = ExifInterface.ORIENTATION_NORMAL; File parent = file.getParentFile(); String path = parent.toString().toLowerCase(); String name = parent.getName().toLowerCase(); values = new ContentValues(); values.put(ImageColumns.TITLE, file.getName().substring(0, file.getName().lastIndexOf(".") >= 0 ? file.getName().lastIndexOf(".") : file.getName().length())); values.put(ImageColumns.DISPLAY_NAME, file.getName()); values.put(ImageColumns.DATE_TAKEN, System.currentTimeMillis()); values.put(ImageColumns.MIME_TYPE, "image/jpeg"); if (enableExifTagOrientation) { if (writeOrientationTag) { values.put(ImageColumns.ORIENTATION, String.valueOf( (Integer.parseInt(orientation_tag) + additionalRotationValue + 360) % 360)); } else { values.put(ImageColumns.ORIENTATION, String.valueOf((additionalRotationValue + 360) % 360)); } } else { values.put(ImageColumns.ORIENTATION, String.valueOf(0)); } values.put(ImageColumns.BUCKET_ID, path.hashCode()); values.put(ImageColumns.BUCKET_DISPLAY_NAME, name); values.put(ImageColumns.DATA, file.getAbsolutePath()); File tmpFile; if (ApplicationScreen.getForceFilename() == null) { tmpFile = file; } else { tmpFile = new File(getApplicationContext().getFilesDir(), "buffer.jpeg"); tmpFile.createNewFile(); copyFromForceFileName(tmpFile); } if (!enableExifTagOrientation) { Matrix matrix = new Matrix(); if (writeOrientationTag && (orientation + additionalRotationValue) != 0) { matrix.postRotate((orientation + additionalRotationValue + 360) % 360); rotateImage(tmpFile, matrix); } else if (!writeOrientationTag && additionalRotationValue != 0) { matrix.postRotate((additionalRotationValue + 360) % 360); rotateImage(tmpFile, matrix); } } if (useGeoTaggingPrefExport) { Location l = MLocation.getLocation(getApplicationContext()); if (l != null) { double lat = l.getLatitude(); double lon = l.getLongitude(); boolean hasLatLon = (lat 
!= 0.0d) || (lon != 0.0d); if (hasLatLon) { values.put(ImageColumns.LATITUDE, l.getLatitude()); values.put(ImageColumns.LONGITUDE, l.getLongitude()); } } } File modifiedFile = saveExifTags(tmpFile, sessionID, i, x, y, exif_orientation, useGeoTaggingPrefExport, enableExifTagOrientation); if (ApplicationScreen.getForceFilename() == null) { file.delete(); modifiedFile.renameTo(file); } else { copyToForceFileName(modifiedFile); tmpFile.delete(); modifiedFile.delete(); } Uri uri = getApplicationContext().getContentResolver().insert(Images.Media.EXTERNAL_CONTENT_URI, values); broadcastNewPicture(uri); } ApplicationScreen.getMessageHandler().sendEmptyMessage(ApplicationInterface.MSG_EXPORT_FINISHED); } catch (IOException e) { e.printStackTrace(); ApplicationScreen.getMessageHandler() .sendEmptyMessage(ApplicationInterface.MSG_EXPORT_FINISHED_IOEXCEPTION); return; } catch (Exception e) { e.printStackTrace(); ApplicationScreen.getMessageHandler().sendEmptyMessage(ApplicationInterface.MSG_EXPORT_FINISHED); } finally { ApplicationScreen.setForceFilename(null); } }