List of usage examples for the android.graphics.Matrix constructor, Matrix()
public Matrix()
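As a quick orientation before the collected examples: a new Matrix() starts out as the identity transform, and concrete transforms (rotate, scale, translate) are composed onto it afterwards. The sketch below is illustrative only; sourceBitmap is an assumed, already-decoded Bitmap rather than code taken from any example on this page.

Matrix matrix = new Matrix();          // identity transform
matrix.postRotate(90);                 // compose a 90-degree rotation
Bitmap rotated = Bitmap.createBitmap(  // apply the matrix while copying the bitmap
        sourceBitmap, 0, 0,
        sourceBitmap.getWidth(), sourceBitmap.getHeight(),
        matrix, true);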
From source file:com.tealeaf.TeaLeaf.java
private Bitmap rotateBitmap(Bitmap bitmap, int rotate) {
    // rotate as needed
    Bitmap bmp;
    // only allow the largest size to be 768 for now, several phones
    // including the galaxy s3 seem to crash with rotating very large
    // images (out of memory errors) from the gallery
    int w = bitmap.getWidth();
    int h = bitmap.getHeight();
    Bitmap scaled = bitmap;
    if (w > h && w > 768) {
        float ratio = 768.f / (float) w;
        w = 768;
        h = (int) (ratio * h);
        scaled = Bitmap.createScaledBitmap(bitmap, w, h, true);
        if (bitmap != scaled) {
            bitmap.recycle();
        }
    }
    if (h > w && h > 768) {
        float ratio = 768.f / (float) h;
        h = 768;
        w = (int) (ratio * w);
        scaled = Bitmap.createScaledBitmap(bitmap, w, h, true);
        if (bitmap != scaled) {
            bitmap.recycle();
        }
    }
    int newWidth = scaled.getWidth();
    int newHeight = scaled.getHeight();
    int degrees = 0;
    if (rotate == ROTATE_90 || rotate == ROTATE_270) {
        newWidth = scaled.getHeight();
        newHeight = scaled.getWidth();
    }
    Matrix matrix = new Matrix();
    matrix.postRotate(rotate);
    bmp = Bitmap.createBitmap(scaled, 0, 0, scaled.getWidth(), scaled.getHeight(), matrix, true);
    if (scaled != bmp) {
        scaled.recycle();
    }
    return bmp;
}
From source file:bikebadger.RideFragment.java
private void updateUI() {
    boolean trackingLocations = mRideManager.isTrackingLocations();
    boolean trackingThisRun = mRideManager.isTrackingRun(mRide);
    boolean mRunNotNull = mRide != null;
    String msg = "";
    Log.d(Constants.APP.TAG, "RideFragment::updateUI");
    if (trackingLocations)
        Log.d(Constants.APP.TAG, "trackingLocations=" + trackingLocations);
    else
        Log.d(Constants.APP.TAG, "trackingLocations is FALSE");
    if (trackingThisRun)
        Log.d(Constants.APP.TAG, "trackingThisRun=" + trackingThisRun);
    else
        Log.d(Constants.APP.TAG, "trackingThisRun is FALSE");
    if (mRunNotNull)
        Log.d(Constants.APP.TAG, "mRide is NOT null!");
    else
        Log.d(Constants.APP.TAG, "mRide is NULL!");
    if (mRide != null) {
        mStartedTextView.setText(mRide.getStartDate().toString());
    }
    int durationSeconds = 0;
    if (mRide != null && mLastLocation != null) {
        Log.d(Constants.APP.TAG, "mRide != null && mLastLocation != null");
        mMessagebarView.setTextColor(Color.WHITE);
        //String msg = "mRide != null && mLastLocation != null";
        double bearing1 = mLastLocation.getBearing();
        double bearing2 = mRideManager.mClosestBearing;
        double bearingDif = bearing2 - bearing1;
        if (mRideManager.mClosestWaypoint != null) {
            Log.d(Constants.APP.TAG, "mRideManager.mClosestWaypoint != null");
            if (mRideManager.mClosestWaypoint.IsTriggered()) {
                mMessagebarView.setTextColor(Color.RED);
                msg = "\"" + mRideManager.mClosestWaypoint.getName() + "\"";
                msg += " (Active)";
                Bitmap activeBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_triggered);
                mArrowView.setImageBitmap(activeBitmap);
            } else {
                mMessagebarView.setTextColor(Color.WHITE);
                msg = "\"" + mRideManager.mClosestWaypoint.getName() + "\"";
                msg += " at ";
                msg += Formatter.FormatDistanceMiles(mRideManager.mClosestDistance);
                //msg += " ft";
                //msg += " (" + Formatter.FormatDecimal(bearing1) + "->";
                //msg += Formatter.FormatDecimal(bearing2) + ":" + Formatter.FormatDecimal(bearingDif);
                // Point the arrow to the bearing of the closest waypoint...
                Bitmap myBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_arrow_white);
                Matrix matrix = new Matrix();
                matrix.postRotate((float) (bearingDif));
                final int width = myBitmap.getWidth();
                final int height = myBitmap.getHeight();
                Log.d(Constants.APP.TAG,
                        "myBitmap.getWidth=" + myBitmap.getWidth() + "myBitmap.getHeight=" + myBitmap.getHeight());
                Bitmap rotatedBitmap = Bitmap.createBitmap(myBitmap, 0, 0, width, height, matrix, true);
                mArrowView.setImageBitmap(rotatedBitmap);
            }
        }
        // set the top message line
        mMessagebarView.setText(msg);
        durationSeconds = mRide.StopwatchSeconds();
        mSpeedTextView.setText(Formatter.FormatDecimal(mLastLocation.getSpeed() * Ride.METERS_TO_MILES));
        mTargetSpeedTextView.setText(Formatter.FormatDecimal(mRide.GetTargetAvgSpeed()));
        mAverageSpeedTextView.setText(Formatter.FormatDecimal(mRide.GetAverageSpeed()));
        mDurationTextView.setText(Formatter.FormatDuration(durationSeconds));
    } else if (mRide == null && mLastLocation == null) {
        Log.d(Constants.APP.TAG, "mRide == null && mLastLocation == null");
        mTargetSpeedTextView.setText(Formatter.FormatDecimal(mRideManager.GetDefaultTargetAvgSpeed()));
        mMessagebarView.setTextColor(Color.WHITE);
        if (mRideManager.WaypointsLoaded()) {
            mMessagebarView.setText("\"" + mRideManager.GetGPXFileName() + "\" loaded ("
                    + mRideManager.mWaypoints.size() + ")");
        }
    } else if (mRide != null && mLastLocation == null) {
        // mLastLocation is null
        mMessagebarView.setTextColor(Color.RED);
        mMessagebarView.setText("Waiting on GPS...");
    } else if (mRide == null && mLastLocation != null) {
        Log.d(Constants.APP.TAG, "mRide == null && mLastLocation != null");
    }
    if (!trackingThisRun) {
        mStartStopButton.setBackgroundResource(R.drawable.ic_button_white_play);
        mStartStopButton.setOnClickListener(mPlayButtonClickListener);
    }
    if (trackingThisRun) {
        mStartStopButton.setBackgroundResource(R.drawable.ic_button_white_pause);
        mStartStopButton.setOnClickListener(mPauseButtonClickListener);
    }
    //mStartButton.setEnabled(!started);
    mResetButton.setEnabled(trackingLocations && trackingThisRun);
    mWaypointButton.setEnabled(trackingLocations);
    //mTargetEditButton.setEnabled(trackingLocations && trackingThisRun);
}
From source file:com.android.ex.chips.RecipientEditTextView.java
private Bitmap createUnselectedChip(final RecipientEntry contact, final TextPaint paint,
        final boolean leaveBlankIconSpacer) {
    // Ellipsize the text so that it takes AT MOST the entire width of the
    // autocomplete text entry area. Make sure to leave space for padding
    // on the sides.
    final int height = (int) mChipHeight;
    int iconWidth = height;
    final float[] widths = new float[1];
    paint.getTextWidths(" ", widths);
    final float availableWidth = calculateAvailableWidth();
    final String chipDisplayText = createChipDisplayText(contact);
    final CharSequence ellipsizedText = ellipsizeText(chipDisplayText, paint,
            availableWidth - iconWidth - widths[0]);
    // Make sure there is a minimum chip width so the user can ALWAYS
    // tap a chip without difficulty.
    final int width = Math.max(iconWidth * 2,
            (int) Math.floor(paint.measureText(ellipsizedText, 0, ellipsizedText.length()))
                    + mChipPadding * 2 + iconWidth);
    // Create the background of the chip.
    final Bitmap tmpBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    final Canvas canvas = new Canvas(tmpBitmap);
    final Drawable background = getChipBackground(contact);
    if (background != null) {
        background.setBounds(0, 0, width, height);
        background.draw(canvas);
        // Don't draw photos for recipients that have been typed in OR generated on the fly.
        final long contactId = contact.getContactId();
        final boolean drawPhotos = isPhoneQuery() ? contactId != RecipientEntry.INVALID_CONTACT
                : contactId != RecipientEntry.INVALID_CONTACT
                        && contactId != RecipientEntry.GENERATED_CONTACT
                        && !TextUtils.isEmpty(contact.getDisplayName());
        if (drawPhotos) {
            byte[] photoBytes = contact.getPhotoBytes();
            // There may not be a photo yet if anything but the first contact address
            // was selected.
            if (photoBytes == null && contact.getPhotoThumbnailUri() != null) {
                // TODO: cache this in the recipient entry?
                getAdapter().fetchPhoto(contact, contact.getPhotoThumbnailUri());
                photoBytes = contact.getPhotoBytes();
            }
            Bitmap photo;
            if (photoBytes != null)
                photo = BitmapFactory.decodeByteArray(photoBytes, 0, photoBytes.length);
            else
                // TODO: can the scaled down default photo be cached?
                photo = mDefaultContactPhoto;
            // Draw the photo on the left side.
            if (photo != null) {
                final RectF src = new RectF(0, 0, photo.getWidth(), photo.getHeight());
                final Rect backgroundPadding = new Rect();
                mChipBackground.getPadding(backgroundPadding);
                final RectF dst = new RectF(width - iconWidth + backgroundPadding.left,
                        0 + backgroundPadding.top, width - backgroundPadding.right,
                        height - backgroundPadding.bottom);
                final Matrix matrix = new Matrix();
                matrix.setRectToRect(src, dst, Matrix.ScaleToFit.FILL);
                canvas.drawBitmap(photo, matrix, paint);
            }
        } else if (!leaveBlankIconSpacer || isPhoneQuery())
            iconWidth = 0;
        paint.setColor(ContextCompat.getColor(getContext(), android.R.color.black));
        // Vertically center the text in the chip.
        canvas.drawText(ellipsizedText, 0, ellipsizedText.length(), mChipPadding,
                getTextYOffset((String) ellipsizedText, paint, height), paint);
    } else
        Log.w(TAG, "Unable to draw a background for the chips as it was never set");
    return tmpBitmap;
}
From source file:camera2basic.Camera2BasicFragment.java
/**
 * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
 * This method should be called after the camera preview size is determined in
 * setUpCameraOutputs and also the size of `mTextureView` is fixed.
 *
 * @param viewWidth  The width of `mTextureView`
 * @param viewHeight The height of `mTextureView`
 */
private void configureTransform(int viewWidth, int viewHeight) {
    Activity activity = getActivity();
    if (null == mTextureView || null == mPreviewSize || null == activity) {
        return;
    }
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    Matrix matrix = new Matrix();
    RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
    float centerX = viewRect.centerX();
    float centerY = viewRect.centerY();
    if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
        float scale = Math.max((float) viewHeight / mPreviewSize.getHeight(),
                (float) viewWidth / mPreviewSize.getWidth());
        matrix.postScale(scale, scale, centerX, centerY);
        matrix.postRotate(90 * (rotation - 2), centerX, centerY);
    } else if (Surface.ROTATION_180 == rotation) {
        matrix.postRotate(180, centerX, centerY);
    }
    mTextureView.setTransform(matrix);
}
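In the camera2basic sample this method is normally triggered from the TextureView's SurfaceTextureListener whenever the view size changes. The wiring below is only a sketch of that pattern; the commented-out openCamera call stands in for whatever helper the fragment actually uses to open the camera.

// Sketch: re-run configureTransform() whenever the TextureView is resized.
private final TextureView.SurfaceTextureListener mSurfaceTextureListener =
        new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
        // openCamera(width, height); // assumed helper that opens the camera
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
        configureTransform(width, height);
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
        return true;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture texture) {
    }
};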
From source file:com.bofsoft.laio.laiovehiclegps.Fragment.BaiduMapFragment.java
private void addOverlay(GPSInfoData gpsInfoData) {
    // Build a map marker for this vehicle's GPS position.
    LatLng llA = new LatLng(gpsInfoData.Latitude, gpsInfoData.Longitude);
    // Convert the raw GPS (WGS-84) coordinate into a Baidu map coordinate.
    CoordinateConverter converter = new CoordinateConverter();
    converter.from(CoordinateConverter.CoordType.GPS);
    converter.coord(llA);
    // desLatLng = converter.convert();
    LatLng tmpLL = converter.convert();
    View v_temp = LayoutInflater.from(getActivity()).inflate(R.layout.map_marker, null);
    TextView tv_temp = (TextView) v_temp.findViewById(R.id.tv_marker);   // marker label
    ImageView img_temp = (ImageView) v_temp.findViewById(R.id.iv_marker); // marker icon
    tv_temp.setText(gpsInfoData.License); // show the vehicle's license plate
    // Choose the marker icon based on the vehicle status.
    if (gpsInfoData.Status == 0) {
        img_temp.setImageResource(imgIds[0]);
        bitmap = BitmapFactory.decodeResource(getResources(), imgIds[0]);
    } else {
        img_temp.setImageResource(imgIds[2]);
        bitmap = BitmapFactory.decodeResource(getResources(), imgIds[2]);
    }
    Matrix matrix = new Matrix();
    img_temp.setScaleType(ImageView.ScaleType.MATRIX); // required
    matrix.postScale(1, 1);
    // Rotate the icon around its center to match the vehicle's heading.
    int dw = bitmap.getWidth();
    int dh = bitmap.getHeight();
    matrix.postRotate((float) gpsInfoData.Direction, (float) dw / 2, (float) dh / 2);
    img_temp.setImageMatrix(matrix);
    bd_temp = BitmapDescriptorFactory.fromView(v_temp); // build the marker icon from the inflated view
    MarkerOptions oo = new MarkerOptions().position(tmpLL).icon(bd_temp).anchor(0.5f, 1.0f).zIndex(15);
    Marker mMarkerA = (Marker) (mBaiduMap.addOverlay(oo));
    Bundle bundle = new Bundle();
    bundle.putString("License", gpsInfoData.getLicense());
    mMarkerA.setExtraInfo(bundle);
}
From source file:com.ezartech.ezar.videooverlay.ezAR.java
private void updateMatrix() {
    Matrix workMatrix = new Matrix();
    Size sz = getWebViewSize();
    Util.prepareMatrix(workMatrix, isMirror(), getDisplayOrientation(), sz.width, sz.height);
    // In face detection, the matrix converts the driver coordinates to UI
    // coordinates. In tap focus, the inverted matrix converts the UI
    // coordinates to driver coordinates.
    workMatrix.invert(matrix);
}
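For reference, an inverted matrix like the one stored above is typically used with Matrix.mapPoints() to translate a tap position into camera driver coordinates. The helper below is only a sketch with a hypothetical name, not part of the ezAR source; it assumes the `matrix` field populated by updateMatrix().

// Sketch (hypothetical helper): map a tap at (x, y) in UI coordinates into
// driver coordinates using the inverted matrix stored by updateMatrix().
private float[] uiPointToDriver(float x, float y) {
    float[] point = { x, y };
    matrix.mapPoints(point); // transforms the point in place
    return point;
}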
From source file:com.mobantica.DriverItRide.activities.ActivityProfile.java
public static Bitmap rotateImage(Bitmap source, float angle) {
    Bitmap retVal;
    Matrix matrix = new Matrix();
    matrix.postRotate(angle);
    retVal = Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
    return retVal;
}
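A helper like this is usually fed an angle derived from the image's EXIF orientation. The snippet below is a sketch of a typical call site rather than code from ActivityProfile; imagePath is an assumed variable, and the try/catch for ExifInterface's IOException is omitted for brevity.

// Sketch: derive the rotation angle from EXIF data, then fix the bitmap.
ExifInterface exif = new ExifInterface(imagePath); // imagePath is hypothetical
int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
        ExifInterface.ORIENTATION_NORMAL);
float angle = 0f;
if (orientation == ExifInterface.ORIENTATION_ROTATE_90) {
    angle = 90f;
} else if (orientation == ExifInterface.ORIENTATION_ROTATE_180) {
    angle = 180f;
} else if (orientation == ExifInterface.ORIENTATION_ROTATE_270) {
    angle = 270f;
}
Bitmap upright = rotateImage(BitmapFactory.decodeFile(imagePath), angle);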
From source file:com.cordova.photo.CameraLauncher.java
/**
 * Figure out if the bitmap should be rotated. For instance if the picture was taken in
 * portrait mode.
 *
 * @param rotate
 * @param bitmap
 * @return rotated bitmap
 */
private Bitmap getRotatedBitmap(int rotate, Bitmap bitmap, ExifHelper exif) {
    Matrix matrix = new Matrix();
    if (rotate == 180) {
        matrix.setRotate(rotate);
    } else {
        matrix.setRotate(rotate, (float) bitmap.getWidth() / 2, (float) bitmap.getHeight() / 2);
    }
    try {
        bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
        exif.resetOrientation();
    } catch (OutOfMemoryError oom) {
        // You can run out of memory if the image is very large:
        // http://simonmacdonald.blogspot.ca/2012/07/change-to-camera-code-in-phonegap-190.html
        // If this happens, simply do not rotate the image and return it unmodified.
        // If you do not catch the OutOfMemoryError, the Android app crashes.
    }
    return bitmap;
}
From source file:com.netcompss.ffmpeg4android_client.BaseWizard.java
@SuppressWarnings("unused")
private String reporteds(String path) {
    ExifInterface exif = null;
    try {
        exif = new ExifInterface(path);
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
    Matrix matrix = new Matrix();
    if (orientation == 6) {
        matrix.postRotate(90);
    } else if (orientation == 3) {
        matrix.postRotate(180);
    } else if (orientation == 8) {
        matrix.postRotate(270);
    }
    if (path != null) {
        if (path.contains("http")) {
            try {
                URL url = new URL(path);
                HttpGet httpRequest = null;
                httpRequest = new HttpGet(url.toURI());
                HttpClient httpclient = new DefaultHttpClient();
                HttpResponse response = (HttpResponse) httpclient.execute(httpRequest);
                HttpEntity entity = response.getEntity();
                BufferedHttpEntity bufHttpEntity = new BufferedHttpEntity(entity);
                InputStream input = bufHttpEntity.getContent();
                Bitmap bitmap = BitmapFactory.decodeStream(input);
                input.close();
                return getPath(bitmap);
            } catch (MalformedURLException e) {
                Log.e("ImageActivity", "bad url", e);
            } catch (Exception e) {
                Log.e("ImageActivity", "io error", e);
            }
        } else {
            Options options = new Options();
            options.inSampleSize = 2;
            options.inJustDecodeBounds = true;
            BitmapFactory.decodeResource(getApplicationContext().getResources(), srcBgId, options);
            options.inJustDecodeBounds = false;
            options.inSampleSize = calculateInSampleSize(options, w, h);
            Bitmap unbgbtmp = BitmapFactory.decodeResource(getApplicationContext().getResources(), srcBgId, options);
            Bitmap unrlbtmp = ScalingUtilities.decodeFile(path, w, h, ScalingLogic.FIT);
            Bitmap rlbtmp = null;
            if (unrlbtmp != null) {
                rlbtmp = ScalingUtilities.createScaledBitmap(unrlbtmp, w, h, ScalingLogic.FIT);
                unrlbtmp.recycle(); // recycle the unscaled bitmap only after it has been used
            }
            if (unbgbtmp != null && rlbtmp != null) {
                Bitmap bgbtmp = ScalingUtilities.createScaledBitmap(unbgbtmp, w, h, ScalingLogic.FIT);
                Bitmap newscaledBitmap = ProcessingBitmapTwo(bgbtmp, rlbtmp);
                unbgbtmp.recycle();
                return getPath(newscaledBitmap);
            }
        }
    }
    return path;
}
From source file:com.phonegap.plugins.wsiCameraLauncher.WsiCameraLauncher.java
/**
 * Called when the camera view exits.
 *
 * @param requestCode
 *            The request code originally supplied to
 *            startActivityForResult(), allowing you to identify who this
 *            result came from.
 * @param resultCode
 *            The integer result code returned by the child activity through
 *            its setResult().
 * @param intent
 *            An Intent, which can return result data to the caller (various
 *            data can be attached to Intent "extras").
 */
public void onActivityResult(int requestCode, int resultCode, Intent intent) {
    // Get src and dest types from request code
    int srcType = (requestCode / 16) - 1;
    int destType = (requestCode % 16) - 1;
    int rotate = 0;
    Log.d(LOG_TAG, "-z");
    // If retrieving photo from library
    if ((srcType == PHOTOLIBRARY) || (srcType == SAVEDPHOTOALBUM)) {
        Log.d(LOG_TAG, "-y");
        if (resultCode == Activity.RESULT_OK) {
            Log.d(LOG_TAG, "-x");
            Uri uri = intent.getData();
            Log.d(LOG_TAG, "-w");
            // If you ask for video or all media type you will automatically
            // get back a file URI
            // and there will be no attempt to resize any returned data
            if (this.mediaType != PICTURE) {
                Log.d(LOG_TAG, "mediaType not PICTURE, so must be Video");
                String metadataDateTime = "";
                ExifInterface exif;
                try {
                    exif = new ExifInterface(this.getRealPathFromURI(uri, this.cordova));
                    if (exif.getAttribute(ExifInterface.TAG_DATETIME) != null) {
                        Log.d(LOG_TAG, "z4a");
                        metadataDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME).toString();
                        metadataDateTime = metadataDateTime.replaceFirst(":", "-");
                        metadataDateTime = metadataDateTime.replaceFirst(":", "-");
                    }
                } catch (IOException e2) {
                    e2.printStackTrace();
                }
                Log.d(LOG_TAG, "before create thumbnail");
                Bitmap bitmap = ThumbnailUtils.createVideoThumbnail(
                        (new File(this.getRealPathFromURI(uri, this.cordova))).getAbsolutePath(),
                        MediaStore.Images.Thumbnails.MINI_KIND);
                Log.d(LOG_TAG, "after create thumbnail");
                String mid = generateRandomMid();
                try {
                    String filePathMedium = this.getTempDirectoryPath(this.cordova.getActivity())
                            + "/medium_" + mid + ".jpg";
                    FileOutputStream foMedium = new FileOutputStream(filePathMedium);
                    bitmap.compress(CompressFormat.JPEG, 100, foMedium);
                    foMedium.flush();
                    foMedium.close();
                    bitmap.recycle();
                    System.gc();
                    JSONObject mediaFile = new JSONObject();
                    try {
                        mediaFile.put("mid", mid);
                        mediaFile.put("mediaType", "video");
                        mediaFile.put("filePath", filePathMedium);
                        mediaFile.put("filePathMedium", filePathMedium);
                        mediaFile.put("filePathThumb", filePathMedium);
                        mediaFile.put("typeOfPluginResult", "initialRecordInformer");
                        String absolutePath = (new File(this.getRealPathFromURI(uri, this.cordova)))
                                .getAbsolutePath();
                        mediaFile.put("fileExt", absolutePath.substring(absolutePath.lastIndexOf(".") + 1));
                        if (metadataDateTime != "") {
                            mediaFile.put("metadataDateTime", metadataDateTime);
                        }
                    } catch (JSONException e) {
                        Log.d(LOG_TAG, "error: " + e.getStackTrace().toString());
                    }
                    Log.d(LOG_TAG, "mediafile at 638" + mediaFile.toString());
                    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK,
                            (new JSONArray()).put(mediaFile));
                    pluginResult.setKeepCallback(true);
                    this.callbackContext.sendPluginResult(pluginResult);
                    new UploadVideoToS3Task().execute(new File(this.getRealPathFromURI(uri, this.cordova)),
                            this.callbackContext, mid, mediaFile);
                } catch (FileNotFoundException e1) {
                    e1.printStackTrace();
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
            } else {
                String imagePath = this.getRealPathFromURI(uri, this.cordova);
                String mimeType = FileUtils.getMimeType(imagePath);
                // If we don't have a valid image, quit.
                if (imagePath == null || mimeType == null
                        || !(mimeType.equalsIgnoreCase("image/jpeg") || mimeType.equalsIgnoreCase("image/png"))) {
                    Log.d(LOG_TAG, "I either have a null image path or bitmap");
                    this.failPicture("Unable to retrieve path to picture!");
                    return;
                }
                String mid = generateRandomMid();
                Log.d(LOG_TAG, "a");
                JSONObject mediaFile = new JSONObject();
                Log.d(LOG_TAG, "b");
                try {
                    FileInputStream fi = new FileInputStream(imagePath);
                    Bitmap bitmap = BitmapFactory.decodeStream(fi);
                    fi.close();
                    Log.d(LOG_TAG, "z1");
                    // try to get exif data
                    ExifInterface exif = new ExifInterface(imagePath);
                    Log.d(LOG_TAG, "z2");
                    JSONObject metadataJson = new JSONObject();
                    Log.d(LOG_TAG, "z3");
                    /*
                    JSONObject latlng = new JSONObject();
                    String lat = "0";
                    String lng = "0";
                    if (exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE) != null) {
                        lat = exif.getAttribute(ExifInterface.TAG_GPS_LATITUDE);
                    }
                    if (exif.getAttribute(ExifInterface.TAG_GPS_LONGITUDE) != null) {
                        lng = exif.getAttribute(ExifInterface.TAG_GPS_LONGITUDE);
                    }
                    latlng.put("lat", lat);
                    latlng.put("lng", lng);
                    Log.d(LOG_TAG, "z4");
                    metadataJson.put("locationData", latlng);
                    */
                    String metadataDateTime = "";
                    if (exif.getAttribute(ExifInterface.TAG_DATETIME) != null) {
                        Log.d(LOG_TAG, "z4a");
                        JSONObject exifWrapper = new JSONObject();
                        exifWrapper.put("DateTimeOriginal",
                                exif.getAttribute(ExifInterface.TAG_DATETIME).toString());
                        exifWrapper.put("DateTimeDigitized",
                                exif.getAttribute(ExifInterface.TAG_DATETIME).toString());
                        metadataDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME).toString();
                        metadataDateTime = metadataDateTime.replaceFirst(":", "-");
                        metadataDateTime = metadataDateTime.replaceFirst(":", "-");
                        Log.d(LOG_TAG, "z5");
                        metadataJson.put("Exif", exifWrapper);
                    }
                    Log.d(LOG_TAG, "z6");
                    Log.d(LOG_TAG, "metadataJson: " + metadataJson.toString());
                    Log.d(LOG_TAG, "metadataDateTime: " + metadataDateTime.toString());
                    if (exif.getAttribute(ExifInterface.TAG_ORIENTATION) != null) {
                        int o = Integer.parseInt(exif.getAttribute(ExifInterface.TAG_ORIENTATION));
                        Log.d(LOG_TAG, "z7");
                        if (o == ExifInterface.ORIENTATION_NORMAL) {
                            rotate = 0;
                        } else if (o == ExifInterface.ORIENTATION_ROTATE_90) {
                            rotate = 90;
                        } else if (o == ExifInterface.ORIENTATION_ROTATE_180) {
                            rotate = 180;
                        } else if (o == ExifInterface.ORIENTATION_ROTATE_270) {
                            rotate = 270;
                        } else {
                            rotate = 0;
                        }
                        Log.d(LOG_TAG, "z8");
                        Log.d(LOG_TAG, "rotate: " + rotate);
                        // try to correct orientation
                        if (rotate != 0) {
                            Matrix matrix = new Matrix();
                            Log.d(LOG_TAG, "z9");
                            matrix.setRotate(rotate);
                            Log.d(LOG_TAG, "z10");
                            bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(),
                                    matrix, true);
                            Log.d(LOG_TAG, "z11");
                        }
                    }
                    Log.d(LOG_TAG, "c");
                    String filePath = this.getTempDirectoryPath(this.cordova.getActivity())
                            + "/econ_" + mid + ".jpg";
                    FileOutputStream foEcon = new FileOutputStream(filePath);
                    fitInsideSquare(bitmap, 850).compress(CompressFormat.JPEG, 45, foEcon);
                    foEcon.flush();
                    foEcon.close();
                    Log.d(LOG_TAG, "d");
                    String filePathMedium = this.getTempDirectoryPath(this.cordova.getActivity())
                            + "/medium_" + mid + ".jpg";
                    FileOutputStream foMedium = new FileOutputStream(filePathMedium);
                    makeInsideSquare(bitmap, 320).compress(CompressFormat.JPEG, 55, foMedium);
                    foMedium.flush();
                    foMedium.close();
                    Log.d(LOG_TAG, "e");
                    String filePathThumb = this.getTempDirectoryPath(this.cordova.getActivity())
                            + "/thumb_" + mid + ".jpg";
                    FileOutputStream foThumb = new FileOutputStream(filePathThumb);
                    makeInsideSquare(bitmap, 175).compress(CompressFormat.JPEG, 55, foThumb);
                    foThumb.flush();
                    foThumb.close();
                    bitmap.recycle();
                    System.gc();
                    Log.d(LOG_TAG, "f");
                    mediaFile.put("mid", mid);
                    mediaFile.put("mediaType", "photo");
                    mediaFile.put("filePath", filePath);
                    mediaFile.put("filePathMedium", filePath);
                    mediaFile.put("filePathThumb", filePath);
                    mediaFile.put("typeOfPluginResult", "initialRecordInformer");
                    //mediaFile.put("metadataJson", metadataJson);
                    if (metadataDateTime != "") {
                        mediaFile.put("metadataDateTime", metadataDateTime);
                    }
                    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK,
                            (new JSONArray()).put(mediaFile));
                    pluginResult.setKeepCallback(true);
                    this.callbackContext.sendPluginResult(pluginResult);
                    Log.d(LOG_TAG, "g");
                    Log.d(LOG_TAG, "mediaFile " + mediaFile.toString());
                    new UploadFilesToS3Task().execute(new File(filePath), new File(filePathMedium),
                            new File(filePathThumb), this.callbackContext, mid, mediaFile);
                    Log.d(LOG_TAG, "h");
                } catch (FileNotFoundException e) {
                    Log.d(LOG_TAG, "error: " + e.getStackTrace().toString());
                } catch (IOException e1) {
                    Log.d(LOG_TAG, "error: " + e1.getStackTrace().toString());
                } catch (JSONException e2) {
                    Log.d(LOG_TAG, "error: " + e2.getStackTrace().toString());
                }
                /*
                if (this.correctOrientation) {
                    String[] cols = { MediaStore.Images.Media.ORIENTATION };
                    Cursor cursor = this.cordova.getActivity().getContentResolver()
                            .query(intent.getData(), cols, null, null, null);
                    if (cursor != null) {
                        cursor.moveToPosition(0);
                        rotate = cursor.getInt(0);
                        cursor.close();
                    }
                    if (rotate != 0) {
                        Matrix matrix = new Matrix();
                        matrix.setRotate(rotate);
                        bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(),
                                matrix, true);
                    }
                }
                // Create an ExifHelper to save the exif
                // data that is lost during compression
                String resizePath = this.getTempDirectoryPath(this.cordova.getActivity()) + "/resize.jpg";
                ExifHelper exif = new ExifHelper();
                try {
                    if (this.encodingType == JPEG) {
                        exif.createInFile(resizePath);
                        exif.readExifData();
                        rotate = exif.getOrientation();
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
                OutputStream os = new FileOutputStream(resizePath);
                bitmap.compress(Bitmap.CompressFormat.JPEG, this.mQuality, os);
                os.close();
                // Restore exif data to file
                if (this.encodingType == JPEG) {
                    exif.createOutFile(this.getRealPathFromURI(uri, this.cordova));
                    exif.writeExifData();
                }
                if (bitmap != null) {
                    bitmap.recycle();
                    bitmap = null;
                }
                System.gc();
                // The resized image is cached by the app in
                // order to get around this and not have to delete your
                // application cache I'm adding the current
                // system time to the end of the file url.
                this.callbackContext.success("file://" + resizePath + "?" + System.currentTimeMillis());
                */
            }
        } else if (resultCode == Activity.RESULT_CANCELED) {
            this.failPicture("Selection cancelled.");
        } else {
            this.failPicture("Selection did not complete!");
        }
    }
}