List of usage examples for android.view.Surface.ROTATION_0
int ROTATION_0
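Surface.ROTATION_0 is the value returned by Display.getRotation() when the screen is in its natural, unrotated orientation; the related constants are ROTATION_90, ROTATION_180 and ROTATION_270. Before the per-project examples below, here is a minimal, self-contained sketch of the common pattern of switching on that value to get the rotation in degrees. The class and method names (RotationUtils, displayRotationToDegrees) are illustrative and not taken from any of the projects listed.

import android.app.Activity;
import android.view.Surface;

public final class RotationUtils {

    private RotationUtils() {
    }

    /**
     * Converts the current display rotation into degrees.
     * Surface.ROTATION_0 means the device is in its natural orientation.
     */
    public static int displayRotationToDegrees(Activity activity) {
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        switch (rotation) {
            case Surface.ROTATION_90:
                return 90;
            case Surface.ROTATION_180:
                return 180;
            case Surface.ROTATION_270:
                return 270;
            case Surface.ROTATION_0:
            default:
                return 0;
        }
    }
}

Most of the examples that follow use the same kind of switch, either to remap sensor coordinate axes or to decide whether the camera preview width and height need to be swapped.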
From source file: com.android.gpstest.GpsTestActivity.java

@TargetApi(Build.VERSION_CODES.GINGERBREAD)
@Override
public void onSensorChanged(SensorEvent event) {
    double orientation = Double.NaN;
    double tilt = Double.NaN;

    switch (event.sensor.getType()) {
        case Sensor.TYPE_ROTATION_VECTOR:
            // Modern rotation vector sensors
            if (!mTruncateVector) {
                try {
                    SensorManager.getRotationMatrixFromVector(mRotationMatrix, event.values);
                } catch (IllegalArgumentException e) {
                    // On some Samsung devices, an exception is thrown if this vector > 4 (see #39)
                    // Truncate the array, since we can deal with only the first four values
                    Log.e(TAG, "Samsung device error? Will truncate vectors - " + e);
                    mTruncateVector = true;
                    // Do the truncation here the first time the exception occurs
                    getRotationMatrixFromTruncatedVector(event.values);
                }
            } else {
                // Truncate the array to avoid the exception on some devices (see #39)
                getRotationMatrixFromTruncatedVector(event.values);
            }

            int rot = getWindowManager().getDefaultDisplay().getRotation();
            switch (rot) {
                case Surface.ROTATION_0:
                    // No orientation change, use default coordinate system
                    SensorManager.getOrientation(mRotationMatrix, mValues);
                    break;
                case Surface.ROTATION_90:
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_Y,
                            SensorManager.AXIS_MINUS_X, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                case Surface.ROTATION_180:
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_X,
                            SensorManager.AXIS_MINUS_Y, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                case Surface.ROTATION_270:
                    SensorManager.remapCoordinateSystem(mRotationMatrix, SensorManager.AXIS_MINUS_Y,
                            SensorManager.AXIS_X, mRemappedMatrix);
                    SensorManager.getOrientation(mRemappedMatrix, mValues);
                    break;
                default:
                    // This shouldn't happen - assume default orientation
                    SensorManager.getOrientation(mRotationMatrix, mValues);
                    break;
            }

            orientation = Math.toDegrees(mValues[0]);  // azimuth
            tilt = Math.toDegrees(mValues[1]);
            break;
        case Sensor.TYPE_ORIENTATION:
            // Legacy orientation sensors
            orientation = event.values[0];
            break;
        default:
            // A sensor we're not using, so return
            return;
    }

    // Correct for true north, if preference is set
    if (mFaceTrueNorth && mGeomagneticField != null) {
        orientation += mGeomagneticField.getDeclination();
        // Make sure value is between 0-360
        orientation = MathUtils.mod((float) orientation, 360.0f);
    }

    for (GpsTestListener listener : mGpsTestListeners) {
        listener.onOrientationChanged(orientation, tilt);
    }
}
From source file: research.dlsu.cacaoapp.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(
                    Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/ 2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance("Error. Camera2API is not supported on your device.")
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
From source file: com.askjeffreyliu.camera2barcode.camera.CameraSource.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    try {
        if (ContextCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            return;
        }
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        if (manager == null) {
            manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        }
        mCameraId = manager.getCameraIdList()[mFacing];
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraId);

        StreamConfigurationMap map = characteristics.get(
                CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            return;
        }

        // For still image captures, we use the largest available size.
        Size largest = getBestAspectPictureSize(map.getOutputSizes(ImageFormat.JPEG));

        // Find out if we need to swap dimension to get the preview size relative to sensor
        // coordinate.
        int displayRotation = mDisplayOrientation;
        //noinspection ConstantConditions
        int mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        boolean swappedDimensions = false;
        switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
        }

        Point displaySize = new Point(Utils.getScreenWidth(mContext), Utils.getScreenHeight(mContext));
        int rotatedPreviewWidth = width;
        int rotatedPreviewHeight = height;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;

        if (swappedDimensions) {
            rotatedPreviewWidth = height;
            rotatedPreviewHeight = width;
            maxPreviewWidth = displaySize.y;
            maxPreviewHeight = displaySize.x;
        }

        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
            maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }
        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
            maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }

        // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        Size[] outputSizes = Utils.sizeToSize(map.getOutputSizes(SurfaceTexture.class));
        mPreviewSize = chooseOptimalSize(outputSizes, rotatedPreviewWidth, rotatedPreviewHeight,
                maxPreviewWidth, maxPreviewHeight, largest);

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = mDisplayOrientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }

        // Check if the flash is supported.
        Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
        mFlashSupported = available == null ? false : available;

        // control.aeTargetFpsRange
        Range<Integer>[] availableFpsRange = characteristics.get(
                CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);

        configureTransform(width, height);

        manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        Log.d(TAG, "Camera Error: " + e.getMessage());
    }
}
From source file: org.tensorflow.demo.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics.get(
                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(
                    Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.YUV_420_888, 2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            previewWidth = mPreviewSize.getWidth();
            previewHeight = mPreviewSize.getHeight();
            rgbBytes = new int[previewWidth * previewHeight];
            rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
            croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Bitmap.Config.ARGB_8888);

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = false; //= available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}
From source file: nz.ac.auckland.lablet.ExperimentRunViewManager.java

/**
 * Lock the screen to the current orientation.
 *
 * @return the previous orientation settings
 */
private int lockScreenOrientation() {
    int initialRequestedOrientation = getRequestedOrientation();

    // Note: a surface rotation of 90 degrees means a physical device rotation of -90 degrees.
    int orientation = getResources().getConfiguration().orientation;
    int rotation = getWindowManager().getDefaultDisplay().getRotation();
    switch (rotation) {
        case Surface.ROTATION_0:
            if (orientation == Configuration.ORIENTATION_PORTRAIT)
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
            else if (orientation == Configuration.ORIENTATION_LANDSCAPE)
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
            break;
        case Surface.ROTATION_90:
            if (orientation == Configuration.ORIENTATION_PORTRAIT)
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT);
            else if (orientation == Configuration.ORIENTATION_LANDSCAPE)
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
            break;
        case Surface.ROTATION_180:
            if (orientation == Configuration.ORIENTATION_PORTRAIT)
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT);
            else if (orientation == Configuration.ORIENTATION_LANDSCAPE)
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE);
            break;
        case Surface.ROTATION_270:
            if (orientation == Configuration.ORIENTATION_PORTRAIT)
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
            else if (orientation == Configuration.ORIENTATION_LANDSCAPE)
                setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE);
            break;
    }
    return initialRequestedOrientation;
}
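Because the method returns the previously requested orientation, a caller can lock and later restore the screen. The sketch below shows one way to pair the two calls; it assumes it lives in the same Activity, and runSensitiveTask() is a hypothetical placeholder, not part of the Lablet sources.

// Hypothetical helper in the same Activity: lock while a sensitive task runs,
// then restore the previous orientation request even if the task throws.
private void runWithOrientationLocked() {
    int previousOrientation = lockScreenOrientation();
    try {
        runSensitiveTask();  // placeholder for work that must not be interrupted by rotation
    } finally {
        setRequestedOrientation(previousOrientation);  // restore the old orientation request
    }
}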
From source file: com.example.camera2apidemo.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            checkCamera2Support(characteristics);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(LENS_FACING);
            if (facing != null && facing == LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics.get(SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(
                    Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/ 2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}
From source file: uk.org.rivernile.edinburghbustracker.android.fragments.general.BusStopDetailsFragment.java

/**
 * Update the direction needle so that it is pointing towards the bus stop,
 * based on the device location and the direction it is facing.
 */
private void updateDirectionNeedle() {
    // We need values for location, the accelerometer and magnetometer to continue.
    if (lastLocation == null || accelerometerValues == null || magnetometerValues == null) {
        // Make sure the needle isn't showing.
        txtDistance.setCompoundDrawablesWithIntrinsicBounds(null, null, null, null);
        recycleNeedleBitmapIfNotNull(null);
        return;
    }

    // Calculating the rotation matrix may fail, for example, if the device is in freefall.
    // In that case we cannot continue as the values will be unreliable.
    if (!SensorManager.getRotationMatrix(rotationMatrix, null, accelerometerValues,
            magnetometerValues)) {
        return;
    }

    // The screen rotation was obtained earlier.
    switch (screenRotation) {
        // There's lots of information about this elsewhere, but briefly: the sensor values are
        // in the device's coordinate system, which is correct when the device is in its natural
        // orientation but needs to be remapped when the device is rotated.
        case Surface.ROTATION_0:
            SensorManager.remapCoordinateSystem(rotationMatrix, SensorManager.AXIS_X,
                    SensorManager.AXIS_Z, rotationMatrix);
            break;
        case Surface.ROTATION_90:
            SensorManager.remapCoordinateSystem(rotationMatrix, SensorManager.AXIS_Z,
                    SensorManager.AXIS_MINUS_X, rotationMatrix);
            break;
        case Surface.ROTATION_180:
            SensorManager.remapCoordinateSystem(rotationMatrix, SensorManager.AXIS_MINUS_X,
                    SensorManager.AXIS_MINUS_Z, rotationMatrix);
            break;
        case Surface.ROTATION_270:
            SensorManager.remapCoordinateSystem(rotationMatrix, SensorManager.AXIS_MINUS_Z,
                    SensorManager.AXIS_X, rotationMatrix);
            break;
    }

    // Get the X, Y and Z orientations, which are in radians. Convert this into degrees
    // East of North.
    SensorManager.getOrientation(rotationMatrix, headings);
    double heading = Math.toDegrees(headings[0]);

    // If there's a GeomagneticField value, then adjust the heading to take this into account.
    if (geoField != null) {
        heading -= geoField.getDeclination();
    }

    // The orientation is in the range of -180 to +180. Convert this into a range of 0 to 360.
    final float bearingTo = distance[1] < 0 ? distance[1] + 360 : distance[1];

    // This is the heading to the bus stop.
    heading = bearingTo - heading;

    // The above calculation may come out as a negative number again. Put this back into the
    // range of 0 to 360.
    if (heading < 0) {
        heading += 360;
    }

    // This 'if' statement is required to prevent a crash during device rotation. It ensures
    // that the Fragment is still part of the Activity.
    if (isAdded()) {
        // Get the arrow bitmap from the resources.
        final Bitmap needleIn = BitmapFactory.decodeResource(getResources(),
                R.drawable.heading_arrow);

        // Get an identity matrix and rotate it by the required amount.
        final Matrix m = new Matrix();
        m.setRotate((float) heading % 360, (float) needleIn.getWidth() / 2,
                (float) needleIn.getHeight() / 2);

        // Apply the rotation matrix to the Bitmap, to create a new Bitmap.
        final Bitmap needleOut = Bitmap.createBitmap(needleIn, 0, 0, needleIn.getWidth(),
                needleIn.getHeight(), m, true);

        // Recycle the needle read in if it's not the same as the rotated needle.
        if (needleIn != needleOut) {
            needleIn.recycle();
        }

        // This Bitmap needs to be converted to a Drawable type.
        final BitmapDrawable drawable = new BitmapDrawable(getResources(), needleOut);

        // Set the new needle to be on the right hand side of the TextView.
        txtDistance.setCompoundDrawablesWithIntrinsicBounds(null, null, drawable, null);
        recycleNeedleBitmapIfNotNull(needleOut);
    } else {
        // If the Fragment is not added to the Activity, then make sure there's no needle.
        txtDistance.setCompoundDrawablesWithIntrinsicBounds(null, null, null, null);
        recycleNeedleBitmapIfNotNull(null);
    }
}
From source file: com.almalence.util.Util.java

public static boolean shouldRemapOrientation(final int orientationProc, final int rotation) {
    return (orientationProc == Configuration.ORIENTATION_LANDSCAPE && rotation == Surface.ROTATION_0)
            || (orientationProc == Configuration.ORIENTATION_LANDSCAPE && rotation == Surface.ROTATION_180)
            || (orientationProc == Configuration.ORIENTATION_PORTRAIT && rotation == Surface.ROTATION_90)
            || (orientationProc == Configuration.ORIENTATION_PORTRAIT && rotation == Surface.ROTATION_270);
}
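A brief sketch of how such a helper is typically fed its two arguments; the surrounding needsRemap() method and its Activity context are assumptions for illustration, not part of com.almalence.util.Util.

// Hypothetical call site inside an Activity: pair the configuration orientation
// with the current display rotation before asking whether axes must be remapped.
private boolean needsRemap() {
    int orientationProc = getResources().getConfiguration().orientation;
    int rotation = getWindowManager().getDefaultDisplay().getRotation();
    return Util.shouldRemapOrientation(orientationProc, rotation);
}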
From source file: com.iwuvhugs.seekgame.PhotoFragment.java

/**
 * Configures the necessary {@link Matrix} transformation to `mTextureView`.
 * This method should be called after the camera preview size is determined in
 * setUpCameraOutputs and also the size of `mTextureView` is fixed.
 *
 * @param viewWidth  The width of `mTextureView`
 * @param viewHeight The height of `mTextureView`
 */
private void configureTransform(int viewWidth, int viewHeight) {
    Activity activity = getActivity();
    if (null == mTextureView || null == mPreviewSize || null == activity) {
        return;
    }
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    Matrix matrix = new Matrix();
    RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
    float centerX = viewRect.centerX();
    float centerY = viewRect.centerY();
    if (Surface.ROTATION_0 == rotation) {
        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
        float scale = (float) (viewWidth / mPreviewSize.getWidth() + 0.6);
        matrix.postScale(scale, scale, centerX, centerY);
    }
    mTextureView.setTransform(matrix);
}
From source file: com.projecttango.examples.java.modelcorrespondence.ModelCorrespondenceActivity.java

/**
 * Calculate the transform needed to place the model in the upper left corner of the camera,
 * and rotate it to show the next point to make the correspondence.
 */
private float[] calculateModelTransformFixedToCam(int mDisplayRotation) {
    // Translate to the upper left corner and ahead of the cam if the device is in landscape
    // mode, or to the upper center if it is in portrait mode.
    float[] rgbTHouse = new float[16];
    Matrix.setIdentityM(rgbTHouse, 0);
    if (mDisplayRotation == Surface.ROTATION_0 || mDisplayRotation == Surface.ROTATION_180) {
        Matrix.translateM(rgbTHouse, 0, 0f, 1.2f, -4);
    } else {
        Matrix.translateM(rgbTHouse, 0, -1.5f, 0.3f, -4);
    }
    // Rotate it 180 degrees around the Z axis to show the front of the house as default
    // orientation.
    Matrix.rotateM(rgbTHouse, 0, 180, 0, 0, 1);
    // Rotate it around the X axis so it looks better as seen from above.
    Matrix.rotateM(rgbTHouse, 0, 70, 1, 0, 0);
    // Rotate it around the Z axis to show the next correspondence point to be added.
    Matrix.rotateM(rgbTHouse, 0, -mModelZRotation, 0, 0, 1);
    // Scale it to a proper size.
    Matrix.scaleM(rgbTHouse, 0, 0.03f, 0.03f, 0.03f);
    return rgbTHouse;
}