Usage examples for android.hardware.camera2 CameraCharacteristics.SENSOR_ORIENTATION.
Key: SENSOR_ORIENTATION
Clockwise angle through which the output image needs to be rotated to be upright on the device screen in its native orientation.
Also defines the direction of rolling shutter readout, which is from top to bottom in the sensor's coordinate system.
Units: Degrees of clockwise rotation; always a multiple of 90
Range of valid values: 0, 90, 180, 270
This key is available on all devices.
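Before the project-specific examples below, here is a minimal, self-contained sketch of the pattern most of them share: read SENSOR_ORIENTATION, combine it with the current device orientation, and reverse the direction for front-facing cameras. It follows the formula described in the CaptureRequest.JPEG_ORIENTATION documentation; the class and method names (OrientationUtil, getJpegOrientation) are illustrative and not taken from any project listed here.

import android.hardware.camera2.CameraCharacteristics;
import android.view.OrientationEventListener;

final class OrientationUtil {

    /**
     * Combines SENSOR_ORIENTATION with the current device orientation (in degrees,
     * e.g. from an OrientationEventListener) into the clockwise rotation to request
     * via CaptureRequest.JPEG_ORIENTATION.
     */
    static int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
        if (deviceOrientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
            return 0;
        }
        //noinspection ConstantConditions
        int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);

        // Round the device orientation to the nearest multiple of 90 degrees.
        deviceOrientation = (deviceOrientation + 45) / 90 * 90;

        // Front-facing sensors are mirrored, so the device rotation applies in the
        // opposite direction.
        Integer facing = c.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
            deviceOrientation = -deviceOrientation;
        }

        // Rotation that makes the captured JPEG upright relative to the device.
        return (sensorOrientation + deviceOrientation + 360) % 360;
    }
}

A typical call site would be captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, OrientationUtil.getJpegOrientation(characteristics, deviceOrientation)); the examples below show the same idea embedded in larger methods.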
From source file: com.askjeffreyliu.camera2barcode.camera.CameraSource.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    try {
        if (ContextCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            return;
        }
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        if (manager == null)
            manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
        mCameraId = manager.getCameraIdList()[mFacing];
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraId);
        StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) {
            return;
        }

        // For still image captures, we use the largest available size.
        Size largest = getBestAspectPictureSize(map.getOutputSizes(ImageFormat.JPEG));

        // Find out if we need to swap dimension to get the preview size relative to sensor
        // coordinate.
        int displayRotation = mDisplayOrientation;
        //noinspection ConstantConditions
        int mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        boolean swappedDimensions = false;
        switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
        }

        Point displaySize = new Point(Utils.getScreenWidth(mContext), Utils.getScreenHeight(mContext));
        int rotatedPreviewWidth = width;
        int rotatedPreviewHeight = height;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;

        if (swappedDimensions) {
            rotatedPreviewWidth = height;
            rotatedPreviewHeight = width;
            maxPreviewWidth = displaySize.y;
            maxPreviewHeight = displaySize.x;
        }

        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
            maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }

        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
            maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }

        // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
        // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
        // garbage capture data.
        Size[] outputSizes = Utils.sizeToSize(map.getOutputSizes(SurfaceTexture.class));
        mPreviewSize = chooseOptimalSize(outputSizes, rotatedPreviewWidth, rotatedPreviewHeight,
                maxPreviewWidth, maxPreviewHeight, largest);

        // We fit the aspect ratio of TextureView to the size of preview we picked.
        int orientation = mDisplayOrientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }

        // Check if the flash is supported.
        Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
        mFlashSupported = available == null ? false : available;

        // control.aeTargetFpsRange
        Range<Integer>[] availableFpsRange = characteristics
                .get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);

        configureTransform(width, height);
        manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        Log.d(TAG, "Camera Error: " + e.getMessage());
    }
}
From source file: org.tensorflow.demo.Camera2BasicFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.YUV_420_888, 2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            previewWidth = mPreviewSize.getWidth();
            previewHeight = mPreviewSize.getHeight();
            rgbBytes = new int[previewWidth * previewHeight];
            rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
            croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Bitmap.Config.ARGB_8888);

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = false; //= available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}
From source file: net.ddns.mlsoftlaberge.trycorder.TryviscamFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        //ErrorDialog.newInstance(getString(R.string.camera_error))
        //        .show(getChildFragmentManager(), FRAGMENT_DIALOG);
        e.printStackTrace();
    }
}
From source file: com.example.abrahamsofer.ident.Camera2Fragment.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from both {@link #lockFocus()}.
 */
@TargetApi(Build.VERSION_CODES.M)
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);

        // Orientation
        // int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        // captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
        CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
        CameraCharacteristics cameraCharacteristics = manager
                .getCameraCharacteristics("" + mCameraDevice.getId());
        int rotation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, rotation);

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                // showToast("Saved: " + mFile);
                Log.d(TAG, mFile.toString());
                unlockFocus();
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java
/**
 * PREVIEW STUFF
 */
@Override
public void startPreview() {
    picSize = appSettingsManager.pictureSize.get();
    Log.d(TAG, "Start Preview");
    largestImageSize = Collections.max(Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.JPEG)),
            new CompareSizesByArea());
    picFormat = appSettingsManager.pictureFormat.get();
    if (picFormat.equals("")) {
        picFormat = KEYS.JPEG;
        appSettingsManager.pictureFormat.set(KEYS.JPEG);
        parameterHandler.PictureFormat.onValueHasChanged(KEYS.JPEG);
    }

    if (picFormat.equals(KEYS.JPEG)) {
        String[] split = picSize.split("x");
        int width, height;
        if (split.length < 2) {
            mImageWidth = largestImageSize.getWidth();
            mImageHeight = largestImageSize.getHeight();
        } else {
            mImageWidth = Integer.parseInt(split[0]);
            mImageHeight = Integer.parseInt(split[1]);
        }
        //create new ImageReader with the size and format for the image
        Log.d(TAG, "ImageReader JPEG");
    } else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR)) {
        Log.d(TAG, "ImageReader RAW_SENOSR");
        largestImageSize = Collections.max(
                Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                new CompareSizesByArea());
        mImageWidth = largestImageSize.getWidth();
        mImageHeight = largestImageSize.getHeight();
    } else if (picFormat.equals(CameraHolderApi2.RAW10)) {
        Log.d(TAG, "ImageReader RAW_SENOSR");
        largestImageSize = Collections.max(Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.RAW10)),
                new CompareSizesByArea());
        mImageWidth = largestImageSize.getWidth();
        mImageHeight = largestImageSize.getHeight();
    }

    int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    int orientationToSet = (360 + cameraUiWrapper.getActivityInterface().getOrientation()
            + sensorOrientation) % 360;
    if (appSettingsManager.getApiString(AppSettingsManager.SETTING_OrientationHack).equals(KEYS.ON))
        orientationToSet = (360 + cameraUiWrapper.getActivityInterface().getOrientation()
                + sensorOrientation + 180) % 360;
    cameraHolder.SetParameter(CaptureRequest.JPEG_ORIENTATION, orientationToSet);

    // Here, we create a CameraCaptureSession for camera preview
    if (parameterHandler.Burst == null)
        SetBurst(1);
    else
        SetBurst(parameterHandler.Burst.GetValue());
}
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java
private void SetBurst(int burst) {
    try {
        Log.d(TAG, "Set Burst to:" + burst);
        previewSize = cameraHolder.getSizeForPreviewDependingOnImageSize(
                cameraHolder.map.getOutputSizes(ImageFormat.YUV_420_888), cameraHolder.characteristics,
                mImageWidth, mImageHeight);
        if (cameraUiWrapper.getFocusPeakProcessor() != null) {
            cameraUiWrapper.getFocusPeakProcessor().kill();
        }

        int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        int orientation = 0;
        switch (sensorOrientation) {
            case 90:
                orientation = 0;
                break;
            case 180:
                orientation = 90;
                break;
            case 270:
                orientation = 180;
                break;
            case 0:
                orientation = 270;
                break;
        }
        cameraHolder.CaptureSessionH.SetTextureViewSize(previewSize.getWidth(), previewSize.getHeight(),
                orientation, orientation + 180, false);

        SurfaceTexture texture = cameraHolder.CaptureSessionH.getSurfaceTexture();
        texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        previewsurface = new Surface(texture);
        cameraUiWrapper.getFocusPeakProcessor().Reset(previewSize.getWidth(), previewSize.getHeight());
        Log.d(TAG, "Previewsurface vailid:" + previewsurface.isValid());
        cameraUiWrapper.getFocusPeakProcessor().setOutputSurface(previewsurface);
        camerasurface = cameraUiWrapper.getFocusPeakProcessor().getInputSurface();
        cameraHolder.CaptureSessionH.AddSurface(camerasurface, true);

        if (picFormat.equals(KEYS.JPEG))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.JPEG, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW10))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW10, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW_SENSOR, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW12))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW12, burst + 1);
        cameraHolder.CaptureSessionH.AddSurface(mImageReader.getSurface(), false);
        cameraHolder.CaptureSessionH.CreateCaptureSession();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if (parameterHandler.Burst != null)
        parameterHandler.Burst.ThrowCurrentValueChanged(parameterHandler.Burst.GetValue());
}
From source file: com.obviousengine.android.focus.ZslFocusCamera.java
/**
 * @see com.obviousengine.android.focus.FocusCamera#triggerFocusAndMeterAtPoint(float, float)
 */
@Override
public void triggerFocusAndMeterAtPoint(float nx, float ny) {
    int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    aERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, cropRegion, sensorOrientation);
    aFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, cropRegion, sensorOrientation);
    startAFCycle();
}
From source file: com.obviousengine.android.focus.ZslFocusCamera.java
@Override
public int getSensorOrientation() {
    return characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
}
From source file: com.android.camera.one.v2.OneCameraZslImpl.java
/**
 * @see com.android.camera.one.OneCamera#triggerFocusAndMeterAtPoint(float, float)
 */
@Override
public void triggerFocusAndMeterAtPoint(float nx, float ny) {
    int sensorOrientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);
    mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);
    startAFCycle();
}
From source file: com.quectel.camera2test.Camera2RawFragment.java
/**
 * Rotation needed to transform from the camera sensor orientation to the device's current
 * orientation.
 *
 * @param c                 the {@link CameraCharacteristics} to query for the camera sensor
 *                          orientation.
 * @param deviceOrientation the current device orientation relative to the native device
 *                          orientation.
 * @return the total rotation from the sensor orientation to the current device orientation.
 */
private static int sensorToDeviceRotation(CameraCharacteristics c, int deviceOrientation) {
    int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);

    // Get device orientation in degrees
    deviceOrientation = ORIENTATIONS.get(deviceOrientation);

    // Reverse device orientation for front-facing cameras
    if (c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
        deviceOrientation = -deviceOrientation;
    }

    // Calculate desired JPEG orientation relative to camera orientation to make
    // the image upright relative to the device orientation
    return (sensorOrientation - deviceOrientation + 360) % 360;
}
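The ORIENTATIONS table referenced above is not part of this excerpt. In the upstream Camera2Raw sample it is an android.util.SparseIntArray field that maps Surface.ROTATION_* display-rotation constants to clockwise degrees; the sketch below reproduces that mapping for context and is not copied verbatim from this project.

// Maps Surface.ROTATION_* display-rotation constants to clockwise degrees.
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
    ORIENTATIONS.append(Surface.ROTATION_0, 0);
    ORIENTATIONS.append(Surface.ROTATION_90, 90);
    ORIENTATIONS.append(Surface.ROTATION_180, 180);
    ORIENTATIONS.append(Surface.ROTATION_270, 270);
}

The value returned by sensorToDeviceRotation() is then typically written to CaptureRequest.JPEG_ORIENTATION before issuing the still-capture request.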