List of usage examples for android.hardware.camera2 CameraManager getCameraIdList
@NonNull public String[] getCameraIdList() throws CameraAccessException
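Before the per-project examples below, here is a minimal, self-contained sketch of the call itself: enumerate every camera ID reported by the system and log its lens facing. The class name and TAG are placeholders chosen for illustration; in a real app the Context would come from your Activity or Service.

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.util.Log;

public class CameraEnumerator {
    private static final String TAG = "CameraEnumerator";

    /** Logs every camera ID reported by the system together with its lens facing. */
    public static void listCameras(Context context) {
        CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                Log.d(TAG, "Camera " + cameraId + " facing=" + facing);
            }
        } catch (CameraAccessException e) {
            // Thrown if the camera service is unavailable or has been disconnected.
            Log.e(TAG, "Could not enumerate cameras", e);
        }
    }
}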
From source file:com.android.launcher3.Utilities.java
public static void turnOnFlashLight(Context context) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        CameraManager camManager = (CameraManager) context.getApplicationContext()
                .getSystemService(Context.CAMERA_SERVICE);
        String cameraId = null;
        try {
            cameraId = camManager.getCameraIdList()[0];
            camManager.setTorchMode(cameraId, true);
            isFlashLightOn = true;
        } catch (CameraAccessException e) {
            isFlashLightOn = false;
            e.printStackTrace();
        }
    } else {
        try {
            if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
                Camera cam = Camera.open();
                Camera.Parameters p = cam.getParameters();
                p.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
                cam.setParameters(p);
                cam.startPreview();
                isFlashLightOn = true;
            }
        } catch (Exception e) {
            e.printStackTrace();
            isFlashLightOn = false;
        }
    }
}
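Note that the example above simply takes the first ID returned by getCameraIdList(), which is not guaranteed to belong to a camera with a flash unit. A safer variant, sketched below, checks FLASH_INFO_AVAILABLE before calling setTorchMode(). The helper name pickFlashCameraId is hypothetical and assumes the same imports as the example above.

// Hypothetical helper: return the first camera ID that reports a flash unit, or null if none does.
private static String pickFlashCameraId(CameraManager manager) throws CameraAccessException {
    for (String cameraId : manager.getCameraIdList()) {
        Boolean hasFlash = manager.getCameraCharacteristics(cameraId)
                .get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
        if (Boolean.TRUE.equals(hasFlash)) {
            return cameraId;
        }
    }
    return null;
}

// Usage: String id = pickFlashCameraId(camManager);
//        if (id != null) camManager.setTorchMode(id, true);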
From source file:com.example.administrator.testscreenrecording.control.StreamingCamera2Fragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            initRecorder(mPreviewSize.getWidth(), mPreviewSize.getHeight());

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error))
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
From source file:kr.ac.kpu.wheeling.blackbox.Camera2VideoFragment.java
/**
 * Tries to open a {@link CameraDevice}. The result is listened by `mStateCallback`.
 */
private void openCamera(int width, int height) {
    if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
        requestVideoPermissions();
        return;
    }
    final Activity activity = getActivity();
    if (null == activity || activity.isFinishing()) {
        return;
    }
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        Log.d(TAG, "tryAcquire");
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        String cameraId = manager.getCameraIdList()[0];

        // Choose the sizes for camera preview and video recording
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map = characteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
        mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height,
                mVideoSize);

        Log.d("INFO", "Width and Height: " + width + height);
        Log.d("INFO", "Video Height: " + mVideoSize.getHeight() + " Width: " + mVideoSize.getWidth());
        Log.d("INFO", "Preview Height: " + mPreviewSize.getHeight() + " Width: " + mPreviewSize.getWidth());

        screenInfo = new ScreenInfo();
        screenInfo.setNoSoftKeyScreenInfo(activity);
        Log.d("SCREEN", screenInfo.toString());

        int orientation = getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }
        configureTransform(width, height);
        mMediaRecorder = new MediaRecorder();
        manager.openCamera(cameraId, mStateCallback, null);
    } catch (CameraAccessException e) {
        Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
        activity.finish();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(kr.ac.kpu.wheeling.R.string.camera_error))
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.");
    } catch (SecurityException e) {
    }
}
From source file:com.example.camera2apidemo.Camera2BasicFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        // Iterate over all cameras reported by the system.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            checkCamera2Support(characteristics);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(LENS_FACING);
            if (facing != null && facing == LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics.get(SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error))
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
From source file:net.ddns.mlsoftlaberge.trycorder.TryviscamFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        //ErrorDialog.newInstance(getString(R.string.camera_error))
        //        .show(getChildFragmentManager(), FRAGMENT_DIALOG);
        e.printStackTrace();
    }
}
From source file:com.example.android.camera2raw.Camera2RawFragment.java
/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.")
                .show(getFragmentManager(), "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG,
                            /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR,
                            /*maxImages*/5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos")
            .show(getFragmentManager(), "dialog");
    return false;
}
From source file:com.quectel.camera2test.Camera2RawFragment.java
/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.")
                .show(getFragmentManager(), "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            Log.d(TAG, "---characteristics = " + characteristics);

            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                Log.d(TAG, "-1--characteristics continue");
                continue;
            }
            Log.d(TAG, "-1--characteristics = " + characteristics);

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG,
                            /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR,
                            /*maxImages*/5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos")
            .show(getFragmentManager(), "dialog");
    return false;
}
From source file:com.ape.camera2raw.Camera2RawFragment.java
/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.")
                .show(getFragmentManager(), "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());
            Log.d("WAY", "largestRaw = " + largestRaw);
            //largestRaw = new Size(4208, 3120);//9051

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG,
                            /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR,
                            /*maxImages*/5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos")
            .show(getFragmentManager(), "dialog");
    return false;
}