List of usage examples for android.hardware.camera2 CameraAccessException printStackTrace
public void printStackTrace()
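CameraAccessException is a checked exception thrown by most CameraManager, CameraDevice, and CameraCaptureSession calls when the camera service is unavailable, disconnected, or disabled by policy; printStackTrace() is the method inherited from Throwable that the examples below use to log it. A minimal sketch of the typical pattern, assuming the CAMERA permission has already been granted and using placeholder names (stateCallback, backgroundHandler) that are not taken from the examples on this page:

CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
    // Both calls below are declared to throw CameraAccessException, e.g. when the
    // camera service is not available or the camera has been disabled by policy.
    String cameraId = manager.getCameraIdList()[0];
    manager.openCamera(cameraId, stateCallback, backgroundHandler);
} catch (CameraAccessException e) {
    // Dump the failure; production code would typically also inspect e.getReason().
    e.printStackTrace();
}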
From source file:com.vest.album.fragment.CameraBasicFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        callback.onPhotoError("?");
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        callback.onPhotoError(getString(R.string.camera_error));
    }
}
From source file:org.tensorflow.demo.Camera2BasicFragment.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} to {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }

        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));

        sensorOrientation = rotation + getOrientation(rotation);
        frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight,
                INPUT_SIZE, INPUT_SIZE, sensorOrientation, MAINTAIN_ASPECT);
        cropToFrameTransform = new Matrix();
        frameToCropTransform.invert(cropToFrameTransform);

        classifier = TensorFlowImageClassifier.create(getActivity().getAssets(), MODEL_FILE,
                LABEL_FILE, INPUT_SIZE, IMAGE_MEAN, IMAGE_STD, INPUT_NAME, OUTPUT_NAME);

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                Log.d(TAG, mFile.toString());
                unlockFocus();
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:research.dlsu.cacaoapp.Camera2BasicFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance("Error. Camera2API is not supported on your device.")
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
From source file:com.Yamate.Camera.Camera.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = mActivity;
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            Util.PiCoreLog("output w:" + map.getOutputSizes(SurfaceTexture.class)[0].getWidth()
                    + ",h:" + map.getOutputSizes(SurfaceTexture.class)[0].getHeight());

            // For still image captures, we use the largest available size.
            /*
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, 2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
            mCaptureSize = new Size(largest.getWidth(), largest.getHeight());
            */

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            /*
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }
            */

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            //mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
            //        rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
            //        maxPreviewHeight, largest);
            if (displaySize.x > displaySize.y) {
                double scale = (double) displaySize.y
                        / (double) map.getOutputSizes(SurfaceTexture.class)[0].getHeight();
                mPreviewSize = new Size(
                        (int) (scale * map.getOutputSizes(SurfaceTexture.class)[0].getWidth()),
                        (int) (scale * map.getOutputSizes(SurfaceTexture.class)[0].getHeight()));
            } else {
                double scale = (double) displaySize.x
                        / (double) map.getOutputSizes(SurfaceTexture.class)[0].getHeight();
                mPreviewSize = new Size(
                        (int) (scale * map.getOutputSizes(SurfaceTexture.class)[0].getHeight()),
                        (int) (scale * map.getOutputSizes(SurfaceTexture.class)[0].getWidth()));
            }
            Util.PiCoreLog("mPreviewSize w:" + mPreviewSize.getWidth() + ",h:" + mPreviewSize.getHeight());

            mCaptureSize = new Size(CAPTURE_WIDTH, CAPTURE_HEIGHT);
            mImageReader = ImageReader.newInstance(mCaptureSize.getWidth(), mCaptureSize.getHeight(),
                    ImageFormat.JPEG, 2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            /*
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            */

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        e.printStackTrace();
    }
}
From source file:com.microsoft.projectoxford.face.samples.ui.Camera2BasicFragment.java
/**
 * Creates a new {@link CameraCaptureSession} for camera preview.
 */
private void createCameraPreviewSession() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;

        // We configure the size of default buffer to be the size of camera preview we want.
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);

        // We set up a CaptureRequest.Builder with the output Surface.
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mPreviewRequestBuilder.addTarget(surface);

        // Here, we create a CameraCaptureSession for camera preview.
        mCameraDevice.createCaptureSession(
                Arrays.asList(surface, mImageReader.getSurface(), nImageReader.getSurface()),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                        // The camera is already closed
                        if (null == mCameraDevice) {
                            return;
                        }
                        // When the session is ready, we start displaying the preview.
                        mCaptureSession = cameraCaptureSession;
                        try {
                            // Auto focus should be continuous for camera preview.
                            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Flash is automatically enabled when necessary.
                            setAutoFlash(mPreviewRequestBuilder);
                            // Finally, we start displaying the camera preview.
                            mPreviewRequest = mPreviewRequestBuilder.build();
                            mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
                                    mBackgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                        showToast("Failed");
                    }
                }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:com.example.camera2apidemo.Camera2BasicFragment.java
/**
 * Creates a new {@link CameraCaptureSession} for camera preview.
 */
private void createCameraPreviewSession() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;

        // We configure the size of default buffer to be the size of camera preview we want.
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);

        // We set up a CaptureRequest.Builder with the output Surface.
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mPreviewRequestBuilder.addTarget(surface);

        // Here, we create a CameraCaptureSession for camera preview.
        mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                        // The camera is already closed
                        if (null == mCameraDevice) {
                            return;
                        }
                        // When the session is ready, we start displaying the preview.
                        mCaptureSession = cameraCaptureSession;
                        try {
                            // Auto focus should be continuous for camera preview.
                            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Flash is automatically enabled when necessary.
                            setAutoFlash(mPreviewRequestBuilder);
                            // Finally, we start displaying the camera preview.
                            mPreviewRequest = mPreviewRequestBuilder.build();
                            mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
                                    mBackgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                        showToast("Failed");
                    }
                }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:research.dlsu.cacaoapp.Camera2BasicFragment1.java
/**
 * Creates a new {@link CameraCaptureSession} for camera preview.
 */
private void createCameraPreviewSession() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;

        // We configure the size of default buffer to be the size of camera preview we want.
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);

        // We set up a CaptureRequest.Builder with the output Surface.
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mPreviewRequestBuilder.addTarget(surface);

        // Here, we create a CameraCaptureSession for camera preview.
        mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                        // The camera is already closed
                        if (null == mCameraDevice) {
                            return;
                        }
                        // When the session is ready, we start displaying the preview.
                        mCaptureSession = cameraCaptureSession;
                        try {
                            // Auto focus should be continuous for camera preview.
                            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Flash is automatically enabled when necessary.
                            setAutoFlash(mPreviewRequestBuilder);
                            // Finally, we start displaying the camera preview.
                            mPreviewRequest = mPreviewRequestBuilder.build();
                            mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
                                    mBackgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onClosed(CameraCaptureSession session) {
                        super.onClosed(session);
                        stopBackgroundThread();
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                        showToast("Failed");
                    }
                }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:edu.umich.eecs.lab11.camera.CameraFragment.java
/**
 * Opens the camera specified by {@link CameraFragment#mCameraId}.
 */
private void openCamera(int width, int height) {
    if (!hasAllPermissionsGranted()) {
        requestCameraPermissions();
        return;
    }
    if (ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA)
            != PackageManager.PERMISSION_GRANTED) {
        requestCameraPermission();
        return;
    }
    setUpCameraOutputs(width, height);
    configureTransform(width, height);
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
    }
}
From source file:com.android.launcher3.Utilities.java
public static void turnOffFlashLight(Context context) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        CameraManager camManager = (CameraManager) context.getApplicationContext()
                .getSystemService(Context.CAMERA_SERVICE);
        String cameraId = null;
        try {
            cameraId = camManager.getCameraIdList()[0];
            camManager.setTorchMode(cameraId, false);
            isFlashLightOn = false;
        } catch (CameraAccessException e) {
            isFlashLightOn = true;
            e.printStackTrace();
        }
    } else {
        try {
            if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
                cam.stopPreview();
                cam.release();
                cam = null;
                isFlashLightOn = false;
            }
        } catch (Exception e) {
            isFlashLightOn = true;
            e.printStackTrace();
        }
    }
}
From source file:com.android.launcher3.Utilities.java
public static void turnOnFlashLight(Context context) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        CameraManager camManager = (CameraManager) context.getApplicationContext()
                .getSystemService(Context.CAMERA_SERVICE);
        String cameraId = null;
        try {
            cameraId = camManager.getCameraIdList()[0];
            camManager.setTorchMode(cameraId, true);
            isFlashLightOn = true;
        } catch (CameraAccessException e) {
            isFlashLightOn = false;
            e.printStackTrace();
        }
    } else {
        try {
            if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA_FLASH)) {
                Camera cam = Camera.open();
                Camera.Parameters p = cam.getParameters();
                p.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
                cam.setParameters(p);
                cam.startPreview();
                isFlashLightOn = true;
            }
        } catch (Exception e) {
            e.printStackTrace();
            isFlashLightOn = false;
        }
    }
}