List of usage examples for android.hardware.camera2 CaptureRequest CONTROL_AF_MODE
Key CONTROL_AF_MODE
Whether auto-focus (AF) is currently enabled, and what mode it is set to.
Only effective if CaptureRequest#CONTROL_MODE (android.control.mode) is AUTO and the lens is not fixed focus (i.e. android.lens.info.minimumFocusDistance > 0).
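The examples below all follow the same basic pattern: choose an AF mode on a CaptureRequest.Builder, optionally restrict it with CONTROL_AF_REGIONS, and drive the focus state machine with CONTROL_AF_TRIGGER. As a minimal sketch (assuming a cameraDevice, previewSurface, captureSession and backgroundHandler already exist; these names are placeholders, not taken from the examples below), a continuous-autofocus preview request typically looks like this:

    try {
        // Continuous AF is the usual choice for a live preview stream.
        CaptureRequest.Builder builder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        builder.addTarget(previewSurface);
        builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        captureSession.setRepeatingRequest(builder.build(), null, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }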
From source file: com.dastanapps.camera2.view.Cam2AutoFitTextureView.java

    @Nullable
    private Boolean touchTofocus2(MotionEvent event) {
        MotionEvent motionEvent = event;
        final int actionMasked = motionEvent.getActionMasked();
        if (actionMasked != MotionEvent.ACTION_DOWN) {
            return false;
        }
        if (mManualFocusEngaged) {
            Log.d(TAG, "Manual focus already engaged");
            return true;
        }

        final Rect sensorArraySize = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);

        //TODO: here I just flip x,y, but this needs to correspond with the sensor orientation (via SENSOR_ORIENTATION)
        final int y = (int) ((motionEvent.getX() / (float) getWidth()) * (float) sensorArraySize.height());
        final int x = (int) ((motionEvent.getY() / (float) getHeight()) * (float) sensorArraySize.width());
        final int halfTouchWidth = 150;  //(int)motionEvent.getTouchMajor(); //TODO: this doesn't represent actual touch size in pixel. Values range in [3, 10]...
        final int halfTouchHeight = 150; //(int)motionEvent.getTouchMinor();
        MeteringRectangle focusAreaTouch = new MeteringRectangle(Math.max(x - halfTouchWidth, 0),
                Math.max(y - halfTouchHeight, 0), halfTouchWidth * 2, halfTouchHeight * 2,
                MeteringRectangle.METERING_WEIGHT_MAX - 1);

        CameraCaptureSession.CaptureCallback captureCallbackHandler = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                super.onCaptureCompleted(session, request, result);
                mManualFocusEngaged = false;

                if (request.getTag() == "FOCUS_TAG") {
                    //the focus trigger is complete -
                    //resume repeating (preview surface will get frames), clear AF trigger
                    mPreviewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, null);
                    try {
                        mPreviewSession.setRepeatingRequest(mPreviewBuilder.build(), null, null);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }
            }

            @Override
            public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                    CaptureFailure failure) {
                super.onCaptureFailed(session, request, failure);
                Log.e(TAG, "Manual AF failure: " + failure);
                mManualFocusEngaged = false;
            }
        };

        //first stop the existing repeating request
        try {
            mPreviewSession.stopRepeating();
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }

        //cancel any existing AF trigger (repeated touches, etc.)
        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
        try {
            mPreviewSession.capture(mPreviewBuilder.build(), captureCallbackHandler, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }

        //Now add a new AF trigger with focus region
        if (isMeteringAreaAFSupported()) {
            mPreviewBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, new MeteringRectangle[] { focusAreaTouch });
        }
        mPreviewBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
        mPreviewBuilder.setTag("FOCUS_TAG"); //we'll capture this later for resuming the preview

        // //then we ask for a single request (not repeating!)
        // mPreviewSession.capture(mPreviewBuilder.build(), captureCallbackHandler, mBackgroundHandler);
        return null;
    }
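The helper isMeteringAreaAFSupported() called above is not shown in this listing; a plausible implementation (an assumption, not the project's actual code) checks how many AF metering regions the device reports:

    // Hypothetical helper: CONTROL_AF_REGIONS only makes sense when the camera
    // reports at least one AF metering region.
    private boolean isMeteringAreaAFSupported() {
        Integer maxRegionsAf = mCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
        return maxRegionsAf != null && maxRegionsAf >= 1;
    }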
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java

    /**
     * Capture a still picture. This method should be called when we get a response in
     */
    protected void captureStillPicture() {
        Log.d(TAG, "StartStillCapture");
        // Use the same AE and AF modes as the preview.
        try { captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, cameraHolder.get(CaptureRequest.CONTROL_AF_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, cameraHolder.get(CaptureRequest.CONTROL_AE_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.FLASH_MODE, cameraHolder.get(CaptureRequest.FLASH_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, cameraHolder.get(CaptureRequest.COLOR_CORRECTION_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, cameraHolder.get(CaptureRequest.COLOR_CORRECTION_TRANSFORM)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, cameraHolder.get(CaptureRequest.COLOR_CORRECTION_GAINS)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.TONEMAP_CURVE, cameraHolder.get(CaptureRequest.TONEMAP_CURVE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try {
            if (Build.VERSION.SDK_INT >= VERSION_CODES.M)
                captureBuilder.set(CaptureRequest.TONEMAP_GAMMA, cameraHolder.get(CaptureRequest.TONEMAP_GAMMA));
        } catch (NullPointerException ex) { ex.printStackTrace(); }
        try {
            int awb = cameraHolder.get(CaptureRequest.CONTROL_AWB_MODE);
            captureBuilder.set(CaptureRequest.CONTROL_AWB_MODE, awb);
        } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.EDGE_MODE, cameraHolder.get(CaptureRequest.EDGE_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.HOT_PIXEL_MODE, cameraHolder.get(CaptureRequest.HOT_PIXEL_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, cameraHolder.get(CaptureRequest.NOISE_REDUCTION_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, cameraHolder.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try {
            long val = 0;
            if (!parameterHandler.ManualIso.GetStringValue().equals(KEYS.AUTO))
                val = (long) (AbstractManualShutter.getMilliSecondStringFromShutterString(
                        parameterHandler.ManualShutter.getStringValues()[parameterHandler.ManualShutter.GetValue()]) * 1000f);
            else
                val = cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
            Log.d(TAG, "Set ExposureTime for Capture to:" + val);
            captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, val);
        } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, cameraHolder.get(CaptureRequest.SENSOR_SENSITIVITY)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, cameraHolder.get(CaptureRequest.CONTROL_EFFECT_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, cameraHolder.get(CaptureRequest.CONTROL_SCENE_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, cameraHolder.get(CaptureRequest.LENS_FOCUS_DISTANCE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, cameraUiWrapper.getActivityInterface().getOrientation()); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, cameraHolder.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try { captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, cameraHolder.get(CaptureRequest.SCALER_CROP_REGION)); } catch (NullPointerException ex) { ex.printStackTrace(); }
        try {
            if (appSettingsManager.getApiString(AppSettingsManager.SETTING_LOCATION).equals(KEYS.ON))
                captureBuilder.set(CaptureRequest.JPEG_GPS_LOCATION,
                        cameraUiWrapper.getActivityInterface().getLocationHandler().getCurrentLocation());
        } catch (NullPointerException ex) { ex.printStackTrace(); }

        prepareCaptureBuilder(captureBuilder);
        imagecount = 0;
        //mDngResult = null;
        if (parameterHandler.Burst != null && parameterHandler.Burst.GetValue() > 0) {
            initBurstCapture(captureBuilder, CaptureCallback);
        } else {
            //cameraHolder.CaptureSessionH.StopRepeatingCaptureSession();
            captureBuilder.setTag(mRequestCounter.getAndIncrement());
            captureBuilder.addTarget(mImageReader.getSurface());
            if (cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME) != null
                    && cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME) > 500000 * 1000)
                cameraHolder.CaptureSessionH.StopRepeatingCaptureSession();
            ImageHolder imageHolder = new ImageHolder();
            resultQueue.put((int) captureBuilder.build().getTag(), imageHolder);
            changeCaptureState(CaptureStates.image_capture_start);
            cameraHolder.CaptureSessionH.StartImageCapture(captureBuilder, CaptureCallback, mBackgroundHandler);
        }
    }
From source file: com.andrasta.dashi.MainActivity.java

    @SuppressWarnings("MissingPermission")
    private void openCamera(int width, int height) {
        try {
            configBuilder = CameraUtils.initCameraConfig(this, display, width, height);
            onCameraOrientationSet(configBuilder.getCameraOrientation());

            int cameraWidth = configBuilder.getSize().getWidth();
            int cameraHeight = configBuilder.getSize().getHeight();

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = this.getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                textureView.setAspectRatio(cameraWidth, cameraHeight);
                laneView.setAspectRatio(cameraWidth, cameraHeight);
            } else {
                textureView.setAspectRatio(cameraHeight, cameraWidth);
                laneView.setAspectRatio(cameraHeight, cameraWidth);
            }
            Matrix matrix = CameraUtils.configureTransform(display.getRotation(), width, height, cameraWidth, cameraHeight);
            textureView.setTransform(matrix);

            SurfaceTexture texture = textureView.getSurfaceTexture();
            if (texture == null) {
                Log.d(TAG, "No SurfaceTexture");
                return;
            }
            // We configure the size of default buffer to be the size of camera preview we want.
            texture.setDefaultBufferSize(cameraWidth, cameraHeight);
            CameraConfig.Request request = new CameraConfig.Request(CameraDevice.TEMPLATE_PREVIEW, new Surface(texture));
            request.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            configBuilder.addRequest(request);
            Log.d(TAG, "Display camera resolution " + cameraWidth + 'x' + cameraHeight);

            imageReader = ImageReader.newInstance(cameraRecSize.getWidth(), cameraRecSize.getHeight(),
                    ImageFormat.YUV_420_888, alprHandler.getThreadsNum() + 1);
            imageReader.setOnImageAvailableListener(this, null);
            request = new CameraConfig.Request(CameraDevice.TEMPLATE_PREVIEW, imageReader.getSurface());
            configBuilder.addRequest(request);
            Log.d(TAG, "Recognition camera resolution " + cameraRecSize.getWidth() + 'x' + cameraRecSize.getHeight());

            camera.open(configBuilder.build());
            Log.d(TAG, "Camera opened: " + configBuilder.getCameraId());
        } catch (CameraAccessException e) {
            onError(false, e);
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2API is used but not supported on the device
            onError(true, e);
        }
    }
From source file: com.dastanapps.camera2.view.Cam2AutoFitTextureView.java

    protected void touchToFocus(MotionEvent event) {
        //first stop the existing repeating request
        try {
            mPreviewSession.stopRepeating();
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }

        Rect rect = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        Log.i(TAG, "SENSOR_INFO_ACTIVE_ARRAY_SIZE,,,,,,,,rect.left--->" + rect.left + ",,,rect.top--->" + rect.top
                + ",,,,rect.right--->" + rect.right + ",,,,rect.bottom---->" + rect.bottom);
        Size size = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
        Log.i(TAG, "mCameraCharacteristics,,,,size.getWidth()--->" + size.getWidth() + ",,,size.getHeight()--->"
                + size.getHeight());

        int areaSize = 200;
        int right = rect.right;
        int bottom = rect.bottom;
        int viewWidth = getWidth();
        int viewHeight = getHeight();
        int ll, rr;
        Rect newRect;
        int centerX = (int) event.getX();
        int centerY = (int) event.getY();
        ll = ((centerX * right) - areaSize) / viewWidth;
        rr = ((centerY * bottom) - areaSize) / viewHeight;
        int focusLeft = clamp(ll, 0, right);
        int focusBottom = clamp(rr, 0, bottom);
        Log.i(TAG, "focusLeft--->" + focusLeft + ",,,focusTop--->" + focusBottom + ",,,focusRight--->"
                + (focusLeft + areaSize) + ",,,focusBottom--->" + (focusBottom + areaSize));
        newRect = new Rect(focusLeft, focusBottom, focusLeft + areaSize, focusBottom + areaSize);
        MeteringRectangle meteringRectangle = new MeteringRectangle(newRect, 500);
        MeteringRectangle[] meteringRectangleArr = { meteringRectangle };

        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_REGIONS, meteringRectangleArr);
        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
        mPreviewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
    }
From source file: com.example.camera2apidemo.Camera2Fragment.java

    private void startPrieview() {
        try {
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(surface);
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
            mPreviewRequest = mPreviewRequestBuilder.build();
            mCaptureSession.setRepeatingRequest(mPreviewRequest, null, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
From source file: com.example.camera2apidemo.Camera2Fragment.java

    private void captureStillPicture() {
        try {
            final CaptureRequest.Builder captureBuilder = mCameraDevice
                    .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureBuilder.addTarget(mImageReader.getSurface());
            captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

            CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                        @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                    unlockFocus();
                }
            };

            mCaptureSession.stopRepeating();
            mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
From source file: com.example.testcamera.Camera2BasicFragmentEasy.java

    private void takePicture() {
        try {
            final CaptureRequest.Builder captureBuilder = mCameraDevice
                    .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureBuilder.addTarget(mImageReader.getSurface());
            captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);

            int rotation = getActivity().getWindowManager().getDefaultDisplay().getRotation();
            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));

            CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                        @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                    showToast("Saved: " + mFile);
                    try {
                        mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }
            };

            mCaptureSession.stopRepeating();
            mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
From source file: com.example.testcamera.Camera2BasicFragmentGoogle.java

    /**
     * Creates a new {@link CameraCaptureSession} for camera preview.
     */
    private void createCameraPreviewSession() {
        try {
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;

            // We configure the size of default buffer to be the size of camera preview we want.
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

            // This is the output Surface we need to start preview.
            Surface surface = new Surface(texture);

            // We set up a CaptureRequest.Builder with the output Surface.
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(surface);

            // Here, we create a CameraCaptureSession for camera preview.
            mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {

                        @Override
                        public void onConfigured(CameraCaptureSession cameraCaptureSession) {
                            // The camera is already closed
                            if (null == mCameraDevice) {
                                return;
                            }

                            // When the session is ready, we start displaying the preview.
                            mCaptureSession = cameraCaptureSession;
                            try {
                                // Auto focus should be continuous for camera preview.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                // Flash is automatically enabled when necessary.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                                        CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                                // Finally, we start displaying the camera preview.
                                mPreviewRequest = mPreviewRequestBuilder.build();
                                mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
                            Activity activity = getActivity();
                            if (null != activity) {
                                Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
                            }
                        }
                    }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
From source file: com.tzutalin.dlibtest.CameraConnectionFragment.java

    /**
     * Creates a new {@link CameraCaptureSession} for camera preview.
     */
    @SuppressLint("LongLogTag")
    @DebugLog
    private void createCameraPreviewSession() {
        try {
            final SurfaceTexture texture = textureView.getSurfaceTexture();
            assert texture != null;

            // We configure the size of default buffer to be the size of camera preview we want.
            texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
            //texture.setOnFrameAvailableListener();

            // This is the output Surface we need to start preview.
            final Surface surface = new Surface(texture);

            // We set up a CaptureRequest.Builder with the output Surface.
            previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            //previewRequestBuilder.addTarget(surface);

            // Create the reader for the preview frames.
            previewReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(),
                    ImageFormat.YUV_420_888, 2);
            previewReader.setOnImageAvailableListener(mOnGetPreviewListener, backgroundHandler);
            previewRequestBuilder.addTarget(previewReader.getSurface());

            // Here, we create a CameraCaptureSession for camera preview.
            cameraDevice.createCaptureSession(Arrays.asList(previewReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {

                        @Override
                        public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
                            // The camera is already closed
                            if (null == cameraDevice) {
                                return;
                            }

                            // When the session is ready, we start displaying the preview.
                            captureSession = cameraCaptureSession;
                            try {
                                // Auto focus should be continuous for camera preview.
                                previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                // Flash is automatically enabled when necessary.
                                previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                                        CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                                // Finally, we start displaying the camera preview.
                                previewRequest = previewRequestBuilder.build();
                                captureSession.setRepeatingRequest(previewRequest, captureCallback, backgroundHandler);
                            } catch (final CameraAccessException e) {
                                Timber.tag(TAG).e("Exception!", e);
                            }
                        }

                        @Override
                        public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
                            showToast("Failed");
                        }
                    }, null);
        } catch (final CameraAccessException e) {
            Timber.tag(TAG).e("Exception!", e);
        }

        mOnGetPreviewListener.initialize(getActivity().getApplicationContext(), getActivity().getAssets(),
                mScoreView, inferenceHandler);
    }
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java

    protected void finishCapture(Builder captureBuilder) {
        try {
            Log.d(TAG, "CaptureDone");
            cameraHolder.CaptureSessionH.StartRepeatingCaptureSession();
            if (cameraHolder.get(CaptureRequest.CONTROL_AF_MODE) == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE
                    || cameraHolder.get(CaptureRequest.CONTROL_AF_MODE) == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
                cameraHolder.SetParameterRepeating(CaptureRequest.CONTROL_AF_TRIGGER,
                        CaptureRequest.CONTROL_AF_TRIGGER_CANCEL);
                cameraHolder.SetParameterRepeating(CaptureRequest.CONTROL_AF_TRIGGER,
                        CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
            }
        } catch (NullPointerException ex) {
            ex.printStackTrace();
        }
        isWorking = false;
    }
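finishCapture() above only handles the tail end of the AF handshake: once the repeating preview is restored, the trigger is cancelled and returned to idle. For context, here is a minimal sketch of the other half, loosely following the Camera2Basic lock-focus pattern; previewBuilder, captureSession, captureCallback and backgroundHandler are assumed names, not taken from this file:

    // Minimal sketch: fire an AF trigger, then let the CaptureCallback watch
    // CaptureResult.CONTROL_AF_STATE for FOCUSED_LOCKED / NOT_FOCUSED_LOCKED
    // before issuing the still-capture request.
    private void lockFocus() {
        try {
            previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
            captureSession.capture(previewBuilder.build(), captureCallback, backgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }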