List of usage examples for android.hardware.camera2 CaptureRequest SCALER_CROP_REGION
Key SCALER_CROP_REGION
To view the source code for android.hardware.camera2 CaptureRequest SCALER_CROP_REGION, click the Source Link.
The desired region of the sensor to read out for this capture.
This control can be used to implement digital zoom.
For devices not supporting CaptureRequest#DISTORTION_CORRECTION_MODE android.distortionCorrection.mode control, the coordinate system always follows that of CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize , with (0, 0)
being the top-left pixel of the active array.
For devices supporting CaptureRequest#DISTORTION_CORRECTION_MODE android.distortionCorrection.mode control, the coordinate system depends on the mode being set.
From source file:com.dastanapps.camera2.view.Cam2AutoFitTextureView.java
protected void pinchToZoom(MotionEvent event) { maximumZoomLevel = mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM) * 10; Rect rect = mCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); float currentFingerSpacing; if (event.getPointerCount() > 1) { // Multi touch logic currentFingerSpacing = getFingerSpacing(event); if (fingerSpacing != 0) { if (currentFingerSpacing > fingerSpacing && maximumZoomLevel > zoomLevel) { zoomLevel = zoomLevel + .4; } else if (currentFingerSpacing < fingerSpacing && zoomLevel > 1) { zoomLevel = zoomLevel - .4; }/*from w w w. j ava2 s . co m*/ int minW = (int) (rect.width() / maximumZoomLevel); int minH = (int) (rect.height() / maximumZoomLevel); int difW = rect.width() - minW; int difH = rect.height() - minH; int cropW = difW / 100 * (int) zoomLevel; int cropH = difH / 100 * (int) zoomLevel; cropW -= cropW & 3; cropH -= cropH & 3; Rect zoom = new Rect(cropW, cropH, rect.width() - cropW, rect.height() - cropH); mPreviewBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom); } fingerSpacing = currentFingerSpacing; } }
From source file:freed.cam.apis.camera2.modules.PictureModuleApi2.java
/** * Capture a still picture. This method should be called when we get a response in * *///from www . j a v a 2s . co m protected void captureStillPicture() { Log.d(TAG, "StartStillCapture"); // Use the same AE and AF modes as the preview. try { captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, cameraHolder.get(CaptureRequest.CONTROL_AF_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, cameraHolder.get(CaptureRequest.CONTROL_AE_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.FLASH_MODE, cameraHolder.get(CaptureRequest.FLASH_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE, cameraHolder.get(CaptureRequest.COLOR_CORRECTION_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, cameraHolder.get(CaptureRequest.COLOR_CORRECTION_TRANSFORM)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS, cameraHolder.get(CaptureRequest.COLOR_CORRECTION_GAINS)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.TONEMAP_CURVE, cameraHolder.get(CaptureRequest.TONEMAP_CURVE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { if (Build.VERSION.SDK_INT >= VERSION_CODES.M) captureBuilder.set(CaptureRequest.TONEMAP_GAMMA, cameraHolder.get(CaptureRequest.TONEMAP_GAMMA)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { int awb = cameraHolder.get(CaptureRequest.CONTROL_AWB_MODE); captureBuilder.set(CaptureRequest.CONTROL_AWB_MODE, awb); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.EDGE_MODE, cameraHolder.get(CaptureRequest.EDGE_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); 
} try { captureBuilder.set(CaptureRequest.HOT_PIXEL_MODE, cameraHolder.get(CaptureRequest.HOT_PIXEL_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE, cameraHolder.get(CaptureRequest.NOISE_REDUCTION_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, cameraHolder.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { long val = 0; if (!parameterHandler.ManualIso.GetStringValue().equals(KEYS.AUTO)) val = (long) (AbstractManualShutter.getMilliSecondStringFromShutterString( parameterHandler.ManualShutter.getStringValues()[parameterHandler.ManualShutter.GetValue()]) * 1000f); else val = cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME); Log.d(TAG, "Set ExposureTime for Capture to:" + val); captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, val); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, cameraHolder.get(CaptureRequest.SENSOR_SENSITIVITY)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, cameraHolder.get(CaptureRequest.CONTROL_EFFECT_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.CONTROL_SCENE_MODE, cameraHolder.get(CaptureRequest.CONTROL_SCENE_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE, cameraHolder.get(CaptureRequest.LENS_FOCUS_DISTANCE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, cameraUiWrapper.getActivityInterface().getOrientation()); } catch (NullPointerException ex) { ex.printStackTrace(); } try { 
captureBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, cameraHolder.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, cameraHolder.get(CaptureRequest.SCALER_CROP_REGION)); } catch (NullPointerException ex) { ex.printStackTrace(); } try { if (appSettingsManager.getApiString(AppSettingsManager.SETTING_LOCATION).equals(KEYS.ON)) captureBuilder.set(CaptureRequest.JPEG_GPS_LOCATION, cameraUiWrapper.getActivityInterface().getLocationHandler().getCurrentLocation()); } catch (NullPointerException ex) { ex.printStackTrace(); } prepareCaptureBuilder(captureBuilder); imagecount = 0; //mDngResult = null; if (parameterHandler.Burst != null && parameterHandler.Burst.GetValue() > 0) { initBurstCapture(captureBuilder, CaptureCallback); } else { //cameraHolder.CaptureSessionH.StopRepeatingCaptureSession(); captureBuilder.setTag(mRequestCounter.getAndIncrement()); captureBuilder.addTarget(mImageReader.getSurface()); if (cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME) != null && cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME) > 500000 * 1000) cameraHolder.CaptureSessionH.StopRepeatingCaptureSession(); ImageHolder imageHolder = new ImageHolder(); resultQueue.put((int) captureBuilder.build().getTag(), imageHolder); changeCaptureState(CaptureStates.image_capture_start); cameraHolder.CaptureSessionH.StartImageCapture(captureBuilder, CaptureCallback, mBackgroundHandler); } }
From source file:com.obviousengine.android.focus.ZslFocusCamera.java
/**
 * Stamps the cached zoom crop and AE/AF metering regions onto the given
 * request builder so every outgoing request shares the same framing.
 */
private void addRegionsToCaptureRequestBuilder(CaptureRequest.Builder builder) {
    // The three keys are independent; order of the set() calls is irrelevant.
    builder.set(CaptureRequest.SCALER_CROP_REGION, cropRegion);
    builder.set(CaptureRequest.CONTROL_AF_REGIONS, aFRegions);
    builder.set(CaptureRequest.CONTROL_AE_REGIONS, aERegions);
}
From source file:io.engineersatwork.blink.Camera2BasicFragment.java
/** * Creates a new {@link CameraCaptureSession} for camera preview. *///ww w . j av a 2 s . c om private void createCameraPreviewSession() { try { SurfaceTexture texture = mTextureView.getSurfaceTexture(); assert texture != null; // We configure the size of default buffer to be the size of camera preview we want. texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); // This is the output Surface we need to start preview. Surface surface = new Surface(texture); // We set up a CaptureRequest.Builder with the output Surface. mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); //mPreviewRequestBuilder.addTarget(surface); mPreviewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, mCropRect); mPreviewRequestBuilder.addTarget(mImageReader.getSurface()); // Here, we create a CameraCaptureSession for camera preview. mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()), new CameraCaptureSession.StateCallback() { @Override public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) { // The camera is already closed if (null == mCameraDevice) { return; } // When the session is ready, we start displaying the preview. mCaptureSession = cameraCaptureSession; try { // Auto focus should be continuous for camera preview. mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); // Flash is automatically enabled when necessary. setAutoFlash(mPreviewRequestBuilder); // Finally, we start displaying the camera preview. mPreviewRequest = mPreviewRequestBuilder.build(); mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler); } catch (CameraAccessException e) { e.printStackTrace(); } } @Override public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { showToast("Failed"); } }, null); } catch (CameraAccessException e) { e.printStackTrace(); } }
From source file:com.android.camera.one.v2.OneCameraZslImpl.java
/**
 * Applies the stored zoom crop rectangle and AE/AF metering regions to the
 * supplied request builder, keeping all requests consistently framed.
 */
private void addRegionsToCaptureRequestBuilder(CaptureRequest.Builder builder) {
    // Independent keys — the order of these set() calls does not matter.
    builder.set(CaptureRequest.SCALER_CROP_REGION, mCropRegion);
    builder.set(CaptureRequest.CONTROL_AF_REGIONS, mAFRegions);
    builder.set(CaptureRequest.CONTROL_AE_REGIONS, mAERegions);
}