Example usage for android.hardware.camera2 CaptureRequest STATISTICS_LENS_SHADING_MAP_MODE

List of usage examples for android.hardware.camera2 CaptureRequest STATISTICS_LENS_SHADING_MAP_MODE

Introduction

On this page you can find example usages of the android.hardware.camera2 CaptureRequest key STATISTICS_LENS_SHADING_MAP_MODE.

Prototype

Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE


Document

Whether the camera device will output the lens shading map in output result metadata.

When set to ON, android.statistics.lensShadingMap will be provided in the output result metadata.

ON is always supported on devices with the RAW capability.

Possible values:

  • STATISTICS_LENS_SHADING_MAP_MODE_OFF (OFF)
  • STATISTICS_LENS_SHADING_MAP_MODE_ON (ON)

Available values for this device:
CameraCharacteristics.STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES (android.statistics.info.availableLensShadingMapModes)

Optional - This value may be null on some devices.

Full capability - Present on all camera devices that report being HARDWARE_LEVEL_FULL devices in the CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL (android.info.supportedHardwareLevel) key.
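
A minimal sketch (not taken from either usage example below; LensShadingMapHelper and configureLensShadingMap are hypothetical names) of how the availability list above might be consulted before enabling the map. On devices where the availability key is null it falls back to OFF.

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;

final class LensShadingMapHelper {
    /**
     * Enable the lens shading map only if the device lists
     * STATISTICS_LENS_SHADING_MAP_MODE_ON among its available modes.
     * The availability key may be null on some devices, in which case
     * the map is left OFF.
     */
    static void configureLensShadingMap(CameraCharacteristics characteristics,
            CaptureRequest.Builder builder) {
        int[] modes = characteristics.get(
                CameraCharacteristics.STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES);
        boolean onSupported = false;
        if (modes != null) {
            for (int mode : modes) {
                if (mode == CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON) {
                    onSupported = true;
                    break;
                }
            }
        }
        builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                onSupported ? CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON
                            : CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_OFF);
    }
}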

Usage

From source file: com.vest.album.fragment.CameraBasicFragment.java

/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());
        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        captureBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
        // Hard-coded manual exposure time (ns) and sensitivity (ISO) values; production
        // code would query the sensor's supported ranges from CameraCharacteristics.
        captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, (long) ((214735991 - 13231) / 2));
        captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
        captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, (10000 - 100) / 2); // ISO
        setAutoFlash(captureBuilder);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                //                    showToast("Saved: " + mFile);
                //                    Log.d(TAG, mFile.toString());
                //                    unlockFocus();
                handler.sendEmptyMessage(1);
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        callback.onPhotoError("?");
        e.printStackTrace();
    }
}
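
The callback above only forwards a message to a handler. If the lens shading map requested with STATISTICS_LENS_SHADING_MAP_MODE_ON is actually needed, it could be read back from the TotalCaptureResult along the lines of the hypothetical helper below (ShadingMapLogger is not part of the original fragment).

import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.LensShadingMap;
import android.hardware.camera2.params.RggbChannelVector;
import android.util.Log;

final class ShadingMapLogger {
    private static final String TAG = "ShadingMapLogger";

    /**
     * Log the lens shading map attached to a completed capture, e.g. from
     * onCaptureCompleted() above. Returns quietly if the device did not
     * attach a map to this result.
     */
    static void logShadingMap(TotalCaptureResult result) {
        LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_MAP);
        if (map == null) {
            return;
        }
        // The map is a low-resolution grid of per-channel gain factors.
        float redGainTopLeft = map.getGainFactor(RggbChannelVector.RED, 0, 0);
        Log.d(TAG, "Lens shading map " + map.getColumnCount() + "x" + map.getRowCount()
                + ", red gain at (0,0) = " + redGainTopLeft);
    }
}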

From source file: com.android.camera2.its.ItsService.java

private void doCapture(JSONObject params) throws ItsException {
    try {
        // Parse the JSON to get the list of capture requests.
        List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(mCamera, params);

        // Set the output surface(s) and listeners.
        int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int numSurfaces = 0;
        try {
            mCountRawOrDng.set(0);
            mCountJpg.set(0);
            mCountYuv.set(0);
            mCountRaw10.set(0);
            mCountCapRes.set(0);
            mCaptureRawIsDng = false;
            mCaptureResults = new CaptureResult[requests.size()];

            JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
            if (jsonOutputSpecs != null) {
                numSurfaces = jsonOutputSpecs.length();
                if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
                    throw new ItsException("Too many output surfaces");
                }
                for (int i = 0; i < numSurfaces; i++) {
                    // Get the specified surface.
                    JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                    String sformat = surfaceObj.optString("format");
                    Size sizes[];
                    if ("yuv".equals(sformat) || "".equals(sformat)) {
                        // Default to YUV if no format is specified.
                        formats[i] = ImageFormat.YUV_420_888;
                        sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                    } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                        formats[i] = ImageFormat.JPEG;
                        sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                    } else if ("raw".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("raw10".equals(sformat)) {
                        formats[i] = ImageFormat.RAW10;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("dng".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        mCaptureRawIsDng = true;
                    } else {
                        throw new ItsException("Unsupported format: " + sformat);
                    }
                    // If the size is omitted, then default to the largest allowed size for the
                    // format.
                    widths[i] = surfaceObj.optInt("width");
                    heights[i] = surfaceObj.optInt("height");
                    if (widths[i] <= 0) {
                        if (sizes == null || sizes.length == 0) {
                            throw new ItsException(String
                                    .format("Zero stream configs available for requested format: %s", sformat));
                        }
                        widths[i] = sizes[0].getWidth();
                    }
                    if (heights[i] <= 0) {
                        heights[i] = sizes[0].getHeight();
                    }
                }
            } else {
                // No surface(s) specified at all.
                // Default: a single output surface which is full-res YUV.
                Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                numSurfaces = 1;
                widths[0] = sizes[0].getWidth();
                heights[0] = sizes[0].getHeight();
                formats[0] = ImageFormat.YUV_420_888;
            }

            prepareCaptureReader(widths, heights, formats, numSurfaces);
            List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
            for (int i = 0; i < numSurfaces; i++) {
                outputSurfaces.add(mCaptureReaders[i].getSurface());
            }
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            for (int i = 0; i < numSurfaces; i++) {
                ImageReader.OnImageAvailableListener readerListener = createAvailableListener(mCaptureCallback);
                mCaptureReaders[i].setOnImageAvailableListener(readerListener, mSaveHandlers[i]);
            }

            // Plan for how many callbacks need to be received throughout the duration of this
            // sequence of capture requests. There is one callback per image surface, and one
            // callback for the CaptureResult, for each capture.
            int numCaptures = requests.size();
            mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1));

        } catch (CameraAccessException e) {
            throw new ItsException("Error configuring outputs", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error", e);
        }

        // Initiate the captures.
        for (int i = 0; i < requests.size(); i++) {
            // For DNG captures, the lens shading (LSC) map must be available in the result.
            if (mCaptureRawIsDng) {
                requests.get(i).set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                        CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
            }

            CaptureRequest.Builder req = requests.get(i);
            for (int j = 0; j < numSurfaces; j++) {
                req.addTarget(mCaptureReaders[j].getSurface());
            }
            mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
        }

        // Make sure all callbacks have been hit (wait until captures are done).
        // If no new callbacks arrive within the timeout period, then fail.
        int currentCount = mCountCallbacksRemaining.get();
        while (currentCount > 0) {
            try {
                Thread.sleep(TIMEOUT_CALLBACK * 1000);
            } catch (InterruptedException e) {
                throw new ItsException("Timeout failure", e);
            }
            int newCount = mCountCallbacksRemaining.get();
            if (newCount == currentCount) {
                throw new ItsException("No callback received within timeout");
            }
            currentCount = newCount;
        }
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    }
}
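
As the comment in the capture loop notes, DNG captures need the lens shading map in the result metadata; that result is typically handed, together with the RAW_SENSOR image, to android.hardware.camera2.DngCreator when the DNG file is written. The sketch below illustrates that last step under stated assumptions (DngWriter and its arguments are hypothetical; the ITS service has its own saving path).

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.media.Image;
import java.io.FileOutputStream;
import java.io.IOException;

final class DngWriter {
    /**
     * Write a RAW_SENSOR image and its capture result out as a DNG file.
     * The result should come from a request captured with
     * STATISTICS_LENS_SHADING_MAP_MODE_ON, as in the loop above.
     */
    static void writeDng(CameraCharacteristics characteristics, CaptureResult result,
            Image rawImage, String path) throws IOException {
        DngCreator creator = new DngCreator(characteristics, result);
        try (FileOutputStream out = new FileOutputStream(path)) {
            creator.writeImage(out, rawImage);
        } finally {
            creator.close();
        }
    }
}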