Example usage for android.hardware.camera2 CaptureRequest FLASH_MODE

Introduction

This page presents example usages of android.hardware.camera2 CaptureRequest.FLASH_MODE, collected from open-source projects.

Prototype

public static final CaptureRequest.Key<Integer> FLASH_MODE

Document

The desired mode for the camera device's flash control.

This control is only effective when a flash unit is available (CameraCharacteristics#FLASH_INFO_AVAILABLE, android.flash.info.available == true).

When this control is used, CaptureRequest#CONTROL_AE_MODE (android.control.aeMode) must be set to ON or OFF; otherwise the auto-exposure routine overrides the flash state.
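
As a minimal sketch of that contract (a hypothetical helper: the forceSingleFlash name and the manager, cameraId, and builder parameters are placeholders, not taken from the projects below; all classes are from android.hardware.camera2), a request that fires the flash manually might first verify that a flash unit exists and then pin AE to a compatible mode:

private void forceSingleFlash(CameraManager manager, String cameraId,
        CaptureRequest.Builder builder) throws CameraAccessException {
    CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
    // FLASH_MODE is ignored on devices without a flash unit.
    if (!Boolean.TRUE.equals(characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE))) {
        return;
    }
    // FLASH_MODE only takes effect while AE is ON or OFF; the ON_AUTO_FLASH and
    // ON_ALWAYS_FLASH AE modes let auto-exposure drive the flash instead.
    builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
    builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
}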

Usage

From source file:freed.cam.apis.camera2.modules.PictureModuleApi2.java

/**
 * Capture a still picture. This method should be called when we get a response in
 * the capture callback.
 */
protected void captureStillPicture() {
    Log.d(TAG, "StartStillCapture");

    // Use the same AE and AF modes as the preview.
    try {
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, cameraHolder.get(CaptureRequest.CONTROL_AF_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, cameraHolder.get(CaptureRequest.CONTROL_AE_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
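    // Carry the preview's flash mode over to the still-capture request as well.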
    try {
        captureBuilder.set(CaptureRequest.FLASH_MODE, cameraHolder.get(CaptureRequest.FLASH_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                cameraHolder.get(CaptureRequest.COLOR_CORRECTION_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                cameraHolder.get(CaptureRequest.COLOR_CORRECTION_TRANSFORM));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                cameraHolder.get(CaptureRequest.COLOR_CORRECTION_GAINS));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.TONEMAP_CURVE, cameraHolder.get(CaptureRequest.TONEMAP_CURVE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        if (Build.VERSION.SDK_INT >= VERSION_CODES.M)
            captureBuilder.set(CaptureRequest.TONEMAP_GAMMA, cameraHolder.get(CaptureRequest.TONEMAP_GAMMA));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }

    try {
        int awb = cameraHolder.get(CaptureRequest.CONTROL_AWB_MODE);
        captureBuilder.set(CaptureRequest.CONTROL_AWB_MODE, awb);
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.EDGE_MODE, cameraHolder.get(CaptureRequest.EDGE_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.HOT_PIXEL_MODE, cameraHolder.get(CaptureRequest.HOT_PIXEL_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.NOISE_REDUCTION_MODE,
                cameraHolder.get(CaptureRequest.NOISE_REDUCTION_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION,
                cameraHolder.get(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        long val = 0;
        if (!parameterHandler.ManualIso.GetStringValue().equals(KEYS.AUTO))
            val = (long) (AbstractManualShutter.getMilliSecondStringFromShutterString(
                    parameterHandler.ManualShutter.getStringValues()[parameterHandler.ManualShutter.GetValue()])
                    * 1000f);
        else
            val = cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
        Log.d(TAG, "Set ExposureTime for Capture to:" + val);
        captureBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, val);
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.SENSOR_SENSITIVITY,
                cameraHolder.get(CaptureRequest.SENSOR_SENSITIVITY));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE,
                cameraHolder.get(CaptureRequest.CONTROL_EFFECT_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
                cameraHolder.get(CaptureRequest.CONTROL_SCENE_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.LENS_FOCUS_DISTANCE,
                cameraHolder.get(CaptureRequest.LENS_FOCUS_DISTANCE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION,
                cameraUiWrapper.getActivityInterface().getOrientation());
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
                cameraHolder.get(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        captureBuilder.set(CaptureRequest.SCALER_CROP_REGION,
                cameraHolder.get(CaptureRequest.SCALER_CROP_REGION));
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }
    try {
        if (appSettingsManager.getApiString(AppSettingsManager.SETTING_LOCATION).equals(KEYS.ON))
            captureBuilder.set(CaptureRequest.JPEG_GPS_LOCATION,
                    cameraUiWrapper.getActivityInterface().getLocationHandler().getCurrentLocation());
    } catch (NullPointerException ex) {
        ex.printStackTrace();
    }

    prepareCaptureBuilder(captureBuilder);
    imagecount = 0;
    //mDngResult = null;
    if (parameterHandler.Burst != null && parameterHandler.Burst.GetValue() > 0) {
        initBurstCapture(captureBuilder, CaptureCallback);
    } else {
        //cameraHolder.CaptureSessionH.StopRepeatingCaptureSession();
        captureBuilder.setTag(mRequestCounter.getAndIncrement());
        captureBuilder.addTarget(mImageReader.getSurface());
        if (cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME) != null
                && cameraHolder.get(CaptureRequest.SENSOR_EXPOSURE_TIME) > 500000 * 1000)
            cameraHolder.CaptureSessionH.StopRepeatingCaptureSession();
        ImageHolder imageHolder = new ImageHolder();
        resultQueue.put((int) captureBuilder.build().getTag(), imageHolder);
        changeCaptureState(CaptureStates.image_capture_start);
        cameraHolder.CaptureSessionH.StartImageCapture(captureBuilder, CaptureCallback, mBackgroundHandler);
    }
}

From source file:cliq.com.cliqgram.fragments.CameraFragment.java

/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

        // Turn the flash on or off for this capture.
        captureBuilder.set(CaptureRequest.FLASH_MODE,
                flashOn ? CaptureResult.FLASH_MODE_SINGLE : CaptureResult.FLASH_MODE_OFF);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                showToast("Saved: " + mFile);
                Log.d(TAG, "Saved file: " + mFile.toString());
                unlockFocus();

                startImageDisplayActivity(mFile.getName());

            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file:com.obviousengine.android.focus.ZslFocusCamera.java

/**
 * Request a stream of images.
 *
 * @return true if successful, false if there was an error submitting the
 *         capture request.
 */
private boolean sendRepeatingCaptureRequest() {
    Timber.v("sendRepeatingCaptureRequest()");
    try {
        CaptureRequest.Builder builder;
        if (ZSL_ENABLED) {
            builder = device.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
        } else {
            builder = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        }

        builder.addTarget(previewSurface);

        if (ZSL_ENABLED) {
            builder.addTarget(captureImageReader.getSurface());
        }

        builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

        builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);

        addRegionsToCaptureRequestBuilder(builder);

        captureSession.setRepeatingRequest(builder.build(), captureManager, cameraHandler);
        return true;
    } catch (CameraAccessException e) {
        if (ZSL_ENABLED) {
            Timber.w(e, "Could not execute zero-shutter-lag repeating request.");
        } else {
            Timber.w(e, "Could not execute preview request.");
        }
        return false;
    }
}

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

private void addFlashToCaptureRequestBuilder(CaptureRequest.Builder builder, Flash flashMode) {
    switch (flashMode) {
    case ON:
        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
        builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
        break;
    case OFF:
        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
        break;
    case AUTO:
        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        break;
    }
}
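
Note that the AUTO case above sets only CONTROL_AE_MODE: with CONTROL_AE_MODE_ON_AUTO_FLASH the auto-exposure routine decides when to fire the flash, so any explicitly set FLASH_MODE would be overridden.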

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Request a stream of images.
 *
 * @return true if successful, false if there was an error submitting the
 *         capture request.
 */
private boolean sendRepeatingCaptureRequest() {
    Log.v(TAG, "sendRepeatingCaptureRequest()");
    try {
        CaptureRequest.Builder builder;
        if (ZSL_ENABLED) {
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
        } else {
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        }

        builder.addTarget(mPreviewSurface);

        if (ZSL_ENABLED) {
            builder.addTarget(mCaptureImageReader.getSurface());
        }

        builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

        builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);

        addRegionsToCaptureRequestBuilder(builder);

        mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
        return true;
    } catch (CameraAccessException e) {
        if (ZSL_ENABLED) {
            Log.v(TAG, "Could not execute zero-shutter-lag repeating request.", e);
        } else {
            Log.v(TAG, "Could not execute preview request.", e);
        }
        return false;
    }
}

From source file:com.oddsix.nutripro.fragments.Camera2Fragment.java

public void setFlash(int flashMode) {
    switch (flashMode) {
    case FLASH_AUTO:
        mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        break;
    case FLASH_OFF:
        mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_OFF);
        break;
    case FLASH_ON:
        mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
        break;
    }
    try {
        // Reset the auto-focus trigger
        mCaptureSession.capture(mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

private boolean sendRepeatingBurstCaptureRequest() {
    Log.v(TAG, "sendRepeatingBurstCaptureRequest()");
    try {
        CaptureRequest.Builder builder;
        builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
        builder.addTarget(mPreviewSurface);

        if (ZSL_ENABLED) {
            builder.addTarget(mCaptureImageReader.getSurface());
        }

        builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
        builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

        builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
        builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);

        addRegionsToCaptureRequestBuilder(builder);

        mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
        return true;
    } catch (CameraAccessException e) {
        Log.v(TAG, "Could not send repeating burst capture request.", e);
        return false;
    }
}

From source file:com.android.camera2.its.ItsService.java

private void do3A(JSONObject params) throws ItsException {
    try {
        // Start a 3A action, and wait for it to converge.
        // Get the converged values for each "A", and package into JSON result for caller.

        // 3A happens on full-res frames.
        Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
        int widths[] = new int[1];
        int heights[] = new int[1];
        int formats[] = new int[1];
        widths[0] = sizes[0].getWidth();
        heights[0] = sizes[0].getHeight();
        formats[0] = ImageFormat.YUV_420_888;
        int width = widths[0];
        int height = heights[0];

        prepareCaptureReader(widths, heights, formats, 1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(1);
        outputSurfaces.add(mCaptureReaders[0].getSurface());
        BlockingSessionCallback sessionListener = new BlockingSessionCallback();
        mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
        mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

        // Add a listener that just recycles buffers; they aren't saved anywhere.
        ImageReader.OnImageAvailableListener readerListener = createAvailableListenerDropper(mCaptureCallback);
        mCaptureReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);

        // Get the user-specified regions for AE, AWB, AF.
        // Note that the user specifies normalized [x,y,w,h], which is converted below
        // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
        // also has a fifth "weight" element: [x0,y0,x1,y1,w].
        MeteringRectangle[] regionAE = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        MeteringRectangle[] regionAF = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        MeteringRectangle[] regionAWB = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        if (params.has(REGION_KEY)) {
            JSONObject regions = params.getJSONObject(REGION_KEY);
            if (regions.has(REGION_AE_KEY)) {
                regionAE = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AE_KEY), true,
                        width, height);
            }
            if (regions.has(REGION_AF_KEY)) {
                regionAF = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AF_KEY), true,
                        width, height);
            }
            if (regions.has(REGION_AWB_KEY)) {
                regionAWB = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AWB_KEY), true,
                        width, height);
            }
        }

        // If AE or AWB lock is specified, then the 3A will converge first and then lock these
        // values, waiting until the HAL has reported that the lock was successful.
        mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
        mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);

        // By default, AE and AF both get triggered, but the user can optionally override this.
        // Also, AF won't get triggered if the lens is fixed-focus.
        boolean doAE = true;
        boolean doAF = true;
        if (params.has(TRIGGER_KEY)) {
            JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
            if (triggers.has(TRIGGER_AE_KEY)) {
                doAE = triggers.getBoolean(TRIGGER_AE_KEY);
            }
            if (triggers.has(TRIGGER_AF_KEY)) {
                doAF = triggers.getBoolean(TRIGGER_AF_KEY);
            }
        }
        if (doAF && mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) == 0) {
            // Send a dummy result back for the code that is waiting for this message to see
            // that AF has converged.
            Logt.i(TAG, "Ignoring request for AF on fixed-focus camera");
            mSocketRunnableObj.sendResponse("afResult", "0.0");
            doAF = false;
        }

        mInterlock3A.open();
        mIssuedRequest3A = false;
        mConvergedAE = false;
        mConvergedAWB = false;
        mConvergedAF = false;
        mLockedAE = false;
        mLockedAWB = false;
        long tstart = System.currentTimeMillis();
        boolean triggeredAE = false;
        boolean triggeredAF = false;

        Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d", doAE ? 1 : 0,
                doAF ? 1 : 0, mNeedsLockedAE ? 1 : 0, mNeedsLockedAWB ? 1 : 0));

        // Keep issuing capture requests until 3A has converged.
        while (true) {

            // Block until can take the next 3A frame. Only want one outstanding frame
            // at a time, to simplify the logic here.
            if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                    || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                throw new ItsException("3A failed to converge (timeout)");
            }
            mInterlock3A.close();

            // If not converged yet, issue another capture request.
            if ((doAE && (!triggeredAE || !mConvergedAE)) || !mConvergedAWB
                    || (doAF && (!triggeredAF || !mConvergedAF)) || (doAE && mNeedsLockedAE && !mLockedAE)
                    || (mNeedsLockedAWB && !mLockedAWB)) {

                // Baseline capture request for 3A.
                CaptureRequest.Builder req = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                req.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
                req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                req.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                req.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);

                if (mConvergedAE && mNeedsLockedAE) {
                    req.set(CaptureRequest.CONTROL_AE_LOCK, true);
                }
                if (mConvergedAWB && mNeedsLockedAWB) {
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                }

                // Trigger AE first.
                if (doAE && !triggeredAE) {
                    Logt.i(TAG, "Triggering AE");
                    req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                            CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                    triggeredAE = true;
                }

                // After AE has converged, trigger AF.
                if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
                    Logt.i(TAG, "Triggering AF");
                    req.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
                    triggeredAF = true;
                }

                req.addTarget(mCaptureReaders[0].getSurface());

                mIssuedRequest3A = true;
                mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
            } else {
                mSocketRunnableObj.sendResponse("3aConverged", "");
                Logt.i(TAG, "3A converged");
                break;
            }
        }
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    } catch (org.json.JSONException e) {
        throw new ItsException("JSON error: ", e);
    } finally {
        mSocketRunnableObj.sendResponse("3aDone", "");
    }
}

From source file:com.vest.album.fragment.CameraBasicFragment.java

private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
    //        if (mFlashSupported) {
    ////            if (mFlashed) {
    //            requestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
    //        } else {
    requestBuilder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
    //        }
}
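
In this last variant the auto-flash branch has been commented out, so the helper unconditionally forces FLASH_MODE_OFF regardless of flash support.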