Example usage for android.hardware.camera2 CameraAccessException printStackTrace

List of usage examples for android.hardware.camera2 CameraAccessException printStackTrace

Introduction

On this page you can find example usage for android.hardware.camera2 CameraAccessException printStackTrace.

Prototype

public void printStackTrace() 

Document

Prints this throwable and its backtrace to the standard error stream.
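
Before the full fragment examples below, here is a minimal, self-contained sketch of the pattern they all share: a camera2 call that can throw CameraAccessException is wrapped in a try/catch, and the handler prints the backtrace. The CameraInfoLogger class, the logLensFacing method, and the use of the first camera id are illustrative assumptions, not part of the samples below.

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;

public class CameraInfoLogger {

    /**
     * Queries the lens-facing direction of the first camera and prints it.
     * Illustrative only; real code should also handle a null CameraManager
     * and an empty camera id list more gracefully.
     */
    public static void logLensFacing(Context context) {
        CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        try {
            String[] cameraIds = manager.getCameraIdList();
            if (cameraIds.length == 0) {
                return;
            }
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraIds[0]);
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            System.out.println("Lens facing: " + facing);
        } catch (CameraAccessException e) {
            // Prints this throwable and its backtrace to the standard error stream.
            e.printStackTrace();
        }
    }
}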

Usage

From source file:com.ape.camera2raw.Camera2RawFragment.java

/**
 * Initiate a still image capture.
 * <p/>
 * This function sends a capture request that initiates a pre-capture sequence in our state
 * machine that waits for auto-focus to finish, ending in a "locked" state where the lens is no
 * longer moving, waits for auto-exposure to choose a good exposure value, and waits for
 * auto-white-balance to converge.
 */
private void takePicture() {
    synchronized (mCameraStateLock) {
        mPendingUserCaptures++;

        // If we already triggered a pre-capture sequence, or are in a state where we cannot
        // do this, return immediately.
        if (mState != STATE_PREVIEW) {
            return;
        }

        try {
            // Trigger an auto-focus run if camera is capable. If the camera is already focused,
            // this should do nothing.
            if (!mNoAFRun) {
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                        CameraMetadata.CONTROL_AF_TRIGGER_START);
            }

            // If this is not a legacy device, we can also trigger an auto-exposure metering
            // run.
            if (!isLegacyLocked()) {
                // Tell the camera to start the auto-exposure precapture metering sequence.
                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                        CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
            }

            // Update state machine to wait for auto-focus, auto-exposure, and
            // auto-white-balance (aka. "3A") to converge.
            mState = STATE_WAITING_FOR_3A_CONVERGENCE;

            // Start a timer for the pre-capture sequence.
            startTimerLocked();

            // Replace the existing repeating request with one with updated 3A triggers.
            mCaptureSession.capture(mPreviewRequestBuilder.build(), mPreCaptureCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }
}

From source file:com.quectel.camera2test.Camera2RawFragment.java

/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(),
                "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            Log.d(TAG, "---characteristics = " + characteristics);
            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                Log.d(TAG, "-1--characteristics continue");
                continue;
            }

            Log.d(TAG, "-1--characteristics = " + characteristics);
            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(),
                                    ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(),
            "dialog");
    return false;
}

From source file:com.ape.camera2raw.Camera2RawFragment.java

/**
 * Sets up state related to camera that is needed before opening a {@link CameraDevice}.
 */
private boolean setUpCameraOutputs() {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    if (manager == null) {
        ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(),
                "dialog");
        return false;
    }
    try {
        // Find a CameraDevice that supports RAW captures, and configure state.
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We only use a camera that supports RAW in this sample.
            if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES),
                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // For still image captures, we use the largest available size.
            Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());

            Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                    new CompareSizesByArea());
            Log.d("WAY", "largestRaw = " + largestRaw);
            //largestRaw = new Size(4208, 3120);//9051

            synchronized (mCameraStateLock) {
                // Set up ImageReaders for JPEG and RAW outputs.  Place these in a reference
                // counted wrapper to ensure they are only closed when all background tasks
                // using them are finished.
                if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) {
                    mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance(
                            largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5));
                }
                mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener,
                        mBackgroundHandler);

                if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) {
                    mRawImageReader = new RefCountedAutoCloseable<>(
                            ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(),
                                    ImageFormat.RAW_SENSOR, /*maxImages*/ 5));
                }
                mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener,
                        mBackgroundHandler);

                mCharacteristics = characteristics;
                mCameraId = cameraId;
            }
            return true;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }

    // If we found no suitable cameras for capturing RAW, warn the user.
    ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(),
            "dialog");
    return false;
}

From source file:us.bojie.rawgyroscope.camera.Camera2RawFragment.java

/**
 * Send a capture request to the camera device that initiates a capture targeting the JPEG and
 * RAW outputs.
 * <p/>
 * Call this only with {@link #mCameraStateLock} held.
 */
private void captureStillPictureLocked() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);

        captureBuilder.addTarget(mJpegImageReader.get().getSurface());
        captureBuilder.addTarget(mRawImageReader.get().getSurface());

        // Use the same AE and AF modes as the preview.
        setup3AControlsLocked(captureBuilder);

        // Set orientation.
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, sensorToDeviceRotation(mCharacteristics, rotation));

        // Set request tag to easily track results in callbacks.
        captureBuilder.setTag(mRequestCounter.getAndIncrement());

        CaptureRequest request = captureBuilder.build();

        // Create an ImageSaverBuilder in which to collect results, and add it to the queue
        // of active requests.
        ImageSaver.ImageSaverBuilder jpegBuilder = new ImageSaver.ImageSaverBuilder(activity, mListener)
                .setCharacteristics(mCharacteristics);
        ImageSaver.ImageSaverBuilder rawBuilder = new ImageSaver.ImageSaverBuilder(activity, mListener)
                .setCharacteristics(mCharacteristics);

        mJpegResultQueue.put((int) request.getTag(), jpegBuilder);
        mRawResultQueue.put((int) request.getTag(), rawBuilder);

        mCaptureSession.capture(request, mCaptureCallback, mBackgroundHandler);

    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
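
Note that all three samples handle CameraAccessException the same way, by calling printStackTrace(). In production code you might instead route the failure through android.util.Log (for example, Log.e(TAG, "Camera access failed", e)) or surface an error to the user; the samples keep the handler minimal to focus on the camera2 capture flow.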