Example usage for android.graphics ImageFormat YUV_420_888

Introduction

This page collects usage examples for android.graphics.ImageFormat.YUV_420_888.

Prototype

int YUV_420_888

Document

Multi-plane Android YUV 420 format

This format is a generic YCbCr format, capable of describing any 4:2:0 chroma-subsampled planar or semiplanar buffer (but not fully interleaved), with 8 bits per color sample.

Images in this format are always represented by three separate buffers of data, one for each color plane.
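
Because each plane carries its own row stride and pixel stride, the data cannot in general be copied out as one contiguous block. Below is a minimal sketch (the class and method names are illustrative, not part of the Android API) that packs each plane of a YUV_420_888 Image into a tightly packed byte array:

import android.graphics.ImageFormat;
import android.media.Image;
import java.nio.ByteBuffer;

/** Minimal sketch: pack the planes of a YUV_420_888 Image into tight arrays. */
public final class YuvPlaneReader {

    /**
     * Copies one plane into a tightly packed byte array, honoring the plane's
     * row stride and pixel stride. For the Y plane pass the full image
     * width/height; for U and V pass the halved (chroma) dimensions.
     */
    public static byte[] packPlane(Image.Plane plane, int width, int height) {
        ByteBuffer buffer = plane.getBuffer();
        int rowStride = plane.getRowStride();
        int pixelStride = plane.getPixelStride();
        byte[] out = new byte[width * height];
        byte[] row = new byte[rowStride];
        for (int r = 0; r < height; r++) {
            // The last row may be shorter than rowStride.
            int length = Math.min(rowStride, buffer.remaining());
            buffer.get(row, 0, length);
            for (int c = 0; c < width; c++) {
                out[r * width + c] = row[c * pixelStride];
            }
        }
        return out;
    }

    public static byte[][] packAllPlanes(Image image) {
        if (image.getFormat() != ImageFormat.YUV_420_888) {
            throw new IllegalArgumentException("Expected YUV_420_888");
        }
        int w = image.getWidth();
        int h = image.getHeight();
        Image.Plane[] planes = image.getPlanes(); // [0]=Y, [1]=U, [2]=V
        return new byte[][] {
                packPlane(planes[0], w, h),
                packPlane(planes[1], (w + 1) / 2, (h + 1) / 2),
                packPlane(planes[2], (w + 1) / 2, (h + 1) / 2),
        };
    }
}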

Usage

From source file: com.team3061.cheezdroid.SelfieModeFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            Log.i("CameraId", cameraId);
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                Log.w(TAG, "Skipped for facing " + cameraId);
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                Log.w(TAG, "Skipped for null map " + cameraId);
                continue;
            }

            // Prefer a small capture size: take the first output size no wider
            // than 352 px, falling back to the last size the camera reports.
            Size[] imgSizes = map.getOutputSizes(ImageFormat.YUV_420_888);
            Size selected = imgSizes[imgSizes.length - 1];
            for (Size s : imgSizes) {
                if (s.getWidth() <= 352) {
                    selected = s;
                    break;
                }
            }
            mImageReader = ImageReader.newInstance(selected.getWidth(), selected.getHeight(),
                    ImageFormat.YUV_420_888, /*maxImages*/3);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (sensorOrientation == 90 || sensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (sensorOrientation == 0 || sensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, selected);

            Log.i(TAG, "Size : " + mPreviewSize);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        e.printStackTrace();
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance("Camera error").show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
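
The mOnImageAvailableListener registered above is not part of this snippet. Whatever it does, it must close each acquired Image promptly, or the reader stalls once maxImages (3 here) buffers are outstanding. A minimal sketch, with processYuvImage() as a hypothetical placeholder for the real work:

import android.media.Image;
import android.media.ImageReader;

// Minimal sketch of an image-available listener for the reader above.
// acquireLatestImage() drops stale frames; close() must always be called,
// or the ImageReader runs out of buffers after maxImages acquisitions.
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
        new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = reader.acquireLatestImage();
                if (image == null) {
                    return; // no frame ready yet
                }
                try {
                    processYuvImage(image); // placeholder for real processing
                } finally {
                    image.close();
                }
            }
        };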

From source file: com.example.android.camera2basic.Fragment.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            //
            // maxImages: how many Images the ImageReader buffers at once.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());

            Log.d(TAG, "largest.width: " + largest.getWidth());
            Log.d(TAG, "largest.height: " + largest.getHeight());

            //                mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
            //                        ImageFormat.YUV_420_888, /*maxImages*/5);
            //
            //                mImageReader.setOnImageAvailableListener(
            //                        mOnImageAvailableListener, null);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            // Display rotation: holding the device sideways reports ROTATION_90
            // or ROTATION_270; upright reports ROTATION_0 or ROTATION_180.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            // ??(""?0, , ?90)
            // ?, ?, ?, , 90, switch
            // ??
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            Log.d(TAG, "displayRotation: " + displayRotation);
            Log.d(TAG, "sensorOritentation: " + mSensorOrientation);
            switch (displayRotation) {
            // With ROTATION_0 / ROTATION_180 the display is in its natural
            // orientation; if the sensor is mounted at 90 or 270 degrees,
            // the preview dimensions must be swapped (set true).
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            // With ROTATION_90 / ROTATION_270 the display is rotated; if the
            // sensor is mounted at 0 or 180 degrees, the dimensions must be
            // swapped (set true).
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }
            // Get the display size in pixels.
            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            // Preview size requested by the caller.
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            // Upper bound for the preview, initially the display (texture) size.
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;
            Log.d(TAG, "??maxPreviewWidth: " + maxPreviewWidth);
            Log.d(TAG, "??maxPreviewHeight: " + maxPreviewHeight);
            // If the dimensions are swapped, swap the display bounds as well.
            if (swappedDimensions) {
                //                    rotatedPreviewWidth = height;
                //                    rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            Log.d(TAG, "?");
            Log.d(TAG, "real preview width: " + rotatedPreviewWidth);
            Log.d(TAG, "real preview height: " + rotatedPreviewHeight);

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            // Note: map.getOutputSizes(SurfaceTexture.class) returns the sizes
            // supported for SurfaceTexture output.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                // Landscape orientation.
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                // Portrait orientation.
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Log the chosen previewSize.
            Log.d(TAG, "mPreviewSize.getWidth: " + mPreviewSize.getWidth());
            Log.d(TAG, "mPreviewSize.getHeight: " + mPreviewSize.getHeight());
            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            //                mPreviewSize = new Size(width,height);
            Log.d(TAG, " mPreviewSize " + mPreviewSize.getWidth() + mPreviewSize.getHeight());

            sendQuene = CameraActivity.quene.getH264SendQueue();

            mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                    ImageFormat.YV12, /*maxImages*/5);

            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, null);
            //                mH264Encode = new EncoderH264(960,540,framerate);
            //                mH264Encode = new EncoderH264(mPreviewSize.getWidth(),mPreviewSize.getHeight(),framerate);
            mH264Encode = new EncoderH264(mPreviewSize.getHeight(), mPreviewSize.getWidth(), framerate);

            Log.d(TAG, "????");
            try {
                mH264Encode.createFile();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.

    }
}
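
The CompareSizesByArea comparator used above is not shown in the snippet. A sketch that mirrors the helper shipped with Google's camera2basic sample, which these fragments adapt:

import android.util.Size;
import java.util.Comparator;

// Comparator used above to rank output sizes by total pixel count.
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Cast to long to avoid overflow when multiplying large dimensions.
        return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                - (long) rhs.getWidth() * rhs.getHeight());
    }
}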

From source file: rqg.fantasy.rtmpdemo.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // This sample uses only the front-facing camera.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing == null || facing != CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.YUV_420_888, /*maxImages*/5);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}
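
Every setUpCameraOutputs() variant on this page also calls a chooseOptimalSize() helper that the snippets omit. The sketch below follows the stock camera2basic implementation; individual projects may have modified it:

import android.util.Log;
import android.util.Size;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

// Sketch of chooseOptimalSize(), after the stock camera2basic sample:
// prefer the smallest size at least as large as the texture view that
// matches the target aspect ratio, subject to the max bounds.
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight,
        int maxWidth, int maxHeight, Size aspectRatio) {
    // Candidates big enough for the view, and candidates that are too small,
    // both restricted to the max bounds and the requested aspect ratio.
    List<Size> bigEnough = new ArrayList<>();
    List<Size> notBigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                && option.getHeight() == option.getWidth() * h / w) {
            if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                bigEnough.add(option);
            } else {
                notBigEnough.add(option);
            }
        }
    }
    // Pick the smallest big-enough size; failing that, the largest too-small
    // one; failing that, give up and take the first choice.
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else if (notBigEnough.size() > 0) {
        return Collections.max(notBigEnough, new CompareSizesByArea());
    } else {
        Log.e("Camera2Basic", "Couldn't find any suitable preview size");
        return choices[0];
    }
}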

From source file: com.tzutalin.dlibtest.CameraConnectionFragment.java

/**
 * Creates a new {@link CameraCaptureSession} for camera preview.
 */
@SuppressLint("LongLogTag")
@DebugLog
private void createCameraPreviewSession() {
    try {
        final SurfaceTexture texture = textureView.getSurfaceTexture();
        assert texture != null;

        // We configure the size of default buffer to be the size of camera preview we want.
        texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        //texture.setOnFrameAvailableListener();

        // This is the output Surface we need to start preview.
        final Surface surface = new Surface(texture);

        // We set up a CaptureRequest.Builder with the output Surface.
        previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        //previewRequestBuilder.addTarget(surface);

        // Create the reader for the preview frames.
        previewReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(),
                ImageFormat.YUV_420_888, 2);

        previewReader.setOnImageAvailableListener(mOnGetPreviewListener, backgroundHandler);
        previewRequestBuilder.addTarget(previewReader.getSurface());

        // Here, we create a CameraCaptureSession for camera preview.
        cameraDevice.createCaptureSession(Arrays.asList(previewReader.getSurface()),
                new CameraCaptureSession.StateCallback() {

                    @Override
                    public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
                        // The camera is already closed
                        if (null == cameraDevice) {
                            return;
                        }

                        // When the session is ready, we start displaying the preview.
                        captureSession = cameraCaptureSession;
                        try {
                            // Auto focus should be continuous for camera preview.
                            previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                            // Flash is automatically enabled when necessary.
                            previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

                            // Finally, we start displaying the camera preview.
                            previewRequest = previewRequestBuilder.build();
                            captureSession.setRepeatingRequest(previewRequest, captureCallback,
                                    backgroundHandler);
                        } catch (final CameraAccessException e) {
                            Timber.tag(TAG).e("Exception!", e);
                        }
                    }

                    @Override
                    public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
                        showToast("Failed");
                    }
                }, null);
    } catch (final CameraAccessException e) {
        Timber.tag(TAG).e("Exception!", e);
    }

    mOnGetPreviewListener.initialize(getActivity().getApplicationContext(), getActivity().getAssets(),
            mScoreView, inferenceHandler);
}

From source file: io.engineersatwork.blink.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a back-facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For processing, a small area is cropped from the full camera image to speed things up.
            List<Size> supportedSizes = Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888));
            Collections.sort(supportedSizes, new CompareSizesByArea());
            Size smallest = supportedSizes.get(0);
            Log.e("abcde", "Use size: " + smallest.toString());
            mCropRect = new Rect(/*left*/ 0, /*top*/ 0, /*right*/ smallest.getWidth(),
                    /*bottom*/ smallest.getHeight());

            // The image reader uses the cropped size
            mImageReader = ImageReader.newInstance(smallest.getWidth(), smallest.getHeight(),
                    ImageFormat.YUV_420_888, /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, smallest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}

From source file: org.tensorflow.demo.Camera2BasicFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.YUV_420_888, 2);

            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could  exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                    rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            previewWidth = mPreviewSize.getWidth();
            previewHeight = mPreviewSize.getHeight();
            rgbBytes = new int[previewWidth * previewHeight];
            rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
            croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Bitmap.Config.ARGB_8888);

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = false; //= available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}

From source file: com.askjeffreyliu.camera2barcode.camera.CameraSource.java

/**
 * Creates a new {@link CameraCaptureSession} for camera preview.
 */
private void createCameraPreviewSession() {
    try {
        SurfaceTexture texture = mTextureView.getSurfaceTexture();
        assert texture != null;

        // We configure the size of default buffer to be the size of camera preview we want.
        texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

        mImageReaderPreview = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                ImageFormat.YUV_420_888, 1);
        mImageReaderPreview.setOnImageAvailableListener(mOnPreviewAvailableListener, mBackgroundHandler);

        // This is the output Surface we need to start preview.
        Surface surface = new Surface(texture);

        // We set up a CaptureRequest.Builder with the output Surface.
        mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        mPreviewRequestBuilder.addTarget(surface);
        mPreviewRequestBuilder.addTarget(mImageReaderPreview.getSurface());

        // Here, we create a CameraCaptureSession for camera preview.
        mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReaderPreview.getSurface()),
                new CameraCaptureSession.StateCallback() {

                    @Override
                    public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                        // The camera is already closed
                        if (null == mCameraDevice) {
                            return;
                        }

                        // When the session is ready, we start displaying the preview.
                        mCaptureSession = cameraCaptureSession;
                        try {
                            // Auto focus should be continuous for camera preview.
                            mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mFocusMode);
                            if (mFlashSupported) {
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mFlashMode);
                            }

                            // Finally, we start displaying the camera preview.
                            mPreviewRequest = mPreviewRequestBuilder.build();
                            mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback,
                                    mBackgroundHandler);
                        } catch (CameraAccessException e) {
                            e.printStackTrace();
                        }
                    }

                    @Override
                    public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                        Log.d(TAG, "Configuration failed!");
                    }
                }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file: com.android.camera2.its.ItsService.java

private void do3A(JSONObject params) throws ItsException {
    try {
        // Start a 3A action, and wait for it to converge.
        // Get the converged values for each "A", and package into JSON result for caller.

        // 3A happens on full-res frames.
        Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
        int widths[] = new int[1];
        int heights[] = new int[1];
        int formats[] = new int[1];
        widths[0] = sizes[0].getWidth();
        heights[0] = sizes[0].getHeight();
        formats[0] = ImageFormat.YUV_420_888;
        int width = widths[0];
        int height = heights[0];

        prepareCaptureReader(widths, heights, formats, 1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(1);
        outputSurfaces.add(mCaptureReaders[0].getSurface());
        BlockingSessionCallback sessionListener = new BlockingSessionCallback();
        mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
        mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

        // Add a listener that just recycles buffers; they aren't saved anywhere.
        ImageReader.OnImageAvailableListener readerListener = createAvailableListenerDropper(mCaptureCallback);
        mCaptureReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);

        // Get the user-specified regions for AE, AWB, AF.
        // Note that the user specifies normalized [x,y,w,h], which is converted below
        // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
        // also has a fifth "weight" element: [x0,y0,x1,y1,w].
        MeteringRectangle[] regionAE = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        MeteringRectangle[] regionAF = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        MeteringRectangle[] regionAWB = new MeteringRectangle[] {
                new MeteringRectangle(0, 0, width, height, 1) };
        if (params.has(REGION_KEY)) {
            JSONObject regions = params.getJSONObject(REGION_KEY);
            if (regions.has(REGION_AE_KEY)) {
                regionAE = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AE_KEY), true,
                        width, height);
            }
            if (regions.has(REGION_AF_KEY)) {
                regionAF = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AF_KEY), true,
                        width, height);
            }
            if (regions.has(REGION_AWB_KEY)) {
                regionAWB = ItsUtils.getJsonWeightedRectsFromArray(regions.getJSONArray(REGION_AWB_KEY), true,
                        width, height);
            }
        }

        // If AE or AWB lock is specified, then the 3A will converge first and then lock these
        // values, waiting until the HAL has reported that the lock was successful.
        mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
        mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);

        // By default, AE and AF both get triggered, but the user can optionally override this.
        // Also, AF won't get triggered if the lens is fixed-focus.
        boolean doAE = true;
        boolean doAF = true;
        if (params.has(TRIGGER_KEY)) {
            JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
            if (triggers.has(TRIGGER_AE_KEY)) {
                doAE = triggers.getBoolean(TRIGGER_AE_KEY);
            }
            if (triggers.has(TRIGGER_AF_KEY)) {
                doAF = triggers.getBoolean(TRIGGER_AF_KEY);
            }
        }
        if (doAF && mCameraCharacteristics.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) == 0) {
            // Send a dummy result back for the code that is waiting for this message to see
            // that AF has converged.
            Logt.i(TAG, "Ignoring request for AF on fixed-focus camera");
            mSocketRunnableObj.sendResponse("afResult", "0.0");
            doAF = false;
        }

        mInterlock3A.open();
        mIssuedRequest3A = false;
        mConvergedAE = false;
        mConvergedAWB = false;
        mConvergedAF = false;
        mLockedAE = false;
        mLockedAWB = false;
        long tstart = System.currentTimeMillis();
        boolean triggeredAE = false;
        boolean triggeredAF = false;

        Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d", doAE ? 1 : 0,
                doAF ? 1 : 0, mNeedsLockedAE ? 1 : 0, mNeedsLockedAWB ? 1 : 0));

        // Keep issuing capture requests until 3A has converged.
        while (true) {

            // Block until can take the next 3A frame. Only want one outstanding frame
            // at a time, to simplify the logic here.
            if (!mInterlock3A.block(TIMEOUT_3A * 1000)
                    || System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                throw new ItsException("3A failed to converge (timeout)");
            }
            mInterlock3A.close();

            // If not converged yet, issue another capture request.
            if ((doAE && (!triggeredAE || !mConvergedAE)) || !mConvergedAWB
                    || (doAF && (!triggeredAF || !mConvergedAF)) || (doAE && mNeedsLockedAE && !mLockedAE)
                    || (mNeedsLockedAWB && !mLockedAWB)) {

                // Baseline capture request for 3A.
                CaptureRequest.Builder req = mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
                req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                req.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
                req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                req.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                req.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_AUTO);
                req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);

                if (mConvergedAE && mNeedsLockedAE) {
                    req.set(CaptureRequest.CONTROL_AE_LOCK, true);
                }
                if (mConvergedAWB && mNeedsLockedAWB) {
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                }

                // Trigger AE first.
                if (doAE && !triggeredAE) {
                    Logt.i(TAG, "Triggering AE");
                    req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                            CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                    triggeredAE = true;
                }

                // After AE has converged, trigger AF.
                if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
                    Logt.i(TAG, "Triggering AF");
                    req.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
                    triggeredAF = true;
                }

                req.addTarget(mCaptureReaders[0].getSurface());

                mIssuedRequest3A = true;
                mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
            } else {
                mSocketRunnableObj.sendResponse("3aConverged", "");
                Logt.i(TAG, "3A converged");
                break;
            }
        }
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    } catch (org.json.JSONException e) {
        throw new ItsException("JSON error: ", e);
    } finally {
        mSocketRunnableObj.sendResponse("3aDone", "");
    }
}
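
The convergence and lock flags polled by the loop (mConvergedAE, mLockedAWB, and so on) are set by mCaptureResultListener, which is not shown. The sketch below illustrates how such a listener can derive them from the standard 3A state keys; the real ItsService listener also reports the converged values over its socket, so treat this as an outline rather than the actual code:

import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;

// Illustrative listener: updates the convergence/lock flags from the 3A
// state keys of each completed capture, then re-opens the interlock so the
// loop above can issue the next request.
private final CameraCaptureSession.CaptureCallback mCaptureResultListener =
        new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session,
                    CaptureRequest request, TotalCaptureResult result) {
                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
                Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
                if (aeState != null) {
                    mConvergedAE = aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                            || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED
                            || aeState == CaptureResult.CONTROL_AE_STATE_LOCKED;
                    mLockedAE = aeState == CaptureResult.CONTROL_AE_STATE_LOCKED;
                }
                if (afState != null) {
                    mConvergedAF = afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
                }
                if (awbState != null) {
                    mConvergedAWB = awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED
                            || awbState == CaptureResult.CONTROL_AWB_STATE_LOCKED;
                    mLockedAWB = awbState == CaptureResult.CONTROL_AWB_STATE_LOCKED;
                }
                mInterlock3A.open(); // let the loop issue the next request
            }
        };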

From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java

private void SetBurst(int burst) {
    try {
        Log.d(TAG, "Set Burst to: " + burst);
        previewSize = cameraHolder.getSizeForPreviewDependingOnImageSize(
                cameraHolder.map.getOutputSizes(ImageFormat.YUV_420_888), cameraHolder.characteristics,
                mImageWidth, mImageHeight);
        if (cameraUiWrapper.getFocusPeakProcessor() != null) {
            cameraUiWrapper.getFocusPeakProcessor().kill();
        }
        int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        int orientation = 0;
        switch (sensorOrientation) {
        case 90:
            orientation = 0;
            break;
        case 180:
            orientation = 90;
            break;
        case 270:
            orientation = 180;
            break;
        case 0:
            orientation = 270;
            break;
        }
        cameraHolder.CaptureSessionH.SetTextureViewSize(previewSize.getWidth(), previewSize.getHeight(),
                orientation, orientation + 180, false);
        SurfaceTexture texture = cameraHolder.CaptureSessionH.getSurfaceTexture();
        texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        previewsurface = new Surface(texture);

        // Note: this assumes getFocusPeakProcessor() is non-null from here on,
        // even though it was null-checked above.
        cameraUiWrapper.getFocusPeakProcessor().Reset(previewSize.getWidth(), previewSize.getHeight());
        Log.d(TAG, "Previewsurface vailid:" + previewsurface.isValid());
        cameraUiWrapper.getFocusPeakProcessor().setOutputSurface(previewsurface);
        camerasurface = cameraUiWrapper.getFocusPeakProcessor().getInputSurface();
        cameraHolder.CaptureSessionH.AddSurface(camerasurface, true);

        if (picFormat.equals(KEYS.JPEG))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.JPEG, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW10))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW10, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW_SENSOR,
                    burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW12))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW12, burst + 1);
        cameraHolder.CaptureSessionH.AddSurface(mImageReader.getSurface(), false);
        cameraHolder.CaptureSessionH.CreateCaptureSession();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if (parameterHandler.Burst != null)
        parameterHandler.Burst.ThrowCurrentValueChanged(parameterHandler.Burst.GetValue());
}

From source file: com.android.camera2.its.ItsService.java

private void doCapture(JSONObject params) throws ItsException {
    try {
        // Parse the JSON to get the list of capture requests.
        List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(mCamera, params);

        // Set the output surface(s) and listeners.
        int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int numSurfaces = 0;
        try {
            mCountRawOrDng.set(0);
            mCountJpg.set(0);
            mCountYuv.set(0);
            mCountRaw10.set(0);
            mCountCapRes.set(0);
            mCaptureRawIsDng = false;
            mCaptureResults = new CaptureResult[requests.size()];

            JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
            if (jsonOutputSpecs != null) {
                numSurfaces = jsonOutputSpecs.length();
                if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
                    throw new ItsException("Too many output surfaces");
                }
                for (int i = 0; i < numSurfaces; i++) {
                    // Get the specified surface.
                    JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                    String sformat = surfaceObj.optString("format");
                    Size sizes[];
                    if ("yuv".equals(sformat) || "".equals(sformat)) {
                        // Default to YUV if no format is specified.
                        formats[i] = ImageFormat.YUV_420_888;
                        sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                    } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                        formats[i] = ImageFormat.JPEG;
                        sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                    } else if ("raw".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("raw10".equals(sformat)) {
                        formats[i] = ImageFormat.RAW10;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("dng".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        mCaptureRawIsDng = true;
                    } else {
                        throw new ItsException("Unsupported format: " + sformat);
                    }
                    // If the size is omitted, then default to the largest allowed size for the
                    // format.
                    widths[i] = surfaceObj.optInt("width");
                    heights[i] = surfaceObj.optInt("height");
                    if (widths[i] <= 0) {
                        if (sizes == null || sizes.length == 0) {
                            throw new ItsException(String
                                    .format("Zero stream configs available for requested format: %s", sformat));
                        }
                        widths[i] = sizes[0].getWidth();
                    }
                    if (heights[i] <= 0) {
                        heights[i] = sizes[0].getHeight();
                    }
                }
            } else {
                // No surface(s) specified at all.
                // Default: a single output surface which is full-res YUV.
                Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                numSurfaces = 1;
                widths[0] = sizes[0].getWidth();
                heights[0] = sizes[0].getHeight();
                formats[0] = ImageFormat.YUV_420_888;
            }

            prepareCaptureReader(widths, heights, formats, numSurfaces);
            List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
            for (int i = 0; i < numSurfaces; i++) {
                outputSurfaces.add(mCaptureReaders[i].getSurface());
            }
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            for (int i = 0; i < numSurfaces; i++) {
                ImageReader.OnImageAvailableListener readerListener = createAvailableListener(mCaptureCallback);
                mCaptureReaders[i].setOnImageAvailableListener(readerListener, mSaveHandlers[i]);
            }

            // Plan for how many callbacks need to be received throughout the duration of this
            // sequence of capture requests. There is one callback per image surface, and one
            // callback for the CaptureResult, for each capture.
            int numCaptures = requests.size();
            mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1));

        } catch (CameraAccessException e) {
            throw new ItsException("Error configuring outputs", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error", e);
        }

        // Initiate the captures.
        for (int i = 0; i < requests.size(); i++) {
            // For DNG captures, need the LSC map to be available.
            if (mCaptureRawIsDng) {
                requests.get(i).set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
            }

            CaptureRequest.Builder req = requests.get(i);
            for (int j = 0; j < numSurfaces; j++) {
                req.addTarget(mCaptureReaders[j].getSurface());
            }
            mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
        }

        // Make sure all callbacks have been hit (wait until captures are done).
        // If no new callbacks arrive within the timeout window, fail.
        int currentCount = mCountCallbacksRemaining.get();
        while (currentCount > 0) {
            try {
                Thread.sleep(TIMEOUT_CALLBACK * 1000);
            } catch (InterruptedException e) {
                throw new ItsException("Timeout failure", e);
            }
            int newCount = mCountCallbacksRemaining.get();
            if (newCount == currentCount) {
                throw new ItsException("No callback received within timeout");
            }
            currentCount = newCount;
        }
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    }
}
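
For reference, a params object exercising the output-spec path above might be built as follows. The per-surface keys match the optString("format")/optInt("width")/optInt("height") reads in the loop; the top-level key name is an assumption, since ItsUtils.getOutputSpecs() defines the real one:

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

// Hypothetical capture parameters: one fully specified YUV surface and one
// JPEG surface that falls back to the largest supported size (width and
// height omitted).
JSONObject buildParams() throws JSONException {
    JSONObject yuv = new JSONObject()
            .put("format", "yuv")
            .put("width", 640)
            .put("height", 480);
    JSONObject jpeg = new JSONObject().put("format", "jpeg");
    JSONArray outputSpecs = new JSONArray().put(yuv).put(jpeg);
    // "outputSurfaces" is an assumed key name; ItsUtils.getOutputSpecs(params)
    // defines the one the service actually reads.
    return new JSONObject().put("outputSurfaces", outputSpecs);
}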