Usage examples for android.hardware.camera2.CaptureResult#getFrameNumber()
public long getFrameNumber()
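Before the full examples, a minimal sketch of the typical call site: getFrameNumber() is read from the CaptureResult (or TotalCaptureResult) delivered to a CameraCaptureSession.CaptureCallback. The FrameNumberLogger class name and log tag are illustrative assumptions, not taken from either source file below.

import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.util.Log;

// Hypothetical callback that logs the frame number of every completed capture.
class FrameNumberLogger extends CameraCaptureSession.CaptureCallback {
    private static final String TAG = "FrameNumberLogger";

    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
            TotalCaptureResult result) {
        // getFrameNumber() identifies the frame this metadata belongs to within the
        // capture session, so results and images from the same frame can be correlated.
        Log.d(TAG, "Capture completed for frame " + result.getFrameNumber());
    }
}

Both examples below use the same idea: they read the frame number from a CaptureResult delivered through a metadata-change listener and pass it along with the auto-focus state.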
From source file: com.obviousengine.android.focus.ZslFocusCamera.java
/**
 * Instantiates a new camera based on Camera 2 API.
 *
 * @param device The underlying Camera 2 device.
 * @param characteristics The device's characteristics.
 * @param pictureSize the size of the final image to be taken.
 */
ZslFocusCamera(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
    Timber.v("Creating new ZslFocusCamera");
    this.device = device;
    this.characteristics = characteristics;
    fullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

    cameraThread = new HandlerThread("FocusCamera");
    // If this thread stalls, it will delay viewfinder frames.
    cameraThread.setPriority(Thread.MAX_PRIORITY);
    cameraThread.start();
    cameraHandler = new Handler(cameraThread.getLooper());

    cameraListenerThread = new HandlerThread("FocusCamera-Listener");
    cameraListenerThread.start();
    cameraListenerHandler = new Handler(cameraListenerThread.getLooper());

    // TODO: Encoding on multiple cores results in preview jank due to
    // excessive GC.
    int numEncodingCores = Utils.getNumCpuCores();
    imageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());

    captureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, cameraListenerHandler, imageSaverThreadPool);
    captureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
        @Override
        public void onReadyStateChange(boolean capturePossible) {
            readyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, capturePossible);
        }
    });

    // Listen for changes to auto focus state and dispatch to
    // focusStateListener.
    captureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
            new ImageCaptureManager.MetadataChangeListener() {
                @Override
                public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                        CaptureResult result) {
                    if (focusStateListener == null) {
                        return;
                    }
                    focusStateListener.onFocusStatusUpdate(
                            AutoFocusHelper.stateFromCamera2State(result.get(CaptureResult.CONTROL_AF_STATE)),
                            result.getFrameNumber());
                }
            });

    // Allocate the image reader to store all images received from the
    // camera.
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    captureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
            CAPTURE_IMAGE_FORMAT, MAX_CAPTURE_IMAGES);
    captureImageReader.setOnImageAvailableListener(captureManager, cameraHandler);

    mediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
}
From source file: com.android.camera.one.v2.OneCameraZslImpl.java
/**
 * Instantiates a new camera based on Camera 2 API.
 *
 * @param device The underlying Camera 2 device.
 * @param characteristics The device's characteristics.
 * @param pictureSize the size of the final image to be taken.
 */
OneCameraZslImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
    Log.v(TAG, "Creating new OneCameraZslImpl");
    mDevice = device;
    mCharacteristics = characteristics;
    mLensRange = LensRangeCalculator.getDiopterToRatioCalculator(characteristics);
    mDirection = new CameraDirectionProvider(mCharacteristics);
    mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

    mCameraThread = new HandlerThread("OneCamera2");
    // If this thread stalls, it will delay viewfinder frames.
    mCameraThread.setPriority(Thread.MAX_PRIORITY);
    mCameraThread.start();
    mCameraHandler = new Handler(mCameraThread.getLooper());

    mCameraListenerThread = new HandlerThread("OneCamera2-Listener");
    mCameraListenerThread.start();
    mCameraListenerHandler = new Handler(mCameraListenerThread.getLooper());

    // TODO: Encoding on multiple cores results in preview jank due to
    // excessive GC.
    int numEncodingCores = CameraUtil.getNumCpuCores();
    mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());

    mCaptureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
            mImageSaverThreadPool);
    mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
        @Override
        public void onReadyStateChange(boolean capturePossible) {
            mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, capturePossible);
        }
    });

    // Listen for changes to auto focus state and dispatch to
    // mFocusStateListener.
    mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
            new ImageCaptureManager.MetadataChangeListener() {
                @Override
                public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                        CaptureResult result) {
                    FocusStateListener listener = mFocusStateListener;
                    if (listener != null) {
                        listener.onFocusStatusUpdate(
                                AutoFocusHelper.stateFromCamera2State(
                                        result.get(CaptureResult.CONTROL_AF_STATE)),
                                result.getFrameNumber());
                    }
                }
            });

    // Allocate the image reader to store all images received from the
    // camera.
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
            sCaptureImageFormat, MAX_CAPTURE_IMAGES);
    mCaptureImageReader.setOnImageAvailableListener(mCaptureManager, mCameraHandler);

    mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
}