Java tutorial
/*
 * Copyright (C) The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.askjeffreyliu.camera2barcode.camera;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.annotation.RequiresPermission;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.util.Range;
import android.util.SparseIntArray;
import android.view.Surface;

import com.askjeffreyliu.camera2barcode.MultiResultEvent;
import com.askjeffreyliu.camera2barcode.camera2.AutoFitTextureView;
import com.askjeffreyliu.camera2barcode.utils.Utils;
import com.google.android.gms.common.images.Size;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.NotFoundException;
import com.google.zxing.PlanarYUVLuminanceSource;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
import com.google.zxing.multi.GenericMultipleBarcodeReader;
import com.google.zxing.multi.qrcode.QRCodeMultiReader;
import com.google.zxing.pdf417.PDF417Reader;

import org.greenrobot.eventbus.EventBus;

import java.io.IOException;
import java.lang.Thread.State;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

// Note: This requires Google Play Services 8.1 or higher, due to using indirect byte buffers for
// storing images.

/**
 * Manages the camera in conjunction with the underlying ZXing readers. This receives preview
 * frames from the camera at a specified rate, sending those frames to the readers as fast as
 * they are able to process them.
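 * <p/>
 * A minimal usage sketch (the activity context, texture view, and rotation argument are
 * assumptions for illustration; exception handling omitted):
 * <pre>{@code
 * CameraSource source = new CameraSource.Builder(context,
 *         new QRCodeMultiReader(),
 *         new GenericMultipleBarcodeReader(new com.google.zxing.datamatrix.DataMatrixReader()),
 *         new PDF417Reader())
 *         .setFacing(CameraSource.CAMERA_FACING_BACK)
 *         .build();
 * source.start(textureView, getWindowManager().getDefaultDisplay().getRotation());
 * // ... later ...
 * source.release();
 * }</pre>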
 * <p/>
 * The following Android permission is required to use the camera:
 * <ul>
 * <li>android.permission.CAMERA</li>
 * </ul>
 */
public class CameraSource {

    public static final int CAMERA_FACING_BACK = 0;
    public static final int CAMERA_FACING_FRONT = 1;

    private int mFacing = CAMERA_FACING_BACK;
    private int mFlashMode = CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH;
    private int mFocusMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;

    private static final String TAG = "OpenCameraSource";
    private static final double maxRatioTolerance = 0.1;

    private Context mContext;
    private QRCodeMultiReader mQrReader;
    private GenericMultipleBarcodeReader matrixReader;
    private PDF417Reader pdf417Reader;
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    private boolean cameraStarted = false;
    private int imageWidth, imageHeight;
    private Hashtable<DecodeHintType, Object> hints = new Hashtable<>();

    /**
     * A reference to the opened {@link CameraDevice}.
     */
    private CameraDevice mCameraDevice;

    /**
     * An additional thread for running tasks that shouldn't block the UI.
     */
    private HandlerThread mBackgroundThread;

    /**
     * A {@link Handler} for running tasks in the background.
     */
    private Handler mBackgroundHandler;

    private int mDisplayOrientation;

    /**
     * {@link CaptureRequest.Builder} for the camera preview.
     */
    private CaptureRequest.Builder mPreviewRequestBuilder;

    /**
     * {@link CaptureRequest} generated by {@link #mPreviewRequestBuilder}.
     */
    private CaptureRequest mPreviewRequest;

    /**
     * A {@link CameraCaptureSession} for camera preview.
     */
    private CameraCaptureSession mCaptureSession;

    /**
     * The {@link Size} of camera preview.
     */
    private Size mPreviewSize;

    /**
     * ID of the current {@link CameraDevice}.
     */
    private String mCameraId;

    /**
     * Max preview width that is guaranteed by Camera2 API.
     */
    private static final int MAX_PREVIEW_WIDTH = 1920;

    /**
     * Max preview height that is guaranteed by Camera2 API.
     */
    private static final int MAX_PREVIEW_HEIGHT = 1080;

    /**
     * An {@link AutoFitTextureView} for camera preview.
     */
    private AutoFitTextureView mTextureView;

    private CameraManager manager = null;

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    /**
     * A {@link Semaphore} to prevent the app from exiting before closing the camera.
     */
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);

    /**
     * Whether the current camera device supports Flash or not.
     */
    private boolean mFlashSupported;

    /**
     * Dedicated thread and associated runnable for calling into the readers with frames, as the
     * frames become available from the camera.
     */
    private Thread mProcessingThread;
    private FrameProcessingRunnable mFrameProcessor;

    /**
     * An {@link ImageReader} that handles live preview.
     */
    private ImageReader mImageReaderPreview;

    /**
     * A {@link CameraCaptureSession.CaptureCallback} that handles events for the preview
     * capture session.
     */
    private CameraCaptureSession.CaptureCallback mCaptureCallback = new CameraCaptureSession.CaptureCallback() {

        @Override
        public void onCaptureProgressed(@NonNull CameraCaptureSession session,
                                        @NonNull CaptureRequest request,
                                        @NonNull CaptureResult partialResult) {
        }

        @Override
        public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                       @NonNull CaptureRequest request,
                                       @NonNull TotalCaptureResult result) {
        }
    };

    /**
     * This is a callback object for the {@link ImageReader}. "onImageAvailable" will be called
     * when a preview frame is ready to be processed.
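     * <p/>
     * A condensed sketch of the acquire/close contract the listener below follows (the reader
     * is created with {@code maxImages = 1}, so each {@link Image} must be closed before the
     * next frame can be delivered):
     * <pre>{@code
     * Image image = reader.acquireNextImage();
     * if (image != null) {
     *     // copy the luminance plane out of the Image ...
     *     image.close();
     * }
     * }</pre>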
     */
    private final ImageReader.OnImageAvailableListener mOnPreviewAvailableListener = new ImageReader.OnImageAvailableListener() {
        @Override
        public void onImageAvailable(ImageReader reader) {
            Image mImage = reader.acquireNextImage();
            if (mImage == null) {
                return;
            }
            if (imageWidth == 0 || imageHeight == 0) {
                // The sensor delivers landscape frames, but setNextFrame() rotates them 90
                // degrees before decoding, so width and height are swapped for consumers.
                imageWidth = mImage.getHeight();
                imageHeight = mImage.getWidth();
            }
            mFrameProcessor.setNextFrame(mImage);
            mImage.close();
        }
    };

    /**
     * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state.
     */
    private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

        @Override
        public void onOpened(CameraDevice cameraDevice) {
            mCameraOpenCloseLock.release();
            mCameraDevice = cameraDevice;
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(CameraDevice cameraDevice) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(CameraDevice cameraDevice, int error) {
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }
    };

    //==============================================================================================
    // Builder
    //==============================================================================================

    /**
     * Builder for configuring and creating an associated camera source.
     */
    public static class Builder {
        private CameraSource mCameraSource = new CameraSource();

        /**
         * Creates a camera source builder with the supplied context and ZXing readers. Camera
         * preview images will be streamed to the associated readers upon starting the camera
         * source.
         */
        public Builder(Context context, QRCodeMultiReader mQrReader,
                       GenericMultipleBarcodeReader dataMatrixReader, PDF417Reader pdf417Reader) {
            if (context == null) {
                throw new IllegalArgumentException("No context supplied.");
            }
            mCameraSource.mQrReader = mQrReader;
            mCameraSource.matrixReader = dataMatrixReader;
            mCameraSource.pdf417Reader = pdf417Reader;
            mCameraSource.mContext = context;
            mCameraSource.hints.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
            mCameraSource.hints.put(DecodeHintType.POSSIBLE_FORMATS,
                    Arrays.asList(BarcodeFormat.DATA_MATRIX));
        }

        public Builder setFocusMode(int mode) {
            mCameraSource.mFocusMode = mode;
            return this;
        }

        public Builder setFlashMode(int mode) {
            mCameraSource.mFlashMode = mode;
            return this;
        }

        /**
         * Sets the camera to use (either {@link #CAMERA_FACING_BACK} or
         * {@link #CAMERA_FACING_FRONT}). Default: back facing.
         */
        public Builder setFacing(int facing) {
            if ((facing != CAMERA_FACING_BACK) && (facing != CAMERA_FACING_FRONT)) {
                throw new IllegalArgumentException("Invalid camera: " + facing);
            }
            mCameraSource.mFacing = facing;
            return this;
        }

        /**
         * Creates an instance of the camera source.
         */
        public CameraSource build() {
            mCameraSource.mFrameProcessor = mCameraSource.new FrameProcessingRunnable();
            return mCameraSource;
        }
    }

    //==============================================================================================
    // Bridge Functionality for the Camera2 API
    //==============================================================================================
    // AUTO FOCUS PART HAS BEEN OMITTED FOR SIMPLICITY.

    //==============================================================================================
    // Public
    //==============================================================================================

    /**
     * Starts a background thread and its {@link Handler}.
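     * <p/>
     * Camera2 callbacks in this class are dispatched onto this handler; as a sketch, work can
     * also be posted to the camera thread directly:
     * <pre>{@code
     * mBackgroundHandler.post(new Runnable() {
     *     public void run() {
     *         // runs on the "CameraBackground" thread
     *     }
     * });
     * }</pre>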
     */
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /**
     * Stops the background thread and its {@link Handler}.
     */
    private void stopBackgroundThread() {
        try {
            if (mBackgroundThread != null) {
                mBackgroundThread.quitSafely();
                mBackgroundThread.join();
                mBackgroundThread = null;
                mBackgroundHandler = null;
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Stops the camera and releases the resources of the camera and underlying readers.
     */
    public void release() {
        mFrameProcessor.release();
        stop();
    }

    public void setReaderType(int readerType) {
        mFrameProcessor.setReadType(readerType);
    }

    /**
     * Closes the camera and stops sending frames to the underlying frame detector.
     * <p/>
     * This camera source may be restarted again by calling {@link #start(AutoFitTextureView, int)}.
     * <p/>
     * Call {@link #release()} instead to completely shut down this camera source and release the
     * resources of the underlying readers.
     */
    public void stop() {
        try {
            mFrameProcessor.setActive(false);
            if (mProcessingThread != null) {
                try {
                    // Wait for the thread to complete to ensure that we can't have multiple threads
                    // executing at the same time (i.e., which would happen if we called start too
                    // quickly after stop).
                    mProcessingThread.join();
                } catch (InterruptedException e) {
                    Log.d(TAG, "Frame processing thread interrupted on release.");
                }
                mProcessingThread = null;
            }

            mCameraOpenCloseLock.acquire();
            if (null != mCaptureSession) {
                mCaptureSession.close();
                mCaptureSession = null;
            }
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
            if (null != mImageReaderPreview) {
                mImageReaderPreview.close();
                mImageReaderPreview = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
        } finally {
            mCameraOpenCloseLock.release();
            stopBackgroundThread();
            // Allow a subsequent start(...) call to reopen the camera, as documented above.
            cameraStarted = false;
            Log.d(TAG, "Finished closing camera.");
        }
    }

    public boolean isCamera2Native() {
        try {
            if (ContextCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA)
                    != PackageManager.PERMISSION_GRANTED) {
                return false;
            }
            manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
            mCameraId = manager.getCameraIdList()[mFacing];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraId);

            // Check the camera hardware level; LEGACY devices do not support camera2 natively.
            int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
            return (deviceLevel != CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY);
        } catch (CameraAccessException ex) {
            return false;
        }
    }

    /**
     * Opens the camera and starts sending preview frames to the underlying detector. The supplied
     * texture view is used for the preview so frames can be displayed to the user.
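     * <p/>
     * A typical lifecycle, as a sketch (the rotation value is assumed to come from the host
     * activity's display; exception handling omitted):
     * <pre>{@code
     * cameraSource.start(textureView, getWindowManager().getDefaultDisplay().getRotation());
     * // ... preview runs and frames are decoded ...
     * cameraSource.stop();    // may be restarted later with start(...)
     * cameraSource.release(); // final shutdown
     * }</pre>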
     *
     * @param textureView        the surface holder to use for the preview frames
     * @param displayOrientation the display orientation for a non-stretched preview
     * @throws IOException if the supplied texture view could not be used as the preview display
     */
    @RequiresPermission(Manifest.permission.CAMERA)
    public CameraSource start(AutoFitTextureView textureView, int displayOrientation) throws IOException {
        mDisplayOrientation = displayOrientation;
        if (ContextCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA)
                == PackageManager.PERMISSION_GRANTED) {
            if (cameraStarted) {
                return this;
            }
            cameraStarted = true;
            startBackgroundThread();

            mProcessingThread = new Thread(mFrameProcessor);
            mFrameProcessor.setActive(true);
            mProcessingThread.start();

            mTextureView = textureView;
            if (mTextureView.isAvailable()) {
                openCamera(mTextureView.getWidth(), mTextureView.getHeight());
            }
        }
        return this;
    }

    /**
     * Returns the preview size that is currently in use by the underlying camera.
     */
    public Size getPreviewSize() {
        return mPreviewSize;
    }

    /**
     * Returns the selected camera; one of {@link #CAMERA_FACING_BACK} or
     * {@link #CAMERA_FACING_FRONT}.
     */
    public int getCameraFacing() {
        return mFacing;
    }

    private Size getBestAspectPictureSize(android.util.Size[] supportedPictureSizes) {
        float targetRatio = Utils.getScreenRatio(mContext);
        Size bestSize = null;
        TreeMap<Double, List<android.util.Size>> diffs = new TreeMap<>();

        for (android.util.Size size : supportedPictureSizes) {
            float ratio = (float) size.getWidth() / size.getHeight();
            double diff = Math.abs(ratio - targetRatio);
            if (diff < maxRatioTolerance) {
                if (diffs.containsKey(diff)) {
                    // Add the size to the existing list for this ratio difference.
                    diffs.get(diff).add(size);
                } else {
                    List<android.util.Size> newList = new ArrayList<>();
                    newList.add(size);
                    diffs.put(diff, newList);
                }
            }
        }

        // diffs now contains all of the usable sizes, keyed by how far their aspect ratio is
        // from the screen's. Pick the largest size among those candidates.
        for (Map.Entry<Double, List<android.util.Size>> entry : diffs.entrySet()) {
            List<android.util.Size> entries = entry.getValue();
            for (android.util.Size s : entries) {
                if (bestSize == null) {
                    bestSize = new Size(s.getWidth(), s.getHeight());
                } else if (bestSize.getWidth() < s.getWidth() || bestSize.getHeight() < s.getHeight()) {
                    bestSize = new Size(s.getWidth(), s.getHeight());
                }
            }
        }
        return bestSize;
    }

    /**
     * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
     * is at least as large as the respective texture view size, and that is at most as large as
     * the respective max size, and whose aspect ratio matches with the specified value. If no
     * such size exists, choose the largest one that is at most as large as the respective max
     * size, and whose aspect ratio matches with the specified value.
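     * <p/>
     * A worked example (illustrative numbers): with choices {1920x1080, 1280x720, 640x480}, a
     * 1280x720 texture view, maximums of 1920x1080, and a 16:9 aspect ratio, the 640x480 option
     * is filtered out by the ratio check, both remaining options are "big enough", and the
     * smaller of them, 1280x720, is returned.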
     *
     * @param choices           The list of sizes that the camera supports for the intended
     *                          output class
     * @param textureViewWidth  The width of the texture view relative to sensor coordinate
     * @param textureViewHeight The height of the texture view relative to sensor coordinate
     * @param maxWidth          The maximum width that can be chosen
     * @param maxHeight         The maximum height that can be chosen
     * @param aspectRatio       The aspect ratio
     * @return The optimal {@code Size}, or an arbitrary one if none were big enough
     */
    private static Size chooseOptimalSize(Size[] choices, int textureViewWidth, int textureViewHeight,
                                          int maxWidth, int maxHeight, Size aspectRatio) {
        // Collect the supported resolutions that are at least as big as the preview Surface
        List<Size> bigEnough = new ArrayList<>();
        // Collect the supported resolutions that are smaller than the preview Surface
        List<Size> notBigEnough = new ArrayList<>();
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                    && option.getHeight() == option.getWidth() * h / w) {
                if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                    bigEnough.add(option);
                } else {
                    notBigEnough.add(option);
                }
            }
        }

        // Pick the smallest of those big enough. If there is no one big enough, pick the
        // largest of those not big enough.
        if (bigEnough.size() > 0) {
            return Collections.min(bigEnough, new CompareSizesByArea());
        } else if (notBigEnough.size() > 0) {
            return Collections.max(notBigEnough, new CompareSizesByArea());
        } else {
            Log.e(TAG, "Couldn't find any suitable preview size");
            return choices[0];
        }
    }

    /**
     * Compares two {@code Size}s based on their areas.
     */
    private static class CompareSizesByArea implements Comparator<Size> {
        @Override
        public int compare(Size lhs, Size rhs) {
            // We cast here to ensure the multiplications won't overflow
            return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                    - (long) rhs.getWidth() * rhs.getHeight());
        }
    }

    private void openCamera(int width, int height) {
        setUpCameraOutputs(width, height);
    }

    /**
     * Configures the necessary {@link android.graphics.Matrix} transformation to `mTextureView`.
     * This method should be called after the camera preview size is determined in
     * setUpCameraOutputs and also the size of `mTextureView` is fixed.
     *
     * @param viewWidth  The width of `mTextureView`
     * @param viewHeight The height of `mTextureView`
     */
    private void configureTransform(int viewWidth, int viewHeight) {
        if (null == mTextureView || null == mPreviewSize) {
            return;
        }
        int rotation = mDisplayOrientation;
        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();
        if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max(
                    (float) viewHeight / mPreviewSize.getHeight(),
                    (float) viewWidth / mPreviewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);
            matrix.postRotate(90 * (rotation - 2), centerX, centerY);
        } else if (Surface.ROTATION_180 == rotation) {
            matrix.postRotate(180, centerX, centerY);
        }
        mTextureView.setTransform(matrix);
    }

    /**
     * Sets up member variables related to camera.
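     * <p/>
     * The dimension-swap decision below can be summarized as follows (a sketch of the standard
     * camera2 rule, matching the switch statement in this method):
     * <pre>
     * display ROTATION_0/180  + sensor orientation 90/270 -> swap width and height
     * display ROTATION_90/270 + sensor orientation 0/180  -> swap width and height
     * anything else                                       -> no swap
     * </pre>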
     *
     * @param width  The width of available size for camera preview
     * @param height The height of available size for camera preview
     */
    private void setUpCameraOutputs(int width, int height) {
        try {
            if (ContextCompat.checkSelfPermission(mContext, Manifest.permission.CAMERA)
                    != PackageManager.PERMISSION_GRANTED) {
                return;
            }
            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                throw new RuntimeException("Time out waiting to lock camera opening.");
            }
            if (manager == null) {
                manager = (CameraManager) mContext.getSystemService(Context.CAMERA_SERVICE);
            }
            mCameraId = manager.getCameraIdList()[mFacing];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraId);

            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                return;
            }

            // For still image captures, we use the largest available size.
            Size largest = getBestAspectPictureSize(map.getOutputSizes(ImageFormat.JPEG));

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = mDisplayOrientation;
            //noinspection ConstantConditions
            int mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point(Utils.getScreenWidth(mContext), Utils.getScreenHeight(mContext));
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            Size[] outputSizes = Utils.sizeToSize(map.getOutputSizes(SurfaceTexture.class));
            mPreviewSize = chooseOptimalSize(outputSizes, rotatedPreviewWidth, rotatedPreviewHeight,
                    maxPreviewWidth, maxPreviewHeight, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = mDisplayOrientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            // control.aeTargetFpsRange (queried for reference; not currently applied to the request)
            Range<Integer>[] availableFpsRange = characteristics
                    .get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);

            configureTransform(width, height);
            manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2 API is used but not supported on the
            // device this code runs on.
            Log.d(TAG, "Camera Error: " + e.getMessage());
        }
    }

    /**
     * Creates a new {@link CameraCaptureSession} for camera preview.
     */
    private void createCameraPreviewSession() {
        try {
            SurfaceTexture texture = mTextureView.getSurfaceTexture();
            assert texture != null;

            // We configure the size of default buffer to be the size of camera preview we want.
            texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

            mImageReaderPreview = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                    ImageFormat.YUV_420_888, 1);
            mImageReaderPreview.setOnImageAvailableListener(mOnPreviewAvailableListener, mBackgroundHandler);

            // This is the output Surface we need to start preview.
            Surface surface = new Surface(texture);

            // We set up a CaptureRequest.Builder with the output Surface.
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(surface);
            mPreviewRequestBuilder.addTarget(mImageReaderPreview.getSurface());

            // Here, we create a CameraCaptureSession for camera preview.
            mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReaderPreview.getSurface()),
                    new CameraCaptureSession.StateCallback() {

                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                            // The camera is already closed
                            if (null == mCameraDevice) {
                                return;
                            }

                            // When the session is ready, we start displaying the preview.
                            mCaptureSession = cameraCaptureSession;
                            try {
                                // Auto focus should be continuous for camera preview.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, mFocusMode);
                                if (mFlashSupported) {
                                    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, mFlashMode);
                                }

                                // Finally, we start displaying the camera preview.
                                mPreviewRequest = mPreviewRequestBuilder.build();
                                mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, mBackgroundHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                            Log.d(TAG, "Configuration failed!");
                        }
                    }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * This runnable controls access to the underlying receiver, calling it to process frames when
     * available from the camera. This is designed to run detection on frames as fast as possible
     * (i.e., without unnecessary context switching or waiting on the next frame).
     * <p/>
     * While detection is running on a frame, new frames may be received from the camera. As these
     * frames come in, the most recent frame is held onto as pending. As soon as detection and its
     * associated processing are done for the previous frame, detection on the most recently
     * received frame will immediately start on the same thread.
     */
    private class FrameProcessingRunnable implements Runnable {
        // This lock guards all of the member variables below.
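        //
        // A condensed sketch of the handoff between the two threads involved (producer = the
        // ImageReader callback on the background handler, consumer = mProcessingThread):
        //
        //   setNextFrame(image): synchronized (mLock) { mPendingFrameData = <bitmap>; mLock.notifyAll(); }
        //   run():               synchronized (mLock) { while (mActive && mPendingFrameData == null) mLock.wait(); ... }
        //
        // Only the most recent frame is kept; an older unprocessed frame is simply replaced.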
        private final Object mLock = new Object();
        private boolean mActive = true;
        private int readType = 0; // 0 is qr, 1 is data matrix, 2 is pdf417
        private BinaryBitmap mPendingFrameData;

        FrameProcessingRunnable() {
        }

        /**
         * Releases the underlying receiver. This is only safe to do after the associated thread
         * has completed, which is managed in camera source's release method above.
         */
        @SuppressLint("Assert")
        void release() {
            assert (mProcessingThread.getState() == State.TERMINATED);
        }

        public void setReadType(int readType) {
            this.readType = readType;
        }

        /**
         * Marks the runnable as active/not active. Signals any blocked threads to continue.
         */
        void setActive(boolean active) {
            synchronized (mLock) {
                mActive = active;
                mLock.notifyAll();
            }
        }

        /**
         * Sets the frame data received from the camera.
         */
        void setNextFrame(Image mImage) {
            synchronized (mLock) {
                // Drop any unprocessed pending frame; only the most recent one is kept.
                mPendingFrameData = null;

                // Only the Y (luminance) plane is needed for ZXing's PlanarYUVLuminanceSource.
                ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
                byte[] data = new byte[buffer.remaining()];
                buffer.get(data);
                int width = mImage.getWidth();
                int height = mImage.getHeight();

                // Rotate the luminance plane 90 degrees clockwise so barcodes are decoded in
                // portrait orientation: source pixel (x, y) maps to (height - 1 - y, x).
                byte[] rotatedData = new byte[data.length];
                for (int y = 0; y < height; y++) {
                    for (int x = 0; x < width; x++) {
                        rotatedData[x * height + height - y - 1] = data[x + y * width];
                    }
                }

                // The rotation swaps the frame's width and height.
                int tmp = width;
                width = height;
                height = tmp;

                PlanarYUVLuminanceSource source = new PlanarYUVLuminanceSource(rotatedData,
                        width, height, 0, 0, width, height, false);
                mPendingFrameData = new BinaryBitmap(new HybridBinarizer(source));

                // Notify the processor thread if it is waiting on the next frame (see below).
                mLock.notifyAll();
            }
        }

        /**
         * As long as the processing thread is active, this executes detection on frames
         * continuously. The next pending frame is either immediately available or hasn't been
         * received yet. Once it is available, we transfer the frame info to local variables and
         * run detection on that frame. It immediately loops back for the next frame without
         * pausing.
         * <p/>
         * If detection takes longer than the time in between new frames from the camera, this will
         * mean that this loop will run without ever waiting on a frame, avoiding any context
         * switching or frame acquisition time latency.
         * <p/>
         * If you find that this is using more CPU than you'd like, you should probably decrease
         * the camera's preview frame rate to allow for some idle time in between frames.
         */
        @Override
        public void run() {
            Result[] multiRawResults = null;
            while (true) {
                synchronized (mLock) {
                    while (mActive && (mPendingFrameData == null)) {
                        try {
                            // Wait for the next frame to be received from the camera, since we
                            // don't have it yet.
                            mLock.wait();
                        } catch (InterruptedException e) {
                            Log.d(TAG, "Frame processing loop terminated.", e);
                            return;
                        }
                    }

                    if (!mActive) {
                        // Exit the loop once this camera source is stopped or released. We check
                        // this here, immediately after the wait() above, to handle the case where
                        // setActive(false) had been called, triggering the termination of this
                        // loop.
                        return;
                    }

                    try {
                        switch (readType) {
                            default:
                            case 0:
                                multiRawResults = mQrReader.decodeMultiple(mPendingFrameData);
                                break;
                            case 1:
                                multiRawResults = matrixReader.decodeMultiple(mPendingFrameData, hints);
                                break;
                            case 2:
                                multiRawResults = pdf417Reader.decodeMultiple(mPendingFrameData);
                                break;
                        }
                    } catch (NotFoundException e) {
                        // No barcode was found in this frame; move on to the next one.
                    }

                    // We need to clear mPendingFrameData to ensure that this buffer isn't
                    // recycled back to the camera before we are done using that data.
                    mPendingFrameData = null;
                }

                // The code below needs to run outside of synchronization, because this will allow
                // the camera to add pending frame(s) while we are running detection on the current
                // frame.
                onMultiCodeRead(multiRawResults);
                multiRawResults = null;
            }
        }
    }

    private void onMultiCodeRead(final Result[] rawResult) {
        if (rawResult != null && rawResult.length > 0 && rawResult[0] != null) {
            //Log.d(TAG, "onMultiCodeRead() called with: rawResult = [" + rawResult[0].getBarcodeFormat().toString() + "]");
            EventBus.getDefault().post(new MultiResultEvent(rawResult, imageWidth, imageHeight));
        }
    }
}