Java tutorial: a preview-only Camera2 pipeline that scans each frame for barcodes with ZXing (MyCameraManager.java)
/*
 * Modified by Tarun Garg for Manufacton Inc. The original file was altered to send the camera
 * preview only to an ImageReader and to scan for barcodes every time a new image becomes
 * available on that ImageReader.
 */
/*
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.manufacton.cordova;

import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

import com.google.zxing.*;
import com.google.zxing.common.HybridBinarizer;

public class MyCameraManager {

    /**
     * Conversion from screen rotation to JPEG orientation.
     */
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    private static final int REQUEST_CAMERA_PERMISSION = 1;
    private static final String FRAGMENT_DIALOG = "dialog";

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    /**
     * Tag for the {@link Log}.
     */
    private static final String TAG = "MyCameraManager";

    /**
     * Camera state: Showing camera preview.
     */
    private static final int STATE_PREVIEW = 0;
    /**
     * Camera state: Waiting for the focus to be locked.
     */
    private static final int STATE_WAITING_LOCK = 1;

    /**
     * Camera state: Waiting for the exposure to be in the precapture state.
     */
    private static final int STATE_WAITING_PRECAPTURE = 2;

    /**
     * Camera state: Waiting for the exposure state to be something other than precapture.
     */
    private static final int STATE_WAITING_NON_PRECAPTURE = 3;

    /**
     * Camera state: Picture was taken.
     */
    private static final int STATE_PICTURE_TAKEN = 4;

    /**
     * Max preview width that is guaranteed by Camera2 API
     */
    private static final int MAX_PREVIEW_WIDTH = 1920;

    /**
     * Max preview height that is guaranteed by Camera2 API
     */
    private static final int MAX_PREVIEW_HEIGHT = 1080;

    /**
     * ID of the current {@link CameraDevice}.
     */
    private String mCameraId;

    /**
     * An {@link AutoFitTextureView} for camera preview.
     */
    // private AutoFitTextureView mTextureView;

    /**
     * A {@link CameraCaptureSession} for camera preview.
     */
    private CameraCaptureSession mCaptureSession;

    /**
     * A reference to the opened {@link CameraDevice}.
     */
    private CameraDevice mCameraDevice;

    /**
     * The {@link android.util.Size} of camera preview.
     */
    private Size mPreviewSize;

    private Activity activity;

    public static byte[] lastFrameBytes = null;

    /**
     * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state.
     */
    private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {

        @Override
        public void onOpened(@NonNull CameraDevice cameraDevice) {
            // This method is called when the camera is opened. We start camera preview here.
            Log.d(TAG, "CAMERA OPENED");
            mCameraOpenCloseLock.release();
            mCameraDevice = cameraDevice;
            Log.d(TAG, "OPENING PREVIEW SESSION");
            createCameraPreviewSession();
        }

        @Override
        public void onDisconnected(@NonNull CameraDevice cameraDevice) {
            Log.d(TAG, "onDisconnected");
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
        }

        @Override
        public void onError(@NonNull CameraDevice cameraDevice, int error) {
            Log.d(TAG, "onError");
            mCameraOpenCloseLock.release();
            cameraDevice.close();
            mCameraDevice = null;
            // Activity activity = getActivity();
            // if (null != activity) {
            //     activity.finish();
            // }
        }
    };

    /**
     * An additional thread for running tasks that shouldn't block the UI.
     */
    private HandlerThread mBackgroundThread;

    /**
     * A {@link Handler} for running tasks in the background.
     */
    private Handler mBackgroundHandler;

    /**
     * An {@link ImageReader} that handles still image capture.
     */
    private ImageReader mImageReader;

    /**
     * This is the output file for our picture.
     */
    private File mFile;

    public static String qrCode = null;
    /**
     * This is a callback object for the {@link ImageReader}. "onImageAvailable" will be called
     * when a still image is ready to be saved.
     */
    private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
            = new ImageReader.OnImageAvailableListener() {

        @Override
        public void onImageAvailable(ImageReader reader) {
            if (reader == null) {
                return;
            }
            Image image = reader.acquireLatestImage();
            if (image == null) {
                return;
            }
            ByteBuffer buffer = image.getPlanes()[0].getBuffer();
            lastFrameBytes = new byte[buffer.remaining()];
            buffer.get(lastFrameBytes);
            image.close();

            final Bitmap bMap = BitmapFactory.decodeByteArray(lastFrameBytes, 0, lastFrameBytes.length);
            if (bMap == null) {
                // decodeByteArray() returns null when the JPEG cannot be decoded; skip this frame.
                return;
            }
            int[] intArray = new int[bMap.getWidth() * bMap.getHeight()];
            // Copy pixel data from the Bitmap into the 'intArray' array.
            bMap.getPixels(intArray, 0, bMap.getWidth(), 0, 0, bMap.getWidth(), bMap.getHeight());

            LuminanceSource source = new RGBLuminanceSource(bMap.getWidth(), bMap.getHeight(), intArray);
            BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
            Reader qrReader = new MultiFormatReader();
            Log.d(TAG, "HAVE A NEW IMAGE");
            try {
                Result result = qrReader.decode(bitmap);
                qrCode = result.getText();
                Log.d(TAG, qrCode);
            } catch (NotFoundException e) {
                // e.printStackTrace();
                return;
            } catch (ChecksumException e) {
                e.printStackTrace();
                return;
            } catch (FormatException e) {
                e.printStackTrace();
                return;
            }
            // mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile));
        }
    };

    /**
     * {@link CaptureRequest.Builder} for the camera preview
     */
    private CaptureRequest.Builder mPreviewRequestBuilder;

    /**
     * {@link CaptureRequest} generated by {@link #mPreviewRequestBuilder}
     */
    private CaptureRequest mPreviewRequest;

    /**
     * The current camera state for taking pictures.
     *
     * @see #mCaptureCallback
     */
    private int mState = STATE_PREVIEW;

    /**
     * A {@link Semaphore} to prevent the app from exiting before closing the camera.
     */
    private Semaphore mCameraOpenCloseLock = new Semaphore(1);

    /**
     * Whether the current camera device supports Flash or not.
     */
    private boolean mFlashSupported;

    /**
     * Orientation of the camera sensor
     */
    private int mSensorOrientation;

    /**
     * A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture.
     */
    private CameraCaptureSession.CaptureCallback mCaptureCallback
            = new CameraCaptureSession.CaptureCallback() {

        private void process(CaptureResult result) {
            Log.d(TAG, "have a camera result");
        }
    };
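    // Editor's sketch, not part of the original change: the listener above asks
    // MultiFormatReader to try every barcode family on every frame. If only QR codes are
    // expected, decode hints can restrict the search and cut per-frame work. The helper name
    // below is illustrative only and is not wired into mOnImageAvailableListener.
    private static Result decodeQrOnly(BinaryBitmap bitmap) throws NotFoundException {
        java.util.Map<DecodeHintType, Object> hints = new java.util.EnumMap<>(DecodeHintType.class);
        hints.put(DecodeHintType.POSSIBLE_FORMATS,
                Collections.singletonList(BarcodeFormat.QR_CODE));
        // MultiFormatReader#decode(BinaryBitmap, Map) throws NotFoundException when nothing is found.
        return new MultiFormatReader().decode(bitmap, hints);
    }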
    /**
     * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
     * is at least as large as the respective texture view size, and that is at most as large as
     * the respective max size, and whose aspect ratio matches with the specified value. If such
     * size doesn't exist, choose the largest one that is at most as large as the respective max
     * size, and whose aspect ratio matches with the specified value.
     *
     * @param choices           The list of sizes that the camera supports for the intended
     *                          output class
     * @param textureViewWidth  The width of the texture view relative to sensor coordinate
     * @param textureViewHeight The height of the texture view relative to sensor coordinate
     * @param maxWidth          The maximum width that can be chosen
     * @param maxHeight         The maximum height that can be chosen
     * @param aspectRatio       The aspect ratio
     * @return The optimal {@code Size}, or an arbitrary one if none were big enough
     */
    private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
            int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {

        // Collect the supported resolutions that are at least as big as the preview Surface
        List<Size> bigEnough = new ArrayList<>();
        // Collect the supported resolutions that are smaller than the preview Surface
        List<Size> notBigEnough = new ArrayList<>();
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
                    option.getHeight() == option.getWidth() * h / w) {
                if (option.getWidth() >= textureViewWidth &&
                        option.getHeight() >= textureViewHeight) {
                    bigEnough.add(option);
                } else {
                    notBigEnough.add(option);
                }
            }
        }

        // Pick the smallest of those big enough. If there is no one big enough, pick the
        // largest of those not big enough.
        if (bigEnough.size() > 0) {
            return Collections.min(bigEnough, new CompareSizesByArea());
        } else if (notBigEnough.size() > 0) {
            return Collections.max(notBigEnough, new CompareSizesByArea());
        } else {
            Log.e(TAG, "Couldn't find any suitable preview size");
            return choices[0];
        }
    }

    // @Override
    public void onResume() {
        // super.onResume();
        startBackgroundThread();
    }

    // @Override
    public void onPause() {
        closeCamera();
        stopBackgroundThread();
        // super.onPause();
    }

    /**
     * Sets up member variables related to camera.
     *
     * @param width  The width of available size for camera preview
     * @param height The height of available size for camera preview
     */
    private void setUpCameraOutputs(int width, int height) {
        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

                // We don't use a front facing camera in this sample.
                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }

                StreamConfigurationMap map = characteristics
                        .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                if (map == null) {
                    continue;
                }

                // For still image captures, we use the largest available size.
                Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                        new CompareSizesByArea());

                // Find out if we need to swap dimension to get the preview size relative to sensor
                // coordinate.
                int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
                //noinspection ConstantConditions
                mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
                boolean swappedDimensions = false;
                switch (displayRotation) {
                    case Surface.ROTATION_0:
                    case Surface.ROTATION_180:
                        if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                            swappedDimensions = true;
                        }
                        break;
                    case Surface.ROTATION_90:
                    case Surface.ROTATION_270:
                        if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                            swappedDimensions = true;
                        }
                        break;
                    default:
                        Log.e(TAG, "Display rotation is invalid: " + displayRotation);
                }

                Point displaySize = new Point();
                activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
                int rotatedPreviewWidth = width;
                int rotatedPreviewHeight = height;
                int maxPreviewWidth = displaySize.x;
                int maxPreviewHeight = displaySize.y;

                if (swappedDimensions) {
                    rotatedPreviewWidth = height;
                    rotatedPreviewHeight = width;
                    maxPreviewWidth = displaySize.y;
                    maxPreviewHeight = displaySize.x;
                }

                if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                    maxPreviewWidth = MAX_PREVIEW_WIDTH;
                }

                if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                    maxPreviewHeight = MAX_PREVIEW_HEIGHT;
                }

                // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
                // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
                // garbage capture data.
                int cameraLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
                if (cameraLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
                        && android.os.Build.VERSION.SDK_INT == android.os.Build.VERSION_CODES.LOLLIPOP) {
                    Log.d(TAG, "LEGACY CAMERA DETECTED & OLD OS DETECTED");
                    Size[] sizes = map.getOutputSizes(SurfaceTexture.class);
                    for (Size option : sizes) {
                        Log.d(TAG, "available : WIDTH: " + option.getWidth()
                                + " HEIGHT: " + option.getHeight());
                    }
                    mImageReader = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 2);
                } else {
                    Log.d(TAG, "LEGACY CAMERA NOT DETECTED");
                    mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                            rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
                            maxPreviewHeight, largest);
                    mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(),
                            mPreviewSize.getHeight(), ImageFormat.JPEG, 2);
                    Log.d(TAG, "For mPreviewSize: WIDTH: " + mPreviewSize.getWidth()
                            + " HEIGHT: " + mPreviewSize.getHeight());
                }
                mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

                // We fit the aspect ratio of TextureView to the size of preview we picked.
                // int orientation = getResources().getConfiguration().orientation;
                // if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                //     mTextureView.setAspectRatio(
                //             mPreviewSize.getWidth(), mPreviewSize.getHeight());
                // } else {
                //     mTextureView.setAspectRatio(
                //             mPreviewSize.getHeight(), mPreviewSize.getWidth());
                // }

                // Check if the flash is supported.
                Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
                mFlashSupported = available == null ? false : available;

                mCameraId = cameraId;
                return;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (NullPointerException e) {
            e.printStackTrace();
        }
    }
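    // Editor's sketch, not part of the original change: decoding every JPEG frame into a Bitmap
    // (as mOnImageAvailableListener does) is relatively heavy. A common alternative is to request
    // YUV_420_888 frames from the ImageReader and hand the luminance (Y) plane straight to ZXing.
    // The helper below only illustrates that idea; it assumes a YUV_420_888 image whose Y-plane
    // row stride equals the image width, and it is not wired into the code above.
    private Result decodeLuminance(Image image) throws NotFoundException {
        Image.Plane yPlane = image.getPlanes()[0];   // Y plane of a YUV_420_888 image
        ByteBuffer yBuffer = yPlane.getBuffer();
        byte[] yBytes = new byte[yBuffer.remaining()];
        yBuffer.get(yBytes);
        LuminanceSource source = new PlanarYUVLuminanceSource(yBytes,
                image.getWidth(), image.getHeight(),
                0, 0, image.getWidth(), image.getHeight(), false);
        return new MultiFormatReader().decode(new BinaryBitmap(new HybridBinarizer(source)));
    }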
    /**
     * Opens the camera specified by {@link #mCameraId}.
     */
    public void openCamera(int width, int height) {
        Log.d(TAG, "OPENING THREAD");
        qrCode = null;
        startBackgroundThread();
        Log.d(TAG, "OPENING CAMERA");
        if (ContextCompat.checkSelfPermission(activity, Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            // requestCameraPermission();
            return;
        }
        Log.d(TAG, "SETTING OUTPUTS");
        setUpCameraOutputs(width, height);
        Log.d(TAG, "OUTPUTS SET");
        // configureTransform(width, height);
        // Activity activity = getActivity();
        CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
        Log.d(TAG, "GOT MANAGER");
        try {
            if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
                throw new RuntimeException("Time out waiting to lock camera opening.");
            }
            manager.openCamera(mCameraId, mStateCallback, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
        }
    }

    /**
     * Closes the current {@link CameraDevice}.
     */
    private void closeCamera() {
        try {
            mCameraOpenCloseLock.acquire();
            if (null != mCaptureSession) {
                mCaptureSession.abortCaptures();
                mCaptureSession.close();
                mCaptureSession = null;
            }
            if (null != mCameraDevice) {
                mCameraDevice.close();
                mCameraDevice = null;
            }
            if (null != mImageReader) {
                mImageReader.close();
                mImageReader = null;
            }
        } catch (InterruptedException e) {
            throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
        } catch (CameraAccessException e) {
            throw new RuntimeException("Error while trying to access the camera during close.", e);
        } finally {
            mCameraOpenCloseLock.release();
        }
    }

    /**
     * Starts a background thread and its {@link Handler}.
     */
    private void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("CameraBackground");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    /**
     * Stops the background thread and its {@link Handler}.
     */
    private void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates a new {@link CameraCaptureSession} for camera preview.
     */
    private void createCameraPreviewSession() {
        try {
            // We set up a CaptureRequest.Builder with the output Surface.
            mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
            Log.d(TAG, "OPENING CAPTURE SESSION");

            // Here, we create a CameraCaptureSession for camera preview.
            mCameraDevice.createCaptureSession(Arrays.asList(mImageReader.getSurface()),
                    new CameraCaptureSession.StateCallback() {

                        @Override
                        public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                            // The camera is already closed
                            if (null == mCameraDevice) {
                                return;
                            }
                            Log.d(TAG, "IN onConfigured");

                            // When the session is ready, we start displaying the preview.
                            mCaptureSession = cameraCaptureSession;
                            try {
                                // Auto focus should be continuous for camera preview.
                                mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                                        CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                                int displayRotation =
                                        activity.getWindowManager().getDefaultDisplay().getRotation();
                                mPreviewRequestBuilder.set(CaptureRequest.JPEG_ORIENTATION,
                                        ORIENTATIONS.get(displayRotation));
                                // Flash is automatically enabled when necessary.
                                // setAutoFlash(mPreviewRequestBuilder);

                                // Finally, we start displaying the camera preview.
                                mPreviewRequest = mPreviewRequestBuilder.build();
                                mCaptureSession.setRepeatingRequest(mPreviewRequest,
                                        mCaptureCallback, mBackgroundHandler);
                            } catch (CameraAccessException e) {
                                e.printStackTrace();
                            }
                        }

                        @Override
                        public void onConfigureFailed(
                                @NonNull CameraCaptureSession cameraCaptureSession) {
                            Log.d(TAG, "onConfigured FAILED");
                        }
                    }, null
            );
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Retrieves the JPEG orientation from the specified screen rotation.
     *
     * @param rotation The screen rotation.
     * @return The JPEG orientation (one of 0, 90, 180, and 270)
     */
    private int getOrientation(int rotation) {
        // Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X)
        // We have to take that into account and rotate JPEG properly.
        // For devices with orientation of 90, we simply return our mapping from ORIENTATIONS.
        // For devices with orientation of 270, we need to rotate the JPEG 180 degrees.
        return (ORIENTATIONS.get(rotation) + mSensorOrientation + 270) % 360;
    }

    private void setAutoFlash(CaptureRequest.Builder requestBuilder) {
        if (mFlashSupported) {
            requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                    CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
        }
    }

    /**
     * Compares two {@code Size}s based on their areas.
     */
    static class CompareSizesByArea implements Comparator<Size> {

        @Override
        public int compare(Size lhs, Size rhs) {
            // We cast here to ensure the multiplications won't overflow
            return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
                    (long) rhs.getWidth() * rhs.getHeight());
        }
    }

    public MyCameraManager(Activity activity1) {
        activity = activity1;
    }
}
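The file above does not show how MyCameraManager is driven. As a rough usage sketch (the
surrounding plugin code, the chosen preview size, and the polling approach are assumptions, not
part of the file above), a caller such as a Cordova plugin might do something like this:

    // Hypothetical host code; only MyCameraManager's public API above is real.
    MyCameraManager cameraManager = new MyCameraManager(cordova.getActivity());
    cameraManager.openCamera(640, 480);   // returns without opening if CAMERA permission is missing

    // Later, e.g. on a timer or before returning a result to JavaScript:
    if (MyCameraManager.qrCode != null) {
        String scanned = MyCameraManager.qrCode;   // last barcode decoded from the preview stream
        // ... hand the value back to the WebView ...
    }

    // When scanning is finished:
    cameraManager.onPause();              // closes the camera and stops the background thread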