Example usage for android.hardware.camera2 CameraCharacteristics get

List of usage examples for android.hardware.camera2 CameraCharacteristics get

Introduction

On this page you can find example usage for android.hardware.camera2 CameraCharacteristics get.

Prototype

@Nullable
public <T> T get(Key<T> key) 

Source Link

Document

Get a camera characteristics field value.
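
For example, a minimal sketch of a typical call (assuming manager is a CameraManager obtained from getSystemService and cameraId is an ID returned by getCameraIdList) might look like this:

try {
    CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
    // get() returns null when the queried key is not supported by this camera device,
    // so a boxed type and a null check are used here.
    Integer sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    if (sensorOrientation != null) {
        Log.d(TAG, "Sensor orientation: " + sensorOrientation);
    }
} catch (CameraAccessException e) {
    e.printStackTrace();
}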

Usage

From source file:MainActivity.java

protected void takePicture(View view) {
    if (null == mCameraDevice) {
        return;
    }
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
        StreamConfigurationMap configurationMap = characteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (configurationMap == null)
            return;
        Size largest = Collections.max(Arrays.asList(configurationMap.getOutputSizes(ImageFormat.JPEG)),
                new CompareSizesByArea());
        ImageReader reader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(2);
        outputSurfaces.add(reader.getSurface());
        outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(reader.getSurface());
        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = null;
                try {
                    image = reader.acquireLatestImage();
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.capacity()];
                    buffer.get(bytes);
                    OutputStream output = new FileOutputStream(getPictureFile());
                    output.write(bytes);
                    output.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (image != null) {
                        image.close();
                    }
                }
            }
        };
        HandlerThread thread = new HandlerThread("CameraPicture");
        thread.start();
        final Handler backgroundHandler = new Handler(thread.getLooper());
        reader.setOnImageAvailableListener(readerListener, backgroundHandler);
        final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                super.onCaptureCompleted(session, request, result);
                Toast.makeText(MainActivity.this, "Picture Saved", Toast.LENGTH_SHORT).show();
                startPreview(session);
            }
        };
        mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    session.capture(captureBuilder.build(), captureCallback, backgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file:com.example.android.mediarecorder.MainActivity.java

private boolean preparePreview() {
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
        try {
            CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
            String id = manager.getCameraIdList()[0];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(id);
            sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        } catch (CameraAccessException e) {
            Log.e("ERROR", "Cannot access Camera2 API");
            e.printStackTrace();
        }
    }

    Log.i(TAG, "Preparing Camera Preview");
    // BEGIN_INCLUDE (configure_preview)
    //        mCamera = CameraHelper.getDefaultCameraInstance();

    // We need to make sure that our preview and recording video size are supported by the
    // camera. Query camera to find all the sizes and choose the optimal size given the
    // dimensions of our preview surface.
    Camera.Parameters parameters = mCamera.getParameters();
    List<Camera.Size> mSupportedPreviewSizes = parameters.getSupportedPreviewSizes();
    List<Camera.Size> mSupportedVideoSizes = parameters.getSupportedVideoSizes();
    Camera.Size optimalSize = CameraHelper.getOptimalVideoSize(mSupportedVideoSizes, mSupportedPreviewSizes,
            mPreview.getWidth(), mPreview.getHeight());

    // Use the same size for recording profile.
    CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
    profile.videoFrameWidth = optimalSize.width;
    profile.videoFrameHeight = optimalSize.height;

    // likewise for the camera object itself.
    parameters.setPreviewSize(profile.videoFrameWidth, profile.videoFrameHeight);
    mCamera.setParameters(parameters);
    try {
        // Requires API level 11+, For backward compatibility use {@link setPreviewDisplay}
        // with {@link SurfaceView}
        //                mCamera.setPreviewTexture(mPreview.getSurfaceTexture());

        mHolder = mPreview.getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        //            mHolder.setKeepScreenOn(true);

        setCameraDisplayOrientation(0);
        mCamera.setPreviewDisplay(mPreview.getHolder());
        mCamera.startPreview();
        Log.i(TAG, "Setting Preview Display");
    } catch (IOException e) {
        Log.e(TAG, "Surface texture is unavailable or unsuitable" + e.getMessage());
        return false;
    }
    // END_INCLUDE (configure_preview)
    return true;
}

From source file:com.google.android.apps.watchme.StreamerActivity.java

private void setupCamera(int width, int height) {
    CameraManager cameraManager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraIDs : cameraManager.getCameraIdList()) {
            CameraCharacteristics cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraIDs);
            if (cameraCharacteristics
                    .get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }
            StreamConfigurationMap map = cameraCharacteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            //@edsonAndrade
            // Like the camera API version of the app, I am forcing the app into landscape mode
            int deviceOrientation = getWindowManager().getDefaultDisplay().getRotation();
            int totalRotation = sensorToDeviceRotation(cameraCharacteristics, deviceOrientation);
            boolean swapRotation = totalRotation == 90 || totalRotation == 270;
            int rotatedWidth = width;
            int rotatedHeight = height;
            if (swapRotation) {
                rotatedHeight = width;
                rotatedWidth = height;
            }
            previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedWidth,
                    rotatedHeight);
            cameraId = cameraIDs;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}

From source file:wisc.drivesense.vediorecorder.CameraFragment.java

/**
 * Tries to open a {@link CameraDevice}. The result is listened to by `mStateCallback`.
 */
private void openCamera(int width, int height) {

    final Activity activity = getActivity();
    if (null == activity || activity.isFinishing()) {
        return;
    }

    int permissionCheck = ContextCompat.checkSelfPermission(getActivity(), Manifest.permission.CAMERA);
    if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
        return;
    }

    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        Log.d(TAG, "tryAcquire");
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }

        Log.d(TAG, "check how many cameras");
        for (int i = 0; i < manager.getCameraIdList().length; ++i) {
            Log.d(TAG, manager.getCameraIdList()[i]);
        }

        String cameraId = manager.getCameraIdList()[0];

        // Choose the sizes for camera preview and video recording
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        mVideoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
        mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, mVideoSize);

        int orientation = getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }
        configureTransform(width, height);
        mMediaRecorder = new MediaRecorder();

        manager.openCamera(cameraId, mStateCallback, null);
    } catch (CameraAccessException e) {
        Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
        activity.finish();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        //ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(), FRAGMENT_DIALOG);
        Log.e(TAG, "This camera does not suppport Camera2API");
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.");
    }
}

From source file:com.raulh82vlc.face_detection_sample.camera2.presentation.FDCamera2Presenter.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    if (isViewAvailable()) {
        CameraManager manager = (CameraManager) activityView.getSystemService(Context.CAMERA_SERVICE);
        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

                // We don't use a front facing camera in this sample.
                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }

                StreamConfigurationMap map = characteristics
                        .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                if (map == null) {
                    continue;
                }

                imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];

                // For still image captures, we use the largest available size.
                Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                        new CompareSizesByArea());

                Point displaySize = new Point();
                activityView.getWindowManager().getDefaultDisplay().getSize(displaySize);
                int rotatedPreviewWidth = width;
                int rotatedPreviewHeight = height;
                int maxPreviewWidth = displaySize.x;
                int maxPreviewHeight = displaySize.y;

                if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                    maxPreviewWidth = MAX_PREVIEW_WIDTH;
                }

                if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                    maxPreviewHeight = MAX_PREVIEW_HEIGHT;
                }

                // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
                // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
                // garbage capture data.
                previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedPreviewWidth,
                        rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight, largest);

                // We fit the aspect ratio of TextureView to the size of preview we picked.
                int orientation = activityView.getResources().getConfiguration().orientation;
                if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                    activityView.getTextureView().setAspectRatio(previewSize.getWidth(),
                            previewSize.getHeight());
                } else {
                    activityView.getTextureView().setAspectRatio(previewSize.getHeight(),
                            previewSize.getWidth());
                }
                return;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2API is used but not supported on the
            // device this code runs.
        }
    }
}

From source file:com.example.android.hdrviewfinder.HdrViewfinderActivity.java

private void findAndOpenCamera() {
    boolean cameraPermissions = checkCameraPermissions();
    if (cameraPermissions) {
        String errorMessage = "Unknown error";
        boolean foundCamera = false;
        initializeCamera();
        if (cameraPermissions && mCameraOps != null) {
            try {
                // Find first back-facing camera that has necessary capability.
                String[] cameraIds = mCameraManager.getCameraIdList();
                for (String id : cameraIds) {
                    CameraCharacteristics info = mCameraManager.getCameraCharacteristics(id);
                    int facing = info.get(CameraCharacteristics.LENS_FACING);

                    int level = info.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
                    boolean hasFullLevel = (level == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);

                    int[] capabilities = info.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
                    int syncLatency = info.get(CameraCharacteristics.SYNC_MAX_LATENCY);
                    boolean hasManualControl = hasCapability(capabilities,
                            CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
                    boolean hasEnoughCapability = hasManualControl
                            && syncLatency == CameraCharacteristics.SYNC_MAX_LATENCY_PER_FRAME_CONTROL;

                    // All these are guaranteed by
                    // CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL, but checking
                    // for only the things we care about expands range of devices we can run on.
                    // We want:
                    //  - Back-facing camera
                    //  - Manual sensor control
                    //  - Per-frame synchronization (so that exposure can be changed every frame)
                    if (facing == CameraCharacteristics.LENS_FACING_BACK
                            && (hasFullLevel || hasEnoughCapability)) {
                        // Found suitable camera - get info, open, and set up outputs
                        mCameraInfo = info;
                        mCameraOps.openCamera(id);
                        configureSurfaces();
                        foundCamera = true;
                        break;
                    }
                }
                if (!foundCamera) {
                    errorMessage = getString(R.string.camera_no_good);
                }
            } catch (CameraAccessException e) {
                errorMessage = getErrorString(e);
            }
            if (!foundCamera) {
                showErrorDialog(errorMessage);
            }
        }
    }
}

From source file:com.example.anna_maria.myapplication.Camera2VideoFragment.java

String getFrontFacingCameraId(CameraManager cManager) throws CameraAccessException {
    String camIdx = null;
    for (final String cameraId : cManager.getCameraIdList()) {
        CameraCharacteristics characteristics = cManager.getCameraCharacteristics(cameraId);

        int cOrientation = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (cOrientation == CameraCharacteristics.LENS_FACING_FRONT)
            camIdx = cameraId;
    }
    return camIdx;
}

From source file:org.odk.collect.android.fragments.Camera2VideoFragment.java

/**
 * Tries to open a {@link CameraDevice}. The result is listened to by `stateCallback`.
 */
@SuppressWarnings("MissingPermission")
private void openCamera(int width, int height) {
    if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
        requestVideoPermissions();
        return;
    }
    final Activity activity = getActivity();
    if (null == activity || activity.isFinishing()) {
        return;
    }
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        Timber.d("tryAcquire");
        if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        String cameraId = null;
        for (String id : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(id);
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing != CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }
            // Choose the sizes for camera preview and video recording
            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            if (map == null) {
                continue;
            }
            videoSize = chooseVideoSize(map.getOutputSizes(MediaRecorder.class));
            previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, videoSize);
            cameraId = id;
            break;
        }

        configureTransform(width, height);
        mediaRecorder = new MediaRecorder();
        manager.openCamera(cameraId, stateCallback, null);
    } catch (CameraAccessException e) {
        ToastUtils.showShortToast("Cannot access the camera.");
        activity.finish();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while trying to lock camera opening.");
    }
}

From source file:com.tzutalin.dlibtest.CameraConnectionFragment.java

/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
@DebugLog
@SuppressLint("LongLogTag")
private void setUpCameraOutputs(final int width, final int height) {
    final Activity activity = getActivity();
    final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        SparseArray<Integer> cameraFaceTypeMap = new SparseArray<>();
        // Check the facing types of camera devices
        for (final String cameraId : manager.getCameraIdList()) {
            final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                if (cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_FRONT) != null) {
                    cameraFaceTypeMap.append(CameraCharacteristics.LENS_FACING_FRONT,
                            cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_FRONT) + 1);
                } else {
                    cameraFaceTypeMap.append(CameraCharacteristics.LENS_FACING_FRONT, 1);
                }
            }

            if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                if (cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_BACK) != null) {
                    cameraFaceTypeMap.append(CameraCharacteristics.LENS_FACING_BACK,
                            cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_BACK) + 1);
                } else {
                    cameraFaceTypeMap.append(CameraCharacteristics.LENS_FACING_BACK, 1);
                }
            }
        }

        Integer num_facing_back_camera = cameraFaceTypeMap.get(CameraCharacteristics.LENS_FACING_BACK);
        for (final String cameraId : manager.getCameraIdList()) {
            final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            // If a back-facing or external camera exists, we won't use a front-facing camera
            if (num_facing_back_camera != null && num_facing_back_camera > 0) {
                // We don't use a front facing camera in this sample if there are other camera device facing types
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }
            }

            final StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            final Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height, largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            final int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
            } else {
                textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
            }

            CameraConnectionFragment.this.cameraId = cameraId;
            return;
        }
    } catch (final CameraAccessException e) {
        Timber.tag(TAG).e("Exception!", e);
    } catch (final NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}

From source file:com.quectel.camera2test.Camera2RawFragment.java

/**
 * Rotation need to transform from the camera sensor orientation to the device's current
 * orientation./*from w  w  w  . j a  v a 2s  .c  o  m*/
 *
 * @param c                 the {@link CameraCharacteristics} to query for the camera sensor
 *                          orientation.
 * @param deviceOrientation the current device orientation relative to the native device
 *                          orientation.
 * @return the total rotation from the sensor orientation to the current device orientation.
 */
private static int sensorToDeviceRotation(CameraCharacteristics c, int deviceOrientation) {
    int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);

    // Get device orientation in degrees
    deviceOrientation = ORIENTATIONS.get(deviceOrientation);

    // Reverse device orientation for front-facing cameras
    if (c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
        deviceOrientation = -deviceOrientation;
    }

    // Calculate desired JPEG orientation relative to camera orientation to make
    // the image upright relative to the device orientation
    return (sensorOrientation - deviceOrientation + 360) % 360;
}