Example usage for android.hardware Camera open

List of usage examples for android.hardware Camera open

Introduction

On this page you can find example usage for android.hardware.Camera.open.

Prototype

public static Camera open(int cameraId) 

Document

Creates a new Camera object to access a particular hardware camera.
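
Before the project examples below, here is a minimal sketch of the usual pattern around Camera.open(int cameraId): open inside a try block (the call throws a RuntimeException if the camera is unavailable), configure, and always release the Camera when finished. It assumes android.hardware.Camera is imported, and the hard-coded id 0 is only a placeholder for illustration; real code normally selects an id via Camera.getNumberOfCameras() / Camera.getCameraInfo(), as several of the examples below do.

Camera camera = null;
try {
    // Camera.open(int) throws a RuntimeException if the camera is in use
    // or cannot be connected, so it is wrapped in try/catch.
    camera = Camera.open(0); // placeholder camera id, for illustration only
    Camera.Parameters params = camera.getParameters();
    // ... configure params, set a preview display, call startPreview(), etc.
} catch (RuntimeException e) {
    // camera unavailable (in use by another process, disabled by policy, ...)
} finally {
    if (camera != null) {
        camera.release();
    }
}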

Usage

From source file:com.ezartech.ezar.videooverlay.ezAR.java

private void startPreview(final CameraDirection cameraDir, final double zoom,
        final CallbackContext callbackContext) {

    Log.d(TAG, "start Preview");

    if (activity == null || activity.isFinishing()) {
        return;
    }

    if (isPreviewing()) {
        if (cameraId != getCameraId(cameraDir)) {
            stopPreview(null, false);
        }
    }

    matrix = new Matrix();
    cameraId = getCameraId(cameraDir);
    cameraDirection = cameraDir;

    if (cameraId != UNDEFINED) {
        camera = Camera.open(cameraId);
    }

    if (camera == null) {
        if (callbackContext != null)
            callbackContext.error("No camera available");
        return;
    }

    initCamera(camera);

    cordova.getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
            try {
                setIsPreviewing(true);
                updateCameraDisplayOrientation();

                //configure scaled CVG size & preview matrix
                updateCordovaViewContainerSize();
                camera.startPreview();
                webViewView.setBackgroundColor(Color.TRANSPARENT);

                setZoom(zoom, null);

                sendFlashlightEvent(STARTED, cameraDirection, cameraId, camera);
                sendFaceDetectorEvent(STARTED, cameraDirection, cameraId, camera);

                if (callbackContext != null) {
                    callbackContext.success();
                }

            } catch (Exception e) {
                Log.e(TAG, "Error during preview create", e);
                if (callbackContext != null)
                    callbackContext.error(TAG + ": " + e.getMessage());
            }

        }

    });
}

From source file:com.arthurtimberly.fragments.CaptureFragment.java

@Override
public void onResume() {
    super.onResume();

    /*
     * Initialize timer for scheduling tasks.
     */
    mTimer = new Timer("countdownTimer");

    /*
     * Reload the fragment if resuming from onPause().
     */
    LaunchActivity activity = (LaunchActivity) getActivity();
    if (mOnPauseCalled) {
        // Relaunch fragment with new camera.
        activity.replaceFragment(CaptureFragment.newInstance(mUseFrontFacing), false, true);
    } else {
        if (mCameraId != INVALID_CAMERA_ID) {
            try {
                mCamera = Camera.open(mCameraId);

                /*
                 * Configure camera parameters.
                 */
                Parameters params = mCamera.getParameters();

                // Set auto white balance if supported.
                List<String> whiteBalances = params.getSupportedWhiteBalance();
                if (whiteBalances != null) {
                    for (String whiteBalance : whiteBalances) {
                        if (whiteBalance.equals(Camera.Parameters.WHITE_BALANCE_AUTO)) {
                            params.setWhiteBalance(whiteBalance);
                        }
                    }
                }

                // Set auto antibanding if supported.
                List<String> antibandings = params.getSupportedAntibanding();
                if (antibandings != null) {
                    for (String antibanding : antibandings) {
                        if (antibanding.equals(Camera.Parameters.ANTIBANDING_AUTO)) {
                            params.setAntibanding(antibanding);
                        }
                    }
                }

                // Set macro focus mode if supported.
                List<String> focusModes = params.getSupportedFocusModes();
                if (focusModes != null) {
                    for (String focusMode : focusModes) {
                        if (focusMode.equals(Camera.Parameters.FOCUS_MODE_MACRO)) {
                            params.setFocusMode(focusMode);
                        }
                    }
                }

                // Set quality for Jpeg capture.
                params.setJpegQuality(CAPTURED_JPEG_QUALITY);

                // Set optimal size for Jpeg capture.
                Size pictureSize = CameraHelper.getOptimalPictureSize(params.getSupportedPreviewSizes(),
                        params.getSupportedPictureSizes(), ImageHelper.IMAGE_SIZE, ImageHelper.IMAGE_SIZE);
                params.setPictureSize(pictureSize.width, pictureSize.height);

                mCamera.setParameters(params);

                /*
                 * Setup preview.
                 */
                mPreviewDisplayOrientation = CameraHelper.getCameraScreenOrientation(activity, mCameraId);
                mCamera.setDisplayOrientation(mPreviewDisplayOrientation);
                mPreview.start(mCamera, pictureSize.width, pictureSize.height, mPreviewDisplayOrientation);
            } catch (RuntimeException e) {
                String title = getString(R.string.capture__error_camera_dialog_title);
                String message = getString(R.string.capture__error_camera_dialog_message_in_use);
                activity.showDialogFragment(ErrorDialogFragment.newInstance(title, message));
            }
        } else {
            String title = getString(R.string.capture__error_camera_dialog_title);
            String message = getString(R.string.capture__error_camera_dialog_message_none);
            activity.showDialogFragment(ErrorDialogFragment.newInstance(title, message));
        }
    }
}

From source file:org.uguess.android.sysinfo.SiragonManager.java

@TargetApi(Build.VERSION_CODES.ECLAIR)
private String[] getSupportedPreviewSizes(int cam) {
    float mp = 0, temp, height, width;
    Camera camera = Camera.open(cam);
    if (camera != null) {
        try {
            android.hardware.Camera.Parameters parameters = camera.getParameters();
            List<Camera.Size> values = parameters.getSupportedPictureSizes();
            List<String> valuessupport = new ArrayList<String>();

            for (int i = 0; i < values.size(); i++) {
                String strSize = String.valueOf(i) + " : " + String.valueOf(values.get(i).height) + " x "
                        + String.valueOf(values.get(i).width);
                valuessupport.add(strSize);
                if (i == 0) {
                    height = Float.parseFloat(String.valueOf(values.get(i).height));
                    width = Float.parseFloat(String.valueOf(values.get(i).width));
                    temp = ((height * width) / 1024000);
                    mp = temp;

                } else {
                    height = Float.parseFloat(String.valueOf(values.get(i).height));
                    width = Float.parseFloat(String.valueOf(values.get(i).width));
                    temp = ((height * width) / 1024000);
                    if (temp > mp) {
                        mp = temp;
                    }
                    if (i == values.size() - 1) {
                        valuessupport.add(String.valueOf(mp) + " Megapixels");
                    }
                }
            }
            camera.release();
            Log.i("#######################################", String.valueOf(valuessupport));
            String[] stringList = valuessupport.toArray(new String[valuessupport.size()]);
            return stringList;
        } catch (RuntimeException e) {
            e.printStackTrace();
        }
    }

    return null;
}

From source file:org.uguess.android.sysinfo.SiragonManager.java

private String[] getSupportedPreviewSizesVideo(int cam) {
    float mp = 0, temp, height, width;
    Camera camera = Camera.open(cam);
    if (camera != null) {
        try {
            android.hardware.Camera.Parameters parameters = camera.getParameters();
            List<Camera.Size> values = parameters.getSupportedVideoSizes();
            List<String> valuessupport = new ArrayList<String>();

            for (int i = 0; i < values.size(); i++) {
                String strSize = String.valueOf(i) + " : " + String.valueOf(values.get(i).height) + " x "
                        + String.valueOf(values.get(i).width);
                valuessupport.add(strSize);
                if (i == 0) {
                    height = Float.parseFloat(String.valueOf(values.get(i).height));
                    width = Float.parseFloat(String.valueOf(values.get(i).width));
                    temp = ((height * width) / 1024000);
                    mp = temp;

                } else {
                    height = Float.parseFloat(String.valueOf(values.get(i).height));
                    width = Float.parseFloat(String.valueOf(values.get(i).width));
                    temp = ((height * width) / 1024000);
                    if (temp > mp) {
                        mp = temp;
                    }
                    if (i == values.size() - 1) {
                        //valuessupport.add(String.valueOf(mp)+" Megapixels");
                    }
                }
            }
            camera.release();
            Log.i("#######################################", String.valueOf(valuessupport));
            String[] stringList = valuessupport.toArray(new String[valuessupport.size()]);
            return stringList;
        } catch (RuntimeException e) {
            e.printStackTrace();
        }
    }

    return null;
}

From source file:org.uguess.android.sysinfo.SiragonManager.java

private String[] getSupportedOtherCamera(int cam) {
    Camera camera = Camera.open(cam);
    if (camera != null) {
        try {
            // Collect eight camera parameter summaries, one per array slot.
            String[] stringList = new String[8];
            android.hardware.Camera.Parameters parameters = camera.getParameters();
            String values = "Focus mode: " + parameters.getFocusMode();
            stringList[0] = values;
            values = "Max Num Focus Areas: " + parameters.getMaxNumFocusAreas();
            stringList[1] = values;
            values = "Whitebalance Values: " + parameters.getSupportedWhiteBalance();
            stringList[2] = values;
            values = "Scene mode Values: " + parameters.getSupportedSceneModes();
            stringList[3] = values;
            values = "Effects Values: " + parameters.getSupportedColorEffects();
            stringList[4] = values;
            values = "Stabilization Video: " + parameters.getVideoStabilization();
            stringList[5] = values;
            values = "Quality JPEG: " + parameters.getJpegQuality();
            stringList[6] = values;
            values = "Quality Thumbnail: " + parameters.getJpegThumbnailQuality();
            stringList[7] = values;
            camera.release();
            return stringList;
        } catch (RuntimeException e) {
            e.printStackTrace();
        }
    }

    return null;
}

From source file:com.kjsaw.alcosys.ibacapp.IBAC.java

public void surfaceCreated(SurfaceHolder holder) {
    // TODO Auto-generated method stub
    int cameraCount = 0;
    Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
    cameraCount = Camera.getNumberOfCameras();

    for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
        Camera.getCameraInfo(camIdx, cameraInfo);
        if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            try {
                camera = Camera.open(camIdx);
                camera.setDisplayOrientation(90);
                Session.isFacingCamera = true;
                return;
            } catch (RuntimeException re) {
                Logging.d(re.toString());
            }
        }
    }

    camera = Camera.open();
    camera.setDisplayOrientation(90);
}

From source file:com.aimfire.demo.CamcorderActivity.java

/**
 * Opens a camera, and attempts to establish preview mode at the specified width 
 * and height.
 * <p>
 * Sets mCameraPreviewWidth and mCameraPreviewHeight to the actual width/height 
 * of the preview.
 */
private boolean openCamera(int desiredFacing, int videoQuality) {
    if (mCamera != null) {
        if (BuildConfig.DEBUG)
            Log.e(TAG, "openCamera: camera already initialized");
        FirebaseCrash.report(new Exception("CamcorderActivity openCamera: camera already initialized"));
        return false;
    }

    final Camera.CameraInfo info = new Camera.CameraInfo();

    /*
     *  Try to find camera with desired facing
     */
    int numCameras = Camera.getNumberOfCameras();
    if (numCameras == 0) {
        if (BuildConfig.DEBUG)
            Log.e(TAG, "openCamera: No camera found, exiting");
        FirebaseCrash.report(new Exception("openCamera: No camera found, exiting"));
        return false;
    }

    mCameraId = -1;
    for (int i = 0; i < numCameras; i++) {
        Camera.getCameraInfo(i, info);
        if (info.facing == desiredFacing) {
            mCameraId = i;
            break;
        }
    }
    if (mCameraId == -1) {
        if (BuildConfig.DEBUG)
            Log.d(TAG, "openCamera: No camera with desired facing found; opening default");
        FirebaseCrash.report(new Exception("openCamera: No camera with desired facing found; opening default"));
        mCameraId = 0;
    }

    try {
        mCamera = Camera.open(mCameraId);
    } catch (RuntimeException e) {
        if (BuildConfig.DEBUG)
            Log.e(TAG, "openCamera: cannot open camera!");
        FirebaseCrash.report(e);
        return false;
    }

    mCameraOrientation = info.orientation;
    mCameraFacing = info.facing;

    mCameraParams = mCamera.getParameters();

    CameraUtils.setCamParams(mCameraParams);

    /*
     * if we can find a supported video/preview size that's the same as our desired size,
     * use it. otherwise, use the best quality supported by the camera.
     */
    mSupportedVideoQualities = CameraUtils.getSupportedVideoQualities();
    if ((mSupportedVideoQualities & (1 << mQualityPref)) == 0) {
        if (BuildConfig.DEBUG)
            Log.d(TAG, "openCamera: desired quality " + mQualityPref + " not supported");

        mQualityPref = CameraUtils.getMaxVideoQuality();

        /*
         * since this device doesn't support whatever quality preference we had before,
         * we save the best quality that it does support
         */
        updateQualityPref(mQualityPref);
    }
    mCameraParams.setPreviewSize(MainConsts.VIDEO_DIMENSIONS[mQualityPref][0],
            MainConsts.VIDEO_DIMENSIONS[mQualityPref][1]);

    AspectFrameLayout afl = (AspectFrameLayout) findViewById(R.id.cameraPreview_frame);
    afl.setAspectRatio((float) MainConsts.VIDEO_DIMENSIONS[mQualityPref][0]
            / (float) MainConsts.VIDEO_DIMENSIONS[mQualityPref][1]);

    /*
     * give the camera a hint that we're recording video. this can have a big
     * impact on frame rate.
     */
    mCameraParams.setRecordingHint(true);

    /*
     * disable all the automatic settings, in the hope that frame rate will
     * be less variable
     * 
     * TODO: if any of the default modes are not available then we need to 
     * sync it with the remote device
     */
    List<String> modes;

    modes = mCameraParams.getSupportedFocusModes();
    if (modes != null) {
        for (String mode : modes) {
            if (mode.contains(Camera.Parameters.FOCUS_MODE_INFINITY)) {
                mCameraParams.setFocusMode(Camera.Parameters.FOCUS_MODE_INFINITY);
                break;
            }
        }
    }

    modes = mCameraParams.getSupportedFlashModes();
    if (modes != null) {
        for (String mode : modes) {
            if (mode.contains(Camera.Parameters.FLASH_MODE_OFF)) {
                mCameraParams.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
                break;
            }
        }
    }

    /*
    modes = mCameraParams.getSupportedWhiteBalance();
    if (modes != null) {
        for (String mode : modes) {
            if (mode.contains(Camera.Parameters.WHITE_BALANCE_FLUORESCENT)) {
                mCameraParams.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_FLUORESCENT);
                break;
            }
        }
    }

    modes = mCameraParams.getSupportedSceneModes();
    if (modes != null) {
        for (String mode : modes) {
            if (mode.contains(Camera.Parameters.SCENE_MODE_PORTRAIT)) {
                mCameraParams.setSceneMode(Camera.Parameters.SCENE_MODE_PORTRAIT);
                break;
            }
        }
    }
    */

    /*
     * zoom can impact view angle. we should set it to 0 if it's not
     */
    if (mCameraParams.isZoomSupported()) {
        int zoom = mCameraParams.getZoom();
        if (zoom != 0) {
            if (BuildConfig.DEBUG)
                Log.d(TAG, "getViewAngle: camera zoom = " + zoom + ", forcing to zero");
            mCameraParams.setZoom(0);
        }
    }

    /*
     *  leave the frame rate set to default
     */
    mCamera.setParameters(mCameraParams);

    /*
    int[] fpsRange = new int[2];
    mCameraParams.getPreviewFpsRange(fpsRange);
    String previewFacts = VIDEO_DIMENSIONS[mQualityPref][0] + "x" + VIDEO_DIMENSIONS[mQualityPref][1];
    if (fpsRange[0] == fpsRange[1]) {
        previewFacts += " @" + (fpsRange[0] / 1000.0) + "fps";
    } else {
        previewFacts += " @[" + (fpsRange[0] / 1000.0) + " - " + (fpsRange[1] / 1000.0) + "] fps";
    }
    TextView text = (TextView) findViewById(R.id.cameraParams_text);
    text.setText(previewFacts);
    */

    if (mNaturalOrientation == Configuration.ORIENTATION_PORTRAIT) {
        if (((info.facing == Camera.CameraInfo.CAMERA_FACING_BACK)
                && (mLandscapeOrientation == mCameraOrientation))
                || ((info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)
                        && (mLandscapeOrientation != mCameraOrientation))) {
            mCamera.setDisplayOrientation(180);
            mCameraOrientation = (mCameraOrientation + 180) % 360;
        }
    }

    if (mOrientationEventListener == null) {
        mOrientationEventListener = new OrientationEventListener(this, SensorManager.SENSOR_DELAY_NORMAL) {
            @Override
            public void onOrientationChanged(int deviceOrientation) {
                if (deviceOrientation == ORIENTATION_UNKNOWN)
                    return;

                handleOrientationChanged(deviceOrientation);
            }
        };

        if (mOrientationEventListener.canDetectOrientation()) {
            mOrientationEventListener.enable();
        }
    }

    Runnable forceOrientationCalcRunnable = new Runnable() {
        public void run() {
            final Handler handler = new Handler();
            handler.postDelayed(new Runnable() {
                public void run() {
                    int deviceOrientation = mCurrDeviceOrientation;
                    mCurrDeviceOrientation = -1;
                    handleOrientationChanged(deviceOrientation);
                }
            }, 100);
        }
    };
    runOnUiThread(forceOrientationCalcRunnable);

    return true;
}