List of usage examples for android.hardware.Camera getParameters()
public Parameters getParameters()
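All of the examples below follow the same read-modify-write pattern: getParameters() returns a snapshot of the camera's current settings, and changes to the returned Camera.Parameters take effect only when they are passed back via setParameters(). As a minimal sketch of that round trip (this snippet is illustrative, not taken from any of the projects below, and uses the deprecated android.hardware.Camera API that all of these examples target):

import android.hardware.Camera;

import java.util.List;

public final class GetParametersExample {

    /**
     * Open the default (back-facing) camera and enable auto focus if the
     * device supports it. Returns null if no camera is available.
     */
    public static Camera openWithAutoFocus() {
        Camera camera = null;
        try {
            camera = Camera.open(); // null if there is no back-facing camera
            if (camera == null) {
                return null;
            }
            // Read the current settings...
            Camera.Parameters parameters = camera.getParameters();
            // ...modify them only if the device supports the new value...
            List<String> focusModes = parameters.getSupportedFocusModes();
            if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
                parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
                // ...and write them back; nothing changes until this call.
                camera.setParameters(parameters);
            }
        } catch (RuntimeException e) {
            // Camera is in use or was released mid-call
            if (camera != null) {
                camera.release();
                camera = null;
            }
        }
        return camera;
    }
}

Note that getParameters() and setParameters() throw a RuntimeException if the camera has been disconnected or released, which is why several of the examples below wrap them in try/catch.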
From source file:com.fallahpoor.infocenter.fragments.CameraFragment.java
private ArrayList<ListItem> getCameraParams(int cameraFacing) {
    ArrayList<ListItem> camParams = new ArrayList<>();
    Camera.Parameters cameraParams;
    Camera camera;
    String[] items;
    ArrayList<String> subItems;

    camera = getCameraInstance(cameraFacing);
    if (camera != null) {
        cameraParams = camera.getParameters();
        releaseCamera(camera);
        items = getItemsArray();
        subItems = getParameters(cameraParams);
        for (int i = 0; i < items.length; i++) {
            camParams.add(new OrdinaryListItem(items[i], subItems.get(i)));
        }
    } else {
        // camera is busy or for some other reason camera isn't available
        if (cameraFacing == CameraInfo.CAMERA_FACING_BACK) {
            camParams.add(new OrdinaryListItem(getString(R.string.cam_sub_item_back_camera_busy), ""));
        } else {
            camParams.add(new OrdinaryListItem(getString(R.string.cam_sub_item_front_camera_busy), ""));
        }
    }
    return camParams;
}
From source file:com.longle1.facedetection.MainActivity.java
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    try {
        Size size = camera.getParameters().getPreviewSize();
        processImage(data, size.width, size.height);
        camera.addCallbackBuffer(data);
    } catch (RuntimeException e) {
        // The camera has probably just been released, ignore.
    }
}
From source file:com.csusm.twinder.Fragment.QRScanFragment.java
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View view = inflater.inflate(R.layout.fragment_scan, container, false);
    view.setOnTouchListener(new View.OnTouchListener() {
        public boolean onTouch(View v, MotionEvent event) {
            Camera camera = mCameraSource.getCamera();
            if (camera != null) {
                camera.cancelAutoFocus();
                Rect focusRect = calculateTapArea((int) event.getX(), (int) event.getY(), 1f);
                Camera.Parameters parameters = camera.getParameters();
                parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
                if (parameters.getMaxNumFocusAreas() > 0) {
                    List<Camera.Area> focusAreas = new ArrayList<Camera.Area>();
                    focusAreas.add(new Camera.Area(focusRect, 1000));
                    parameters.setFocusAreas(focusAreas);
                }
                // Apply the tap-to-focus settings before triggering autofocus;
                // without this call the focus areas above have no effect.
                camera.setParameters(parameters);
                camera.autoFocus(new Camera.AutoFocusCallback() {
                    @Override
                    public void onAutoFocus(boolean success, Camera camera) {
                        camera.cancelAutoFocus();
                        Camera.Parameters params = camera.getParameters();
                        // Focus modes are Strings; compare with equals(), not !=
                        if (!Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE.equals(params.getFocusMode())) {
                            params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
                            camera.setParameters(params);
                        }
                    }
                });
            }
            return true;
        }
    });
    return view;
}
From source file:com.example.android.animationsdemo.CameraActivity.java
private void startFocus(int x, int y) {
    if (mCamera != null) {
        mCamera.cancelAutoFocus();
        Rect focusRect = calculateTapArea(x, y);
        Camera.Parameters parameters = mCamera.getParameters();
        // Focus modes are Strings; compare with equals(), not !=
        if (!Camera.Parameters.FOCUS_MODE_AUTO.equals(parameters.getFocusMode())) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        }
        if (parameters.getMaxNumFocusAreas() > 0) {
            List<Camera.Area> focusAreas = new ArrayList<Camera.Area>();
            focusAreas.add(new Camera.Area(focusRect, 1000));
            parameters.setFocusAreas(focusAreas);
        }
        try {
            mCamera.cancelAutoFocus();
            mCamera.setParameters(parameters);
            mCamera.startPreview();
            mCamera.autoFocus(new Camera.AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                    // Once the tap-to-focus pass completes, restore continuous focus
                    if (!Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE
                            .equals(camera.getParameters().getFocusMode())) {
                        Camera.Parameters parameters = camera.getParameters();
                        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
                        if (parameters.getMaxNumFocusAreas() > 0) {
                            parameters.setFocusAreas(null);
                        }
                        camera.setParameters(parameters);
                        camera.startPreview();
                    }
                }
            });
        } catch (Exception e) {
            DKLog.e(TAG, Trace.getCurrentMethod() + e.toString());
        }
    }
}
From source file:me.hammarstrom.imagerecognition.activities.MainActivity.java
/**
 * Helper method to get a {@link Camera} instance
 *
 * @return camera, or null if it is unavailable
 */
private Camera getCameraInstance() {
    Camera camera = null;
    try {
        camera = Camera.open();
        // Set continuous-picture auto focus mode
        Camera.Parameters parameters = camera.getParameters();
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        camera.setParameters(parameters);
    } catch (Exception e) {
        // cannot get camera or it does not exist
    }
    return camera;
}
From source file:com.jasompeter.openalpr.CameraActivity.java
public void setCorrectSize(Camera camera, int width, int height) {
    Camera.Parameters parameters = camera.getParameters();
    Camera.Size size = getBestPreviewSize(width, height, parameters);
    if (size != null) {
        parameters.setPreviewSize(size.width, size.height);
        // Apply the parameters to the same camera they were read from
        camera.setParameters(parameters);
    }
}
From source file:com.gelakinetic.selfr.CameraActivity.java
/**
 * A safe way to get an instance of the Camera object. Also sets up picture size & focus type
 *
 * @param cameraType Camera.CameraInfo.CAMERA_FACING_FRONT or
 *                   Camera.CameraInfo.CAMERA_FACING_BACK
 * @return A Camera object if it was created, or null
 */
@Nullable
private static Camera getCameraInstance(int cameraType) {
    Camera camera = null;
    try {
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        /* Scan through all the cameras for one of the specified type */
        for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); camIdx++) {
            Camera.getCameraInfo(camIdx, cameraInfo);
            if (cameraInfo.facing == cameraType) {
                try {
                    /* Open the camera, get default parameters */
                    camera = Camera.open(camIdx);
                    Camera.Parameters parameters = camera.getParameters();

                    /* Set the image to native resolution (largest supported height) */
                    List<Camera.Size> sizes = parameters.getSupportedPictureSizes();
                    Camera.Size nativeSize = null;
                    int maxHeight = Integer.MIN_VALUE;
                    for (Camera.Size size : sizes) {
                        if (size.height > maxHeight) {
                            maxHeight = size.height;
                            nativeSize = size;
                        }
                    }
                    if (nativeSize != null) {
                        parameters.setPictureSize(nativeSize.width, nativeSize.height);
                    }

                    /* Set auto-focus, if we can */
                    List<String> focusModes = parameters.getSupportedFocusModes();
                    if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
                        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
                    }

                    /* Set the parameters */
                    camera.setParameters(parameters);
                } catch (RuntimeException e) {
                    /* Eat it */
                }
            }
        }
    } catch (Exception e) {
        /* Camera is not available (in use or does not exist) */
    }
    return camera; /* returns null if camera is unavailable */
}
From source file:com.jasompeter.openalpr.CameraActivity.java
public void startPreview() {
    try {
        mCamera.setPreviewDisplay(mSurfaceHolder);
    } catch (IOException e) {
        e.printStackTrace();
        Log.d(TAG, "Cannot set preview display.");
    }

    setCorrectOrientation(mCamera);
    setCorrectSize(mCamera, mSurfaceHolder.getSurfaceFrame().width(),
            mSurfaceHolder.getSurfaceFrame().height());

    mCamera.startPreview();
    mCamera.setPreviewCallback(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (!mEnableRecognition) {
                return;
            }
            Camera.Parameters parameters = camera.getParameters();
            if (parameters.getPreviewFormat() == ImageFormat.NV21) {
                // Convert the NV21 preview frame to JPEG before recognition
                Camera.Size previewSize = parameters.getPreviewSize();
                YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21,
                        previewSize.width, previewSize.height, null);
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 50, baos);
                recognize(baos.toByteArray());
            }
        }
    });
}
From source file:com.TaxiDriver.jy.CameraPreview.java
public void switchCamera(Camera camera) {
    setCamera(camera);
    try {
        camera.setPreviewDisplay(mHolder);
    } catch (IOException exception) {
        Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
    }
    Camera.Parameters parameters = camera.getParameters();
    parameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
    requestLayout();
    camera.setParameters(parameters);
}
From source file:info.guardianproject.iocipher.camera.VideoCameraActivity.java
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // Even when not recording, we'll compress frames in order to estimate our FPS
    Camera.Parameters parameters = camera.getParameters();
    mLastWidth = parameters.getPreviewSize().width;
    mLastHeight = parameters.getPreviewSize().height;

    if (mRotation > 0) { // flip height and width
        mLastWidth = parameters.getPreviewSize().height;
        mLastHeight = parameters.getPreviewSize().width;
    }

    mPreviewFormat = parameters.getPreviewFormat();

    byte[] dataResult = data;

    if (mPreCompressFrames) {
        if (mRotation > 0) {
            dataResult = rotateYUV420Degree90(data, mLastHeight, mLastWidth);
            if (getCameraDirection() == CameraInfo.CAMERA_FACING_FRONT) {
                // Rotate twice more (270 degrees total) for the front-facing camera
                dataResult = rotateYUV420Degree90(dataResult, mLastWidth, mLastHeight);
                dataResult = rotateYUV420Degree90(dataResult, mLastHeight, mLastWidth);
            }
        }

        YuvImage yuv = new YuvImage(dataResult, mPreviewFormat, mLastWidth, mLastHeight, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        yuv.compressToJpeg(new Rect(0, 0, mLastWidth, mLastHeight), MediaConstants.sJpegQuality, out);
        dataResult = out.toByteArray();
    }

    // Save the first frame as a thumbnail for the output file
    if (mFramesTotal == 0 && fileOut != null) {
        try {
            info.guardianproject.iocipher.FileOutputStream fosThumb =
                    new info.guardianproject.iocipher.FileOutputStream(
                            new info.guardianproject.iocipher.File(fileOut.getAbsolutePath() + ".thumb.jpg"));
            fosThumb.write(dataResult);
            fosThumb.flush();
            fosThumb.close();
        } catch (Exception e) {
            Log.e("VideoCam", "can't save thumb", e);
        }
    }

    if (mIsRecording && mFrameQ != null) {
        synchronized (mFrameQ) {
            if (data != null) {
                VideoFrame vf = new VideoFrame();
                vf.image = dataResult;
                vf.duration = 1; // frame duration in frames, not wall-clock time
                vf.fps = mFPS;
                mFrameQ.add(vf);
                mFramesTotal++;
            }
        }
    }

    // Recompute the FPS estimate once per second
    mFpsCounter++;
    if ((System.currentTimeMillis() - start) >= 1000) {
        mFPS = mFpsCounter;
        mFpsCounter = 0;
        start = System.currentTimeMillis();
    }
}