List of usage examples for android.hardware Camera getParameters
public Parameters getParameters()
From source file:com.ezartech.ezar.videooverlay.ezAR.java
/**
 * Initializes the plugin: applies the requested webview background color, requests the
 * CAMERA runtime permission if not yet granted, then enumerates the device cameras and
 * reports display metrics plus per-camera zoom capabilities back through the callback.
 *
 * @param args            optional JSON args; args[0] is a color string (e.g. "#RRGGBB")
 *                        used as the webview background; falls back to DEFAULT_RGB
 * @param callbackContext Cordova callback invoked with a JSONObject on success
 */
private void init(JSONArray args, final CallbackContext callbackContext) {
    this.callbackContext = callbackContext;
    // Snapshot support is available only when the companion snapshot plugin is installed.
    supportSnapshot = getSnapshotPlugin() != null;
    if (args != null) {
        String rgb = DEFAULT_RGB;
        try {
            rgb = args.getString(0);
        } catch (JSONException e) {
            // do nothing; resort to DEFAULT_RGB
        }
        setBackgroundColor(Color.parseColor(rgb));
        // View mutation must happen on the UI thread; plugin methods run on a worker thread.
        cordova.getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                webViewView.setBackgroundColor(getBackgroundColor());
            }
        });
    }
    // Without the CAMERA permission, defer: the permission callback re-enters init later.
    if (!PermissionHelper.hasPermission(this, permissions[0])) {
        PermissionHelper.requestPermission(this, CAMERA_SEC, Manifest.permission.CAMERA);
        return;
    }
    JSONObject jsonObject = new JSONObject();
    try {
        Display display = activity.getWindowManager().getDefaultDisplay();
        DisplayMetrics m = new DisplayMetrics();
        display.getMetrics(m);
        jsonObject.put("displayWidth", m.widthPixels);
        jsonObject.put("displayHeight", m.heightPixels);
        int mNumberOfCameras = Camera.getNumberOfCameras();
        Log.d(TAG, "Cameras:" + mNumberOfCameras);
        // Find the ID of the back-facing ("default") camera
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        for (int i = 0; i < mNumberOfCameras; i++) {
            Camera.getCameraInfo(i, cameraInfo);
            Parameters parameters;
            Camera open = null;
            // Open each camera just long enough to read its Parameters; always release it,
            // even if open()/getParameters() throws, so the hardware is not left locked.
            try {
                open = Camera.open(i);
                parameters = open.getParameters();
            } finally {
                if (open != null) {
                    open.release();
                }
            }
            Log.d(TAG, "Camera facing:" + cameraInfo.facing);
            // Map the framework facing constant onto the plugin's CameraDirection enum.
            CameraDirection type = null;
            for (CameraDirection f : CameraDirection.values()) {
                if (f.getDirection() == cameraInfo.facing) {
                    type = f;
                }
            }
            if (type != null) {
                double zoom = 0;
                double maxZoom = 0;
                if (parameters.isZoomSupported()) {
                    // NOTE(review): zoom values are rescaled by /10 here — presumably the JS
                    // layer expects a ~1.0-based zoom factor; confirm against the JS API.
                    maxZoom = (parameters.getMaxZoom() + 1) / 10.0;
                    zoom = Math.min(parameters.getZoom() / 10.0 + 1, maxZoom);
                }
                JSONObject jsonCamera = new JSONObject();
                jsonCamera.put("id", i);
                jsonCamera.put("position", type.toString());
                jsonCamera.put("zoom", zoom);
                jsonCamera.put("maxZoom", maxZoom);
                // Keyed by direction name, so the result has at most one entry per facing.
                jsonObject.put(type.toString(), jsonCamera);
            }
        }
    } catch (JSONException e) {
        Log.e(TAG, "Can't set exception", e);
    }
    callbackContext.success(jsonObject);
}
From source file:com.jasompeter.openalpr.CameraActivity.java
public void setCorrectOrientation(Camera camera) { int displayRotation = getWindowManager().getDefaultDisplay().getRotation(); int degrees = 0; switch (displayRotation) { case Surface.ROTATION_0: degrees = 0;//from w w w. j ava 2 s. c o m break; case Surface.ROTATION_90: degrees = 90; break; case Surface.ROTATION_180: degrees = 180; break; case Surface.ROTATION_270: degrees = 270; break; } Camera.CameraInfo cameraInfo = getCurrentCameraInfo(); int resultDegrees; if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { resultDegrees = (cameraInfo.orientation + degrees) % 360; resultDegrees = (360 - resultDegrees) % 360; } else { resultDegrees = (cameraInfo.orientation - degrees + 360) % 360; } camera.setDisplayOrientation(resultDegrees); Camera.Parameters parameters = camera.getParameters(); parameters.setRotation(resultDegrees); camera.setParameters(parameters); }
From source file:com.gsma.rcs.ri.sharing.video.OutgoingVideoSharing.java
/** * Check if good camera sizes are available for encoder. Must be used only before open camera. * * @param cameraId the camera ID/*from w ww. ja v a 2 s . c o m*/ * @return false if the camera don't have the good preview size for the encoder */ boolean checkCameraSize(CameraOptions cameraId) { boolean sizeAvailable = false; Camera camera = null; Method method = getCameraOpenMethod(); if (method != null) { try { camera = (Camera) method.invoke(camera, new Object[] { cameraId.getValue() }); } catch (Exception e) { camera = Camera.open(); } } else { camera = Camera.open(); } if (camera == null) { return false; } // Check common sizes Parameters param = camera.getParameters(); List<Camera.Size> sizes = param.getSupportedPreviewSizes(); for (Camera.Size size : sizes) { if ((size.width == H264Config.QVGA_WIDTH && size.height == H264Config.QVGA_HEIGHT) || (size.width == H264Config.CIF_WIDTH && size.height == H264Config.CIF_HEIGHT) || (size.width == H264Config.VGA_WIDTH && size.height == H264Config.VGA_HEIGHT)) { sizeAvailable = true; break; } } // Release camera camera.release(); return sizeAvailable; }
From source file:org.uguess.android.sysinfo.SiragonManager.java
private String[] getAvailableFeatureCamera() { Camera camera = Camera.open(); if (camera != null) { try {/* w w w. j av a 2s. com*/ android.hardware.Camera.Parameters parameters = camera.getParameters(); List<String> values = parameters.getSupportedFocusModes(); /* List<String> valuessupport = new ArrayList<String>(); for (int i = 0; i < values.size(); i++) { String strSize = String.valueOf(i) + " : " + String.valueOf(values.get(i).height) + " x " + String.valueOf(values.get(i).width); valuessupport.add(strSize); } camera.release(); Log.i("#######################################", String.valueOf(valuessupport));*/ String[] stringList = values.toArray(new String[values.size()]); return stringList; } catch (RuntimeException e) { e.printStackTrace(); } } return null; }
From source file:org.uguess.android.sysinfo.SiragonManager.java
private String[] getSupportedPreviewSizesVideo(int cam) { float mp = 0, temp, height, width; Camera camera = Camera.open(cam); if (camera != null) { try {/*from w ww.j a v a 2 s . co m*/ android.hardware.Camera.Parameters parameters = camera.getParameters(); List<Camera.Size> values = parameters.getSupportedVideoSizes(); List<String> valuessupport = new ArrayList<String>(); for (int i = 0; i < values.size(); i++) { String strSize = String.valueOf(i) + " : " + String.valueOf(values.get(i).height) + " x " + String.valueOf(values.get(i).width); valuessupport.add(strSize); if (i == 0) { height = Float.parseFloat(String.valueOf(values.get(i).height)); width = Float.parseFloat(String.valueOf(values.get(i).width)); temp = ((height * width) / 1024000); mp = temp; } else { height = Float.parseFloat(String.valueOf(values.get(i).height)); width = Float.parseFloat(String.valueOf(values.get(i).width)); temp = ((height * width) / 1024000); if (temp > mp) { mp = temp; } if (i == values.size() - 1) { //valuessupport.add(String.valueOf(mp)+" Megapixels"); } } } camera.release(); Log.i("#######################################", String.valueOf(valuessupport)); String[] stringList = valuessupport.toArray(new String[valuessupport.size()]); return stringList; } catch (RuntimeException e) { e.printStackTrace(); } } return null; }
From source file:org.uguess.android.sysinfo.SiragonManager.java
@TargetApi(Build.VERSION_CODES.ECLAIR) private String[] getSupportedPreviewSizes(int cam) { float mp = 0, temp, height, width; Camera camera = Camera.open(cam); if (camera != null) { try {//from w ww .j a va2 s. co m android.hardware.Camera.Parameters parameters = camera.getParameters(); List<Camera.Size> values = parameters.getSupportedPictureSizes(); List<String> valuessupport = new ArrayList<String>(); for (int i = 0; i < values.size(); i++) { String strSize = String.valueOf(i) + " : " + String.valueOf(values.get(i).height) + " x " + String.valueOf(values.get(i).width); valuessupport.add(strSize); if (i == 0) { height = Float.parseFloat(String.valueOf(values.get(i).height)); width = Float.parseFloat(String.valueOf(values.get(i).width)); temp = ((height * width) / 1024000); mp = temp; } else { height = Float.parseFloat(String.valueOf(values.get(i).height)); width = Float.parseFloat(String.valueOf(values.get(i).width)); temp = ((height * width) / 1024000); if (temp > mp) { mp = temp; } if (i == values.size() - 1) { valuessupport.add(String.valueOf(mp) + " Megapixels"); } } } camera.release(); Log.i("#######################################", String.valueOf(valuessupport)); String[] stringList = valuessupport.toArray(new String[valuessupport.size()]); return stringList; } catch (RuntimeException e) { e.printStackTrace(); } } return null; }
From source file:org.uguess.android.sysinfo.SiragonManager.java
private String[] getSupportedOtherCamera(int cam) { Camera camera = Camera.open(cam); if (camera != null) { try {/*from w ww.j a va2 s.c o m*/ String[] stringList = new String[7]; android.hardware.Camera.Parameters parameters = camera.getParameters(); String values = "Focus mode: " + parameters.getFocusMode(); stringList[0] = values; values = "Max Num Focus Areas: " + parameters.getMaxNumFocusAreas(); stringList[1] = values; values = "Whitebalance Values: " + parameters.getSupportedWhiteBalance(); stringList[2] = values; values = "Scene mode Values: " + parameters.getSupportedSceneModes(); stringList[3] = values; values = "Effects Values: " + parameters.getSupportedColorEffects(); stringList[4] = values; values = "Stabilization Video: " + parameters.getVideoStabilization(); stringList[4] = values; values = "Quality JPEG: " + parameters.getJpegQuality(); stringList[5] = values; values = "Quality Thumbnail: " + parameters.getJpegThumbnailQuality(); stringList[6] = values; camera.release(); return stringList; } catch (RuntimeException e) { e.printStackTrace(); } } return null; }
From source file:com.nekomeshi312.whiteboardcorrection.WhiteBoardCorrectionActivity.java
@Override public void onPictureTaken(byte[] data, Camera camera) { // TODO Auto-generated method stub Log.i(LOG_TAG, "onPictureTaken"); String path = null;/*from w w w.jav a 2 s . co m*/ String name = null; if (null != data) { if (MyDebug.DEBUG) Log.d(LOG_TAG, "Captured:size = " + data.length); int sdErrorID = SDCardAccess.checkSDCard(this); if (0 != sdErrorID) {//SD?????????? Toast.makeText(this, sdErrorID, Toast.LENGTH_LONG).show(); } else { String fn = null; final String folderBase = getString(R.string.picture_folder_base_name); final String filenameBase = getString(R.string.picture_base_name); final String warpBase = getString(R.string.picture_warped_name); name = PictureFolder.createPictureName(this, folderBase, filenameBase, warpBase); path = PictureFolder.createPicturePath(this, folderBase); String[] n = new String[2]; if (createPictureName(n)) { path = n[0]; name = n[1]; fn = path + name; if (MyDebug.DEBUG) Log.d(LOG_TAG, "Picture file name = " + fn); FileOutputStream fileOutputStream = null; try { fileOutputStream = new FileOutputStream(fn); fileOutputStream.write(data); } catch (Exception e1) { // TODO Auto-generated catch block e1.printStackTrace(); name = null; } finally { if (null != fileOutputStream) { try { fileOutputStream.flush(); fileOutputStream.close(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); name = null; } } } } } } mFocusStatus = FOCUS_STATUS_IDLE; mShutterStatus = SHUTTER_STATUS_IDLE; if (null == name) { mFragCameraView.startPreview();//?pictureTaken????activity???????????????startPreview???? return; } mFragCameraView.stopPreview();//???????????????????? //fragment?fragment?? final int width = camera.getParameters().getPictureSize().width; final int height = camera.getParameters().getPictureSize().height; final int prevWidth = camera.getParameters().getPreviewSize().width; final int prevHeight = camera.getParameters().getPreviewSize().height; //???jpge?? 
//MediaScannerConnection.scanFile??????????DISPLAY_NAME??WIDTH??? //????????????????????? ContentResolver cr = getContentResolver(); ContentValues values = new ContentValues(); values.put(Images.Media.TITLE, name); values.put(Images.Media.DISPLAY_NAME, name); //<- ????????????? values.put(Images.Media.MIME_TYPE, "image/jpeg"); values.put(Images.Media.DATA, path + name); values.put(Images.Media.WIDTH, width); values.put(Images.Media.HEIGHT, height); cr.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values); mWhiteBoardCheckInfo.mFilePath = path; mWhiteBoardCheckInfo.mFileName = name; mWhiteBoardCheckInfo.mPicWidth = width; mWhiteBoardCheckInfo.mPicHeight = height; mWhiteBoardCheckInfo.mPrevWidth = prevWidth; mWhiteBoardCheckInfo.mPrevHeight = prevHeight; mWhiteBoardCheckInfo.mIsCaptured = true; transitToBoardCheckFragment(); }