List of usage examples for the android.util.Size constructor Size(int, int)
public Size(int width, int height)
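Before the project excerpts, here is a minimal sketch of the constructor in isolation. The helper class SizeExample and its demo() method are illustrative only (they do not come from the source files below), and android.util.Size requires an Android runtime (API 21+).

import android.util.Size;

final class SizeExample {

    // Illustrative helper: construct a Size and read it back.
    static Size demo() {
        // The constructor stores the two dimensions; Size is immutable (getters only).
        Size previewSize = new Size(1920, 1080);

        int w = previewSize.getWidth();   // 1920
        int h = previewSize.getHeight();  // 1080

        // toString() produces "1920x1080"; Size.parseSize(String) is its inverse.
        Size roundTripped = Size.parseSize(previewSize.toString());
        if (!roundTripped.equals(previewSize)) {
            throw new IllegalStateException("unexpected round-trip mismatch");
        }

        // Swapping dimensions (e.g. for a rotated display) requires building a new instance.
        return new Size(h, w);
    }
}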
From source file:freed.cam.apis.camera2.modules.VideoModuleApi2.java
@TargetApi(VERSION_CODES.LOLLIPOP)
@Override
public void startPreview() {
    previewSize = new Size(currentVideoProfile.videoFrameWidth, currentVideoProfile.videoFrameHeight);
    int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    int orientation = 0;
    switch (sensorOrientation) {
    case 90:
        orientation = 270;
        break;
    case 180:
        orientation = 0;
        break;
    case 270:
        orientation = 90;
        break;
    case 0:
        orientation = 180;
        break;
    }
    cameraHolder.CaptureSessionH.SetTextureViewSize(previewSize.getWidth(), previewSize.getHeight(),
            orientation, orientation + 180, true);
    SurfaceTexture texture = cameraHolder.CaptureSessionH.getSurfaceTexture();
    texture.setDefaultBufferSize(currentVideoProfile.videoFrameWidth, currentVideoProfile.videoFrameHeight);
    previewsurface = new Surface(texture);
    cameraHolder.CaptureSessionH.AddSurface(previewsurface, true);
    cameraHolder.CaptureSessionH.CreateCaptureSession();
}
From source file:com.example.joshf.conc.CameraFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            /*
            Size largest = Collections.max(
                    Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            */
            Size largest = new Size(1088, 1088);
            /*
            Size[] largest1 = map.getOutputSizes(ImageFormat.JPEG);
            for (Size n : largest1) {
                Log.e("camera", n.toString());
            }
            */
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                    /*maxImages*/2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            // Log.e("Sensor", String.valueOf(mSensorOrientation));
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                    Log.e("Sensor", "1");
                }
                break;
            case Surface.ROTATION_90:
                Log.e("Sensor", "3");
                // falls through
            case Surface.ROTATION_270:
                Log.e("Sensor", "4");
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = new Size(1088, 1088);
            /*
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
                    maxPreviewHeight, largest);
            */

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance(getString(R.string.error)).show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
From source file:com.Yamate.Camera.Camera.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = mActivity;
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            Util.PiCoreLog("output w:" + map.getOutputSizes(SurfaceTexture.class)[0].getWidth() + ",h:"
                    + map.getOutputSizes(SurfaceTexture.class)[0].getHeight());

            // For still image captures, we use the largest available size.
            /*
            Size largest = Collections.max(
                    Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, 2);
            mImageReader.setOnImageAvailableListener(
                    mOnImageAvailableListener, mBackgroundHandler);
            mCaptureSize = new Size(largest.getWidth(), largest.getHeight());
            */

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            //noinspection ConstantConditions
            mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (mSensorOrientation == 90 || mSensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (mSensorOrientation == 0 || mSensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            /*
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }
            */

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            //mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
            //        rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
            //        maxPreviewHeight, largest);
            if (displaySize.x > displaySize.y) {
                double scale = (double) displaySize.y
                        / (double) map.getOutputSizes(SurfaceTexture.class)[0].getHeight();
                mPreviewSize = new Size((int) (scale * map.getOutputSizes(SurfaceTexture.class)[0].getWidth()),
                        (int) (scale * map.getOutputSizes(SurfaceTexture.class)[0].getHeight()));
            } else {
                double scale = (double) displaySize.x
                        / (double) map.getOutputSizes(SurfaceTexture.class)[0].getHeight();
                mPreviewSize = new Size((int) (scale * map.getOutputSizes(SurfaceTexture.class)[0].getHeight()),
                        (int) (scale * map.getOutputSizes(SurfaceTexture.class)[0].getWidth()));
            }
            Util.PiCoreLog("mPreviewSize w:" + mPreviewSize.getWidth() + ",h:" + mPreviewSize.getHeight());

            mCaptureSize = new Size(CAPTURE_WIDTH, CAPTURE_HEIGHT);
            mImageReader = ImageReader.newInstance(mCaptureSize.getWidth(), mCaptureSize.getHeight(),
                    ImageFormat.JPEG, 2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            /*
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(
                        mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(
                        mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }
            */

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        e.printStackTrace();
    }
}
From source file:com.android.camera2.its.ItsSerializer.java
@SuppressWarnings("unchecked") public static CaptureRequest.Builder deserialize(CaptureRequest.Builder mdDefault, JSONObject jsonReq) throws ItsException { try {/* w w w . j a v a 2s . c o m*/ Logt.i(TAG, "Parsing JSON capture request ..."); // Iterate over the CaptureRequest reflected fields. CaptureRequest.Builder md = mdDefault; Field[] allFields = CaptureRequest.class.getDeclaredFields(); for (Field field : allFields) { if (Modifier.isPublic(field.getModifiers()) && Modifier.isStatic(field.getModifiers()) && field.getType() == CaptureRequest.Key.class && field.getGenericType() instanceof ParameterizedType) { ParameterizedType paramType = (ParameterizedType) field.getGenericType(); Type[] argTypes = paramType.getActualTypeArguments(); if (argTypes.length > 0) { CaptureRequest.Key key = (CaptureRequest.Key) field.get(md); String keyName = key.getName(); Type keyType = argTypes[0]; // For each reflected CaptureRequest entry, look inside the JSON object // to see if it is being set. If it is found, remove the key from the // JSON object. After this process, there should be no keys left in the // JSON (otherwise an invalid key was specified). if (jsonReq.has(keyName) && !jsonReq.isNull(keyName)) { if (keyType instanceof GenericArrayType) { Type elmtType = ((GenericArrayType) keyType).getGenericComponentType(); JSONArray ja = jsonReq.getJSONArray(keyName); Object val[] = new Object[ja.length()]; for (int i = 0; i < ja.length(); i++) { if (elmtType == int.class) { Array.set(val, i, ja.getInt(i)); } else if (elmtType == byte.class) { Array.set(val, i, (byte) ja.getInt(i)); } else if (elmtType == float.class) { Array.set(val, i, (float) ja.getDouble(i)); } else if (elmtType == long.class) { Array.set(val, i, ja.getLong(i)); } else if (elmtType == double.class) { Array.set(val, i, ja.getDouble(i)); } else if (elmtType == boolean.class) { Array.set(val, i, ja.getBoolean(i)); } else if (elmtType == String.class) { Array.set(val, i, ja.getString(i)); } else if (elmtType == Size.class) { JSONObject obj = ja.getJSONObject(i); Array.set(val, i, new Size(obj.getInt("width"), obj.getInt("height"))); } else if (elmtType == Rect.class) { JSONObject obj = ja.getJSONObject(i); Array.set(val, i, new Rect(obj.getInt("left"), obj.getInt("top"), obj.getInt("bottom"), obj.getInt("right"))); } else if (elmtType == Rational.class) { JSONObject obj = ja.getJSONObject(i); Array.set(val, i, new Rational(obj.getInt("numerator"), obj.getInt("denominator"))); } else if (elmtType == RggbChannelVector.class) { JSONArray arr = ja.getJSONArray(i); Array.set(val, i, new RggbChannelVector((float) arr.getDouble(0), (float) arr.getDouble(1), (float) arr.getDouble(2), (float) arr.getDouble(3))); } else if (elmtType == ColorSpaceTransform.class) { JSONArray arr = ja.getJSONArray(i); Rational xform[] = new Rational[9]; for (int j = 0; j < 9; j++) { xform[j] = new Rational(arr.getJSONObject(j).getInt("numerator"), arr.getJSONObject(j).getInt("denominator")); } Array.set(val, i, new ColorSpaceTransform(xform)); } else if (elmtType == MeteringRectangle.class) { JSONObject obj = ja.getJSONObject(i); Array.set(val, i, new MeteringRectangle(obj.getInt("x"), obj.getInt("y"), obj.getInt("width"), obj.getInt("height"), obj.getInt("weight"))); } else { throw new ItsException("Failed to parse key from JSON: " + keyName); } } if (val != null) { Logt.i(TAG, "Set: " + keyName + " -> " + Arrays.toString(val)); md.set(key, val); jsonReq.remove(keyName); } } else { Object val = null; if (keyType == Integer.class) { val = jsonReq.getInt(keyName); } 
else if (keyType == Byte.class) { val = (byte) jsonReq.getInt(keyName); } else if (keyType == Double.class) { val = jsonReq.getDouble(keyName); } else if (keyType == Long.class) { val = jsonReq.getLong(keyName); } else if (keyType == Float.class) { val = (float) jsonReq.getDouble(keyName); } else if (keyType == Boolean.class) { val = jsonReq.getBoolean(keyName); } else if (keyType == String.class) { val = jsonReq.getString(keyName); } else if (keyType == Size.class) { JSONObject obj = jsonReq.getJSONObject(keyName); val = new Size(obj.getInt("width"), obj.getInt("height")); } else if (keyType == Rect.class) { JSONObject obj = jsonReq.getJSONObject(keyName); val = new Rect(obj.getInt("left"), obj.getInt("top"), obj.getInt("right"), obj.getInt("bottom")); } else if (keyType == Rational.class) { JSONObject obj = jsonReq.getJSONObject(keyName); val = new Rational(obj.getInt("numerator"), obj.getInt("denominator")); } else if (keyType == RggbChannelVector.class) { JSONObject obj = jsonReq.optJSONObject(keyName); JSONArray arr = jsonReq.optJSONArray(keyName); if (arr != null) { val = new RggbChannelVector((float) arr.getDouble(0), (float) arr.getDouble(1), (float) arr.getDouble(2), (float) arr.getDouble(3)); } else if (obj != null) { val = new RggbChannelVector((float) obj.getDouble("red"), (float) obj.getDouble("greenEven"), (float) obj.getDouble("greenOdd"), (float) obj.getDouble("blue")); } else { throw new ItsException("Invalid RggbChannelVector object"); } } else if (keyType == ColorSpaceTransform.class) { JSONArray arr = jsonReq.getJSONArray(keyName); Rational a[] = new Rational[9]; for (int i = 0; i < 9; i++) { a[i] = new Rational(arr.getJSONObject(i).getInt("numerator"), arr.getJSONObject(i).getInt("denominator")); } val = new ColorSpaceTransform(a); } else if (keyType instanceof ParameterizedType && ((ParameterizedType) keyType).getRawType() == Range.class && ((ParameterizedType) keyType).getActualTypeArguments().length == 1 && ((ParameterizedType) keyType) .getActualTypeArguments()[0] == Integer.class) { JSONArray arr = jsonReq.getJSONArray(keyName); val = new Range<Integer>(arr.getInt(0), arr.getInt(1)); } else { throw new ItsException( "Failed to parse key from JSON: " + keyName + ", " + keyType); } if (val != null) { Logt.i(TAG, "Set: " + keyName + " -> " + val); md.set(key, val); jsonReq.remove(keyName); } } } } } } // Ensure that there were no invalid keys in the JSON request object. if (jsonReq.length() != 0) { throw new ItsException("Invalid JSON key(s): " + jsonReq.toString()); } Logt.i(TAG, "Parsing JSON capture request completed"); return md; } catch (java.lang.IllegalAccessException e) { throw new ItsException("Access error: ", e); } catch (org.json.JSONException e) { throw new ItsException("JSON error: ", e); } }
From source file:com.almalence.opencam.SavingService.java
@TargetApi(21)
private void saveDNGPicture(int frameNum, long sessionID, OutputStream os, int width, int height,
        int orientation, boolean cameraMirrored) {
    DngCreator creator = new DngCreator(CameraController.getCameraCharacteristics(),
            PluginManager.getInstance().getFromRAWCaptureResults("captureResult" + frameNum + sessionID));
    byte[] frame = SwapHeap.SwapFromHeap(
            Integer.parseInt(getFromSharedMem("resultframe" + frameNum + Long.toString(sessionID))),
            Integer.parseInt(getFromSharedMem("resultframelen" + frameNum + Long.toString(sessionID))));

    ByteBuffer buff = ByteBuffer.allocateDirect(frame.length);
    buff.put(frame);

    int exif_orientation = ExifInterface.ORIENTATION_NORMAL;
    switch ((orientation + 360) % 360) {
    default:
    case 0:
        exif_orientation = ExifInterface.ORIENTATION_NORMAL;
        break;
    case 90:
        exif_orientation = cameraMirrored ? ExifInterface.ORIENTATION_ROTATE_270
                : ExifInterface.ORIENTATION_ROTATE_90;
        break;
    case 180:
        exif_orientation = ExifInterface.ORIENTATION_ROTATE_180;
        break;
    case 270:
        exif_orientation = cameraMirrored ? ExifInterface.ORIENTATION_ROTATE_90
                : ExifInterface.ORIENTATION_ROTATE_270;
        break;
    }

    try {
        creator.setOrientation(exif_orientation);
        creator.writeByteBuffer(os, new Size(width, height), buff, 0);
    } catch (IOException e) {
        creator.close();
        e.printStackTrace();
        Log.e("Open Camera", "saveDNGPicture error: " + e.getMessage());
    }
    creator.close();
}