List of usage examples for android.graphics.ImageFormat.JPEG
public static final int JPEG — constant value 256 (0x100); denotes the compressed JPEG picture format.
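Before the examples below, a minimal sketch of the most common use of this constant: requesting JPEG output from the legacy android.hardware.Camera API. This is not taken from any of the sources on this page; it assumes `camera` is an already-opened Camera instance.

    Camera.Parameters params = camera.getParameters();
    params.setPictureFormat(ImageFormat.JPEG); // request compressed JPEG stills
    camera.setParameters(params);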
From source file:Main.java
public static String translatePreviewFormat(int supportedPreviewFormat) {
    switch (supportedPreviewFormat) {
    case ImageFormat.JPEG:
        return "ImageFormat.JPEG";
    case ImageFormat.NV16:
        return "ImageFormat.NV16";
    case ImageFormat.NV21:
        return "ImageFormat.NV21";
    case ImageFormat.RAW10:
        return "ImageFormat.RAW10";
    case ImageFormat.RAW_SENSOR:
        return "ImageFormat.RAW_SENSOR";
    case ImageFormat.RGB_565:
        return "ImageFormat.RGB_565";
    case ImageFormat.UNKNOWN:
        return "ImageFormat.UNKNOWN";
    case ImageFormat.YUV_420_888:
        return "ImageFormat.YUV_420_888";
    case ImageFormat.YUY2:
        return "ImageFormat.YUY2";
    case ImageFormat.YV12:
        return "ImageFormat.YV12";
    default:
        return "Unknown format: " + supportedPreviewFormat;
    }
}
From source file:Main.java
private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
    case ImageFormat.YUV_420_888:
    case ImageFormat.NV21:
    case ImageFormat.YV12:
        // Multi-planar YUV formats carry three planes (Y, U, V).
        return 3 == planes.length;
    case ImageFormat.RAW_SENSOR:
    case ImageFormat.RAW10:
    case ImageFormat.JPEG:
        // Raw and compressed formats are exposed as a single plane.
        return 1 == planes.length;
    default:
        return false;
    }
}
From source file:Main.java
private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
    case ImageFormat.YUV_420_888:
    case ImageFormat.NV21:
    case ImageFormat.YV12:
        return 3 == planes.length;
    case ImageFormat.RAW_SENSOR:
    case ImageFormat.RAW10:
    case ImageFormat.RAW12:
    case ImageFormat.JPEG:
        return 1 == planes.length;
    default:
        return false;
    }
}
From source file:com.android.camera2.its.ItsUtils.java
public static Size[] getJpegOutputSizes(CameraCharacteristics ccs) throws ItsException {
    return getOutputSizes(ccs, ImageFormat.JPEG);
}
From source file:com.android.camera2.its.ItsUtils.java
public static byte[] getDataFromImage(Image image) throws ItsException {
    int format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = null;

    // Read image data
    Plane[] planes = image.getPlanes();

    // Check image validity
    if (!checkAndroidImageFormat(image)) {
        throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat());
    }

    if (format == ImageFormat.JPEG) {
        // JPEG has no pixelStride or rowStride; treat it as a 1D buffer.
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
            || format == ImageFormat.RAW10) {
        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
            Logt.i(TAG, String.format(
                    "Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
                    format, i, width, height, rowStride, pixelStride));
            // For multi-planar YUV images, assume YUV420 with 2x2 chroma subsampling.
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row.
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    // Advance the buffer by the remainder of the row stride.
                    buffer.position(buffer.position() + rowStride - length);
                    offset += length;
                } else {
                    // Generic case: works for any pixelStride, but slower.
                    // Use an intermediate buffer to avoid byte-by-byte reads from a
                    // DirectByteBuffer, which is very bad for performance.
                    // Also avoid out-of-bounds access by reading only the bytes
                    // actually remaining in the ByteBuffer.
                    int readSize = rowStride;
                    if (buffer.remaining() < readSize) {
                        readSize = buffer.remaining();
                    }
                    buffer.get(rowData, 0, readSize);
                    if (pixelStride >= 1) {
                        for (int col = 0; col < w; col++) {
                            data[offset++] = rowData[col * pixelStride];
                        }
                    } else {
                        // A pixelStride of 0 can mean a pixel isn't a multiple of 8 bits,
                        // for example with RAW10. Just copy the buffer, dropping any
                        // padding at the end of the row.
                        int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
                        System.arraycopy(rowData, 0, data, offset, length);
                        offset += length;
                    }
                }
            }
        }
        Logt.i(TAG, String.format("Done reading image, format %d", format));
        return data;
    } else {
        throw new ItsException("Unsupported image format: " + format);
    }
}
From source file:MainActivity.java
protected void takePicture(View view) {
    if (null == mCameraDevice) {
        return;
    }
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
        StreamConfigurationMap configurationMap = characteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (configurationMap == null)
            return;
        Size largest = Collections.max(Arrays.asList(configurationMap.getOutputSizes(ImageFormat.JPEG)),
                new CompareSizesByArea());
        ImageReader reader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                ImageFormat.JPEG, 1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(2);
        outputSurfaces.add(reader.getSurface());
        outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(reader.getSurface());
        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = null;
                try {
                    image = reader.acquireLatestImage();
                    // JPEG images have a single plane; copy it out and write it to disk.
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.capacity()];
                    buffer.get(bytes);
                    OutputStream output = new FileOutputStream(getPictureFile());
                    output.write(bytes);
                    output.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (image != null) {
                        image.close();
                    }
                }
            }
        };
        HandlerThread thread = new HandlerThread("CameraPicture");
        thread.start();
        final Handler backgroundHandler = new Handler(thread.getLooper());
        reader.setOnImageAvailableListener(readerListener, backgroundHandler);
        final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                super.onCaptureCompleted(session, request, result);
                Toast.makeText(MainActivity.this, "Picture Saved", Toast.LENGTH_SHORT).show();
                startPreview(session);
            }
        };
        mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    session.capture(captureBuilder.build(), captureCallback, backgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:com.fallahpoor.infocenter.fragments.CameraFragment.java
private String getPictureFormat(Camera.Parameters cameraParams) {
    int intFormat = cameraParams.getPictureFormat();
    String format;
    switch (intFormat) {
    case ImageFormat.JPEG:
        format = "JPEG";
        break;
    case ImageFormat.RGB_565:
        format = "RGB";
        break;
    default:
        format = getString(R.string.unknown);
    }
    return format;
}
From source file:com.example.camera2apidemo.Camera2Fragment.java
private String setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            // Query the stream configuration map for this camera.
            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            mImageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/ 2);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // Check if the flash is supported.
            Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
            mFlashSupported = available == null ? false : available;

            return cameraId;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2 API is used but not supported on
        // the device this code runs on.
        // ErrorDialog.newInstance(getString(R.string.camera_error))
        //         .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
    return null;
}
From source file:com.raulh82vlc.face_detection_sample.camera2.presentation.FDCamera2Presenter.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    if (isViewAvailable()) {
        CameraManager manager = (CameraManager) activityView.getSystemService(Context.CAMERA_SERVICE);
        try {
            for (String cameraId : manager.getCameraIdList()) {
                CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

                // We don't use a front facing camera in this sample.
                Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
                if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                    continue;
                }

                StreamConfigurationMap map = characteristics
                        .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
                if (map == null) {
                    continue;
                }
                imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];

                // For still image captures, we use the largest available size.
                Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                        new CompareSizesByArea());

                Point displaySize = new Point();
                activityView.getWindowManager().getDefaultDisplay().getSize(displaySize);
                int rotatedPreviewWidth = width;
                int rotatedPreviewHeight = height;
                int maxPreviewWidth = displaySize.x;
                int maxPreviewHeight = displaySize.y;
                if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                    maxPreviewWidth = MAX_PREVIEW_WIDTH;
                }
                if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                    maxPreviewHeight = MAX_PREVIEW_HEIGHT;
                }

                // Danger, W.R.! Attempting to use too large a preview size could exceed the
                // camera bus' bandwidth limitation, resulting in gorgeous previews but the
                // storage of garbage capture data.
                previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                        rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
                        maxPreviewHeight, largest);

                // We fit the aspect ratio of TextureView to the size of preview we picked.
                int orientation = activityView.getResources().getConfiguration().orientation;
                if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                    activityView.getTextureView().setAspectRatio(previewSize.getWidth(),
                            previewSize.getHeight());
                } else {
                    activityView.getTextureView().setAspectRatio(previewSize.getHeight(),
                            previewSize.getWidth());
                }
                return;
            }
        } catch (CameraAccessException e) {
            e.printStackTrace();
        } catch (NullPointerException e) {
            // Currently an NPE is thrown when the Camera2 API is used but not supported on
            // the device this code runs on.
        }
    }
}
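Several examples on this page depend on a CompareSizesByArea comparator that is never shown. The version below matches the one in Google's Camera2Basic sample, from which these fragments derive:

    static class CompareSizesByArea implements Comparator<Size> {
        @Override
        public int compare(Size lhs, Size rhs) {
            // Cast to long to avoid overflow when multiplying large sensor dimensions.
            return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                    - (long) rhs.getWidth() * rhs.getHeight());
        }
    }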
From source file:com.example.android.tflitecamerademo.Camera2BasicFragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                    new CompareSizesByArea());
            imageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
                    ImageFormat.JPEG, /*maxImages*/ 2);

            // Find out if we need to swap dimensions to get the preview size relative to
            // sensor coordinates.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            // noinspection ConstantConditions
            /* Orientation of the camera sensor */
            int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
            case Surface.ROTATION_0:
            case Surface.ROTATION_180:
                if (sensorOrientation == 90 || sensorOrientation == 270) {
                    swappedDimensions = true;
                }
                break;
            case Surface.ROTATION_90:
            case Surface.ROTATION_270:
                if (sensorOrientation == 0 || sensorOrientation == 180) {
                    swappedDimensions = true;
                }
                break;
            default:
                Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int rotatedPreviewWidth = width;
            int rotatedPreviewHeight = height;
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;
            if (swappedDimensions) {
                rotatedPreviewWidth = height;
                rotatedPreviewHeight = width;
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }
            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }
            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth, maxPreviewHeight,
                    largest);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
            } else {
                textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
            }

            this.cameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        Log.e(TAG, "Failed to access Camera", e);
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2 API is used but not supported on
        // the device this code runs on.
        ErrorDialog.newInstance(getString(R.string.camera_error)).show(getChildFragmentManager(),
                FRAGMENT_DIALOG);
    }
}
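The last two examples also call a chooseOptimalSize helper that is not shown here. A sketch following the logic of Google's Camera2Basic sample: among the sizes matching the target aspect ratio and within the max bounds, prefer the smallest one at least as large as the view, falling back to the largest undersized option.

    private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
            int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
        // Collect the supported resolutions that are at least as big as the preview Surface
        List<Size> bigEnough = new ArrayList<>();
        // Collect the supported resolutions that are smaller than the preview Surface
        List<Size> notBigEnough = new ArrayList<>();
        int w = aspectRatio.getWidth();
        int h = aspectRatio.getHeight();
        for (Size option : choices) {
            if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                    && option.getHeight() == option.getWidth() * h / w) {
                if (option.getWidth() >= textureViewWidth
                        && option.getHeight() >= textureViewHeight) {
                    bigEnough.add(option);
                } else {
                    notBigEnough.add(option);
                }
            }
        }
        // Pick the smallest of the big-enough sizes; if none, the biggest undersized one.
        if (bigEnough.size() > 0) {
            return Collections.min(bigEnough, new CompareSizesByArea());
        } else if (notBigEnough.size() > 0) {
            return Collections.max(notBigEnough, new CompareSizesByArea());
        } else {
            return choices[0]; // no suitable size found; fall back to the first option
        }
    }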