List of usage examples for android.graphics ImageFormat RAW_SENSOR
int RAW_SENSOR
To view the source code for android.graphics ImageFormat RAW_SENSOR, click the Source Link below.
General raw camera sensor image format, usually representing a single-channel Bayer-mosaic image.
From source file:Main.java
public static String translatePreviewFormat(int supportedPreviewFormat) { switch (supportedPreviewFormat) { case ImageFormat.JPEG: return "ImageFormat.JPEG"; case ImageFormat.NV16: return "ImageFormat.NV16"; case ImageFormat.NV21: return "ImageFormat.NV21"; case ImageFormat.RAW10: return "ImageFormat.RAW10"; case ImageFormat.RAW_SENSOR: return "ImageFormat.RAW_SENSOR"; case ImageFormat.RGB_565: return "ImageFormat.RGB_565"; case ImageFormat.UNKNOWN: return "ImageFormat.UNKNOWN"; case ImageFormat.YUV_420_888: return "ImageFormat.YUV_420_888"; case ImageFormat.YUY2: return "ImageFormat.YUY2"; case ImageFormat.YV12: return "ImageFormat.YV12"; default://from w w w . j a v a 2s. co m return "xxxxxxxxdefault"; } }
From source file:Main.java
/**
 * Checks that the given {@link Image}'s plane count is consistent with its
 * reported format: three planes for the YUV family, one for RAW/JPEG.
 *
 * @param image the image to validate
 * @return true when the plane count matches the format's expectation; false for
 *         any unrecognized format
 */
private static boolean checkAndroidImageFormat(Image image) {
    int fmt = image.getFormat();
    Plane[] imagePlanes = image.getPlanes();
    switch (fmt) {
    case ImageFormat.YUV_420_888:
    case ImageFormat.NV21:
    case ImageFormat.YV12:
        // Multi-planar YUV: separate Y/U/V planes expected.
        return imagePlanes.length == 3;
    case ImageFormat.RAW_SENSOR:
    case ImageFormat.RAW10:
    case ImageFormat.JPEG:
        // RAW and JPEG payloads arrive in a single plane.
        return imagePlanes.length == 1;
    default:
        return false;
    }
}
From source file:Main.java
/**
 * Checks that the given {@link Image}'s plane count is consistent with its
 * reported format: three planes for YUV formats, one for RAW/JPEG formats.
 *
 * @param image the image to validate
 * @return true when the plane count matches the format's expectation; false for
 *         any unrecognized format
 */
private static boolean checkAndroidImageFormat(Image image) {
    int fmt = image.getFormat();
    Plane[] imagePlanes = image.getPlanes();
    switch (fmt) {
    case ImageFormat.YUV_420_888:
    case ImageFormat.NV21:
    case ImageFormat.YV12:
        // Multi-planar YUV: separate Y/U/V planes expected.
        return imagePlanes.length == 3;
    case ImageFormat.RAW_SENSOR:
    case ImageFormat.RAW10:
    case ImageFormat.RAW12:
    case ImageFormat.JPEG:
        // RAW variants and JPEG payloads arrive in a single plane.
        return imagePlanes.length == 1;
    default:
        return false;
    }
}
From source file:com.android.camera2.its.ItsUtils.java
/**
 * Queries the output sizes the camera supports for the RAW_SENSOR image format.
 *
 * @param ccs the characteristics of the camera being queried
 * @return the supported RAW_SENSOR output sizes
 * @throws ItsException if the output sizes cannot be retrieved
 */
public static Size[] getRawOutputSizes(CameraCharacteristics ccs) throws ItsException {
    final int rawFormat = ImageFormat.RAW_SENSOR;
    return getOutputSizes(ccs, rawFormat);
}
From source file:com.android.camera2.its.ItsUtils.java
public static byte[] getDataFromImage(Image image) throws ItsException { int format = image.getFormat(); int width = image.getWidth(); int height = image.getHeight(); byte[] data = null; // Read image data Plane[] planes = image.getPlanes();//from ww w .jav a 2s.c om // Check image validity if (!checkAndroidImageFormat(image)) { throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat()); } if (format == ImageFormat.JPEG) { // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer. ByteBuffer buffer = planes[0].getBuffer(); data = new byte[buffer.capacity()]; buffer.get(data); return data; } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR || format == ImageFormat.RAW10) { int offset = 0; data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8]; byte[] rowData = new byte[planes[0].getRowStride()]; for (int i = 0; i < planes.length; i++) { ByteBuffer buffer = planes[i].getBuffer(); int rowStride = planes[i].getRowStride(); int pixelStride = planes[i].getPixelStride(); int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8; Logt.i(TAG, String.format("Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d", format, i, width, height, rowStride, pixelStride)); // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling. int w = (i == 0) ? width : width / 2; int h = (i == 0) ? height : height / 2; for (int row = 0; row < h; row++) { if (pixelStride == bytesPerPixel) { // Special case: optimized read of the entire row int length = w * bytesPerPixel; buffer.get(data, offset, length); // Advance buffer the remainder of the row stride buffer.position(buffer.position() + rowStride - length); offset += length; } else { // Generic case: should work for any pixelStride but slower. // Use intermediate buffer to avoid read byte-by-byte from // DirectByteBuffer, which is very bad for performance. 
// Also need avoid access out of bound by only reading the available // bytes in the bytebuffer. int readSize = rowStride; if (buffer.remaining() < readSize) { readSize = buffer.remaining(); } buffer.get(rowData, 0, readSize); if (pixelStride >= 1) { for (int col = 0; col < w; col++) { data[offset++] = rowData[col * pixelStride]; } } else { // PixelStride of 0 can mean pixel isn't a multiple of 8 bits, for // example with RAW10. Just copy the buffer, dropping any padding at // the end of the row. int length = (w * ImageFormat.getBitsPerPixel(format)) / 8; System.arraycopy(rowData, 0, data, offset, length); offset += length; } } } } Logt.i(TAG, String.format("Done reading image, format %d", format)); return data; } else { throw new ItsException("Unsupported image format: " + format); } }
From source file:com.example.aschere.cdhprototype2.Camera2RawFragment.java
/** * Sets up state related to camera that is needed before opening a {@link CameraDevice}. *///from ww w . ja v a2 s . c om private boolean setUpCameraOutputs() { if (manager == null) { //ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(), "dialog"); Log.e(TAG, "This device doesn't support Camera2 API."); return false; } try { // Find a CameraDevice that supports RAW captures, and configure state. for (String cameraId : manager.getCameraIdList()) { CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); // We only use a camera that supports RAW in this sample. if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) { continue; } StreamConfigurationMap map = characteristics .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); // For still image captures, we use the largest available size. Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea()); Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea()); synchronized (mCameraStateLock) { // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference // counted wrapper to ensure they are only closed when all background tasks // using them are finished. 
if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) { mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance( largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5)); } mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler); if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) { mRawImageReader = new RefCountedAutoCloseable<>( ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5)); } mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler); mCharacteristics = characteristics; mCameraId = cameraId; } Log.i(TAG, "setUpCameraOutputs successful"); return true; } } catch (CameraAccessException e) { Log.e(TAG, "setUpCameraOutputs stacktraced"); e.printStackTrace(); } // If we found no suitable cameras for capturing RAW, warn the user. //ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(), "dialog"); Log.e(TAG, "This device doesn't support capturing RAW photos"); return false; }
From source file:freed.cam.apis.camera2.modules.PictureModuleApi2.java
/**
 * Persists a captured image to storage, dispatching on its format: JPEG is written
 * directly, RAW10/RAW12 go through the DNG converter with the matching MIPI profile,
 * and RAW_SENSOR is either converted (on specific devices) or saved via the plain
 * RAW_SENSOR path. Also advances burst bookkeeping and, once the burst is complete,
 * finishes the capture.
 */
private void saveImage(ImageHolder image) {
    // Burst.GetValue() presumably holds the extra-shot count; +1 gives the total
    // number of frames in the burst — TODO confirm against parameterHandler.
    int burstcount = parameterHandler.Burst.GetValue() + 1;
    String f;
    if (burstcount > 1)
        // In a burst, suffix each frame with its index to keep file names unique.
        f = cameraUiWrapper.getActivityInterface().getStorageHandler()
                .getNewFilePath(appSettingsManager.GetWriteExternal(), "_" + imagecount);
    else
        f = cameraUiWrapper.getActivityInterface().getStorageHandler()
                .getNewFilePath(appSettingsManager.GetWriteExternal(), "");
    File file = null;
    imagecount++;
    switch (image.getImage().getFormat()) {
    case ImageFormat.JPEG:
        file = new File(f + ".jpg");
        process_jpeg(image.getImage(), file);
        break;
    case ImageFormat.RAW10:
        file = new File(f + ".dng");
        process_rawWithDngConverter(image, DngProfile.Mipi, file);
        break;
    case ImageFormat.RAW12:
        file = new File(f + ".dng");
        process_rawWithDngConverter(image, DngProfile.Mipi12, file);
        break;
    case ImageFormat.RAW_SENSOR:
        file = new File(f + ".dng");
        // NOTE(review): these two devices appear to deliver MIPI-packed 16-bit raw
        // that needs the converter; all others use the plain RAW_SENSOR path — confirm.
        if (appSettingsManager.getDevice() == Devices.Moto_X2k14
                || appSettingsManager.getDevice() == Devices.OnePlusTwo)
            process_rawWithDngConverter(image, DngProfile.Mipi16, file);
        else
            process_rawSensor(image, file);
        break;
    }
    // NOTE(review): file remains null for any format not handled above and is
    // passed on as-is to the work-done callback.
    internalFireOnWorkDone(file);
    isWorking = false;
    changeCaptureState(CaptureStates.image_capture_stop);
    if (burstcount == imagecount) {
        finishCapture(captureBuilder);
    }
}
From source file:com.example.android.camera2raw.Camera2RawFragment.java
/** * Sets up state related to camera that is needed before opening a {@link CameraDevice}. *///from ww w . j a va 2s.co m private boolean setUpCameraOutputs() { Activity activity = getActivity(); CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); if (manager == null) { ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(), "dialog"); return false; } try { // Find a CameraDevice that supports RAW captures, and configure state. for (String cameraId : manager.getCameraIdList()) { CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); // We only use a camera that supports RAW in this sample. if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) { continue; } StreamConfigurationMap map = characteristics .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); // For still image captures, we use the largest available size. Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea()); Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea()); synchronized (mCameraStateLock) { // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference // counted wrapper to ensure they are only closed when all background tasks // using them are finished. 
if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) { mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance( largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5)); } mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler); if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) { mRawImageReader = new RefCountedAutoCloseable<>( ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5)); } mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler); mCharacteristics = characteristics; mCameraId = cameraId; } return true; } } catch (CameraAccessException e) { e.printStackTrace(); } // If we found no suitable cameras for capturing RAW, warn the user. ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(), "dialog"); return false; }
From source file:com.ape.camera2raw.Camera2RawFragment.java
/** * Sets up state related to camera that is needed before opening a {@link CameraDevice}. *///from ww w.j ava 2s . c om private boolean setUpCameraOutputs() { Activity activity = getActivity(); CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); if (manager == null) { ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(), "dialog"); return false; } try { // Find a CameraDevice that supports RAW captures, and configure state. for (String cameraId : manager.getCameraIdList()) { CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); // We only use a camera that supports RAW in this sample. if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) { continue; } StreamConfigurationMap map = characteristics .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); // For still image captures, we use the largest available size. Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea()); Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea()); Log.d("WAY", "largestRaw = " + largestRaw); //largestRaw = new Size(4208, 3120);//9051 synchronized (mCameraStateLock) { // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference // counted wrapper to ensure they are only closed when all background tasks // using them are finished. 
if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) { mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance( largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5)); } mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler); if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) { mRawImageReader = new RefCountedAutoCloseable<>( ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5)); } mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler); mCharacteristics = characteristics; mCameraId = cameraId; } return true; } } catch (CameraAccessException e) { e.printStackTrace(); } // If we found no suitable cameras for capturing RAW, warn the user. ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(), "dialog"); return false; }
From source file:com.quectel.camera2test.Camera2RawFragment.java
/** * Sets up state related to camera that is needed before opening a {@link CameraDevice}. *///from w w w.j av a2 s.c om private boolean setUpCameraOutputs() { Activity activity = getActivity(); CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); if (manager == null) { ErrorDialog.buildErrorDialog("This device doesn't support Camera2 API.").show(getFragmentManager(), "dialog"); return false; } try { // Find a CameraDevice that supports RAW captures, and configure state. for (String cameraId : manager.getCameraIdList()) { CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); Log.d(TAG, "---characteristics = " + characteristics); // We only use a camera that supports RAW in this sample. if (!contains(characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES), CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) { Log.d(TAG, "-1--characteristics continue"); continue; } Log.d(TAG, "-1--characteristics = " + characteristics); StreamConfigurationMap map = characteristics .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); // For still image captures, we use the largest available size. Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea()); Size largestRaw = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.RAW_SENSOR)), new CompareSizesByArea()); synchronized (mCameraStateLock) { // Set up ImageReaders for JPEG and RAW outputs. Place these in a reference // counted wrapper to ensure they are only closed when all background tasks // using them are finished. 
if (mJpegImageReader == null || mJpegImageReader.getAndRetain() == null) { mJpegImageReader = new RefCountedAutoCloseable<>(ImageReader.newInstance( largestJpeg.getWidth(), largestJpeg.getHeight(), ImageFormat.JPEG, /*maxImages*/5)); } mJpegImageReader.get().setOnImageAvailableListener(mOnJpegImageAvailableListener, mBackgroundHandler); if (mRawImageReader == null || mRawImageReader.getAndRetain() == null) { mRawImageReader = new RefCountedAutoCloseable<>( ImageReader.newInstance(largestRaw.getWidth(), largestRaw.getHeight(), ImageFormat.RAW_SENSOR, /*maxImages*/ 5)); } mRawImageReader.get().setOnImageAvailableListener(mOnRawImageAvailableListener, mBackgroundHandler); mCharacteristics = characteristics; mCameraId = cameraId; } return true; } } catch (CameraAccessException e) { e.printStackTrace(); } // If we found no suitable cameras for capturing RAW, warn the user. ErrorDialog.buildErrorDialog("This device doesn't support capturing RAW photos").show(getFragmentManager(), "dialog"); return false; }