List of usage examples for android.graphics.ImageFormat.RAW10
int RAW10
Android 10-bit raw format
This is a single-plane, 10-bit per pixel, densely packed (in each row), unprocessed format, usually representing raw Bayer-pattern images coming from an image sensor.
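Per the platform documentation, "densely packed" means that four consecutive pixels occupy exactly five bytes: the first four bytes hold bits 9..2 of each pixel, and the fifth byte holds the two low bits of all four pixels. The following is a minimal sketch of unpacking one packed row into 16-bit values; unpackRaw10Row is a hypothetical helper, not part of the Android SDK.

public static short[] unpackRaw10Row(byte[] packed, int width) {
    // Assumes packed holds at least (width / 4) * 5 valid bytes, i.e. one
    // packed RAW10 row with width a multiple of 4 (row padding excluded).
    short[] out = new short[width];
    for (int i = 0; i < width; i += 4) {
        int base = (i / 4) * 5;
        int lowBits = packed[base + 4] & 0xFF; // low 2 bits of the next 4 pixels
        for (int j = 0; j < 4; j++) {
            int high = packed[base + j] & 0xFF;    // bits 9..2 of pixel i + j
            int low = (lowBits >> (j * 2)) & 0x03; // bits 1..0 of pixel i + j
            out[i + j] = (short) ((high << 2) | low);
        }
    }
    return out;
}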
From source file: Main.java
public static String translatePreviewFormat(int supportedPreviewFormat) {
    switch (supportedPreviewFormat) {
        case ImageFormat.JPEG:
            return "ImageFormat.JPEG";
        case ImageFormat.NV16:
            return "ImageFormat.NV16";
        case ImageFormat.NV21:
            return "ImageFormat.NV21";
        case ImageFormat.RAW10:
            return "ImageFormat.RAW10";
        case ImageFormat.RAW_SENSOR:
            return "ImageFormat.RAW_SENSOR";
        case ImageFormat.RGB_565:
            return "ImageFormat.RGB_565";
        case ImageFormat.UNKNOWN:
            return "ImageFormat.UNKNOWN";
        case ImageFormat.YUV_420_888:
            return "ImageFormat.YUV_420_888";
        case ImageFormat.YUY2:
            return "ImageFormat.YUY2";
        case ImageFormat.YV12:
            return "ImageFormat.YV12";
        default:
            return "xxxxxxxxdefault";
    }
}
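One plausible use for a helper like this is dumping the preview formats a device advertises to logcat. A short usage sketch, assuming an already-opened legacy android.hardware.Camera instance named camera (the variable is an assumption, not from the source):

// Log every preview format reported by the legacy Camera API.
for (Integer fmt : camera.getParameters().getSupportedPreviewFormats()) {
    Log.d("FormatDump", translatePreviewFormat(fmt));
}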
From source file: Main.java
private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
        case ImageFormat.YUV_420_888:
        case ImageFormat.NV21:
        case ImageFormat.YV12:
            return 3 == planes.length;
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.JPEG:
            return 1 == planes.length;
        default:
            return false;
    }
}
From source file: Main.java
private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
        case ImageFormat.YUV_420_888:
        case ImageFormat.NV21:
        case ImageFormat.YV12:
            return 3 == planes.length;
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.RAW12:
        case ImageFormat.JPEG:
            return 1 == planes.length;
        default:
            return false;
    }
}
From source file: com.android.camera2.its.ItsUtils.java
public static byte[] getDataFromImage(Image image) throws ItsException {
    int format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = null;
    // Read image data
    Plane[] planes = image.getPlanes();
    // Check image validity
    if (!checkAndroidImageFormat(image)) {
        throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat());
    }
    if (format == ImageFormat.JPEG) {
        // JPEG doesn't have a pixelStride or rowStride; treat it as a 1D buffer.
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
            || format == ImageFormat.RAW10) {
        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
            Logt.i(TAG, String.format(
                    "Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
                    format, i, width, height, rowStride, pixelStride));
            // For multi-planar YUV images, assume YUV420 with 2x2 chroma subsampling.
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row.
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    // Advance the buffer past the remainder of the row stride.
                    buffer.position(buffer.position() + rowStride - length);
                    offset += length;
                } else {
                    // Generic case: works for any pixelStride, but slower.
                    // Use an intermediate buffer to avoid reading byte-by-byte from a
                    // DirectByteBuffer, which is very bad for performance.
                    // Also avoid out-of-bounds access by only reading the bytes still
                    // available in the ByteBuffer.
                    int readSize = rowStride;
                    if (buffer.remaining() < readSize) {
                        readSize = buffer.remaining();
                    }
                    buffer.get(rowData, 0, readSize);
                    if (pixelStride >= 1) {
                        for (int col = 0; col < w; col++) {
                            data[offset++] = rowData[col * pixelStride];
                        }
                    } else {
                        // A pixelStride of 0 can mean a pixel isn't a multiple of 8 bits,
                        // for example with RAW10. Just copy the buffer, dropping any
                        // padding at the end of the row.
                        int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
                        System.arraycopy(rowData, 0, data, offset, length);
                        offset += length;
                    }
                }
            }
        }
        Logt.i(TAG, String.format("Done reading image, format %d", format));
        return data;
    } else {
        throw new ItsException("Unsupported image format: " + format);
    }
}
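The size math in the RAW10 branch is worth making concrete: ImageFormat.getBitsPerPixel(ImageFormat.RAW10) returns 10, so a packed row of w pixels occupies w * 10 / 8 bytes, and anything between that and rowStride is padding. An illustrative calculation (the 4032x3024 size is an assumed example, not taken from the source):

// Expected RAW10 buffer sizes for a hypothetical 4032x3024 sensor.
int width = 4032, height = 3024;
int bppBits = ImageFormat.getBitsPerPixel(ImageFormat.RAW10); // 10
int packedRowBytes = width * bppBits / 8;        // 4032 * 10 / 8 = 5040 bytes per row
int payloadBytes = width * height * bppBits / 8; // 15240960 bytes, excluding stride padding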
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java
private void saveImage(ImageHolder image) {
    int burstcount = parameterHandler.Burst.GetValue() + 1;
    String f;
    if (burstcount > 1)
        f = cameraUiWrapper.getActivityInterface().getStorageHandler()
                .getNewFilePath(appSettingsManager.GetWriteExternal(), "_" + imagecount);
    else
        f = cameraUiWrapper.getActivityInterface().getStorageHandler()
                .getNewFilePath(appSettingsManager.GetWriteExternal(), "");
    File file = null;
    imagecount++;
    switch (image.getImage().getFormat()) {
        case ImageFormat.JPEG:
            file = new File(f + ".jpg");
            process_jpeg(image.getImage(), file);
            break;
        case ImageFormat.RAW10:
            file = new File(f + ".dng");
            process_rawWithDngConverter(image, DngProfile.Mipi, file);
            break;
        case ImageFormat.RAW12:
            file = new File(f + ".dng");
            process_rawWithDngConverter(image, DngProfile.Mipi12, file);
            break;
        case ImageFormat.RAW_SENSOR:
            file = new File(f + ".dng");
            if (appSettingsManager.getDevice() == Devices.Moto_X2k14
                    || appSettingsManager.getDevice() == Devices.OnePlusTwo)
                process_rawWithDngConverter(image, DngProfile.Mipi16, file);
            else
                process_rawSensor(image, file);
            break;
    }
    internalFireOnWorkDone(file);
    isWorking = false;
    changeCaptureState(CaptureStates.image_capture_stop);
    if (burstcount == imagecount) {
        finishCapture(captureBuilder);
    }
}
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java
/**
 * PREVIEW STUFF
 */
@Override
public void startPreview() {
    picSize = appSettingsManager.pictureSize.get();
    Log.d(TAG, "Start Preview");
    largestImageSize = Collections.max(Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.JPEG)),
            new CompareSizesByArea());
    picFormat = appSettingsManager.pictureFormat.get();
    if (picFormat.equals("")) {
        picFormat = KEYS.JPEG;
        appSettingsManager.pictureFormat.set(KEYS.JPEG);
        parameterHandler.PictureFormat.onValueHasChanged(KEYS.JPEG);
    }
    if (picFormat.equals(KEYS.JPEG)) {
        String[] split = picSize.split("x");
        if (split.length < 2) {
            mImageWidth = largestImageSize.getWidth();
            mImageHeight = largestImageSize.getHeight();
        } else {
            mImageWidth = Integer.parseInt(split[0]);
            mImageHeight = Integer.parseInt(split[1]);
        }
        // Create a new ImageReader with the size and format for the image
        Log.d(TAG, "ImageReader JPEG");
    } else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR)) {
        Log.d(TAG, "ImageReader RAW_SENSOR");
        largestImageSize = Collections.max(
                Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                new CompareSizesByArea());
        mImageWidth = largestImageSize.getWidth();
        mImageHeight = largestImageSize.getHeight();
    } else if (picFormat.equals(CameraHolderApi2.RAW10)) {
        Log.d(TAG, "ImageReader RAW10");
        largestImageSize = Collections.max(Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.RAW10)),
                new CompareSizesByArea());
        mImageWidth = largestImageSize.getWidth();
        mImageHeight = largestImageSize.getHeight();
    }
    int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    int orientationToSet = (360 + cameraUiWrapper.getActivityInterface().getOrientation()
            + sensorOrientation) % 360;
    if (appSettingsManager.getApiString(AppSettingsManager.SETTING_OrientationHack).equals(KEYS.ON))
        orientationToSet = (360 + cameraUiWrapper.getActivityInterface().getOrientation()
                + sensorOrientation + 180) % 360;
    cameraHolder.SetParameter(CaptureRequest.JPEG_ORIENTATION, orientationToSet);
    // Here, we create a CameraCaptureSession for camera preview
    if (parameterHandler.Burst == null)
        SetBurst(1);
    else
        SetBurst(parameterHandler.Burst.GetValue());
}
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java
private void SetBurst(int burst) {
    try {
        Log.d(TAG, "Set Burst to:" + burst);
        previewSize = cameraHolder.getSizeForPreviewDependingOnImageSize(
                cameraHolder.map.getOutputSizes(ImageFormat.YUV_420_888), cameraHolder.characteristics,
                mImageWidth, mImageHeight);
        if (cameraUiWrapper.getFocusPeakProcessor() != null) {
            cameraUiWrapper.getFocusPeakProcessor().kill();
        }
        int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        int orientation = 0;
        switch (sensorOrientation) {
            case 90:
                orientation = 0;
                break;
            case 180:
                orientation = 90;
                break;
            case 270:
                orientation = 180;
                break;
            case 0:
                orientation = 270;
                break;
        }
        cameraHolder.CaptureSessionH.SetTextureViewSize(previewSize.getWidth(), previewSize.getHeight(),
                orientation, orientation + 180, false);
        SurfaceTexture texture = cameraHolder.CaptureSessionH.getSurfaceTexture();
        texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        previewsurface = new Surface(texture);
        cameraUiWrapper.getFocusPeakProcessor().Reset(previewSize.getWidth(), previewSize.getHeight());
        Log.d(TAG, "Previewsurface valid:" + previewsurface.isValid());
        cameraUiWrapper.getFocusPeakProcessor().setOutputSurface(previewsurface);
        camerasurface = cameraUiWrapper.getFocusPeakProcessor().getInputSurface();
        cameraHolder.CaptureSessionH.AddSurface(camerasurface, true);
        if (picFormat.equals(KEYS.JPEG))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.JPEG, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW10))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW10, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW_SENSOR,
                    burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW12))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW12, burst + 1);
        cameraHolder.CaptureSessionH.AddSurface(mImageReader.getSurface(), false);
        cameraHolder.CaptureSessionH.CreateCaptureSession();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if (parameterHandler.Burst != null)
        parameterHandler.Burst.ThrowCurrentValueChanged(parameterHandler.Burst.GetValue());
}
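Stripped of the app-specific wrappers, the RAW10 branch of this method reduces to standard camera2 ImageReader setup. A minimal sketch, assuming the device lists RAW10 in its StreamConfigurationMap; the characteristics and backgroundHandler variables are assumed to be in scope:

// Create a RAW10 capture target and log the row stride of incoming frames.
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size raw10Size = map.getOutputSizes(ImageFormat.RAW10)[0];
ImageReader reader = ImageReader.newInstance(raw10Size.getWidth(), raw10Size.getHeight(),
        ImageFormat.RAW10, 2 /* maxImages */);
reader.setOnImageAvailableListener(r -> {
    try (Image img = r.acquireNextImage()) {
        // RAW10 images report a single plane; rowStride describes the packed row.
        Log.d("Raw10", "rowStride=" + img.getPlanes()[0].getRowStride());
    }
}, backgroundHandler);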
From source file: com.android.camera2.its.ItsService.java
private void doCapture(JSONObject params) throws ItsException {
    try {
        // Parse the JSON to get the list of capture requests.
        List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(mCamera, params);
        // Set the output surface(s) and listeners.
        int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int numSurfaces = 0;
        try {
            mCountRawOrDng.set(0);
            mCountJpg.set(0);
            mCountYuv.set(0);
            mCountRaw10.set(0);
            mCountCapRes.set(0);
            mCaptureRawIsDng = false;
            mCaptureResults = new CaptureResult[requests.size()];
            JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
            if (jsonOutputSpecs != null) {
                numSurfaces = jsonOutputSpecs.length();
                if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
                    throw new ItsException("Too many output surfaces");
                }
                for (int i = 0; i < numSurfaces; i++) {
                    // Get the specified surface.
                    JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                    String sformat = surfaceObj.optString("format");
                    Size sizes[];
                    if ("yuv".equals(sformat) || "".equals(sformat)) {
                        // Default to YUV if no format is specified.
                        formats[i] = ImageFormat.YUV_420_888;
                        sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                    } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                        formats[i] = ImageFormat.JPEG;
                        sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                    } else if ("raw".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("raw10".equals(sformat)) {
                        formats[i] = ImageFormat.RAW10;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("dng".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        mCaptureRawIsDng = true;
                    } else {
                        throw new ItsException("Unsupported format: " + sformat);
                    }
                    // If the size is omitted, default to the largest allowed size for
                    // the format.
                    widths[i] = surfaceObj.optInt("width");
                    heights[i] = surfaceObj.optInt("height");
                    if (widths[i] <= 0) {
                        if (sizes == null || sizes.length == 0) {
                            throw new ItsException(String.format(
                                    "Zero stream configs available for requested format: %s", sformat));
                        }
                        widths[i] = sizes[0].getWidth();
                    }
                    if (heights[i] <= 0) {
                        heights[i] = sizes[0].getHeight();
                    }
                }
            } else {
                // No surface(s) specified at all.
                // Default: a single output surface which is full-res YUV.
                Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                numSurfaces = 1;
                widths[0] = sizes[0].getWidth();
                heights[0] = sizes[0].getHeight();
                formats[0] = ImageFormat.YUV_420_888;
            }
            prepareCaptureReader(widths, heights, formats, numSurfaces);
            List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
            for (int i = 0; i < numSurfaces; i++) {
                outputSurfaces.add(mCaptureReaders[i].getSurface());
            }
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);
            for (int i = 0; i < numSurfaces; i++) {
                ImageReader.OnImageAvailableListener readerListener =
                        createAvailableListener(mCaptureCallback);
                mCaptureReaders[i].setOnImageAvailableListener(readerListener, mSaveHandlers[i]);
            }
            // Plan for how many callbacks need to be received throughout the duration of
            // this sequence of capture requests. There is one callback per image surface,
            // and one callback for the CaptureResult, for each capture.
            int numCaptures = requests.size();
            mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1));
        } catch (CameraAccessException e) {
            throw new ItsException("Error configuring outputs", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error", e);
        }
        // Initiate the captures.
        for (int i = 0; i < requests.size(); i++) {
            // For DNG captures, the lens shading map needs to be available.
            if (mCaptureRawIsDng) {
                requests.get(i).set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
            }
            CaptureRequest.Builder req = requests.get(i);
            for (int j = 0; j < numSurfaces; j++) {
                req.addTarget(mCaptureReaders[j].getSurface());
            }
            mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
        }
        // Make sure all callbacks have been hit (wait until captures are done).
        // If no new callbacks arrive within the timeout period, then fail.
        int currentCount = mCountCallbacksRemaining.get();
        while (currentCount > 0) {
            try {
                Thread.sleep(TIMEOUT_CALLBACK * 1000);
            } catch (InterruptedException e) {
                throw new ItsException("Timeout failure", e);
            }
            int newCount = mCountCallbacksRemaining.get();
            if (newCount == currentCount) {
                throw new ItsException("No callback received within timeout");
            }
            currentCount = newCount;
        }
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    }
}
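Based on the parsing above, the RAW10 path is selected by an output spec whose "format" field is "raw10"; when "width" and "height" are omitted or non-positive, the largest available raw size is used. A minimal sketch of building such a spec with org.json (the field names come from the parsing code; the rest is an assumption):

// An output spec that selects the RAW10 path; may throw org.json.JSONException.
JSONObject spec = new JSONObject();
spec.put("format", "raw10"); // no width/height, so the largest raw size is chosen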