List of usage examples for android.graphics.ImageFormat.RAW_SENSOR
int RAW_SENSOR
General raw camera sensor image format, usually representing a single-channel Bayer-mosaic image.
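Before configuring a RAW stream, a camera2 client typically verifies that the device advertises the RAW capability and that RAW_SENSOR appears in the stream configuration map. A minimal sketch, assuming a CameraCharacteristics instance already obtained via CameraManager.getCameraCharacteristics():

import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Size;

// Sketch: check whether a camera supports RAW_SENSOR output.
static boolean supportsRawSensor(CameraCharacteristics characteristics) {
    int[] caps = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
    boolean hasRawCapability = false;
    if (caps != null) {
        for (int cap : caps) {
            if (cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_RAW) {
                hasRawCapability = true;
            }
        }
    }
    StreamConfigurationMap map =
            characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    // Require both the capability flag and at least one RAW_SENSOR output size.
    Size[] rawSizes = (map == null) ? null : map.getOutputSizes(ImageFormat.RAW_SENSOR);
    return hasRawCapability && rawSizes != null && rawSizes.length > 0;
}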
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java
/**
 * PREVIEW STUFF
 */
@Override
public void startPreview() {
    picSize = appSettingsManager.pictureSize.get();
    Log.d(TAG, "Start Preview");
    largestImageSize = Collections.max(Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.JPEG)),
            new CompareSizesByArea());
    picFormat = appSettingsManager.pictureFormat.get();
    if (picFormat.equals("")) {
        picFormat = KEYS.JPEG;
        appSettingsManager.pictureFormat.set(KEYS.JPEG);
        parameterHandler.PictureFormat.onValueHasChanged(KEYS.JPEG);
    }
    if (picFormat.equals(KEYS.JPEG)) {
        String[] split = picSize.split("x");
        int width, height;
        if (split.length < 2) {
            mImageWidth = largestImageSize.getWidth();
            mImageHeight = largestImageSize.getHeight();
        } else {
            mImageWidth = Integer.parseInt(split[0]);
            mImageHeight = Integer.parseInt(split[1]);
        }
        // create new ImageReader with the size and format for the image
        Log.d(TAG, "ImageReader JPEG");
    } else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR)) {
        Log.d(TAG, "ImageReader RAW_SENSOR");
        largestImageSize = Collections.max(
                Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.RAW_SENSOR)),
                new CompareSizesByArea());
        mImageWidth = largestImageSize.getWidth();
        mImageHeight = largestImageSize.getHeight();
    } else if (picFormat.equals(CameraHolderApi2.RAW10)) {
        Log.d(TAG, "ImageReader RAW10");
        largestImageSize = Collections.max(Arrays.asList(cameraHolder.map.getOutputSizes(ImageFormat.RAW10)),
                new CompareSizesByArea());
        mImageWidth = largestImageSize.getWidth();
        mImageHeight = largestImageSize.getHeight();
    }
    int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
    int orientationToSet = (360 + cameraUiWrapper.getActivityInterface().getOrientation() + sensorOrientation) % 360;
    if (appSettingsManager.getApiString(AppSettingsManager.SETTING_OrientationHack).equals(KEYS.ON))
        orientationToSet = (360 + cameraUiWrapper.getActivityInterface().getOrientation() + sensorOrientation + 180) % 360;
    cameraHolder.SetParameter(CaptureRequest.JPEG_ORIENTATION, orientationToSet);
    // Here, we create a CameraCaptureSession for camera preview
    if (parameterHandler.Burst == null)
        SetBurst(1);
    else
        SetBurst(parameterHandler.Burst.GetValue());
}
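The CompareSizesByArea comparator used above to pick the largest RAW_SENSOR (or JPEG/RAW10) output size is not defined in this file. A sketch of what it usually looks like, modeled on the standard Camera2Basic sample (the class name matches the call site; the body is an assumption):

import android.util.Size;
import java.util.Comparator;

// Compares two Sizes by total pixel area, for use with Collections.max().
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Cast to long to avoid overflow when multiplying large dimensions.
        return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                - (long) rhs.getWidth() * rhs.getHeight());
    }
}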
From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java
private void SetBurst(int burst) {
    try {
        Log.d(TAG, "Set Burst to:" + burst);
        previewSize = cameraHolder.getSizeForPreviewDependingOnImageSize(
                cameraHolder.map.getOutputSizes(ImageFormat.YUV_420_888), cameraHolder.characteristics,
                mImageWidth, mImageHeight);
        if (cameraUiWrapper.getFocusPeakProcessor() != null) {
            cameraUiWrapper.getFocusPeakProcessor().kill();
        }
        int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        int orientation = 0;
        switch (sensorOrientation) {
        case 90:
            orientation = 0;
            break;
        case 180:
            orientation = 90;
            break;
        case 270:
            orientation = 180;
            break;
        case 0:
            orientation = 270;
            break;
        }
        cameraHolder.CaptureSessionH.SetTextureViewSize(previewSize.getWidth(), previewSize.getHeight(),
                orientation, orientation + 180, false);
        SurfaceTexture texture = cameraHolder.CaptureSessionH.getSurfaceTexture();
        texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        previewsurface = new Surface(texture);
        cameraUiWrapper.getFocusPeakProcessor().Reset(previewSize.getWidth(), previewSize.getHeight());
        Log.d(TAG, "Previewsurface valid:" + previewsurface.isValid());
        cameraUiWrapper.getFocusPeakProcessor().setOutputSurface(previewsurface);
        camerasurface = cameraUiWrapper.getFocusPeakProcessor().getInputSurface();
        cameraHolder.CaptureSessionH.AddSurface(camerasurface, true);
        if (picFormat.equals(KEYS.JPEG))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.JPEG, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW10))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW10, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW_SENSOR, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW12))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW12, burst + 1);
        cameraHolder.CaptureSessionH.AddSurface(mImageReader.getSurface(), false);
        cameraHolder.CaptureSessionH.CreateCaptureSession();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if (parameterHandler.Burst != null)
        parameterHandler.Burst.ThrowCurrentValueChanged(parameterHandler.Burst.GetValue());
}
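When an ImageReader like the one created above delivers a RAW_SENSOR frame, the image carries a single plane of 16-bit Bayer samples. A minimal sketch of a listener draining such a reader (the listener variable and the little-endian sample order are assumptions; acquireNextImage(), getPlanes(), getRowStride(), and getPixelStride() are the real ImageReader/Image APIs):

import android.media.Image;
import android.media.ImageReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Sketch: inspect the single 16-bit plane of a RAW_SENSOR image.
ImageReader.OnImageAvailableListener rawListener = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireNextImage();
        if (image == null) {
            return;
        }
        try {
            Image.Plane plane = image.getPlanes()[0]; // RAW_SENSOR has exactly one plane
            ByteBuffer buffer = plane.getBuffer().order(ByteOrder.LITTLE_ENDIAN);
            int rowStride = plane.getRowStride();     // bytes per row; may exceed width * 2
            int pixelStride = plane.getPixelStride(); // 2 bytes per 16-bit sample
            // First sample of the Bayer mosaic, read as an unsigned 16-bit value.
            int firstSample = buffer.getShort(0) & 0xFFFF;
        } finally {
            image.close(); // always release the buffer back to the reader
        }
    }
};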
From source file: com.android.camera2.its.ItsService.java
private void doCapture(JSONObject params) throws ItsException {
    try {
        // Parse the JSON to get the list of capture requests.
        List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(mCamera, params);

        // Set the output surface(s) and listeners.
        int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int numSurfaces = 0;
        try {
            mCountRawOrDng.set(0);
            mCountJpg.set(0);
            mCountYuv.set(0);
            mCountRaw10.set(0);
            mCountCapRes.set(0);
            mCaptureRawIsDng = false;
            mCaptureResults = new CaptureResult[requests.size()];

            JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
            if (jsonOutputSpecs != null) {
                numSurfaces = jsonOutputSpecs.length();
                if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
                    throw new ItsException("Too many output surfaces");
                }
                for (int i = 0; i < numSurfaces; i++) {
                    // Get the specified surface.
                    JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                    String sformat = surfaceObj.optString("format");
                    Size sizes[];
                    if ("yuv".equals(sformat) || "".equals(sformat)) {
                        // Default to YUV if no format is specified.
                        formats[i] = ImageFormat.YUV_420_888;
                        sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                    } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                        formats[i] = ImageFormat.JPEG;
                        sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                    } else if ("raw".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("raw10".equals(sformat)) {
                        formats[i] = ImageFormat.RAW10;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("dng".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        mCaptureRawIsDng = true;
                    } else {
                        throw new ItsException("Unsupported format: " + sformat);
                    }
                    // If the size is omitted, then default to the largest allowed size for the
                    // format.
                    widths[i] = surfaceObj.optInt("width");
                    heights[i] = surfaceObj.optInt("height");
                    if (widths[i] <= 0) {
                        if (sizes == null || sizes.length == 0) {
                            throw new ItsException(String.format(
                                    "Zero stream configs available for requested format: %s", sformat));
                        }
                        widths[i] = sizes[0].getWidth();
                    }
                    if (heights[i] <= 0) {
                        heights[i] = sizes[0].getHeight();
                    }
                }
            } else {
                // No surface(s) specified at all.
                // Default: a single output surface which is full-res YUV.
                Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                numSurfaces = 1;
                widths[0] = sizes[0].getWidth();
                heights[0] = sizes[0].getHeight();
                formats[0] = ImageFormat.YUV_420_888;
            }

            prepareCaptureReader(widths, heights, formats, numSurfaces);
            List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
            for (int i = 0; i < numSurfaces; i++) {
                outputSurfaces.add(mCaptureReaders[i].getSurface());
            }
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            for (int i = 0; i < numSurfaces; i++) {
                ImageReader.OnImageAvailableListener readerListener = createAvailableListener(mCaptureCallback);
                mCaptureReaders[i].setOnImageAvailableListener(readerListener, mSaveHandlers[i]);
            }

            // Plan for how many callbacks need to be received throughout the duration of this
            // sequence of capture requests. There is one callback per image surface, and one
            // callback for the CaptureResult, for each capture.
            int numCaptures = requests.size();
            mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1));
        } catch (CameraAccessException e) {
            throw new ItsException("Error configuring outputs", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error", e);
        }

        // Initiate the captures.
        for (int i = 0; i < requests.size(); i++) {
            // For DNG captures, need the LSC map to be available.
            if (mCaptureRawIsDng) {
                requests.get(i).set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
            }
            CaptureRequest.Builder req = requests.get(i);
            for (int j = 0; j < numSurfaces; j++) {
                req.addTarget(mCaptureReaders[j].getSurface());
            }
            mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
        }

        // Make sure all callbacks have been hit (wait until captures are done).
        // If no new callbacks are received within a timeout, then fail.
        int currentCount = mCountCallbacksRemaining.get();
        while (currentCount > 0) {
            try {
                Thread.sleep(TIMEOUT_CALLBACK * 1000);
            } catch (InterruptedException e) {
                throw new ItsException("Timeout failure", e);
            }
            int newCount = mCountCallbacksRemaining.get();
            if (newCount == currentCount) {
                throw new ItsException("No callback received within timeout");
            }
            currentCount = newCount;
        }
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    }
}
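The "dng" branch above requests RAW_SENSOR frames and enables the lens shading map (STATISTICS_LENS_SHADING_MAP_MODE) because DngCreator uses that metadata when writing a DNG file. A minimal sketch of turning a RAW_SENSOR Image and its capture result into a .dng file; the method name, parameters, and file path are placeholders, while DngCreator and writeImage() are the platform APIs:

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.DngCreator;
import android.hardware.camera2.TotalCaptureResult;
import android.media.Image;
import java.io.FileOutputStream;
import java.io.IOException;

// Sketch: persist a RAW_SENSOR capture as a DNG file. `characteristics`,
// `result`, and `rawImage` are assumed to come from the capture pipeline.
void saveAsDng(CameraCharacteristics characteristics, TotalCaptureResult result,
        Image rawImage, String path) throws IOException {
    DngCreator dngCreator = new DngCreator(characteristics, result);
    try (FileOutputStream out = new FileOutputStream(path)) {
        dngCreator.writeImage(out, rawImage); // embeds sensor metadata into the DNG
    } finally {
        dngCreator.close();
        rawImage.close();
    }
}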