Example usage for android.graphics ImageFormat JPEG

Introduction

This page collects example usages of android.graphics.ImageFormat.JPEG.

Prototype

public static final int JPEG

Document

Compressed JPEG format.
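
A minimal sketch of the constant in use: ImageFormat.JPEG selects compressed JPEG output when sizing an ImageReader for still captures. The dimensions and buffer count below are illustrative values, not taken from the examples on this page.

import android.graphics.ImageFormat;
import android.media.ImageReader;

public class JpegReaderExample {
    public static ImageReader createJpegReader() {
        // ImageFormat.JPEG asks the pipeline for compressed JPEG buffers;
        // width, height, and maxImages are placeholder values.
        return ImageReader.newInstance(4032, 3024, ImageFormat.JPEG, 2);
    }
}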

Usage

From source file: com.obviousengine.android.focus.ZslFocusCamera.java

/**
 * Given an image reader, extracts the JPEG image bytes and then closes the
 * reader.
 *
 * @param img the image from which to extract jpeg bytes or compress to
 *            jpeg.
 * @return The bytes of the JPEG image. Newly allocated.
 */
private byte[] acquireJpegBytes(Image img) {
    ByteBuffer buffer;

    if (img.getFormat() == ImageFormat.JPEG) {
        Image.Plane plane0 = img.getPlanes()[0];
        buffer = plane0.getBuffer();

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        return imageBytes;
    } else {
        throw new RuntimeException("Unsupported image format.");
    }
}
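
A usage sketch for the method above. The caller is assumed to hold a JPEG-format ImageReader (the field name jpegReader is hypothetical), and the Image must be closed by the caller, since acquireJpegBytes does not close it.

Image img = jpegReader.acquireLatestImage(); // jpegReader: assumed JPEG ImageReader field
if (img != null) {
    try {
        byte[] jpegBytes = acquireJpegBytes(img);
        // hand jpegBytes to storage or further processing
    } finally {
        img.close(); // return the buffer to the reader's queue
    }
}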

From source file: freed.cam.apis.camera2.modules.PictureModuleApi2.java

private void SetBurst(int burst) {
    try {
        Log.d(TAG, "Set Burst to:" + burst);
        previewSize = cameraHolder.getSizeForPreviewDependingOnImageSize(
                cameraHolder.map.getOutputSizes(ImageFormat.YUV_420_888), cameraHolder.characteristics,
                mImageWidth, mImageHeight);
        if (cameraUiWrapper.getFocusPeakProcessor() != null) {
            cameraUiWrapper.getFocusPeakProcessor().kill();
        }
        int sensorOrientation = cameraHolder.characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
        int orientation = 0;
        switch (sensorOrientation) {
        case 90:
            orientation = 0;
            break;
        case 180:
            orientation = 90;
            break;
        case 270:
            orientation = 180;
            break;
        case 0:
            orientation = 270;
            break;
        }
        cameraHolder.CaptureSessionH.SetTextureViewSize(previewSize.getWidth(), previewSize.getHeight(),
                orientation, orientation + 180, false);
        SurfaceTexture texture = cameraHolder.CaptureSessionH.getSurfaceTexture();
        texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        previewsurface = new Surface(texture);

        cameraUiWrapper.getFocusPeakProcessor().Reset(previewSize.getWidth(), previewSize.getHeight());
        Log.d(TAG, "Previewsurface vailid:" + previewsurface.isValid());
        cameraUiWrapper.getFocusPeakProcessor().setOutputSurface(previewsurface);
        camerasurface = cameraUiWrapper.getFocusPeakProcessor().getInputSurface();
        cameraHolder.CaptureSessionH.AddSurface(camerasurface, true);

        if (picFormat.equals(KEYS.JPEG))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.JPEG, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW10))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW10, burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW_SENSOR))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW_SENSOR,
                    burst + 1);
        else if (picFormat.equals(CameraHolderApi2.RAW12))
            mImageReader = ImageReader.newInstance(mImageWidth, mImageHeight, ImageFormat.RAW12, burst + 1);
        cameraHolder.CaptureSessionH.AddSurface(mImageReader.getSurface(), false);
        cameraHolder.CaptureSessionH.CreateCaptureSession();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if (parameterHandler.Burst != null)
        parameterHandler.Burst.ThrowCurrentValueChanged(parameterHandler.Burst.GetValue());
}
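
The reader created above holds at most burst + 1 images, so each frame must be drained promptly. A minimal sketch of a listener that does this, assuming a background-thread Handler named mBackgroundHandler:

mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireNextImage();
        if (image != null) {
            try {
                // process the JPEG or RAW frame here
            } finally {
                image.close(); // free the slot so the burst queue never overflows
            }
        }
    }
}, mBackgroundHandler); // mBackgroundHandler: assumed Handler on a background thread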

From source file: com.ape.camera2raw.Camera2RawFragment.java

/**
 * Configure the necessary {@link Matrix} transformation to {@code mTextureView},
 * and start/restart the preview capture session if necessary.
 * <p/>
 * This method should be called after the camera state has been initialized in
 * setUpCameraOutputs.
 *
 * @param viewWidth  The width of {@code mTextureView}
 * @param viewHeight The height of {@code mTextureView}
 */
private void configureTransform(int viewWidth, int viewHeight) {
    Activity activity = getActivity();
    synchronized (mCameraStateLock) {
        if (null == mTextureView || null == activity) {
            return;
        }

        StreamConfigurationMap map = mCharacteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

        // For still image captures, we always use the largest available size.
        Size largestJpeg = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
                new CompareSizesByArea());

        // Find the rotation of the device relative to the native device orientation.
        int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        Point displaySize = new Point();
        activity.getWindowManager().getDefaultDisplay().getSize(displaySize);

        // Find the rotation of the device relative to the camera sensor's orientation.
        int totalRotation = sensorToDeviceRotation(mCharacteristics, deviceRotation);

        // Swap the view dimensions for calculation as needed if they are rotated relative to
        // the sensor.
        boolean swappedDimensions = totalRotation == 90 || totalRotation == 270;
        int rotatedViewWidth = viewWidth;
        int rotatedViewHeight = viewHeight;
        int maxPreviewWidth = displaySize.x;
        int maxPreviewHeight = displaySize.y;

        if (swappedDimensions) {
            rotatedViewWidth = viewHeight;
            rotatedViewHeight = viewWidth;
            maxPreviewWidth = displaySize.y;
            maxPreviewHeight = displaySize.x;
        }

        // Preview should not be larger than display size and 1080p.
        if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
            maxPreviewWidth = MAX_PREVIEW_WIDTH;
        }

        if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
            maxPreviewHeight = MAX_PREVIEW_HEIGHT;
        }

        // Find the best preview size for these view dimensions and configured JPEG size.
        Size previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), rotatedViewWidth,
                rotatedViewHeight, maxPreviewWidth, maxPreviewHeight, largestJpeg);

        if (swappedDimensions) {
            mTextureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
        } else {
            mTextureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
        }

        // Find rotation of device in degrees (reverse device orientation for front-facing
        // cameras).
        int rotation = (mCharacteristics
                .get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT)
                        ? (360 + ORIENTATIONS.get(deviceRotation)) % 360
                        : (360 - ORIENTATIONS.get(deviceRotation)) % 360;

        Matrix matrix = new Matrix();
        RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
        RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
        float centerX = viewRect.centerX();
        float centerY = viewRect.centerY();

        // Initially, output stream images from the Camera2 API will be rotated to the native
        // device orientation from the sensor's orientation, and the TextureView will default to
        // scaling these buffers to fill its view bounds.  If the aspect ratios and relative
        // orientations are correct, this is fine.
        //
        // However, if the device orientation has been rotated relative to its native
        // orientation so that the TextureView's dimensions are swapped relative to the
        // native device orientation, we must do the following to ensure the output stream
        // images are not incorrectly scaled by the TextureView:
        //   - Undo the scale-to-fill from the output buffer's dimensions (i.e. its dimensions
        //     in the native device orientation) to the TextureView's dimension.
        //   - Apply a scale-to-fill from the output buffer's rotated dimensions
        //     (i.e. its dimensions in the current device orientation) to the TextureView's
        //     dimensions.
        //   - Apply the rotation from the native device orientation to the current device
        //     rotation.
        if (Surface.ROTATION_90 == deviceRotation || Surface.ROTATION_270 == deviceRotation) {
            bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
            matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
            float scale = Math.max((float) viewHeight / previewSize.getHeight(),
                    (float) viewWidth / previewSize.getWidth());
            matrix.postScale(scale, scale, centerX, centerY);

        }
        matrix.postRotate(rotation, centerX, centerY);

        mTextureView.setTransform(matrix);

        // Start or restart the active capture session if the preview was initialized or
        // if its aspect ratio changed significantly.
        if (mPreviewSize == null || !checkAspectsEqual(previewSize, mPreviewSize)) {
            mPreviewSize = previewSize;
            if (mState != STATE_CLOSED) {
                createCameraPreviewSessionLocked();
            }
        }
    }
}
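
The CompareSizesByArea comparator referenced above is part of the same fragment; a sketch mirroring the standard Camera2 sample implementation, which orders Sizes by pixel area:

static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Cast to long so the multiplication cannot overflow on large sensors.
        return Long.signum((long) lhs.getWidth() * lhs.getHeight()
                - (long) rhs.getWidth() * rhs.getHeight());
    }
}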

From source file: com.android.camera2.its.ItsService.java

private void doCapture(JSONObject params) throws ItsException {
    try {
        // Parse the JSON to get the list of capture requests.
        List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(mCamera, params);

        // Set the output surface(s) and listeners.
        int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
        int numSurfaces = 0;
        try {
            mCountRawOrDng.set(0);
            mCountJpg.set(0);
            mCountYuv.set(0);
            mCountRaw10.set(0);
            mCountCapRes.set(0);
            mCaptureRawIsDng = false;
            mCaptureResults = new CaptureResult[requests.size()];

            JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
            if (jsonOutputSpecs != null) {
                numSurfaces = jsonOutputSpecs.length();
                if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
                    throw new ItsException("Too many output surfaces");
                }
                for (int i = 0; i < numSurfaces; i++) {
                    // Get the specified surface.
                    JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                    String sformat = surfaceObj.optString("format");
                    Size sizes[];
                    if ("yuv".equals(sformat) || "".equals(sformat)) {
                        // Default to YUV if no format is specified.
                        formats[i] = ImageFormat.YUV_420_888;
                        sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                    } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                        formats[i] = ImageFormat.JPEG;
                        sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                    } else if ("raw".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("raw10".equals(sformat)) {
                        formats[i] = ImageFormat.RAW10;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                    } else if ("dng".equals(sformat)) {
                        formats[i] = ImageFormat.RAW_SENSOR;
                        sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        mCaptureRawIsDng = true;
                    } else {
                        throw new ItsException("Unsupported format: " + sformat);
                    }
                    // If the size is omitted, then default to the largest allowed size for the
                    // format.
                    widths[i] = surfaceObj.optInt("width");
                    heights[i] = surfaceObj.optInt("height");
                    if (widths[i] <= 0) {
                        if (sizes == null || sizes.length == 0) {
                            throw new ItsException(String
                                    .format("Zero stream configs available for requested format: %s", sformat));
                        }
                        widths[i] = sizes[0].getWidth();
                    }
                    if (heights[i] <= 0) {
                        heights[i] = sizes[0].getHeight();
                    }
                }
            } else {
                // No surface(s) specified at all.
                // Default: a single output surface which is full-res YUV.
                Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                numSurfaces = 1;
                widths[0] = sizes[0].getWidth();
                heights[0] = sizes[0].getHeight();
                formats[0] = ImageFormat.YUV_420_888;
            }

            prepareCaptureReader(widths, heights, formats, numSurfaces);
            List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
            for (int i = 0; i < numSurfaces; i++) {
                outputSurfaces.add(mCaptureReaders[i].getSurface());
            }
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            for (int i = 0; i < numSurfaces; i++) {
                ImageReader.OnImageAvailableListener readerListener = createAvailableListener(mCaptureCallback);
                mCaptureReaders[i].setOnImageAvailableListener(readerListener, mSaveHandlers[i]);
            }

            // Plan for how many callbacks need to be received throughout the duration of this
            // sequence of capture requests. There is one callback per image surface, and one
            // callback for the CaptureResult, for each capture.
            int numCaptures = requests.size();
            mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1));

        } catch (CameraAccessException e) {
            throw new ItsException("Error configuring outputs", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error", e);
        }

        // Initiate the captures.
        for (int i = 0; i < requests.size(); i++) {
            // For DNG captures, need the LSC map to be available.
            if (mCaptureRawIsDng) {
                requests.get(i).set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
            }

            CaptureRequest.Builder req = requests.get(i);
            for (int j = 0; j < numSurfaces; j++) {
                req.addTarget(mCaptureReaders[j].getSurface());
            }
            mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
        }

        // Make sure all callbacks have been hit (wait until captures are done).
        // If no new callbacks arrive within the timeout period, then fail.
        int currentCount = mCountCallbacksRemaining.get();
        while (currentCount > 0) {
            try {
                Thread.sleep(TIMEOUT_CALLBACK * 1000);
            } catch (InterruptedException e) {
                throw new ItsException("Timeout failure", e);
            }
            int newCount = mCountCallbacksRemaining.get();
            if (newCount == currentCount) {
                throw new ItsException("No callback received within timeout");
            }
            currentCount = newCount;
        }
    } catch (android.hardware.camera2.CameraAccessException e) {
        throw new ItsException("Access error: ", e);
    }
}
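
prepareCaptureReader is internal to ItsService and not shown here; a hypothetical sketch of what it could do with the parsed arrays (the real implementation may differ, for example in how many images each reader buffers):

private void prepareCaptureReader(int[] widths, int[] heights, int[] formats, int numSurfaces) {
    mCaptureReaders = new ImageReader[numSurfaces];
    for (int i = 0; i < numSurfaces; i++) {
        // One reader per requested output surface; maxImages of 10 is an illustrative choice.
        mCaptureReaders[i] = ImageReader.newInstance(widths[i], heights[i], formats[i], 10);
    }
}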

From source file: com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Given an image reader, extracts the JPEG image bytes and then closes the
 * reader.
 *
 * @param img the image from which to extract jpeg bytes or compress to
 *            jpeg.
 * @param degrees the angle to rotate the image clockwise, in degrees. Rotation is
 *            only applied to YUV images.
 * @return The bytes of the JPEG image. Newly allocated.
 */
private byte[] acquireJpegBytes(Image img, int degrees) {
    ByteBuffer buffer;

    if (img.getFormat() == ImageFormat.JPEG) {
        Image.Plane plane0 = img.getPlanes()[0];
        buffer = plane0.getBuffer();

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        return imageBytes;
    } else if (img.getFormat() == ImageFormat.YUV_420_888) {
        buffer = mJpegByteBufferPool.acquire();
        if (buffer == null) {
            buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
        }

        int numBytes = JpegUtilNative.compressJpegFromYUV420Image(new AndroidImageProxy(img), buffer,
                JPEG_QUALITY, degrees);

        if (numBytes < 0) {
            throw new RuntimeException("Error compressing jpeg.");
        }

        buffer.limit(numBytes);

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);

        buffer.clear();
        mJpegByteBufferPool.release(buffer);

        return imageBytes;
    } else {
        throw new RuntimeException("Unsupported image format.");
    }
}
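
Whichever branch produced them, the returned bytes form a complete JPEG stream, so they decode directly with the standard BitmapFactory API; a short follow-up sketch (the variable image is assumed to be an Image in scope):

byte[] jpegBytes = acquireJpegBytes(image, 0); // 0: no rotation applied to YUV input
Bitmap bitmap = BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length);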

From source file: com.kjsaw.alcosys.ibacapp.IBAC.java

public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    if (isPreviewRunning) {
        camera.stopPreview();
    }

    Parameters parameters = camera.getParameters();
    List<Size> previewSizes = parameters.getSupportedPreviewSizes();
    Size previewSize = previewSizes.get(previewSizes.size() - 1);
    parameters.setPreviewSize(previewSize.width, previewSize.height);

    parameters.setPictureFormat(ImageFormat.JPEG);
    List<Size> listPictureSizes = parameters.getSupportedPictureSizes();
    Size pictureSize = listPictureSizes.get(listPictureSizes.size() - 1);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);

    camera.setParameters(parameters);

    try {
        camera.setPreviewDisplay(holder);
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (isCameraRunOnce) {
        startCamera();
    }
}
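
Because the parameters above set setPictureFormat(ImageFormat.JPEG), the legacy takePicture JPEG callback receives finished JPEG bytes directly; a minimal sketch:

camera.takePicture(null, null, new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        // data holds the compressed JPEG produced by the camera driver.
        // Preview stops after a still capture; restart it before the next shot.
        camera.startPreview();
    }
});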