Example usage for android.media Image getPlanes

Introduction

On this page you can find usage examples for the android.media.Image method getPlanes().

Prototype

public abstract Plane[] getPlanes();

Document

Get the array of pixel planes for this Image.
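
A minimal sketch of inspecting each plane of an acquired Image (logPlaneInfo and the log tag are illustrative names, not drawn from the examples below):

private static void logPlaneInfo(Image image) {
    // Each plane exposes its own ByteBuffer plus a row stride and a pixel stride.
    Image.Plane[] planes = image.getPlanes();
    for (int i = 0; i < planes.length; i++) {
        Image.Plane plane = planes[i];
        ByteBuffer buffer = plane.getBuffer();
        // The row stride can exceed width * bytesPerPixel because of alignment padding.
        Log.d("getPlanes", String.format("plane %d: rowStride=%d, pixelStride=%d, bytes=%d",
                i, plane.getRowStride(), plane.getPixelStride(), buffer.remaining()));
    }
}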

Usage

From source file:Main.java

private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
    case ImageFormat.YUV_420_888:
    case ImageFormat.NV21:
    case ImageFormat.YV12:
        return 3 == planes.length;
    case ImageFormat.RAW_SENSOR:
    case ImageFormat.RAW10:
    case ImageFormat.JPEG:
        return 1 == planes.length;
    default:
        return false;
    }
}

From source file:Main.java

private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
    case ImageFormat.YUV_420_888:
    case ImageFormat.NV21:
    case ImageFormat.YV12:
        return 3 == planes.length;
    case ImageFormat.RAW_SENSOR:
    case ImageFormat.RAW10:
    case ImageFormat.RAW12:
    case ImageFormat.JPEG:
        return 1 == planes.length;
    default:
        return false;
    }
}
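
Both variants encode the plane counts these samples rely on: the planar YUV formats are expected to report three planes from getPlanes(), while JPEG and the RAW formats report a single plane. The second variant merely adds the RAW12 case.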

From source file:com.android.camera2.its.ItsUtils.java

public static byte[] getDataFromImage(Image image) throws ItsException {
    int format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = null;

    // Read image data
    Plane[] planes = image.getPlanes();

    // Check image validity
    if (!checkAndroidImageFormat(image)) {
        throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat());
    }

    if (format == ImageFormat.JPEG) {
        // JPEG doesn't have a pixel stride or row stride; treat it as a 1D buffer.
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
            || format == ImageFormat.RAW10) {
        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
            Logt.i(TAG, String.format("Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
                    format, i, width, height, rowStride, pixelStride));
            // For multi-planar YUV images, assume YUV420 with 2x2 chroma subsampling.
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    // Advance buffer the remainder of the row stride
                    buffer.position(buffer.position() + rowStride - length);
                    offset += length;
                } else {
                    // Generic case: works for any pixelStride, but is slower.
                    // Use an intermediate buffer to avoid reading byte-by-byte from the
                    // DirectByteBuffer, which is very bad for performance.
                    // Also avoid out-of-bounds access by reading only the bytes
                    // still available in the buffer.
                    int readSize = rowStride;
                    if (buffer.remaining() < readSize) {
                        readSize = buffer.remaining();
                    }
                    buffer.get(rowData, 0, readSize);
                    if (pixelStride >= 1) {
                        for (int col = 0; col < w; col++) {
                            data[offset++] = rowData[col * pixelStride];
                        }
                    } else {
                        // A pixelStride of 0 can mean the pixel isn't a multiple of 8 bits,
                        // for example with RAW10. Just copy the buffer, dropping any padding
                        // at the end of the row.
                        int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
                        System.arraycopy(rowData, 0, data, offset, length);
                        offset += length;
                    }
                }
            }
        }
        Logt.i(TAG, String.format("Done reading image, format %d", format));
        return data;
    } else {
        throw new ItsException("Unsupported image format: " + format);
    }
}
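
Two read paths are worth noting above: when pixelStride equals the per-pixel byte count, each row is contiguous and can be bulk-copied, with the buffer position then advanced past the row-stride padding; otherwise a whole row is copied into an intermediate array first, because reading a DirectByteBuffer one byte at a time is very slow. This is also why rowStride rather than width drives the reads: a plane's row stride may exceed width * bytesPerPixel due to alignment padding.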

From source file:MainActivity.java

protected void takePicture(View view) {
    if (null == mCameraDevice) {
        return;
    }
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraDevice.getId());
        StreamConfigurationMap configurationMap = characteristics
                .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (configurationMap == null)
            return;
        Size largest = Collections.max(Arrays.asList(configurationMap.getOutputSizes(ImageFormat.JPEG)),
                new CompareSizesByArea());
        ImageReader reader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(), ImageFormat.JPEG,
                1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(2);
        outputSurfaces.add(reader.getSurface());
        outputSurfaces.add(new Surface(mTextureView.getSurfaceTexture()));
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(reader.getSurface());
        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = null;
                try {
                    image = reader.acquireLatestImage();
                    if (image == null) {
                        return; // acquireLatestImage() can return null if no frame is queued
                    }
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.capacity()];
                    buffer.get(bytes);
                    OutputStream output = new FileOutputStream(getPictureFile());
                    output.write(bytes);
                    output.close();
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (image != null) {
                        image.close();
                    }
                }
            }
        };
        HandlerThread thread = new HandlerThread("CameraPicture");
        thread.start();
        final Handler backgroundHandler = new Handler(thread.getLooper());
        reader.setOnImageAvailableListener(readerListener, backgroundHandler);
        final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                    TotalCaptureResult result) {
                super.onCaptureCompleted(session, request, result);
                Toast.makeText(MainActivity.this, "Picture Saved", Toast.LENGTH_SHORT).show();
                startPreview(session);
            }
        };
        mCameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    session.capture(captureBuilder.build(), captureCallback, backgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {
            }
        }, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
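
Note that the listener closes the Image in a finally block. The ImageReader here is created with maxImages of 1, so failing to close an acquired Image would block all further captures.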

From source file:com.sien.cpshoot.screencapture.ScreenCaptureFragment.java

private void saveImage(ImageReader mImageReader, int mWidth, int mHeight) {
    if (mImageReader == null)
        return;

    Image image = mImageReader.acquireLatestImage();
    if (image == null)
        return;
    final Image.Plane[] planes = image.getPlanes();
    if (planes.length <= 0)
        return;

    final ByteBuffer buffer = planes[0].getBuffer();
    int offset = 0;
    int pixelStride = planes[0].getPixelStride();
    int rowStride = planes[0].getRowStride();
    int rowPadding = rowStride - pixelStride * mWidth;
    // Assuming the capture surface was created with PixelFormat.RGBA_8888, the bitmap
    // must be ARGB_8888 for copyPixelsFromBuffer to interpret the pixel data correctly.
    Bitmap bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
    bitmap.copyPixelsFromBuffer(buffer);
    image.close();

    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss");
    String strDate = dateFormat.format(new java.util.Date());
    String pathImage = Environment.getExternalStorageDirectory().getPath() + "/Pictures/";
    String nameImage = pathImage + strDate + ".png";
    if (bitmap != null) {
        try {
            File fileImage = new File(nameImage);
            if (!fileImage.exists()) {
                fileImage.createNewFile();
            }
            FileOutputStream out = new FileOutputStream(fileImage);
            if (out != null) {
                bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
                out.flush();
                out.close();
                Toast.makeText(getActivity(), "Screen capture saved", Toast.LENGTH_SHORT).show();
                Intent media = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
                Uri contentUri = Uri.fromFile(fileImage);
                media.setData(contentUri);
                getActivity().sendBroadcast(media);

                beginCrop(contentUri);
            }
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
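
The bitmap is deliberately allocated rowStride / pixelStride pixels wide (mWidth plus the per-row padding) so that copyPixelsFromBuffer stays aligned with the buffer's row stride; the extra columns on the right-hand edge are padding and can be cropped away afterwards.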

From source file:com.team254.cheezdroid.SelfieModeFragment.java

/**
 * Repacks a YUV_420_888 {@link Image} into an NV21-style byte array: the Y plane is copied
 * first, followed by the V plane's buffer, which already holds the interleaved VU sequence
 * NV21 expects when the chroma planes share a buffer with a pixel stride of 2.
 */

private byte[] convertYUV420ToN21(Image imgYUV420) {
    byte[] rez = new byte[0];

    ByteBuffer buffer0 = imgYUV420.getPlanes()[0].getBuffer();
    ByteBuffer buffer2 = imgYUV420.getPlanes()[2].getBuffer();
    int buffer0_size = buffer0.remaining();
    int buffer2_size = buffer2.remaining();
    rez = new byte[buffer0_size + buffer2_size];

    buffer0.get(rez, 0, buffer0_size);
    buffer2.get(rez, buffer0_size, buffer2_size);

    return rez;
}
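
This repacking relies on the chroma planes of the YUV_420_888 image sharing one interleaved buffer, so that planes[2] already begins with the VU byte sequence NV21 expects. A defensive check along these lines could guard that assumption (isNv21Compatible is a hypothetical helper, not part of the original source):

private static boolean isNv21Compatible(Image imgYUV420) {
    // NV21 stores V first, then interleaved U; with YUV_420_888 this layout holds
    // when the V plane (index 2) has a pixel stride of 2.
    return imgYUV420.getFormat() == ImageFormat.YUV_420_888
            && imgYUV420.getPlanes()[2].getPixelStride() == 2;
}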

From source file:freed.cam.apis.camera2.modules.PictureModuleApi2.java

private void process_jpeg(Image image, File file) {

    Log.d(TAG, "Create JPEG");
    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);
    saveJpeg(file, bytes);
    image.close();
    buffer.clear();
    image = null;

}

From source file:com.obviousengine.android.focus.ZslFocusCamera.java

/**
 * Given an image reader, extracts the JPEG image bytes and then closes the
 * reader.
 *
 * @param img the image from which to extract jpeg bytes or compress to
 *            jpeg.
 * @return The bytes of the JPEG image. Newly allocated.
 */
private byte[] acquireJpegBytes(Image img) {
    ByteBuffer buffer;

    if (img.getFormat() == ImageFormat.JPEG) {
        Image.Plane plane0 = img.getPlanes()[0];
        buffer = plane0.getBuffer();

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        return imageBytes;
    } else {
        throw new RuntimeException("Unsupported image format.");
    }
}

From source file:com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Given an image reader, extracts the JPEG image bytes and then closes the
 * reader.
 *
 * @param img the image from which to extract jpeg bytes or compress to
 *            jpeg.
 * @param degrees the angle to rotate the image clockwise, in degrees. Rotation is
 *            only applied to YUV images.
 * @return The bytes of the JPEG image. Newly allocated.
 */
private byte[] acquireJpegBytes(Image img, int degrees) {
    ByteBuffer buffer;

    if (img.getFormat() == ImageFormat.JPEG) {
        Image.Plane plane0 = img.getPlanes()[0];
        buffer = plane0.getBuffer();

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        return imageBytes;
    } else if (img.getFormat() == ImageFormat.YUV_420_888) {
        buffer = mJpegByteBufferPool.acquire();
        if (buffer == null) {
            buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
        }

        int numBytes = JpegUtilNative.compressJpegFromYUV420Image(new AndroidImageProxy(img), buffer,
                JPEG_QUALITY, degrees);

        if (numBytes < 0) {
            throw new RuntimeException("Error compressing jpeg.");
        }

        buffer.limit(numBytes);

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);

        buffer.clear();
        mJpegByteBufferPool.release(buffer);

        return imageBytes;
    } else {
        throw new RuntimeException("Unsupported image format.");
    }
}
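
Both JPEG paths rewind the plane buffer after draining it, so the same Image can be read again later. The YUV path instead borrows a direct ByteBuffer from a pool, limits it to the compressed size reported by the native JPEG encoder, and clears it before returning it to the pool.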

From source file:org.tensorflow.demo.Camera2BasicFragment.java

private void ocrImage(Image image) {
    Log.d("predict_class", "ocrImage");

    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
    byte[] bytes = new byte[buffer.remaining()];
    buffer.get(bytes);

    ocrBitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
    //Log.d("predict_class", ocrBitmap.toString());
    /* FileOutputStream output = null;
    try {
        output = new FileOutputStream(mFile);
        output.write(bytes);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        image.close();
        if (null != output) {
            try {
                output.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }*/

    runInBackground(new Runnable() {
        @Override
        public void run() {
            Log.d("predict_class", "run...");
            TessBaseAPI tessBaseAPI = new TessBaseAPI();
            tessBaseAPI.init("/mnt/sdcard/", "eng", 0);
            tessBaseAPI.setImage(ocrBitmap);
            ImageUtils.saveBitmap(ocrBitmap);
            String recognisedText = tessBaseAPI.getUTF8Text();

            Log.d("predict_class", "recognisedText: " + recognisedText);
        }
    });

    Log.d("predict_class", "Saved image");
    image.close();
}
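
Because the Image delivered here is JPEG-encoded, planes[0] contains the entire compressed stream, which is why the bytes can be passed straight to BitmapFactory.decodeByteArray().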