List of usage examples for android.media.Image.getFormat()
public abstract int getFormat();
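getFormat() reports which ImageFormat (or PixelFormat) constant the Image carries, which in turn determines how many planes getPlanes() returns and how their buffers must be read. Below is a minimal, hedged sketch (not taken from the examples that follow; the class and method names are illustrative) of where the value is typically queried: inside an ImageReader callback, before deciding how to copy the plane data.
import android.graphics.ImageFormat;
import android.media.Image;
import android.media.ImageReader;

final class FormatProbe {
    // Illustrative listener: shows where getFormat() is usually read and how the
    // result steers plane handling. No actual pixel copying is performed here.
    static ImageReader.OnImageAvailableListener newListener() {
        return reader -> {
            Image image = reader.acquireNextImage();
            if (image == null) {
                return; // no frame ready yet
            }
            try {
                int format = image.getFormat(); // an ImageFormat/PixelFormat constant
                if (format == ImageFormat.JPEG) {
                    // Single plane holding a complete JPEG stream.
                } else if (format == ImageFormat.YUV_420_888) {
                    // Three planes (Y, U, V); honor row and pixel strides when copying.
                }
            } finally {
                image.close(); // always release the buffer back to the reader
            }
        };
    }
}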
From source file: Main.java
private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
        case ImageFormat.YUV_420_888:
        case ImageFormat.NV21:
        case ImageFormat.YV12:
            return 3 == planes.length;
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.JPEG:
            return 1 == planes.length;
        default:
            return false;
    }
}
From source file: Main.java
private static boolean checkAndroidImageFormat(Image image) {
    int format = image.getFormat();
    Plane[] planes = image.getPlanes();
    switch (format) {
        case ImageFormat.YUV_420_888:
        case ImageFormat.NV21:
        case ImageFormat.YV12:
            return 3 == planes.length;
        case ImageFormat.RAW_SENSOR:
        case ImageFormat.RAW10:
        case ImageFormat.RAW12:
        case ImageFormat.JPEG:
            return 1 == planes.length;
        default:
            return false;
    }
}
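The two Main.java variants above differ only in that the second also accepts ImageFormat.RAW12 as a single-plane format. Below is a hedged usage sketch, meant to sit alongside checkAndroidImageFormat in the same class; the caller name is hypothetical. It shows the typical placement of such a validity check: before any plane access.
// Hypothetical caller: validates the plane count implied by getFormat() before
// touching any plane buffers.
static byte[] copyFirstPlane(Image image) {
    if (!checkAndroidImageFormat(image)) {
        throw new IllegalArgumentException(
                "Unexpected plane count for format " + image.getFormat());
    }
    ByteBuffer plane0 = image.getPlanes()[0].getBuffer();
    byte[] out = new byte[plane0.remaining()];
    plane0.get(out);
    return out;
}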
From source file: com.android.camera2.its.ItsUtils.java
public static byte[] getDataFromImage(Image image) throws ItsException {
    int format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = null;
    // Read image data
    Plane[] planes = image.getPlanes();
    // Check image validity
    if (!checkAndroidImageFormat(image)) {
        throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat());
    }
    if (format == ImageFormat.JPEG) {
        // JPEG doesn't have pixel stride and row stride; treat it as a 1-D buffer.
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
            || format == ImageFormat.RAW10) {
        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
            Logt.i(TAG, String.format(
                    "Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
                    format, i, width, height, rowStride, pixelStride));
            // For multi-planar YUV images, assume YUV420 with 2x2 chroma subsampling.
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row.
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    // Advance the buffer by the remainder of the row stride.
                    buffer.position(buffer.position() + rowStride - length);
                    offset += length;
                } else {
                    // Generic case: works for any pixelStride but is slower.
                    // Use an intermediate buffer to avoid reading byte-by-byte from the
                    // DirectByteBuffer, which is very bad for performance.
                    // Also avoid out-of-bounds access by only reading the bytes
                    // still available in the ByteBuffer.
                    int readSize = rowStride;
                    if (buffer.remaining() < readSize) {
                        readSize = buffer.remaining();
                    }
                    buffer.get(rowData, 0, readSize);
                    if (pixelStride >= 1) {
                        for (int col = 0; col < w; col++) {
                            data[offset++] = rowData[col * pixelStride];
                        }
                    } else {
                        // A pixel stride of 0 can mean a pixel isn't a multiple of 8 bits,
                        // for example with RAW10. Just copy the buffer, dropping any
                        // padding at the end of the row.
                        int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
                        System.arraycopy(rowData, 0, data, offset, length);
                        offset += length;
                    }
                }
            }
        }
        Logt.i(TAG, String.format("Done reading image, format %d", format));
        return data;
    } else {
        throw new ItsException("Unsupported image format: " + format);
    }
}
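The buffer sizing in getDataFromImage relies on ImageFormat.getBitsPerPixel(), which reports 12 bits per pixel for YUV_420_888. Below is a small worked sketch of that arithmetic; the class name and the 1920x1080 example size are illustrative, not from the source above.
import android.graphics.ImageFormat;

final class YuvSizeMath {
    // For YUV_420_888: a full-resolution Y plane plus two 2x2-subsampled chroma
    // planes, i.e. width * height * 12 / 8 bytes in total.
    static int expectedYuv420Bytes(int width, int height) {
        int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888); // 12
        int totalBytes = width * height * bitsPerPixel / 8;
        int ySize = width * height;                  // luma plane
        int chromaSize = (width / 2) * (height / 2); // each chroma plane
        // For even dimensions: totalBytes == ySize + 2 * chromaSize,
        // e.g. 1920x1080 -> 2,073,600 + 2 * 518,400 = 3,110,400 bytes.
        return totalBytes;
    }
}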
From source file: com.obviousengine.android.focus.ZslFocusCamera.java
/**
 * Extracts the JPEG image bytes from the given image.
 *
 * @param img the image from which to extract JPEG bytes.
 * @return The bytes of the JPEG image. Newly allocated.
 */
private byte[] acquireJpegBytes(Image img) {
    ByteBuffer buffer;
    if (img.getFormat() == ImageFormat.JPEG) {
        Image.Plane plane0 = img.getPlanes()[0];
        buffer = plane0.getBuffer();
        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        return imageBytes;
    } else {
        throw new RuntimeException("Unsupported image format.");
    }
}
From source file: com.android.camera.one.v2.OneCameraZslImpl.java
/**
 * Extracts the JPEG image bytes from the given image, compressing
 * YUV_420_888 images to JPEG if necessary.
 *
 * @param img the image from which to extract JPEG bytes or to compress to
 *            JPEG.
 * @param degrees the angle to rotate the image clockwise, in degrees. Rotation is
 *            only applied to YUV images.
 * @return The bytes of the JPEG image. Newly allocated.
 */
private byte[] acquireJpegBytes(Image img, int degrees) {
    ByteBuffer buffer;
    if (img.getFormat() == ImageFormat.JPEG) {
        Image.Plane plane0 = img.getPlanes()[0];
        buffer = plane0.getBuffer();
        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        return imageBytes;
    } else if (img.getFormat() == ImageFormat.YUV_420_888) {
        buffer = mJpegByteBufferPool.acquire();
        if (buffer == null) {
            buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
        }
        int numBytes = JpegUtilNative.compressJpegFromYUV420Image(new AndroidImageProxy(img), buffer,
                JPEG_QUALITY, degrees);
        if (numBytes < 0) {
            throw new RuntimeException("Error compressing jpeg.");
        }
        buffer.limit(numBytes);
        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.clear();
        mJpegByteBufferPool.release(buffer);
        return imageBytes;
    } else {
        throw new RuntimeException("Unsupported image format.");
    }
}
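Both acquireJpegBytes variants return a freshly allocated byte[] holding a complete JPEG stream, so a caller can hand it straight to disk or to a further encoder. Below is a hedged sketch of such a caller; the class name and file-path handling are illustrative (on Android the path would normally come from Context#getExternalFilesDir or MediaStore rather than an arbitrary string).
import java.io.FileOutputStream;
import java.io.IOException;

final class JpegWriter {
    // Illustrative caller: persists the byte[] produced by acquireJpegBytes(...).
    static void writeJpeg(byte[] jpegBytes, String path) throws IOException {
        try (FileOutputStream out = new FileOutputStream(path)) {
            out.write(jpegBytes); // the array already holds a complete JPEG stream
        }
    }
}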