List of usage examples for android.media Image getWidth
public abstract int getWidth();
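Before the full examples below, a minimal sketch of the typical call pattern: getWidth() and getHeight() are read from an Image acquired from an ImageReader, and the frame is closed afterwards. The logFrameSize helper name and the logging tag are illustrative assumptions, not taken from any of the source files that follow.

import android.media.Image;
import android.media.ImageReader;
import android.util.Log;

// Hypothetical helper: report the dimensions of the most recent frame.
static void logFrameSize(ImageReader reader) {
    Image image = reader.acquireLatestImage();
    if (image == null) {
        return; // No frame ready yet.
    }
    try {
        // getWidth()/getHeight() describe this frame's buffer dimensions.
        int width = image.getWidth();
        int height = image.getHeight();
        Log.d("ImageSize", "Frame is " + width + "x" + height);
    } finally {
        image.close(); // Always release the Image back to the reader's queue.
    }
}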
From source file: com.android.camera2.its.ItsUtils.java
public static byte[] getDataFromImage(Image image) throws ItsException {
    int format = image.getFormat();
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] data = null;

    // Read image data
    Plane[] planes = image.getPlanes();

    // Check image validity
    if (!checkAndroidImageFormat(image)) {
        throw new ItsException("Invalid image format passed to getDataFromImage: " + image.getFormat());
    }
    if (format == ImageFormat.JPEG) {
        // JPEG doesn't have pixelStride and rowStride; treat it as a 1D buffer.
        ByteBuffer buffer = planes[0].getBuffer();
        data = new byte[buffer.capacity()];
        buffer.get(data);
        return data;
    } else if (format == ImageFormat.YUV_420_888 || format == ImageFormat.RAW_SENSOR
            || format == ImageFormat.RAW10) {
        int offset = 0;
        data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
            Logt.i(TAG, String.format(
                    "Reading image: fmt %d, plane %d, w %d, h %d, rowStride %d, pixStride %d",
                    format, i, width, height, rowStride, pixelStride));
            // For multi-planar YUV images, assume YUV420 with 2x2 chroma subsampling.
            int w = (i == 0) ? width : width / 2;
            int h = (i == 0) ? height : height / 2;
            for (int row = 0; row < h; row++) {
                if (pixelStride == bytesPerPixel) {
                    // Special case: optimized read of the entire row.
                    int length = w * bytesPerPixel;
                    buffer.get(data, offset, length);
                    // Advance the buffer by the remainder of the row stride, unless on
                    // the last row, whose buffer may not include the full stride padding.
                    if (row < h - 1) {
                        buffer.position(buffer.position() + rowStride - length);
                    }
                    offset += length;
                } else {
                    // Generic case: works for any pixelStride but is slower.
                    // Use an intermediate buffer to avoid reading byte-by-byte from a
                    // DirectByteBuffer, which is very bad for performance.
                    // Also need to avoid out-of-bounds access by only reading the bytes
                    // still available in the ByteBuffer.
                    int readSize = rowStride;
                    if (buffer.remaining() < readSize) {
                        readSize = buffer.remaining();
                    }
                    buffer.get(rowData, 0, readSize);
                    if (pixelStride >= 1) {
                        for (int col = 0; col < w; col++) {
                            data[offset++] = rowData[col * pixelStride];
                        }
                    } else {
                        // A pixelStride of 0 can mean the pixel isn't a multiple of 8 bits,
                        // for example with RAW10. Just copy the buffer, dropping any padding
                        // at the end of the row.
                        int length = (w * ImageFormat.getBitsPerPixel(format)) / 8;
                        System.arraycopy(rowData, 0, data, offset, length);
                        offset += length;
                    }
                }
            }
        }
        Logt.i(TAG, String.format("Done reading image, format %d", format));
        return data;
    } else {
        throw new ItsException("Unsupported image format: " + format);
    }
}
From source file: nf.frex.android.FrexActivity.java
Note: the Image type in this example is Frex's own image class (android.media.Image has no public constructor), but the getWidth()/getHeight() usage follows the same pattern.

private void setWallpaper() {
    final WallpaperManager wallpaperManager = WallpaperManager.getInstance(FrexActivity.this);
    final int desiredWidth = wallpaperManager.getDesiredMinimumWidth();
    final int desiredHeight = wallpaperManager.getDesiredMinimumHeight();

    final Image image = view.getImage();
    final int imageWidth = image.getWidth();
    final int imageHeight = image.getHeight();

    final boolean useDesiredSize = desiredWidth > imageWidth || desiredHeight > imageHeight;

    DialogInterface.OnClickListener noListener = new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialog, int which) {
            // ok
        }
    };

    if (useDesiredSize) {
        showYesNoDialog(this, R.string.set_wallpaper,
                getString(R.string.wallpaper_compute_msg, desiredWidth, desiredHeight),
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        final Image wallpaperImage;
                        try {
                            wallpaperImage = new Image(desiredWidth, desiredHeight);
                        } catch (OutOfMemoryError e) {
                            alert(getString(R.string.out_of_memory));
                            return;
                        }

                        final ProgressDialog progressDialog = new ProgressDialog(FrexActivity.this);

                        Generator.ProgressListener progressListener = new Generator.ProgressListener() {
                            int numLines;

                            @Override
                            public void onStarted(int numTasks) {
                            }

                            @Override
                            public void onSomeLinesComputed(int taskId, int line1, int line2) {
                                numLines += 1 + line2 - line1;
                                progressDialog.setProgress(numLines);
                            }

                            @Override
                            public void onStopped(boolean cancelled) {
                                progressDialog.dismiss();
                                if (!cancelled) {
                                    setWallpaper(wallpaperManager, wallpaperImage);
                                }
                            }
                        };

                        final Generator wallpaperGenerator = new Generator(view.getGeneratorConfig(),
                                SettingsActivity.NUM_CORES, progressListener);

                        DialogInterface.OnCancelListener cancelListener = new DialogInterface.OnCancelListener() {
                            @Override
                            public void onCancel(DialogInterface dialog) {
                                if (progressDialog.isShowing()) {
                                    progressDialog.dismiss();
                                }
                                wallpaperGenerator.cancel();
                            }
                        };

                        progressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
                        progressDialog.setCancelable(true);
                        progressDialog.setMax(desiredHeight);
                        progressDialog.setOnCancelListener(cancelListener);
                        progressDialog.show();

                        Arrays.fill(wallpaperImage.getValues(), FractalView.MISSING_VALUE);
                        wallpaperGenerator.start(wallpaperImage, false);
                    }
                }, noListener, null);
    } else {
        showYesNoDialog(this, R.string.set_wallpaper, getString(R.string.wallpaper_replace_msg),
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        setWallpaper(wallpaperManager, image);
                    }
                }, noListener, null);
    }
}
From source file: com.obviousengine.android.focus.ZslFocusCamera.java
private void savePicture(Image image, final PhotoCaptureParameters captureParams, CaptureSession session) {
    int heading = captureParams.heading;
    int width = image.getWidth();
    int height = image.getHeight();
    int rotation = 0;

    ExifInterface exif = new ExifInterface();
    // TODO: Add more exif tags here.
    exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, width));
    exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, height));

    // TODO: Handle rotation correctly.

    // Set GPS heading direction based on sensor, if location is on.
    if (heading >= 0) {
        ExifTag directionRefTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
        ExifTag directionTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION, new Rational(heading, 1));
        exif.setTag(directionRefTag);
        exif.setTag(directionTag);
    }

    session.saveAndFinish(acquireJpegBytes(image), width, height, rotation, exif,
            new OnImageSavedListener() {
                @Override
                public void onImageSaved(Uri uri) {
                    captureParams.callback.onPictureSaved(uri);
                }
            });
}
From source file: com.android.camera.one.v2.OneCameraZslImpl.java
private void savePicture(Image image, final PhotoCaptureParameters captureParams, CaptureSession session,
        CaptureResult result) {
    int heading = captureParams.heading;
    int degrees = CameraUtil.getJpegRotation(captureParams.orientation, mCharacteristics);

    ExifInterface exif = new ExifInterface();
    // TODO: Add more exif tags here.

    Size size = getImageSizeForOrientation(image.getWidth(), image.getHeight(), degrees);

    exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, size.getWidth()));
    exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, size.getHeight()));
    exif.setTag(exif.buildTag(ExifInterface.TAG_ORIENTATION, ExifInterface.Orientation.TOP_LEFT));

    // Set GPS heading direction based on sensor, if location is on.
    if (heading >= 0) {
        ExifTag directionRefTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
        ExifTag directionTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION, new Rational(heading, 1));
        exif.setTag(directionRefTag);
        exif.setTag(directionTag);
    }

    new ExifUtil(exif).populateExif(Optional.<TaskImageContainer.TaskImage>absent(),
            Optional.of((CaptureResultProxy) new AndroidCaptureResultProxy(result)),
            Optional.<Location>absent());

    ListenableFuture<Optional<Uri>> futureUri = session.saveAndFinish(acquireJpegBytes(image, degrees),
            size.getWidth(), size.getHeight(), 0, exif);
    Futures.addCallback(futureUri, new FutureCallback<Optional<Uri>>() {
        @Override
        public void onSuccess(Optional<Uri> uriOptional) {
            captureParams.callback.onPictureSaved(uriOptional.orNull());
        }

        @Override
        public void onFailure(Throwable throwable) {
            captureParams.callback.onPictureSaved(null);
        }
    });
}
From source file: com.android.camera.one.v2.OneCameraZslImpl.java
/**
 * Given an image reader, extracts the JPEG image bytes and then closes the
 * reader.
 *
 * @param img the image from which to extract jpeg bytes or compress to
 *            jpeg.
 * @param degrees the angle to rotate the image clockwise, in degrees. Rotation is
 *            only applied to YUV images.
 * @return The bytes of the JPEG image. Newly allocated.
 */
private byte[] acquireJpegBytes(Image img, int degrees) {
    ByteBuffer buffer;
    if (img.getFormat() == ImageFormat.JPEG) {
        Image.Plane plane0 = img.getPlanes()[0];
        buffer = plane0.getBuffer();
        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.rewind();
        return imageBytes;
    } else if (img.getFormat() == ImageFormat.YUV_420_888) {
        buffer = mJpegByteBufferPool.acquire();
        if (buffer == null) {
            buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
        }
        int numBytes = JpegUtilNative.compressJpegFromYUV420Image(new AndroidImageProxy(img), buffer,
                JPEG_QUALITY, degrees);
        if (numBytes < 0) {
            throw new RuntimeException("Error compressing jpeg.");
        }
        buffer.limit(numBytes);
        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        buffer.clear();
        mJpegByteBufferPool.release(buffer);
        return imageBytes;
    } else {
        throw new RuntimeException("Unsupported image format.");
    }
}
From source file: org.tensorflow.demo.Camera2BasicFragment.java
private void classifyImage(Image image) {
    try {
        Log.d("predict_class", "1");
        if (image == null) {
            return;
        }
        if (computing) {
            image.close();
            return;
        }
        Log.d("predict_class", "2");
        computing = true;

        Trace.beginSection("imageAvailable");

        Log.d("predict_class", image.getHeight() + "");
        Log.d("predict_class", image.getWidth() + "");

        final Image.Plane[] planes = image.getPlanes();
        fillBytes(planes, yuvBytes);

        final int yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();
        ImageUtils.convertYUV420ToARGB8888(yuvBytes[0], yuvBytes[1], yuvBytes[2], rgbBytes,
                previewWidth, previewHeight, yRowStride, uvRowStride, uvPixelStride, false);

        image.close();
    } catch (final Exception e) {
        Log.d("predict_class", "error: " + e.getMessage());
        if (image != null) {
            image.close();
        }
        LOGGER.e(e, "Exception!");
        Trace.endSection();
        return;
    }

    rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);
    final Canvas canvas = new Canvas(croppedBitmap);
    canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

    // For examining the actual TF input.
    if (SAVE_PREVIEW_BITMAP) {
        ImageUtils.saveBitmap(croppedBitmap);
    }

    runInBackground(new Runnable() {
        @Override
        public void run() {
            final long startTime = SystemClock.uptimeMillis();
            final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
            lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

            String toastString = "";
            Log.d("predict_class", results.toString());
            for (Classifier.Recognition cr : results) {
                toastString = toastString + " " + cr.getTitle() + ": " + cr.getConfidence() + ";";
            }
            Log.d("predict_class", toastString);
            //showToast(toastString);

            Intent intent = new Intent(getActivity(), ClassifierResultActivity.class);
            Gson gs = new Gson();
            String resultString = gs.toJson(results);
            intent.putExtra("result", resultString);
            startActivity(intent);

            cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
            computing = false;
        }
    });

    Trace.endSection();
}