Example usage for android.media Image getPlanes

Introduction

On this page you can find example usage for android.media Image getPlanes.

Prototype

public abstract Plane[] getPlanes();

Document

Get the array of pixel planes for this Image.
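
For context, here is a minimal, self-contained sketch of how getPlanes() is typically used to read the plane buffers of a YUV_420_888 image. The PlaneReader class and readPlanes method names are illustrative only and are not part of the Android framework.

import android.media.Image;
import java.nio.ByteBuffer;

public final class PlaneReader {

    // Copies each plane of a YUV_420_888 Image into its own byte array.
    // Plane 0 is Y (luminance); planes 1 and 2 are U and V (chrominance).
    public static byte[][] readPlanes(Image image) {
        Image.Plane[] planes = image.getPlanes();
        byte[][] bytes = new byte[planes.length][];
        for (int i = 0; i < planes.length; i++) {
            ByteBuffer buffer = planes[i].getBuffer();
            bytes[i] = new byte[buffer.remaining()];
            buffer.get(bytes[i]);
            // getRowStride() and getPixelStride() describe the buffer layout:
            // rows may be padded beyond the image width, and the chroma planes
            // may interleave samples, so consumers must honor both values.
        }
        return bytes;
    }
}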

Usage

From source file: org.tensorflow.demo.Camera2BasicFragment.java

private void classifyImage(Image image) {
    try {
        Log.d("predict_class", "1");

        if (image == null) {
            return;
        }

        if (computing) {
            image.close();
            return;
        }
        Log.d("predict_class", "2");
        computing = true;

        Trace.beginSection("imageAvailable");
        Log.d("predict_class", image.getHeight() + "");
        Log.d("predict_class", image.getWidth() + "");

        // A YUV_420_888 image exposes three planes: Y, U, and V.
        final Image.Plane[] planes = image.getPlanes();

        // fillBytes (a demo helper; see the sketch after this listing)
        // copies each plane's ByteBuffer into the reusable yuvBytes arrays.
        fillBytes(planes, yuvBytes);

        final int yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();

        // Convert the YUV planes into ARGB_8888 pixels for the preview bitmap.
        ImageUtils.convertYUV420ToARGB8888(yuvBytes[0], yuvBytes[1], yuvBytes[2], rgbBytes, previewWidth,
                previewHeight, yRowStride, uvRowStride, uvPixelStride, false);

        image.close();

    } catch (final Exception e) {
        Log.d("predict_class", "error: " + e.getMessage());

        if (image != null) {
            image.close();
        }
        LOGGER.e(e, "Exception!");
        Trace.endSection();
        return;
    }

    // Copy the converted pixels into the full-resolution preview bitmap.
    rgbFrameBitmap.setPixels(rgbBytes, 0, previewWidth, 0, 0, previewWidth, previewHeight);

    // Crop and scale the frame to the classifier's expected input size.
    final Canvas canvas = new Canvas(croppedBitmap);
    canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);

    // For examining the actual TF input.
    if (SAVE_PREVIEW_BITMAP) {
        ImageUtils.saveBitmap(croppedBitmap);
    }

    // Run inference off the UI thread so the camera pipeline is not blocked.
    runInBackground(new Runnable() {
        @Override
        public void run() {
            final long startTime = SystemClock.uptimeMillis();
            final List<Classifier.Recognition> results = classifier.recognizeImage(croppedBitmap);
            lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
            String toastString = "";
            Log.d("predict_class", results.toString());
            for (Classifier.Recognition cr : results) {
                toastString = toastString + " " + cr.getTitle() + ": " + cr.getConfidence() + ";";
            }
            Log.d("predict_class", toastString);

            //showToast(toastString);
            Intent intent = new Intent(getActivity(), ClassifierResultActivity.class);
            Gson gs = new Gson();
            String resultString = gs.toJson(results);
            intent.putExtra("result", resultString);
            startActivity(intent);

            cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
            computing = false;
        }
    });

    Trace.endSection();
}
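
The fillBytes call above is a helper defined elsewhere in the demo project, not part of the android.media API. Below is a sketch of what such a helper typically does, assuming it copies each plane's ByteBuffer into a lazily allocated, reusable byte array; the exact implementation in the source project may differ.

import android.media.Image.Plane;
import java.nio.ByteBuffer;

// Copies each Image.Plane's ByteBuffer into the matching yuvBytes slot,
// allocating the array on first use so it can be reused across frames.
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
    for (int i = 0; i < planes.length; ++i) {
        final ByteBuffer buffer = planes[i].getBuffer();
        if (yuvBytes[i] == null) {
            yuvBytes[i] = new byte[buffer.capacity()];
        }
        buffer.get(yuvBytes[i]);
    }
}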