Example usage for android.graphics YuvImage YuvImage

List of usage examples for android.graphics YuvImage YuvImage

Introduction

On this page you can find example usage for android.graphics YuvImage YuvImage.

Prototype

public YuvImage(byte[] yuv, int format, int width, int height, int[] strides) 

Document

Construct a YuvImage.
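
For reference, here is a minimal sketch of the constructor in use (not taken from the examples below): it wraps raw NV21 preview bytes and compresses the full frame to JPEG. The helper name and its parameters are illustrative; passing null for strides uses the default row strides for the given format.

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.ByteArrayOutputStream;

// Illustrative helper: nv21Data, width and height are assumed to come from a
// Camera.PreviewCallback delivering NV21 frames.
private byte[] nv21ToJpeg(byte[] nv21Data, int width, int height) {
    // Wrap the raw YUV bytes; null strides means the default strides for NV21.
    YuvImage yuvImage = new YuvImage(nv21Data, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Compress the whole frame to JPEG at quality 80 and return the encoded bytes.
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 80, out);
    return out.toByteArray();
}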

Usage

From source file:com.cellbots.remoteEyes.RemoteEyesActivity.java

private void uploadImage(byte[] imageData) {
    try {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20 seems pretty decent for quality + size.
        PutMethod put = new PutMethod(putUrl);
        put.setRequestBody(new ByteArrayInputStream(out.toByteArray()));
        put.execute(mHttpState, mConnection);
    } catch (UnsupportedEncodingException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IllegalStateException e) {
        e.printStackTrace();
        resetConnection();
    } catch (ClientProtocolException e) {
        e.printStackTrace();
        resetConnection();
    } catch (IOException e) {
        e.printStackTrace();
        resetConnection();
    } finally {
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
        isUploading = false;
    }

}

From source file:com.cellbots.local.EyesView.java

private void uploadImage(byte[] imageData) {
    try {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20 seems pretty decent for quality + size.
        if (isLocalUrl) {
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            mParent.setRemoteEyesImage(out.toByteArray());
        } else {
            PutMethod put = new PutMethod(putUrl);
            put.setRequestBody(new ByteArrayInputStream(out.toByteArray()));
            int result = put.execute(mHttpState, mConnection);
        }
        //Log.e("result", result + "");
    } catch (UnsupportedEncodingException e) {
        Log.e(TAG, "UnsupportedEncodingException: Error uploading image: " + e.getMessage());
    } catch (IllegalStateException e) {
        Log.e(TAG, "IllegalStateException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (ClientProtocolException e) {
        Log.e(TAG, "ClientProtocolException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (UnknownHostException e) {
        Log.e(TAG, "UnknownHostException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (NoHttpResponseException e) {
        // Silently ignore this.
    } catch (IOException e) {
        Log.e(TAG, "IOException: Error uploading image: " + e.getMessage());
        resetConnection();
    } finally {
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
        isUploading = false;
    }

}

From source file:com.cellbots.eyes.EyesActivity.java

private void uploadImage(byte[] imageData) {
    try {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20 seems pretty decent for quality + size.
        if (putUrl.contains("127.0.0.1") || putUrl.contains("localhost")) {
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            resetConnection();
        }
        PutMethod put = new PutMethod(putUrl);
        put.setRequestBody(new ByteArrayInputStream(out.toByteArray()));
        int result = put.execute(mHttpState, mConnection);
        //Log.e("result", result + "");
    } catch (UnsupportedEncodingException e) {
        Log.e(TAG, "UnsupportedEncodingException: Error uploading image: " + e.getMessage());
    } catch (IllegalStateException e) {
        Log.e(TAG, "IllegalStateException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (ClientProtocolException e) {
        Log.e(TAG, "ClientProtocolException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (UnknownHostException e) {
        Log.e(TAG, "UnknownHostException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (NoHttpResponseException e) {
        // Silently ignore this.
    } catch (IOException e) {
        Log.e(TAG, "IOException: Error uploading image: " + e.getMessage());
        resetConnection();
    } finally {
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
        isUploading = false;
    }

}

From source file:info.guardianproject.iocipher.camera.VideoCameraActivity.java

@Override
public void onPreviewFrame(byte[] data, Camera camera) {

    //even when not recording, we'll compress frames in order to estimate our FPS

    Camera.Parameters parameters = camera.getParameters();
    mLastWidth = parameters.getPreviewSize().width;
    mLastHeight = parameters.getPreviewSize().height;

    if (mRotation > 0) //flip height and width
    {
        mLastWidth = parameters.getPreviewSize().height;
        mLastHeight = parameters.getPreviewSize().width;
    }

    mPreviewFormat = parameters.getPreviewFormat();

    byte[] dataResult = data;

    if (mPreCompressFrames) {
        if (mRotation > 0) {
            dataResult = rotateYUV420Degree90(data, mLastHeight, mLastWidth);

            if (getCameraDirection() == CameraInfo.CAMERA_FACING_FRONT) {
                dataResult = rotateYUV420Degree90(dataResult, mLastWidth, mLastHeight);
                dataResult = rotateYUV420Degree90(dataResult, mLastHeight, mLastWidth);
            }

        }

        YuvImage yuv = new YuvImage(dataResult, mPreviewFormat, mLastWidth, mLastHeight, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        yuv.compressToJpeg(new Rect(0, 0, mLastWidth, mLastHeight), MediaConstants.sJpegQuality, out);
        dataResult = out.toByteArray();
    }

    if (mFramesTotal == 0 && fileOut != null) {
        try {
            info.guardianproject.iocipher.FileOutputStream fosThumb = new info.guardianproject.iocipher.FileOutputStream(
                    new info.guardianproject.iocipher.File(fileOut.getAbsolutePath() + ".thumb.jpg"));
            fosThumb.write(dataResult);
            fosThumb.flush();
            fosThumb.close();

        } catch (Exception e) {

            Log.e("VideoCam", "can't save thumb", e);
        }
    }

    if (mIsRecording && mFrameQ != null)
        synchronized (mFrameQ) {
            if (data != null) {

                VideoFrame vf = new VideoFrame();
                vf.image = dataResult;
                vf.duration = 1;//this is frame duration, not time //System.currentTimeMillis() - lastTime;
                vf.fps = mFPS;

                mFrameQ.add(vf);

                mFramesTotal++;

            }
        }

    mFpsCounter++;
    if ((System.currentTimeMillis() - start) >= 1000) {
        mFPS = mFpsCounter;
        mFpsCounter = 0;
        start = System.currentTimeMillis();
    }

}

From source file:com.jasompeter.openalpr.CameraActivity.java

public void startPreview() {
    try {
        mCamera.setPreviewDisplay(mSurfaceHolder);
    } catch (IOException e) {
        e.printStackTrace();
        Log.d(TAG, "Cannot set preview display.");
    }

    setCorrectOrientation(mCamera);
    setCorrectSize(mCamera, mSurfaceHolder.getSurfaceFrame().width(),
            mSurfaceHolder.getSurfaceFrame().height());

    mCamera.startPreview();

    mCamera.setPreviewCallback(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {

            if (!mEnableRecognition) {
                return;
            }

            if (camera.getParameters().getPreviewFormat() == ImageFormat.NV21) {
                Camera.Size previewSize = camera.getParameters().getPreviewSize();
                YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height,
                        null);
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 50, baos);
                recognize(baos.toByteArray());
            }

        }
    });
}

From source file:com.cellbots.eyes.EyesActivity.java

private void appEngineUploadImage(byte[] imageData) {
    Log.e("app engine remote eyes", "called");
    try {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20 seems pretty decent for quality + size.
        Log.e("app engine remote eyes", "upload starting");
        HttpPost httpPost = new HttpPost(postUrl);
        Log.e("app engine perf", "0");
        MultipartEntity entity = new MultipartEntity();
        Log.e("app engine perf", "1");
        entity.addPart("img", new InputStreamBody(new ByteArrayInputStream(out.toByteArray()), "video.jpg"));
        Log.e("app engine perf", "2");
        httpPost.setEntity(entity);
        Log.e("app engine perf", "3");
        HttpResponse response = httpclient.execute(httpPost);
        Log.e("app engine remote eyes", "result: " + response.getStatusLine());
        Log.e("app engine remote eyes", "upload complete");
    } catch (UnsupportedEncodingException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IllegalStateException e) {
        e.printStackTrace();
        resetAppEngineConnection();
    } catch (ClientProtocolException e) {
        e.printStackTrace();
        resetAppEngineConnection();
    } catch (IOException e) {
        e.printStackTrace();
        resetAppEngineConnection();
    } finally {
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
        isUploading = false;
        Log.e("app engine remote eyes", "finished");
    }
}

From source file:com.cellbots.eyes.EyesActivity.java

private void takePicture(byte[] imageData) {
    YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
    yuvImage.compressToJpeg(r, 100, out);
    File dir = new File(Environment.getExternalStorageDirectory() + "/cellbot/pictures");
    dir.mkdirs();
    FileOutputStream outStream;
    try {
        String picName = dir.toString() + "/" + System.currentTimeMillis() + ".jpg";
        outStream = new FileOutputStream(picName);
        outStream.write(out.toByteArray());
        outStream.flush();
        outStream.close();
        Log.e("Picture saved:", picName);
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        needToTakePicture = false;
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
    }
}

From source file:com.cellbots.local.EyesView.java

private void takePicture(byte[] imageData, boolean isJpg) {
    if (!isJpg) {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 100, out);
    }
    File dir = new File(Environment.getExternalStorageDirectory() + "/cellbots/pictures");
    dir.mkdirs();
    FileOutputStream outStream;
    try {
        String picName = dir.toString() + "/" + System.currentTimeMillis() + ".jpg";
        outStream = new FileOutputStream(picName);
        outStream.write(!isJpg ? out.toByteArray() : imageData);
        outStream.flush();
        outStream.close();
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        needToTakePicture = false;
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
    }
}

From source file:com.android.camera.manager.ThumbnailViewManager.java

private byte[] covertYuvDataToJpeg(byte[] data, int yuvWidth, int yuvHeight, int imageFormat) {
    byte[] jpeg;
    Rect rect = new Rect(0, 0, yuvWidth, yuvHeight);
    // TODO: the yuv data from native must be NV21 or YUY2.
    YuvImage yuvImg = new YuvImage(data, imageFormat, yuvWidth, yuvHeight, null);
    ByteArrayOutputStream outputstream = new ByteArrayOutputStream();
    int jpegQuality = CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
    yuvImg.compressToJpeg(rect, jpegQuality, outputstream);
    jpeg = outputstream.toByteArray();
    return jpeg;
}

From source file:hr.abunicic.angular.CameraActivity.java

/**
 * Method that starts detection of shapes.
 */
public void startDetection() {
    startPreview = !startPreview;

    mCamera.startPreview();
    mCamera.setPreviewCallback(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {

            //Getting engine results
            VisionEngineResults res = JobScheduler.getEngineResults();
            if (res != null) {
                rp = res;

                updateDescription();
            }

            if (inScheduler < 3 && startPreview) {

                inScheduler++;

                cornersView.setVisibility(View.VISIBLE);

                ByteArrayOutputStream out = new ByteArrayOutputStream();
                params = mCamera.getParameters();
                Camera.Size size = params.getPreviewSize();

                //Compressing frame image to JPEG and then to byte array
                YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
                Rect rectangle = new Rect();
                rectangle.bottom = size.height;
                rectangle.top = 0;
                rectangle.left = 0;
                rectangle.right = size.width;
                yuvImage.compressToJpeg(rectangle, 20, out);
                byte[] imageBytes = out.toByteArray();

                //Starting the PrepareTask for this frame
                new PrepareTask().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, imageBytes);
                /*
                Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
                try {
                    Matrix matrix = new Matrix();
                    matrix.postRotate(90);
                    Bitmap scaledBitmap = Bitmap.createScaledBitmap(bitmap, bitmap.getWidth(), bitmap.getHeight(), true);
                    Bitmap rotatedBitmap = Bitmap.createBitmap(scaledBitmap, 0, 0, scaledBitmap.getWidth(), scaledBitmap.getHeight(), matrix, true);
                    //storeImage(rotatedBitmap);
                    if (mRecognizer.getCurrentState().equals(Recognizer.State.READY)) {
                        mRecognizer.recognizeBitmap(rotatedBitmap, Orientation.ORIENTATION_LANDSCAPE_RIGHT, CameraActivity.this);
                    }
                } catch (IllegalStateException e) {

                }
                */

                if (croppedBitmap != null) {
                    if (mRecognizer.getCurrentState().equals(Recognizer.State.READY)) {
                        storeImage(croppedBitmap);
                        mRecognizer.recognizeBitmap(croppedBitmap, Orientation.ORIENTATION_LANDSCAPE_RIGHT,
                                CameraActivity.this);
                    }
                } else {

                }

                camera.startPreview();

            } else if (!startPreview) {
                cornersView.setVisibility(View.GONE);
            }

        }
    });
}