List of usage examples for the android.graphics.YuvImage constructor
public YuvImage(byte[] yuv, int format, int width, int height, int[] strides)
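All of the examples below pass null for the strides argument, which makes YuvImage assume default, unpadded row strides for the given format. As a minimal sketch of the same constructor with explicit strides: for an unpadded NV21 buffer, the Y plane and the interleaved VU plane each have a row stride of width bytes. The class and method names here are illustrative only, not taken from any of the source files below.

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;

import java.io.ByteArrayOutputStream;

public class YuvImageStrideSketch {
    // Hypothetical helper: wraps an unpadded NV21 buffer with explicit row strides
    // (Y plane and interleaved VU plane are both 'width' bytes per row) and
    // compresses the full frame to JPEG at quality 90.
    public static byte[] nv21ToJpeg(byte[] nv21, int width, int height) {
        int[] strides = new int[] { width, width };
        YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, strides);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        image.compressToJpeg(new Rect(0, 0, width, height), 90, out);
        return out.toByteArray();
    }
}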
From source file:Main.java
public static Bitmap yuv2bitmap(byte[] data, int width, int height) {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuvImage = new YuvImage(data, ImageFormat.NV21, width, height, null);
    yuvImage.compressToJpeg(new android.graphics.Rect(0, 0, width, height), 100, out);
    byte[] imageBytes = out.toByteArray();
    Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
    // rotate
    Matrix matrix = new Matrix();
    matrix.postRotate(90);
    Bitmap dst = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
    return dst;
}
From source file:Main.java
public static Bitmap createBitmapFromByteArray(byte[] data, Size previewSize) {
    YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 80, baos);
    byte[] jdata = baos.toByteArray();
    BitmapFactory.Options opt = new BitmapFactory.Options();
    opt.inMutable = true;
    Bitmap bitmap = BitmapFactory.decodeByteArray(jdata, 0, jdata.length, opt);
    Matrix matrix = new Matrix();
    matrix.postRotate(-90);
    return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
}
From source file:Main.java
public static byte[] createFromNV21(@NonNull final byte[] data, final int width, final int height, int rotation,
        final Rect croppingRect) throws IOException {
    byte[] rotated = rotateNV21(data, width, height, rotation);
    final int rotatedWidth = rotation % 180 > 0 ? height : width;
    final int rotatedHeight = rotation % 180 > 0 ? width : height;
    YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21, rotatedWidth, rotatedHeight, null);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    previewImage.compressToJpeg(croppingRect, 80, outputStream);
    byte[] bytes = outputStream.toByteArray();
    outputStream.close();
    return bytes;
}
From source file:Main.java
public static byte[] createFromNV21(@NonNull final byte[] data, final int width, final int height, int rotation,
        final Rect croppingRect, final boolean flipHorizontal) throws IOException {
    byte[] rotated = rotateNV21(data, width, height, rotation, flipHorizontal);
    final int rotatedWidth = rotation % 180 > 0 ? height : width;
    final int rotatedHeight = rotation % 180 > 0 ? width : height;
    YuvImage previewImage = new YuvImage(rotated, ImageFormat.NV21, rotatedWidth, rotatedHeight, null);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    previewImage.compressToJpeg(croppingRect, 80, outputStream);
    byte[] bytes = outputStream.toByteArray();
    outputStream.close();
    return bytes;
}
From source file:Main.java
public static Bitmap yuv2Bitmap(byte[] data, int width, int height) {
    final YuvImage image = new YuvImage(data, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream(data.length);
    if (!image.compressToJpeg(new Rect(0, 0, width, height), 100, os)) {
        return null;
    }
    byte[] tmp = os.toByteArray();
    Bitmap bitmap = BitmapFactory.decodeByteArray(tmp, 0, tmp.length);
    return bitmap;
}
From source file:Main.java
public static String savetoJPEG(byte[] data, int width, int height, String file) {
    Rect frame = new Rect(0, 0, width, height);
    YuvImage img = new YuvImage(data, ImageFormat.NV21, width, height, null);
    OutputStream os = null;
    File jpgfile = new File(file);
    try {
        os = new FileOutputStream(jpgfile);
        img.compressToJpeg(frame, 100, os);
        os.flush();
        os.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return jpgfile.getPath();
}
From source file:Main.java
@SuppressLint("NewApi") public static Bitmap NV21ToRGBABitmap(byte[] nv21, int width, int height, Context context) { TimingLogger timings = new TimingLogger(TIMING_LOG_TAG, "NV21ToRGBABitmap"); Rect rect = new Rect(0, 0, width, height); try {/* w w w .ja v a 2s . c o m*/ Class.forName("android.renderscript.Element$DataKind").getField("PIXEL_YUV"); Class.forName("android.renderscript.ScriptIntrinsicYuvToRGB"); byte[] imageData = nv21; if (mRS == null) { mRS = RenderScript.create(context); mYuvToRgb = ScriptIntrinsicYuvToRGB.create(mRS, Element.U8_4(mRS)); Type.Builder tb = new Type.Builder(mRS, Element.createPixel(mRS, Element.DataType.UNSIGNED_8, Element.DataKind.PIXEL_YUV)); tb.setX(width); tb.setY(height); tb.setMipmaps(false); tb.setYuvFormat(ImageFormat.NV21); ain = Allocation.createTyped(mRS, tb.create(), Allocation.USAGE_SCRIPT); timings.addSplit("Prepare for ain"); Type.Builder tb2 = new Type.Builder(mRS, Element.RGBA_8888(mRS)); tb2.setX(width); tb2.setY(height); tb2.setMipmaps(false); aOut = Allocation.createTyped(mRS, tb2.create(), Allocation.USAGE_SCRIPT & Allocation.USAGE_SHARED); timings.addSplit("Prepare for aOut"); bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); timings.addSplit("Create Bitmap"); } ain.copyFrom(imageData); timings.addSplit("ain copyFrom"); mYuvToRgb.setInput(ain); timings.addSplit("setInput ain"); mYuvToRgb.forEach(aOut); timings.addSplit("NV21 to ARGB forEach"); aOut.copyTo(bitmap); timings.addSplit("Allocation to Bitmap"); } catch (Exception e) { YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null); timings.addSplit("NV21 bytes to YuvImage"); ByteArrayOutputStream baos = new ByteArrayOutputStream(); yuvImage.compressToJpeg(rect, 90, baos); byte[] cur = baos.toByteArray(); timings.addSplit("YuvImage crop and compress to Jpeg Bytes"); bitmap = BitmapFactory.decodeByteArray(cur, 0, cur.length); timings.addSplit("Jpeg Bytes to Bitmap"); } timings.dumpToLog(); return bitmap; }
From source file:Main.java
public static Bitmap NV21ToRGBABitmap(byte[] nv21, int width, int height) {
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, baos);
    byte[] cur = baos.toByteArray();
    return BitmapFactory.decodeByteArray(cur, 0, cur.length);
}
From source file:com.example.android.camera.CameraActivity.java
@Override
protected void onResume() {
    super.onResume();
    Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (notRequesting && mPreview.faces.size() >= 1 && imageFormat == ImageFormat.NV21) {
                // Block Request.
                notRequesting = false;
                try {
                    Camera.Parameters parameters = camera.getParameters();
                    Size size = parameters.getPreviewSize();
                    textServerView.setText("Preparing Image to send");
                    YuvImage previewImg = new YuvImage(data, parameters.getPreviewFormat(), size.width,
                            size.height, null);
                    pWidth = previewImg.getWidth();
                    pHeight = previewImg.getHeight();
                    Log.d("face View", "Width: " + pWidth + " x Height: " + pHeight);
                    prepareMatrix(matrix, 0, pWidth, pHeight);
                    List<Rect> foundFaces = getFaces();
                    for (Rect cRect : foundFaces) {
                        // Cropping
                        ByteArrayOutputStream bao = new ByteArrayOutputStream();
                        previewImg.compressToJpeg(cRect, 100, bao);
                        byte[] mydata = bao.toByteArray();
                        // Resizing
                        ByteArrayOutputStream sbao = new ByteArrayOutputStream();
                        Bitmap bm = BitmapFactory.decodeByteArray(mydata, 0, mydata.length);
                        Bitmap sbm = Bitmap.createScaledBitmap(bm, 100, 100, true);
                        bm.recycle();
                        sbm.compress(Bitmap.CompressFormat.JPEG, 100, sbao);
                        byte[] mysdata = sbao.toByteArray();
                        RequestParams params = new RequestParams();
                        params.put("upload", new ByteArrayInputStream(mysdata), "tmp.jpg");
                        textServerView.setText("Sending Image to the Server");
                        FaceMatchClient.post(":8080/match", params, new JsonHttpResponseHandler() {
                            @Override
                            public void onSuccess(JSONArray result) {
                                Log.d("face onSuccess", result.toString());
                                try {
                                    JSONObject myJson = (JSONObject) result.get(0);
                                    float dist = (float) Double.parseDouble(myJson.getString("dist"));
                                    Log.d("distance", "" + dist);
                                    int level = (int) ((1 - dist) * 100);
                                    if (level > previousMatchLevel) {
                                        textView.setText("Match " + level + "% with " + myJson.getString("name")
                                                + " <" + myJson.getString("email") + "> ");
                                        loadImage(myJson.getString("classes"), myJson.getString("username"));
                                    }
                                    previousMatchLevel = level;
                                    trialCounter++;
                                    if (trialCounter < 100 && level < 74) {
                                        textServerView.setText("Retrying...");
                                        notRequesting = true;
                                    } else if (trialCounter == 100) {
                                        textServerView.setText("Fail...");
                                    } else {
                                        textServerView.setText("Found Good Match? If not try again!");
                                        fdButtonClicked = false;
                                        trialCounter = 0;
                                        previousMatchLevel = 0;
                                        mCamera.stopFaceDetection();
                                        button.setText("StartFaceDetection");
                                    }
                                } catch (JSONException e) {
                                    e.printStackTrace();
                                }
                                // informationView.showInfo(myJson);
                            }
                        });
                    }
                    textServerView.setText("POST Sent");
                    textServerView.setText("Awaiting for response");
                } catch (Exception e) {
                    e.printStackTrace();
                    textServerView.setText("Error AsyncPOST");
                }
            }
        }
    };
    // Open the default i.e. the first rear facing camera.
    mCamera = Camera.open();
    mCamera.setPreviewCallback(previewCallback);
    // To use front camera
    // mCamera = Camera.open(CameraActivity.getFrontCameraId());
    mPreview.setCamera(mCamera);
    parameters = mCamera.getParameters();
    imageFormat = parameters.getPreviewFormat();
    PreviewSize = parameters.getPreviewSize();
}