List of usage examples for android.graphics.YuvImage.compressToJpeg
public boolean compressToJpeg(Rect rectangle, int quality, OutputStream stream)
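compressToJpeg encodes the region of the YuvImage covered by rectangle as JPEG data at the given quality (0-100), writes the result to stream, and returns true on success; only ImageFormat.NV21 and ImageFormat.YUY2 source buffers are supported. Before the project examples below, here is a minimal self-contained sketch of a typical call; the class and method names (Nv21JpegHelper, nv21ToJpeg) and the quality value 90 are illustrative assumptions, not taken from any of the listed projects.

import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import java.io.ByteArrayOutputStream;

public class Nv21JpegHelper {
    /** Hypothetical helper: compresses a full NV21 preview frame to a JPEG byte array. */
    public static byte[] nv21ToJpeg(byte[] previewData, int width, int height) {
        // YuvImage only accepts ImageFormat.NV21 and ImageFormat.YUY2 buffers.
        YuvImage yuvImage = new YuvImage(previewData, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Compress the whole frame at quality 90; compressToJpeg returns false if encoding fails.
        boolean ok = yuvImage.compressToJpeg(new Rect(0, 0, width, height), 90, out);
        return ok ? out.toByteArray() : null;
    }
}

In practice the byte array is usually written to a file or streamed over the network, as the examples below do.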
From source file:com.cellbots.eyes.EyesActivity.java
private void takePicture(byte[] imageData) {
    YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
    yuvImage.compressToJpeg(r, 100, out);
    File dir = new File(Environment.getExternalStorageDirectory() + "/cellbot/pictures");
    dir.mkdirs();
    FileOutputStream outStream;
    try {
        String picName = dir.toString() + "/" + System.currentTimeMillis() + ".jpg";
        outStream = new FileOutputStream(picName);
        outStream.write(out.toByteArray());
        outStream.flush();
        outStream.close();
        Log.e("Picture saved:", picName);
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        needToTakePicture = false;
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
    }
}
From source file:com.cellbots.local.EyesView.java
private void takePicture(byte[] imageData, boolean isJpg) {
    if (!isJpg) {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 100, out);
    }
    File dir = new File(Environment.getExternalStorageDirectory() + "/cellbots/pictures");
    dir.mkdirs();
    FileOutputStream outStream;
    try {
        String picName = dir.toString() + "/" + System.currentTimeMillis() + ".jpg";
        outStream = new FileOutputStream(picName);
        outStream.write(!isJpg ? out.toByteArray() : imageData);
        outStream.flush();
        outStream.close();
    } catch (FileNotFoundException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } finally {
        needToTakePicture = false;
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
    }
}
From source file:com.cellbots.eyes.EyesActivity.java
private void appEngineUploadImage(byte[] imageData) {
    Log.e("app engine remote eyes", "called");
    try {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20 seems pretty decent for quality + size.
        Log.e("app engine remote eyes", "upload starting");
        HttpPost httpPost = new HttpPost(postUrl);
        Log.e("app engine perf", "0");
        MultipartEntity entity = new MultipartEntity();
        Log.e("app engine perf", "1");
        entity.addPart("img", new InputStreamBody(new ByteArrayInputStream(out.toByteArray()), "video.jpg"));
        Log.e("app engine perf", "2");
        httpPost.setEntity(entity);
        Log.e("app engine perf", "3");
        HttpResponse response = httpclient.execute(httpPost);
        Log.e("app engine remote eyes", "result: " + response.getStatusLine());
        Log.e("app engine remote eyes", "upload complete");
    } catch (UnsupportedEncodingException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (IllegalStateException e) {
        e.printStackTrace();
        resetAppEngineConnection();
    } catch (ClientProtocolException e) {
        e.printStackTrace();
        resetAppEngineConnection();
    } catch (IOException e) {
        e.printStackTrace();
        resetAppEngineConnection();
    } finally {
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
        isUploading = false;
        Log.e("app engine remote eyes", "finished");
    }
}
From source file:com.cellbots.eyes.EyesActivity.java
private void uploadImage(byte[] imageData) {
    try {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20 seems pretty decent for quality + size.
        if (putUrl.contains("127.0.0.1") || putUrl.contains("localhost")) {
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            resetConnection();
        }
        PutMethod put = new PutMethod(putUrl);
        put.setRequestBody(new ByteArrayInputStream(out.toByteArray()));
        int result = put.execute(mHttpState, mConnection);
        //Log.e("result", result + "");
    } catch (UnsupportedEncodingException e) {
        Log.e(TAG, "UnsupportedEncodingException: Error uploading image: " + e.getMessage());
    } catch (IllegalStateException e) {
        Log.e(TAG, "IllegalStateException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (ClientProtocolException e) {
        Log.e(TAG, "ClientProtocolException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (UnknownHostException e) {
        Log.e(TAG, "UnknownHostException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (NoHttpResponseException e) {
        // Silently ignore this.
    } catch (IOException e) {
        Log.e(TAG, "IOException: Error uploading image: " + e.getMessage());
        resetConnection();
    } finally {
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
        isUploading = false;
    }
}
From source file:com.cellbots.local.EyesView.java
private void uploadImage(byte[] imageData) {
    try {
        YuvImage yuvImage = new YuvImage(imageData, previewFormat, previewWidth, previewHeight, null);
        yuvImage.compressToJpeg(r, 20, out); // Tweak the quality here - 20 seems pretty decent for quality + size.
        if (isLocalUrl) {
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            mParent.setRemoteEyesImage(out.toByteArray());
        } else {
            PutMethod put = new PutMethod(putUrl);
            put.setRequestBody(new ByteArrayInputStream(out.toByteArray()));
            int result = put.execute(mHttpState, mConnection);
        }
        //Log.e("result", result + "");
    } catch (UnsupportedEncodingException e) {
        Log.e(TAG, "UnsupportedEncodingException: Error uploading image: " + e.getMessage());
    } catch (IllegalStateException e) {
        Log.e(TAG, "IllegalStateException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (ClientProtocolException e) {
        Log.e(TAG, "ClientProtocolException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (UnknownHostException e) {
        Log.e(TAG, "UnknownHostException: Error uploading image: " + e.getMessage());
        resetConnection();
    } catch (NoHttpResponseException e) {
        // Silently ignore this.
    } catch (IOException e) {
        Log.e(TAG, "IOException: Error uploading image: " + e.getMessage());
        resetConnection();
    } finally {
        out.reset();
        if (mCamera != null) {
            mCamera.addCallbackBuffer(mCallbackBuffer);
        }
        isUploading = false;
    }
}
From source file:info.guardianproject.iocipher.camera.VideoCameraActivity.java
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    // Even when not recording, we'll compress frames in order to estimate our FPS.
    Camera.Parameters parameters = camera.getParameters();
    mLastWidth = parameters.getPreviewSize().width;
    mLastHeight = parameters.getPreviewSize().height;

    if (mRotation > 0) { // flip height and width
        mLastWidth = parameters.getPreviewSize().height;
        mLastHeight = parameters.getPreviewSize().width;
    }

    mPreviewFormat = parameters.getPreviewFormat();

    byte[] dataResult = data;

    if (mPreCompressFrames) {
        if (mRotation > 0) {
            dataResult = rotateYUV420Degree90(data, mLastHeight, mLastWidth);
            if (getCameraDirection() == CameraInfo.CAMERA_FACING_FRONT) {
                dataResult = rotateYUV420Degree90(dataResult, mLastWidth, mLastHeight);
                dataResult = rotateYUV420Degree90(dataResult, mLastHeight, mLastWidth);
            }
        }

        YuvImage yuv = new YuvImage(dataResult, mPreviewFormat, mLastWidth, mLastHeight, null);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        yuv.compressToJpeg(new Rect(0, 0, mLastWidth, mLastHeight), MediaConstants.sJpegQuality, out);
        dataResult = out.toByteArray();
    }

    if (mFramesTotal == 0 && fileOut != null) {
        try {
            info.guardianproject.iocipher.FileOutputStream fosThumb = new info.guardianproject.iocipher.FileOutputStream(
                    new info.guardianproject.iocipher.File(fileOut.getAbsolutePath() + ".thumb.jpg"));
            fosThumb.write(dataResult);
            fosThumb.flush();
            fosThumb.close();
        } catch (Exception e) {
            Log.e("VideoCam", "can't save thumb", e);
        }
    }

    if (mIsRecording && mFrameQ != null)
        synchronized (mFrameQ) {
            if (data != null) {
                VideoFrame vf = new VideoFrame();
                vf.image = dataResult;
                vf.duration = 1; // this is frame duration, not time
                                 // System.currentTimeMillis() - lastTime;
                vf.fps = mFPS;
                mFrameQ.add(vf);
                mFramesTotal++;
            }
        }

    mFpsCounter++;
    if ((System.currentTimeMillis() - start) >= 1000) {
        mFPS = mFpsCounter;
        mFpsCounter = 0;
        start = System.currentTimeMillis();
    }
}
From source file:com.jasompeter.openalpr.CameraActivity.java
public void startPreview() {
    try {
        mCamera.setPreviewDisplay(mSurfaceHolder);
    } catch (IOException e) {
        e.printStackTrace();
        Log.d(TAG, "Cannot set preview display.");
    }

    setCorrectOrientation(mCamera);
    setCorrectSize(mCamera, mSurfaceHolder.getSurfaceFrame().width(), mSurfaceHolder.getSurfaceFrame().height());
    mCamera.startPreview();

    mCamera.setPreviewCallback(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (!mEnableRecognition) {
                return;
            }

            if (camera.getParameters().getPreviewFormat() == ImageFormat.NV21) {
                Camera.Size previewSize = camera.getParameters().getPreviewSize();
                YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 50, baos);
                recognize(baos.toByteArray());
            }
        }
    });
}
From source file:com.android.camera.manager.ThumbnailViewManager.java
private byte[] covertYuvDataToJpeg(byte[] data, int yuvWidth, int yuvHeight, int imageFormat) {
    byte[] jpeg;
    Rect rect = new Rect(0, 0, yuvWidth, yuvHeight);
    // TODO: the yuv data from native must be NV21 or YUY2.
    YuvImage yuvImg = new YuvImage(data, imageFormat, yuvWidth, yuvHeight, null);
    ByteArrayOutputStream outputstream = new ByteArrayOutputStream();
    int jpegQuality = CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
    yuvImg.compressToJpeg(rect, jpegQuality, outputstream);
    jpeg = outputstream.toByteArray();
    return jpeg;
}
From source file:org.deviceconnect.android.deviceplugin.host.camera.CameraOverlay.java
@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    synchronized (mCameraLock) {
        final long currentTime = System.currentTimeMillis();
        if (mLastFrameTime != 0) {
            if ((currentTime - mLastFrameTime) < mFrameInterval) {
                mLastFrameTime = currentTime;
                return;
            }
        }

        if (mCamera != null && mCamera.equals(camera)) {
            mCamera.setPreviewCallback(null);
            if (mServer != null) {
                int format = mPreview.getPreviewFormat();
                int width = mPreview.getPreviewWidth();
                int height = mPreview.getPreviewHeight();

                YuvImage yuvimage = new YuvImage(data, format, width, height, null);
                Rect rect = new Rect(0, 0, width, height);
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                if (yuvimage.compressToJpeg(rect, JPEG_COMPRESS_QUALITY, baos)) {
                    byte[] jdata = baos.toByteArray();

                    int degree = mPreview.getCameraDisplayOrientation(mContext);
                    if (degree == 0) {
                        mServer.offerMedia(jdata);
                    } else {
                        BitmapFactory.Options bitmapFactoryOptions = new BitmapFactory.Options();
                        bitmapFactoryOptions.inPreferredConfig = Bitmap.Config.RGB_565;
                        Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length, bitmapFactoryOptions);
                        if (bmp != null) {
                            Matrix m = new Matrix();
                            m.setRotate(degree * mFacingDirection);

                            Bitmap rotatedBmp = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), m, true);
                            if (rotatedBmp != null) {
                                baos.reset();
                                if (rotatedBmp.compress(CompressFormat.JPEG, JPEG_COMPRESS_QUALITY, baos)) {
                                    mServer.offerMedia(baos.toByteArray());
                                }
                                rotatedBmp.recycle();
                            }
                            bmp.recycle();
                        }
                    }
                }
            }
            mCamera.setPreviewCallback(this);
        }
        mLastFrameTime = currentTime;
    }
}
From source file:com.example.android.camera.CameraActivity.java
@Override
protected void onResume() {
    super.onResume();

    Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (notRequesting && mPreview.faces.size() >= 1 && imageFormat == ImageFormat.NV21) {
                // Block Request.
                notRequesting = false;
                try {
                    Camera.Parameters parameters = camera.getParameters();
                    Size size = parameters.getPreviewSize();
                    textServerView.setText("Preparing Image to send");
                    YuvImage previewImg = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null);
                    pWidth = previewImg.getWidth();
                    pHeight = previewImg.getHeight();
                    Log.d("face View", "Width: " + pWidth + " x Height: " + pHeight);

                    prepareMatrix(matrix, 0, pWidth, pHeight);
                    List<Rect> foundFaces = getFaces();
                    for (Rect cRect : foundFaces) {
                        // Cropping
                        ByteArrayOutputStream bao = new ByteArrayOutputStream();
                        previewImg.compressToJpeg(cRect, 100, bao);
                        byte[] mydata = bao.toByteArray();

                        // Resizing
                        ByteArrayOutputStream sbao = new ByteArrayOutputStream();
                        Bitmap bm = BitmapFactory.decodeByteArray(mydata, 0, mydata.length);
                        Bitmap sbm = Bitmap.createScaledBitmap(bm, 100, 100, true);
                        bm.recycle();
                        sbm.compress(Bitmap.CompressFormat.JPEG, 100, sbao);
                        byte[] mysdata = sbao.toByteArray();

                        RequestParams params = new RequestParams();
                        params.put("upload", new ByteArrayInputStream(mysdata), "tmp.jpg");
                        textServerView.setText("Sending Image to the Server");
                        FaceMatchClient.post(":8080/match", params, new JsonHttpResponseHandler() {
                            @Override
                            public void onSuccess(JSONArray result) {
                                Log.d("face onSuccess", result.toString());
                                try {
                                    JSONObject myJson = (JSONObject) result.get(0);
                                    float dist = (float) Double.parseDouble(myJson.getString("dist"));
                                    Log.d("distance", "" + dist);
                                    int level = (int) ((1 - dist) * 100);
                                    if (level > previousMatchLevel) {
                                        textView.setText("Match " + level + "% with " + myJson.getString("name")
                                                + " <" + myJson.getString("email") + "> ");
                                        loadImage(myJson.getString("classes"), myJson.getString("username"));
                                    }
                                    previousMatchLevel = level;

                                    trialCounter++;
                                    if (trialCounter < 100 && level < 74) {
                                        textServerView.setText("Retrying...");
                                        notRequesting = true;
                                    } else if (trialCounter == 100) {
                                        textServerView.setText("Fail...");
                                    } else {
                                        textServerView.setText("Found Good Match? If not try again!");
                                        fdButtonClicked = false;
                                        trialCounter = 0;
                                        previousMatchLevel = 0;
                                        mCamera.stopFaceDetection();
                                        button.setText("StartFaceDetection");
                                    }
                                } catch (JSONException e) {
                                    // TODO Auto-generated catch block
                                    e.printStackTrace();
                                }
                                // informationView.showInfo(myJson);
                            }
                        });
                    }
                    textServerView.setText("POST Sent");
                    textServerView.setText("Awaiting for response");
                } catch (Exception e) {
                    e.printStackTrace();
                    textServerView.setText("Error AsyncPOST");
                }
            }
        }
    };

    // Open the default i.e. the first rear facing camera.
    mCamera = Camera.open();
    mCamera.setPreviewCallback(previewCallback);
    // To use front camera
    // mCamera = Camera.open(CameraActivity.getFrontCameraId());
    mPreview.setCamera(mCamera);
    parameters = mCamera.getParameters();
    imageFormat = parameters.getPreviewFormat();
    PreviewSize = parameters.getPreviewSize();
}