List of usage examples for android.hardware.Camera.open()
public static Camera open()
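Camera.open() acquires the first back-facing camera on the device. It returns null when the device has no back-facing camera and throws a RuntimeException when the camera cannot be opened (for example, because it is in use by another process). Since API level 21 the android.hardware.Camera class is deprecated in favor of android.hardware.camera2, but this contract still applies to the examples below. A minimal sketch of the basic open/use/release pattern (not taken from any of the listings; TAG is a placeholder):

Camera camera = null;
try {
    camera = Camera.open();              // first back-facing camera, or null if there is none
} catch (RuntimeException e) {
    Log.e(TAG, "Camera is in use or cannot be opened", e);
}
if (camera != null) {
    Camera.Parameters params = camera.getParameters();
    // ... configure preview size/format, attach a SurfaceHolder, call startPreview() ...

    // When the activity pauses or the surface is destroyed:
    camera.stopPreview();
    camera.release();                    // always free the hardware for other applications
}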
From source file:com.example.android.camera.CameraActivity.java
@Override
protected void onResume() {
    super.onResume();
    Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            if (notRequesting && mPreview.faces.size() >= 1 && imageFormat == ImageFormat.NV21) {
                // Block Request.
                notRequesting = false;
                try {
                    Camera.Parameters parameters = camera.getParameters();
                    Size size = parameters.getPreviewSize();
                    textServerView.setText("Preparing Image to send");
                    YuvImage previewImg = new YuvImage(data, parameters.getPreviewFormat(),
                            size.width, size.height, null);
                    pWidth = previewImg.getWidth();
                    pHeight = previewImg.getHeight();
                    Log.d("face View", "Width: " + pWidth + " x Height: " + pHeight);
                    prepareMatrix(matrix, 0, pWidth, pHeight);
                    List<Rect> foundFaces = getFaces();
                    for (Rect cRect : foundFaces) {
                        // Cropping
                        ByteArrayOutputStream bao = new ByteArrayOutputStream();
                        previewImg.compressToJpeg(cRect, 100, bao);
                        byte[] mydata = bao.toByteArray();

                        // Resizing
                        ByteArrayOutputStream sbao = new ByteArrayOutputStream();
                        Bitmap bm = BitmapFactory.decodeByteArray(mydata, 0, mydata.length);
                        Bitmap sbm = Bitmap.createScaledBitmap(bm, 100, 100, true);
                        bm.recycle();
                        sbm.compress(Bitmap.CompressFormat.JPEG, 100, sbao);
                        byte[] mysdata = sbao.toByteArray();

                        RequestParams params = new RequestParams();
                        params.put("upload", new ByteArrayInputStream(mysdata), "tmp.jpg");
                        textServerView.setText("Sending Image to the Server");

                        FaceMatchClient.post(":8080/match", params, new JsonHttpResponseHandler() {
                            @Override
                            public void onSuccess(JSONArray result) {
                                Log.d("face onSuccess", result.toString());
                                try {
                                    JSONObject myJson = (JSONObject) result.get(0);
                                    float dist = (float) Double.parseDouble(myJson.getString("dist"));
                                    Log.d("distance", "" + dist);
                                    int level = (int) ((1 - dist) * 100);
                                    if (level > previousMatchLevel) {
                                        textView.setText("Match " + level + "% with "
                                                + myJson.getString("name") + " <"
                                                + myJson.getString("email") + "> ");
                                        loadImage(myJson.getString("classes"), myJson.getString("username"));
                                    }
                                    previousMatchLevel = level;

                                    trialCounter++;
                                    if (trialCounter < 100 && level < 74) {
                                        textServerView.setText("Retrying...");
                                        notRequesting = true;
                                    } else if (trialCounter == 100) {
                                        textServerView.setText("Fail...");
                                    } else {
                                        textServerView.setText("Found Good Match? If not try again!");
                                        fdButtonClicked = false;
                                        trialCounter = 0;
                                        previousMatchLevel = 0;
                                        mCamera.stopFaceDetection();
                                        button.setText("StartFaceDetection");
                                    }
                                } catch (JSONException e) {
                                    // TODO Auto-generated catch block
                                    e.printStackTrace();
                                }
                                // informationView.showInfo(myJson);
                            }
                        });
                    }
                    textServerView.setText("POST Sent");
                    textServerView.setText("Awaiting for response");
                } catch (Exception e) {
                    e.printStackTrace();
                    textServerView.setText("Error AsyncPOST");
                }
            }
        }
    };

    // Open the default, i.e. the first rear-facing, camera.
    mCamera = Camera.open();
    mCamera.setPreviewCallback(previewCallback);
    // To use the front camera:
    // mCamera = Camera.open(CameraActivity.getFrontCameraId());
    mPreview.setCamera(mCamera);
    parameters = mCamera.getParameters();
    imageFormat = parameters.getPreviewFormat();
    PreviewSize = parameters.getPreviewSize();
}
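The onResume() above acquires the camera and registers the preview callback, but the matching teardown is not shown in this listing. A sketch of the onPause() such an activity typically needs (assumed, not part of the original source; the field names follow the snippet, and mPreview.setCamera(null) assumes the preview view accepts a null camera):

@Override
protected void onPause() {
    super.onPause();
    if (mCamera != null) {
        mCamera.setPreviewCallback(null);   // stop delivering frames to the callback
        mCamera.stopPreview();
        mPreview.setCamera(null);           // detach the camera from the preview view
        mCamera.release();                  // let other applications use the camera
        mCamera = null;
    }
}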
From source file:com.example.user.lstapp.CreatePlaceFragment.java
public static Camera getCameraInstance() {
    Camera camera = null;
    try {
        camera = Camera.open();
    } catch (RuntimeException re) {
        Log.e(TAG, "Camera is null.", re);
    }
    return camera;
}
From source file:ca.nehil.rter.streamingapp2.StreamingActivity.java
private Camera openCamera() {
    Camera cameraDevice = Camera.open();
    for (int i = 0; i < numberOfCameras && cameraDevice == null; i++) {
        Log.d(LOG_TAG, "opening camera #" + String.valueOf(i));
        cameraDevice = Camera.open(i);
    }
    try {
        if (cameraDevice == null) {
            throw new Exception("No camera device found");
        }
    } catch (Exception e) {
        // cameraDevice is null on this path, so there is nothing to release
        Log.e(LOG_TAG, e.getMessage());
        e.printStackTrace();
    }
    return cameraDevice;
}
From source file:gov.nasa.arc.geocam.geocam.CameraActivity.java
public void surfaceCreated(SurfaceHolder holder) {
    mCamera = Camera.open();
    try {
        mCamera.setPreviewDisplay(holder);
    } catch (IOException e) {
        Log.e(GeoCamMobile.DEBUG_ID, "mCamera.setPreviewDisplay threw an IOException: " + e);
    }
}
From source file:com.longle1.facedetection.MainActivity.java
@Override
public void surfaceCreated(SurfaceHolder holder) {
    // The Surface has been created, acquire the camera and tell it where to draw.
    mCamera = Camera.open();
    try {
        mCamera.setPreviewDisplay(holder);
    } catch (IOException exception) {
        mCamera.release();
        mCamera = null;
    }
}
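Both SurfaceHolder.Callback examples above open the camera in surfaceCreated() but do not show the matching surfaceDestroyed(). A minimal sketch of how it usually looks (assumed, not taken from either source file):

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    // The Surface is about to be destroyed: stop the preview and give the camera back.
    if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.release();
        mCamera = null;
    }
}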
From source file:ca.nehil.rter.streamingapp.StreamingActivity.java
private Camera openCamera() {
    Log.d("CameraDebug", "openCamera");
    Camera cameraDevice = Camera.open();
    numberOfCameras = Camera.getNumberOfCameras();
    for (int i = 0; i < numberOfCameras && cameraDevice == null; i++) {
        Log.d("CameraDebug", "opening camera #" + String.valueOf(i));
        cameraDevice = Camera.open(i);
    }
    try {
        if (cameraDevice == null) {
            throw new Exception("No camera device found");
        }
    } catch (Exception e) {
        Log.d("CameraDebug", "openCamera failed, exception occurred");
        // cameraDevice is null on this path, so there is nothing to release
        Log.e("CameraDebug", e.getMessage());
        e.printStackTrace();
    }
    return cameraDevice;
}
From source file:com.gsma.rcs.ri.sharing.video.OutgoingVideoSharing.java
/**
 * Check whether preview sizes suitable for the encoder are available. Must only be called
 * before the camera is opened.
 *
 * @param cameraId the camera ID
 * @return false if the camera does not offer a suitable preview size for the encoder
 */
boolean checkCameraSize(CameraOptions cameraId) {
    boolean sizeAvailable = false;

    Camera camera = null;
    Method method = getCameraOpenMethod();
    if (method != null) {
        try {
            camera = (Camera) method.invoke(camera, new Object[] { cameraId.getValue() });
        } catch (Exception e) {
            camera = Camera.open();
        }
    } else {
        camera = Camera.open();
    }
    if (camera == null) {
        return false;
    }

    // Check common sizes
    Parameters param = camera.getParameters();
    List<Camera.Size> sizes = param.getSupportedPreviewSizes();
    for (Camera.Size size : sizes) {
        if ((size.width == H264Config.QVGA_WIDTH && size.height == H264Config.QVGA_HEIGHT)
                || (size.width == H264Config.CIF_WIDTH && size.height == H264Config.CIF_HEIGHT)
                || (size.width == H264Config.VGA_WIDTH && size.height == H264Config.VGA_HEIGHT)) {
            sizeAvailable = true;
            break;
        }
    }

    // Release camera
    camera.release();
    return sizeAvailable;
}
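Both OutgoingVideoSharing listings call a getCameraOpenMethod() helper that is not shown on this page. It presumably looks up Camera.open(int) via reflection, a pattern used to stay compatible with API levels below 9, where only the no-argument open() existed. A sketch under that assumption (requires java.lang.reflect.Method):

private Method getCameraOpenMethod() {
    try {
        // Camera.open(int cameraId) was added in API level 9
        return Camera.class.getMethod("open", int.class);
    } catch (NoSuchMethodException e) {
        return null;   // caller falls back to the no-argument Camera.open()
    }
}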
From source file:org.durka.hallmonitor.CoreStateManager.java
/**
 * With this, non-CM (non-CyanogenMod) users can use the torch button in HallMonitor. Should
 * (hopefully) work on every device with the system feature FEATURE_CAMERA_FLASH.
 * This code has been tested on an I9505 (jflte) with ParanoidAndroid 4.4 RC2.
 */
// Turn On Flash
public void turnOnFlash() {
    setTorchOn(true);
    camera = Camera.open();
    Parameters p = camera.getParameters();
    p.setFlashMode(Parameters.FLASH_MODE_TORCH);
    camera.setParameters(p);
    camera.startPreview();
    Log.d(LOG_TAG, "Flash turned on!");
}
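Only the turn-on path is shown above. A sketch of the matching turnOffFlash() (assumed, not part of the original source; setTorchOn(), camera and LOG_TAG follow the snippet):

// Turn Off Flash
public void turnOffFlash() {
    setTorchOn(false);
    if (camera != null) {
        Parameters p = camera.getParameters();
        p.setFlashMode(Parameters.FLASH_MODE_OFF);
        camera.setParameters(p);
        camera.stopPreview();
        camera.release();   // free the camera so other applications can use it
        camera = null;
        Log.d(LOG_TAG, "Flash turned off!");
    }
}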
From source file:hr.abunicic.angular.CameraActivity.java
/**
 * Method that initializes the camera.
 */
void initCamera() {
    try {
        mCamera = Camera.open();
    } catch (Exception e) {
        Log.d("ERROR", "Failed to get camera: " + e.getMessage());
    }

    if (mCamera != null) {
        // Creating a CameraView instance to show camera data
        mCameraView = new CameraView(this, mCamera);
        preview = (FrameLayout) findViewById(R.id.camera_view);
        // Adding the CameraView to the layout
        preview.addView(mCameraView);

        params = mCamera.getParameters();
        List<Camera.Size> ls = params.getSupportedPreviewSizes();
        Camera.Size size = ls.get(1);
        params.setPreviewSize(size.width, size.height);

        // Setting focus mode
        params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
        mCamera.setParameters(params);
        mCamera.setDisplayOrientation(90);
    }

    if (mCameraView == null) {
        // The camera could not be opened, so there is no view to attach listeners to.
        return;
    }

    mCameraView.setOnLongClickListener(new View.OnLongClickListener() {
        @Override
        public boolean onLongClick(View v) {
            Vibrator vib = (Vibrator) getApplicationContext().getSystemService(Context.VIBRATOR_SERVICE);
            vib.vibrate(60);

            // Determining which line is selected
            if (rp != null) {
                try {
                    selectedLine = getTouchedLine(touchX, touchY);
                    selectedLine.color = Color.BLUE;
                } catch (Exception e) {
                }
            }

            // AlertDialog for changing the length of the line
            AlertDialog.Builder alert = new AlertDialog.Builder(CameraActivity.this);
            final EditText edittext = new EditText(CameraActivity.this);
            alert.setMessage("Duljina stranice: ");   // "Side length: "
            alert.setView(edittext);
            alert.setPositiveButton("U redu", new DialogInterface.OnClickListener() {   // "OK"
                public void onClick(DialogInterface dialog, int whichButton) {
                    lineLength = edittext.getText().toString();
                    selectedLine.color = Color.CYAN;
                    Log.d("ocr", "Nova duljina linije: " + lineLength + " ");   // "New line length: "
                    selectedLine = getTouchedLine(numberX, numberY);
                    RecognitionMethods.refreshLines(rp.getLineSegments(), selectedLine, lineLength);
                    updateDescription();
                }
            });
            alert.show();
            return true;
        }
    });

    mCameraView.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            Camera camera = mCamera;
            camera.cancelAutoFocus();
            touchX = event.getX();
            touchY = event.getY();
            return false;
        }
    });
}
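initCamera() above picks ls.get(1), i.e. whatever happens to be the second entry in getSupportedPreviewSizes(), which is not guaranteed to suit every device. A common alternative (a sketch with a hypothetical helper name, not part of the original source) is to pick the supported size closest to a target resolution:

private Camera.Size getBestPreviewSize(Camera.Parameters params, int targetWidth, int targetHeight) {
    Camera.Size best = null;
    long bestDiff = Long.MAX_VALUE;
    for (Camera.Size size : params.getSupportedPreviewSizes()) {
        // Prefer the size whose pixel count is closest to the requested one
        long diff = Math.abs((long) size.width * size.height - (long) targetWidth * targetHeight);
        if (diff < bestDiff) {
            bestDiff = diff;
            best = size;
        }
    }
    return best;
}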
From source file:com.gsma.rcs.ri.sharing.video.OutgoingVideoSharing.java
/**
 * Open the camera.
 *
 * @param cameraId Camera ID
 */
private void openCamera(CameraOptions cameraId) {
    Method method = getCameraOpenMethod();
    if (mNbfCameras > 1 && method != null) {
        try {
            int hCamId = 0;
            if (cameraId == CameraOptions.FRONT) {
                Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
                for (int id = 0; id < mNbfCameras; id++) {
                    Camera.getCameraInfo(id, cameraInfo);
                    if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                        hCamId = id;
                        break;
                    }
                }
            }
            mCamera = (Camera) method.invoke(mCamera, new Object[] { hCamId });
            mOpenedCameraId = cameraId;
        } catch (Exception e) {
            mCamera = Camera.open();
            mOpenedCameraId = CameraOptions.BACK;
        }
    } else {
        mCamera = Camera.open();
        mOpenedCameraId = CameraOptions.BACK;
    }
    if (mVideoPlayer != null) {
        mVideoPlayer.setCameraId(mOpenedCameraId.getValue());
    }
    if (LogUtils.isActive) {
        Log.d(LOGTAG, "Open camera ".concat(mOpenedCameraId.toString()));
    }
}