List of usage examples for android.app Activity getWindowManager
public WindowManager getWindowManager()
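The examples below share a common pattern: obtain the WindowManager from the host Activity, query the default Display for its current rotation, and feed that rotation into camera, capture, or recorder configuration. The following is a minimal sketch of that pattern, not taken from any of the samples below; the class name ExampleActivity and the log tag are placeholders. Note that WindowManager.getDefaultDisplay() is deprecated on newer API levels in favor of Context#getDisplay(), but it is what all of the samples below use.

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;

public class ExampleActivity extends Activity {

    private static final String TAG = "ExampleActivity";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // getWindowManager() returns the WindowManager this Activity's window is attached to.
        WindowManager windowManager = getWindowManager();

        // The default Display reports the current rotation of the screen.
        Display display = windowManager.getDefaultDisplay();
        int rotation = display.getRotation();

        // rotation is one of Surface.ROTATION_0, ROTATION_90, ROTATION_180, ROTATION_270.
        boolean isLandscape = rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270;
        Log.d(TAG, "Display rotation: " + rotation + ", landscape: " + isLandscape);
    }
}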
From source file:research.dlsu.cacaoapp.Camera2BasicFragment1.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from both {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                showToast("Saved: " + mFile);
                Log.d(TAG, mFile.toString());
                saveEntryToDatabase();
                unlockFocus();
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:cliq.com.cliqgram.fragments.CameraFragment.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from both {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        captureBuilder.set(CaptureRequest.CONTROL_AE_MODE,
                CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

        // Check whether to turn the flash mode on or off.
        captureBuilder.set(CaptureRequest.FLASH_MODE,
                flashOn ? CaptureResult.FLASH_MODE_SINGLE : CaptureResult.FLASH_MODE_OFF);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                showToast("Saved: " + mFile);
                Log.d(TAG, "Saved file: " + mFile.toString());
                unlockFocus();
                startImageDisplayActivity(mFile.getName());
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:org.tensorflow.demo.Camera2BasicFragment.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from both {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));

        sensorOrientation = rotation + getOrientation(rotation);

        frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight,
                INPUT_SIZE, INPUT_SIZE, sensorOrientation, MAINTAIN_ASPECT);

        cropToFrameTransform = new Matrix();
        frameToCropTransform.invert(cropToFrameTransform);

        classifier = TensorFlowImageClassifier.create(getActivity().getAssets(), MODEL_FILE, LABEL_FILE,
                INPUT_SIZE, IMAGE_MEAN, IMAGE_STD, INPUT_NAME, OUTPUT_NAME);

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                Log.d(TAG, mFile.toString());
                unlockFocus();
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:github.why168.swipeback.view.SwipeBackLayout.java
private boolean newCheckDeviceHasNavigationBar(Activity activity) {
    WindowManager windowManager = activity.getWindowManager();
    Display display = windowManager.getDefaultDisplay();

    // The real metrics include system decorations such as the navigation bar; they are only
    // available on API 17+. Below JELLY_BEAN_MR1 they stay at zero, so this method returns false.
    DisplayMetrics realDisplayMetrics = new DisplayMetrics();
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
        display.getRealMetrics(realDisplayMetrics);
    }
    int realHeight = realDisplayMetrics.heightPixels;
    int realWidth = realDisplayMetrics.widthPixels;

    // The regular metrics report only the usable display area, excluding system decorations.
    DisplayMetrics displayMetrics = new DisplayMetrics();
    display.getMetrics(displayMetrics);
    int displayHeight = displayMetrics.heightPixels;
    int displayWidth = displayMetrics.widthPixels;

    // If the real size exceeds the usable size in either dimension, a navigation bar is present.
    return (realWidth - displayWidth) > 0 || (realHeight - displayHeight) > 0;
}
From source file:research.dlsu.crabapp.Camera2BasicFragment.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from both {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                showToast("Saved: " + mFile);
                Log.d(TAG, mFile.toString());
                cu = saveEntryToDatabase();
                unlockFocus();
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:edu.uw.seeforme.seeforme.Camera2BasicFragment.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from both {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                //showToast("Saved: " + mFile);
                // showToast("Processing image");
                Log.d(TAG, mFile.toString());
                unlockFocus();
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:com.microblink.barcode.customcamera.camera2.Camera2Fragment.java
/**
 * Sets up member variables related to camera.
 *
 * @param width  The width of available size for camera preview
 * @param height The height of available size for camera preview
 */
private void setUpCameraOutputs(int width, int height) {
    Activity activity = getActivity();
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        for (String cameraId : manager.getCameraIdList()) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);

            // We don't use a front facing camera in this sample.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                continue;
            }

            StreamConfigurationMap map = characteristics
                    .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                continue;
            }

            // For still image captures, we use the largest available size.
            Size largest = Collections.max(Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
                    new CompareSizesByArea());

            // Find out if we need to swap dimension to get the preview size relative to sensor
            // coordinate.
            int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
            int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            boolean swappedDimensions = false;
            switch (displayRotation) {
                case Surface.ROTATION_0:
                case Surface.ROTATION_180:
                    if (sensorOrientation == 90 || sensorOrientation == 270) {
                        swappedDimensions = true;
                    }
                    break;
                case Surface.ROTATION_90:
                case Surface.ROTATION_270:
                    if (sensorOrientation == 0 || sensorOrientation == 180) {
                        swappedDimensions = true;
                    }
                    break;
                default:
                    Log.e(TAG, "Display rotation is invalid: " + displayRotation);
            }

            Point displaySize = new Point();
            activity.getWindowManager().getDefaultDisplay().getSize(displaySize);
            int maxPreviewWidth = displaySize.x;
            int maxPreviewHeight = displaySize.y;

            if (swappedDimensions) {
                maxPreviewWidth = displaySize.y;
                maxPreviewHeight = displaySize.x;
            }

            if (maxPreviewWidth > MAX_PREVIEW_WIDTH) {
                maxPreviewWidth = MAX_PREVIEW_WIDTH;
            }

            if (maxPreviewHeight > MAX_PREVIEW_HEIGHT) {
                maxPreviewHeight = MAX_PREVIEW_HEIGHT;
            }

            // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
            // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
            // garbage capture data.
            mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
                    maxPreviewWidth, maxPreviewHeight, largest);

            Log.i(TAG, "Preview size is " + mPreviewSize.toString());

            mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
                    ImageFormat.YUV_420_888, /*maxImages*/ 1);
            mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);

            // We fit the aspect ratio of TextureView to the size of preview we picked.
            int orientation = getResources().getConfiguration().orientation;
            if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
                mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
            } else {
                mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
            }

            mCameraId = cameraId;
            return;
        }
    } catch (CameraAccessException e) {
        e.printStackTrace();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown when the Camera2API is used but not supported on the
        // device this code runs.
        ErrorDialog.newInstance("Camera error").show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
From source file:com.microsoft.projectoxford.face.samples.ui.Camera2BasicFragment.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from both {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        if (!flashon) {
            captureBuilder.addTarget(mImageReader.getSurface());
        } else {
            captureBuilder.addTarget(nImageReader.getSurface());
        }

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                //showToast("Saved: " + mFile);
                Log.d(TAG, mFile.toString());
                unlockFocus();
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:net.ddns.mlsoftlaberge.trycorder.TryviscamFragment.java
/**
 * Capture a still picture. This method should be called when we get a response in
 * {@link #mCaptureCallback} from both {@link #lockFocus()}.
 */
private void captureStillPicture() {
    try {
        final Activity activity = getActivity();
        if (null == activity || null == mCameraDevice) {
            return;
        }
        // This is the CaptureRequest.Builder that we use to take a picture.
        final CaptureRequest.Builder captureBuilder = mCameraDevice
                .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(mImageReader.getSurface());

        // Use the same AE and AF modes as the preview.
        captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
                CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
        setAutoFlash(captureBuilder);
        setVisionMode(captureBuilder);

        // Orientation
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));

        CameraCaptureSession.CaptureCallback CaptureCallback = new CameraCaptureSession.CaptureCallback() {

            @Override
            public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                    @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
                showToast("Saved: " + mFile);
                Log.d(TAG, mFile.toString());
                unlockFocus();
            }
        };

        mCaptureSession.stopRepeating();
        mCaptureSession.capture(captureBuilder.build(), CaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
From source file:kr.ac.kpu.wheeling.blackbox.Camera2VideoFragment.java
private void setUpMediaRecorder() throws IOException {
    final Activity activity = getActivity();
    if (null == activity) {
        return;
    }
    // Configure sources, container format, and output path before the encoders.
    mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
    mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    if (mNextVideoAbsolutePath == null || mNextVideoAbsolutePath.isEmpty()) {
        mNextVideoAbsolutePath = getVideoFilePath(getActivity());
    }
    mMediaRecorder.setOutputFile(mNextVideoAbsolutePath);
    mMediaRecorder.setVideoEncodingBitRate(16900000);
    mMediaRecorder.setAudioEncodingBitRate(96100);
    mMediaRecorder.setVideoFrameRate(30);
    mMediaRecorder.setAudioSamplingRate(48000);
    mMediaRecorder.setVideoSize(mVideoSize.getWidth(), mVideoSize.getHeight());
    mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
    mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);

    // Derive the orientation hint from the sensor orientation and the current display rotation.
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    switch (mSensorOrientation) {
        case SENSOR_ORIENTATION_DEFAULT_DEGREES:
            mMediaRecorder.setOrientationHint(DEFAULT_ORIENTATIONS.get(rotation));
            break;
        case SENSOR_ORIENTATION_INVERSE_DEGREES:
            mMediaRecorder.setOrientationHint(INVERSE_ORIENTATIONS.get(rotation));
            break;
    }
    mMediaRecorder.prepare();
}