List of usage examples for android.media MediaActionSound play
public void play(int soundName)
Plays one of the predefined platform sounds for media actions. The sound is identified by one of the MediaActionSound constants: SHUTTER_CLICK, FOCUS_COMPLETE, START_VIDEO_RECORDING, or STOP_VIDEO_RECORDING. If the sound has not been pre-loaded with load(int), play(int) loads it on first use.
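A minimal, self-contained sketch of the typical call pattern (the ShutterSoundHelper class is hypothetical; load(), play(), and release() are the real MediaActionSound methods):

import android.media.MediaActionSound;

public class ShutterSoundHelper {
    private final MediaActionSound sound = new MediaActionSound();

    public ShutterSoundHelper() {
        // Pre-load the sample so the first play() does not have to load on demand.
        sound.load(MediaActionSound.SHUTTER_CLICK);
    }

    // Call right after capturing an image.
    public void playShutter() {
        sound.play(MediaActionSound.SHUTTER_CLICK);
    }

    // Call when the sound is no longer needed to free the audio resources.
    public void release() {
        sound.release();
    }
}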
From source file: com.esri.arcgisruntime.sample.takescreenshot.MainActivity.java
/**
 * Capture the map as an image.
 */
private void captureScreenshotAsync() {
    // export the image from the mMapView
    final ListenableFuture<Bitmap> export = mMapView.exportImageAsync();
    export.addDoneListener(new Runnable() {
        @Override
        public void run() {
            try {
                Bitmap currentMapImage = export.get();
                // play the camera shutter sound
                MediaActionSound sound = new MediaActionSound();
                sound.play(MediaActionSound.SHUTTER_CLICK);
                Log.d(TAG, "Captured the image!!");
                // save the exported bitmap to an image file
                SaveImageTask saveImageTask = new SaveImageTask();
                saveImageTask.execute(currentMapImage);
            } catch (Exception e) {
                Toast.makeText(getApplicationContext(),
                        getResources().getString(R.string.map_export_failure) + e.getMessage(),
                        Toast.LENGTH_SHORT).show();
                Log.e(TAG, getResources().getString(R.string.map_export_failure) + e.getMessage());
            }
        }
    });
}
From source file: ca.frozen.rpicameraviewer.activities.VideoFragment.java
private void takeSnapshot() {
    Bitmap image = textureView.getBitmap();
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_hh_mm_ss");
    String name = camera.network + "_" + camera.name.replaceAll("\\s+", "") + "_"
            + sdf.format(new Date()) + ".jpg";
    Utils.saveImage(getActivity().getContentResolver(), image, name, null);
    MediaActionSound sound = new MediaActionSound();
    sound.play(MediaActionSound.SHUTTER_CLICK);
    Toast toast = Toast.makeText(getActivity(), App.getStr(R.string.image_saved), Toast.LENGTH_SHORT);
    toast.show();
}
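Note that this example, like the others on this page, constructs a new MediaActionSound on every snapshot and never calls release(). A lighter-weight variant, sketched below assuming a Fragment with onCreate/onDestroy callbacks (the shutterSound field is hypothetical), loads the sound once and reuses it:

private MediaActionSound shutterSound;

@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    shutterSound = new MediaActionSound();
    // Load once up front so play() does not have to load on first use.
    shutterSound.load(MediaActionSound.SHUTTER_CLICK);
}

@Override
public void onDestroy() {
    // Free the underlying audio resources.
    shutterSound.release();
    shutterSound = null;
    super.onDestroy();
}

// In takeSnapshot(), the per-call construction then reduces to:
// shutterSound.play(MediaActionSound.SHUTTER_CLICK);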
From source file: ca.frozen.curlingtv.activities.VideoFragment.java
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View view = inflater.inflate(R.layout.fragment_video, container, false);
    view.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            simpleDetector.onTouchEvent(event);
            scaleDetector.onTouchEvent(event);
            return true;
        }
    });

    // configure the name
    nameView = (TextView) view.findViewById(R.id.video_name);
    nameView.setText(camera.name);

    // initialize the message
    messageView = (TextView) view.findViewById(R.id.video_message);
    messageView.setTextColor(App.getClr(R.color.good_text));
    messageView.setText(R.string.initializing_video);

    // set the texture listener
    textureView = (TextureView) view.findViewById(R.id.video_surface);
    textureView.setSurfaceTextureListener(this);

    // create the snapshot button
    snapshotButton = (Button) view.findViewById(R.id.video_snapshot);
    snapshotButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            Bitmap image = textureView.getBitmap();
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_hh_mm_ss");
            String name = camera.network + "_" + camera.name.replaceAll("\\s+", "") + "_"
                    + sdf.format(new Date()) + ".jpg";
            String url = Utils.saveImage(getActivity().getContentResolver(), image, name, null);
            MediaActionSound sound = new MediaActionSound();
            sound.play(MediaActionSound.SHUTTER_CLICK);
        }
    });

    // move the snapshot button over to account for the navigation bar
    if (fullScreen) {
        float scale = getContext().getResources().getDisplayMetrics().density;
        int margin = (int) (5 * scale + 0.5f);
        int extra = Utils.getNavigationBarHeight(getContext(), Configuration.ORIENTATION_LANDSCAPE);
        ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams) snapshotButton.getLayoutParams();
        lp.setMargins(margin, margin, margin + extra, margin);
    }

    return view;
}
From source file: com.projecttango.examples.java.greenscreen.GreenScreenActivity.java
/**
 * Here is where you would set up your rendering logic. We're replacing it with a minimalistic,
 * dummy example, using a standard GLSurfaceView and a basic renderer, for illustration purposes
 * only.
 */
private void setupRenderer() {
    mSurfaceView.setEGLContextClientVersion(2);
    mRenderer = new GreenScreenRenderer(this, new GreenScreenRenderer.RenderCallback() {
        @Override
        public void preRender() {
            // This is the work that you would do on your main OpenGL render thread.
            // We need to be careful to not run any Tango-dependent code in the
            // OpenGL thread unless we know the Tango Service to be properly set up
            // and connected.
            if (!mIsConnected) {
                return;
            }
            // Synchronize against concurrently disconnecting the service triggered
            // from the UI thread.
            synchronized (GreenScreenActivity.this) {
                // Connect the Tango SDK to the OpenGL texture ID where we are
                // going to render the camera.
                // NOTE: This must be done after both the texture is generated
                // and the Tango Service is connected.
                if (mConnectedTextureIdGlThread != mRenderer.getTextureId()) {
                    mTango.connectTextureId(TangoCameraIntrinsics.TANGO_CAMERA_COLOR,
                            mRenderer.getTextureId());
                    mConnectedTextureIdGlThread = mRenderer.getTextureId();
                    Log.d(TAG, "connected to texture id: " + mRenderer.getTextureId());
                    // Set up scene camera projection to match RGB camera intrinsics.
                    mRenderer.setProjectionMatrix(projectionMatrixFromCameraIntrinsics(mIntrinsics));
                    mRenderer.setCameraIntrinsics(mIntrinsics);
                }
                // If there is a new RGB camera frame available, update the texture and
                // scene camera pose.
                if (mIsFrameAvailableTangoThread.compareAndSet(true, false)) {
                    double depthTimestamp = 0;
                    TangoPointCloudData pointCloud = mPointCloudManager.getLatestPointCloud();
                    if (pointCloud != null) {
                        mRenderer.updatePointCloud(pointCloud);
                        depthTimestamp = pointCloud.timestamp;
                    }
                    try {
                        // {@code mRgbTimestampGlThread} contains the exact timestamp at
                        // which the rendered RGB frame was acquired.
                        mRgbTimestampGlThread =
                                mTango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);
                        // In the following code, we define t0 as the depth timestamp
                        // and t1 as the color camera timestamp. Calculate the relative
                        // pose between the color camera frame at timestamp t1 and the
                        // depth camera frame at timestamp t0.
                        TangoPoseData poseColort1Tdeptht0;
                        poseColort1Tdeptht0 = TangoSupport.calculateRelativePose(
                                mRgbTimestampGlThread, TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR,
                                depthTimestamp, TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH);
                        if (poseColort1Tdeptht0.statusCode == TangoPoseData.POSE_VALID) {
                            float[] colort1Tdeptht0 = poseToMatrix(poseColort1Tdeptht0);
                            mRenderer.updateModelMatrix(colort1Tdeptht0);
                        } else {
                            Log.w(TAG, "Could not get relative pose from camera depth at "
                                    + depthTimestamp + " to camera color at "
                                    + mRgbTimestampGlThread);
                        }
                    } catch (Exception e) {
                        Log.e(TAG, "Exception on the OpenGL thread", e);
                    }
                }
            }
        }

        /**
         * This method is called by the renderer when the screenshot has been taken.
         */
        @Override
        public void onScreenshotTaken(final Bitmap screenshotBitmap) {
            // Give immediate feedback to the user.
            MediaActionSound sound = new MediaActionSound();
            sound.play(MediaActionSound.SHUTTER_CLICK);
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mPanelFlash.setVisibility(View.VISIBLE);
                    // Run a fade in and out animation of a white screen.
                    ObjectAnimator fadeIn = ObjectAnimator.ofFloat(mPanelFlash, View.ALPHA, 0, 1);
                    fadeIn.setDuration(100);
                    fadeIn.setInterpolator(new DecelerateInterpolator());
                    ObjectAnimator fadeOut = ObjectAnimator.ofFloat(mPanelFlash, View.ALPHA, 1, 0);
                    fadeOut.setInterpolator(new AccelerateInterpolator());
                    fadeOut.setDuration(100);
                    AnimatorSet animation = new AnimatorSet();
                    animation.playSequentially(fadeIn, fadeOut);
                    animation.addListener(new AnimatorListenerAdapter() {
                        @Override
                        public void onAnimationEnd(Animator animation) {
                            mPanelFlash.setVisibility(View.GONE);
                        }
                    });
                    animation.start();
                }
            });
            // Save bitmap to gallery in background.
            new BitmapSaverTask(screenshotBitmap).execute();
        }
    });
    mSurfaceView.setRenderer(mRenderer);
}