Example usage for android.media MediaActionSound SHUTTER_CLICK

Introduction

This page collects example usages of android.media.MediaActionSound#SHUTTER_CLICK from open-source projects.

Prototype

public static final int SHUTTER_CLICK

Documentation

The sound used by android.hardware.Camera#takePicture to indicate still image capture.
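
Before the real-world examples, here is a minimal sketch of the typical pattern: preload the sound once, play it at capture time, and release it when done. The class and method names (ShutterSoundExample, onPictureTaken) are illustrative, not taken from any of the projects below.

import android.media.MediaActionSound;

public class ShutterSoundExample {

    // Preload the shutter sound once so the first play() call has no loading latency.
    private final MediaActionSound sound = new MediaActionSound();

    public ShutterSoundExample() {
        sound.load(MediaActionSound.SHUTTER_CLICK);
    }

    // Call this at the moment a still image is captured.
    public void onPictureTaken() {
        sound.play(MediaActionSound.SHUTTER_CLICK);
    }

    // Call this when the sound is no longer needed, to free native resources.
    public void close() {
        sound.release();
    }
}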

Usage

From source file: com.esri.arcgisruntime.sample.takescreenshot.MainActivity.java

/**
 * Capture the map as an image.
 */
private void captureScreenshotAsync() {

    // export the image from the mMapView
    final ListenableFuture<Bitmap> export = mMapView.exportImageAsync();
    export.addDoneListener(new Runnable() {
        @Override
        public void run() {
            try {
                Bitmap currentMapImage = export.get();
                // play the camera shutter sound
                MediaActionSound sound = new MediaActionSound();
                sound.play(MediaActionSound.SHUTTER_CLICK);
                Log.d(TAG, "Captured the image!!");
                // save the exported bitmap to an image file
                SaveImageTask saveImageTask = new SaveImageTask();
                saveImageTask.execute(currentMapImage);
            } catch (Exception e) {
                Toast.makeText(getApplicationContext(),
                        getResources().getString(R.string.map_export_failure) + e.getMessage(),
                        Toast.LENGTH_SHORT).show();
                Log.e(TAG, getResources().getString(R.string.map_export_failure) + e.getMessage());
            }
        }
    });
}
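
Note that this example creates a new MediaActionSound on each capture and plays SHUTTER_CLICK without calling load() first; play() then loads the sample on demand, which can add a slight delay to the first click. The ZslFocusCamera and OneCameraZslImpl examples further down preload the sound instead.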

From source file: ca.frozen.curlingtv.activities.VideoFragment.java

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View view = inflater.inflate(R.layout.fragment_video, container, false);
    view.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            simpleDetector.onTouchEvent(event);
            scaleDetector.onTouchEvent(event);
            return true;
        }
    });

    // configure the name
    nameView = (TextView) view.findViewById(R.id.video_name);
    nameView.setText(camera.name);

    // initialize the message
    messageView = (TextView) view.findViewById(R.id.video_message);
    messageView.setTextColor(App.getClr(R.color.good_text));
    messageView.setText(R.string.initializing_video);

    // set the texture listener
    textureView = (TextureView) view.findViewById(R.id.video_surface);
    textureView.setSurfaceTextureListener(this);

    // create the snapshot button
    snapshotButton = (Button) view.findViewById(R.id.video_snapshot);
    snapshotButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            Bitmap image = textureView.getBitmap();
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_hh_mm_ss");
            String name = camera.network + "_" + camera.name.replaceAll("\\s+", "") + "_"
                    + sdf.format(new Date()) + ".jpg";
            String url = Utils.saveImage(getActivity().getContentResolver(), image, name, null);
            MediaActionSound sound = new MediaActionSound();
            sound.play(MediaActionSound.SHUTTER_CLICK);
        }
    });

    // move the snapshot button over to account for the navigation bar
    if (fullScreen) {
        float scale = getContext().getResources().getDisplayMetrics().density;
        int margin = (int) (5 * scale + 0.5f);
        int extra = Utils.getNavigationBarHeight(getContext(), Configuration.ORIENTATION_LANDSCAPE);
        ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams) snapshotButton.getLayoutParams();
        lp.setMargins(margin, margin, margin + extra, margin);
    }

    return view;
}

From source file: com.obviousengine.android.focus.ZslFocusCamera.java

/**
 * Instantiates a new camera based on Camera 2 API.
 *
 * @param device The underlying Camera 2 device.
 * @param characteristics The device's characteristics.
 * @param pictureSize the size of the final image to be taken.
 */
ZslFocusCamera(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
    Timber.v("Creating new ZslFocusCamera");

    this.device = device;
    this.characteristics = characteristics;
    fullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

    cameraThread = new HandlerThread("FocusCamera");
    // If this thread stalls, it will delay viewfinder frames.
    cameraThread.setPriority(Thread.MAX_PRIORITY);
    cameraThread.start();
    cameraHandler = new Handler(cameraThread.getLooper());

    cameraListenerThread = new HandlerThread("FocusCamera-Listener");
    cameraListenerThread.start();
    cameraListenerHandler = new Handler(cameraListenerThread.getLooper());

    // TODO: Encoding on multiple cores results in preview jank due to
    // excessive GC.
    int numEncodingCores = Utils.getNumCpuCores();
    imageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());

    captureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, cameraListenerHandler, imageSaverThreadPool);
    captureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
        @Override
        public void onReadyStateChange(boolean capturePossible) {
            readyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, capturePossible);
        }
    });

    // Listen for changes to auto focus state and dispatch to
    // focusStateListener.
    captureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
            new ImageCaptureManager.MetadataChangeListener() {
                @Override
                public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                        CaptureResult result) {
                    if (focusStateListener == null) {
                        return;
                    }
                    focusStateListener.onFocusStatusUpdate(
                            AutoFocusHelper.stateFromCamera2State(result.get(CaptureResult.CONTROL_AF_STATE)),
                            result.getFrameNumber());
                }
            });

    // Allocate the image reader to store all images received from the
    // camera.
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    captureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
            CAPTURE_IMAGE_FORMAT, MAX_CAPTURE_IMAGES);

    captureImageReader.setOnImageAvailableListener(captureManager, cameraHandler);
    mediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
}

From source file: com.obviousengine.android.focus.ZslFocusCamera.java

private void onShutterInvokeUI(final PhotoCaptureParameters params) {
    // Tell CaptureModule shutter has occurred so it can flash the screen.
    params.callback.onQuickExpose();
    // Play shutter click sound.
    mediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
}
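
Because the constructor above already loaded SHUTTER_CLICK, this play() call does not need to load the sample first and fires with minimal latency.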

From source file: ca.frozen.rpicameraviewer.activities.VideoFragment.java

private void takeSnapshot() {
    Bitmap image = textureView.getBitmap();
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy_MM_dd_hh_mm_ss");
    String name = camera.network + "_" + camera.name.replaceAll("\\s+", "") + "_" + sdf.format(new Date())
            + ".jpg";
    Utils.saveImage(getActivity().getContentResolver(), image, name, null);
    MediaActionSound sound = new MediaActionSound();
    sound.play(MediaActionSound.SHUTTER_CLICK);
    Toast toast = Toast.makeText(getActivity(), App.getStr(R.string.image_saved), Toast.LENGTH_SHORT);
    toast.show();
}

From source file: com.android.camera.one.v2.OneCameraZslImpl.java

/**
 * Instantiates a new camera based on Camera 2 API.
 *
 * @param device The underlying Camera 2 device.
 * @param characteristics The device's characteristics.
 * @param pictureSize the size of the final image to be taken.
 */
OneCameraZslImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
    Log.v(TAG, "Creating new OneCameraZslImpl");

    mDevice = device;
    mCharacteristics = characteristics;
    mLensRange = LensRangeCalculator.getDiopterToRatioCalculator(characteristics);
    mDirection = new CameraDirectionProvider(mCharacteristics);
    mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

    mCameraThread = new HandlerThread("OneCamera2");
    // If this thread stalls, it will delay viewfinder frames.
    mCameraThread.setPriority(Thread.MAX_PRIORITY);
    mCameraThread.start();
    mCameraHandler = new Handler(mCameraThread.getLooper());

    mCameraListenerThread = new HandlerThread("OneCamera2-Listener");
    mCameraListenerThread.start();
    mCameraListenerHandler = new Handler(mCameraListenerThread.getLooper());

    // TODO: Encoding on multiple cores results in preview jank due to
    // excessive GC.
    int numEncodingCores = CameraUtil.getNumCpuCores();
    mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10, TimeUnit.SECONDS,
            new LinkedBlockingQueue<Runnable>());

    mCaptureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
            mImageSaverThreadPool);
    mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
        @Override
        public void onReadyStateChange(boolean capturePossible) {
            mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY, capturePossible);
        }
    });

    // Listen for changes to auto focus state and dispatch to
    // mFocusStateListener.
    mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
            new ImageCaptureManager.MetadataChangeListener() {
                @Override
                public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                        CaptureResult result) {
                    FocusStateListener listener = mFocusStateListener;
                    if (listener != null) {
                        listener.onFocusStatusUpdate(AutoFocusHelper.stateFromCamera2State(
                                result.get(CaptureResult.CONTROL_AF_STATE)), result.getFrameNumber());
                    }
                }
            });

    // Allocate the image reader to store all images received from the
    // camera.
    if (pictureSize == null) {
        // TODO The default should be selected by the caller, and
        // pictureSize should never be null.
        pictureSize = getDefaultPictureSize();
    }
    mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(), pictureSize.getHeight(),
            sCaptureImageFormat, MAX_CAPTURE_IMAGES);

    mCaptureImageReader.setOnImageAvailableListener(mCaptureManager, mCameraHandler);
    mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
}

From source file: com.android.camera.one.v2.OneCameraZslImpl.java

private void onShutterInvokeUI(final PhotoCaptureParameters params) {
    // Tell CaptureModule shutter has occurred so it can flash the screen.
    params.callback.onQuickExpose();
    // Play shutter click sound.
    mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
}

From source file: com.projecttango.examples.java.greenscreen.GreenScreenActivity.java

/**
 * Here is where you would set up your rendering logic. We're replacing it with a minimalistic,
 * dummy example, using a standard GLSurfaceView and a basic renderer, for illustration purposes
 * only.
 */
private void setupRenderer() {
    mSurfaceView.setEGLContextClientVersion(2);
    mRenderer = new GreenScreenRenderer(this, new GreenScreenRenderer.RenderCallback() {

        @Override
        public void preRender() {
            // This is the work that you would do on your main OpenGL render thread.

            // We need to be careful to not run any Tango-dependent code in the
            // OpenGL thread unless we know the Tango Service to be properly set up
            // and connected.
            if (!mIsConnected) {
                return;
            }

            // Synchronize against concurrently disconnecting the service triggered
            // from the UI thread.
            synchronized (GreenScreenActivity.this) {
                // Connect the Tango SDK to the OpenGL texture ID where we are
                // going to render the camera.
                // NOTE: This must be done after both the texture is generated
                // and the Tango Service is connected.
                if (mConnectedTextureIdGlThread != mRenderer.getTextureId()) {
                    mTango.connectTextureId(TangoCameraIntrinsics.TANGO_CAMERA_COLOR, mRenderer.getTextureId());
                    mConnectedTextureIdGlThread = mRenderer.getTextureId();
                    Log.d(TAG, "connected to texture id: " + mRenderer.getTextureId());
                    // Set up scene camera projection to match RGB camera intrinsics.
                    mRenderer.setProjectionMatrix(projectionMatrixFromCameraIntrinsics(mIntrinsics));
                    mRenderer.setCameraIntrinsics(mIntrinsics);
                }
                // If there is a new RGB camera frame available, update the texture and
                // scene camera pose.
                if (mIsFrameAvailableTangoThread.compareAndSet(true, false)) {
                    double depthTimestamp = 0;
                    TangoPointCloudData pointCloud = mPointCloudManager.getLatestPointCloud();
                    if (pointCloud != null) {
                        mRenderer.updatePointCloud(pointCloud);
                        depthTimestamp = pointCloud.timestamp;
                    }
                    try {
                        // {@code mRgbTimestampGlThread} contains the exact timestamp at
                        // which the rendered RGB frame was acquired.
                        mRgbTimestampGlThread = mTango.updateTexture(TangoCameraIntrinsics.TANGO_CAMERA_COLOR);

                        // In the following code, we define t0 as the depth timestamp
                        // and t1 as the color camera timestamp.

                        // Calculate the relative pose between color camera frame at
                        // timestamp color_timestamp t1 and depth.
                        TangoPoseData poseColort1Tdeptht0;
                        poseColort1Tdeptht0 = TangoSupport.calculateRelativePose(mRgbTimestampGlThread,
                                TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR, depthTimestamp,
                                TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH);
                        if (poseColort1Tdeptht0.statusCode == TangoPoseData.POSE_VALID) {
                            float[] colort1Tdeptht0 = poseToMatrix(poseColort1Tdeptht0);
                            mRenderer.updateModelMatrix(colort1Tdeptht0);
                        } else {
                            Log.w(TAG, "Could not get relative pose from camera depth" + " " + "at "
                                    + depthTimestamp + " to camera color at " + mRgbTimestampGlThread);
                        }
                    } catch (Exception e) {
                        Log.e(TAG, "Exception on the OpenGL thread", e);
                    }
                }
            }
        }

        /**
         * This method is called by the renderer when the screenshot has been taken.
         */
        @Override
        public void onScreenshotTaken(final Bitmap screenshotBitmap) {
            // Give immediate feedback to the user.
            MediaActionSound sound = new MediaActionSound();
            sound.play(MediaActionSound.SHUTTER_CLICK);
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mPanelFlash.setVisibility(View.VISIBLE);
                    // Run a fade in and out animation of a white screen.
                    ObjectAnimator fadeIn = ObjectAnimator.ofFloat(mPanelFlash, View.ALPHA, 0, 1);
                    fadeIn.setDuration(100);
                    fadeIn.setInterpolator(new DecelerateInterpolator());
                    ObjectAnimator fadeOut = ObjectAnimator.ofFloat(mPanelFlash, View.ALPHA, 1, 0);
                    fadeOut.setInterpolator(new AccelerateInterpolator());
                    fadeOut.setDuration(100);

                    AnimatorSet animation = new AnimatorSet();
                    animation.playSequentially(fadeIn, fadeOut);
                    animation.addListener(new AnimatorListenerAdapter() {
                        @Override
                        public void onAnimationEnd(Animator animation) {
                            mPanelFlash.setVisibility(View.GONE);
                        }
                    });
                    animation.start();
                }
            });
            // Save bitmap to gallery in background.
            new BitmapSaverTask(screenshotBitmap).execute();
        }
    });

    mSurfaceView.setRenderer(mRenderer);
}
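
This example pairs the audible shutter click with a brief white flash animation, mirroring the system camera's capture feedback. As in the earlier snapshot examples, the MediaActionSound is created on demand rather than preloaded.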