Usage examples for android.media.AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK

public static final int AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK

Used to indicate a temporary request of audio focus, anticipated to last a short amount of time, during which it is acceptable for other audio applications to keep playing at a lowered ("ducked") volume.
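Before the project listings, a minimal sketch of requesting this focus type with the AudioFocusRequest API available on API level 26 and higher; the context parameter, the chosen audio attributes, and the listener body are illustrative assumptions rather than code taken from the projects below.

private void requestTransientDuckFocus(Context context) {
    // Sketch only: request transient focus and allow other apps to duck (API 26+).
    AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    AudioAttributes attributes = new AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_ASSISTANCE_NAVIGATION_GUIDANCE)
            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
            .build();
    AudioFocusRequest focusRequest =
            new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK)
                    .setAudioAttributes(attributes)
                    .setOnAudioFocusChangeListener(focusChange -> { /* react to focus changes */ })
                    .build();
    int result = audioManager.requestAudioFocus(focusRequest);
    if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
        // Play the short clip here, then release focus so ducked apps can restore their volume.
        audioManager.abandonAudioFocusRequest(focusRequest);
    }
}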
From source file:com.google.android.car.kitchensink.audio.AudioTestFragment.java
private void handleNavStart() {
    if (mAppFocusManager == null) {
        return;
    }
    if (mCarAudioManager == null) {
        return;
    }
    if (DBG) {
        Log.i(TAG, "Nav start");
    }
    try {
        mAppFocusManager.requestAppFocus(CarAppFocusManager.APP_FOCUS_TYPE_NAVIGATION, mOwnershipCallbacks);
        mCarAudioManager.requestAudioFocus(mNavFocusListener, mNavAudioAttrib,
                AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK, 0);
    } catch (CarNotConnectedException e) {
        Log.e(TAG, "Failed to set active focus", e);
    }
}
From source file:com.hotstar.player.adplayer.player.PlayerFragment.java
private void requestAudioFocus() {
    AudioManager am = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);
    // Request audio focus for playback.
    int result = am.requestAudioFocus(null,
            // Use the music stream.
            AudioManager.STREAM_MUSIC,
            // Request transient focus and allow other apps to keep playing at a lowered (ducked) volume.
            AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);
    if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
        AdVideoApplication.logger.i(LOG_TAG + "#requestAudioFocus()", "Gained audio focus.");
    }
}
From source file:androidx.media.widget.VideoView2.java
/**
 * Sets which type of audio focus will be requested during the playback, or configures playback
 * to not request audio focus. Valid values for focus requests are
 * {@link AudioManager#AUDIOFOCUS_GAIN}, {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT},
 * {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK}, and
 * {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}. Or use
 * {@link AudioManager#AUDIOFOCUS_NONE} to express that audio focus should not be
 * requested when playback starts. You can, for instance, use this when playing a silent animation
 * through this class and you don't want to affect other audio applications playing in the
 * background.
 *
 * @param focusGain the type of audio focus gain that will be requested, or
 *                  {@link AudioManager#AUDIOFOCUS_NONE} to disable the use of audio focus during
 *                  playback.
 */
public void setAudioFocusRequest(int focusGain) {
    if (focusGain != AudioManager.AUDIOFOCUS_NONE
            && focusGain != AudioManager.AUDIOFOCUS_GAIN
            && focusGain != AudioManager.AUDIOFOCUS_GAIN_TRANSIENT
            && focusGain != AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK
            && focusGain != AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE) {
        throw new IllegalArgumentException("Illegal audio focus type " + focusGain);
    }
    mAudioFocusType = focusGain;
}
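A brief usage sketch, assuming a VideoView2 instance named videoView2 that plays short clips and can tolerate other apps continuing at a ducked volume:

// Sketch only: opt the view into transient, duck-permitting focus before playback starts.
videoView2.setAudioFocusRequest(AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);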
From source file:com.gelakinetic.mtgfam.fragments.LifeCounterFragment.java
/**
 * Build a LinkedList of all the things to say, which can include TTS calls and MediaPlayer calls. Then call
 * onUtteranceCompleted to start running through the LinkedList, even though no utterance was spoken.
 */
private void announceLifeTotals() {
    if (mTtsInit) {
        mVocalizations.clear();
        for (LcPlayer p : mPlayers) {
            switch (mStatDisplaying) {
                case STAT_LIFE:
                    if (p.mLife > 9000) {
                        /* If the life is over 9000, split the string on an IMPROBABLE_NUMBER, and
                           insert a call to the m9000Player */
                        String tmp = getResources().getQuantityString(R.plurals.life_counter_spoken_life,
                                IMPROBABLE_NUMBER, p.mName, IMPROBABLE_NUMBER);
                        String[] parts = tmp.split(Integer.toString(IMPROBABLE_NUMBER));
                        mVocalizations.add(parts[0]);
                        mVocalizations.add(OVER_9000_KEY);
                        mVocalizations.add(parts[1]);
                    } else {
                        mVocalizations.add(getResources().getQuantityString(R.plurals.life_counter_spoken_life,
                                p.mLife, p.mName, p.mLife));
                    }
                    break;
                case STAT_POISON:
                    mVocalizations.add(getResources().getQuantityString(R.plurals.life_counter_spoken_poison,
                            p.mPoison, p.mName, p.mPoison));
                    break;
            }
        }

        if (mVocalizations.size() > 0) {
            /* Get the audio focus, and tell everyone else to be quiet for a moment */
            int res = mAudioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
                    AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);
            if (res == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                onUtteranceCompleted(LIFE_ANNOUNCE);
            }
        }
    }
}
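The listing above only acquires focus. A hedged sketch of the matching release, assuming onUtteranceCompleted drains mVocalizations and that this fragment is the OnAudioFocusChangeListener passed above, could look like:

// Sketch only: once every queued announcement has been spoken, give focus back
// so that ducked apps can return to full volume.
if (mVocalizations.isEmpty()) {
    mAudioManager.abandonAudioFocus(this);
}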
From source file:com.android.talkback.SpeechController.java
/**
 * Called when transitioning from an idle state to a speaking state, e.g.
 * the queue was empty, there was no current speech, and a speech item was
 * added to the queue.
 *
 * @see #handleSpeechCompleted()
 */
private void handleSpeechStarting() {
    // Always enable the proximity sensor when speaking.
    setProximitySensorState(true);

    boolean useAudioFocus = mUseAudioFocus;
    if (BuildCompat.isAtLeastN()) {
        List<AudioRecordingConfiguration> recordConfigurations =
                mAudioManager.getActiveRecordingConfigurations();
        if (recordConfigurations.size() != 0) {
            useAudioFocus = false;
        }
    }

    if (useAudioFocus) {
        mAudioManager.requestAudioFocus(mAudioFocusListener, AudioManager.STREAM_MUSIC,
                AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);
    }

    if (mIsSpeaking) {
        LogUtils.log(this, Log.ERROR, "Started speech while already speaking!");
    }

    mIsSpeaking = true;
}
From source file:com.firefly.sample.castcompanionlibrary.cast.VideoCastManager.java
@SuppressLint("InlinedApi")
private void setUpRemoteControl(final MediaInfo info) {
    if (!isFeatureEnabled(BaseCastManager.FEATURE_LOCKSCREEN)) {
        return;
    }
    LOGD(TAG, "setupRemoteControl() was called");
    mAudioManager.requestAudioFocus(null, AudioManager.STREAM_MUSIC,
            AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);

    ComponentName eventReceiver = new ComponentName(mContext, VideoIntentReceiver.class.getName());
    mAudioManager.registerMediaButtonEventReceiver(eventReceiver);

    if (mRemoteControlClientCompat == null) {
        Intent intent = new Intent(Intent.ACTION_MEDIA_BUTTON);
        intent.setComponent(mMediaButtonReceiverComponent);
        mRemoteControlClientCompat = new RemoteControlClientCompat(
                PendingIntent.getBroadcast(mContext, 0, intent, 0));
        RemoteControlHelper.registerRemoteControlClient(mAudioManager, mRemoteControlClientCompat);
    }

    mRemoteControlClientCompat.addToMediaRouter(mMediaRouter);
    mRemoteControlClientCompat.setTransportControlFlags(RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE);
    if (null == info) {
        mRemoteControlClientCompat.setPlaybackState(RemoteControlClient.PLAYSTATE_PAUSED);
        return;
    } else {
        mRemoteControlClientCompat.setPlaybackState(RemoteControlClient.PLAYSTATE_PLAYING);
    }

    // Update the remote control's image
    updateLockScreenImage(info);
    // Update the remote control's metadata
    updateLockScreenMetadata();
}
From source file:air.com.snagfilms.cast.chromecast.VideoChromeCastManager.java
@SuppressLint("InlinedApi")
private void setUpRemoteControl(final MediaInfo info) {
    if (!isFeatureEnabled(BaseChromeCastManager.FEATURE_LOCKSCREEN)) {
        return;
    }
    Log.d(TAG, "setupRemoteControl() was called");
    mAudioManager.requestAudioFocus(null, AudioManager.STREAM_MUSIC,
            AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);

    ComponentName eventReceiver = new ComponentName(mContext, VideoIntentReceiver.class.getName());
    mAudioManager.registerMediaButtonEventReceiver(eventReceiver);

    if (mRemoteControlClientCompat == null) {
        Intent intent = new Intent(Intent.ACTION_MEDIA_BUTTON);
        intent.setComponent(mMediaButtonReceiverComponent);
        mRemoteControlClientCompat = new RemoteControlClientCompat(
                PendingIntent.getBroadcast(mContext, 0, intent, 0));
        RemoteControlHelper.registerRemoteControlClient(mAudioManager, mRemoteControlClientCompat);
    }

    mRemoteControlClientCompat.addToMediaRouter(mMediaRouter);
    mRemoteControlClientCompat.setTransportControlFlags(RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE);
    if (null == info) {
        mRemoteControlClientCompat.setPlaybackState(RemoteControlClient.PLAYSTATE_PAUSED);
        return;
    } else {
        mRemoteControlClientCompat.setPlaybackState(RemoteControlClient.PLAYSTATE_PLAYING);
    }

    // Update the remote control's image
    updateLockScreenImage(info);
    // Update the remote control's metadata
    updateLockScreenMetadata();
}
From source file:com.google.android.libraries.cast.companionlibrary.cast.VideoCastManager.java
@SuppressLint("InlinedApi")
private void setUpRemoteControl(final MediaInfo info) {
    if (!isFeatureEnabled(BaseCastManager.FEATURE_LOCKSCREEN)) {
        return;
    }
    LOGD(TAG, "setUpRemoteControl() was called");
    mAudioManager.requestAudioFocus(null, AudioManager.STREAM_MUSIC,
            AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK);

    mMediaEventReceiver = new ComponentName(mContext, VideoIntentReceiver.class.getName());
    mAudioManager.registerMediaButtonEventReceiver(mMediaEventReceiver);

    if (mRemoteControlClientCompat == null) {
        Intent intent = new Intent(Intent.ACTION_MEDIA_BUTTON);
        intent.setComponent(mMediaButtonReceiverComponent);
        mRemoteControlClientCompat = new RemoteControlClientCompat(
                PendingIntent.getBroadcast(mContext, 0, intent, 0));
        RemoteControlHelper.registerRemoteControlClient(mAudioManager, mRemoteControlClientCompat);
    }

    mRemoteControlClientCompat.addToMediaRouter(mMediaRouter);
    mRemoteControlClientCompat.setTransportControlFlags(RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE);
    if (info == null) {
        mRemoteControlClientCompat.setPlaybackState(RemoteControlClient.PLAYSTATE_PAUSED);
        return;
    } else {
        mRemoteControlClientCompat.setPlaybackState(RemoteControlClient.PLAYSTATE_PLAYING);
    }

    // Update the remote control's image
    updateLockScreenImage(info);
    // Update the remote control's metadata
    updateLockScreenMetadata();
}
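All three cast listings above request focus with a null listener, so they are never notified of later focus changes. For completeness, a sketch of the other side of the contract: a listener in an app that already holds focus and chooses to duck rather than pause when another app requests AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK. The mediaPlayer field is an assumption for illustration.

// Sketch only: how an app holding audio focus might react to a MAY_DUCK request from another app.
private final AudioManager.OnAudioFocusChangeListener mFocusListener = focusChange -> {
    switch (focusChange) {
        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
            mediaPlayer.setVolume(0.2f, 0.2f); // Another app took transient focus; duck instead of pausing.
            break;
        case AudioManager.AUDIOFOCUS_GAIN:
            mediaPlayer.setVolume(1.0f, 1.0f); // Focus returned; restore full volume.
            break;
        case AudioManager.AUDIOFOCUS_LOSS:
        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
            mediaPlayer.pause();
            break;
        default:
            break;
    }
};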