List of usage examples for android.media.AudioManager.setSpeakerphoneOn
public void setSpeakerphoneOn(boolean on)
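setSpeakerphoneOn is usually paired with a call to setMode on the same AudioManager, as the examples below illustrate. The following is a minimal, self-contained sketch of that pattern; the class SpeakerphoneHelper and method toggleSpeakerphone are illustrative names, not taken from any of the listed projects.

import android.content.Context;
import android.media.AudioManager;

public class SpeakerphoneHelper {

    /**
     * Routes audio to the loudspeaker or back to the default device.
     * MODE_IN_COMMUNICATION is the mode typically used for VoIP-style audio.
     */
    public static void toggleSpeakerphone(Context context, boolean useSpeaker) {
        AudioManager audioManager =
                (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        if (audioManager == null) {
            return;
        }
        if (useSpeaker) {
            audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
            audioManager.setSpeakerphoneOn(true);
        } else {
            audioManager.setSpeakerphoneOn(false);
            audioManager.setMode(AudioManager.MODE_NORMAL);
        }
    }
}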
From source file: Main.java

public static void restoreSystemAudioSetting(Context context) {
    AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    if (audioManager != null) {
        audioManager.setSpeakerphoneOn(false);
        audioManager.setMode(AudioManager.MODE_NORMAL);
    }
}
From source file: Main.java

public static void openSpeaker(Context context, boolean on) {
    AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    if (on) {
        audioManager.setMode(AudioManager.MODE_NORMAL);
        audioManager.setSpeakerphoneOn(true);
    } else {
        audioManager.setSpeakerphoneOn(false);
    }
}
From source file: de.azapps.mirakel.helper.TaskDialogHelpers.java

public static void playbackFile(final Activity context, final FileMirakel file, final boolean loud) {
    final MediaPlayer mPlayer = new MediaPlayer();
    final AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    if (!loud) {
        // Route playback through the earpiece instead of the loudspeaker
        am.setSpeakerphoneOn(false);
        am.setMode(AudioManager.MODE_IN_CALL);
        context.setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
    }
    try {
        mPlayer.reset();
        if (!loud) {
            mPlayer.setAudioStreamType(AudioManager.STREAM_VOICE_CALL);
        }
        mPlayer.setDataSource(file.getFileStream(context).getFD());
        mPlayer.prepare();
        mPlayer.start();
        mPlayer.setOnCompletionListener(new OnCompletionListener() {
            @Override
            public void onCompletion(final MediaPlayer mp) {
                audio_playback_dialog.dismiss();
            }
        });
        am.setMode(AudioManager.MODE_NORMAL);
        audio_playback_playing = true;
    } catch (final IOException e) {
        Log.e(TAG, "prepare() failed");
    }
    audio_playback_dialog = new AlertDialog.Builder(context).setTitle(R.string.audio_playback_title)
            .setPositiveButton(R.string.audio_playback_pause, null)
            .setNegativeButton(R.string.audio_playback_stop, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(final DialogInterface dialog, final int which) {
                    mPlayer.release();
                }
            }).setOnCancelListener(new OnCancelListener() {
                @Override
                public void onCancel(final DialogInterface dialog) {
                    mPlayer.release();
                    dialog.cancel();
                }
            }).create();
    audio_playback_dialog.setOnShowListener(new OnShowListener() {
        @Override
        public void onShow(final DialogInterface dialog) {
            final Button button = ((AlertDialog) dialog).getButton(DialogInterface.BUTTON_POSITIVE);
            button.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(final View v) {
                    if (audio_playback_playing) {
                        button.setText(R.string.audio_playback_play);
                        mPlayer.pause();
                        audio_playback_playing = false;
                    } else {
                        button.setText(R.string.audio_playback_pause);
                        mPlayer.start();
                        audio_playback_playing = true;
                    }
                }
            });
        }
    });
    audio_playback_dialog.show();
}
From source file: de.tubs.ibr.dtn.dtalkie.TalkieActivity.java

@SuppressWarnings("deprecation")
private void setAudioOutput() {
    AudioManager am = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    if (am.isBluetoothA2dpOn()) {
        // play without speaker
        am.setSpeakerphoneOn(false);
    } else if (am.isWiredHeadsetOn()) {
        // play without speaker
        am.setSpeakerphoneOn(false);
    } else {
        // without headset, enable speaker
        am.setSpeakerphoneOn(true);
    }
}
From source file: com.luhuiguo.cordova.voice.VoiceHandler.java

/**
 * Set the voice device to be used for playback.
 *
 * @param output 1=earpiece, 2=speaker
 */
public void setVoiceOutputDevice(int output) {
    AudioManager audiMgr = (AudioManager) this.cordova.getActivity().getSystemService(Context.AUDIO_SERVICE);
    if (output == 2) {
        audiMgr.setSpeakerphoneOn(true);
    } else if (output == 1) {
        audiMgr.setSpeakerphoneOn(false);
    } else {
        System.out.println("VoiceHandler.setVoiceOutputDevice() Error: Unknown output device.");
    }
}
From source file: com.example.rttytranslator.Dsp_service.java

public void startAudio() {
    if (!_enableDecoder)
        return;

    // boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
    System.out.println("isRecording: " + isRecording);
    if (!isRecording) {
        isRecording = true;
        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);
        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);
        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);
        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }
        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);
        }
        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        ct.start();
    }
}
From source file: gstb.fd.eofficial.oa.im.widget.chatrow.EaseChatRowVoicePlayClickListener.java

public void playVoice(String filePath) {
    Log.e("sssssss", "sssssssssssssss");
    if (!(new File(filePath).exists())) {
        return;
    }
    // playMsgId = message.getMsgId();
    AudioManager audioManager = (AudioManager) activity.getSystemService(Context.AUDIO_SERVICE);
    mediaPlayer = new MediaPlayer();
    // if (EaseUI.getInstance().getSettingsProvider().isSpeakerOpened()) {
    audioManager.setMode(AudioManager.MODE_NORMAL);
    audioManager.setSpeakerphoneOn(true);
    mediaPlayer.setAudioStreamType(AudioManager.STREAM_RING);
    // } else {
    //     // route audio to the earpiece instead of the speaker
    //     audioManager.setSpeakerphoneOn(false);
    //     audioManager.setMode(AudioManager.MODE_IN_CALL);
    //     mediaPlayer.setAudioStreamType(AudioManager.STREAM_VOICE_CALL);
    // }
    try {
        mediaPlayer.setDataSource(filePath);
        mediaPlayer.prepare();
        mediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
            @Override
            public void onCompletion(MediaPlayer mp) {
                mediaPlayer.release();
                mediaPlayer = null;
                stopPlayVoice(); // stop animation
            }
        });
        isPlaying = true;
        currentPlayListener = this;
        mediaPlayer.start();
        showAnimation();
        // mark received voice messages as listened
        if (message.direct() == EMMessage.Direct.RECEIVE) {
            // if (!message.isAcked() && chatType == EMMessage.ChatType.Chat) {
            //     // send a read acknowledgement
            //     // EMClient.getInstance().chatManager().ackMessageRead(message.getFrom(), message.getMsgId());
            // }
            if (!message.isListened() && iv_read_status != null
                    && iv_read_status.getVisibility() == View.VISIBLE) {
                // hide the unread indicator once the voice message has been played
                iv_read_status.setVisibility(View.INVISIBLE);
                message.setListened(true);
                MessageDao.instance(activity).updateListen(message.getMsgId());
                // EMClient.getInstance().chatManager().setMessageListened(message);
            }
        }
    } catch (Exception e) {
        System.out.println();
    }
}
From source file: org.mitre.svmp.events.WebrtcHandler.java

public WebrtcHandler(BaseServer baseServer, VideoStreamInfo vidInfo, Context c) {
    base = baseServer;
    context = c;
    // Pass in context to allow access to the Android-managed audio driver.
    PeerConnectionFactory.initializeAndroidGlobals(context);
    // "Failed to initializeAndroidGlobals");
    AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    boolean isWiredHeadsetOn = audioManager.isWiredHeadsetOn();
    audioManager.setMode(isWiredHeadsetOn ? AudioManager.MODE_IN_CALL : AudioManager.MODE_IN_COMMUNICATION);
    audioManager.setSpeakerphoneOn(!isWiredHeadsetOn);
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
    pcConstraints = constraintsFromJSON(vidInfo.getPcConstraints());
    Log.d(TAG, "pcConstraints: " + pcConstraints);
    videoConstraints = constraintsFromJSON(vidInfo.getVideoConstraints());
    Log.d(TAG, "videoConstraints: " + videoConstraints);
    // videoConstraints = new MediaConstraints();
    // videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minWidth", "720"));
    // videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minHeight", "1280"));
    // videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", "720"));
    // videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", "1280"));
    // videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minFrameRate", "24"));
    audioConstraints = new MediaConstraints(); // null;
    audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("audio", "true"));
    iceServers = iceServersFromPCConfigJSON(vidInfo.getIceServers());
    onIceServers(iceServers);
}
From source file: org.jitsi.android.gui.call.VideoCallActivity.java

/**
 * Fired when the speakerphone button is clicked.
 *
 * @param v the speakerphone button <tt>View</tt>.
 */
public void onSpeakerphoneClicked(View v) {
    AudioManager audioManager = JitsiApplication.getAudioManager();
    audioManager.setSpeakerphoneOn(!audioManager.isSpeakerphoneOn());
    updateSpeakerphoneStatus();
}
From source file: com.brejza.matt.habmodem.Dsp_service.java

public void disableEcho() {
    AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    manager.setMode(AudioManager.MODE_NORMAL);
    manager.setSpeakerphoneOn(false);
    enableEcho = false;
}