List of usage examples for the android.media.AudioRecord constructor
public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat, int bufferSizeInBytes) throws IllegalArgumentException
From source file:com.brejza.matt.habmodem.Dsp_service.java
/**
 * Starts 8 kHz mono 16-bit PCM capture and launches the decoder thread.
 *
 * <p>No-op when the decoder is disabled or capture is already running. Falls back from
 * {@code AudioSource.MIC} to {@code AudioSource.DEFAULT} if the first AudioRecord fails
 * to initialise; logs and aborts if neither source works.
 */
public void startAudio() {
    if (!_enableDecoder)
        return;
    boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
    System.out.println("isRecording: " + isRecording);
    // Fixed typo in log message: "avaliable" -> "available".
    logEvent("Starting Audio. Mic available: " + mic, false);
    if (!isRecording) {
        isRecording = true;
        // Never capture with less than 3000 bytes of buffer, even if the platform minimum is smaller.
        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);
        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);
        // Playback track gets twice the capture buffer to avoid underruns.
        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);
        if (enableEcho) {
            // Route captured audio straight back out through the speakerphone.
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }
        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            // MIC source failed on this device — retry with the platform default source.
            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);
            if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
                logEvent("Error - Could not initialise audio", true);
                return;
            }
            logEvent("Using default audio source", false);
        }
        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        logEvent("Starting Audio Thread.", false);
        setDecoderRunningNotification();
        ct.start();
    }
}
From source file:com.smc.tw.waltz.MainActivity.java
@Override protected void onCreate(Bundle savedInstanceState) { if (DEBUG)//from w w w . j a v a 2 s. co m Log.d(TAG, "onCreate"); overridePendingTransition(R.anim.slide_right_in, R.anim.slide_left_out); super.onCreate(savedInstanceState); Fabric.with(this, new Crashlytics()); setContentView(R.layout.activity_main); mPowerManager = (PowerManager) getSystemService(Context.POWER_SERVICE); mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); mWakeLock = mPowerManager.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK, TAG); mPreferences = PreferenceManager.getDefaultSharedPreferences(this); mFragmentManager = getSupportFragmentManager(); setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); mAudioRecordBufferSize = 5600;//AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)*10; mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, mAudioRecordBufferSize); mNotifyChannelList = new ArrayList<String>(); setupLayout(); if (savedInstanceState != null) { mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION); } // mRegistrationBroadcastReceiver = new BroadcastReceiver() { // @Override // public void onReceive(Context context, Intent intent) { // // // checking for type intent filter // if (intent.getAction().equals(MainApplication.REGISTRATION_COMPLETE)) { // // gcm successfully registered // // now subscribe to `global` topic to receive app wide notifications // String token = intent.getStringExtra("token"); // // //Toast.makeText(getApplicationContext(), "GCM registration token: " + token, Toast.LENGTH_LONG).show(); // // } else if (intent.getAction().equals(MainApplication.SENT_TOKEN_TO_SERVER)) { // // gcm registration id is stored in our server's MySQL // // Toast.makeText(getApplicationContext(), "GCM registration token is stored in server!", Toast.LENGTH_LONG).show(); // // } else if 
(intent.getAction().equals(MainApplication.PUSH_NOTIFICATION)) { // // new push notification is received // // Toast.makeText(getApplicationContext(), "Push notification is received!", Toast.LENGTH_LONG).show(); // } // } // }; // if (checkPlayServices()) { registerGCM(); } }
From source file:info.guardianproject.iocipher.camera.VideoCameraActivity.java
private void initAudio(final String audioPath) throws Exception { fileAudio = new File(audioPath); outputStreamAudio = new BufferedOutputStream(new info.guardianproject.iocipher.FileOutputStream(fileAudio), 8192 * 8);//from w w w . j a v a 2s . c o m if (useAAC) { aac = new AACHelper(); aac.setEncoder(MediaConstants.sAudioSampleRate, MediaConstants.sAudioChannels, MediaConstants.sAudioBitRate); } else { int minBufferSize = AudioRecord.getMinBufferSize(MediaConstants.sAudioSampleRate, MediaConstants.sChannelConfigIn, AudioFormat.ENCODING_PCM_16BIT) * 8; audioData = new byte[minBufferSize]; int audioSource = MediaRecorder.AudioSource.CAMCORDER; if (this.getCameraDirection() == CameraInfo.CAMERA_FACING_FRONT) { audioSource = MediaRecorder.AudioSource.MIC; } audioRecord = new AudioRecord(audioSource, MediaConstants.sAudioSampleRate, MediaConstants.sChannelConfigIn, AudioFormat.ENCODING_PCM_16BIT, minBufferSize); } }
From source file:com.example.sensingapp.SensingApp.java
private void startAudioRecording() { m_audioRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, m_nAudioSampleRate, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, m_nBufferSize); if (m_audioRecorder == null) return;//from w w w . ja va 2s . c o m int i = m_audioRecorder.getState(); if (i == AudioRecord.STATE_INITIALIZED) { m_audioRecorder.startRecording(); } else { return; } if (m_blnRecordSoundLevel == true || m_blnRecordSoundFile == true) { m_processSoundThread = new Thread(new Runnable() { public void run() { processAudioData(); } }, "Audio Thread"); m_processSoundThread.start(); if (m_blnRecordSoundLevel == true) { m_soundLevelThread = new Thread(new Runnable() { public void run() { calculateAudioSoundLevel(); } }, "Sould Level Thread"); m_soundLevelThread.start(); } } }