List of usage examples for android.media AudioRecord getMinBufferSize
static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat)
From source file:Main.java
/** * Get a valid sample rate for the device * * @param channelConfiguration// w w w .j a v a2 s . c o m * the channel configuration * @param audioEncoding * the audio encoding * @return the valid sample rates */ public static int getValidSampleRates(int channelConfiguration, int audioEncoding) { for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100 }) { // add the rates you wish to check against int bffrSize = AudioRecord.getMinBufferSize(rate, channelConfiguration, audioEncoding); if (bffrSize > 0) { return bffrSize; } } return 0; }
From source file:Main.java
/** * Get a valid sample rate for the device * * @param channelConfiguration/* www . jav a 2s. c o m*/ * the channel configuration * @param audioEncoding * the audio encoding * @return the valid sample rates */ public static int getValidSampleRates(int channelConfiguration, int audioEncoding) { for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100, 48000 }) { // add the rates you wish to check against int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfiguration, audioEncoding); if (bufferSize > 0) { return rate; } } return 0; }
From source file:Main.java
public static final int getSampleRate(boolean lowest) { int sampleRate = -1; for (int rate : POSSIBLE_SAMPLE_RATES) { int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_DEFAULT, AudioFormat.ENCODING_PCM_16BIT); if (bufferSize > 0) { // buffer size is valid, Sample rate supported sampleRate = rate;//from www . j a v a 2 s . com if (lowest) { return sampleRate; } } } return sampleRate; }
From source file:Main.java
/**
 * Minimum AudioRecord buffer size (in bytes) for the device's highest
 * supported sample rate, mono channel, 16-bit PCM.
 */
public static int getBufferSize() {
    final int rate = getMaxSampleRate();
    return AudioRecord.getMinBufferSize(rate,
            android.media.AudioFormat.CHANNEL_IN_MONO,
            android.media.AudioFormat.ENCODING_PCM_16BIT);
}
From source file:Main.java
public static int getMaxSampleRate() { int maxSampleRate = -1; int[] possibleSampleRates = { 48000, 44100, 22050, 16000, 11025, 8000 }; for (int i = 0; i < possibleSampleRates.length; i++) { int bufferSize = AudioRecord.getMinBufferSize(possibleSampleRates[i], AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); if (bufferSize > 0) { maxSampleRate = possibleSampleRates[i]; return maxSampleRate; }//w w w . j av a 2 s . com } return maxSampleRate; }
From source file:zlyh.dmitry.recaller.threading.PlayBlockThread.java
@Override public void run() { AudioTrack audioTrack = null;//from ww w. j a v a 2 s. c o m FileInputStream in = null; try { File rawpcm = new File(path); if (!rawpcm.exists()) { this.interrupt(); } togglePlaying(true); final int audioLength = (int) rawpcm.length(); final int minBufferSize = AudioRecord.getMinBufferSize(RecordRunnable.frequency, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RecordRunnable.frequency, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM); final int block = 256 * 1024; byte[] byteData = new byte[block]; try { in = new FileInputStream(rawpcm); } catch (FileNotFoundException e) { e.printStackTrace(); this.interrupt(); } if (in != null) { try { int bytesread = 0; int offset; audioTrack.play(); while (bytesread < audioLength && !isInterrupted()) { offset = in.read(byteData, 0, block); if (offset != -1) { audioTrack.write(byteData, 0, offset); bytesread += offset; } else { break; } } in.close(); togglePlaying(false); if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) { audioTrack.stop(); } if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) { audioTrack.release(); } } catch (Exception e) { e.printStackTrace(); try { in.close(); } catch (IOException e1) { e1.printStackTrace(); } if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) { audioTrack.stop(); } if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) { audioTrack.release(); } togglePlaying(false); } } } catch (Exception e) { e.printStackTrace(); if (audioTrack != null) { if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) { audioTrack.stop(); } if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) { audioTrack.release(); } } if (in != null) { try { in.close(); } catch (IOException e1) { e1.printStackTrace(); } } togglePlaying(false); } }
From source file:com.inmobi.ultrapush.InfoRecActivity.java
@Override protected void onResume() { super.onResume(); TextView tv = (TextView) findViewById(R.id.textview_info_rec); tv.setMovementMethod(new ScrollingMovementMethod()); tv.setText("Testing..."); // TODO: No use... tv.invalidate();//from w w w .j a va2s . c om // Show supported sample rate and corresponding minimum buffer size. String[] requested = new String[] { "8000", "11025", "16000", "22050", "32000", "44100", "48000", "96000" }; String st = "sampleRate minBufSize\n"; ArrayList<String> validated = new ArrayList<String>(); for (String s : requested) { int rate = Integer.parseInt(s); int minBufSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); if (minBufSize != AudioRecord.ERROR_BAD_VALUE) { validated.add(s); st += s + " \t" + Integer.toString(minBufSize) + "\n"; } } requested = validated.toArray(new String[0]); tv.setText(st); tv.invalidate(); // Test audio source String[] audioSourceString = new String[] { "DEFAULT", "MIC", "VOICE_UPLINK", "VOICE_DOWNLINK", "VOICE_CALL", "CAMCORDER", "VOICE_RECOGNITION" }; int[] audioSourceId = new int[] { MediaRecorder.AudioSource.DEFAULT, // Default audio source MediaRecorder.AudioSource.MIC, // Microphone audio source MediaRecorder.AudioSource.VOICE_UPLINK, // Voice call uplink (Tx) audio source MediaRecorder.AudioSource.VOICE_DOWNLINK, // Voice call downlink (Rx) audio source MediaRecorder.AudioSource.VOICE_CALL, // Voice call uplink + downlink audio source MediaRecorder.AudioSource.CAMCORDER, // Microphone audio source with same orientation as camera if available, the main device microphone otherwise (apilv7) MediaRecorder.AudioSource.VOICE_RECOGNITION, // Microphone audio source tuned for voice recognition if available, behaves like DEFAULT otherwise. (apilv7) // MediaRecorder.AudioSource.VOICE_COMMUNICATION, // Microphone audio source tuned for voice communications such as VoIP. 
It will for instance take advantage of echo cancellation or automatic gain control if available. It otherwise behaves like DEFAULT if no voice processing is applied. (apilv11) // MediaRecorder.AudioSource.REMOTE_SUBMIX, // Audio source for a submix of audio streams to be presented remotely. (apilv19) }; tv.append("\n-- Audio Source Test --"); for (String s : requested) { int sampleRate = Integer.parseInt(s); int recBufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); tv.append("\n(" + Integer.toString(sampleRate) + "Hz, MONO, 16BIT)\n"); for (int iass = 0; iass < audioSourceId.length; iass++) { st = ""; // wait for AudioRecord fully released... try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); } AudioRecord record; record = new AudioRecord(audioSourceId[iass], sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, recBufferSize); if (record.getState() == AudioRecord.STATE_INITIALIZED) { st += audioSourceString[iass] + " successed"; int as = record.getAudioSource(); if (as != audioSourceId[iass]) { int i = 0; while (i < audioSourceId.length) { if (as == audioSourceId[iass]) { break; } i++; } if (i >= audioSourceId.length) { st += "(auto set to \"unknown source\")"; } else { st += "(auto set to " + audioSourceString[i] + ")"; } } st += "\n"; } else { st += audioSourceString[iass] + " failed\n"; } record.release(); record = null; tv.append(st); tv.invalidate(); } } }
From source file:edu.nchu.cs.dmlab.firemap.mjpeg.RecMicToMp3.java
/**
 * Start recording from the microphone and encoding to MP3 on a background thread.
 *
 * Flow: compute the minimum AudioRecord buffer, open the output file, initialize
 * LAME, then loop read-PCM -> encode -> write-MP3 until {@code mIsRecording} is
 * cleared. Every failure path reports a distinct Message.* code through
 * {@code mHandler} (when non-null) instead of throwing. No-op if a recording is
 * already in progress.
 */
public void start() {
    // just skip if recording has happened
    if (mIsRecording) {
        return;
    }
    new Thread() {
        @Override
        public void run() {
            if (mHandler != null)
                mHandler.sendEmptyMessage(Message.MSG_DIALOG_OPEN);
            // Audio capture is latency-sensitive; bump this thread's priority.
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
            // Calculate the minimum buffer size the platform requires for this
            // rate/config; a negative value means the configuration is unsupported.
            final int minBufferSize = AudioRecord.getMinBufferSize(mSampleRate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            if (minBufferSize < 0) {
                if (mHandler != null) {
                    mHandler.sendEmptyMessage(Message.MSG_ERROR_GET_MIN_BUFFERSIZE);
                }
                return;
            }
            // Double the minimum buffer to reduce the chance of overrun/data loss.
            AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mSampleRate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize * 2);
            // PCM buffer sized for 5 seconds: SampleRate[Hz] * 16bit * Mono * 5sec.
            short[] buffer = new short[mSampleRate * (16 / 8) * 1 * 5];
            // Worst-case MP3 output size per the LAME documentation: 7200 + 1.25x input bytes.
            byte[] mp3buffer = new byte[(int) (7200 + buffer.length * 2 * 1.25)];
            FileOutputStream output = null;
            try {
                output = new FileOutputStream(mFile);
            } catch (FileNotFoundException e) {
                if (mHandler != null) {
                    mHandler.sendEmptyMessage(Message.MSG_ERROR_CREATE_FILE);
                }
                return;
            }
            // LAME init: mono input, same output rate, 32 kbps bitrate.
            SimpleLame.init(mSampleRate, 1, mSampleRate, 32);
            try {
                try {
                    audioRecord.startRecording();
                    mIsRecording = true;
                } catch (IllegalStateException e) {
                    if (mHandler != null) {
                        mHandler.sendEmptyMessage(Message.MSG_ERROR_REC_START);
                    }
                    return;
                }
                try {
                    if (mHandler != null) {
                        mHandler.sendEmptyMessage(Message.MSG_REC_STARTED);
                    }
                    int readSize = 0;
                    // Main capture/encode loop; stop() clears mIsRecording to end it.
                    while (mIsRecording) {
                        readSize = audioRecord.read(buffer, 0, minBufferSize);
                        if (readSize < 0) {
                            // Negative read = AudioRecord error (e.g. invalid state).
                            if (mHandler != null) {
                                mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_RECORD);
                            }
                            break;
                            // no data
                        } else if (readSize == 0) {
                            ; // nothing captured this pass; poll again
                        } else {
                            // Mono: left and right channel share the same buffer.
                            int encResult = SimpleLame.encode(buffer, buffer, readSize, mp3buffer);
                            if (encResult < 0) {
                                if (mHandler != null) {
                                    mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_ENCODE);
                                }
                                break;
                            }
                            if (encResult != 0) {
                                try {
                                    output.write(mp3buffer, 0, encResult);
                                } catch (IOException e) {
                                    if (mHandler != null) {
                                        mHandler.sendEmptyMessage(Message.MSG_ERROR_WRITE_FILE);
                                    }
                                    break;
                                }
                            }
                        }
                    }
                    // Drain LAME's internal buffers so the MP3 file is complete.
                    int flushResult = SimpleLame.flush(mp3buffer);
                    if (flushResult < 0) {
                        if (mHandler != null) {
                            mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_ENCODE);
                        }
                    }
                    if (flushResult != 0) {
                        try {
                            output.write(mp3buffer, 0, flushResult);
                        } catch (IOException e) {
                            if (mHandler != null) {
                                mHandler.sendEmptyMessage(Message.MSG_ERROR_WRITE_FILE);
                            }
                        }
                    }
                    try {
                        output.close();
                    } catch (IOException e) {
                        if (mHandler != null) {
                            mHandler.sendEmptyMessage(Message.MSG_ERROR_CLOSE_FILE);
                        }
                    }
                } finally {
                    // Always return the mic to the system, even after an error.
                    audioRecord.stop();
                    audioRecord.release();
                }
            } finally {
                SimpleLame.close();
                mIsRecording = false;
            }
            if (mHandler != null) {
                mHandler.sendEmptyMessage(Message.MSG_REC_STOPPED);
            }
            // Upload the finished audio file.
            uploadVoice();
            if (mHandler != null)
                mHandler.sendEmptyMessage(Message.MSG_DIALOG_CLOSE);
        }
    }.start();
}
From source file:re.serialout.MainScreen.java
/**
 * Activity entry point: wires up patient storage, the audio-serial
 * communication channel, media-button handling, and the pH command interface,
 * then restores patient-ID state from the saved instance (if any) and from
 * SharedPreferences, and finally kicks off an update check.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);
    // Set up patient tracking.
    dbhelper = new PatientDbHelper(this);
    // Set up communications.
    AudioSerialOutMono.activate();
    AudioSerialOutMono.context = this.getApplicationContext();
    for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100 }) {
        // Just here to make sure that the buffer types we are using are ok for the phone.
        // Debug thing. NOTE(review): CHANNEL_CONFIGURATION_DEFAULT is deprecated;
        // AudioFormat.CHANNEL_IN_DEFAULT is the modern equivalent — confirm before changing.
        int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_CONFIGURATION_DEFAULT,
                AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize > 0) {
            System.out.println("Buffer size not 0 for rate: " + rate);
        }
    }
    am = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    am.registerMediaButtonEventReceiver(new ComponentName(this, ButtonReciver.class));
    setUpFreqSlider();
    // playSine.start();
    playSine.adjustFreq(14000);
    TextView pHoutput = (TextView) findViewById(R.id.instrText);
    // Check for patient ID and log patient in.
    commandInterface = new CommandInterface(buadRate, this.getApplicationContext(), pHoutput);
    if (savedInstanceState != null) {
        // Restore ID state from a configuration change / recreation.
        idSet = savedInstanceState.getBoolean("idSet");
        if (idSet) {
            idText = savedInstanceState.getString("idText");
            hideIDViews();
            settings = dbhelper.getSettings(Integer.parseInt(idText));
            if (settings.isFirstTime()) {
                // First login for this patient: collect demographics first.
                Intent intent = new Intent(this, DemographicActivity.class);
                intent.putExtra("patientID", idText);
                startActivity(intent);
            }
            if (settings.isSurveyReady()) {
                (findViewById(R.id.startSurvey)).setVisibility(View.VISIBLE);
            }
        }
    }
    SharedPreferences sharedPreferences = getPreferences(MODE_PRIVATE);
    clincMode = sharedPreferences.getBoolean("clincMode", false);
    if (!clincMode) {
        // Outside clinic mode the patient ID persists across launches.
        System.out.println("Not in clinic mode");
        idSet = sharedPreferences.getBoolean("idSet", false);
        if (idSet) {
            idText = sharedPreferences.getString("idText", "");
            System.out.println("id is: " + idText);
            hideIDViews();
        }
    }
    checkForUpdates().execute();
}
From source file:com.example.rttytranslator.Dsp_service.java
public void startAudio() { if (!_enableDecoder) return;//w w w. ja v a2s . com //boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE); System.out.println("isRecording: " + isRecording); if (!isRecording) { isRecording = true; buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); buffsize = Math.max(buffsize, 3000); mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize); mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM); if (enableEcho) { AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); manager.setMode(AudioManager.MODE_IN_CALL); manager.setSpeakerphoneOn(true); } if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) { mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffsize); } mRecorder.startRecording(); System.out.println("STARTING THREAD"); Thread ct = new captureThread(); ct.start(); } }