List of usage examples for android.media.AudioFormat.ENCODING_PCM_16BIT
public static final int ENCODING_PCM_16BIT: audio data format with PCM 16 bits per sample, guaranteed to be supported by Android devices.
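Most of the examples below follow the same pattern: query the minimum buffer size for a given sample rate, channel configuration, and ENCODING_PCM_16BIT, then pass the same three parameters when constructing the AudioRecord or AudioTrack. The following is a minimal sketch of that pattern for audio capture; the class name Pcm16RecordSketch and the 44100 Hz mono configuration are illustrative choices, not taken from any of the projects below, and the RECORD_AUDIO permission must be granted for it to work.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public final class Pcm16RecordSketch {
    // Sketch only: ENCODING_PCM_16BIT is used both to query the minimum buffer
    // size and to construct the AudioRecord, so the two calls stay consistent.
    public static AudioRecord createRecorder() {
        final int sampleRate = 44100; // illustrative; check device support first
        int minBufferSize = AudioRecord.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (minBufferSize <= 0) {
            return null; // this rate/format combination is not supported on the device
        }
        // A buffer larger than the minimum (here 2x) gives the app more headroom
        // before the recorder overruns.
        return new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize * 2);
    }
}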
From source file:edu.nchu.cs.dmlab.firemap.mjpeg.RecMicToMp3.java
/**
 * Start recording.
 */
public void start() {
    // Do nothing if recording is already in progress
    if (mIsRecording) {
        return;
    }
    new Thread() {
        @Override
        public void run() {
            if (mHandler != null)
                mHandler.sendEmptyMessage(Message.MSG_DIALOG_OPEN);
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Calculate the minimum buffer size
            final int minBufferSize = AudioRecord.getMinBufferSize(mSampleRate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            if (minBufferSize < 0) {
                if (mHandler != null) {
                    mHandler.sendEmptyMessage(Message.MSG_ERROR_GET_MIN_BUFFERSIZE);
                }
                return;
            }
            AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mSampleRate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize * 2);

            // PCM buffer for 5 seconds: SampleRate[Hz] * 16 bit * mono * 5 sec
            short[] buffer = new short[mSampleRate * (16 / 8) * 1 * 5];
            byte[] mp3buffer = new byte[(int) (7200 + buffer.length * 2 * 1.25)];

            FileOutputStream output = null;
            try {
                output = new FileOutputStream(mFile);
            } catch (FileNotFoundException e) {
                if (mHandler != null) {
                    mHandler.sendEmptyMessage(Message.MSG_ERROR_CREATE_FILE);
                }
                return;
            }

            // Initialize the LAME encoder
            SimpleLame.init(mSampleRate, 1, mSampleRate, 32);

            try {
                try {
                    audioRecord.startRecording();
                    mIsRecording = true;
                } catch (IllegalStateException e) {
                    if (mHandler != null) {
                        mHandler.sendEmptyMessage(Message.MSG_ERROR_REC_START);
                    }
                    return;
                }
                try {
                    if (mHandler != null) {
                        mHandler.sendEmptyMessage(Message.MSG_REC_STARTED);
                    }
                    int readSize = 0;
                    while (mIsRecording) {
                        readSize = audioRecord.read(buffer, 0, minBufferSize);
                        if (readSize < 0) {
                            if (mHandler != null) {
                                mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_RECORD);
                            }
                            break; // read error
                        } else if (readSize == 0) {
                            // no data read; keep polling
                        } else {
                            int encResult = SimpleLame.encode(buffer, buffer, readSize, mp3buffer);
                            if (encResult < 0) {
                                if (mHandler != null) {
                                    mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_ENCODE);
                                }
                                break;
                            }
                            if (encResult != 0) {
                                try {
                                    output.write(mp3buffer, 0, encResult);
                                } catch (IOException e) {
                                    if (mHandler != null) {
                                        mHandler.sendEmptyMessage(Message.MSG_ERROR_WRITE_FILE);
                                    }
                                    break;
                                }
                            }
                        }
                    }
                    int flushResult = SimpleLame.flush(mp3buffer);
                    if (flushResult < 0) {
                        if (mHandler != null) {
                            mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_ENCODE);
                        }
                    }
                    if (flushResult != 0) {
                        try {
                            output.write(mp3buffer, 0, flushResult);
                        } catch (IOException e) {
                            if (mHandler != null) {
                                mHandler.sendEmptyMessage(Message.MSG_ERROR_WRITE_FILE);
                            }
                        }
                    }
                    try {
                        output.close();
                    } catch (IOException e) {
                        if (mHandler != null) {
                            mHandler.sendEmptyMessage(Message.MSG_ERROR_CLOSE_FILE);
                        }
                    }
                } finally {
                    audioRecord.stop();
                    audioRecord.release();
                }
            } finally {
                SimpleLame.close();
                mIsRecording = false;
            }

            if (mHandler != null) {
                mHandler.sendEmptyMessage(Message.MSG_REC_STOPPED);
            }

            // Upload audio
            uploadVoice();

            if (mHandler != null)
                mHandler.sendEmptyMessage(Message.MSG_DIALOG_CLOSE);
        }
    }.start();
}
From source file:com.ibm.watson.developer_cloud.android.text_to_speech.v1.TTSUtility.java
private void initPlayer() {
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation of an
    // AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize,
                AudioTrack.MODE_STREAM);
        if (audioTrack != null)
            audioTrack.play();
    }
}
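The snippet above only creates the AudioTrack and calls play(); the PCM data itself is written elsewhere. A hedged sketch of that step, assuming 16-bit samples held in a short[] array, could look like the helper below. Pcm16Writer and writePcm are hypothetical names, not part of TTSUtility.

import android.media.AudioTrack;

public final class Pcm16Writer {
    // Stream 16-bit PCM samples into an AudioTrack created with
    // ENCODING_PCM_16BIT and AudioTrack.MODE_STREAM.
    public static void writePcm(AudioTrack track, short[] samples) {
        int written = 0;
        while (written < samples.length) {
            // In MODE_STREAM, write() blocks until the track accepts data and
            // returns the number of shorts written, or a negative error code.
            int result = track.write(samples, written, samples.length - written);
            if (result < 0) {
                break; // e.g. ERROR_INVALID_OPERATION if the track is not initialized
            }
            written += result;
        }
    }
}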
From source file:re.serialout.MainScreen.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);

    // Set up patient tracking
    dbhelper = new PatientDbHelper(this);

    // Set up communications
    AudioSerialOutMono.activate();
    AudioSerialOutMono.context = this.getApplicationContext();

    for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100 }) {
        // Debug check: make sure the buffer types we are using are OK for this phone.
        int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_CONFIGURATION_DEFAULT,
                AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize > 0) {
            System.out.println("Buffer size not 0 for rate: " + rate);
        }
    }

    am = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    am.registerMediaButtonEventReceiver(new ComponentName(this, ButtonReciver.class));
    setUpFreqSlider();
    // playSine.start();
    playSine.adjustFreq(14000);

    TextView pHoutput = (TextView) findViewById(R.id.instrText);

    // Check for patient ID and log the patient in
    commandInterface = new CommandInterface(buadRate, this.getApplicationContext(), pHoutput);

    if (savedInstanceState != null) {
        idSet = savedInstanceState.getBoolean("idSet");
        if (idSet) {
            idText = savedInstanceState.getString("idText");
            hideIDViews();
            settings = dbhelper.getSettings(Integer.parseInt(idText));
            if (settings.isFirstTime()) {
                Intent intent = new Intent(this, DemographicActivity.class);
                intent.putExtra("patientID", idText);
                startActivity(intent);
            }
            if (settings.isSurveyReady()) {
                (findViewById(R.id.startSurvey)).setVisibility(View.VISIBLE);
            }
        }
    }

    SharedPreferences sharedPreferences = getPreferences(MODE_PRIVATE);
    clincMode = sharedPreferences.getBoolean("clincMode", false);
    if (!clincMode) {
        System.out.println("Not in clinic mode");
        idSet = sharedPreferences.getBoolean("idSet", false);
        if (idSet) {
            idText = sharedPreferences.getString("idText", "");
            System.out.println("id is: " + idText);
            hideIDViews();
        }
    }
    // new checkForUpdates().execute();
}
From source file:com.example.rttytranslator.Dsp_service.java
public void startAudio() {
    if (!_enableDecoder)
        return;

    // boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
    System.out.println("isRecording: " + isRecording);

    if (!isRecording) {
        isRecording = true;

        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);

        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);
        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);

        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }

        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);
        }

        mRecorder.startRecording();

        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        ct.start();
    }
}
From source file:com.royer.bangstopwatch.app.StopwatchFragment.java
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    Log.d(TAG, "Enter onActivityCreated...");

    InitTimeDisplayView();
    mLapList = (ListView) getView().findViewById(R.id.listLap);
    this.registerForContextMenu(mLapList);

    btnStart = (Button) getView().findViewById(R.id.btnStart);
    btnStart.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (state == STATE_NONE) {
                // Check whether the device supports audio recording
                if (AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO,
                        AudioFormat.ENCODING_PCM_16BIT) < 0) {
                    Context context = getActivity().getApplicationContext();
                    Toast toast = Toast.makeText(context, R.string.strNoRecorder, 5);
                    toast.show();
                    return;
                }
                AudioManager audiomanager = (AudioManager) getActivity()
                        .getSystemService(Context.AUDIO_SERVICE);
                Log.d(TAG, "AudioMode = " + audiomanager.getMode());
                if (audiomanager.getMode() != AudioManager.MODE_NORMAL) {
                    Context context = getActivity().getApplicationContext();
                    Toast toast = Toast.makeText(context, R.string.strInCalling, 5);
                    toast.show();
                    return;
                }
                state = STATE_COUNTDOWN;
                DialogFragment newFragment = CountdownDialog.NewInstance(5, getTag());
                newFragment.show(getFragmentManager(), "countdownDialog");
            } else {
                changeState();
                state = STATE_NONE;
                updateRealElapseTime();
                printTime();
                // Unbind RecordService
                if (mBound) {
                    mService.stopRecord();
                    mService.unsetBang();
                    getActivity().unbindService(mConnection);
                    getActivity().stopService(new Intent(getActivity(), RecordService.class));
                    mBound = false;
                }
            }
            ((MainActivity) getActivity()).EnableTab(1, state == STATE_NONE);
        }
    });

    if (savedInstanceState != null) {
        Log.d(TAG, "savedInstanceState " + savedInstanceState.toString());
        _timekeeper = savedInstanceState.getParcelable(STATE_TIMEKEEPER);
        mLapManager = savedInstanceState.getParcelable(STATE_LAPS);
        state = savedInstanceState.getInt(STATE_STATE);
        mBound = savedInstanceState.getBoolean(STATE_BOUNDING);
        ((MainActivity) getActivity()).EnableTab(1, state == STATE_NONE);
    } else {
        Log.d(TAG, "savedInstanceState == NULL");
        if (_timekeeper == null)
            _timekeeper = new Timekeeper();
        if (mLapManager == null)
            mLapManager = new LapManager();
    }

    InitLapList();
    printTime();
    updateState();
    Log.d(TAG, "Leave OnActivityCreated...");
}
From source file:de.badaix.snapcast.MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100, 48000 }) { // add the rates you wish to check against
        Log.d(TAG, "Samplerate: " + rate);
        int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_OUT_STEREO,
                AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize > 0) {
            Log.d(TAG, "Samplerate: " + rate + ", buffer: " + bufferSize);
        }
    }

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
        String rate = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
        nativeSampleRate = Integer.valueOf(rate);
        // String size = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
        // tvInfo.setText("Sample rate: " + rate + ", buffer size: " + size);
    }

    coordinatorLayout = (CoordinatorLayout) findViewById(R.id.myCoordinatorLayout);
    Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);

    // Create the adapter that will return a fragment for each of the three
    // primary sections of the activity.
    sectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());

    // Set up the ViewPager with the sections adapter.
    mViewPager = (ViewPager) findViewById(R.id.container);
    mViewPager.setAdapter(sectionsPagerAdapter);

    tabLayout = (TabLayout) findViewById(R.id.tabs);
    tabLayout.setupWithViewPager(mViewPager);
    mViewPager.setVisibility(View.GONE);
    setActionbarSubtitle("Host: no Snapserver found");

    new Thread(new Runnable() {
        @Override
        public void run() {
            Log.d(TAG, "copying snapclient");
            Setup.copyBinAsset(MainActivity.this, "snapclient", "snapclient");
            Log.d(TAG, "done copying snapclient");
        }
    }).start();

    sectionsPagerAdapter.setHideOffline(Settings.getInstance(this).getBoolean("hide_offline", false));
}
From source file:com.xperia64.timidityae.Globals.java
public static int[] validRates(boolean stereo, boolean sixteen) {
    ArrayList<Integer> valid = new ArrayList<Integer>();
    for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100, 48000, 88200, 96000 }) {
        int bufferSize = AudioTrack.getMinBufferSize(rate,
                (stereo) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO,
                (sixteen) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT);
        if (bufferSize > 0) {
            // Buffer size is valid, so this sample rate is supported
            valid.add(rate);
        }
    }
    int[] rates = new int[valid.size()];
    for (int i = 0; i < rates.length; i++)
        rates[i] = valid.get(i);
    return rates;
}
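A possible caller of this helper, picking the highest supported rate; the 44100 Hz fallback is an assumption for illustration and is not taken from the Timidity AE source:

// Candidate rates are checked in ascending order, so the last entry is the highest valid one.
int[] rates = Globals.validRates(true, true); // stereo, 16-bit
int chosenRate = (rates.length > 0) ? rates[rates.length - 1] : 44100;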
From source file:com.tt.engtrain.showcontent.ContentListItemActivity.java
@Override
protected void onCreate(final Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_baselist);

    mSpeechEvaluator = SpeechEvaluator.createEvaluator(ContentListItemActivity.this, null);
    mToast = Toast.makeText(this, "", Toast.LENGTH_SHORT);

    int iMinBufSize = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, iMinBufSize, AudioTrack.MODE_STREAM);

    mTts = SpeechSynthesizer.createSynthesizer(this, mTtsInitListener);
    initData();
    initListView();
    // initspker();
}
From source file:com.ece420.lab3.MainActivity.java
private void queryNativeAudioParameters() {
    AudioManager myAudioMgr = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    nativeSampleRate = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
    nativeSampleBufSize = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);

    int recBufSize = AudioRecord.getMinBufferSize(Integer.parseInt(nativeSampleRate),
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    supportRecording = true;
    if (recBufSize == AudioRecord.ERROR || recBufSize == AudioRecord.ERROR_BAD_VALUE) {
        supportRecording = false;
    }
}
From source file:com.xperia64.timidityae.Globals.java
public static SparseIntArray validBuffers(int[] rates, boolean stereo, boolean sixteen) {
    SparseIntArray buffers = new SparseIntArray();
    for (int rate : rates) {
        buffers.put(rate,
                AudioTrack.getMinBufferSize(rate,
                        (stereo) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO,
                        (sixteen) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT));
    }
    return buffers;
    /*
    HashMap<Integer, Integer> buffers = new HashMap<Integer, Integer>();
    for (int rate : rates) {
        buffers.put(rate, AudioTrack.getMinBufferSize(rate,
                (stereo) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO,
                (sixteen) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT));
    }
    return buffers;
    */
}
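A hypothetical way to combine the two helpers above, logging the minimum buffer size in bytes that getMinBufferSize reports for each supported rate; the "AudioCaps" log tag is illustrative:

int[] rates = Globals.validRates(true, true);
SparseIntArray buffers = Globals.validBuffers(rates, true, true);
for (int i = 0; i < buffers.size(); i++) {
    Log.d("AudioCaps", buffers.keyAt(i) + " Hz -> " + buffers.valueAt(i) + " bytes");
}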