List of usage examples for android.media AudioFormat ENCODING_PCM_16BIT
int ENCODING_PCM_16BIT

Audio data format: PCM 16 bit per sample. This is the PCM encoding guaranteed to be supported by all Android devices.
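Before the project examples, here is a minimal sketch of the usual pattern: query the minimum buffer size for 16-bit PCM, then pass the same encoding constant to the constructor. It assumes the same deprecated stream-type AudioTrack constructor used in the examples below; the class name, sample rate, and channel mask are illustrative choices, not taken from any example.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class Pcm16Sketch {
    // Sketch only: 44100 Hz mono is an assumption, not a requirement.
    public static AudioTrack createTrack() {
        int sampleRate = 44100;
        int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        // ENCODING_PCM_16BIT is guaranteed to be supported by devices.
        return new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize, AudioTrack.MODE_STREAM);
    }
}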
From source file:net.reichholf.dreamdroid.fragment.SignalFragment.java
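Generates a 0.1-second sine tone, converts it to little-endian 16-bit PCM with amplitude ramps to avoid clicks, and plays it through a static-mode AudioTrack: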
void playSound(double freqOfTone) {
    double duration = 0.1; // seconds
    int sampleRate = 8000; // samples per second
    double dnumSamples = duration * sampleRate;
    dnumSamples = Math.ceil(dnumSamples);
    int numSamples = (int) dnumSamples;
    double sample[] = new double[numSamples];
    byte generatedSnd[] = new byte[2 * numSamples];

    // Fill the sample array
    for (int i = 0; i < numSamples; ++i) {
        sample[i] = Math.sin(freqOfTone * 2 * Math.PI * i / (sampleRate));
    }

    // Convert to 16 bit PCM sound array; assumes the sample buffer is normalized.
    int idx = 0;
    int i = 0;
    int ramp = numSamples / 20; // Amplitude ramp as a percent of sample count

    for (i = 0; i < numSamples; ++i) {
        if (i < ramp) {
            // Ramp amplitude up (to avoid clicks)
            double dVal = sample[i];
            final short val = (short) ((dVal * 32767 * i / ramp));
            // In 16 bit wav PCM, the first byte is the low-order byte
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        } else if (i < numSamples - ramp) {
            // Max amplitude for most of the samples
            double dVal = sample[i];
            // Scale to maximum amplitude
            final short val = (short) ((dVal * 32767));
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        } else {
            // Ramp down to zero
            double dVal = sample[i];
            final short val = (short) ((dVal * 32767 * (numSamples - i) / ramp));
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        }
    }

    AudioTrack audioTrack = null; // Get audio track
    try {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                (int) numSamples * 2, AudioTrack.MODE_STATIC);
        audioTrack.write(generatedSnd, 0, generatedSnd.length); // Load the track
        audioTrack.play(); // Play the track
    } catch (Exception e) {
        // Ignored in the original source; audioTrack stays null on failure
    }

    // Monitor playback to find out when it is done
    int x = 0;
    do {
        if (audioTrack != null)
            x = audioTrack.getPlaybackHeadPosition();
        else
            x = numSamples;
    } while (x < numSamples);

    if (audioTrack != null)
        audioTrack.release(); // Track play done. Release track.
}
From source file:uk.co.armedpineapple.cth.SDLActivity.java
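Initialises SDL audio output, choosing between 8-bit and 16-bit PCM and mono or stereo, and sizing the buffer from AudioTrack.getMinBufferSize():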
public static Object audioInit(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
    int channelConfig = isStereo ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

    Log.v("SDL", "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " "
            + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, "
            + desiredFrames + " frames buffer");

    // Let the user pick a larger buffer if they really want -- but ye
    // gods they probably shouldn't, the minimums are horrifyingly high
    // latency already
    desiredFrames = Math.max(desiredFrames,
            (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
            audioFormat, desiredFrames * frameSize, AudioTrack.MODE_STREAM);

    audioStartThread();

    Log.v("SDL", "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono")
            + " " + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit")
            + " " + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");

    if (is16Bit) {
        audioBuffer = new short[desiredFrames * (isStereo ? 2 : 1)];
    } else {
        audioBuffer = new byte[desiredFrames * (isStereo ? 2 : 1)];
    }
    return audioBuffer;
}
From source file:com.bangz.shotrecorder.RecordActivity.java
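Restores recording parameters from a service intent when the activity is launched from a notification, defaulting the encoding extra to ENCODING_PCM_16BIT: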
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_recorder);

    ActionBar bar = getSupportActionBar();
    bar.setDisplayHomeAsUpEnabled(true);

    strModeNames = getResources().getStringArray(R.array.mode_names);
    strModeValNames = getResources().getStringArray(R.array.mode_value_names);
    mMode = MODE.COMSTOCK;

    setDigitFont();
    initButtonsListener();

    TextView v = (TextView) findViewById(R.id.textTIME);
    v.setText("------");

    FragmentManager fm = getSupportFragmentManager();
    SplitListFragment splitfragment = (SplitListFragment) fm.findFragmentById(R.id.splitlist);
    mSplitAdapter = new SplitArrayAdapter(this, mSplitManager.getSplits());
    splitfragment.setListAdapter(mSplitAdapter);

    if (savedInstanceState == null) {
        Intent intent = getIntent();
        if (intent.getBooleanExtra(EXTRA_STARTFROMNOTIFY, false)) {
            mState = STATE_RECORDING;
            int mode = intent.getIntExtra(RecordService.EXTRA_MODE, 0);
            Log.d(TAG, "get mode in RecordActivity from service mode = " + mode);
            mMode = MODE.values()[mode];
            mSampleRate = intent.getIntExtra(RecordService.EXTRA_SAMPLERATE, 44100);
            mChannels = intent.getIntExtra(RecordService.EXTRA_CHANNLES, AudioFormat.CHANNEL_IN_MONO);
            mEncoding = intent.getIntExtra(RecordService.EXTRA_ENCODDING, AudioFormat.ENCODING_PCM_16BIT);
            mMaxShots = intent.getIntExtra(RecordService.EXTRA_MAXSHOT, 0);
            mMaxParTime = intent.getIntExtra(RecordService.EXTRA_MAXPARTIME, 0) / 1000.0f;
            mCaptureSize = intent.getIntExtra(RecordService.EXTRA_CAPTURESIZE, 0);
            mMaxRecordTime = intent.getIntExtra(RecordService.EXTRA_MAXRECORDTIME, 5 * 60);
        }
    }

    updateMode();
    updateStatus();
}
From source file:net.sf.asap.PlayerService.java
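Loads a music module, then streams 16-bit PCM generated by the ASAP decoder to a streaming AudioTrack in a playback loop that handles pause, seek, and stop: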
public void run() {
    // Read file
    String filename = uri.getPath();
    byte[] module = new byte[ASAPInfo.MAX_MODULE_LENGTH];
    int moduleLen;
    try {
        InputStream is;
        switch (uri.getScheme()) {
        case "file":
            if (Util.isZip(filename)) {
                String zipFilename = filename;
                filename = uri.getFragment();
                is = new ZipInputStream(zipFilename, filename);
            } else
                is = new FileInputStream(filename);
            break;
        case "http":
            is = httpGet(uri);
            break;
        default:
            throw new FileNotFoundException(uri.toString());
        }
        moduleLen = Util.readAndClose(is, module);
    } catch (IOException ex) {
        showError(R.string.error_reading_file);
        return;
    }

    // Load file
    try {
        asap.load(filename, module, moduleLen);
        info = asap.getInfo();
        switch (song) {
        case SONG_DEFAULT:
            song = info.getDefaultSong();
            break;
        case SONG_LAST:
            song = info.getSongs() - 1;
            break;
        default:
            break;
        }
        playSong();
    } catch (Exception ex) {
        showError(R.string.invalid_file);
        return;
    }

    PendingIntent contentIntent = PendingIntent.getActivity(this, 0, new Intent(this, Player.class), 0);
    String title = info.getTitleOrFilename();
    Notification notification = new Notification(R.drawable.icon, title, System.currentTimeMillis());
    notification.flags |= Notification.FLAG_ONGOING_EVENT;
    notification.setLatestEventInfo(this, title, info.getAuthor(), contentIntent);
    startForegroundCompat(NOTIFICATION_ID, notification);

    // Playback
    int channelConfig = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    int bufferLen = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT) >> 1;
    if (bufferLen < 16384)
        bufferLen = 16384;
    final byte[] byteBuffer = new byte[bufferLen << 1];
    final short[] shortBuffer = new short[bufferLen];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT, bufferLen << 1, AudioTrack.MODE_STREAM);
    audioTrack.play();

    for (;;) {
        synchronized (this) {
            if (bufferLen < shortBuffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        synchronized (asap) {
            int pos = seekPosition;
            if (pos >= 0) {
                seekPosition = -1;
                try {
                    asap.seek(pos);
                } catch (Exception ex) {
                }
            }
            bufferLen = asap.generate(byteBuffer, byteBuffer.length, ASAPSampleFormat.S16_L_E) >> 1;
        }
        for (int i = 0; i < bufferLen; i++)
            shortBuffer[i] = (short) ((byteBuffer[i << 1] & 0xff) | byteBuffer[i << 1 | 1] << 8);
        audioTrack.write(shortBuffer, 0, bufferLen);
    }
}
From source file:com.brejza.matt.habmodem.Dsp_service.java
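Opens a matched 8 kHz, 16-bit AudioRecord/AudioTrack pair for full-duplex modem audio, falling back to the default audio source if the microphone cannot be initialised: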
public void startAudio() {
    if (!_enableDecoder)
        return;

    boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);

    System.out.println("isRecording: " + isRecording);
    logEvent("Starting Audio. Mic available: " + mic, false);

    if (!isRecording) {
        isRecording = true;

        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);

        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);
        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);

        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }

        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);
            if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
                logEvent("Error - Could not initialise audio", true);
                return;
            }
            logEvent("Using default audio source", false);
        }

        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        logEvent("Starting Audio Thread.", false);
        setDecoderRunningNotification();
        ct.start();
    }
}
From source file:com.smc.tw.waltz.MainActivity.java
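Creates an 8 kHz mono 16-bit AudioRecord with a fixed buffer size during activity setup: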
@Override
protected void onCreate(Bundle savedInstanceState) {
    if (DEBUG)
        Log.d(TAG, "onCreate");
    overridePendingTransition(R.anim.slide_right_in, R.anim.slide_left_out);
    super.onCreate(savedInstanceState);
    Fabric.with(this, new Crashlytics());
    setContentView(R.layout.activity_main);

    mPowerManager = (PowerManager) getSystemService(Context.POWER_SERVICE);
    mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    mWakeLock = mPowerManager.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK, TAG);
    mPreferences = PreferenceManager.getDefaultSharedPreferences(this);
    mFragmentManager = getSupportFragmentManager();
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);

    mAudioRecordBufferSize = 5600; // AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)*10;
    mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, mAudioRecordBufferSize);

    mNotifyChannelList = new ArrayList<String>();
    setupLayout();
    if (savedInstanceState != null) {
        mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
    }

    // mRegistrationBroadcastReceiver = new BroadcastReceiver() {
    //     @Override
    //     public void onReceive(Context context, Intent intent) {
    //         // checking for type intent filter
    //         if (intent.getAction().equals(MainApplication.REGISTRATION_COMPLETE)) {
    //             // gcm successfully registered
    //             // now subscribe to `global` topic to receive app wide notifications
    //             String token = intent.getStringExtra("token");
    //             //Toast.makeText(getApplicationContext(), "GCM registration token: " + token, Toast.LENGTH_LONG).show();
    //         } else if (intent.getAction().equals(MainApplication.SENT_TOKEN_TO_SERVER)) {
    //             // gcm registration id is stored in our server's MySQL
    //             // Toast.makeText(getApplicationContext(), "GCM registration token is stored in server!", Toast.LENGTH_LONG).show();
    //         } else if (intent.getAction().equals(MainApplication.PUSH_NOTIFICATION)) {
    //             // new push notification is received
    //             // Toast.makeText(getApplicationContext(), "Push notification is received!", Toast.LENGTH_LONG).show();
    //         }
    //     }
    // };

    if (checkPlayServices()) {
        registerGCM();
    }
}
From source file:com.cypress.cysmart.RDKEmulatorView.RemoteControlEmulatorFragment.java
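Sets up a streaming 16-bit mono AudioTrack for remote-control voice data and wires up the emulator's trackpad and microphone buttons: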
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    /**
     * Getting the current orientation of the screen.
     * Loading different views for landscape and portrait.
     */
    int currentOrientation = getResources().getConfiguration().orientation;
    if (currentOrientation == Configuration.ORIENTATION_LANDSCAPE) {
        mParentView = inflater.inflate(R.layout.rdk_emulator_view_landscape, container, false);
    } else {
        mParentView = inflater.inflate(R.layout.rdk_emulator_view_portrait, container, false);
    }
    mProgressDialog = new ProgressDialog(getActivity());

    /**
     * Getting the IDs of all emulator view UI elements
     */
    Button mTrackpadView = (Button) mParentView.findViewById(R.id.trackpad_btn);
    Button mMicrophoneView = (Button) mParentView.findViewById(R.id.microphone_btn);
    mVolumePlusbtn = (ImageButton) mParentView.findViewById(R.id.volume_plus_btn);
    mVolumeMinusBtn = (ImageButton) mParentView.findViewById(R.id.volume_minus_btn);
    mChannelPlusBtn = (ImageButton) mParentView.findViewById(R.id.channel_plus_btn);
    mChannelMinusBtn = (ImageButton) mParentView.findViewById(R.id.channel_minus_btn);
    mLeftBtn = (ImageButton) mParentView.findViewById(R.id.left_btn);
    mRightBtn = (ImageButton) mParentView.findViewById(R.id.right_btn);
    mBackBtn = (ImageButton) mParentView.findViewById(R.id.back_btn);
    mGesturebtn = (ImageButton) mParentView.findViewById(R.id.gesture_btn);
    mExitBtn = (ImageButton) mParentView.findViewById(R.id.exit_btn);
    mPowerBtn = (ImageButton) mParentView.findViewById(R.id.power_btn);
    mRecBtn = (ImageButton) mParentView.findViewById(R.id.record_btn);

    /**
     * AudioTrack initialisation:
     * streamType - AudioManager.STREAM_MUSIC,
     * sampleRateInHz - 16000,
     * channelConfig - AudioFormat.CHANNEL_OUT_MONO,
     * audioFormat - AudioFormat.ENCODING_PCM_16BIT,
     * bufferSizeInBytes - 8000,
     * mode - AudioTrack.MODE_STREAM
     */
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, BUFFER_SIZE, AudioTrack.MODE_STREAM);

    /**
     * Trackpad button click listener
     */
    mTrackpadView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            TrackpadEmulatorFragment trackpadService = new TrackpadEmulatorFragment();
            try {
                displayView(trackpadService);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });

    /**
     * Microphone button click listener
     */
    mMicrophoneView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            MicrophoneEmulatorFragment microphoneService = new MicrophoneEmulatorFragment();
            microphoneService.create(mservice);
            displayView(microphoneService);
        }
    });

    return mParentView;
}
From source file:com.ferdi2005.secondgram.voip.VoIPService.java
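Uses the minimum 16-bit mono buffer size at 48 kHz as a fallback when setting the native VoIP buffer size, then initialises the call controller, receivers, and sounds: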
@Override
public void onCreate() {
    super.onCreate();
    FileLog.d("=============== VoIPService STARTING ===============");
    AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE);
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1
            && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != null) {
        int outFramesPerBuffer = Integer
                .parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
        VoIPController.setNativeBufferSize(outFramesPerBuffer);
    } else {
        VoIPController.setNativeBufferSize(
                AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO,
                        AudioFormat.ENCODING_PCM_16BIT) / 2);
    }
    final SharedPreferences preferences = getSharedPreferences("mainconfig", MODE_PRIVATE);
    VoIPServerConfig.setConfig(preferences.getString("voip_server_config", "{}"));
    if (System.currentTimeMillis() - preferences.getLong("voip_server_config_updated", 0) > 24 * 3600000) {
        ConnectionsManager.getInstance().sendRequest(new TLRPC.TL_phone_getCallConfig(), new RequestDelegate() {
            @Override
            public void run(TLObject response, TLRPC.TL_error error) {
                if (error == null) {
                    String data = ((TLRPC.TL_dataJSON) response).data;
                    VoIPServerConfig.setConfig(data);
                    preferences.edit().putString("voip_server_config", data)
                            .putLong("voip_server_config_updated",
                                    BuildConfig.DEBUG ? 0 : System.currentTimeMillis())
                            .apply();
                }
            }
        });
    }
    try {
        controller = new VoIPController();
        controller.setConnectionStateListener(this);
        controller.setConfig(MessagesController.getInstance().callPacketTimeout / 1000.0,
                MessagesController.getInstance().callConnectTimeout / 1000.0,
                preferences.getInt("VoipDataSaving", VoIPController.DATA_SAVING_NEVER));
        cpuWakelock = ((PowerManager) getSystemService(POWER_SERVICE))
                .newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip");
        cpuWakelock.acquire();
        btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null;
        IntentFilter filter = new IntentFilter();
        filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        filter.addAction(ACTION_HEADSET_PLUG);
        if (btAdapter != null) {
            filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
            filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
        }
        filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED);
        filter.addAction(getPackageName() + ".END_CALL");
        filter.addAction(getPackageName() + ".DECLINE_CALL");
        filter.addAction(getPackageName() + ".ANSWER_CALL");
        registerReceiver(receiver, filter);
        ConnectionsManager.getInstance().setAppPaused(false, false);
        soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0);
        spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1);
        spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1);
        spFailedID = soundPool.load(this, R.raw.voip_failed, 1);
        spEndId = soundPool.load(this, R.raw.voip_end, 1);
        spBusyId = soundPool.load(this, R.raw.voip_busy, 1);
        am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class));
        if (btAdapter != null && btAdapter.isEnabled()) {
            int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET);
            updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED);
            if (headsetState == BluetoothProfile.STATE_CONNECTED)
                am.setBluetoothScoOn(true);
            for (StateListener l : stateListeners)
                l.onAudioSettingsChanged();
        }
        NotificationCenter.getInstance().addObserver(this, NotificationCenter.appDidLogout);
    } catch (Exception x) {
        FileLog.e("error initializing voip controller", x);
        callFailed();
    }
}
From source file:com.github.olga_yakovleva.rhvoice.android.RHVoiceService.java
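Runs text-to-speech synthesis, telling the synthesis callback to expect mono 16-bit PCM at 24 kHz via callback.start():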
@Override
protected void onSynthesizeText(SynthesisRequest request, SynthesisCallback callback) {
    if (BuildConfig.DEBUG) {
        Log.v(TAG, "onSynthesize called");
        logLanguage(request.getLanguage(), request.getCountry(), request.getVariant());
    }
    Tts tts = ttsManager.acquireForSynthesis();
    if (tts == null) {
        if (BuildConfig.DEBUG)
            Log.w(TAG, "Not initialized yet");
        callback.error();
        return;
    }
    try {
        speaking = true;
        Map<String, LanguageSettings> languageSettings = getLanguageSettings(tts);
        String voiceName = "";
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
            voiceName = request.getVoiceName();
        final Candidate bestMatch = findBestVoice(tts, request.getLanguage(), request.getCountry(),
                request.getVariant(), voiceName, languageSettings);
        if (bestMatch.voice == null) {
            if (BuildConfig.DEBUG)
                Log.e(TAG, "Unsupported language");
            callback.error();
            return;
        }
        if (BuildConfig.DEBUG)
            Log.v(TAG, "Selected voice: " + bestMatch.voice.getSource().getName());
        currentVoice = bestMatch.voice;
        StringBuilder voiceProfileSpecBuilder = new StringBuilder();
        voiceProfileSpecBuilder.append(bestMatch.voice.getSource().getName());
        for (Map.Entry<String, LanguageSettings> entry : languageSettings.entrySet()) {
            if (entry.getKey().equals(bestMatch.voice.getLanguage()))
                continue;
            if (entry.getValue().detect) {
                String name = entry.getValue().voice.getSource().getName();
                voiceProfileSpecBuilder.append("+").append(name);
            }
        }
        String profileSpec = voiceProfileSpecBuilder.toString();
        if (BuildConfig.DEBUG)
            Log.v(TAG, "Synthesizing the following text: " + request.getText());
        int rate = request.getSpeechRate();
        if (BuildConfig.DEBUG)
            Log.v(TAG, "rate=" + rate);
        int pitch = request.getPitch();
        if (BuildConfig.DEBUG)
            Log.v(TAG, "pitch=" + pitch);
        if (BuildConfig.DEBUG)
            Log.v(TAG, "Profile: " + profileSpec);
        final SynthesisParameters params = new SynthesisParameters();
        params.setVoiceProfile(profileSpec);
        params.setRate(((double) rate) / 100.0);
        params.setPitch(((double) pitch) / 100.0);
        final Player player = new Player(callback);
        callback.start(24000, AudioFormat.ENCODING_PCM_16BIT, 1);
        tts.engine.speak(request.getText(), params, player);
        callback.done();
    } catch (RHVoiceException e) {
        if (BuildConfig.DEBUG)
            Log.e(TAG, "Synthesis error", e);
        callback.error();
    } finally {
        speaking = false;
        ttsManager.release(tts);
    }
}
From source file:info.guardianproject.iocipher.camera.VideoCameraActivity.java
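Prepares either an AAC encoder or a raw 16-bit PCM AudioRecord for the audio side of encrypted video capture, picking the audio source based on camera direction: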
private void initAudio(final String audioPath) throws Exception {
    fileAudio = new File(audioPath);
    outputStreamAudio = new BufferedOutputStream(
            new info.guardianproject.iocipher.FileOutputStream(fileAudio), 8192 * 8);

    if (useAAC) {
        aac = new AACHelper();
        aac.setEncoder(MediaConstants.sAudioSampleRate, MediaConstants.sAudioChannels,
                MediaConstants.sAudioBitRate);
    } else {
        int minBufferSize = AudioRecord.getMinBufferSize(MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigIn, AudioFormat.ENCODING_PCM_16BIT) * 8;

        audioData = new byte[minBufferSize];

        int audioSource = MediaRecorder.AudioSource.CAMCORDER;
        if (this.getCameraDirection() == CameraInfo.CAMERA_FACING_FRONT) {
            audioSource = MediaRecorder.AudioSource.MIC;
        }

        audioRecord = new AudioRecord(audioSource, MediaConstants.sAudioSampleRate,
                MediaConstants.sChannelConfigIn, AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    }
}