Usage examples for android.media.AudioTrack.getMinBufferSize

public static int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat)
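Before the examples, a minimal sketch of the usual pattern, with illustrative values (44100 Hz, mono, 16-bit PCM) that are not taken from any of the sources below: query the minimum buffer size, check the error constants, then create the AudioTrack with at least that many bytes.

// Minimal sketch; sample rate, channel mask and encoding are assumptions for illustration.
int sampleRate = 44100;
int minSize = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
// getMinBufferSize() returns the size in bytes, or a negative error code
// (AudioTrack.ERROR / AudioTrack.ERROR_BAD_VALUE) if the configuration is unsupported.
if (minSize == AudioTrack.ERROR || minSize == AudioTrack.ERROR_BAD_VALUE) {
    throw new IllegalStateException("Unsupported audio configuration");
}
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        minSize * 2, AudioTrack.MODE_STREAM); // a multiple of the minimum reduces underruns
track.play();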
From source file: Main.java

public static final int getMinimumBufferSize(int sampleRate) {
    return AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
}
From source file: com.n0n3m4.q3e.Q3ECallbackObj.java

public void init(int size) {
    if (mAudioTrack != null)
        return;
    if ((Q3EUtils.q3ei.isQ3) || (Q3EUtils.q3ei.isRTCW) || (Q3EUtils.q3ei.isQ1) || (Q3EUtils.q3ei.isQ2))
        size /= 8;
    mAudioData = new byte[size];
    int sampleFreq = 44100;
    /* (ASCII-art drawing of an OUYA controller omitted) */
    int bufferSize = Math.max((Q3EUtils.isOuya) ? 0 : 3 * size,
            AudioTrack.getMinBufferSize(sampleFreq, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT));
    mAudioTrack = new Q3EAudioTrack(AudioManager.STREAM_MUSIC, sampleFreq,
            AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSize, AudioTrack.MODE_STREAM);
    mAudioTrack.play();
    long sleeptime = (size * 1000000000L) / (2 * 2 * sampleFreq);
    ScheduledThreadPoolExecutor stpe = new ScheduledThreadPoolExecutor(5);
    stpe.scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            if (reqThreadrunning) {
                Q3EJNI.requestAudioData();
            }
        }
    }, 0, sleeptime, TimeUnit.NANOSECONDS);
}
From source file: com.ibm.watson.developer_cloud.android.text_to_speech.v1.TTSUtility.java

private void initPlayer() {
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation of an
    // AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                bufferSize, AudioTrack.MODE_STREAM);
        if (audioTrack != null)
            audioTrack.play();
    }
}
From source file: net.sf.asap.Player.java

public void run() {
    int config = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    int len = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, config, AudioFormat.ENCODING_PCM_8BIT);
    if (len < 16384)
        len = 16384;
    final byte[] buffer = new byte[len];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, config,
            AudioFormat.ENCODING_PCM_8BIT, len, AudioTrack.MODE_STREAM);
    audioTrack.play();
    for (;;) {
        synchronized (this) {
            if (len < buffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        synchronized (asap) {
            len = asap.generate(buffer, buffer.length, ASAPSampleFormat.U8);
        }
        audioTrack.write(buffer, 0, len);
    }
}
From source file: com.xperia64.timidityae.Globals.java

public static int[] validRates(boolean stereo, boolean sixteen) {
    ArrayList<Integer> valid = new ArrayList<Integer>();
    for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100, 48000, 88200, 96000 }) {
        int bufferSize = AudioTrack.getMinBufferSize(rate,
                (stereo) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO,
                (sixteen) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT);
        if (bufferSize > 0) {
            // System.out.println(rate + " " + bufferSize);
            // Buffer size is valid, so the sample rate is supported.
            valid.add(rate);
        }
    }
    int[] rates = new int[valid.size()];
    for (int i = 0; i < rates.length; i++)
        rates[i] = valid.get(i);
    return rates;
}
From source file: com.tt.engtrain.showcontent.ContentListItemActivity.java

@Override
protected void onCreate(final Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_baselist);
    mSpeechEvaluator = SpeechEvaluator.createEvaluator(ContentListItemActivity.this, null);
    mToast = Toast.makeText(this, "", Toast.LENGTH_SHORT);
    int iMinBufSize = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000,
            AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
            iMinBufSize, AudioTrack.MODE_STREAM);
    mTts = SpeechSynthesizer.createSynthesizer(this, mTtsInitListener);
    initData();
    initListView();
    // initspker();
}
From source file: com.xperia64.timidityae.Globals.java

public static SparseIntArray validBuffers(int[] rates, boolean stereo, boolean sixteen) {
    SparseIntArray buffers = new SparseIntArray();
    for (int rate : rates) {
        buffers.put(rate,
                AudioTrack.getMinBufferSize(rate,
                        (stereo) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO,
                        (sixteen) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT));
    }
    return buffers;
    /*
    HashMap<Integer, Integer> buffers = new HashMap<Integer, Integer>();
    for (int rate : rates) {
        buffers.put(rate, AudioTrack.getMinBufferSize(rate,
                (stereo) ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO,
                (sixteen) ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT));
    }
    return buffers;
    */
}
From source file: uk.co.armedpineapple.cth.SDLActivity.java

public static Object audioInit(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
    int channelConfig = isStereo ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

    Log.v("SDL", "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " "
            + (is16Bit ? "16-bit" : "8-bit") + " " + (sampleRate / 1000f) + "kHz, "
            + desiredFrames + " frames buffer");

    // Let the user pick a larger buffer if they really want -- but ye
    // gods they probably shouldn't, the minimums are horrifyingly high
    // latency already
    desiredFrames = Math.max(desiredFrames,
            (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat,
            desiredFrames * frameSize, AudioTrack.MODE_STREAM);

    audioStartThread();

    Log.v("SDL", "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " "
            + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " "
            + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");

    if (is16Bit) {
        audioBuffer = new short[desiredFrames * (isStereo ? 2 : 1)];
    } else {
        audioBuffer = new byte[desiredFrames * (isStereo ? 2 : 1)];
    }
    return audioBuffer;
}
From source file: net.sf.asap.PlayerService.java

public void run() {
    // Read file.
    String filename = uri.getPath();
    byte[] module = new byte[ASAPInfo.MAX_MODULE_LENGTH];
    int moduleLen;
    try {
        InputStream is;
        switch (uri.getScheme()) {
        case "file":
            if (Util.isZip(filename)) {
                String zipFilename = filename;
                filename = uri.getFragment();
                is = new ZipInputStream(zipFilename, filename);
            } else
                is = new FileInputStream(filename);
            break;
        case "http":
            is = httpGet(uri);
            break;
        default:
            throw new FileNotFoundException(uri.toString());
        }
        moduleLen = Util.readAndClose(is, module);
    } catch (IOException ex) {
        showError(R.string.error_reading_file);
        return;
    }

    // Load file.
    try {
        asap.load(filename, module, moduleLen);
        info = asap.getInfo();
        switch (song) {
        case SONG_DEFAULT:
            song = info.getDefaultSong();
            break;
        case SONG_LAST:
            song = info.getSongs() - 1;
            break;
        default:
            break;
        }
        playSong();
    } catch (Exception ex) {
        showError(R.string.invalid_file);
        return;
    }

    PendingIntent contentIntent = PendingIntent.getActivity(this, 0, new Intent(this, Player.class), 0);
    String title = info.getTitleOrFilename();
    Notification notification = new Notification(R.drawable.icon, title, System.currentTimeMillis());
    notification.flags |= Notification.FLAG_ONGOING_EVENT;
    notification.setLatestEventInfo(this, title, info.getAuthor(), contentIntent);
    startForegroundCompat(NOTIFICATION_ID, notification);

    // Playback.
    int channelConfig = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    int bufferLen = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT) >> 1;
    if (bufferLen < 16384)
        bufferLen = 16384;
    final byte[] byteBuffer = new byte[bufferLen << 1];
    final short[] shortBuffer = new short[bufferLen];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT, bufferLen << 1, AudioTrack.MODE_STREAM);
    audioTrack.play();
    for (;;) {
        synchronized (this) {
            if (bufferLen < shortBuffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        synchronized (asap) {
            int pos = seekPosition;
            if (pos >= 0) {
                seekPosition = -1;
                try {
                    asap.seek(pos);
                } catch (Exception ex) {
                }
            }
            bufferLen = asap.generate(byteBuffer, byteBuffer.length, ASAPSampleFormat.S16_L_E) >> 1;
        }
        for (int i = 0; i < bufferLen; i++)
            shortBuffer[i] = (short) ((byteBuffer[i << 1] & 0xff) | byteBuffer[i << 1 | 1] << 8);
        audioTrack.write(shortBuffer, 0, bufferLen);
    }
}
From source file: com.ferdi2005.secondgram.voip.VoIPService.java

@Override
public void onCreate() {
    super.onCreate();
    FileLog.d("=============== VoIPService STARTING ===============");
    AudioManager am = (AudioManager) getSystemService(AUDIO_SERVICE);
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN_MR1
            && am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER) != null) {
        int outFramesPerBuffer = Integer
                .parseInt(am.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER));
        VoIPController.setNativeBufferSize(outFramesPerBuffer);
    } else {
        VoIPController.setNativeBufferSize(
                AudioTrack.getMinBufferSize(48000, AudioFormat.CHANNEL_OUT_MONO,
                        AudioFormat.ENCODING_PCM_16BIT) / 2);
    }
    final SharedPreferences preferences = getSharedPreferences("mainconfig", MODE_PRIVATE);
    VoIPServerConfig.setConfig(preferences.getString("voip_server_config", "{}"));
    if (System.currentTimeMillis() - preferences.getLong("voip_server_config_updated", 0) > 24 * 3600000) {
        ConnectionsManager.getInstance().sendRequest(new TLRPC.TL_phone_getCallConfig(), new RequestDelegate() {
            @Override
            public void run(TLObject response, TLRPC.TL_error error) {
                if (error == null) {
                    String data = ((TLRPC.TL_dataJSON) response).data;
                    VoIPServerConfig.setConfig(data);
                    preferences.edit().putString("voip_server_config", data)
                            .putLong("voip_server_config_updated",
                                    BuildConfig.DEBUG ? 0 : System.currentTimeMillis())
                            .apply();
                }
            }
        });
    }
    try {
        controller = new VoIPController();
        controller.setConnectionStateListener(this);
        controller.setConfig(MessagesController.getInstance().callPacketTimeout / 1000.0,
                MessagesController.getInstance().callConnectTimeout / 1000.0,
                preferences.getInt("VoipDataSaving", VoIPController.DATA_SAVING_NEVER));
        cpuWakelock = ((PowerManager) getSystemService(POWER_SERVICE))
                .newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "telegram-voip");
        cpuWakelock.acquire();
        btAdapter = am.isBluetoothScoAvailableOffCall() ? BluetoothAdapter.getDefaultAdapter() : null;
        IntentFilter filter = new IntentFilter();
        filter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
        filter.addAction(ACTION_HEADSET_PLUG);
        if (btAdapter != null) {
            filter.addAction(BluetoothHeadset.ACTION_CONNECTION_STATE_CHANGED);
            filter.addAction(AudioManager.ACTION_SCO_AUDIO_STATE_UPDATED);
        }
        filter.addAction(TelephonyManager.ACTION_PHONE_STATE_CHANGED);
        filter.addAction(getPackageName() + ".END_CALL");
        filter.addAction(getPackageName() + ".DECLINE_CALL");
        filter.addAction(getPackageName() + ".ANSWER_CALL");
        registerReceiver(receiver, filter);
        ConnectionsManager.getInstance().setAppPaused(false, false);
        soundPool = new SoundPool(1, AudioManager.STREAM_VOICE_CALL, 0);
        spConnectingId = soundPool.load(this, R.raw.voip_connecting, 1);
        spRingbackID = soundPool.load(this, R.raw.voip_ringback, 1);
        spFailedID = soundPool.load(this, R.raw.voip_failed, 1);
        spEndId = soundPool.load(this, R.raw.voip_end, 1);
        spBusyId = soundPool.load(this, R.raw.voip_busy, 1);
        am.registerMediaButtonEventReceiver(new ComponentName(this, VoIPMediaButtonReceiver.class));
        if (btAdapter != null && btAdapter.isEnabled()) {
            int headsetState = btAdapter.getProfileConnectionState(BluetoothProfile.HEADSET);
            updateBluetoothHeadsetState(headsetState == BluetoothProfile.STATE_CONNECTED);
            if (headsetState == BluetoothProfile.STATE_CONNECTED)
                am.setBluetoothScoOn(true);
            for (StateListener l : stateListeners)
                l.onAudioSettingsChanged();
        }
        NotificationCenter.getInstance().addObserver(this, NotificationCenter.appDidLogout);
    } catch (Exception x) {
        FileLog.e("error initializing voip controller", x);
        callFailed();
    }
}