List of usage examples for android.media AudioTrack MODE_STREAM
int MODE_STREAM: the creation mode in which audio data is transferred from the application to the native layer while the track is playing, one write() call at a time. The examples below show how real projects construct and feed an AudioTrack in this mode.
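Before the project-specific examples, here is a minimal self-contained sketch of the MODE_STREAM life cycle. The sample rate and format are arbitrary choices for illustration, and the classic six-argument constructor is used because every example below uses it:

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class StreamModeSketch {
    // Plays a buffer of 16-bit mono PCM at 44.1 kHz by streaming it to an AudioTrack.
    public static void playPcm(byte[] pcm) {
        int sampleRate = 44100; // arbitrary for this sketch
        int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize, AudioTrack.MODE_STREAM);
        track.play(); // in MODE_STREAM, play() may be called before any data is written
        int written = 0;
        while (written < pcm.length) {
            // write() blocks until there is room in the track's internal buffer
            int n = track.write(pcm, written, Math.min(minBufferSize, pcm.length - written));
            if (n < 0)
                break; // negative values are error codes such as ERROR_INVALID_OPERATION
            written += n;
        }
        track.stop();
        track.release();
    }
}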
From source file:Main.java
public static AudioTrack createTrack(int samplingRate) {
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, samplingRate, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_DEFAULT, samplingRate, AudioTrack.MODE_STREAM);
    return track;
}
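Note that this factory reuses samplingRate as the bufferSizeInBytes argument, which is fragile: if that value falls below the platform minimum, the track fails to initialize. A safer variant (a sketch, not from the original source) derives the buffer size from getMinBufferSize():

public static AudioTrack createTrackWithMinBuffer(int samplingRate) {
    // Size the buffer from the platform minimum instead of reusing the sample rate.
    int minBufferSize = AudioTrack.getMinBufferSize(samplingRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
    return new AudioTrack(AudioManager.STREAM_MUSIC, samplingRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize, AudioTrack.MODE_STREAM);
}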
From source file:zlyh.dmitry.recaller.threading.PlayBlockThread.java
@Override
public void run() {
    AudioTrack audioTrack = null;
    FileInputStream in = null;
    try {
        File rawpcm = new File(path);
        if (!rawpcm.exists()) {
            this.interrupt();
        }
        togglePlaying(true);

        final int audioLength = (int) rawpcm.length();
        final int minBufferSize = AudioRecord.getMinBufferSize(RecordRunnable.frequency,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RecordRunnable.frequency,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
                AudioTrack.MODE_STREAM);

        final int block = 256 * 1024;
        byte[] byteData = new byte[block];

        try {
            in = new FileInputStream(rawpcm);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            this.interrupt();
        }

        if (in != null) {
            try {
                int bytesread = 0;
                int offset;
                audioTrack.play();
                // Stream the raw PCM file into the track block by block.
                while (bytesread < audioLength && !isInterrupted()) {
                    offset = in.read(byteData, 0, block);
                    if (offset != -1) {
                        audioTrack.write(byteData, 0, offset);
                        bytesread += offset;
                    } else {
                        break;
                    }
                }
                in.close();
                togglePlaying(false);
                // getPlayState() reports the playback state; getState() only reports
                // whether the track was initialized.
                if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                    audioTrack.stop();
                }
                if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                    audioTrack.release();
                }
            } catch (Exception e) {
                e.printStackTrace();
                try {
                    in.close();
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
                if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                    audioTrack.stop();
                }
                if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                    audioTrack.release();
                }
                togglePlaying(false);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        if (audioTrack != null) {
            if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                audioTrack.stop();
            }
            if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                audioTrack.release();
            }
        }
        if (in != null) {
            try {
                in.close();
            } catch (IOException e1) {
                e1.printStackTrace();
            }
        }
        togglePlaying(false);
    }
}
From source file:com.n0n3m4.q3e.Q3ECallbackObj.java
public void init(int size) {
    if (mAudioTrack != null)
        return;
    if ((Q3EUtils.q3ei.isQ3) || (Q3EUtils.q3ei.isRTCW) || (Q3EUtils.q3ei.isQ1) || (Q3EUtils.q3ei.isQ2))
        size /= 8;
    mAudioData = new byte[size];
    int sampleFreq = 44100;
    // (ASCII-art drawing of an OUYA console omitted.)
    int bufferSize = Math.max((Q3EUtils.isOuya) ? 0 : 3 * size, AudioTrack.getMinBufferSize(sampleFreq,
            AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT));
    mAudioTrack = new Q3EAudioTrack(AudioManager.STREAM_MUSIC, sampleFreq,
            AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT, bufferSize,
            AudioTrack.MODE_STREAM);
    mAudioTrack.play();
    // Buffer duration in nanoseconds: 16-bit stereo is 2 bytes per sample * 2 channels.
    long sleeptime = (size * 1000000000L) / (2 * 2 * sampleFreq);
    ScheduledThreadPoolExecutor stpe = new ScheduledThreadPoolExecutor(5);
    stpe.scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            if (reqThreadrunning) {
                Q3EJNI.requestAudioData();
            }
        }
    }, 0, sleeptime, TimeUnit.NANOSECONDS);
}
From source file:com.ibm.watson.developer_cloud.android.text_to_speech.v1.TTSUtility.java
private void initPlayer() {
    stopTtsPlayer();
    // IMPORTANT: minimum required buffer size for the successful creation of an
    // AudioTrack instance in streaming mode.
    int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);

    synchronized (this) {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
        if (audioTrack != null)
            audioTrack.play();
    }
}
From source file:net.sf.asap.Player.java
public void run() {
    int config = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    int len = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, config, AudioFormat.ENCODING_PCM_8BIT);
    if (len < 16384)
        len = 16384;
    final byte[] buffer = new byte[len];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, config,
            AudioFormat.ENCODING_PCM_8BIT, len, AudioTrack.MODE_STREAM);
    audioTrack.play();
    for (;;) {
        synchronized (this) {
            if (len < buffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        synchronized (asap) {
            len = asap.generate(buffer, buffer.length, ASAPSampleFormat.U8);
        }
        audioTrack.write(buffer, 0, len);
    }
}
From source file:com.example.rttytranslator.Dsp_service.java
public void startAudio() {
    if (!_enableDecoder)
        return;

    //boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
    System.out.println("isRecording: " + isRecording);

    if (!isRecording) {
        isRecording = true;

        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);

        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);
        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);

        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }

        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);
        }

        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        ct.start();
    }
}
From source file:com.tt.engtrain.showcontent.ContentListItemActivity.java
@Override
protected void onCreate(final Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_baselist);
    mSpeechEvaluator = SpeechEvaluator.createEvaluator(ContentListItemActivity.this, null);
    mToast = Toast.makeText(this, "", Toast.LENGTH_SHORT);
    int iMinBufSize = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, iMinBufSize, AudioTrack.MODE_STREAM);
    mTts = SpeechSynthesizer.createSynthesizer(this, mTtsInitListener);
    initData();
    initListView();
    // initspker();
}
From source file:com.xperia64.timidityae.Globals.java
public static void reloadSettings(Activity c, AssetManager assets) {
    prefs = PreferenceManager.getDefaultSharedPreferences(c);
    firstRun = prefs.getBoolean("tplusFirstRun", true);
    theme = Integer.parseInt(prefs.getString("fbTheme", "1"));
    showHiddenFiles = prefs.getBoolean("hiddenSwitch", false);
    defaultFolder = prefs.getString("defaultPath", Environment.getExternalStorageDirectory().getAbsolutePath());
    dataFolder = prefs.getString("dataDir", Environment.getExternalStorageDirectory() + "/TimidityAE/");
    manConfig = prefs.getBoolean("manualConfig", false);
    JNIHandler.currsamp = defSamp = Integer.parseInt(prefs.getString("tplusResamp", "0"));
    mono = Integer.parseInt(prefs.getString("sdlChanValue", "2"));
    sixteen = true; // prefs.getString("tplusBits", "16").equals("16");
    // Default the sample rate to the device's native output rate for streaming mode.
    aRate = Integer.parseInt(prefs.getString("tplusRate",
            Integer.toString(AudioTrack.getNativeOutputSampleRate(AudioTrack.MODE_STREAM))));
    buff = Integer.parseInt(prefs.getString("tplusBuff", "192000"));
    showVideos = prefs.getBoolean("videoSwitch", true);
    shouldLolNag = prefs.getBoolean("shouldLolNag", true);
    keepWav = prefs.getBoolean("keepPartialWav", false);
    useDefaultBack = prefs.getBoolean("useDefBack", false);
    compressCfg = prefs.getBoolean("compressCfg", true);
    reShuffle = prefs.getBoolean("reShuffle", false);
    freeInsts = prefs.getBoolean("tplusUnload", true);
    preserveSilence = prefs.getBoolean("tplusSilKey", true);
    if (!onlyNative)
        nativeMidi = prefs.getBoolean("nativeMidiSwitch", false);
    else
        nativeMidi = true;
}
From source file:uk.co.armedpineapple.cth.SDLActivity.java
public static Object audioInit(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
    int channelConfig = isStereo ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    int frameSize = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

    Log.v("SDL", "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit")
            + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");

    // Let the user pick a larger buffer if they really want -- but ye
    // gods they probably shouldn't, the minimums are horrifyingly high
    // latency already
    desiredFrames = Math.max(desiredFrames,
            (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat) + frameSize - 1) / frameSize);

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat,
            desiredFrames * frameSize, AudioTrack.MODE_STREAM);

    audioStartThread();

    Log.v("SDL", "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " "
            + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit") + " "
            + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");

    if (is16Bit) {
        audioBuffer = new short[desiredFrames * (isStereo ? 2 : 1)];
    } else {
        audioBuffer = new byte[desiredFrames * (isStereo ? 2 : 1)];
    }
    return audioBuffer;
}
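audioStartThread() is not included in this excerpt. A minimal sketch of what such a feeder thread could look like is shown below; the mAudioRunning flag and the nativeFillAudioBuffer() JNI callback are hypothetical names for illustration, not taken from this source:

protected static void audioStartThread() {
    mAudioThread = new Thread(new Runnable() {
        public void run() {
            mAudioTrack.play();
            while (mAudioRunning) {          // hypothetical shutdown flag
                nativeFillAudioBuffer();     // hypothetical JNI call that fills audioBuffer
                if (audioBuffer instanceof short[]) {
                    short[] buf = (short[]) audioBuffer;
                    mAudioTrack.write(buf, 0, buf.length);
                } else {
                    byte[] buf = (byte[]) audioBuffer;
                    mAudioTrack.write(buf, 0, buf.length);
                }
            }
        }
    });
    mAudioThread.start();
}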
From source file:net.sf.asap.PlayerService.java
public void run() {
    // read file
    String filename = uri.getPath();
    byte[] module = new byte[ASAPInfo.MAX_MODULE_LENGTH];
    int moduleLen;
    try {
        InputStream is;
        switch (uri.getScheme()) {
        case "file":
            if (Util.isZip(filename)) {
                String zipFilename = filename;
                filename = uri.getFragment();
                is = new ZipInputStream(zipFilename, filename);
            } else
                is = new FileInputStream(filename);
            break;
        case "http":
            is = httpGet(uri);
            break;
        default:
            throw new FileNotFoundException(uri.toString());
        }
        moduleLen = Util.readAndClose(is, module);
    } catch (IOException ex) {
        showError(R.string.error_reading_file);
        return;
    }

    // load file
    try {
        asap.load(filename, module, moduleLen);
        info = asap.getInfo();
        switch (song) {
        case SONG_DEFAULT:
            song = info.getDefaultSong();
            break;
        case SONG_LAST:
            song = info.getSongs() - 1;
            break;
        default:
            break;
        }
        playSong();
    } catch (Exception ex) {
        showError(R.string.invalid_file);
        return;
    }

    PendingIntent contentIntent = PendingIntent.getActivity(this, 0, new Intent(this, Player.class), 0);
    String title = info.getTitleOrFilename();
    Notification notification = new Notification(R.drawable.icon, title, System.currentTimeMillis());
    notification.flags |= Notification.FLAG_ONGOING_EVENT;
    notification.setLatestEventInfo(this, title, info.getAuthor(), contentIntent);
    startForegroundCompat(NOTIFICATION_ID, notification);

    // playback
    int channelConfig = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    int bufferLen = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT) >> 1;
    if (bufferLen < 16384)
        bufferLen = 16384;
    final byte[] byteBuffer = new byte[bufferLen << 1];
    final short[] shortBuffer = new short[bufferLen];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT, bufferLen << 1, AudioTrack.MODE_STREAM);
    audioTrack.play();

    for (;;) {
        synchronized (this) {
            if (bufferLen < shortBuffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        synchronized (asap) {
            int pos = seekPosition;
            if (pos >= 0) {
                seekPosition = -1;
                try {
                    asap.seek(pos);
                } catch (Exception ex) {
                }
            }
            bufferLen = asap.generate(byteBuffer, byteBuffer.length, ASAPSampleFormat.S16_L_E) >> 1;
        }
        // Convert little-endian bytes to shorts for AudioTrack.write(short[], ...).
        for (int i = 0; i < bufferLen; i++)
            shortBuffer[i] = (short) ((byteBuffer[i << 1] & 0xff) | byteBuffer[i << 1 | 1] << 8);
        audioTrack.write(shortBuffer, 0, bufferLen);
    }
}