List of usage examples for android.media AudioFormat CHANNEL_CONFIGURATION_STEREO
int CHANNEL_CONFIGURATION_STEREO
To view the source code for android.media.AudioFormat.CHANNEL_CONFIGURATION_STEREO, click the Source Link below.
From source file:Main.java
/** * Convert Android AudioFormat.CHANNEL_CONFIGURATION constants to integers * /*from www. j av a 2 s. co m*/ * @param androidChannels * Android AudioFormat constant */ public static int getChannelConfig(int androidChannels) { switch (androidChannels) { case AudioFormat.CHANNEL_CONFIGURATION_MONO: return 1; case AudioFormat.CHANNEL_CONFIGURATION_STEREO: return 2; default: return 1; } }
From source file:Main.java
/** * Convert integers to AudioFormat.CHANNEL_CONFIGURATION constants * /*w w w .j ava2 s .c o m*/ * @param numChannels * number of channels, typically 1 or 2 */ public static int getAndroidChannelConfig(int numChannels) { switch (numChannels) { case 1: return AudioFormat.CHANNEL_CONFIGURATION_MONO; case 2: return AudioFormat.CHANNEL_CONFIGURATION_STEREO; default: return AudioFormat.CHANNEL_CONFIGURATION_DEFAULT; } }
From source file:com.n0n3m4.q3e.Q3ECallbackObj.java
/**
 * Lazily creates the game-audio output track and starts a periodic pump that
 * asks the native side for more audio data.
 *
 * @param size requested audio buffer size in bytes (may be shrunk for some engines)
 */
public void init(int size) {
    // Already initialized — this method is one-shot.
    if (mAudioTrack != null)
        return;
    // NOTE(review): these engines apparently request a much larger size than
    // needed, so it is divided down — confirm against the native callers.
    if ((Q3EUtils.q3ei.isQ3) || (Q3EUtils.q3ei.isRTCW) || (Q3EUtils.q3ei.isQ1) || (Q3EUtils.q3ei.isQ2))
        size /= 8;
    mAudioData = new byte[size];
    int sampleFreq = 44100;
    // On OUYA the platform minimum alone is used; elsewhere at least 3x the
    // submission size to reduce underruns. (An ASCII-art OUYA logo lived here.)
    int bufferSize = Math.max((Q3EUtils.isOuya) ? 0 : 3 * size,
            AudioTrack.getMinBufferSize(sampleFreq, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT));
    mAudioTrack = new Q3EAudioTrack(AudioManager.STREAM_MUSIC, sampleFreq,
            AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSize, AudioTrack.MODE_STREAM);
    mAudioTrack.play();
    // Nanoseconds to play one 'size'-byte buffer: 2 bytes/sample * 2 channels
    // at sampleFreq frames/sec.
    long sleeptime = (size * 1000000000l) / (2 * 2 * sampleFreq);
    // NOTE(review): this executor is never shut down and 5 threads is more
    // than one periodic task needs — worth revisiting.
    ScheduledThreadPoolExecutor stpe = new ScheduledThreadPoolExecutor(5);
    stpe.scheduleAtFixedRate(new Runnable() {
        @Override
        public void run() {
            // Pull more PCM from native code only while playback is active.
            if (reqThreadrunning) {
                Q3EJNI.requestAudioData();
            }
        }
    }, 0, sleeptime, TimeUnit.NANOSECONDS);
}
From source file:net.sf.asap.Player.java
/**
 * Playback thread body: streams 8-bit PCM generated by ASAP into an
 * AudioTrack until stopped. Pausing and stopping are signaled by other
 * threads via {@code notify()} on this object.
 */
public void run() {
    // Channel layout follows the loaded module.
    int config = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    int len = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, config, AudioFormat.ENCODING_PCM_8BIT);
    // Enforce a floor so the write buffer is never pathologically small.
    if (len < 16384)
        len = 16384;
    final byte[] buffer = new byte[len];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, config,
            AudioFormat.ENCODING_PCM_8BIT, len, AudioTrack.MODE_STREAM);
    audioTrack.play();
    for (;;) {
        synchronized (this) {
            // len < buffer.length means the last generate() hit end-of-song;
            // block here (or while paused) until another thread notifies us.
            if (len < buffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                    // Ignored: the stop flag below is the real exit signal.
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        // asap is shared with the UI thread; guard the generator call.
        synchronized (asap) {
            len = asap.generate(buffer, buffer.length, ASAPSampleFormat.U8);
        }
        audioTrack.write(buffer, 0, len);
    }
}
From source file:com.tt.engtrain.showcontent.ContentListItemActivity.java
@Override protected void onCreate(final Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_baselist); mSpeechEvaluator = SpeechEvaluator.createEvaluator(ContentListItemActivity.this, null); mToast = Toast.makeText(this, "", Toast.LENGTH_SHORT); int iMinBufSize = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT); audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT, iMinBufSize, AudioTrack.MODE_STREAM); mTts = SpeechSynthesizer.createSynthesizer(this, mTtsInitListener); initData();/*from w w w.ja va2s . c o m*/ initListView(); // initspker(); }
From source file:net.sf.asap.PlayerService.java
/**
 * Service playback thread: reads the module addressed by {@code uri}
 * (plain file, zip member, or HTTP), loads it into ASAP, posts an ongoing
 * notification, then streams 16-bit PCM to an AudioTrack until stopped.
 * Pause, seek, and stop are signaled by other threads via {@code notify()}.
 */
public void run() {
    // ---- read file ----
    String filename = uri.getPath();
    byte[] module = new byte[ASAPInfo.MAX_MODULE_LENGTH];
    int moduleLen;
    try {
        InputStream is;
        switch (uri.getScheme()) {
        case "file":
            if (Util.isZip(filename)) {
                // For zip URIs the fragment names the member inside the archive.
                String zipFilename = filename;
                filename = uri.getFragment();
                is = new ZipInputStream(zipFilename, filename);
            } else
                is = new FileInputStream(filename);
            break;
        case "http":
            is = httpGet(uri);
            break;
        default:
            throw new FileNotFoundException(uri.toString());
        }
        moduleLen = Util.readAndClose(is, module);
    } catch (IOException ex) {
        showError(R.string.error_reading_file);
        return;
    }
    // ---- load file ----
    try {
        asap.load(filename, module, moduleLen);
        info = asap.getInfo();
        // Resolve the sentinel song indexes to concrete subsong numbers.
        switch (song) {
        case SONG_DEFAULT:
            song = info.getDefaultSong();
            break;
        case SONG_LAST:
            song = info.getSongs() - 1;
            break;
        default:
            break;
        }
        playSong();
    } catch (Exception ex) {
        showError(R.string.invalid_file);
        return;
    }
    // Ongoing notification keeping the service in the foreground.
    // NOTE(review): Notification(int,CharSequence,long) and
    // setLatestEventInfo are long-deprecated Android APIs.
    PendingIntent contentIntent = PendingIntent.getActivity(this, 0, new Intent(this, Player.class), 0);
    String title = info.getTitleOrFilename();
    Notification notification = new Notification(R.drawable.icon, title, System.currentTimeMillis());
    notification.flags |= Notification.FLAG_ONGOING_EVENT;
    notification.setLatestEventInfo(this, title, info.getAuthor(), contentIntent);
    startForegroundCompat(NOTIFICATION_ID, notification);
    // ---- playback ----
    int channelConfig = info.getChannels() == 1 ?
            AudioFormat.CHANNEL_CONFIGURATION_MONO : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    // bufferLen counts 16-bit samples, hence the >>1 from the byte size.
    int bufferLen = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT) >> 1;
    if (bufferLen < 16384)
        bufferLen = 16384;
    final byte[] byteBuffer = new byte[bufferLen << 1];
    final short[] shortBuffer = new short[bufferLen];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT, bufferLen << 1, AudioTrack.MODE_STREAM);
    audioTrack.play();
    for (;;) {
        synchronized (this) {
            // A short fill from the last generate() means end-of-song; block
            // there (or while paused) until another thread notifies us.
            if (bufferLen < shortBuffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                    // Ignored: the stop flag below is the real exit signal.
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        // asap is shared with the UI thread; seek and generate under its lock.
        synchronized (asap) {
            int pos = seekPosition;
            if (pos >= 0) {
                seekPosition = -1;
                try {
                    asap.seek(pos);
                } catch (Exception ex) {
                    // Best-effort seek: an invalid position is simply ignored.
                }
            }
            bufferLen = asap.generate(byteBuffer, byteBuffer.length, ASAPSampleFormat.S16_L_E) >> 1;
        }
        // Repack little-endian byte pairs into shorts for AudioTrack.write.
        for (int i = 0; i < bufferLen; i++)
            shortBuffer[i] = (short) ((byteBuffer[i << 1] & 0xff) | byteBuffer[i << 1 | 1] << 8);
        audioTrack.write(shortBuffer, 0, bufferLen);
    }
}