List of usage examples for android.media AudioFormat ENCODING_PCM_16BIT
int ENCODING_PCM_16BIT
To view the source code for android.media.AudioFormat.ENCODING_PCM_16BIT, click the Source Link.
From source file:Main.java
/**
 * Returns the PCM sample encoding used by default in this app.
 *
 * @return {@link AudioFormat#ENCODING_PCM_16BIT}
 */
public static final int getDefaultEncodingFormat() {
    return AudioFormat.ENCODING_PCM_16BIT;
}
From source file:Main.java
/**
 * Computes the minimum AudioTrack buffer size for mono, 16-bit PCM playback
 * at the given sample rate.
 *
 * @param sampleRate playback sample rate in Hz
 * @return minimum buffer size in bytes, or an AudioTrack error code
 *         (negative) if the parameters are not supported
 */
public static final int getMinimumBufferSize(int sampleRate) {
    final int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
    final int encoding = AudioFormat.ENCODING_PCM_16BIT;
    return AudioTrack.getMinBufferSize(sampleRate, channelConfig, encoding);
}
From source file:Main.java
public static final int getSampleRate(boolean lowest) { int sampleRate = -1; for (int rate : POSSIBLE_SAMPLE_RATES) { int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_DEFAULT, AudioFormat.ENCODING_PCM_16BIT); if (bufferSize > 0) { // buffer size is valid, Sample rate supported sampleRate = rate;/*from w w w.j a va2 s. co m*/ if (lowest) { return sampleRate; } } } return sampleRate; }
From source file:Main.java
/**
 * Converts an Android {@code AudioFormat.ENCODING_PCM_*} constant to the
 * corresponding number of bits per sample.
 *
 * @param androidEncoding an {@code AudioFormat.ENCODING_PCM_*} constant
 * @return 16 for {@code ENCODING_PCM_16BIT}; 8 for {@code ENCODING_PCM_8BIT}
 *         and for any unrecognized value (matches the original default)
 */
public static int getPcmEncoding(int androidEncoding) {
    if (androidEncoding == AudioFormat.ENCODING_PCM_16BIT) {
        return 16;
    }
    // ENCODING_PCM_8BIT and any unknown constant both fall back to 8.
    return 8;
}
From source file:Main.java
/**
 * Converts a bits-per-sample count to the matching Android
 * {@code AudioFormat.ENCODING_*} constant.
 *
 * @param bitsPerSample bits in one audio sample, typically 8 or 16
 * @return {@code ENCODING_PCM_8BIT} for 8, {@code ENCODING_PCM_16BIT} for 16,
 *         {@code ENCODING_DEFAULT} for anything else
 */
public static int getAndroidPcmEncoding(int bitsPerSample) {
    if (bitsPerSample == 8) {
        return AudioFormat.ENCODING_PCM_8BIT;
    }
    if (bitsPerSample == 16) {
        return AudioFormat.ENCODING_PCM_16BIT;
    }
    return AudioFormat.ENCODING_DEFAULT;
}
From source file:Main.java
/**
 * Returns the minimum AudioRecord buffer size for the highest sample rate
 * this device supports, using mono input and 16-bit PCM.
 *
 * @return minimum buffer size in bytes, or an AudioRecord error code
 */
public static int getBufferSize() {
    final int rate = getMaxSampleRate();
    return AudioRecord.getMinBufferSize(rate,
            android.media.AudioFormat.CHANNEL_IN_MONO,
            android.media.AudioFormat.ENCODING_PCM_16BIT);
}
From source file:Main.java
/**
 * Probes common sample rates from highest to lowest and returns the first
 * one AudioRecord accepts for mono 16-bit PCM input.
 *
 * @return the highest supported sample rate in Hz, or -1 if none works
 */
public static int getMaxSampleRate() {
    final int[] candidates = { 48000, 44100, 22050, 16000, 11025, 8000 };
    for (final int rate : candidates) {
        final int bufferSize = AudioRecord.getMinBufferSize(rate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize > 0) {
            // First (i.e. highest) rate with a valid buffer size wins.
            return rate;
        }
    }
    return -1;
}
From source file:zlyh.dmitry.recaller.threading.PlayBlockThread.java
@Override public void run() { AudioTrack audioTrack = null;// www .j av a 2 s.com FileInputStream in = null; try { File rawpcm = new File(path); if (!rawpcm.exists()) { this.interrupt(); } togglePlaying(true); final int audioLength = (int) rawpcm.length(); final int minBufferSize = AudioRecord.getMinBufferSize(RecordRunnable.frequency, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RecordRunnable.frequency, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM); final int block = 256 * 1024; byte[] byteData = new byte[block]; try { in = new FileInputStream(rawpcm); } catch (FileNotFoundException e) { e.printStackTrace(); this.interrupt(); } if (in != null) { try { int bytesread = 0; int offset; audioTrack.play(); while (bytesread < audioLength && !isInterrupted()) { offset = in.read(byteData, 0, block); if (offset != -1) { audioTrack.write(byteData, 0, offset); bytesread += offset; } else { break; } } in.close(); togglePlaying(false); if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) { audioTrack.stop(); } if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) { audioTrack.release(); } } catch (Exception e) { e.printStackTrace(); try { in.close(); } catch (IOException e1) { e1.printStackTrace(); } if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) { audioTrack.stop(); } if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) { audioTrack.release(); } togglePlaying(false); } } } catch (Exception e) { e.printStackTrace(); if (audioTrack != null) { if (audioTrack.getState() == AudioTrack.PLAYSTATE_PLAYING) { audioTrack.stop(); } if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) { audioTrack.release(); } } if (in != null) { try { in.close(); } catch (IOException e1) { e1.printStackTrace(); } } togglePlaying(false); } }
From source file:com.inmobi.ultrapush.InfoRecActivity.java
@Override protected void onResume() { super.onResume(); TextView tv = (TextView) findViewById(R.id.textview_info_rec); tv.setMovementMethod(new ScrollingMovementMethod()); tv.setText("Testing..."); // TODO: No use... tv.invalidate();//w w w. j a v a 2s . c om // Show supported sample rate and corresponding minimum buffer size. String[] requested = new String[] { "8000", "11025", "16000", "22050", "32000", "44100", "48000", "96000" }; String st = "sampleRate minBufSize\n"; ArrayList<String> validated = new ArrayList<String>(); for (String s : requested) { int rate = Integer.parseInt(s); int minBufSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); if (minBufSize != AudioRecord.ERROR_BAD_VALUE) { validated.add(s); st += s + " \t" + Integer.toString(minBufSize) + "\n"; } } requested = validated.toArray(new String[0]); tv.setText(st); tv.invalidate(); // Test audio source String[] audioSourceString = new String[] { "DEFAULT", "MIC", "VOICE_UPLINK", "VOICE_DOWNLINK", "VOICE_CALL", "CAMCORDER", "VOICE_RECOGNITION" }; int[] audioSourceId = new int[] { MediaRecorder.AudioSource.DEFAULT, // Default audio source MediaRecorder.AudioSource.MIC, // Microphone audio source MediaRecorder.AudioSource.VOICE_UPLINK, // Voice call uplink (Tx) audio source MediaRecorder.AudioSource.VOICE_DOWNLINK, // Voice call downlink (Rx) audio source MediaRecorder.AudioSource.VOICE_CALL, // Voice call uplink + downlink audio source MediaRecorder.AudioSource.CAMCORDER, // Microphone audio source with same orientation as camera if available, the main device microphone otherwise (apilv7) MediaRecorder.AudioSource.VOICE_RECOGNITION, // Microphone audio source tuned for voice recognition if available, behaves like DEFAULT otherwise. (apilv7) // MediaRecorder.AudioSource.VOICE_COMMUNICATION, // Microphone audio source tuned for voice communications such as VoIP. 
It will for instance take advantage of echo cancellation or automatic gain control if available. It otherwise behaves like DEFAULT if no voice processing is applied. (apilv11) // MediaRecorder.AudioSource.REMOTE_SUBMIX, // Audio source for a submix of audio streams to be presented remotely. (apilv19) }; tv.append("\n-- Audio Source Test --"); for (String s : requested) { int sampleRate = Integer.parseInt(s); int recBufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); tv.append("\n(" + Integer.toString(sampleRate) + "Hz, MONO, 16BIT)\n"); for (int iass = 0; iass < audioSourceId.length; iass++) { st = ""; // wait for AudioRecord fully released... try { Thread.sleep(100); } catch (InterruptedException e) { e.printStackTrace(); } AudioRecord record; record = new AudioRecord(audioSourceId[iass], sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, recBufferSize); if (record.getState() == AudioRecord.STATE_INITIALIZED) { st += audioSourceString[iass] + " successed"; int as = record.getAudioSource(); if (as != audioSourceId[iass]) { int i = 0; while (i < audioSourceId.length) { if (as == audioSourceId[iass]) { break; } i++; } if (i >= audioSourceId.length) { st += "(auto set to \"unknown source\")"; } else { st += "(auto set to " + audioSourceString[i] + ")"; } } st += "\n"; } else { st += audioSourceString[iass] + " failed\n"; } record.release(); record = null; tv.append(st); tv.invalidate(); } } }
From source file:com.n0n3m4.q3e.Q3ECallbackObj.java
public void init(int size) { if (mAudioTrack != null) return;//from ww w .j a v a2s. c o m if ((Q3EUtils.q3ei.isQ3) || (Q3EUtils.q3ei.isRTCW) || (Q3EUtils.q3ei.isQ1) || (Q3EUtils.q3ei.isQ2)) size /= 8; mAudioData = new byte[size]; int sampleFreq = 44100; /* _.---"'"""""'`--.._ _,.-' `-._ _,." -. .-"" ___...---------.._ `. `---'"" `-. `. `. \ `. \ \ \ . \ | . | | _________ | | _,.-'" `"'-.._ : | _,-' `-._.' | _.' OUYA `. ' _.-. _,+......__ `. . .' `-"' `"-.,-""--._ \ / / ,' | __ \ \ / ` .. +" ) \ \ / `.' \ ,-"`-.. | | \ / / " | .' \ '. _.' .' |,.."--"""--..| " | `""`. | ," `-._ | | | .' `-._+ | | / `. / | | ` ' | / | `-.....--.__ | | / | `./ "| / `-.........--.- ' | ,' ' /| || `.' ,' .' |_,-+ / / ' '.`. _,' ,' `. | ' _,.. / / `. `"'"'""'" _,^--------"`. | `.'_ _/ /... _.`:.________,.' `._,.-..| "' `.__.' `._ / "' */ int bufferSize = Math.max((Q3EUtils.isOuya) ? 0 : 3 * size, AudioTrack.getMinBufferSize(sampleFreq, AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT)); mAudioTrack = new Q3EAudioTrack(AudioManager.STREAM_MUSIC, sampleFreq, AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM); mAudioTrack.play(); long sleeptime = (size * 1000000000l) / (2 * 2 * sampleFreq); ScheduledThreadPoolExecutor stpe = new ScheduledThreadPoolExecutor(5); stpe.scheduleAtFixedRate(new Runnable() { @Override public void run() { if (reqThreadrunning) { Q3EJNI.requestAudioData(); } } }, 0, sleeptime, TimeUnit.NANOSECONDS); }