List of usage examples for android.media.AudioTrack.release()
public void release()
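release() frees the native resources backing an AudioTrack; once it has been called the instance can no longer be used and should be discarded. Before the full examples below, here is a minimal sketch of the usual create, write, play, release lifecycle for a static buffer. The playPcmOnce helper, its parameters, and the wait step are illustrative assumptions rather than code taken from either example.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

// Hypothetical helper: play a normalized 16-bit mono PCM buffer once, then release the track.
void playPcmOnce(byte[] pcm16MonoData, int sampleRate) {
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            pcm16MonoData.length, AudioTrack.MODE_STATIC);
    try {
        track.write(pcm16MonoData, 0, pcm16MonoData.length); // load the static buffer
        track.play();
        // ... wait until playback has finished (see the examples below) ...
        track.stop();
    } finally {
        track.release(); // free the native resources; the track is unusable afterwards
    }
}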
From source file: net.reichholf.dreamdroid.fragment.SignalFragment.java
void playSound(double freqOfTone) {
    double duration = 0.1; // seconds
    int sampleRate = 8000; // samples per second
    double dnumSamples = duration * sampleRate;
    dnumSamples = Math.ceil(dnumSamples);
    int numSamples = (int) dnumSamples;
    double[] sample = new double[numSamples];
    byte[] generatedSnd = new byte[2 * numSamples];

    // Fill the sample array
    for (int i = 0; i < numSamples; ++i) {
        sample[i] = Math.sin(freqOfTone * 2 * Math.PI * i / (sampleRate));
    }

    // Convert to a 16 bit PCM sound array; assumes the sample buffer is normalized.
    int idx = 0;
    int i = 0;
    int ramp = numSamples / 20; // Amplitude ramp as a percent of sample count

    for (i = 0; i < numSamples; ++i) {
        if (i < ramp) {
            // Ramp amplitude up to maximum (to avoid clicks)
            double dVal = sample[i];
            final short val = (short) (dVal * 32767 * i / ramp);
            // In 16 bit PCM, the first byte is the low order byte
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        } else if (i < numSamples - ramp) {
            // Max amplitude for most of the samples: scale to maximum amplitude
            double dVal = sample[i];
            final short val = (short) (dVal * 32767);
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        } else {
            // Ramp amplitude down to zero
            double dVal = sample[i];
            final short val = (short) (dVal * 32767 * (numSamples - i) / ramp);
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        }
    }

    AudioTrack audioTrack = null;
    try {
        // Get the audio track
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, numSamples * 2, AudioTrack.MODE_STATIC);
        audioTrack.write(generatedSnd, 0, generatedSnd.length); // Load the track
        audioTrack.play(); // Play the track
    } catch (Exception e) {
        // Ignore failures to create or start the track
    }

    // Monitor the playback head position to find out when playback is done
    int x = 0;
    do {
        if (audioTrack != null)
            x = audioTrack.getPlaybackHeadPosition();
        else
            x = numSamples;
    } while (x < numSamples);

    if (audioTrack != null)
        audioTrack.release(); // Track play done. Release track.
}
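The busy-wait on getPlaybackHeadPosition() above works for a short MODE_STATIC clip, but polling ties up the calling thread. As a sketch of a callback-based alternative, assuming the same audioTrack and numSamples variables as in the example and a creating thread that has a Looper (or the two-argument listener overload that takes a Handler), the track can release itself once playback reaches a marker at the last frame:

// Sketch: release the track from a marker callback instead of polling.
audioTrack.setNotificationMarkerPosition(numSamples); // marker position in frames
audioTrack.setPlaybackPositionUpdateListener(new AudioTrack.OnPlaybackPositionUpdateListener() {
    @Override
    public void onMarkerReached(AudioTrack track) {
        // Playback of the static buffer has reached the last frame
        track.stop();
        track.release();
    }

    @Override
    public void onPeriodicNotification(AudioTrack track) {
        // Not used here
    }
});
audioTrack.play();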
From source file: zlyh.dmitry.recaller.threading.PlayBlockThread.java
@Override
public void run() {
    AudioTrack audioTrack = null;
    FileInputStream in = null;
    try {
        File rawpcm = new File(path);
        if (!rawpcm.exists()) {
            this.interrupt();
        }
        togglePlaying(true);

        final int audioLength = (int) rawpcm.length();
        final int minBufferSize = AudioRecord.getMinBufferSize(RecordRunnable.frequency,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RecordRunnable.frequency,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
                AudioTrack.MODE_STREAM);

        final int block = 256 * 1024;
        byte[] byteData = new byte[block];

        try {
            in = new FileInputStream(rawpcm);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            this.interrupt();
        }

        if (in != null) {
            try {
                int bytesread = 0;
                int offset;
                audioTrack.play();
                // Stream the raw PCM file into the track in blocks
                while (bytesread < audioLength && !isInterrupted()) {
                    offset = in.read(byteData, 0, block);
                    if (offset != -1) {
                        audioTrack.write(byteData, 0, offset);
                        bytesread += offset;
                    } else {
                        break;
                    }
                }
                in.close();
                togglePlaying(false);
                if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                    audioTrack.stop();
                }
                if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                    audioTrack.release();
                }
            } catch (Exception e) {
                e.printStackTrace();
                try {
                    in.close();
                } catch (IOException e1) {
                    e1.printStackTrace();
                }
                if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                    audioTrack.stop();
                }
                if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                    audioTrack.release();
                }
                togglePlaying(false);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        if (audioTrack != null) {
            if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
                audioTrack.stop();
            }
            if (audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
                audioTrack.release();
            }
        }
        if (in != null) {
            try {
                in.close();
            } catch (IOException e1) {
                e1.printStackTrace();
            }
        }
        togglePlaying(false);
    }
}
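The example above repeats the stop/release/close cleanup in every catch branch. A try/finally restructuring, sketched below with the path, rawpcm, RecordRunnable.frequency, minBufferSize and togglePlaying() identifiers assumed from the example, guarantees that release() runs exactly once on every exit path:

// Sketch: consolidate cleanup so release() is reached on every exit path.
AudioTrack audioTrack = null;
FileInputStream in = null;
try {
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RecordRunnable.frequency,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize, AudioTrack.MODE_STREAM);
    in = new FileInputStream(rawpcm);
    // ... read from in and write to audioTrack as in the example above ...
} catch (Exception e) {
    e.printStackTrace();
} finally {
    if (audioTrack != null) {
        if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            audioTrack.stop();
        }
        audioTrack.release(); // release() is safe to call in any state
    }
    if (in != null) {
        try {
            in.close();
        } catch (IOException ignored) {
        }
    }
    togglePlaying(false);
}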