Example usage for android.media AudioTrack MODE_STATIC

List of usage examples for android.media AudioTrack MODE_STATIC

Introduction

On this page you can find usage examples for android.media AudioTrack MODE_STATIC.

Prototype

int MODE_STATIC

Document

Creation mode where audio data is transferred from Java to the native layer only once before the audio starts playing.
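
For orientation, here is a minimal sketch of the MODE_STATIC pattern before the full examples below: the track is sized to hold the whole clip, the buffer is written once, and playback then runs from the preloaded data. The method name playStaticClip and the PCM input are placeholders; the constructor is the same legacy stream-type form used in the examples on this page.

void playStaticClip(byte[] pcm, int sampleRate) {
    // In static mode the buffer size passed to the constructor must hold the entire clip.
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, pcm.length, AudioTrack.MODE_STATIC);
    track.write(pcm, 0, pcm.length); // transfer the audio data to the native layer once
    track.play();                    // playback starts from the preloaded buffer
    // release() the track once playback has finished
}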

Usage

From source file: net.reichholf.dreamdroid.fragment.SignalFragment.java

void playSound(double freqOfTone) {
    double duration = 0.1; // seconds
    int sampleRate = 8000; // samples per second (Hz)

    double dnumSamples = duration * sampleRate;
    dnumSamples = Math.ceil(dnumSamples);
    int numSamples = (int) dnumSamples;
    double sample[] = new double[numSamples];
    byte generatedSnd[] = new byte[2 * numSamples];

    for (int i = 0; i < numSamples; ++i) { // Fill the sample array
        sample[i] = Math.sin(freqOfTone * 2 * Math.PI * i / (sampleRate));
    }

    // convert to 16 bit pcm sound array
    // assumes the sample buffer is normalized.
    int idx = 0;
    int i = 0;

    int ramp = numSamples / 20; // Amplitude ramp as a percent of the sample count

    for (i = 0; i < numSamples; ++i) { // Ramp amplitude up (to avoid clicks)
        if (i < ramp) {
            double dVal = sample[i];
            // Ramp up to maximum
            final short val = (short) ((dVal * 32767 * i / ramp));
            // in 16 bit wav PCM, first byte is the low order byte
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        } else if (i < numSamples - ramp) {
            // Max amplitude for most of the samples
            double dVal = sample[i];
            // scale to maximum amplitude
            final short val = (short) ((dVal * 32767));
            // in 16 bit wav PCM, first byte is the low order byte
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        } else {
            double dVal = sample[i];
            // Ramp down to zero
            final short val = (short) ((dVal * 32767 * (numSamples - i) / ramp));
            // in 16 bit wav PCM, first byte is the low order byte
            generatedSnd[idx++] = (byte) (val & 0x00ff);
            generatedSnd[idx++] = (byte) ((val & 0xff00) >>> 8);
        }
    }

    AudioTrack audioTrack = null; // Get audio track
    try {
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, numSamples * 2, AudioTrack.MODE_STATIC);
        // Load the track
        audioTrack.write(generatedSnd, 0, generatedSnd.length);
        audioTrack.play(); // Play the track
    } catch (Exception e) {
        // Ignored; if track creation failed, audioTrack stays null and the wait loop below exits immediately
    }

    int x = 0;
    do { // Monitor playback to find out when it is done
        if (audioTrack != null)
            x = audioTrack.getPlaybackHeadPosition();
        else
            x = numSamples;
    } while (x < numSamples);

    if (audioTrack != null)
        audioTrack.release(); // Track play done. Release track.
}

From source file: org.noise_planet.noisecapture.CalibrationLinearityActivity.java

private void playNewTrack() {

    double rms = dbToRms(99 - (splLoop++) * DB_STEP);
    short[] data = makeWhiteNoiseSignal(44100, rms);
    double[] fftCenterFreq = FFTSignalProcessing
            .computeFFTCenterFrequency(AudioProcess.REALTIME_SAMPLE_RATE_LIMITATION);
    FFTSignalProcessing fftSignalProcessing = new FFTSignalProcessing(44100, fftCenterFreq, 44100);
    fftSignalProcessing.addSample(data);
    whiteNoisedB = fftSignalProcessing.computeGlobalLeq();
    freqLeqStats.add(new LinearCalibrationResult(fftSignalProcessing.processSample(true, false, false)));
    LOGGER.info("Emit white noise of " + whiteNoisedB + " dB");
    if (audioTrack == null) {
        audioTrack = new AudioTrack(getAudioOutput(), 44100, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, data.length * (Short.SIZE / 8), AudioTrack.MODE_STATIC);
    } else {
        try {
            audioTrack.pause();
            audioTrack.flush();
        } catch (IllegalStateException ex) {
            // Ignore
        }
    }
    // write() returns the number of shorts written (frames, for mono); a loop count of -1 repeats indefinitely
    audioTrack.setLoopPoints(0, audioTrack.write(data, 0, data.length), -1);
    audioTrack.play();
}