Example usage for android.media AudioFormat ENCODING_PCM_8BIT

List of usage examples for android.media AudioFormat ENCODING_PCM_8BIT

Introduction

In this page you can find the example usage for android.media AudioFormat ENCODING_PCM_8BIT.

Prototype

int ENCODING_PCM_8BIT

To view the source code for android.media AudioFormat ENCODING_PCM_8BIT, click the Source Link below.

Click Source Link

Document

Audio data format: PCM 8 bit per sample.

Usage

From source file: Main.java

/**
 * Convert Android AudioFormat.ENCODING_PCM constants to integers
 * /*from  ww  w . j a v a  2s . co  m*/
 * @param androidEncoding
 *            Android AudioFormat constant
 */
public static int getPcmEncoding(int androidEncoding) {
    switch (androidEncoding) {
    case AudioFormat.ENCODING_PCM_8BIT:
        return 8;
    case AudioFormat.ENCODING_PCM_16BIT:
        return 16;
    default:
        return 8;
    }
}

From source file:Main.java

/**
 * Maps a sample bit depth to the matching Android
 * {@code AudioFormat.ENCODING_PCM_*} constant.
 *
 * @param bitsPerSample
 *            bits in a sample of audio, typically 8 or 16
 * @return {@code ENCODING_PCM_8BIT} for 8, {@code ENCODING_PCM_16BIT} for 16,
 *         {@code ENCODING_DEFAULT} for any other value
 */
public static int getAndroidPcmEncoding(int bitsPerSample) {
    if (bitsPerSample == 8) {
        return AudioFormat.ENCODING_PCM_8BIT;
    }
    if (bitsPerSample == 16) {
        return AudioFormat.ENCODING_PCM_16BIT;
    }
    return AudioFormat.ENCODING_DEFAULT;
}

From source file:net.sf.asap.Player.java

/**
 * Playback thread body: streams 8-bit unsigned PCM generated by ASAP into an
 * AudioTrack until the {@code stop} flag is set.
 *
 * <p>Blocks via {@code wait()} while paused, or after a short
 * {@code generate()} result (which signals the end of the song).
 */
public void run() {
    // Choose mono vs. stereo from the loaded module's channel count.
    int config = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    int len = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, config, AudioFormat.ENCODING_PCM_8BIT);
    if (len < 16384)
        len = 16384; // enforce a floor so streaming never uses a tiny buffer
    final byte[] buffer = new byte[len];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, config,
            AudioFormat.ENCODING_PCM_8BIT, len, AudioTrack.MODE_STREAM);
    audioTrack.play();

    for (;;) {
        synchronized (this) {
            // A short previous generate() (len < buffer.length) means the song
            // ended; in that case, or while paused, sleep until notified.
            // NOTE(review): this is an `if`, not a `while`, so a spurious
            // wakeup could resume playback early — confirm the notifiers only
            // signal on real state changes.
            if (len < buffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                    // Interruption is deliberately ignored: shutdown is
                    // signaled via the `stop` flag checked below, not via
                    // thread interruption.
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        // Generate the next chunk of unsigned 8-bit samples; lock on `asap`
        // because other threads presumably share the decoder (seek/song
        // change) — TODO confirm against the rest of Player.
        synchronized (asap) {
            len = asap.generate(buffer, buffer.length, ASAPSampleFormat.U8);
        }
        audioTrack.write(buffer, 0, len);
    }
}

From source file:com.xperia64.timidityae.Globals.java

/**
 * Probes which of the common sample rates the device's AudioTrack supports
 * for the given channel/bit-depth configuration.
 *
 * @param stereo  true for stereo output, false for mono
 * @param sixteen true for 16-bit PCM, false for 8-bit PCM
 * @return the supported sample rates, in ascending order
 */
public static int[] validRates(boolean stereo, boolean sixteen) {
    final int channelConfig = stereo ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    final int encoding = sixteen ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    final int[] candidates = { 8000, 11025, 16000, 22050, 44100, 48000, 88200, 96000 };

    ArrayList<Integer> supported = new ArrayList<Integer>();
    for (int candidate : candidates) {
        // A positive minimum buffer size means the device accepts this rate.
        if (AudioTrack.getMinBufferSize(candidate, channelConfig, encoding) > 0) {
            supported.add(candidate);
        }
    }

    int[] result = new int[supported.size()];
    for (int i = 0; i < result.length; i++) {
        result[i] = supported.get(i);
    }
    return result;
}

From source file:com.xperia64.timidityae.Globals.java

/**
 * Computes the minimum AudioTrack buffer size for each of the given sample
 * rates under the requested channel/bit-depth configuration.
 *
 * @param rates   sample rates to query (typically the result of validRates)
 * @param stereo  true for stereo output, false for mono
 * @param sixteen true for 16-bit PCM, false for 8-bit PCM
 * @return a map from sample rate to its minimum buffer size
 */
public static SparseIntArray validBuffers(int[] rates, boolean stereo, boolean sixteen) {
    final int channelConfig = stereo ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    final int encoding = sixteen ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    SparseIntArray minBuffers = new SparseIntArray();
    for (int rate : rates) {
        minBuffers.put(rate, AudioTrack.getMinBufferSize(rate, channelConfig, encoding));
    }
    return minBuffers;
}

From source file:uk.co.armedpineapple.cth.SDLActivity.java

/**
 * Creates and starts the SDL audio output track with the requested format,
 * then allocates the sample buffer SDL will fill.
 *
 * @param sampleRate    requested sample rate in Hz
 * @param is16Bit       true for 16-bit PCM, false for 8-bit PCM
 * @param isStereo      true for stereo output, false for mono
 * @param desiredFrames requested buffer length in frames; raised to the
 *                      platform minimum if too small
 * @return the allocated audio buffer — a {@code short[]} for 16-bit output,
 *         a {@code byte[]} for 8-bit output
 */
public static Object audioInit(int sampleRate, boolean is16Bit, boolean isStereo, int desiredFrames) {
    final int channelConfig = isStereo ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    final int audioFormat = is16Bit ? AudioFormat.ENCODING_PCM_16BIT : AudioFormat.ENCODING_PCM_8BIT;
    // Bytes per frame: one or two channels, one or two bytes per sample.
    final int bytesPerFrame = (isStereo ? 2 : 1) * (is16Bit ? 2 : 1);

    Log.v("SDL", "SDL audio: wanted " + (isStereo ? "stereo" : "mono") + " " + (is16Bit ? "16-bit" : "8-bit")
            + " " + (sampleRate / 1000f) + "kHz, " + desiredFrames + " frames buffer");

    // Let the user pick a larger buffer if they really want -- but ye
    // gods they probably shouldn't, the minimums are horrifyingly high
    // latency already. Round the platform minimum up to whole frames.
    final int minFrames = (AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat)
            + bytesPerFrame - 1) / bytesPerFrame;
    desiredFrames = Math.max(desiredFrames, minFrames);

    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, audioFormat,
            desiredFrames * bytesPerFrame, AudioTrack.MODE_STREAM);

    audioStartThread();

    // Report what the track actually gave us (may differ from the request).
    Log.v("SDL",
            "SDL audio: got " + ((mAudioTrack.getChannelCount() >= 2) ? "stereo" : "mono") + " "
                    + ((mAudioTrack.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT) ? "16-bit" : "8-bit")
                    + " " + (mAudioTrack.getSampleRate() / 1000f) + "kHz, " + desiredFrames + " frames buffer");

    final int samplesPerFrame = isStereo ? 2 : 1;
    if (is16Bit) {
        audioBuffer = new short[desiredFrames * samplesPerFrame];
    } else {
        audioBuffer = new byte[desiredFrames * samplesPerFrame];
    }
    return audioBuffer;
}