Example usage for android.media AudioFormat CHANNEL_CONFIGURATION_MONO

List of usage examples for android.media AudioFormat CHANNEL_CONFIGURATION_MONO

Introduction

On this page you can find an example usage of android.media AudioFormat CHANNEL_CONFIGURATION_MONO.

Prototype

int CHANNEL_CONFIGURATION_MONO

Click the Source Link below to view the source code for android.media AudioFormat CHANNEL_CONFIGURATION_MONO.

Click Source Link

Usage

From source file:Main.java

/**
 * Convert Android AudioFormat.CHANNEL_CONFIGURATION constants to integers
 * /*from   ww  w.j  a v  a 2 s . c  om*/
 * @param androidChannels
 *            Android AudioFormat constant
 */
public static int getChannelConfig(int androidChannels) {
    switch (androidChannels) {
    case AudioFormat.CHANNEL_CONFIGURATION_MONO:
        return 1;
    case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
        return 2;
    default:
        return 1;
    }
}

From source file:Main.java

/**
 * Maps a plain channel count to the matching Android
 * {@code AudioFormat.CHANNEL_CONFIGURATION_*} constant.
 *
 * @param numChannels number of channels, typically 1 or 2
 * @return the mono or stereo constant for 1 or 2 channels respectively;
 *         {@code CHANNEL_CONFIGURATION_DEFAULT} for any other count
 */
public static int getAndroidChannelConfig(int numChannels) {
    if (numChannels == 1) {
        return AudioFormat.CHANNEL_CONFIGURATION_MONO;
    }
    if (numChannels == 2) {
        return AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    }
    return AudioFormat.CHANNEL_CONFIGURATION_DEFAULT;
}

From source file:net.sf.asap.Player.java

/**
 * Playback loop: configures an AudioTrack matching the loaded module's
 * channel count and streams unsigned 8-bit samples generated by ASAP
 * until {@code stop} is set by another thread.
 */
public void run() {
    // Deprecated CHANNEL_CONFIGURATION_* constants are used here
    // (predating CHANNEL_OUT_*); mono vs. stereo follows the module.
    int config = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    int len = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, config, AudioFormat.ENCODING_PCM_8BIT);
    // Enforce a 16 KiB floor so a small platform minimum doesn't underrun.
    if (len < 16384)
        len = 16384;
    final byte[] buffer = new byte[len];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, config,
            AudioFormat.ENCODING_PCM_8BIT, len, AudioTrack.MODE_STREAM);
    audioTrack.play();

    for (;;) {
        synchronized (this) {
            // Block while paused, or after generate() returned a short
            // chunk (presumably end of song) — another thread is expected
            // to notify() this object on resume/stop.
            if (len < buffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                    // Deliberately ignored: the loop re-checks stop/pause
                    // state on wake-up.
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        // asap is shared with other threads (e.g. seek/control), so sample
        // generation is serialized on it; len is the byte count produced.
        synchronized (asap) {
            len = asap.generate(buffer, buffer.length, ASAPSampleFormat.U8);
        }
        audioTrack.write(buffer, 0, len);
    }
}

From source file:net.sf.asap.PlayerService.java

/**
 * Service worker: reads the module addressed by {@code uri} (plain file,
 * entry inside a zip, or http), loads it into ASAP, selects the requested
 * song, posts an ongoing notification, then streams signed 16-bit
 * little-endian samples to an AudioTrack until {@code stop} is set.
 */
public void run() {
    // --- read file ---
    String filename = uri.getPath();
    byte[] module = new byte[ASAPInfo.MAX_MODULE_LENGTH];
    int moduleLen;
    try {
        InputStream is;
        switch (uri.getScheme()) {
        case "file":
            if (Util.isZip(filename)) {
                // For zip URIs the fragment names the entry inside the archive.
                String zipFilename = filename;
                filename = uri.getFragment();
                is = new ZipInputStream(zipFilename, filename);
            } else
                is = new FileInputStream(filename);
            break;
        case "http":
            is = httpGet(uri);
            break;
        default:
            // Unsupported scheme — reported to the user as a read error below.
            throw new FileNotFoundException(uri.toString());
        }
        moduleLen = Util.readAndClose(is, module);
    } catch (IOException ex) {
        showError(R.string.error_reading_file);
        return;
    }

    // --- load file ---
    try {
        asap.load(filename, module, moduleLen);
        info = asap.getInfo();
        // Resolve the sentinel song selectors to concrete song indices.
        switch (song) {
        case SONG_DEFAULT:
            song = info.getDefaultSong();
            break;
        case SONG_LAST:
            song = info.getSongs() - 1;
            break;
        default:
            break;
        }
        playSong();
    } catch (Exception ex) {
        showError(R.string.invalid_file);
        return;
    }

    // Ongoing notification keeps the service in the foreground while playing.
    // NOTE(review): Notification(int,CharSequence,long) and setLatestEventInfo
    // are long-deprecated APIs — this code targets an old Android level.
    PendingIntent contentIntent = PendingIntent.getActivity(this, 0, new Intent(this, Player.class), 0);
    String title = info.getTitleOrFilename();
    Notification notification = new Notification(R.drawable.icon, title, System.currentTimeMillis());
    notification.flags |= Notification.FLAG_ONGOING_EVENT;
    notification.setLatestEventInfo(this, title, info.getAuthor(), contentIntent);
    startForegroundCompat(NOTIFICATION_ID, notification);

    // --- playback ---
    int channelConfig = info.getChannels() == 1 ? AudioFormat.CHANNEL_CONFIGURATION_MONO
            : AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    // getMinBufferSize returns bytes; >> 1 converts to 16-bit sample count.
    int bufferLen = AudioTrack.getMinBufferSize(ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT) >> 1;
    // Enforce a 16K-sample floor so a small platform minimum doesn't underrun.
    if (bufferLen < 16384)
        bufferLen = 16384;
    final byte[] byteBuffer = new byte[bufferLen << 1];
    final short[] shortBuffer = new short[bufferLen];
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, ASAP.SAMPLE_RATE, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT, bufferLen << 1, AudioTrack.MODE_STREAM);
    audioTrack.play();

    for (;;) {
        synchronized (this) {
            // Block while paused, or after generate() produced a short
            // chunk (presumably end of song) — another thread is expected
            // to notify() this object on resume/stop.
            if (bufferLen < shortBuffer.length || isPaused()) {
                try {
                    wait();
                } catch (InterruptedException ex) {
                    // Deliberately ignored: the loop re-checks stop/pause
                    // state on wake-up.
                }
            }
            if (stop) {
                audioTrack.stop();
                return;
            }
        }
        // asap is shared with the control thread; apply any pending seek
        // request, then generate the next chunk, all under the same lock.
        synchronized (asap) {
            int pos = seekPosition;
            if (pos >= 0) {
                seekPosition = -1;
                try {
                    asap.seek(pos);
                } catch (Exception ex) {
                    // Best-effort seek: a failed seek just continues playback.
                }
            }
            bufferLen = asap.generate(byteBuffer, byteBuffer.length, ASAPSampleFormat.S16_L_E) >> 1;
        }
        // Reassemble little-endian byte pairs into signed 16-bit samples.
        for (int i = 0; i < bufferLen; i++)
            shortBuffer[i] = (short) ((byteBuffer[i << 1] & 0xff) | byteBuffer[i << 1 | 1] << 8);
        audioTrack.write(shortBuffer, 0, bufferLen);
    }
}