Example usage for android.media AudioFormat ENCODING_PCM_16BIT

List of usage examples for android.media AudioFormat ENCODING_PCM_16BIT

Introduction

This page lists example usages of android.media AudioFormat ENCODING_PCM_16BIT.

Prototype

int ENCODING_PCM_16BIT

Document

Audio data format: PCM 16 bit per sample.
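
ENCODING_PCM_16BIT stores each sample as a signed 16-bit value, i.e. two bytes per sample per channel. The minimal sketch below shows how the constant is typically passed to AudioRecord.getMinBufferSize() and the AudioRecord constructor; the 44100 Hz rate, mono channel mask, MIC source, and class name are illustrative assumptions, not taken from the examples on this page.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class Pcm16BufferExample {

    // Illustrative sample rate; any rate supported by the device works the same way.
    private static final int SAMPLE_RATE = 44100;

    // Assumes the RECORD_AUDIO permission has already been granted.
    public static AudioRecord createRecorder() {
        // Minimum buffer size in bytes for mono 16-bit PCM at the requested rate.
        int minBufferBytes = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

        // With ENCODING_PCM_16BIT each frame occupies 2 bytes per channel (Short.SIZE / 8).
        return new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
                minBufferBytes);
    }
}

The same constant is used on the playback side with AudioTrack, as in the CalibrationLinearityActivity example below.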

Usage

From source file: org.noise_planet.noisecapture.CalibrationLinearityActivity.java

private void playNewTrack() {

    double rms = dbToRms(99 - (splLoop++) * DB_STEP);
    short[] data = makeWhiteNoiseSignal(44100, rms);
    double[] fftCenterFreq = FFTSignalProcessing
            .computeFFTCenterFrequency(AudioProcess.REALTIME_SAMPLE_RATE_LIMITATION);
    FFTSignalProcessing fftSignalProcessing = new FFTSignalProcessing(44100, fftCenterFreq, 44100);
    fftSignalProcessing.addSample(data);
    whiteNoisedB = fftSignalProcessing.computeGlobalLeq();
    freqLeqStats.add(new LinearCalibrationResult(fftSignalProcessing.processSample(true, false, false)));
    LOGGER.info("Emit white noise of " + whiteNoisedB + " dB");
    if (audioTrack == null) {
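        // Buffer size for MODE_STATIC is in bytes: data.length samples * 2 bytes per 16-bit sample (Short.SIZE / 8).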
        audioTrack = new AudioTrack(getAudioOutput(), 44100, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, data.length * (Short.SIZE / 8), AudioTrack.MODE_STATIC);
    } else {
        try {
            audioTrack.pause();
            audioTrack.flush();
        } catch (IllegalStateException ex) {
            // Ignore
        }
    }
    audioTrack.setLoopPoints(0, audioTrack.write(data, 0, data.length), -1);
    audioTrack.play();
}

From source file: cc.echonet.coolmicapp.MainActivity.java

public void startRecording(View view) {
    if (isThreadOn) {
        stopRecording(view);

        return;
    }

    if (!checkPermission()) {
        Toast.makeText(getApplicationContext(), "Missing Permissions. Please request them in the Settings.",
                Toast.LENGTH_LONG).show();
        return;
    }

    if (Wrapper.getState() != Wrapper.WrapperInitializationStatus.WRAPPER_INTITIALIZED) {
        Toast.makeText(getApplicationContext(), "Native components not ready.", Toast.LENGTH_LONG).show();
        return;
    }

    if (!isOnline()) {
        Toast.makeText(getApplicationContext(), "Check Internet Connection !", Toast.LENGTH_LONG).show();
        return;
    }

    if (!coolmic.isConnectionSet()) {
        Toast.makeText(getApplicationContext(), "Set the connection details !", Toast.LENGTH_LONG).show();
        return;
    }

    invalidateOptionsMenu();
    isThreadOn = true;
    //screenreceiver.setThreadStatus(true);
    startService(new Intent(getBaseContext(), MyService.class));
    RedFlashLight();
    timeInMilliseconds = 0L;
    timeSwapBuff = 0L;
    start_button.startAnimation(animation);
    start_button.setBackground(trans);
    trans.startTransition(5000);
    start_button.setText(R.string.broadcasting);
    streamThread = new Thread(new Runnable() {
        @Override
        public void run() {
            if (isThreadOn) {
                try {
                    String portnum;
                    String server = coolmic.getServerName();
                    Integer port_num = 8000;

                    if (server.indexOf(":") > 0) {
                        String[] split = server.split(":");
                        server = split[0];
                        portnum = split[1];
                        port_num = Integer.parseInt(portnum);
                    }

                    Log.d("VS", server);
                    Log.d("VS", port_num.toString());
                    String username = coolmic.getUsername();
                    String password = coolmic.getPassword();
                    String mountpoint = coolmic.getMountpoint();
                    String sampleRate_string = coolmic.getSampleRate();
                    String channel_string = coolmic.getChannels();
                    String quality_string = coolmic.getQuality();
                    String title = coolmic.getTitle();
                    String artist = coolmic.getArtist();

                    Log.d("VS", String.format(
                            "Server: %s Port: %d Username: %s Password: %s Mountpoint: %s Samplerate: %s Channels: %s Quality: %s Title: %s Artist: %s",
                            server, port_num, username, password, mountpoint, sampleRate_string, channel_string,
                            quality_string, title, artist));

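                    // getMinBufferSize() returns the minimum buffer size in bytes for 16-bit PCM at the chosen rate and channel mask.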
                    Integer buffersize = AudioRecord.getMinBufferSize(Integer.parseInt(sampleRate_string),
                            Integer.parseInt(channel_string) == 1 ? AudioFormat.CHANNEL_IN_MONO
                                    : AudioFormat.CHANNEL_IN_STEREO,
                            AudioFormat.ENCODING_PCM_16BIT);
                    Log.d("VS", "Minimum Buffer Size: " + String.valueOf(buffersize));
                    Wrapper.init(MainActivity.this, server, port_num, username, password, mountpoint,
                            "audio/ogg; codec=vorbis", Integer.parseInt(sampleRate_string),
                            Integer.parseInt(channel_string), buffersize);

                    int status = Wrapper.start();

                    Log.d("VS", "Status:" + status);

                    if (status != 0) {
                        throw new Exception("Failed to start Recording: " + String.valueOf(status));
                    }

                    strStreamFetchStatsURL = String.format("http://%s:%s@%s:%s/admin/stats.xml?mount=/%s",
                            username, password, server, port_num, mountpoint);
                } catch (Exception e) {
                    e.printStackTrace();
                    Log.e("VS", "Recording Start: Exception: ", e);

                    MainActivity.this.runOnUiThread(new Runnable() {
                        public void run() {
                            stopRecording(null);

                            Toast.makeText(MainActivity.this, "Failed to start Recording. ", Toast.LENGTH_LONG)
                                    .show();
                        }
                    });
                }
            }

        }

    });
    streamThread.start();
}

From source file: com.inmobi.ultrapush.AnalyzeActivity.java

/**
 * Return an array of verified audio sampling rates.
 *
 * @param requested the sampling rates to be verified
 */
private static String[] validateAudioRates(String[] requested) {
    ArrayList<String> validated = new ArrayList<String>();
    for (String s : requested) {
        int rate;
        String[] sv = s.split("::");
        if (sv.length == 1) {
            rate = Integer.parseInt(sv[0]);
        } else {
            rate = Integer.parseInt(sv[1]);
        }
        if (rate != 0) {
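            // getMinBufferSize() returns ERROR_BAD_VALUE when the rate is not supported for mono 16-bit PCM, so this probes whether the rate is usable.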
            if (AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT) != AudioRecord.ERROR_BAD_VALUE) {
                validated.add(s);
            }
        } else {
            validated.add(s);
        }
    }
    return validated.toArray(new String[0]);
}

From source file: com.example.sensingapp.SensingApp.java

/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    int i;
    Location location = null;

    super.onCreate(savedInstanceState);

    if (android.os.Build.VERSION.SDK_INT > 9) {
        StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
        StrictMode.setThreadPolicy(policy);
    }

    try {
        Class.forName("android.os.AsyncTask");
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    }

    m_smSurScan = (SensorManager) getSystemService(SENSOR_SERVICE);

    PackageManager pm = getPackageManager();
    m_riHome = pm.resolveActivity(new Intent(Intent.ACTION_MAIN).addCategory(Intent.CATEGORY_HOME), 0);

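    // Minimum recording buffer size in bytes for stereo 16-bit PCM at the configured sample rate.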
    m_nBufferSize = AudioRecord.getMinBufferSize(m_nAudioSampleRate, AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);

    m_tmCellular = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);

    checkSensorAvailability();

    //Get Existing Project Name and Existing User Name
    preconfigSetting();

    /* When the power button is pressed and the screen goes off, the sensors stop working by default.
     * Keep the CPU on here so the sensors stay alive, and use the SCREEN_OFF notification to re-enable GPS/WiFi.
     */
    PowerManager pwrManager = (PowerManager) getSystemService(Context.POWER_SERVICE);
    m_wakeLock = pwrManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, TAG);
    IntentFilter filter = new IntentFilter(Intent.ACTION_SCREEN_ON);
    filter.addAction(Intent.ACTION_SCREEN_OFF);

    registerReceiver(m_ScreenOffReceiver, filter);

    show_screen1();
}

From source file: com.example.sensingapp.SensingApp.java

private void startAudioRecording() {
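    // Capture from the microphone as stereo 16-bit PCM, using the minimum buffer size computed in onCreate().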
    m_audioRecorder = new AudioRecord(MediaRecorder.AudioSource.MIC, m_nAudioSampleRate,
            AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, m_nBufferSize);

    if (m_audioRecorder == null)
        return;

    int i = m_audioRecorder.getState();
    if (i == AudioRecord.STATE_INITIALIZED) {
        m_audioRecorder.startRecording();
    } else {
        return;
    }

    if (m_blnRecordSoundLevel == true || m_blnRecordSoundFile == true) {
        m_processSoundThread = new Thread(new Runnable() {
            public void run() {
                processAudioData();
            }
        }, "Audio Thread");

        m_processSoundThread.start();

        if (m_blnRecordSoundLevel == true) {
            m_soundLevelThread = new Thread(new Runnable() {
                public void run() {
                    calculateAudioSoundLevel();
                }
            }, "Sould Level Thread");

            m_soundLevelThread.start();
        }
    }

}