Example usage for android.media AudioRecord getMinBufferSize

List of usage examples for android.media AudioRecord getMinBufferSize

Introduction

In this page you can find the example usage for android.media AudioRecord getMinBufferSize.

Prototype

static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) 

Source Link

Document

Returns the minimum buffer size required for the successful creation of an AudioRecord object, in byte units.

Usage

From source file:com.royer.bangstopwatch.app.StopwatchFragment.java

/**
 * Restores saved stopwatch state and wires up the UI once the host
 * activity exists.
 *
 * Sets up the lap list, the start/stop button (which verifies that the
 * device can record audio and is not in a call before starting a
 * countdown), and restores timekeeper/lap/bind state from
 * {@code savedInstanceState} when present.
 *
 * @param savedInstanceState previously saved fragment state, or null on first launch
 */
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);

    Log.d(TAG, "Enter onActivityCreated...");

    InitTimeDisplayView();

    mLapList = (ListView) getView().findViewById(R.id.listLap);
    this.registerForContextMenu(mLapList);

    btnStart = (Button) getView().findViewById(R.id.btnStart);
    btnStart.setOnClickListener(new View.OnClickListener() {

        @Override
        public void onClick(View v) {
            if (state == STATE_NONE) {
                // Bail out early if the device cannot create a recorder at all
                // (getMinBufferSize returns a negative error code in that case).
                if (AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO,
                        AudioFormat.ENCODING_PCM_16BIT) < 0) {
                    Context context = getActivity().getApplicationContext();

                    // BUG FIX: Toast.makeText's third argument must be
                    // Toast.LENGTH_SHORT or Toast.LENGTH_LONG, not a raw int.
                    Toast toast = Toast.makeText(context, R.string.strNoRecorder, Toast.LENGTH_LONG);
                    toast.show();
                    return;
                }

                AudioManager audiomanager = (AudioManager) getActivity()
                        .getSystemService(Context.AUDIO_SERVICE);
                Log.d(TAG, "AudioMode = " + audiomanager.getMode());
                // Refuse to start while the device is in a call or ringing.
                if (audiomanager.getMode() != AudioManager.MODE_NORMAL) {
                    Context context = getActivity().getApplicationContext();

                    // BUG FIX: same invalid-duration issue as above.
                    Toast toast = Toast.makeText(context, R.string.strInCalling, Toast.LENGTH_LONG);
                    toast.show();
                    return;
                }

                state = STATE_COUNTDOWN;
                DialogFragment newFragment = CountdownDialog.NewInstance(5, getTag());
                newFragment.show(getFragmentManager(), "countdownDialog");

            } else {
                changeState();
                state = STATE_NONE;
                updateRealElapseTime();
                printTime();

                // Unbind and stop the RecordService if we were bound to it.
                if (mBound) {
                    mService.stopRecord();
                    mService.unsetBang();
                    getActivity().unbindService(mConnection);
                    getActivity().stopService(new Intent(getActivity(), RecordService.class));
                    mBound = false;
                }
            }
            ((MainActivity) getActivity()).EnableTab(1, state == STATE_NONE);
        }
    });

    if (savedInstanceState != null) {

        Log.d(TAG, "savedInstanceState " + savedInstanceState.toString());
        _timekeeper = savedInstanceState.getParcelable(STATE_TIMEKEEPER);
        mLapManager = savedInstanceState.getParcelable(STATE_LAPS);
        state = savedInstanceState.getInt(STATE_STATE);
        mBound = savedInstanceState.getBoolean(STATE_BOUNDING);
        ((MainActivity) getActivity()).EnableTab(1, state == STATE_NONE);

    } else {
        Log.d(TAG, "savedInstanceState == NULL");
        if (_timekeeper == null)
            _timekeeper = new Timekeeper();
        if (mLapManager == null)
            mLapManager = new LapManager();
    }
    InitLapList();

    printTime();
    updateState();

    Log.d(TAG, "Leave OnActivityCreated...");
}

From source file:de.badaix.snapcast.MainActivity.java

/**
 * Initializes the main activity: probes supported recording sample rates,
 * reads the device's native output sample rate, builds the toolbar /
 * ViewPager / tab layout, and asynchronously copies the bundled
 * snapclient binary into place.
 *
 * @param savedInstanceState previously saved activity state, or null
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // Probe which sample rates the recorder supports on this device.
    for (int rate : new int[] { 8000, 11025, 16000, 22050, 44100, 48000 }) { // add the rates you wish to check against
        Log.d(TAG, "Samplerate: " + rate);
        // BUG FIX: AudioRecord.getMinBufferSize expects a CHANNEL_IN_* recording
        // constant; CHANNEL_OUT_STEREO is a playback (AudioTrack) constant.
        int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_STEREO,
                AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize > 0) {
            Log.d(TAG, "Samplerate: " + rate + ", buffer: " + bufferSize);
        }
    }

    AudioManager audioManager = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
        String rate = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
        // parseInt avoids the needless boxing of Integer.valueOf.
        nativeSampleRate = Integer.parseInt(rate);
        //            String size = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
        //            tvInfo.setText("Sample rate: " + rate + ", buffer size: " + size);
    }

    coordinatorLayout = (CoordinatorLayout) findViewById(R.id.myCoordinatorLayout);
    Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    // Create the adapter that will return a fragment for each of the three
    // primary sections of the activity.
    sectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());

    // Set up the ViewPager with the sections adapter.
    mViewPager = (ViewPager) findViewById(R.id.container);
    mViewPager.setAdapter(sectionsPagerAdapter);

    tabLayout = (TabLayout) findViewById(R.id.tabs);
    tabLayout.setupWithViewPager(mViewPager);
    mViewPager.setVisibility(View.GONE);

    setActionbarSubtitle("Host: no Snapserver found");

    // Copy the native snapclient binary off the UI thread.
    new Thread(new Runnable() {
        @Override
        public void run() {
            Log.d(TAG, "copying snapclient");
            Setup.copyBinAsset(MainActivity.this, "snapclient", "snapclient");
            Log.d(TAG, "done copying snapclient");
        }
    }).start();

    sectionsPagerAdapter.setHideOffline(Settings.getInstance(this).getBoolean("hide_offline", false));
}

From source file:edu.polyu.screamalert.SoundProcessing.java

/**
 * Attempts to create a working {@link AudioRecord} using the recorder
 * parameters from {@code Config}.
 *
 * Side effects: sets {@code recordBufferSize} and {@code nSubframePerBuf}.
 * Shows a toast and returns null when no recorder can be created.
 *
 * @return an initialized AudioRecord, or null on failure
 */
public static AudioRecord findAudioRecord() {
    try {
        recordBufferSize = AudioRecord.getMinBufferSize(Config.RECORDER_SAMPLERATE, Config.RECORDER_CHANNELS,
                Config.RECORDER_AUDIO_ENCODING);
        nSubframePerBuf = recordBufferSize / frameShift / 2; // e.g., 8192/128/2 = 32
        System.out.println("recordBufferSize: " + recordBufferSize);
        // BUG FIX: getMinBufferSize can also return AudioRecord.ERROR (-1), not
        // just ERROR_BAD_VALUE; accept only strictly positive sizes.
        if (recordBufferSize > 0) {
            // check if we can instantiate and have a success
            AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, Config.RECORDER_SAMPLERATE,
                    Config.RECORDER_CHANNELS, Config.RECORDER_AUDIO_ENCODING, recordBufferSize);
            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                return recorder;
            }
            // BUG FIX: release the native resources held by a recorder that
            // failed to initialize instead of leaking them.
            recorder.release();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    Toast.makeText(thisContext, "Fail to create AudioRecord object", Toast.LENGTH_LONG).show();
    return null;
}

From source file:edu.gvsu.masl.echoprint.AudioFingerprinter.java

/**
 * The main worker thread.<br>
 * Records audio from the microphone, generates the Echoprint audio
 * fingerprint, queries the server for a match, and forwards the results to
 * the listener callbacks. Repeats while {@code continuous} is true; always
 * releases the recorder and fires {@code didFinishListening()} on exit.
 */
public void run() {
    this.isRunning = true;
    try {
        // create the audio buffer
        // get the minimum buffer size
        int minBufferSize = AudioRecord.getMinBufferSize(FREQUENCY, CHANNEL, ENCODING);

        // and the actual buffer size for the audio to record
        // frequency * seconds to record.
        bufferSize = Math.max(minBufferSize, this.FREQUENCY * this.secondsToRecord);

        audioData = new short[bufferSize];

        // start recorder (hardware buffer uses the minimum size; we accumulate
        // bufferSize samples ourselves in the read loop below)
        mRecordInstance = new AudioRecord(MediaRecorder.AudioSource.MIC, FREQUENCY, CHANNEL, ENCODING,
                minBufferSize);

        willStartListening();

        mRecordInstance.startRecording();
        boolean firstRun = true;
        do {
            try {
                willStartListeningPass();

                long time = System.currentTimeMillis();
                // fill audio buffer with mic data.
                int samplesIn = 0;
                do {
                    samplesIn += mRecordInstance.read(audioData, samplesIn, bufferSize - samplesIn);

                    if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED)
                        break;
                } while (samplesIn < bufferSize);
                Log.d("Fingerprinter", "Audio recorded: " + (System.currentTimeMillis() - time) + " millis");

                // see if the process was stopped.
                if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED
                        || (!firstRun && !this.continuous))
                    break;

                // create an echoprint codegen wrapper and get the code
                time = System.currentTimeMillis();
                Codegen codegen = new Codegen();
                String code = codegen.generate(audioData, samplesIn);
                Log.d("Fingerprinter",
                        "Codegen created in: " + (System.currentTimeMillis() - time) + " millis");

                if (code.length() == 0) {
                    // no code?
                    // not enough audio data?
                    continue;
                }

                didGenerateFingerprintCode(code);

                // fetch data from echonest
                time = System.currentTimeMillis();

                String urlstr = SERVER_URL + code;
                HttpClient client = new DefaultHttpClient();
                HttpGet get = new HttpGet(urlstr);

                // get response
                HttpResponse response = client.execute(get);
                // Examine the response status
                Log.d("Fingerprinter", response.getStatusLine().toString());

                // Get hold of the response entity
                HttpEntity entity = response.getEntity();
                // If the response does not enclose an entity, there is no need
                // to worry about connection release

                String result = "";
                if (entity != null) {
                    // A Simple JSON Response Read
                    InputStream instream = entity.getContent();
                    result = convertStreamToString(instream);
                    // now you have the string representation of the HTML request
                    instream.close();
                }
                Log.d("Fingerprinter",
                        "Results fetched in: " + (System.currentTimeMillis() - time) + " millis");

                // parse JSON
                JSONObject jobj = new JSONObject(result);

                if (jobj.has("code"))
                    Log.d("Fingerprinter", "Response code:" + jobj.getInt("code") + " ("
                            + this.messageForCode(jobj.getInt("code")) + ")");

                if (jobj.has("match")) {
                    if (jobj.getBoolean("match")) {
                        Hashtable<String, String> match = new Hashtable<String, String>();
                        match.put(SCORE_KEY, jobj.getDouble(SCORE_KEY) + "");
                        match.put(TRACK_ID_KEY, jobj.getString(TRACK_ID_KEY));

                        // the metadata dictionary IS NOT included by default in the API demo server
                        // replace line 66/67 in API.py with:
                        // return json.dumps({"ok":True,"message":response.message(), "match":response.match(), "score":response.score, \
                        // "qtime":response.qtime, "track_id":response.TRID, "total_time":response.total_time, "metadata":response.metadata})
                        if (jobj.has("metadata")) {
                            JSONObject metadata = jobj.getJSONObject("metadata");

                            if (metadata.has(SCORE_KEY))
                                match.put(META_SCORE_KEY, metadata.getDouble(SCORE_KEY) + "");
                            if (metadata.has(TITLE_KEY))
                                match.put(TITLE_KEY, metadata.getString(TITLE_KEY));
                            if (metadata.has(ARTIST_KEY))
                                match.put(ARTIST_KEY, metadata.getString(ARTIST_KEY));
                            if (metadata.has(ALBUM_KEY))
                                match.put(ALBUM_KEY, metadata.getString(ALBUM_KEY));
                        }

                        didFindMatchForCode(match, code);
                    } else
                        didNotFindMatchForCode(code);
                } else {
                    didFailWithException(new Exception("Unknown error"));
                }

                firstRun = false;

                didFinishListeningPass();
            } catch (Exception e) {
                e.printStackTrace();
                // NOTE(review): getLocalizedMessage() may be null for some
                // exceptions, which would make Log.e itself throw — confirm.
                Log.e("Fingerprinter", e.getLocalizedMessage());

                didFailWithException(e);
            }
        } while (this.continuous);
    } catch (Exception e) {
        e.printStackTrace();
        Log.e("Fingerprinter", e.getLocalizedMessage());

        didFailWithException(e);
    }

    // Always tear down the recorder, whether the loop ended normally or not.
    if (mRecordInstance != null) {
        mRecordInstance.stop();
        mRecordInstance.release();
        mRecordInstance = null;
    }
    this.isRunning = false;

    didFinishListening();
}

From source file:com.example.echoprint.AudioFingerprinter.java

/**
 * The main worker thread.<br>
 * Records audio from the microphone, generates the Echoprint audio
 * fingerprint, queries the server for a match, and forwards the results to
 * the listener callbacks. Repeats while {@code continuous} is true; always
 * releases the recorder and fires {@code didFinishListening()} on exit.
 */
public void run() {
    this.isRunning = true;
    try {
        // create the audio buffer
        // get the minimum buffer size
        int minBufferSize = AudioRecord.getMinBufferSize(FREQUENCY, CHANNEL, ENCODING);

        // and the actual buffer size for the audio to record
        // frequency * seconds to record.
        bufferSize = Math.max(minBufferSize, this.FREQUENCY * this.secondsToRecord);

        audioData = new short[bufferSize];

        // start recorder (hardware buffer uses the minimum size; we accumulate
        // bufferSize samples ourselves in the read loop below)
        mRecordInstance = new AudioRecord(MediaRecorder.AudioSource.MIC, FREQUENCY, CHANNEL, ENCODING,
                minBufferSize);

        willStartListening();

        mRecordInstance.startRecording();
        boolean firstRun = true;
        do {
            try {
                willStartListeningPass();

                long time = System.currentTimeMillis();
                // fill audio buffer with mic data.
                int samplesIn = 0;
                do {
                    samplesIn += mRecordInstance.read(audioData, samplesIn, bufferSize - samplesIn);

                    if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED)
                        break;
                } while (samplesIn < bufferSize);
                Log.d("Fingerprinter", "Audio recorded: " + (System.currentTimeMillis() - time) + " millis");

                // see if the process was stopped.
                if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED
                        || (!firstRun && !this.continuous))
                    break;

                // create an echoprint codegen wrapper and get the code
                time = System.currentTimeMillis();
                Codegen codegen = new Codegen();
                String code = codegen.generate(audioData, samplesIn);
                Log.d("Fingerprinter",
                        "Codegen created in: " + (System.currentTimeMillis() - time) + " millis");

                if (code.length() == 0) {
                    // no code?
                    // not enough audio data?
                    continue;
                }

                didGenerateFingerprintCode(code);

                // fetch data from echonest
                time = System.currentTimeMillis();

                String urlstr = SERVER_URL + code;
                // NOTE(review): the commented-out URL below embeds an API key;
                // consider removing it from the source entirely.
                //String urlstr = "http://developer.echonest.com/api/v4/song/identify?api_key=GUYHX8VIYSCI79EZI&code=";
                HttpClient client = new DefaultHttpClient();
                HttpGet get = new HttpGet(urlstr);

                // get response
                HttpResponse response = client.execute(get);
                // Examine the response status
                Log.d("Fingerprinter", response.getStatusLine().toString());

                // Get hold of the response entity
                HttpEntity entity = response.getEntity();
                // If the response does not enclose an entity, there is no need
                // to worry about connection release

                String result = "";
                if (entity != null) {
                    // A Simple JSON Response Read
                    InputStream instream = entity.getContent();
                    result = convertStreamToString(instream);
                    // now you have the string representation of the HTML request
                    instream.close();
                }
                Log.d("Fingerprinter",
                        "Results fetched in: " + (System.currentTimeMillis() - time) + " millis");

                // parse JSON
                JSONObject jobj = new JSONObject(result);

                if (jobj.has("code"))
                    Log.d("Fingerprinter", "Response code:" + jobj.getInt("code") + " ("
                            + this.messageForCode(jobj.getInt("code")) + ")");

                if (jobj.has("match")) {
                    if (jobj.getBoolean("match")) {
                        Hashtable<String, String> match = new Hashtable<String, String>();
                        match.put(SCORE_KEY, jobj.getDouble(SCORE_KEY) + "");
                        match.put(TRACK_ID_KEY, jobj.getString(TRACK_ID_KEY));

                        // the metadata dictionary IS NOT included by default in the API demo server
                        // replace line 66/67 in API.py with:
                        // return json.dumps({"ok":True,"message":response.message(), "match":response.match(), "score":response.score, \
                        // "qtime":response.qtime, "track_id":response.TRID, "total_time":response.total_time, "metadata":response.metadata})
                        if (jobj.has("metadata")) {
                            JSONObject metadata = jobj.getJSONObject("metadata");

                            if (metadata.has(SCORE_KEY))
                                match.put(META_SCORE_KEY, metadata.getDouble(SCORE_KEY) + "");
                            if (metadata.has(TITLE_KEY))
                                match.put(TITLE_KEY, metadata.getString(TITLE_KEY));
                            if (metadata.has(ARTIST_KEY))
                                match.put(ARTIST_KEY, metadata.getString(ARTIST_KEY));
                            if (metadata.has(ALBUM_KEY))
                                match.put(ALBUM_KEY, metadata.getString(ALBUM_KEY));
                        }

                        didFindMatchForCode(match, code);
                    } else
                        didNotFindMatchForCode(code);
                } else {
                    didFailWithException(new Exception("Unknown error"));
                }

                firstRun = false;

                didFinishListeningPass();
            } catch (Exception e) {
                e.printStackTrace();
                // NOTE(review): getLocalizedMessage() may be null for some
                // exceptions, which would make Log.e itself throw — confirm.
                Log.e("Fingerprinter", e.getLocalizedMessage());

                didFailWithException(e);
            }
        } while (this.continuous);
    } catch (Exception e) {
        e.printStackTrace();
        Log.e("Fingerprinter", e.getLocalizedMessage());

        didFailWithException(e);
    }

    // Always tear down the recorder, whether the loop ended normally or not.
    if (mRecordInstance != null) {
        mRecordInstance.stop();
        mRecordInstance.release();
        mRecordInstance = null;
    }
    this.isRunning = false;

    didFinishListening();
}

From source file:com.suan.weclient.fragment.mass.VoiceFragment.java

// Prepares the AudioRecord used for voice capture, sized to the minimum
// buffer the device supports for the configured sample rate / channel /
// format fields.
private void initRecorder() {

    // Ask the framework for the smallest legal recording buffer for this format.
    bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    // Construct the recorder with exactly that buffer size.
    audioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes);

}

From source file:com.ece420.lab3.MainActivity.java

/**
 * Reads the device's native audio output parameters (sample rate and
 * frames-per-buffer) and records in {@code supportRecording} whether a
 * 16-bit mono recorder can be created at that rate.
 */
private void queryNativeAudioParameters() {
    AudioManager myAudioMgr = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    nativeSampleRate = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
    nativeSampleBufSize = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    int recBufSize = AudioRecord.getMinBufferSize(Integer.parseInt(nativeSampleRate),
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // Recording is supported unless the buffer-size query reported an error code.
    supportRecording = recBufSize != AudioRecord.ERROR && recBufSize != AudioRecord.ERROR_BAD_VALUE;
}

From source file:edu.stanford.mobisocial.dungbeetle.feed.objects.VoiceObj.java

/**
 * Copies raw recorded PCM data from {@code inFilename} to
 * {@code outFilename}, prepending a WAV header sized from the input file.
 *
 * @param inFilename  path of the raw PCM input file
 * @param outFilename path of the WAV file to create
 */
private void copyWaveFile(String inFilename, String outFilename) {
    FileInputStream in = null;
    FileOutputStream out = null;
    long totalAudioLen = 0;
    long totalDataLen = totalAudioLen + 36; // 36 = header bytes after the RIFF size field
    long longSampleRate = RECORDER_SAMPLERATE;
    int channels = 1;
    long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;

    int bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS,
            RECORDER_AUDIO_ENCODING);
    byte[] data = new byte[bufferSize];

    try {
        in = new FileInputStream(inFilename);
        out = new FileOutputStream(outFilename);
        totalAudioLen = in.getChannel().size();
        totalDataLen = totalAudioLen + 36;

        Log.w("PlayAllAudioAction", "File size: " + totalDataLen);

        WriteWaveFileHeader(out, totalAudioLen, totalDataLen, longSampleRate, channels, byteRate);

        // BUG FIX: write only the bytes actually read. The original wrote the
        // whole buffer on every pass, appending stale bytes whenever read()
        // returned a short count (e.g. the final chunk of the file).
        int bytesRead;
        while ((bytesRead = in.read(data)) != -1) {
            out.write(data, 0, bytesRead);
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // BUG FIX: close the streams even when an exception is thrown;
        // the original leaked both on any I/O error.
        if (in != null) {
            try {
                in.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
        if (out != null) {
            try {
                out.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
    }
}

From source file:com.xzg.fingerprinter.AudioFingerprinter.java

/**
 * The main worker thread.<br>
 * Records audio from the microphone, saves it as a WAV file, generates the
 * audio fingerprint, queries the server for a match, and forwards the
 * results to the listener callbacks. Repeats while {@code continuous} is
 * true; always releases the recorder and fires {@code didFinishListening()}
 * on exit.
 */
public void run() {
    this.isRunning = true;
    try {
        // create the audio buffer
        // get the minimum buffer size
        int minBufferSize = AudioRecord.getMinBufferSize(FREQUENCY, CHANNEL, ENCODING);

        System.out.println("minBufferSize: " + minBufferSize);
        // and the actual buffer size for the audio to record
        // frequency * seconds to record.
        bufferSize = Math.max(minBufferSize, this.FREQUENCY * this.secondsToRecord);

        System.out.println("BufferSize: " + bufferSize);
        // bufferSize is in samples; * 2 for 16-bit (2-byte) PCM samples.
        audioData = new byte[bufferSize * 2];

        // start recorder (hardware buffer uses the minimum size; we accumulate
        // bufferSize samples ourselves in the read loop below)
        mRecordInstance = new AudioRecord(MediaRecorder.AudioSource.MIC, FREQUENCY, CHANNEL, ENCODING,
                minBufferSize);

        willStartListening();

        mRecordInstance.startRecording();
        boolean firstRun = true;
        do {
            try {
                willStartListeningPass();

                long time = System.currentTimeMillis();
                // fill audio buffer with mic data.
                int samplesIn = 0;
                do {
                    samplesIn += mRecordInstance.read(audioData, samplesIn, bufferSize - samplesIn);

                    if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED)
                        break;
                } while (samplesIn < bufferSize);
                Log.d("Fingerprinter", "Audio recorded: " + (System.currentTimeMillis() - time) + " millis");

                // see if the process was stopped.
                if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED
                        || (!firstRun && !this.continuous))
                    break;

                Log.d("Fingerprinter", "Recod state: " + mRecordInstance.getRecordingState());
                byte[] audioDataByteFormat = (audioData);
                // Dump the captured audio to a WAV file for debugging.
                Wave w = new Wave();
                w.data = audioDataByteFormat;
                WaveFileManager wm = new WaveFileManager();
                wm.setWave(w);
                wm.saveWaveAsFile("/sdcard/xzgrecord.wav");

                Clip c = Clip.newInstance(audioDataByteFormat, this.FREQUENCY);
                // create an echoprint codegen wrapper and get the code
                time = System.currentTimeMillis();
                Codegen codegen = new Codegen(c);
                String code = codegen.genCode();
                // Log.d("Fingerprinter","codegen before");
                // String code = codegen.generate(audioData, samplesIn);
                Log.d("Fingerprinter", "codegen after");
                Log.d("Fingerprinter",
                        "Codegen created in: " + (System.currentTimeMillis() - time) + " millis");
                //
                Log.d("Fingerprinter", "code length is " + code.length());
                if (code.length() == 0) {
                    // no code?
                    // not enough audio data?
                    continue;
                }

                // fetch data from echonest
                long startTime = System.currentTimeMillis();

                String result = fetchServerResult(code);

                long endTime = System.currentTimeMillis();
                long fetchTime = endTime - startTime;
                Log.d("Fingerprinter", "Results fetched in: " + (fetchTime) + " millis");

                Log.d("Fingerprinter", "HTTP result: " + result);
                // parse JSON
                JSONObject jsonResult = new JSONObject(result);

                if (jsonResult.has("id"))
                    Log.d("Fingerprinter", "Response code:" + jsonResult.getInt("id"));

                // A non-negative "id" signals a match; negative means no match.
                if (jsonResult.has("id")) {
                    if (jsonResult.getInt("id") >= 0) {
                        Hashtable<String, String> match = parseResult(jsonResult);

                        didFindMatchForCode(match, code);
                    } else
                        didNotFindMatchForCode(code);
                } else {
                    didFailWithException(new Exception("Unknown error"));
                }
                //
                firstRun = false;

                didFinishListeningPass();
            } catch (Exception e) {
                e.printStackTrace();
                // NOTE(review): getLocalizedMessage() may be null for some
                // exceptions, which would make Log.e itself throw — confirm.
                Log.e("Fingerprinter", e.getLocalizedMessage());

                didFailWithException(e);
            }
        } while (this.continuous);
    } catch (Exception e) {
        e.printStackTrace();
        Log.e("Fingerprinter", e.getLocalizedMessage());

        didFailWithException(e);
    }

    // Always tear down the recorder, whether the loop ended normally or not.
    if (mRecordInstance != null) {
        mRecordInstance.stop();
        mRecordInstance.release();
        mRecordInstance = null;
    }
    this.isRunning = false;

    didFinishListening();
}

From source file:com.brejza.matt.habmodem.Dsp_service.java

/**
 * Starts microphone capture and the decoder thread, if decoding is enabled
 * and capture is not already running.
 *
 * Creates an 8 kHz mono 16-bit recorder (falling back from MIC to the
 * DEFAULT audio source), a matching playback track, and launches the
 * capture thread.
 */
public void startAudio() {
    if (!_enableDecoder)
        return;

    boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);

    System.out.println("isRecording: " + isRecording);
    logEvent("Starting Audio. Mic avaliable: " + mic, false);
    if (!isRecording) {
        isRecording = true;

        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        // getMinBufferSize can return an error code (< 0); enforce a sane floor.
        buffsize = Math.max(buffsize, 3000);

        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);

        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);

        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }

        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            // BUG FIX: release the failed recorder's native resources before
            // replacing it with the fallback (the original leaked it).
            mRecorder.release();

            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);

            if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
                logEvent("Error - Could not initialise audio", true);
                // BUG FIX: release the second failed recorder and reset the
                // flag so a later call to startAudio() can retry; the original
                // left isRecording stuck at true after a failure.
                mRecorder.release();
                isRecording = false;
                return;
            }
            logEvent("Using default audio source", false);
        }

        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        logEvent("Starting Audio Thread.", false);
        setDecoderRunningNotification();
        ct.start();
    }
}