List of usage examples for the android.media.AudioRecord constructor
public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat, int bufferSizeInBytes) throws IllegalArgumentException
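Before the individual examples, here is a minimal self-contained sketch of the typical construction pattern. The class name, constant names, and configuration values (44100 Hz, mono, 16-bit PCM) are illustrative assumptions, not taken from any of the source files below: query AudioRecord.getMinBufferSize(), pass a buffer at least that large to the constructor, and verify getState() before recording. The RECORD_AUDIO permission must also be granted, or initialization fails.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class AudioRecordFactory {
    // Illustrative configuration; any combination supported by the device works the same way.
    private static final int SAMPLE_RATE_HZ = 44100;
    private static final int CHANNEL = AudioFormat.CHANNEL_IN_MONO;
    private static final int ENCODING = AudioFormat.ENCODING_PCM_16BIT;

    /** Returns an initialized recorder, or null if the configuration is unsupported. */
    public static AudioRecord create() {
        int minBufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE_HZ, CHANNEL, ENCODING);
        if (minBufferSize == AudioRecord.ERROR_BAD_VALUE || minBufferSize == AudioRecord.ERROR) {
            return null; // the device does not support this rate/channel/encoding combination
        }
        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                SAMPLE_RATE_HZ, CHANNEL, ENCODING, minBufferSize * 2);
        if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
            recorder.release(); // construction can fail silently; always verify getState()
            return null;
        }
        return recorder;
    }
}

Several of the examples below likewise pass a multiple of the minimum buffer size to the constructor; a larger internal buffer simply gives the reading thread more slack before audio data is overwritten.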
From source file:Main.java
/**
 * Check a correct buffer size for your AudioRecord instance.
 *
 * @param audioSource          the audio source
 * @param fs                   the sampling rate in Hz
 * @param channelConfiguration the channel configuration
 * @param audioEncoding        the audio encoding
 * @return the first buffer size (in bytes) that yields an initialized AudioRecord, or 0 if none works
 */
public static int getValidBufferSize(int audioSource, int fs, int channelConfiguration, int audioEncoding) {
    for (int bufferSize : new int[] { 256, 512, 1024, 2048, 4096 }) { // add the sizes you wish to check against
        AudioRecord audioRecordTemp = new AudioRecord(audioSource, fs, channelConfiguration, audioEncoding, bufferSize);
        if (audioRecordTemp.getState() == AudioRecord.STATE_INITIALIZED) {
            audioRecordTemp.release(); // release the probe instance so it does not block later recordings
            return bufferSize;
        }
        audioRecordTemp.release();
    }
    return 0;
}
From source file:Main.java
/**
 * Check a correct buffer size for your AudioRecord instance.
 *
 * @param audioSource          the audio source
 * @param fs                   the sampling rate in Hz
 * @param channelConfiguration the channel configuration
 * @param audioEncoding        the audio encoding
 * @return the first buffer size (in bytes) that yields an initialized AudioRecord, or 0 if none works
 */
public static int checkCorrectBufferSize(int audioSource, int fs, int channelConfiguration, int audioEncoding) {
    for (int buffer : new int[] { 256, 512, 1024, 2048, 4096 }) { // add the sizes you wish to check against
        AudioRecord audioRecordTemp = new AudioRecord(audioSource, fs, channelConfiguration, audioEncoding, buffer);
        if (audioRecordTemp.getState() == AudioRecord.STATE_INITIALIZED) {
            audioRecordTemp.release(); // release the probe instance so it does not block later recordings
            return buffer;
        }
        audioRecordTemp.release();
    }
    return 0;
}
From source file:edu.nchu.cs.dmlab.firemap.mjpeg.RecMicToMp3.java
/**
 * Start recording.
 */
public void start() {
    // just skip if recording is already in progress
    if (mIsRecording) {
        return;
    }
    new Thread() {
        @Override
        public void run() {
            if (mHandler != null)
                mHandler.sendEmptyMessage(Message.MSG_DIALOG_OPEN);
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
            // calculate the minimum buffer size
            final int minBufferSize = AudioRecord.getMinBufferSize(mSampleRate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            if (minBufferSize < 0) {
                if (mHandler != null) {
                    mHandler.sendEmptyMessage(Message.MSG_ERROR_GET_MIN_BUFFERSIZE);
                }
                return;
            }
            AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, mSampleRate,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize * 2);
            // PCM buffer: SampleRate[Hz] * 16 bit * mono * 5 sec
            short[] buffer = new short[mSampleRate * (16 / 8) * 1 * 5];
            byte[] mp3buffer = new byte[(int) (7200 + buffer.length * 2 * 1.25)];
            FileOutputStream output = null;
            try {
                output = new FileOutputStream(mFile);
            } catch (FileNotFoundException e) {
                if (mHandler != null) {
                    mHandler.sendEmptyMessage(Message.MSG_ERROR_CREATE_FILE);
                }
                return;
            }
            // LAME init
            SimpleLame.init(mSampleRate, 1, mSampleRate, 32);
            try {
                try {
                    audioRecord.startRecording();
                    mIsRecording = true;
                } catch (IllegalStateException e) {
                    if (mHandler != null) {
                        mHandler.sendEmptyMessage(Message.MSG_ERROR_REC_START);
                    }
                    return;
                }
                try {
                    if (mHandler != null) {
                        mHandler.sendEmptyMessage(Message.MSG_REC_STARTED);
                    }
                    int readSize = 0;
                    while (mIsRecording) {
                        readSize = audioRecord.read(buffer, 0, minBufferSize);
                        if (readSize < 0) {
                            if (mHandler != null) {
                                mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_RECORD);
                            }
                            break; // read error
                        } else if (readSize == 0) {
                            ; // no data yet, keep polling
                        } else {
                            int encResult = SimpleLame.encode(buffer, buffer, readSize, mp3buffer);
                            if (encResult < 0) {
                                if (mHandler != null) {
                                    mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_ENCODE);
                                }
                                break;
                            }
                            if (encResult != 0) {
                                try {
                                    output.write(mp3buffer, 0, encResult);
                                } catch (IOException e) {
                                    if (mHandler != null) {
                                        mHandler.sendEmptyMessage(Message.MSG_ERROR_WRITE_FILE);
                                    }
                                    break;
                                }
                            }
                        }
                    }
                    int flushResult = SimpleLame.flush(mp3buffer);
                    if (flushResult < 0) {
                        if (mHandler != null) {
                            mHandler.sendEmptyMessage(Message.MSG_ERROR_AUDIO_ENCODE);
                        }
                    }
                    if (flushResult != 0) {
                        try {
                            output.write(mp3buffer, 0, flushResult);
                        } catch (IOException e) {
                            if (mHandler != null) {
                                mHandler.sendEmptyMessage(Message.MSG_ERROR_WRITE_FILE);
                            }
                        }
                    }
                    try {
                        output.close();
                    } catch (IOException e) {
                        if (mHandler != null) {
                            mHandler.sendEmptyMessage(Message.MSG_ERROR_CLOSE_FILE);
                        }
                    }
                } finally {
                    audioRecord.stop();
                    audioRecord.release();
                }
            } finally {
                SimpleLame.close();
                mIsRecording = false;
            }
            if (mHandler != null) {
                mHandler.sendEmptyMessage(Message.MSG_REC_STOPPED);
            }
            // Upload audio
            uploadVoice();
            if (mHandler != null)
                mHandler.sendEmptyMessage(Message.MSG_DIALOG_CLOSE);
        }
    }.start();
}
From source file:com.example.rttytranslator.Dsp_service.java
public void startAudio() {
    if (!_enableDecoder)
        return;

    //boolean mic = this.getPackageManager().hasSystemFeature(PackageManager.FEATURE_MICROPHONE);
    System.out.println("isRecording: " + isRecording);
    if (!isRecording) {
        isRecording = true;
        buffsize = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        buffsize = Math.max(buffsize, 3000);
        mRecorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, buffsize);
        mPlayer = new AudioTrack(AudioManager.STREAM_MUSIC, 8000, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, 2 * buffsize, AudioTrack.MODE_STREAM);
        if (enableEcho) {
            AudioManager manager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            manager.setMode(AudioManager.MODE_IN_CALL);
            manager.setSpeakerphoneOn(true);
        }
        if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
            mRecorder = new AudioRecord(AudioSource.DEFAULT, 8000, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, buffsize);
        }
        mRecorder.startRecording();
        System.out.println("STARTING THREAD");
        Thread ct = new captureThread();
        ct.start();
    }
}
From source file:edu.gvsu.masl.echoprint.AudioFingerprinter.java
/**
 * The main thread.<br>
 * Records audio and generates the audio fingerprint, then it queries the server for a match
 * and forwards the results to the listener.
 */
public void run() {
    this.isRunning = true;
    try {
        // create the audio buffer
        // get the minimum buffer size
        int minBufferSize = AudioRecord.getMinBufferSize(FREQUENCY, CHANNEL, ENCODING);
        // and the actual buffer size for the audio to record
        // frequency * seconds to record.
        bufferSize = Math.max(minBufferSize, this.FREQUENCY * this.secondsToRecord);
        audioData = new short[bufferSize];

        // start recorder
        mRecordInstance = new AudioRecord(MediaRecorder.AudioSource.MIC, FREQUENCY, CHANNEL, ENCODING, minBufferSize);
        willStartListening();
        mRecordInstance.startRecording();
        boolean firstRun = true;
        do {
            try {
                willStartListeningPass();
                long time = System.currentTimeMillis();
                // fill audio buffer with mic data.
                int samplesIn = 0;
                do {
                    samplesIn += mRecordInstance.read(audioData, samplesIn, bufferSize - samplesIn);
                    if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED)
                        break;
                } while (samplesIn < bufferSize);
                Log.d("Fingerprinter", "Audio recorded: " + (System.currentTimeMillis() - time) + " millis");

                // see if the process was stopped.
                if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED
                        || (!firstRun && !this.continuous))
                    break;

                // create an echoprint codegen wrapper and get the code
                time = System.currentTimeMillis();
                Codegen codegen = new Codegen();
                String code = codegen.generate(audioData, samplesIn);
                Log.d("Fingerprinter", "Codegen created in: " + (System.currentTimeMillis() - time) + " millis");
                if (code.length() == 0) {
                    // no code?
                    // not enough audio data?
                    continue;
                }
                didGenerateFingerprintCode(code);

                // fetch data from echonest
                time = System.currentTimeMillis();
                String urlstr = SERVER_URL + code;
                HttpClient client = new DefaultHttpClient();
                HttpGet get = new HttpGet(urlstr);

                // get response
                HttpResponse response = client.execute(get);
                // Examine the response status
                Log.d("Fingerprinter", response.getStatusLine().toString());

                // Get hold of the response entity
                HttpEntity entity = response.getEntity();
                // If the response does not enclose an entity, there is no need
                // to worry about connection release
                String result = "";
                if (entity != null) {
                    // A Simple JSON Response Read
                    InputStream instream = entity.getContent();
                    result = convertStreamToString(instream);
                    // now you have the string representation of the HTML request
                    instream.close();
                }
                Log.d("Fingerprinter", "Results fetched in: " + (System.currentTimeMillis() - time) + " millis");

                // parse JSON
                JSONObject jobj = new JSONObject(result);
                if (jobj.has("code"))
                    Log.d("Fingerprinter", "Response code:" + jobj.getInt("code") + " ("
                            + this.messageForCode(jobj.getInt("code")) + ")");
                if (jobj.has("match")) {
                    if (jobj.getBoolean("match")) {
                        Hashtable<String, String> match = new Hashtable<String, String>();
                        match.put(SCORE_KEY, jobj.getDouble(SCORE_KEY) + "");
                        match.put(TRACK_ID_KEY, jobj.getString(TRACK_ID_KEY));

                        // the metadata dictionary IS NOT included by default in the API demo server
                        // replace line 66/67 in API.py with:
                        // return json.dumps({"ok":True,"message":response.message(), "match":response.match(), "score":response.score, \
                        //     "qtime":response.qtime, "track_id":response.TRID, "total_time":response.total_time, "metadata":response.metadata})
                        if (jobj.has("metadata")) {
                            JSONObject metadata = jobj.getJSONObject("metadata");
                            if (metadata.has(SCORE_KEY))
                                match.put(META_SCORE_KEY, metadata.getDouble(SCORE_KEY) + "");
                            if (metadata.has(TITLE_KEY))
                                match.put(TITLE_KEY, metadata.getString(TITLE_KEY));
                            if (metadata.has(ARTIST_KEY))
                                match.put(ARTIST_KEY, metadata.getString(ARTIST_KEY));
                            if (metadata.has(ALBUM_KEY))
                                match.put(ALBUM_KEY, metadata.getString(ALBUM_KEY));
                        }
                        didFindMatchForCode(match, code);
                    } else
                        didNotFindMatchForCode(code);
                } else {
                    didFailWithException(new Exception("Unknown error"));
                }
                firstRun = false;
                didFinishListeningPass();
            } catch (Exception e) {
                e.printStackTrace();
                Log.e("Fingerprinter", e.getLocalizedMessage());
                didFailWithException(e);
            }
        } while (this.continuous);
    } catch (Exception e) {
        e.printStackTrace();
        Log.e("Fingerprinter", e.getLocalizedMessage());
        didFailWithException(e);
    }

    if (mRecordInstance != null) {
        mRecordInstance.stop();
        mRecordInstance.release();
        mRecordInstance = null;
    }
    this.isRunning = false;
    didFinishListening();
}
From source file:edu.polyu.screamalert.SoundProcessing.java
public static AudioRecord findAudioRecord() {
    try {
        recordBufferSize = AudioRecord.getMinBufferSize(Config.RECORDER_SAMPLERATE, Config.RECORDER_CHANNELS,
                Config.RECORDER_AUDIO_ENCODING);
        nSubframePerBuf = recordBufferSize / frameShift / 2; // e.g., 8192/128/2 = 32
        System.out.println("recordBufferSize: " + recordBufferSize);
        if (recordBufferSize != AudioRecord.ERROR_BAD_VALUE) {
            // check whether we can instantiate the recorder successfully
            AudioRecord recorder = new AudioRecord(AudioSource.DEFAULT, Config.RECORDER_SAMPLERATE,
                    Config.RECORDER_CHANNELS, Config.RECORDER_AUDIO_ENCODING, recordBufferSize);
            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                return recorder;
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    Toast.makeText(thisContext, "Failed to create AudioRecord object", Toast.LENGTH_LONG).show();
    return null;
}
From source file:com.inmobi.ultrapush.InfoRecActivity.java
@Override
protected void onResume() {
    super.onResume();
    TextView tv = (TextView) findViewById(R.id.textview_info_rec);
    tv.setMovementMethod(new ScrollingMovementMethod());
    tv.setText("Testing...");  // TODO: No use...
    tv.invalidate();

    // Show supported sample rates and the corresponding minimum buffer sizes.
    String[] requested = new String[] { "8000", "11025", "16000", "22050", "32000", "44100", "48000", "96000" };
    String st = "sampleRate minBufSize\n";
    ArrayList<String> validated = new ArrayList<String>();
    for (String s : requested) {
        int rate = Integer.parseInt(s);
        int minBufSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        if (minBufSize != AudioRecord.ERROR_BAD_VALUE) {
            validated.add(s);
            st += s + " \t" + Integer.toString(minBufSize) + "\n";
        }
    }
    requested = validated.toArray(new String[0]);
    tv.setText(st);
    tv.invalidate();

    // Test audio sources
    String[] audioSourceString = new String[] { "DEFAULT", "MIC", "VOICE_UPLINK", "VOICE_DOWNLINK",
            "VOICE_CALL", "CAMCORDER", "VOICE_RECOGNITION" };
    int[] audioSourceId = new int[] {
            MediaRecorder.AudioSource.DEFAULT,           // Default audio source
            MediaRecorder.AudioSource.MIC,               // Microphone audio source
            MediaRecorder.AudioSource.VOICE_UPLINK,      // Voice call uplink (Tx) audio source
            MediaRecorder.AudioSource.VOICE_DOWNLINK,    // Voice call downlink (Rx) audio source
            MediaRecorder.AudioSource.VOICE_CALL,        // Voice call uplink + downlink audio source
            MediaRecorder.AudioSource.CAMCORDER,         // Microphone audio source with same orientation as camera if available, the main device microphone otherwise (API level 7)
            MediaRecorder.AudioSource.VOICE_RECOGNITION, // Microphone audio source tuned for voice recognition if available, behaves like DEFAULT otherwise (API level 7)
            // MediaRecorder.AudioSource.VOICE_COMMUNICATION, // Microphone audio source tuned for voice communications such as VoIP. It will for instance take advantage of echo cancellation or automatic gain control if available. It otherwise behaves like DEFAULT if no voice processing is applied. (API level 11)
            // MediaRecorder.AudioSource.REMOTE_SUBMIX,       // Audio source for a submix of audio streams to be presented remotely. (API level 19)
    };
    tv.append("\n-- Audio Source Test --");
    for (String s : requested) {
        int sampleRate = Integer.parseInt(s);
        int recBufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        tv.append("\n(" + Integer.toString(sampleRate) + "Hz, MONO, 16BIT)\n");
        for (int iass = 0; iass < audioSourceId.length; iass++) {
            st = "";
            // wait for the previous AudioRecord to be fully released...
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            AudioRecord record;
            record = new AudioRecord(audioSourceId[iass], sampleRate, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, recBufferSize);
            if (record.getState() == AudioRecord.STATE_INITIALIZED) {
                st += audioSourceString[iass] + " succeeded";
                int as = record.getAudioSource();
                if (as != audioSourceId[iass]) {
                    int i = 0;
                    while (i < audioSourceId.length) {
                        if (as == audioSourceId[i]) { // compare against each known source id
                            break;
                        }
                        i++;
                    }
                    if (i >= audioSourceId.length) {
                        st += "(auto set to \"unknown source\")";
                    } else {
                        st += "(auto set to " + audioSourceString[i] + ")";
                    }
                }
                st += "\n";
            } else {
                st += audioSourceString[iass] + " failed\n";
            }
            record.release();
            record = null;
            tv.append(st);
            tv.invalidate();
        }
    }
}
From source file:com.suan.weclient.fragment.mass.VoiceFragment.java
private void initRecorder() {
    // compute the minimum buffer size for the requested format
    bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
    // create the AudioRecord instance
    audioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes);
}
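The snippet above only prepares the recorder. A minimal follow-up sketch of how such a fragment typically uses it is shown below; the method name, the recording flag, and the empty read-handling branch are illustrative assumptions and are not part of the original VoiceFragment, only the audioRecord and bufferSizeInBytes fields come from it.

private void startRecordingToBuffer() {
    if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        return; // initRecorder() failed or was never called
    }
    audioRecord.startRecording();
    byte[] pcm = new byte[bufferSizeInBytes];
    while (recording) { // 'recording' is an assumed volatile flag toggled elsewhere (e.g., by the UI)
        int read = audioRecord.read(pcm, 0, pcm.length);
        if (read > 0) {
            // hand the PCM bytes to an encoder or write them to a file here
        }
    }
    audioRecord.stop();
    audioRecord.release();
    audioRecord = null;
}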
From source file:com.xzg.fingerprinter.AudioFingerprinter.java
/**
 * The main thread.<br>
 * Records audio and generates the audio fingerprint, then it queries the
 * server for a match and forwards the results to the listener.
 */
public void run() {
    this.isRunning = true;
    try {
        // create the audio buffer
        // get the minimum buffer size
        int minBufferSize = AudioRecord.getMinBufferSize(FREQUENCY, CHANNEL, ENCODING);
        System.out.println("minBufferSize: " + minBufferSize);
        // and the actual buffer size for the audio to record
        // frequency * seconds to record.
        bufferSize = Math.max(minBufferSize, this.FREQUENCY * this.secondsToRecord);
        System.out.println("BufferSize: " + bufferSize);
        audioData = new byte[bufferSize * 2];

        // start recorder
        mRecordInstance = new AudioRecord(MediaRecorder.AudioSource.MIC, FREQUENCY, CHANNEL, ENCODING, minBufferSize);
        willStartListening();
        mRecordInstance.startRecording();
        boolean firstRun = true;
        do {
            try {
                willStartListeningPass();
                long time = System.currentTimeMillis();
                // fill audio buffer with mic data.
                int samplesIn = 0;
                do {
                    samplesIn += mRecordInstance.read(audioData, samplesIn, bufferSize - samplesIn);
                    if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED)
                        break;
                } while (samplesIn < bufferSize);
                Log.d("Fingerprinter", "Audio recorded: " + (System.currentTimeMillis() - time) + " millis");

                // see if the process was stopped.
                if (mRecordInstance.getRecordingState() == AudioRecord.RECORDSTATE_STOPPED
                        || (!firstRun && !this.continuous))
                    break;
                Log.d("Fingerprinter", "Record state: " + mRecordInstance.getRecordingState());

                byte[] audioDataByteFormat = audioData;
                Wave w = new Wave();
                w.data = audioDataByteFormat;
                WaveFileManager wm = new WaveFileManager();
                wm.setWave(w);
                wm.saveWaveAsFile("/sdcard/xzgrecord.wav");
                Clip c = Clip.newInstance(audioDataByteFormat, this.FREQUENCY);

                // create an echoprint codegen wrapper and get the code
                time = System.currentTimeMillis();
                Codegen codegen = new Codegen(c);
                String code = codegen.genCode();
                // Log.d("Fingerprinter", "codegen before");
                // String code = codegen.generate(audioData, samplesIn);
                Log.d("Fingerprinter", "codegen after");
                Log.d("Fingerprinter", "Codegen created in: " + (System.currentTimeMillis() - time) + " millis");
                // Log.d("Fingerprinter", "code length is " + code.length());
                if (code.length() == 0) {
                    // no code?
                    // not enough audio data?
                    continue;
                }

                // fetch data from echonest
                long startTime = System.currentTimeMillis();
                String result = fetchServerResult(code);
                long endTime = System.currentTimeMillis();
                long fetchTime = endTime - startTime;
                Log.d("Fingerprinter", "Results fetched in: " + fetchTime + " millis");
                Log.d("Fingerprinter", "HTTP result: " + result);

                // parse JSON
                JSONObject jsonResult = new JSONObject(result);
                if (jsonResult.has("id"))
                    Log.d("Fingerprinter", "Response code:" + jsonResult.getInt("id"));
                if (jsonResult.has("id")) {
                    if (jsonResult.getInt("id") >= 0) {
                        Hashtable<String, String> match = parseResult(jsonResult);
                        didFindMatchForCode(match, code);
                    } else
                        didNotFindMatchForCode(code);
                } else {
                    didFailWithException(new Exception("Unknown error"));
                }
                // firstRun = false;
                didFinishListeningPass();
            } catch (Exception e) {
                e.printStackTrace();
                Log.e("Fingerprinter", e.getLocalizedMessage());
                didFailWithException(e);
            }
        } while (this.continuous);
    } catch (Exception e) {
        e.printStackTrace();
        Log.e("Fingerprinter", e.getLocalizedMessage());
        didFailWithException(e);
    }

    if (mRecordInstance != null) {
        mRecordInstance.stop();
        mRecordInstance.release();
        mRecordInstance = null;
    }
    this.isRunning = false;
    didFinishListening();
}