List of usage examples for android.speech.SpeechRecognizer#CONFIDENCE_SCORES
String CONFIDENCE_SCORES: key used to retrieve a float array of confidence scores from the Bundle passed to RecognitionListener#onResults(Bundle). Each score ranges from 0.0 (least confident) to 1.0 (most confident), or is -1 when no confidence is available, and the array is aligned with the RESULTS_RECOGNITION list. The key was added in API level 14 (ICE_CREAM_SANDWICH).
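Before the project-specific examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) of how CONFIDENCE_SCORES is typically read inside a RecognitionListener. The class name MinimalListener and the log tag are illustrative only.

import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.SpeechRecognizer;
import android.util.Log;

import java.util.ArrayList;

public class MinimalListener implements RecognitionListener {

    @Override
    public void onResults(Bundle results) {
        ArrayList<String> hypotheses = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        //May be null on recognizers or API levels that do not report confidences
        float[] confidences = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
        if (hypotheses == null) {
            return;
        }
        for (int i = 0; i < hypotheses.size(); i++) {
            //A value of -1 means the confidence is unavailable for that hypothesis
            float conf = (confidences != null && i < confidences.length) ? confidences[i] : -1f;
            Log.i("MinimalListener", hypotheses.get(i) + " (confidence: " + conf + ")");
        }
    }

    //The remaining RecognitionListener callbacks are required by the interface but omitted here for brevity
    @Override public void onReadyForSpeech(Bundle params) {}
    @Override public void onBeginningOfSpeech() {}
    @Override public void onRmsChanged(float rmsdB) {}
    @Override public void onBufferReceived(byte[] buffer) {}
    @Override public void onEndOfSpeech() {}
    @Override public void onError(int error) {}
    @Override public void onPartialResults(Bundle partialResults) {}
    @Override public void onEvent(int eventType, Bundle params) {}
}

Such a listener would be attached with speechRecognizer.setRecognitionListener(new MinimalListener()) before calling speechRecognizer.startListening(intent).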
From source file: ai.api.unityhelper.RecognitionHelper.java
protected void onResults(final Bundle results) {
    JSONObject resultJson = new JSONObject();
    try {
        resultJson.put("status", "success");
        final ArrayList<String> recognitionResults = results
                .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        if (recognitionResults == null || recognitionResults.size() == 0) {
            resultJson.put("recognitionResults", new JSONArray());
        } else {
            resultJson.put("recognitionResults", new JSONArray(recognitionResults));
            float[] rates = null;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                rates = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
                if (rates != null && rates.length > 0) {
                    final JSONArray ratesArray = new JSONArray();
                    for (int i = 0; i < rates.length; i++) {
                        ratesArray.put(rates[i]);
                    }
                    resultJson.put("confidence", ratesArray);
                }
            }
        }
    } catch (JSONException je) {
        Log.e(TAG, je.getMessage(), je);
    }
    clearRecognizer();
    String resultJsonString = resultJson.toString();
    if (resultObject != null) {
        resultObject.setResult(resultJsonString);
    }
}
From source file: atlc.granadaaccessibilityranking.VoiceActivity.java
/********************************************************************************************************
 * This class implements the {@link android.speech.RecognitionListener} interface,
 * thus it implements its methods. However, not all of them were interesting to us:
 ********************************************************************************************************/
@SuppressLint("InlinedApi")
/*
 * (non-Javadoc)
 *
 * Invoked when the ASR provides recognition results
 *
 * @see android.speech.RecognitionListener#onResults(android.os.Bundle)
 */
@Override
public void onResults(Bundle results) {
    if (results != null) {
        //Checks the API level because the confidence scores are supported only from API level 14:
        //http://developer.android.com/reference/android/speech/SpeechRecognizer.html#CONFIDENCE_SCORES
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            //Processes the recognition results and their confidences
            //Attention: it is not RecognizerIntent.EXTRA_RESULTS, that key is for intents (see the ASRWithIntent app)
            processAsrResults(results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION),
                    results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES));
        } else {
            //Processes the recognition results without confidence scores
            processAsrResults(results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION), null);
        }
    } else {
        //Processes recognition errors
        processAsrError(SpeechRecognizer.ERROR_NO_MATCH);
    }
}
From source file: conversandroid.RichASR.java
/********************************************************************************************************
 * Process ASR events
 ********************************************************************************************************/
/*
 * (non-Javadoc)
 *
 * Invoked when the ASR provides recognition results
 *
 * @see android.speech.RecognitionListener#onResults(android.os.Bundle)
 */
@Override
public void onResults(Bundle results) {
    if (results != null) {
        Log.i(LOGTAG, "ASR results received ok");
        ((TextView) findViewById(R.id.feedbackTxt)).setText("Results ready :D");

        //Retrieves the N-best list and the confidences from the ASR result
        ArrayList<String> nBestList = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        float[] nBestConfidences = null;
        //Checks the API level because the confidence scores are supported only from API level 14
        if (Build.VERSION.SDK_INT >= 14)
            nBestConfidences = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);

        //Creates a collection of strings, each one with a recognition result and its confidence,
        //following the structure "Phrase matched (conf: 0.5)"
        ArrayList<String> nBestView = new ArrayList<String>();
        for (int i = 0; i < nBestList.size(); i++) {
            if (nBestConfidences != null && nBestConfidences[i] >= 0)
                nBestView.add(nBestList.get(i) + " (conf: " + String.format("%.2f", nBestConfidences[i]) + ")");
            else
                nBestView.add(nBestList.get(i) + " (no confidence value available)");
        }

        //Includes the collection in the ListView of the GUI
        setListView(nBestView);
    } else {
        //There was a recognition error
        Log.e(LOGTAG, "ASR results null");
        ((TextView) findViewById(R.id.feedbackTxt)).setText("Error");
    }
    stopListening();
}
From source file: com.glabs.homegenie.util.VoiceControl.java
@Override
public void onResults(Bundle results) {
    if ((results != null) && results.containsKey(SpeechRecognizer.RESULTS_RECOGNITION)) {
        List<String> heard = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        float[] scores = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
        //Only the top-ranked hypothesis is acted upon; the confidence scores are retrieved but not used here
        for (String s : heard) {
            //The Toast duration must be Toast.LENGTH_SHORT or Toast.LENGTH_LONG
            Toast.makeText(_hgcontext.getApplicationContext(), "Executing: " + s, Toast.LENGTH_LONG).show();
            interpretInput(s);
            break;
        }
    }
}