List of usage examples for android.speech.SpeechRecognizer.RESULTS_RECOGNITION

String RESULTS_RECOGNITION

Key used to retrieve an ArrayList<String> of recognition hypotheses, best match first, from the Bundle passed to RecognitionListener.onResults() and onPartialResults().
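The project examples below all follow the same pattern: create a SpeechRecognizer, register a RecognitionListener, start listening with a RecognizerIntent, and read the N-best list out of the results Bundle under RESULTS_RECOGNITION. As orientation, here is a minimal sketch of that pattern, assuming the app already holds the RECORD_AUDIO permission and a recognition service is installed; the class name MinimalAsrActivity and the "ASR" log tag are placeholders, not part of the Android API.

import java.util.ArrayList;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;

public class MinimalAsrActivity extends Activity {

    private SpeechRecognizer recognizer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        recognizer = SpeechRecognizer.createSpeechRecognizer(this);
        recognizer.setRecognitionListener(new RecognitionListener() {
            @Override
            public void onResults(Bundle results) {
                // RESULTS_RECOGNITION maps to the N-best hypotheses, best match first
                ArrayList<String> matches =
                        results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                if (matches != null && !matches.isEmpty()) {
                    Log.d("ASR", "Best match: " + matches.get(0));
                }
            }

            @Override
            public void onError(int error) {
                Log.d("ASR", "Recognition error: " + error);
            }

            // RecognitionListener requires all callbacks; the rest are no-ops here
            @Override public void onReadyForSpeech(Bundle params) { }
            @Override public void onBeginningOfSpeech() { }
            @Override public void onRmsChanged(float rmsdB) { }
            @Override public void onBufferReceived(byte[] buffer) { }
            @Override public void onEndOfSpeech() { }
            @Override public void onPartialResults(Bundle partialResults) { }
            @Override public void onEvent(int eventType, Bundle params) { }
        });

        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        recognizer.startListening(intent); // must be called on the main thread
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        recognizer.destroy(); // release the recognizer when done
    }
}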
From source file:com.github.zagum.speechrecognitionview.sample.MainActivity.java
private void showResults(Bundle results) {
    ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    Toast.makeText(this, matches.get(0), Toast.LENGTH_LONG).show();
}
From source file:ai.api.unityhelper.RecognitionHelper.java
protected void onResults(final Bundle results) {
    JSONObject resultJson = new JSONObject();
    try {
        resultJson.put("status", "success");
        final ArrayList<String> recognitionResults =
                results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        if (recognitionResults == null || recognitionResults.size() == 0) {
            resultJson.put("recognitionResults", new JSONArray());
        } else {
            resultJson.put("recognitionResults", new JSONArray(recognitionResults));
            float[] rates = null;
            // Confidence scores are only available from API level 14 (Ice Cream Sandwich)
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
                rates = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
                if (rates != null && rates.length > 0) {
                    final JSONArray ratesArray = new JSONArray();
                    for (int i = 0; i < rates.length; i++) {
                        ratesArray.put(rates[i]);
                    }
                    resultJson.put("confidence", ratesArray);
                }
            }
        }
    } catch (JSONException je) {
        Log.e(TAG, je.getMessage(), je);
    }
    clearRecognizer();
    String resultJsonString = resultJson.toString();
    if (resultObject != null) {
        resultObject.setResult(resultJsonString);
    }
}
From source file:com.vyasware.vaani.MainActivity.java
@Override
public void onResults(Bundle results) {
    Log.i(LOG_TAG, "onResults");
    ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // Take the top hypothesis and act on it off the UI thread
    input = matches.get(0);
    returnedText.setText(input);
    new Thread(new Runnable() {
        @Override
        public void run() {
            performAction(input);
        }
    }).start();
}
From source file:com.chexiaoya.gaodemapdemo.SpeechSearchActivity.java
@Override
public void onResults(Bundle results) {
    if (results != null) {
        long end2finish = System.currentTimeMillis() - speechEndTime;
        status = STATUS_None;
        ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        // The Baidu recognizer also returns its raw JSON under the
        // non-standard "origin_result" key (not used further in this sample)
        String json_res = results.getString("origin_result");
        String strEnd2Finish = "";
        if (end2finish < 60 * 1000) {
            strEnd2Finish = "(waited " + end2finish + "ms)";
        }
        Log.i(TAG, "onResults-----> " + nbest.get(0) + strEnd2Finish);
        String result = nbest.get(0) + strEnd2Finish;
        mResultText.setText(result);
    }
}
From source file:com.chexiaoya.gaodemapdemo.SpeechSearchActivity.java
@Override
public void onPartialResults(Bundle partialResults) {
    ArrayList<String> nbest = partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // Guard against a missing list: not every partial-result bundle carries one
    if (nbest != null && nbest.size() > 0) {
        Log.i(TAG, "onPartialResults----> " + nbest.get(0));
        mResultText.setText(nbest.get(0));
    }
}
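Note that onPartialResults() only fires if partial results were requested up front. A one-line sketch, assuming intent is the recognition Intent later handed to startListening():

// Without this extra, most recognizer services never call onPartialResults()
intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);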
From source file:com.ct.speech.HintReceiver.java
/**
 * Fire an intent to start the speech recognition activity.
 *
 * @param args Argument array with the following string args:
 *             [req code][number of matches][prompt string]
 */
private void startSpeechRecognitionActivity(JSONArray args) {
    int maxMatches = 2;
    String prompt = "";
    String language = "";
    try {
        if (args.length() > 0) {
            // Request code - passed back to the caller on a successful operation
            // (reqCode is a field shared with the result callback)
            reqCode = Integer.parseInt(args.getString(0));
        }
        if (args.length() > 1) {
            // Maximum number of matches; 0 means the recognizer decides
            maxMatches = Integer.parseInt(args.getString(1));
        }
        if (args.length() > 2) {
            // Optional text prompt
            prompt = args.getString(2);
        }
        if (args.length() > 3) {
            // Optional language
            language = args.getString(3);
        }
    } catch (Exception e) {
        Log.e(TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra("calling_package", "com.ct.BasicAppFrame");
    if (!language.equals("")) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    }
    if (maxMatches > 0) {
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    }
    if (prompt.length() > 0) {
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    }

    // Uses the SpeechRecognizer API directly instead of startActivityForResult()
    try {
        this.ctx.runOnUiThread(new Runnable() {
            public void run() {
                final SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer((Context) ctx);
                RecognitionListener listener = new RecognitionListener() {
                    @Override
                    public void onResults(Bundle results) {
                        sendBackResults(results);
                        ArrayList<String> voiceResults =
                                results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                        if (voiceResults == null) {
                            Log.e(TAG, "No voice results");
                        } else {
                            for (String match : voiceResults) {
                                Log.d(TAG, match);
                            }
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onReadyForSpeech(Bundle params) {
                    }

                    @Override
                    public void onError(int error) {
                        Log.d(TAG, "Error listening for speech: " + error);
                        if (error == SpeechRecognizer.ERROR_NO_MATCH) {
                            sendBackResults(NO_MATCH);
                        } else if (error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT) {
                            sendBackResults(NO_INPUT);
                        } else {
                            speechFailure("unknown error");
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onBeginningOfSpeech() {
                        setStartOfSpeech();
                    }

                    @Override // does not fire on Android after Ice Cream Sandwich
                    public void onBufferReceived(byte[] buffer) {
                    }

                    @Override
                    public void onEndOfSpeech() {
                        setEndOfSpeech();
                    }

                    @Override
                    public void onEvent(int eventType, Bundle params) {
                    }

                    @Override
                    public void onPartialResults(Bundle partialResults) {
                    }

                    @Override
                    public void onRmsChanged(float rmsdB) {
                    }
                };
                recognizer.setRecognitionListener(listener);
                Log.d(TAG, "starting speech recognition activity");
                recognizer.startListening(intent);
            }
        });
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:com.baidu.android.voicedemo.ActivityTouch.java
@Override
public void onResults(Bundle results) {
    long end2finish = System.currentTimeMillis() - speechEndTime;
    ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    print("results: " + Arrays.toString(nbest.toArray(new String[nbest.size()])));
    // The Baidu recognizer also returns its raw JSON under the
    // non-standard "origin_result" key
    String json_res = results.getString("origin_result");
    try {
        print("origin_result=\n" + new JSONObject(json_res).toString(4));
    } catch (Exception e) {
        print("origin_result=[warning: bad json]\n" + json_res);
    }
    btn.setText("");
    String strEnd2Finish = "";
    if (end2finish < 60 * 1000) {
        strEnd2Finish = "(waited " + end2finish + "ms)";
    }
    txtResult.setText(nbest.get(0) + strEnd2Finish);
}
From source file:com.baidu.android.voicedemo.ActivityTouch.java
@Override
public void onPartialResults(Bundle partialResults) {
    ArrayList<String> nbest = partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // Guard against a missing list before reading the top hypothesis
    if (nbest != null && nbest.size() > 0) {
        print("~" + Arrays.toString(nbest.toArray(new String[0])));
        txtResult.setText(nbest.get(0));
    }
}
From source file:conversandroid.RichASR.java
/**
 * Processes ASR events.
 *
 * Invoked when the ASR provides recognition results.
 *
 * @see android.speech.RecognitionListener#onResults(android.os.Bundle)
 */
@Override
public void onResults(Bundle results) {
    if (results != null) {
        Log.i(LOGTAG, "ASR results received ok");
        ((TextView) findViewById(R.id.feedbackTxt)).setText("Results ready :D");

        // Retrieves the N-best list and the confidences from the ASR result
        ArrayList<String> nBestList = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        float[] nBestConfidences = null;
        if (Build.VERSION.SDK_INT >= 14) {
            // Confidence scores are supported only from API level 14
            nBestConfidences = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
        }

        // Creates a collection of strings, each one with a recognition result and its
        // confidence, following the structure "Phrase matched (conf: 0.5)"
        ArrayList<String> nBestView = new ArrayList<String>();
        for (int i = 0; i < nBestList.size(); i++) {
            if (nBestConfidences != null && nBestConfidences[i] >= 0) {
                nBestView.add(nBestList.get(i) + " (conf: "
                        + String.format("%.2f", nBestConfidences[i]) + ")");
            } else {
                nBestView.add(nBestList.get(i) + " (no confidence value available)");
            }
        }

        // Includes the collection in the ListView of the GUI
        setListView(nBestView);
    } else {
        // There was a recognition error
        Log.e(LOGTAG, "ASR results null");
        ((TextView) findViewById(R.id.feedbackTxt)).setText("Error");
    }
    stopListening();
}
From source file:atlc.granadaaccessibilityranking.VoiceActivity.java
/**
 * This class implements the {@link android.speech.RecognitionListener} interface,
 * so it implements all of its methods, though not all of them were of interest.
 *
 * Invoked when the ASR provides recognition results.
 *
 * @see android.speech.RecognitionListener#onResults(android.os.Bundle)
 */
@SuppressLint("InlinedApi")
@Override
public void onResults(Bundle results) {
    if (results != null) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
            // Confidence scores are supported only from API level 14:
            // http://developer.android.com/reference/android/speech/SpeechRecognizer.html#CONFIDENCE_SCORES
            // Note: the key is not RecognizerIntent.EXTRA_RESULTS; that one is for
            // intent-based recognition (see the ASRWithIntent app)
            processAsrResults(results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION),
                    results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES));
        } else {
            // Processes the recognition results without confidences
            processAsrResults(results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION), null);
        }
    } else {
        // Processes recognition errors
        processAsrError(SpeechRecognizer.ERROR_NO_MATCH);
    }
}
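A detail shared by the last two examples: when CONFIDENCE_SCORES is present, the Android documentation says the float array should be the same size as the RESULTS_RECOGNITION list, so the two can be paired index by index. A minimal sketch of that pairing, assuming it runs inside onResults(Bundle results); showHypothesis is a hypothetical helper, not an Android API:

ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
float[] conf = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
if (nbest != null) {
    for (int i = 0; i < nbest.size(); i++) {
        // conf may be null, or hold -1, when the service reports no confidence
        float c = (conf != null && i < conf.length) ? conf[i] : -1f;
        showHypothesis(nbest.get(i), c);
    }
}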