List of usage examples for android.speech.RecognitionListener
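The examples below all follow the same basic pattern: create a SpeechRecognizer, attach a RecognitionListener, and start listening with an ACTION_RECOGNIZE_SPEECH intent. The following minimal sketch shows that skeleton on its own; the class name is a placeholder and it assumes the RECORD_AUDIO permission has already been granted.

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;

import java.util.ArrayList;

// Minimal skeleton (not from the examples below): the common
// create / setRecognitionListener / startListening / destroy flow.
public class MinimalSpeechActivity extends Activity {

    private static final String TAG = "MinimalSpeech";
    private SpeechRecognizer recognizer;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // RECORD_AUDIO permission is assumed to be granted at this point.
        recognizer = SpeechRecognizer.createSpeechRecognizer(this);
        recognizer.setRecognitionListener(new RecognitionListener() {
            @Override public void onReadyForSpeech(Bundle params) { }
            @Override public void onBeginningOfSpeech() { }
            @Override public void onRmsChanged(float rmsdB) { }
            @Override public void onBufferReceived(byte[] buffer) { }
            @Override public void onEndOfSpeech() { }

            @Override
            public void onError(int error) {
                Log.w(TAG, "recognition error: " + error);
            }

            @Override
            public void onResults(Bundle results) {
                // N-best hypotheses, best match first.
                ArrayList<String> matches =
                        results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                if (matches != null && !matches.isEmpty()) {
                    Log.d(TAG, "best match: " + matches.get(0));
                }
            }

            @Override public void onPartialResults(Bundle partialResults) { }
            @Override public void onEvent(int eventType, Bundle params) { }
        });

        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        recognizer.startListening(intent);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        recognizer.destroy(); // release the connection to the recognition service
    }
}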
From source file:com.annuletconsulting.homecommand.node.MainFragment.java
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    speechRecognizer = SpeechRecognizer.createSpeechRecognizer(getActivity());
    speechRecognizer.setRecognitionListener(new RecognitionListener() {
        @Override
        public void onReadyForSpeech(Bundle params) {
            for (String key : params.keySet())
                Log.d(TAG, (String) params.get(key));
        }

        @Override
        public void onBeginningOfSpeech() {
            Log.d(TAG, "Begin");
            ignore = false;
        }

        @Override
        public void onRmsChanged(float rmsdB) {
            // Log.d(TAG, "Rms changed: " + rmsdB);
        }

        @Override
        public void onBufferReceived(byte[] buffer) {
            Log.d(TAG, "Buffer Received: " + buffer.toString());
        }

        @Override
        public void onEndOfSpeech() {
            Log.d(TAG, "Endofspeech()");
        }

        @Override
        public void onError(int error) {
            Log.d(TAG, "error: " + error);
        }

        @Override
        public void onResults(Bundle results) {
            Log.d(TAG, "onResults()");
            for (String key : results.keySet()) {
                Log.d(TAG, key + ": " + results.get(key).toString());
                // Iterator<String> it = ((ArrayList<String>) results.get(key)).listIterator();
                // while (it.hasNext())
                //     Log.d(TAG, it.next());
            }
            if (!ignore)
                sendToServer(results.getStringArrayList(RESULTS_KEY).get(0));
        }

        @Override
        public void onPartialResults(Bundle partialResults) {
            // Log.d(TAG, "onPartialResults()");
            // String firstWord = partialResults.getStringArrayList(RESULTS_KEY).get(0).split(" ")[0];
            // Log.d(TAG, firstWord);
            // if (firstWord.length() > 0 && !firstWord.equalsIgnoreCase("computer")
            //         && !firstWord.equalsIgnoreCase("android")) {
            //     Log.d(TAG, "Killing this Recognition.");
            //     ignore = true;
            //     stopRecognizing();
            //     startListening();
            // }
        }

        @Override
        public void onEvent(int eventType, Bundle params) {
            Log.d(TAG, "onEvent() type: " + eventType);
            for (String key : params.keySet())
                Log.d(TAG, (String) params.get(key));
        }
    });
    View v = inflater.inflate(R.layout.main_fragment, null);
    button = (Button) v.findViewById(R.id.listen_button);
    button.setBackgroundResource(R.drawable.stopped);
    button.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            toggleListenMode();
        }
    });
    instance = this;
    return v;
}
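In the example above, RESULTS_KEY is a constant presumably defined elsewhere in the class; the platform key for the recognized strings is SpeechRecognizer.RESULTS_RECOGNITION, and a parallel array of confidence values is available under SpeechRecognizer.CONFIDENCE_SCORES on API 14+. A sketch of an onResults body that reads both (handleCommand is a hypothetical placeholder for app-specific handling such as sendToServer):

@Override
public void onResults(Bundle results) {
    // RESULTS_RECOGNITION holds the N-best hypotheses, best match first.
    ArrayList<String> matches =
            results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // CONFIDENCE_SCORES (API 14+) is a parallel float[]; it may be null.
    float[] scores = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
    if (matches == null || matches.isEmpty()) {
        return;
    }
    String best = matches.get(0);
    float confidence = (scores != null && scores.length > 0) ? scores[0] : -1f;
    Log.d(TAG, "best match: " + best + " (confidence " + confidence + ")");
    handleCommand(best); // hypothetical: replace with app-specific handling
}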
From source file:com.ct.speech.HintReceiver.java
/**
 * Fire an intent to start the speech recognition activity.
 *
 * @param args Argument array with the following string args:
 *             [req code][number of matches][prompt string]
 *             Google speech recognizer
 */
private void startSpeechRecognitionActivity(JSONArray args) {
    // int reqCode = 42; // Hitchhiker? // global now
    int maxMatches = 2;
    String prompt = "";
    String language = "";
    try {
        if (args.length() > 0) {
            // Request code - passed back to the caller on a successful operation
            String temp = args.getString(0);
            reqCode = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Maximum number of matches, 0 means the recognizer decides
            String temp = args.getString(1);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 2) {
            // Optional text prompt
            prompt = args.getString(2);
        }
        if (args.length() > 3) {
            // Optional language specified
            language = args.getString(3);
        }
    } catch (Exception e) {
        Log.e(TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }
    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra("calling_package", "com.ct.BasicAppFrame");
    // If a specific language was requested
    if (!language.equals("")) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    }
    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    if (prompt.length() > 0)
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    // ctx.startActivityForResult(this, intent, reqCode); // removed to try using the recognizer directly
    try {
        this.ctx.runOnUiThread(new Runnable() {
            public void run() {
                final SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer((Context) ctx);
                RecognitionListener listener = new RecognitionListener() {
                    @Override
                    public void onResults(Bundle results) {
                        // closeRecordedFile();
                        sendBackResults(results);
                        ArrayList<String> voiceResults =
                                results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                        if (voiceResults == null) {
                            Log.e(TAG, "No voice results");
                        } else {
                            // Log.d(TAG, "Printing matches: ");
                            for (@SuppressWarnings("unused") String match : voiceResults) {
                                // Log.d(TAG, match);
                            }
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onReadyForSpeech(Bundle params) {
                        // Log.d(TAG, "Ready for speech");
                    }

                    @Override
                    public void onError(int error) {
                        Log.d(TAG, "Error listening for speech: " + error);
                        if (error == SpeechRecognizer.ERROR_NO_MATCH) {
                            sendBackResults(NO_MATCH);
                        } else if (error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT) {
                            sendBackResults(NO_INPUT);
                        } else {
                            speechFailure("unknown error");
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onBeginningOfSpeech() {
                        // Log.d(TAG, "Speech starting");
                        setStartOfSpeech();
                    }

                    @Override // does not fire on Android after Ice Cream Sandwich
                    public void onBufferReceived(byte[] buffer) {
                    }

                    @Override
                    public void onEndOfSpeech() {
                        setEndOfSpeech();
                    }

                    @Override
                    public void onEvent(int eventType, Bundle params) {
                    }

                    @Override
                    public void onPartialResults(Bundle partialResults) {
                    }

                    @Override
                    public void onRmsChanged(float rmsdB) {
                    }
                };
                recognizer.setRecognitionListener(listener);
                Log.d(TAG, "starting speech recognition activity");
                recognizer.startListening(intent);
            }
        });
    } catch (Exception e) {
        e.printStackTrace();
    }
}
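The error handling above distinguishes only ERROR_NO_MATCH and ERROR_SPEECH_TIMEOUT. If more detail is wanted, the remaining SpeechRecognizer error constants can be mapped to readable text with a small helper; this is an illustrative sketch, not part of the original source:

// Illustrative helper: maps SpeechRecognizer error codes to log-friendly text.
private static String describeError(int error) {
    switch (error) {
    case SpeechRecognizer.ERROR_AUDIO:                    return "audio recording error";
    case SpeechRecognizer.ERROR_CLIENT:                   return "client side error";
    case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS: return "insufficient permissions";
    case SpeechRecognizer.ERROR_NETWORK:                  return "network error";
    case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:          return "network timeout";
    case SpeechRecognizer.ERROR_NO_MATCH:                 return "no recognition match";
    case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:          return "recognizer is busy";
    case SpeechRecognizer.ERROR_SERVER:                   return "server error";
    case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:           return "no speech input";
    default:                                              return "unknown error " + error;
    }
}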
From source file:com.mhennessy.mapfly.MainActivity.java
public void setMapLocationBasedOnSpeech() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "com.mhennessy.mapfly");

    SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer(this.getApplicationContext());

    // Stop flying so that messages can be displayed to the user without
    // being overwritten by pitch/roll info.
    setFlyingEnabled(false);

    RecognitionListener listener = new RecognitionListener() {
        @Override
        public void onResults(Bundle results) {
            ArrayList<String> voiceResults = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            if (voiceResults == null) {
                Log.e(TAG, "No voice results");
            } else {
                Log.d(TAG, "Printing matches: ");
                for (String match : voiceResults) {
                    Log.d(TAG, match);
                }
                String bestMatch = voiceResults.get(0);
                setMapLocation(bestMatch);
            }
        }

        @Override
        public void onReadyForSpeech(Bundle params) {
            setTitle("Say something!");
            Log.d(TAG, "Ready for speech");
        }

        @Override
        public void onError(int error) {
            setTitle("Speech Error");
            Log.d(TAG, "Error listening for speech: " + error);
        }

        @Override
        public void onBeginningOfSpeech() {
            Log.d(TAG, "Speech starting");
        }

        @Override
        public void onBufferReceived(byte[] buffer) {
            // no-op
        }

        @Override
        public void onEndOfSpeech() {
            // no-op
        }

        @Override
        public void onEvent(int eventType, Bundle params) {
            // no-op
        }

        @Override
        public void onPartialResults(Bundle partialResults) {
            // no-op
        }

        @Override
        public void onRmsChanged(float rmsdB) {
            // no-op
        }
    };

    recognizer.setRecognitionListener(listener);
    recognizer.startListening(intent);
}
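This example creates the recognizer locally and never releases it. Because SpeechRecognizer holds a connection to the recognition service, a common refinement is to keep the recognizer in a field and release it with the activity; a sketch, assuming a recognizer field:

@Override
protected void onDestroy() {
    super.onDestroy();
    if (recognizer != null) {
        recognizer.cancel();   // stop any in-flight recognition
        recognizer.destroy();  // disconnect from the recognition service
        recognizer = null;
    }
}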
From source file:com.example.haizhu.myvoiceassistant.ui.RobotChatActivity.java
public void initListener() {
    recognitionListener = new RecognitionListener() {
        @Override
        public void onReadyForSpeech(Bundle bundle) {
        }

        @Override
        public void onBeginningOfSpeech() {
        }

        @Override
        public void onRmsChanged(float v) {
            // Scale the wave view with the current input volume.
            final int VTAG = 0xFF00AA01;
            Integer rawHeight = (Integer) speechWave.getTag(VTAG);
            if (rawHeight == null) {
                rawHeight = speechWave.getLayoutParams().height;
                speechWave.setTag(VTAG, rawHeight);
            }
            RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) speechWave.getLayoutParams();
            params.height = (int) (rawHeight * v * 0.01);
            params.height = Math.max(params.height, speechWave.getMeasuredWidth());
            speechWave.setLayoutParams(params);
        }

        @Override
        public void onBufferReceived(byte[] bytes) {
        }

        @Override
        public void onEndOfSpeech() {
        }

        @Override
        public void onError(int error) {
            StringBuilder sb = new StringBuilder();
            switch (error) {
            case SpeechRecognizer.ERROR_AUDIO:
                sb.append("audio recording error");
                break;
            case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
                sb.append("no speech input");
                break;
            case SpeechRecognizer.ERROR_CLIENT:
                sb.append("client side error");
                break;
            case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
                sb.append("insufficient permissions");
                break;
            case SpeechRecognizer.ERROR_NETWORK:
                sb.append("network error");
                break;
            case SpeechRecognizer.ERROR_NO_MATCH:
                sb.append("no recognition match");
                break;
            case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
                sb.append("recognizer is busy");
                break;
            case SpeechRecognizer.ERROR_SERVER:
                sb.append("server error");
                break;
            case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
                sb.append("network timeout");
                break;
            }
            sb.append(", error code: " + error);
            print("Recognition error: " + sb.toString());
            Message message = mhandler.obtainMessage(RECOGNIZE_ERROR);
            message.obj = sb.toString();
            mhandler.sendMessage(message);
        }

        @Override
        public void onResults(Bundle results) {
            ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            print("Recognition results: " + Arrays.toString(nbest.toArray(new String[nbest.size()])));
            String json_res = results.getString("origin_result");
            String results_nlu_json = results.getString("results_nlu");
            Message message = mhandler.obtainMessage(RECOGNIZE_SUCESS);
            message.obj = results_nlu_json;
            mhandler.sendMessage(message);
            // Update the UI with the NLU result.
            ResultsAnalysisManager.analyseResult(results_nlu_json);
            print("result_nlu=\n" + results_nlu_json);
            try {
                print("origin_result=\n" + new JSONObject(json_res).toString(4));
            } catch (Exception e) {
                print("origin_result=[warning: bad json]\n" + json_res);
            }
        }

        @Override
        public void onPartialResults(Bundle bundle) {
        }

        @Override
        public void onEvent(int eventType, Bundle bundle) {
            switch (eventType) {
            case 11:
                // Engine-specific error event.
                String reason = bundle.get("reason") + "";
                print("EVENT_ERROR, " + reason);
                Message message = mhandler.obtainMessage(RECOGNIZE_ERROR);
                message.obj = reason;
                mhandler.sendMessage(message);
                break;
            }
        }
    };

    speechSynthesizerListener = new SpeechSynthesizerListener() {
        @Override public void onStartWorking(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechStart(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onNewDataArrive(SpeechSynthesizer speechSynthesizer, byte[] bytes, boolean b) { }
        @Override public void onBufferProgressChanged(SpeechSynthesizer speechSynthesizer, int i) { }
        @Override public void onSpeechProgressChanged(SpeechSynthesizer speechSynthesizer, int i) { }
        @Override public void onSpeechPause(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechResume(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onCancel(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSynthesizeFinish(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechFinish(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onError(SpeechSynthesizer speechSynthesizer, SpeechError speechError) { }
    };
}
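The onRmsChanged handler above drives the wave view directly from the raw dB value. The API only documents the parameter as the new RMS dB value, so when a bounded 0..1 level is needed for animation, clamping against an assumed range is a common approach. In this sketch the -2..10 dB bounds are an assumption, not part of the API contract, and speechWave refers to the view from the example above:

// Assumed working range for the RMS callback; not guaranteed by the API.
private static final float RMS_MIN_DB = -2.0f;
private static final float RMS_MAX_DB = 10.0f;

@Override
public void onRmsChanged(float rmsdB) {
    // Normalize into 0..1 before driving any view animation.
    float level = (rmsdB - RMS_MIN_DB) / (RMS_MAX_DB - RMS_MIN_DB);
    level = Math.max(0f, Math.min(1f, level));
    speechWave.setScaleY(level); // speechWave as in the example above
}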