List of usage examples for android.speech.RecognizerIntent.ACTION_RECOGNIZE_SPEECH
String ACTION_RECOGNIZE_SPEECH: starts an activity that prompts the user for speech and sends it through a speech recognizer.
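The examples below consume this intent in one of two ways: passing it to startActivityForResult(), with the candidate transcriptions returned to onActivityResult() under RecognizerIntent.EXTRA_RESULTS, or handing it to a SpeechRecognizer via startListening(). A minimal sketch of the first pattern follows; the request code REQUEST_SPEECH and the handleMatches() helper are illustrative names, not taken from any of the listed projects.

    // Minimal sketch of the startActivityForResult pattern, assumed to live inside an Activity.
    private static final int REQUEST_SPEECH = 1001; // illustrative request code

    private void startSpeechInput() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
        startActivityForResult(intent, REQUEST_SPEECH);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_SPEECH && resultCode == RESULT_OK && data != null) {
            // Candidate transcriptions, best match first
            ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            if (matches != null && !matches.isEmpty()) {
                handleMatches(matches); // hypothetical handler
            }
        }
    }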
From source file:com.glabs.homegenie.util.VoiceControl.java
public void startListen() {
    _recognizer = getSpeechRecognizer();
    _recognizer.setRecognitionListener(this);
    // speech recognition is supported - detect user button clicks
    // start the speech recognition intent passing required data
    Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // indicate package
    //recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass().getPackage().getName());
    // message to display while listening
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Your wish is my command!");
    // set speech model
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // specify number of results to retrieve
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    // start listening
    //startActivityForResult(listenIntent, VR_REQUEST);
    //startActivityForResult(recognizerIntent, VR_REQUEST);
    _recognizer.startListening(recognizerIntent);
}
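This example hands the intent to a SpeechRecognizer instead of starting the recognizer activity, so results arrive through the RecognitionListener registered with setRecognitionListener(this). A minimal sketch of the matching callback, assuming a hypothetical handleResult() helper:

    // Sketch of the RecognitionListener callback that receives results from startListening().
    @Override
    public void onResults(Bundle results) {
        ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        if (matches != null && !matches.isEmpty()) {
            handleResult(matches.get(0)); // hypothetical handler for the top match
        }
    }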
From source file:net.olejon.mdapp.Icd10Activity.java
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
        case android.R.id.home: {
            NavUtils.navigateUpFromSameTask(this);
            return true;
        }
        case R.id.icd10_menu_voice_search: {
            try {
                Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "nb-NO");
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
                startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
            } catch (Exception e) {
                new MaterialDialog.Builder(mContext)
                        .title(getString(R.string.device_not_supported_dialog_title))
                        .content(getString(R.string.device_not_supported_dialog_message))
                        .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                        .contentColorRes(R.color.black)
                        .positiveColorRes(R.color.dark_blue)
                        .show();
            }
            return true;
        }
        default: {
            return super.onOptionsItemSelected(item);
        }
    }
}
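The try/catch above treats a failure to start the activity as "device not supported". An alternative, sketched below with an illustrative isSpeechRecognitionAvailable() helper, is to ask the PackageManager up front whether any activity handles ACTION_RECOGNIZE_SPEECH:

    // Sketch: check for a speech-recognition activity before offering voice search.
    private boolean isSpeechRecognitionAvailable(Context context) {
        List<ResolveInfo> activities = context.getPackageManager()
                .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        return !activities.isEmpty();
    }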
From source file:org.apache.cordova.nodialogspeechrecognizer.SpeechRecognizer.java
/**
 * Fire an intent to start the speech recognition activity.
 *
 * @param args Argument array with the following string args: [req code][number of matches][prompt string]
 */
private void startSpeechRecognitionActivity(JSONArray args) {
    int maxMatches = 1;
    String prompt = "";
    String language = Locale.getDefault().toString();
    try {
        if (args.length() > 0) {
            // Maximum number of matches, 0 means the recognizer decides
            String temp = args.getString(0);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Optional text prompt
            prompt = args.getString(1);
        }
        if (args.length() > 2) {
            // Optional language specified
            language = args.getString(2);
        }
    } catch (Exception e) {
        Log.e(LOG_TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }
    // Create the intent and set parameters
    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, cordova.getActivity().getPackageName());
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    if (!prompt.equals(""))
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    try {
        Handler loopHandler = new Handler(Looper.getMainLooper());
        loopHandler.post(new Runnable() {
            @Override
            public void run() {
                speech.startListening(intent);
            }
        });
    } catch (Exception e) {
        Log.e("error", "er", e);
    }
    // cordova.startActivityForResult(this, intent, REQUEST_CODE);
}
From source file:com.phonegap.plugins.speech.XSpeechRecognizer.java
/**
 * Fire an intent to start the speech recognition activity.
 *
 * @param args Argument array with the following string args: [req code][number of matches]
 */
private void startSpeechRecognitionActivity(JSONArray args) {
    int maxMatches = 0;
    String language = Locale.getDefault().toString();
    try {
        if (args.length() > 0) {
            // Maximum number of matches, 0 means the recognizer decides
            String temp = args.getString(0);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Language
            language = args.getString(1);
        }
    } catch (Exception e) {
        Log.e(TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }
    // Create the intent and set parameters
    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "voice.recognition.test");
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    Handler loopHandler = new Handler(Looper.getMainLooper());
    loopHandler.post(new Runnable() {
        @Override
        public void run() {
            recognizer.startListening(intent);
        }
    });
}
From source file:com.surveyorexpert.TalkToMe.java
private void sendRecognizeIntent() {
    // Intent
    intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Press when complete");
    intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 100);
    startActivityForResult(intent, SPEECH_REQUEST_CODE);
    // Toast.makeText(getBaseContext(), "TalkToMe Done", Toast.LENGTH_LONG).show();
}
From source file:me.tylerbwong.pokebase.gui.fragments.PokebaseFragment.java
@Override
public void onButtonClicked(int buttonCode) {
    if (buttonCode == MaterialSearchBar.BUTTON_SPEECH) {
        Intent voiceIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.speech_prompt));
        startActivityForResult(voiceIntent, RECOGNIZER_REQ_CODE);
    }
}
From source file:com.todoroo.astrid.voice.VoiceInputAssistant.java
/**
 * Fire an intent to start the speech recognition activity.
 * This is fired by the listener on the microphone-button.
 *
 * @param prompt Specify the R.string.string_id resource for the prompt-text during voice-recognition here
 */
public void startVoiceRecognitionActivity(Fragment fragment, int prompt) {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, ContextManager.getContext().getString(prompt));
    String detailMessage = "Error! No Fragment or Activity was registered to handle this voiceinput-request!";
    if (activity != null)
        activity.startActivityForResult(intent, requestCode);
    else if (fragment != null)
        fragment.startActivityForResult(intent, requestCode);
    else
        Log.e("Astrid VoiceInputAssistant", detailMessage, new IllegalStateException(detailMessage));
}
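A call site for this helper passes the hosting Fragment (or relies on the registered activity) plus a prompt string resource; a one-line usage sketch with R.string.voice_input_prompt as an assumed resource name:

    // Hypothetical call site, e.g. from a microphone-button click listener inside a Fragment.
    voiceInputAssistant.startVoiceRecognitionActivity(this, R.string.voice_input_prompt);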
From source file:com.annuletconsulting.homecommand.node.MainFragment.java
protected void startRecognizing() {
    isListening = false;
    button.setBackgroundResource(R.drawable.listening);
    Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass().getPackage().getName());
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 3); // 3 seems to be the minimum; when I set to 1 I get 5.
    speechRecognizer.startListening(recognizerIntent);
}
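Because this example enables EXTRA_PARTIAL_RESULTS, interim hypotheses can be delivered through RecognitionListener.onPartialResults() before the final onResults() call. A minimal sketch, assuming a hypothetical showInterimText() helper:

    // Sketch of receiving the interim results enabled by EXTRA_PARTIAL_RESULTS.
    @Override
    public void onPartialResults(Bundle partialResults) {
        ArrayList<String> partial = partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        if (partial != null && !partial.isEmpty()) {
            showInterimText(partial.get(0)); // hypothetical UI update with the current hypothesis
        }
    }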
From source file:net.olejon.mdapp.Icd10ChapterActivity.java
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
        case android.R.id.home: {
            NavUtils.navigateUpFromSameTask(this);
            return true;
        }
        case R.id.icd10_chapter_menu_voice_search: {
            try {
                Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "nb-NO");
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
                startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
            } catch (Exception e) {
                new MaterialDialog.Builder(mContext)
                        .title(getString(R.string.device_not_supported_dialog_title))
                        .content(getString(R.string.device_not_supported_dialog_message))
                        .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                        .contentColorRes(R.color.black)
                        .positiveColorRes(R.color.dark_blue)
                        .show();
            }
            return true;
        }
        default: {
            return super.onOptionsItemSelected(item);
        }
    }
}
From source file:net.olejon.mdapp.DiseasesAndTreatmentsActivity.java
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
        case android.R.id.home: {
            NavUtils.navigateUpFromSameTask(this);
            return true;
        }
        case R.id.diseases_and_treatments_menu_voice_search: {
            String language = (mSearchLanguage.equals("")) ? "en-US" : "nb-NO";
            try {
                Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
                startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
            } catch (Exception e) {
                new MaterialDialog.Builder(mContext)
                        .title(getString(R.string.device_not_supported_dialog_title))
                        .content(getString(R.string.device_not_supported_dialog_message))
                        .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                        .contentColorRes(R.color.black)
                        .positiveColorRes(R.color.dark_blue)
                        .show();
            }
            return true;
        }
        case R.id.diseases_and_treatments_menu_clear_recent_searches: {
            clearRecentSearches();
            return true;
        }
        default: {
            return super.onOptionsItemSelected(item);
        }
    }
}