List of usage examples for android.speech RecognizerIntent ACTION_RECOGNIZE_SPEECH
String ACTION_RECOGNIZE_SPEECH - Starts an activity that prompts the user for speech and sends it through a speech recognizer. Constant value: "android.speech.action.RECOGNIZE_SPEECH".
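Most of the examples below follow the same pattern: build an Intent with ACTION_RECOGNIZE_SPEECH, attach a language model and any optional extras, launch it with startActivityForResult(), and read the recognized phrases back from RecognizerIntent.EXTRA_RESULTS in onActivityResult(). A minimal sketch of that round trip, assuming it lives inside an Activity and using a placeholder request code REQUEST_SPEECH:

private static final int REQUEST_SPEECH = 0;

private void startSpeechRecognition() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // Guard against devices without a recognizer activity
    if (intent.resolveActivity(getPackageManager()) != null) {
        startActivityForResult(intent, REQUEST_SPEECH);
    }
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SPEECH && resultCode == RESULT_OK && data != null) {
        // EXTRA_RESULTS holds the candidate transcriptions, best match first
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        // e.g. use matches.get(0) as the recognized text
    }
}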
From source file: conversandroid.SimpleASR.java

/**
 * Sets up the listener for the button that the user
 * must click to start talking
 */
@SuppressLint("DefaultLocale")
private void setSpeakButton() {
    // Gain reference to speak button
    Button speak = (Button) findViewById(R.id.speech_btn);
    final PackageManager packM = getPackageManager();

    // Set up click listener
    speak.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Speech recognition does not currently work on simulated devices;
            // if the user is attempting to run the app in a simulated device
            // they will get a Toast
            if ("generic".equals(Build.BRAND.toLowerCase())) {
                Toast toast = Toast.makeText(getApplicationContext(),
                        "ASR is not supported on virtual devices", Toast.LENGTH_SHORT);
                toast.show();
                Log.d(LOGTAG, "ASR attempt on virtual device");
            } else {
                // Find out whether speech recognition is supported
                List<ResolveInfo> intActivities = packM
                        .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);

                if (intActivities.size() != 0) {
                    setRecognitionParams(); // Read speech recognition parameters from GUI
                    listen(); // Set up the recognizer with the parameters and start listening
                } else {
                    Toast toast = Toast.makeText(getApplicationContext(),
                            "ASR not supported", Toast.LENGTH_SHORT);
                    toast.show();
                    Log.d(LOGTAG, "ASR not supported");
                }
            }
        }
    });
}
From source file: org.alfresco.mobile.android.application.fragments.search.SearchFragment.java

private void speechToText() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, Locale.getDefault());
    try {
        if (intent.resolveActivity(getActivity().getPackageManager()) == null) {
            AlfrescoNotificationManager.getInstance(getActivity()).showAlertCrouton(getActivity(),
                    getString(R.string.feature_disable));
            return;
        }
        startActivityForResult(intent, RequestCode.TEXT_TO_SPEECH);
    } catch (ActivityNotFoundException a) {
        AlfrescoNotificationManager.getInstance(getActivity()).showToast(R.string.file_editor_error_speech);
    }
}
From source file: com.nicefontaine.seanachie.ui.imagestory.ImageStoryFragment.java

private Intent getSpeechIntent(String text) {
    return new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
            .putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
            .putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault())
            .putExtra(RecognizerIntent.EXTRA_PROMPT, text);
}
From source file: com.activiti.android.ui.fragments.form.picker.IdmPickerFragment.java

private void speechToText() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, Locale.getDefault());
    try {
        startActivityForResult(intent, RequestCode.TEXT_TO_SPEECH);
    } catch (ActivityNotFoundException a) {
        // Error
    }
}
From source file: com.pixplicity.castdemo.MainActivity.java

/**
 * Android voice recognition
 */
private void startVoiceRecognitionActivity() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en-US");
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.message_to_cast));
    startActivityForResult(intent, REQUEST_SPEECH_RECOGNITION);
}
From source file: ir.occc.android.irc.activity.ConversationActivity.java

/**
 * On resume
 */
@Override
public void onResume() {
    // Register the receivers as early as possible, otherwise we may lose a broadcast message
    channelReceiver = new ConversationReceiver(server.getId(), this);
    registerReceiver(channelReceiver, new IntentFilter(Broadcast.CONVERSATION_MESSAGE));
    registerReceiver(channelReceiver, new IntentFilter(Broadcast.CONVERSATION_NEW));
    registerReceiver(channelReceiver, new IntentFilter(Broadcast.CONVERSATION_REMOVE));
    registerReceiver(channelReceiver, new IntentFilter(Broadcast.CONVERSATION_TOPIC));

    serverReceiver = new ServerReceiver(this);
    registerReceiver(serverReceiver, new IntentFilter(Broadcast.SERVER_UPDATE));

    super.onResume();

    // Check if speech recognition is enabled and available
    if (new Settings(this).isVoiceRecognitionEnabled()) {
        PackageManager pm = getPackageManager();
        Button speechButton = (Button) findViewById(R.id.speech);
        List<ResolveInfo> activities = pm
                .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);

        if (activities.size() != 0) {
            ((Button) findViewById(R.id.speech)).setOnClickListener(new SpeechClickListener(this));
            speechButton.setVisibility(View.VISIBLE);
        }
    }

    // Start service
    Intent intent = new Intent(this, IRCService.class);
    intent.setAction(IRCService.ACTION_FOREGROUND);
    startService(intent);
    bindService(intent, this, 0);

    if (!server.isConnected()) {
        ((EditText) findViewById(R.id.input)).setEnabled(false);
    } else {
        ((EditText) findViewById(R.id.input)).setEnabled(true);
    }

    // Optimization - cache field lookup
    Collection<Conversation> mConversations = server.getConversations();
    MessageListAdapter mAdapter;

    // Fill view with messages that have been buffered while paused
    for (Conversation conversation : mConversations) {
        String name = conversation.getName();
        mAdapter = pagerAdapter.getItemAdapter(name);

        if (mAdapter != null) {
            mAdapter.addBulkMessages(conversation.getBuffer());
            conversation.clearBuffer();
        } else {
            // Was conversation created while we were paused?
            if (pagerAdapter.getPositionByName(name) == -1) {
                onNewConversation(name);
            }
        }

        // Clear new message notifications for the selected conversation
        if (conversation.getStatus() == Conversation.STATUS_SELECTED && conversation.getNewMentions() > 0) {
            Intent ackIntent = new Intent(this, IRCService.class);
            ackIntent.setAction(IRCService.ACTION_ACK_NEW_MENTIONS);
            ackIntent.putExtra(IRCService.EXTRA_ACK_SERVERID, serverId);
            ackIntent.putExtra(IRCService.EXTRA_ACK_CONVTITLE, name);
            startService(ackIntent);
        }
    }

    // Remove views for conversations that ended while we were paused
    int numViews = pagerAdapter.getCount();
    if (numViews > mConversations.size()) {
        for (int i = 0; i < numViews; ++i) {
            if (!mConversations.contains(pagerAdapter.getItem(i))) {
                pagerAdapter.removeConversation(i--);
                --numViews;
            }
        }
    }

    // Join channel that has been selected in JoinActivity (onActivityResult())
    if (joinChannelBuffer != null) {
        new Thread() {
            @Override
            public void run() {
                binder.getService().getConnection(serverId).joinChannel(joinChannelBuffer);
                joinChannelBuffer = null;
            }
        }.start();
    }

    server.setIsForeground(true);
}
From source file: br.ufrgs.ufrgsmapas.libs.SearchBox.java

/***
 * Start the voice input activity manually
 */
public void startVoiceRecognition() {
    if (isMicEnabled()) {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "pt-BR");
        if (mContainerActivity != null) {
            mContainerActivity.startActivityForResult(intent, VOICE_RECOGNITION_CODE);
        } else if (mContainerFragment != null) {
            mContainerFragment.startActivityForResult(intent, VOICE_RECOGNITION_CODE);
        } else if (mContainerSupportFragment != null) {
            mContainerSupportFragment.startActivityForResult(intent, VOICE_RECOGNITION_CODE);
        }
    }
}
From source file: org.catrobat.catroid.ui.ScratchConverterActivity.java

public void displaySpeechRecognizer() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    startActivityForResult(intent, Constants.INTENT_REQUEST_CODE_SPEECH);
}
From source file: com.eng.arab.translator.androidtranslator.activity.NumberViewActivity.java

private void setupFloatingSearch() {
    mSearchView.setOnHomeActionClickListener(new FloatingSearchView.OnHomeActionClickListener() {
        @Override
        public void onHomeClicked() {
        }
    });

    mSearchView.setOnQueryChangeListener(new FloatingSearchView.OnQueryChangeListener() {
        @Override
        public void onSearchTextChanged(String oldQuery, final String newQuery) {
            if (!oldQuery.equals("") && newQuery.equals("")) {
                mSearchView.clearSuggestions();
            } else {
                // This shows the top left circular progress.
                // You can call it wherever you want, but it makes sense
                // to do it when loading something in the background.
                mSearchView.showProgress();

                // Simulates a query call to a data source with a new query.
                NumberDataHelper.findSuggestions(getApplicationContext(), newQuery, 5,
                        FIND_SUGGESTION_SIMULATED_DELAY, new NumberDataHelper.OnFindSuggestionsListener() {
                            @Override
                            public void onResults(List<NumberSuggestion> results) {
                                // This will swap the data and render the
                                // collapse/expand animations as necessary
                                mSearchView.swapSuggestions(results);

                                // Let the users know that the background process has completed
                                mSearchView.hideProgress();
                            }
                        });
            }
            Log.d(TAG, "onSearchTextChanged()");
        }
    });

    mSearchView.setOnSearchListener(new FloatingSearchView.OnSearchListener() {
        @Override
        public void onSuggestionClicked(final SearchSuggestion searchSuggestion) {
            NumberSuggestion numberSuggestion = (NumberSuggestion) searchSuggestion;
            NumberDataHelper.findNumbers(getApplicationContext(), numberSuggestion.getWord(),
                    new NumberDataHelper.OnFindNumberListener() {
                        @Override
                        public void onResults(List<NumberWrapper> results) {
                            mSearchResultsAdapter.swapData(results);
                        }
                    });
            Log.d(TAG, "onSuggestionClicked()");
            mLastQuery = searchSuggestion.getWord();
        }

        @Override
        public void onSearchAction(String query) {
            mLastQuery = query;
            NumberDataHelper.findNumbers(getApplicationContext(), query,
                    new NumberDataHelper.OnFindNumberListener() {
                        @Override
                        public void onResults(List<NumberWrapper> results) {
                            mSearchResultsAdapter.swapData(results);
                        }
                    });
            Log.d(TAG, "onSearchAction()");
        }
    });

    mSearchView.setOnFocusChangeListener(new FloatingSearchView.OnFocusChangeListener() {
        @Override
        public void onFocus() {
            // Show suggestions when search bar gains focus (typically history suggestions)
            // NumberDataHelper cdh = new NumberDataHelper();
            // cdh.setContext(getApplicationContext());
            mSearchView.swapSuggestions(NumberDataHelper.getHistory(getApplicationContext(), 5));
            Log.d(TAG, "onFocus()");
        }

        @Override
        public void onFocusCleared() {
            // Set the title of the bar so that when focus is returned a new query begins
            mSearchView.setSearchBarTitle(mLastQuery);

            // You can also use setSearchText(...) to keep the query there
            // when not focused and when focus returns
            // mSearchView.setSearchText(searchSuggestion.getWord());
            Log.d(TAG, "onFocusCleared()");
        }
    });

    // Handle menu clicks the same way as you would in a regular activity
    mSearchView.setOnMenuItemClickListener(new FloatingSearchView.OnMenuItemClickListener() {
        @Override
        public void onActionMenuItemSelected(MenuItem item) {
            if (item.getItemId() == R.id.action_refresh_list) {
                /*mIsDarkSearchTheme = true;

                //demonstrate setting colors for items
                mSearchView.setBackgroundColor(Color.parseColor("#787878"));
                mSearchView.setViewTextColor(Color.parseColor("#e9e9e9"));
                mSearchView.setHintTextColor(Color.parseColor("#e9e9e9"));
                mSearchView.setActionMenuOverflowColor(Color.parseColor("#e9e9e9"));
                mSearchView.setMenuItemIconColor(Color.parseColor("#e9e9e9"));
                mSearchView.setLeftActionIconColor(Color.parseColor("#e9e9e9"));
                mSearchView.setClearBtnColor(Color.parseColor("#e9e9e9"));
                mSearchView.setDividerColor(Color.parseColor("#BEBEBE"));
                mSearchView.setLeftActionIconColor(Color.parseColor("#e9e9e9"));*/
                populateCardList();
            } else if (item.getItemId() == R.id.action_voice_rec) {
                Intent voiceRecognize = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
                        getClass().getPackage().getName());
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                        RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "ar-EG");
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say the letter in ARABIC...");
                /*voiceRecognize.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);*/
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 100);
                startActivityForResult(voiceRecognize, REQUEST_CODE);
            } else {
                // Just print action
                //Toast.makeText(getApplicationContext().getApplicationContext(), item.getTitle(),
                //        Toast.LENGTH_SHORT).show();
            }
        }
    });

    // Use this listener to listen to menu clicks when app:floatingSearch_leftAction="showHome"
    mSearchView.setOnHomeActionClickListener(new FloatingSearchView.OnHomeActionClickListener() {
        @Override
        public void onHomeClicked() {
            startActivity(new Intent(NumberViewActivity.this, MainActivity.class)
                    .addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP));
            overridePendingTransition(R.anim.left_to_right, R.anim.right_to_left);
            Log.d(TAG, "onHomeClicked()");
        }
    });

    /*
     * Here you have access to the left icon and the text of a given suggestion
     * item after it is bound to the suggestion list. You can utilize this
     * callback to change some properties of the left icon and the text. For example, you
     * can load the left icon images using your favorite image loading library, or change text color.
     *
     * Important:
     * Keep in mind that the suggestion list is a RecyclerView, so views are reused for different
     * items in the list.
     */
    mSearchView.setOnBindSuggestionCallback(new SearchSuggestionsAdapter.OnBindSuggestionCallback() {
        @Override
        public void onBindSuggestion(View suggestionView, ImageView leftIcon, TextView textView,
                SearchSuggestion item, int itemPosition) {
            NumberSuggestion numberSuggestion = (NumberSuggestion) item;

            String textColor = mIsDarkSearchTheme ? "#ffffff" : "#000000";
            String textLight = mIsDarkSearchTheme ? "#bfbfbf" : "#787878";

            if (numberSuggestion.getIsHistory()) {
                leftIcon.setImageDrawable(ResourcesCompat.getDrawable(getResources(),
                        R.drawable.ic_history_black_24dp, null));
                Util.setIconColor(leftIcon, Color.parseColor(textColor));
                leftIcon.setAlpha(.36f);
            } else {
                leftIcon.setImageDrawable(ResourcesCompat.getDrawable(getResources(),
                        R.drawable.ic_lightbulb_outline_black_24dp, null));
                Util.setIconColor(leftIcon, Color.parseColor(textColor));
                leftIcon.setAlpha(.36f);
                /*leftIcon.setAlpha(0.0f);
                leftIcon.setImageDrawable(null);*/
            }

            textView.setTextColor(Color.parseColor(textColor));
            String text = numberSuggestion.getWord().replaceFirst(mSearchView.getQuery(),
                    "<font color=\"" + textLight + "\">" + mSearchView.getQuery() + "</font>");
            textView.setText(Html.fromHtml(text));
        }
    });

    // Listen for when the suggestion list expands/shrinks in order to move the search results list down/up
    mSearchView.setOnSuggestionsListHeightChanged(new FloatingSearchView.OnSuggestionsListHeightChanged() {
        @Override
        public void onSuggestionsListHeightChanged(float newHeight) {
            mSearchResultsList.setTranslationY(newHeight);
        }
    });
}
From source file: org.botlibre.sdk.activity.MicConfiguration.java

public void googleListening(View v) {
    txt.setText("Status: ON");
    setMicIcon(true, false);
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);
    try {
        startActivityForResult(intent, 1);
        editTextForGoogle.setText("");
    } catch (ActivityNotFoundException a) {
        Toast t = Toast.makeText(getApplicationContext(),
                "Your device doesn't support Speech to Text", Toast.LENGTH_SHORT);
        t.show();
    }
}