List of usage examples for android.speech RecognizerIntent ACTION_RECOGNIZE_SPEECH
String ACTION_RECOGNIZE_SPEECH
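Most of the examples below wrap this constant in an Intent and either launch it with startActivityForResult() or hand it to SpeechRecognizer.startListening(). A minimal end-to-end sketch of the activity-based flow (not taken from any of the files below; SpeechInputActivity and REQUEST_SPEECH are placeholder names):

import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import java.util.ArrayList;

// Hypothetical activity; REQUEST_SPEECH is an arbitrary request code.
public class SpeechInputActivity extends Activity {

    private static final int REQUEST_SPEECH = 100;

    private void promptSpeechInput() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        try {
            // The recognition activity shows its own UI and returns the
            // transcription to onActivityResult().
            startActivityForResult(intent, REQUEST_SPEECH);
        } catch (ActivityNotFoundException e) {
            // No activity on the device can handle ACTION_RECOGNIZE_SPEECH.
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_SPEECH && resultCode == RESULT_OK && data != null) {
            // EXTRA_RESULTS holds the recognition hypotheses, best match first.
            ArrayList<String> results =
                    data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            // Use results.get(0) as the most likely transcription.
        }
    }
}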
From source file:com.example.SpeechRecognizer.java
/**
 * Checks if a recognizer is present on this device
 */
private boolean IsSpeechRecognizerPresent() {
    PackageManager pm = ctx.getPackageManager();
    List activities = pm.queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
    return !activities.isEmpty();
}
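When the goal is only to decide whether speech input can be offered at all, SpeechRecognizer also exposes a static check that avoids querying the package manager directly. A minimal sketch, not part of the file above (the helper name canUseSpeechInput is a placeholder):

import android.content.Context;
import android.speech.SpeechRecognizer;

// Returns true when at least one recognition service is available on the device.
static boolean canUseSpeechInput(Context context) {
    return SpeechRecognizer.isRecognitionAvailable(context);
}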
From source file:com.vyasware.vaani.MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    returnedText = (TextView) findViewById(R.id.textView1);
    outputText = (TextView) findViewById(R.id.textView2);
    progressBar = (ProgressBar) findViewById(R.id.progressBar1);
    toggleButton = (ToggleButton) findViewById(R.id.toggleButton1);
    progressBar.setVisibility(View.INVISIBLE);

    speech = SpeechRecognizer.createSpeechRecognizer(this);
    speech.setRecognitionListener(this);
    recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en");
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "hi-IN");
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "hi-IN");
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_ONLY_RETURN_LANGUAGE_PREFERENCE, "hi-IN");
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 5);

    tts = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            if (status != TextToSpeech.ERROR) {
                // Locale takes language and country as separate arguments
                tts.setLanguage(new Locale("hi", "IN"));
                tts.setSpeechRate(0.9f);
            }
        }
    });

    returnedText.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            Intent intent = new Intent(Intent.ACTION_WEB_SEARCH);
            intent.putExtra(SearchManager.QUERY, returnedText.getText());
            if (intent.resolveActivity(getPackageManager()) != null)
                startActivity(intent);
        }
    });

    toggleButton.setOnCheckedChangeListener(new OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            if (isChecked) {
                progressBar.setVisibility(View.VISIBLE);
                progressBar.setIndeterminate(true);
                speech.startListening(recognizerIntent);
                outputText.setText("");
            } else {
                progressBar.setIndeterminate(false);
                progressBar.setVisibility(View.INVISIBLE);
                speech.stopListening();
            }
        }
    });
}
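This example registers the activity as a RecognitionListener but does not show the callbacks. A minimal sketch of the onResults() callback that would receive the hypotheses for the intent above (not part of the original file; the returnedText field is assumed from the snippet):

// Member of the activity that implements RecognitionListener.
@Override
public void onResults(Bundle results) {
    ArrayList<String> matches =
            results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    if (matches != null && !matches.isEmpty()) {
        // The first entry is the recognizer's best guess.
        returnedText.setText(matches.get(0));
    }
}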
From source file:com.sunildhaker.watch.heart.ConnectActivity.java
private void displaySpeechRecognizer() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // Start the activity, the intent will be populated with the speech text
    startActivityForResult(intent, SPEECH_REQUEST_CODE);
}
From source file:com.uphyca.android.nagiharae.MainActivity.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);

    FragmentManager manager = getSupportFragmentManager();
    mSocketClientFragment = new SocketClientFragment();
    manager.beginTransaction().add(mSocketClientFragment, "socket").commit();
    findViewById(R.id.button1).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            mSocketClientFragment.connect();
        }
    });

    Fragment f = manager.findFragmentByTag("adk");
    if (f == null) {
        mAdkFragment = new AdkFragment();
        manager.beginTransaction().add(mAdkFragment, "adk").commit();
    } else {
        mAdkFragment = (AdkFragment) f;
    }
    mAdkFragment.setOnAccessoryStateChangedListener(new AdkFragment.OnAccessoryStateChangedListener() {
        @Override
        public void onOpend() {
            Log.d(TAG, "connected");
            mButton.setEnabled(true);
        }

        @Override
        public void onClosed() {
            Log.d(TAG, "not connected");
            mButton.setEnabled(false);
        }
    });
    mAdkFragment.setOnLedStateChangedListener(new AdkFragment.OnLedStateChangedListener() {
        @Override
        public void ledStateChanged(boolean isOn) {
            if (isOn) {
                // Toast indicating the LED is on
                Toast.makeText(MainActivity.this, "??", Toast.LENGTH_LONG).show();
            } else {
                // Toast indicating the LED is off
                Toast.makeText(MainActivity.this, "?", Toast.LENGTH_LONG).show();
            }
        }
    });

    mButton = (Button) findViewById(R.id.button);
    mButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            try {
                Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); // ACTION_WEB_SEARCH
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
                intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "??");
                startActivityForResult(intent, REQUEST_CODE);
            } catch (ActivityNotFoundException e) {
                // Shown when no activity on the device can handle the recognition intent
                Toast.makeText(MainActivity.this, "ActivityNotFoundException", Toast.LENGTH_LONG).show();
            }
        }
    });
}
From source file:com.monmonja.library.utils.PlayServiceUtils.java
public static boolean isSpeechRecognitionActivityPresented(Activity callerActivity) {
    try {
        // getting an instance of package manager
        PackageManager pm = callerActivity.getPackageManager();
        // a list of activities which can process the speech recognition Intent
        List activities = pm.queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        if (activities.size() != 0) { // if list not empty
            return true; // then we can recognize the speech
        }
    } catch (Exception e) {
    }
    return false; // we have no activities to recognize the speech
}
From source file:conversandroid.SimpleASR.java
/**
 * Initializes the speech recognizer and starts listening to the user input
 */
private void listen() {
    // Disable the button so that ASR is not launched again until the previous recognition result is received
    Button speak = (Button) findViewById(R.id.speech_btn);
    speak.setEnabled(false);

    // Check we have permission to record audio
    checkASRPermission();

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // Specify language model
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    // Specify max number of recognition results
    intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, numberRecoResults);
    // Start listening
    startActivityForResult(intent, ASR_CODE);
}
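The snippet calls checkASRPermission(), which is defined elsewhere in the file. A plausible sketch of such a check, assuming a standard RECORD_AUDIO runtime permission request (not taken from the original file; PERMISSION_REQUEST_RECORD_AUDIO is a hypothetical constant):

// Member of the activity; asks for RECORD_AUDIO at runtime on API 23+ if it
// has not been granted yet.
private void checkASRPermission() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[] { Manifest.permission.RECORD_AUDIO },
                PERMISSION_REQUEST_RECORD_AUDIO);
    }
}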
From source file:org.apache.cordova.plugins.speech.SpeechRecognizer.java
/**
 * Checks if a recognizer is present on this device
 */
private boolean IsSpeechRecognizerPresent() {
    PackageManager pm = cordova.getActivity().getPackageManager();
    List<ResolveInfo> activities = pm
            .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
    return !activities.isEmpty();
}
From source file:com.github.zagum.speechrecognitionview.sample.MainActivity.java
private void startRecognition() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getPackageName());
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en");
    speechRecognizer.startListening(intent);
}
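This example streams audio through SpeechRecognizer rather than launching a separate activity, so intermediate hypotheses can also be requested while the user is still speaking. A hedged sketch of that variation, not part of the original file (the listener is assumed to be the one already attached to speechRecognizer):

// Same intent as above, with one extra to request partial hypotheses.
private void startRecognitionWithPartialResults() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getPackageName());
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    speechRecognizer.startListening(intent);
}

// RecognitionListener callback that receives the partial hypotheses.
@Override
public void onPartialResults(Bundle partialResults) {
    ArrayList<String> partial =
            partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    // When non-null, partial holds the best hypothesis so far; update the UI with it.
}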
From source file:com.tdispatch.passenger.fragment.SearchAddressFragment.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    Bundle args = getArguments();
    if (args != null) {
        mType = args.getInt(Const.Bundle.TYPE);
        mAddress = args.getParcelable(Const.Bundle.LOCATION);
    } else {
        throw new IllegalArgumentException("Arguments not passed");
    }

    // Check to see if a voice recognition activity is present on device
    PackageManager pm = mContext.getPackageManager();
    List<ResolveInfo> activities = pm
            .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
    mVoiceSearchAvailable = (activities.size() != 0);
}
From source file:de.domjos.schooltools.helper.Helper.java
public static void displaySpeechRecognizer(Activity activity, int req_code) {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    activity.startActivityForResult(intent, req_code);
}