List of usage examples for android.speech.SpeechRecognizer.createSpeechRecognizer
public static SpeechRecognizer createSpeechRecognizer(final Context context)
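Before the project examples below, here is a minimal, self-contained sketch of the typical lifecycle: create the recognizer, attach a RecognitionListener, start listening with a RecognizerIntent, and destroy the recognizer when finished. The class and method names are illustrative, not from any of the sources below; the sketch assumes RECORD_AUDIO has already been granted and that it runs on the main thread, as SpeechRecognizer requires.

import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import java.util.ArrayList;

public class MinimalRecognizerDemo {
    // Create, listen, and hand the recognizer back to the caller.
    // Assumes RECORD_AUDIO is granted and this runs on the main thread.
    static SpeechRecognizer start(Context context) {
        SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer(context);
        recognizer.setRecognitionListener(new RecognitionListener() {
            @Override public void onResults(Bundle results) {
                ArrayList<String> matches =
                        results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                // matches.get(0) is the most likely transcription, when present.
            }
            @Override public void onError(int error) { /* inspect SpeechRecognizer.ERROR_* */ }
            @Override public void onReadyForSpeech(Bundle params) { }
            @Override public void onBeginningOfSpeech() { }
            @Override public void onRmsChanged(float rmsdB) { }
            @Override public void onBufferReceived(byte[] buffer) { }
            @Override public void onEndOfSpeech() { }
            @Override public void onPartialResults(Bundle partialResults) { }
            @Override public void onEvent(int eventType, Bundle params) { }
        });
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        recognizer.startListening(intent);
        return recognizer; // caller must eventually call recognizer.destroy()
    }
}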
From source file:ai.api.unityhelper.RecognitionHelper.java
protected void initializeRecognizer() {
    synchronized (speechRecognizerLock) {
        if (speechRecognizer != null) {
            speechRecognizer.destroy();
            speechRecognizer = null;
        }
        final ComponentName googleRecognizerComponent = RecognizerChecker.findGoogleRecognizer(context);
        if (googleRecognizerComponent == null) {
            speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context);
        } else {
            speechRecognizer = SpeechRecognizer.createSpeechRecognizer(context, googleRecognizerComponent);
        }
        speechRecognizer.setRecognitionListener(new InternalRecognitionListener());
    }
}
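RecognizerChecker is a project-specific helper for locating Google's recognition service. Independently of it, the framework offers SpeechRecognizer.isRecognitionAvailable(Context) as a guard before creating a recognizer. The snippet below is an addition for illustration (TAG and context as in the example above), not part of the original source:

// Illustrative guard: bail out early if no recognition service is installed.
if (!SpeechRecognizer.isRecognitionAvailable(context)) {
    Log.w(TAG, "No speech recognition service is available on this device");
    return;
}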
From source file:com.telepromptu.TeleprompterService.java
private void startListening() {
    if (speechRecognizer == null) {
        speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
        speechRecognizer.setRecognitionListener(new DictationListener());
        Intent speechIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        speechIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
        speechIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
        // speechIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
        // speechIntent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 300000);
        // speechIntent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 300000);
        speechRecognizer.startListening(speechIntent);
    }
}
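Because this recognizer runs inside a Service, there is no activity to surface a permission dialog; the app must already hold RECORD_AUDIO (declared in the manifest and, on API 23+, granted at runtime). A guard like the following could precede startListening; it is an addition for illustration (TAG assumed), not part of TeleprompterService:

// Illustrative guard (not in the original service): stop if the
// RECORD_AUDIO runtime permission has not been granted (API 23+).
if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
        != PackageManager.PERMISSION_GRANTED) {
    Log.w(TAG, "RECORD_AUDIO not granted; cannot start dictation");
    return;
}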
From source file:com.ct.speech.HintReceiver.java
/**
 * Fire an intent to start speech recognition via the Google speech recognizer.
 *
 * @param args Argument array with the following string args:
 *             [req code][number of matches][prompt string]
 */
private void startSpeechRecognitionActivity(JSONArray args) {
    // int reqCode = 42; // Hitchhiker? (reqCode is now a field)
    int maxMatches = 2;
    String prompt = "";
    String language = "";
    try {
        if (args.length() > 0) {
            // Request code - passed back to the caller on a successful operation.
            String temp = args.getString(0);
            reqCode = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Maximum number of matches; 0 means the recognizer decides.
            String temp = args.getString(1);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 2) {
            // Optional text prompt.
            prompt = args.getString(2);
        }
        if (args.length() > 3) {
            // Optional language.
            language = args.getString(3);
        }
    } catch (Exception e) {
        Log.e(TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra("calling_package", "com.ct.BasicAppFrame");
    if (!language.equals("")) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    }
    if (maxMatches > 0) {
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    }
    if (prompt.length() > 0) {
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    }

    // Previously: ctx.startActivityForResult(this, intent, reqCode);
    // removed in order to use the recognizer directly.
    try {
        this.ctx.runOnUiThread(new Runnable() {
            public void run() {
                final SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer((Context) ctx);
                RecognitionListener listener = new RecognitionListener() {
                    @Override
                    public void onResults(Bundle results) {
                        // closeRecordedFile();
                        sendBackResults(results);
                        ArrayList<String> voiceResults =
                                results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                        if (voiceResults == null) {
                            Log.e(TAG, "No voice results");
                        }
                        // Otherwise voiceResults holds the candidate transcriptions.
                        recognizer.destroy();
                    }

                    @Override
                    public void onReadyForSpeech(Bundle params) {
                    }

                    @Override
                    public void onError(int error) {
                        Log.d(TAG, "Error listening for speech: " + error);
                        if (error == SpeechRecognizer.ERROR_NO_MATCH) {
                            sendBackResults(NO_MATCH);
                        } else if (error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT) {
                            sendBackResults(NO_INPUT);
                        } else {
                            speechFailure("unknown error");
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onBeginningOfSpeech() {
                        setStartOfSpeech();
                    }

                    @Override // does not fire on Android after Ice Cream Sandwich
                    public void onBufferReceived(byte[] buffer) {
                    }

                    @Override
                    public void onEndOfSpeech() {
                        setEndOfSpeech();
                    }

                    @Override
                    public void onEvent(int eventType, Bundle params) {
                    }

                    @Override
                    public void onPartialResults(Bundle partialResults) {
                    }

                    @Override
                    public void onRmsChanged(float rmsdB) {
                    }
                };
                recognizer.setRecognitionListener(listener);
                Log.d(TAG, "starting speech recognition activity");
                recognizer.startListening(intent);
            }
        });
    } catch (Exception e) {
        Log.e(TAG, "Failed to start speech recognition on the UI thread", e);
    }
}
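The onError branch above handles only ERROR_NO_MATCH and ERROR_SPEECH_TIMEOUT specially and logs everything else as a bare integer. A small helper like the following (an addition for illustration, not part of the plugin above) makes the remaining framework error codes readable in logs:

// Illustrative addition: map SpeechRecognizer.ERROR_* codes to names for logging.
static String errorName(int error) {
    switch (error) {
    case SpeechRecognizer.ERROR_AUDIO: return "ERROR_AUDIO";
    case SpeechRecognizer.ERROR_CLIENT: return "ERROR_CLIENT";
    case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS: return "ERROR_INSUFFICIENT_PERMISSIONS";
    case SpeechRecognizer.ERROR_NETWORK: return "ERROR_NETWORK";
    case SpeechRecognizer.ERROR_NETWORK_TIMEOUT: return "ERROR_NETWORK_TIMEOUT";
    case SpeechRecognizer.ERROR_NO_MATCH: return "ERROR_NO_MATCH";
    case SpeechRecognizer.ERROR_RECOGNIZER_BUSY: return "ERROR_RECOGNIZER_BUSY";
    case SpeechRecognizer.ERROR_SERVER: return "ERROR_SERVER";
    case SpeechRecognizer.ERROR_SPEECH_TIMEOUT: return "ERROR_SPEECH_TIMEOUT";
    default: return "UNKNOWN(" + error + ")";
    }
}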
From source file:com.appsimobile.appsii.module.search.SearchController.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    AppsiInjector.inject(this);
    mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(getContext());
    mSpeechRecognizer.setRecognitionListener(new RecognitionListenerImpl());
    mSearchAdapter = new SearchAdapter(this, this, this);
    mPeopleCallbacks = new PeopleCallbacks();
    mAppsCallbacks = new AppsCallbacks();
    mRecentSearchedCallbacks = new RecentSearchedCallbacks();
    mSearchSuggestionsAdapter = new SearchSuggestionsAdapter();
    getLoaderManager().initLoader(RECENT_SEARCHES_LOADER, null, mRecentSearchedCallbacks);
}
From source file:de.dfki.iui.mmir.plugins.speech.android.AndroidSpeechRecognizer.java
private void _startSpeechRecognitionActivity(JSONArray args, CallbackContext callbackContext,
        boolean isWithEndOfSpeechDetection) {
    int maxMatches = 0;
    String prompt = ""; // TODO remove? (not used when ASR is used directly as a service here)
    String language = Locale.getDefault().toString();
    boolean isIntermediate = false;
    try {
        if (args.length() > 0) {
            // Optional language.
            language = args.getString(0);
        }
        if (args.length() > 1) {
            isIntermediate = args.getBoolean(1);
        }
        if (args.length() > 2) {
            // Maximum number of matches; 0 means the recognizer decides.
            String temp = args.getString(2);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 3) {
            // Optional text prompt.
            prompt = args.getString(3);
        }
        // TODO if ... withoutEndOfSpeechDetection = ...
    } catch (Exception e) {
        Log.e(PLUGIN_NAME, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    // Create the intent and set parameters.
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    if (!isWithEndOfSpeechDetection) {
        // Try to simulate start/stop-recording behavior (without end-of-speech detection).
        // NOTE: these settings do not seem to have any effect for the default
        //       Google recognizer at API level > 16.
        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 10000L);
        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 6000L);
    }
    if (maxMatches > 0) {
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    }
    if (!prompt.equals("")) {
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    }
    if (isIntermediate) {
        intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    }
    // NOTE: the extra package seems to be required for older Android versions,
    //       but not since API level 17(?).
    if (SDK_VERSION <= Build.VERSION_CODES.JELLY_BEAN) {
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, cordova.getActivity().getPackageName());
    }

    synchronized (speechLock) {
        if (speech != null) {
            speech.destroy();
        }
        speech = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity());
        disableSoundFeedback();
        ++recCounter;
        currentRecognizer = new ASRHandler(recCounter, enableMicLevelsListeners, callbackContext, this);
        currentRecognizer.setHapticPrompt(
                (Vibrator) this.cordova.getActivity().getSystemService(Context.VIBRATOR_SERVICE));
        speech.setRecognitionListener(currentRecognizer);
        speech.startListening(intent);
    }
}
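The plugin silences the recognizer's audio feedback through its own disableSoundFeedback() helper, whose body is not shown here. A plausible sketch of such a helper on API 23+ follows; which streams actually carry the start/stop beeps varies by device, so both the stream choice and the method body are assumptions, not the plugin's real code:

// Hedged sketch of what a disableSoundFeedback() helper might do
// (the plugin's actual implementation may differ): mute the streams
// that commonly carry the recognizer's start/stop beeps (API 23+).
private void disableSoundFeedback(AudioManager audioManager) {
    audioManager.adjustStreamVolume(AudioManager.STREAM_MUSIC, AudioManager.ADJUST_MUTE, 0);
    audioManager.adjustStreamVolume(AudioManager.STREAM_NOTIFICATION, AudioManager.ADJUST_MUTE, 0);
}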
From source file:org.botlibre.sdk.activity.ChatActivity.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_chat);

    // Remove the flag button if this is a single-bot app.
    if (MainActivity.launchType == LaunchType.Bot) {
        // findViewById(R.id.flagButton).setVisibility(View.GONE);
    }

    // Permission required for the microphone.
    ActivityCompat.requestPermissions(ChatActivity.this,
            new String[] { Manifest.permission.RECORD_AUDIO }, 1);

    // Set/save the current volume from the device.
    setStreamVolume();
    // Music volume is enabled.
    muteMicBeep(false);
    // For the "scream" issue.
    micLastStat = MainActivity.listenInBackground;

    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    this.instance = (InstanceConfig) MainActivity.instance;
    if (this.instance == null) {
        return;
    }

    /* if (MainActivity.showAds) {
        AdView mAdView = (AdView) findViewById(R.id.adView);
        AdRequest adRequest = new AdRequest.Builder().build();
        mAdView.loadAd(adRequest);
    } else {
        AdView mAdView = (AdView) findViewById(R.id.adView);
        mAdView.setVisibility(View.GONE);
    } */

    setTitle(this.instance.name);
    ((TextView) findViewById(R.id.title)).setText(this.instance.name);
    HttpGetImageAction.fetchImage(this, this.instance.avatar, findViewById(R.id.icon));

    ttsInit = false;
    tts = new TextToSpeech(this, this);

    if (!MainActivity.handsFreeSpeech) {
        setMicIcon(false, false);
    } else if (!MainActivity.listenInBackground) {
        setMicIcon(false, false);
    }

    // The last state of the mic is restored.
    if (MainActivity.listenInBackground && MainActivity.handsFreeSpeech) {
        microphoneThread(thread);
    }

    speech = SpeechRecognizer.createSpeechRecognizer(this);
    speech.setRecognitionListener(this);

    // Scroll view and related layouts.
    scrollView = findViewById(R.id.chatList);
    menuMLayout = (LinearLayout) findViewById(R.id.menuMLayout);
    chatCLayout = (LinearLayout) findViewById(R.id.chatCLayout);
    responseLayout = (LinearLayout) findViewById(R.id.responseLayout);
    chatToolBar = (LinearLayout) findViewById(R.id.chatToolBar);
    videoView = (VideoView) findViewById(R.id.videoView);
    resetVideoErrorListener();
    videoError = false;
    imageView = (ImageView) findViewById(R.id.imageView);
    videoLayout = findViewById(R.id.videoLayout);

    textView = (EditText) findViewById(R.id.messageText);
    textView.setOnEditorActionListener(new OnEditorActionListener() {
        @Override
        public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
            submitChat();
            return false;
        }
    });

    if (MainActivity.translate) {
        findViewById(R.id.yandex).setVisibility(View.VISIBLE);
    } else {
        findViewById(R.id.yandex).setVisibility(View.GONE);
    }

    Spinner emoteSpin = (Spinner) findViewById(R.id.emoteSpin);
    emoteSpin.setAdapter(new EmoteSpinAdapter(this, R.layout.emote_list,
            Arrays.asList(EmotionalState.values())));

    ListView list = (ListView) findViewById(R.id.chatList);
    list.setAdapter(new ChatListAdapter(this, R.layout.chat_list, this.messages));
    list.setTranscriptMode(ListView.TRANSCRIPT_MODE_ALWAYS_SCROLL);

    ImageButton button = (ImageButton) findViewById(R.id.speakButton);
    button.setOnClickListener(new View.OnClickListener() {
        @TargetApi(23)
        @Override
        public void onClick(View v) {
            if (MainActivity.handsFreeSpeech) {
                // Set the current volume to the saved setting.
                setStreamVolume();
                // Toggle background listening on or off.
                MainActivity.listenInBackground = !MainActivity.listenInBackground;
                // Persist the listenInBackground flag.
                SharedPreferences.Editor cookies =
                        MainActivity.current.getPreferences(Context.MODE_PRIVATE).edit();
                cookies.putBoolean("listenInBackground", MainActivity.listenInBackground);
                cookies.commit();
                if (MainActivity.listenInBackground) {
                    micLastStat = true;
                    try {
                        microphoneThread(thread);
                    } catch (Exception ignore) {
                    }
                    beginListening();
                } else {
                    micLastStat = false;
                    microphoneThread(thread);
                    stopListening();
                }
            } else {
                Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);
                try {
                    startActivityForResult(intent, RESULT_SPEECH);
                    textView.setText("");
                } catch (ActivityNotFoundException a) {
                    Toast t = Toast.makeText(getApplicationContext(),
                            "Your device doesn't support Speech to Text", Toast.LENGTH_SHORT);
                    t.show();
                }
            }
        }
    });

    // The avatar image and the video layout share the same handler, which
    // cycles the visibility of the surrounding layouts on each click.
    View.OnClickListener cycleLayouts = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (stateLayouts == 4) {
                stateLayouts = 0;
            }
            switch (stateLayouts) {
            case 0:
                scrollView.setVisibility(View.VISIBLE);
                chatCLayout.setVisibility(View.VISIBLE);
                menuMLayout.setVisibility(View.VISIBLE);
                responseLayout.setVisibility(View.VISIBLE);
                chatToolBar.setVisibility(View.VISIBLE);
                break;
            case 1:
                scrollView.setVisibility(View.GONE);
                break;
            case 2:
                responseLayout.setVisibility(View.GONE);
                chatToolBar.setVisibility(View.GONE);
                break;
            case 3:
                menuMLayout.setVisibility(View.GONE);
                chatCLayout.setVisibility(View.GONE);
                break;
            }
            stateLayouts++;
        }
    };
    imageView.setOnClickListener(cycleLayouts);
    videoLayout.setOnClickListener(cycleLayouts);

    // Double-tapping the chat list toggles between the image and video views.
    GestureDetector.SimpleOnGestureListener listener = new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onDoubleTapEvent(MotionEvent event) {
            if (event.getAction() == MotionEvent.ACTION_UP) {
                boolean isVideo = !MainActivity.disableVideo && !videoError
                        && response != null && response.isVideo();
                View imageView = findViewById(R.id.imageView);
                View videoLayout = findViewById(R.id.videoLayout);
                if (imageView.getVisibility() == View.VISIBLE) {
                    imageView.setVisibility(View.GONE);
                } else if (!isVideo) {
                    imageView.setVisibility(View.VISIBLE);
                }
                if (videoLayout.getVisibility() == View.VISIBLE) {
                    videoLayout.setVisibility(View.GONE);
                } else if (isVideo) {
                    videoLayout.setVisibility(View.VISIBLE);
                }
                return true;
            }
            return false;
        }
    };
    final GestureDetector detector = new GestureDetector(this, listener);
    findViewById(R.id.chatList).setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return detector.onTouchEvent(event);
        }
    });

    // Double-tapping toggles the avatar layout.
    listener = new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onDoubleTapEvent(MotionEvent event) {
            if (event.getAction() == MotionEvent.ACTION_UP) {
                View avatarLayout = findViewById(R.id.avatarLayout);
                if (avatarLayout.getVisibility() == View.VISIBLE) {
                    avatarLayout.setVisibility(View.GONE);
                } else {
                    avatarLayout.setVisibility(View.VISIBLE);
                }
                return true;
            }
            return false;
        }
    };
    final GestureDetector detector2 = new GestureDetector(this, listener);
    /* findViewById(R.id.responseText).setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return detector2.onTouchEvent(event);
        }
    }); */

    WebView responseView = (WebView) findViewById(R.id.responseText);
    responseView.getSettings().setJavaScriptEnabled(true);
    responseView.getSettings().setDomStorageEnabled(true);
    responseView.addJavascriptInterface(new WebAppInterface(this), "Android");

    findViewById(R.id.responseImageView).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            View avatarLayout = findViewById(R.id.avatarLayout);
            if (avatarLayout.getVisibility() == View.VISIBLE) {
                avatarLayout.setVisibility(View.GONE);
            } else {
                avatarLayout.setVisibility(View.VISIBLE);
            }
        }
    });

    HttpGetImageAction.fetchImage(this, instance.avatar, this.imageView);
    HttpGetImageAction.fetchImage(this, instance.avatar, (ImageView) findViewById(R.id.responseImageView));

    final ChatConfig config = new ChatConfig();
    config.instance = instance.id;
    config.avatar = this.avatarId;
    if (MainActivity.translate && MainActivity.voice != null) {
        config.language = MainActivity.voice.language;
    }
    if (MainActivity.disableVideo) {
        config.avatarFormat = "image";
    } else {
        config.avatarFormat = MainActivity.webm ? "webm" : "mp4";
    }
    config.avatarHD = MainActivity.hd;
    config.speak = !MainActivity.deviceVoice;

    // This is required because of a bug in TextToSpeech that prevents onInit
    // from being called if an AsyncTask is started first...
    Thread thread1 = new Thread() {
        public void run() {
            for (int count = 0; count < 5; count++) {
                if (ttsInit) {
                    break;
                }
                try {
                    Thread.sleep(1000);
                } catch (Exception exception) {
                }
            }
            HttpAction action = new HttpChatAction(ChatActivity.this, config);
            action.execute();
        }
    };
    thread1.start();
}
From source file:com.mhennessy.mapfly.MainActivity.java
public void setMapLocationBasedOnSpeech() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "com.mhennessy.mapfly");

    SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer(this.getApplicationContext());

    // Stop flying so that messages can be displayed to the user without
    // being overwritten by pitch/roll info.
    setFlyingEnabled(false);

    RecognitionListener listener = new RecognitionListener() {
        @Override
        public void onResults(Bundle results) {
            ArrayList<String> voiceResults = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            if (voiceResults == null || voiceResults.isEmpty()) {
                Log.e(TAG, "No voice results");
            } else {
                Log.d(TAG, "Printing matches: ");
                for (String match : voiceResults) {
                    Log.d(TAG, match);
                }
                String bestMatch = voiceResults.get(0);
                setMapLocation(bestMatch);
            }
        }

        @Override
        public void onReadyForSpeech(Bundle params) {
            setTitle("Say something!");
            Log.d(TAG, "Ready for speech");
        }

        @Override
        public void onError(int error) {
            setTitle("Speech Error");
            Log.d(TAG, "Error listening for speech: " + error);
        }

        @Override
        public void onBeginningOfSpeech() {
            Log.d(TAG, "Speech starting");
        }

        @Override
        public void onBufferReceived(byte[] buffer) {
            // no-op
        }

        @Override
        public void onEndOfSpeech() {
            // no-op
        }

        @Override
        public void onEvent(int eventType, Bundle params) {
            // no-op
        }

        @Override
        public void onPartialResults(Bundle partialResults) {
            // no-op
        }

        @Override
        public void onRmsChanged(float rmsdB) {
            // no-op
        }
    };
    recognizer.setRecognitionListener(listener);
    recognizer.startListening(intent);
}
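One thing this example omits is cleanup: the recognizer it creates is never destroyed. A reasonable complement, shown below as an addition rather than part of the original source, is to promote the recognizer to a field and release it with the activity:

// Illustrative addition: release the recognizer with the activity.
// Assumes 'recognizer' was promoted to a field of MainActivity.
@Override
protected void onDestroy() {
    if (recognizer != null) {
        recognizer.destroy();
        recognizer = null;
    }
    super.onDestroy();
}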
From source file:nl.hnogames.domoticz.SpeechSettingsActivity.java
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
    case android.R.id.home:
        finish();
        return true;
    case R.id.action_speech:
        if (speechRecognizer == null)
            speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
        if (recognitionProgressView == null)
            recognitionProgressView = (RecognitionProgressView) findViewById(R.id.recognition_view);
        if (recognitionListener == null) {
            recognitionListener = new RecognitionListenerAdapter() {
                @Override
                public void onResults(Bundle results) {
                    showSpeechResults(results);
                    stopRecognition();
                }
            };
        }
        int[] colors = {
                ContextCompat.getColor(this, R.color.material_amber_600),
                ContextCompat.getColor(this, R.color.material_blue_600),
                ContextCompat.getColor(this, R.color.material_deep_purple_600),
                ContextCompat.getColor(this, R.color.material_green_600),
                ContextCompat.getColor(this, R.color.material_orange_600)
        };
        recognitionProgressView.setColors(colors);
        recognitionProgressView.setSpeechRecognizer(speechRecognizer);
        recognitionProgressView.setRecognitionListener(recognitionListener);
        recognitionProgressView.postDelayed(new Runnable() {
            @Override
            public void run() {
                startRecognition();
            }
        }, 50);
        return true;
    }
    return super.onOptionsItemSelected(item);
}
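RecognitionListenerAdapter is not a framework class; it is a convenience base that supplies empty implementations of every RecognitionListener callback so the example above can override only onResults. A minimal version, written against nothing but the framework interface, could look like this:

// Minimal adapter sketch: empty defaults for every RecognitionListener
// callback, so subclasses override only what they need.
public abstract class RecognitionListenerAdapter implements RecognitionListener {
    @Override public void onReadyForSpeech(Bundle params) { }
    @Override public void onBeginningOfSpeech() { }
    @Override public void onRmsChanged(float rmsdB) { }
    @Override public void onBufferReceived(byte[] buffer) { }
    @Override public void onEndOfSpeech() { }
    @Override public void onError(int error) { }
    @Override public void onResults(Bundle results) { }
    @Override public void onPartialResults(Bundle partialResults) { }
    @Override public void onEvent(int eventType, Bundle params) { }
}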
From source file:com.glabs.homegenie.util.VoiceControl.java
/**
 * Lazily initialize the speech recognizer.
 */
private SpeechRecognizer getSpeechRecognizer() {
    if (_recognizer == null) {
        _recognizer = SpeechRecognizer.createSpeechRecognizer(_hgcontext);
        _recognizer.setRecognitionListener(this);
    }
    return _recognizer;
}
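A lazy getter like this pairs naturally with an explicit teardown. The counterpart below is a sketch, not part of the original VoiceControl class; it assumes the owning component calls it from its own lifecycle (for example, an Activity's onDestroy):

// Sketch of a matching teardown (not from the original source).
public void releaseSpeechRecognizer() {
    if (_recognizer != null) {
        _recognizer.destroy();
        _recognizer = null;
    }
}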
From source file:android.support.v17.leanback.app.SearchSupportFragment.java
@Override
public void onResume() {
    super.onResume();
    mIsPaused = false;
    if (mSpeechRecognitionCallback == null && null == mSpeechRecognizer) {
        mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(getActivity());
        mSearchBar.setSpeechRecognizer(mSpeechRecognizer);
    }
    if (mPendingStartRecognitionWhenPaused) {
        mPendingStartRecognitionWhenPaused = false;
        mSearchBar.startRecognition();
    } else {
        // Ensure search bar state consistency when using an external recognizer.
        mSearchBar.stopRecognition();
    }
}
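The fragment creates the recognizer lazily in onResume. The leanback source pairs this with a release when the fragment pauses; the sketch below approximates that counterpart and should not be read as the exact framework code:

// Hedged sketch of the symmetric teardown (details in the real
// SearchSupportFragment may differ): detach and destroy on pause.
@Override
public void onPause() {
    if (mSpeechRecognizer != null) {
        mSearchBar.setSpeechRecognizer(null);
        mSpeechRecognizer.destroy();
        mSpeechRecognizer = null;
    }
    mIsPaused = true;
    super.onPause();
}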