List of usage examples for android.speech RecognizerIntent EXTRA_RESULTS
String EXTRA_RESULTS
To view the source code for android.speech RecognizerIntent EXTRA_RESULTS, click the Source Link.
From source file:org.protocoderrunner.apprunner.AppRunnerActivity.java
/** * Handle the results from the recognition activity. *///from www. ja va2 s. co m @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == VOICE_RECOGNITION_REQUEST_CODE && resultCode == Activity.RESULT_OK) { // Fill the list view with the strings the recognizer thought it // could have heard ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); for (String _string : matches) { MLog.d(TAG, "" + _string); } onVoiceRecognitionListener.onNewResult(matches.get(0)); } else if (requestCode == 22 && resultCode == Activity.RESULT_OK) { String result = data.getStringExtra("json"); mAppRunnerFragment.interp.callJsFunction("onResult", result); } if (onBluetoothListener != null) { onBluetoothListener.onActivityResult(requestCode, resultCode, data); } super.onActivityResult(requestCode, resultCode, data); }
From source file:root.magicword.MagicWord.java
@Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == SPEECH_REQUEST_CODE) { if (resultCode == RESULT_OK) { ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); if (matches.size() == 0) { tts.speak("Heard nothing", TextToSpeech.QUEUE_FLUSH, null); } else { mostLikelyThingHeard = matches.get(0); String magicWord = this.getResources().getString(R.string.magicword); if (mostLikelyThingHeard.equals(magicWord)) { tts.speak("You said the magic word!", TextToSpeech.QUEUE_FLUSH, null); } else { tts.speak("The magic word is " + mostLikelyThingHeard + " try again", TextToSpeech.QUEUE_FLUSH, null); }/* w w w . j a v a2s .c om*/ } // result.setText("heard: " + matches); result.setText("heard: " + mostLikelyThingHeard); } else { Log.d(TAG, "result NOT ok"); } } super.onActivityResult(requestCode, resultCode, data); }
From source file:com.eugene.fithealthmaingit.UI.ChooseAddMealSearchFragment.java
/** * Set the text based on google voice then implement search *//*from w w w . ja v a2s . com*/ @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == REQ_CODE_SPEECH_INPUT) { if (resultCode == Activity.RESULT_OK && null != data) { ArrayList<String> result = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); mEtSearch.setInputType(InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_CAP_SENTENCES); mEtSearch.setText(result.get(0)); searchFood(mEtSearch.getText().toString(), 0); mItem.clear(); mEtSearchAdapter.notifyDataSetChanged(); mEtSearch.clearFocus(); } } }
From source file:com.eugene.fithealthmaingit.UI.NavFragments.FragmentSearch.java
/** * Set the text based on google voice then implement search *///from ww w. j a v a 2 s.c om @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == REQ_CODE_SPEECH_INPUT) { if (resultCode == Activity.RESULT_OK && null != data) { ArrayList<String> result = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); edit_text_search.setText(result.get(0)); } } }
From source file:com.theultimatelabs.scale.ScaleActivity.java
protected void onActivityResult(int requestCode, int resultCode, Intent data) { Log.v(TAG, "GOT SPEECH RESULT " + resultCode + " req: " + requestCode); if (resultCode == RESULT_OK) { ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); Log.i(TAG, "Check for density"); StringBuilder densityName = new StringBuilder(); double density = findMatch(matches, mDensitiesJson, densityName); Log.i(TAG, "Check for volume"); StringBuilder volumeName = new StringBuilder(); double volume = findMatch(matches, mVolumesJson, volumeName); Log.i(TAG, "Check for weight"); StringBuilder weightName = new StringBuilder(); double weight = findMatch(matches, mWeightsJson, weightName); if (density != 0 && volume != 0) { mUnitsRatio = 1000.0 / density / volume; mUnitsText = String.format("%s of %s", volumeName, densityName); } else if (weight != 0) { mUnitsRatio = 1.0 / weight;/*from w w w . ja v a 2s.c o m*/ mUnitsText = String.format("%s", weightName); } else { Toast.makeText(this, "Does not compute", Toast.LENGTH_LONG).show(); mTts.speak("Does not compute", TextToSpeech.QUEUE_FLUSH, null); } Editor settingsEditor = mSettings.edit(); mUnitsView.setText(mUnitsText); settingsEditor.putString("unitsText", mUnitsText); settingsEditor.putFloat("unitsRatio", (float) mUnitsRatio); settingsEditor.commit(); } super.onActivityResult(requestCode, resultCode, data); // startActivity(new Intent(Intent.ACTION_VIEW, // Uri.parse("http://www.youtube.com/watch?v=2qBgMmRMpOo"))); }
From source file:com.example.castCambot.MainActivity.java
@Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == REQUEST_CODE && resultCode == RESULT_OK) { ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); if (matches.size() > 0) { Log.d(TAG, matches.get(0));// www. ja va2 s. c om //sendMessage(matches.get(0)); Context context = getApplicationContext(); int duration = Toast.LENGTH_SHORT; String text = matches.get(0); Toast toast = Toast.makeText(context, text, duration); toast.show(); String goForward = "forward"; String turnLeft = "left"; String turnRight = "right"; String goBackwards = "backward"; if (text.contains(goForward)) { myHttpPost("go_forward"); mySleep(1000); myHttpPost("stop"); } if (text.contains(turnLeft)) { myHttpPost("turn_left"); mySleep(500); myHttpPost("stop"); } if (text.contains(turnRight)) { myHttpPost("turn_right"); mySleep(500); myHttpPost("stop"); } if (text.contains(goBackwards)) { myHttpPost("go_backward"); mySleep(1000); myHttpPost("stop"); } //auto trigger voiceButton on click //Button voiceButton = (Button) findViewById(R.id.voiceButton); //voiceButton.performClick(); } } super.onActivityResult(requestCode, resultCode, data); }
From source file:ir.occc.android.irc.activity.ConversationActivity.java
/**
 * Handles results from the child activities of the conversation screen:
 * speech input, channel join, user list, nick completion, and per-user actions.
 * All non-OK results are ignored.
 *
 * @param requestCode identifies which child activity finished
 * @param resultCode  RESULT_OK on success; anything else is discarded
 * @param data        result payload from the child activity
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (resultCode != RESULT_OK) {
        // ignore other result codes
        return;
    }

    switch (requestCode) {
    case REQUEST_CODE_SPEECH:
        // Put the best speech match into the input field.
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (matches.size() > 0) {
            ((EditText) findViewById(R.id.input)).setText(matches.get(0));
        }
        break;

    case REQUEST_CODE_JOIN:
        // Channel join is buffered; presumably consumed later in onResume() — confirm.
        joinChannelBuffer = data.getExtras().getString("channel");
        break;

    case REQUEST_CODE_USERS:
        // Chain into the user detail activity for the selected user.
        Intent intent = new Intent(this, UserActivity.class);
        intent.putExtra(Extra.USER, data.getStringExtra(Extra.USER));
        startActivityForResult(intent, REQUEST_CODE_USER);
        break;

    case REQUEST_CODE_NICK_COMPLETION:
        insertNickCompletion((EditText) findViewById(R.id.input), data.getExtras().getString(Extra.USER));
        break;

    case REQUEST_CODE_USER:
        // A user action (reply/query/op/kick/ban/...) was chosen in UserActivity.
        final int actionId = data.getExtras().getInt(Extra.ACTION);
        final String nickname = data.getExtras().getString(Extra.USER);
        final IRCConnection connection = binder.getService().getConnection(server.getId());
        final String conversation = server.getSelectedConversation();
        final Handler handler = new Handler();

        // XXX: Implement me - The action should be handled after onResume()
        // to catch the broadcasts... now we just wait a second.
        // Yes, that's very ugly - we need some kind of queue that is handled
        // after onResume().
        new Thread() {
            @Override
            public void run() {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // Do nothing
                }

                // Strip IRC mode prefixes (@, +, ., %) from the nickname.
                String nicknameWithoutPrefix = nickname;
                while (nicknameWithoutPrefix.startsWith("@") || nicknameWithoutPrefix.startsWith("+")
                        || nicknameWithoutPrefix.startsWith(".") || nicknameWithoutPrefix.startsWith("%")) {
                    // Strip prefix(es) now
                    nicknameWithoutPrefix = nicknameWithoutPrefix.substring(1);
                }

                switch (actionId) {
                case User.ACTION_REPLY:
                    // Pre-fill the input with "nick: " and move the cursor to the end.
                    final String replyText = nicknameWithoutPrefix + ": ";
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            EditText input = (EditText) findViewById(R.id.input);
                            input.setText(replyText);
                            input.setSelection(replyText.length());
                        }
                    });
                    break;

                case User.ACTION_QUERY:
                    Conversation query = server.getConversation(nicknameWithoutPrefix);
                    if (query == null) {
                        // Open a query if there's none yet
                        query = new Query(nicknameWithoutPrefix);
                        query.setHistorySize(binder.getService().getSettings().getHistorySize());
                        server.addConversation(query);

                        Intent intent = Broadcast.createConversationIntent(Broadcast.CONVERSATION_NEW,
                                server.getId(), nicknameWithoutPrefix);
                        binder.getService().sendBroadcast(intent);
                    }
                    break;

                case User.ACTION_OP:
                    connection.op(conversation, nicknameWithoutPrefix);
                    break;

                case User.ACTION_DEOP:
                    connection.deOp(conversation, nicknameWithoutPrefix);
                    break;

                case User.ACTION_VOICE:
                    connection.voice(conversation, nicknameWithoutPrefix);
                    break;

                case User.ACTION_DEVOICE:
                    connection.deVoice(conversation, nicknameWithoutPrefix);
                    break;

                case User.ACTION_KICK:
                    connection.kick(conversation, nicknameWithoutPrefix);
                    break;

                case User.ACTION_BAN:
                    // Ban by nick with wildcard ident/host.
                    connection.ban(conversation, nicknameWithoutPrefix + "!*@*");
                    break;
                }
            }
        }.start();
        break;
    }
}
From source file:com.techno.jay.codingcontests.Home.java
@Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == MaterialSearchView.REQUEST_VOICE && resultCode == RESULT_OK) { ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); if (matches != null && matches.size() > 0) { String searchWrd = matches.get(0); if (!TextUtils.isEmpty(searchWrd)) { searchView.setQuery(searchWrd, false); }// w w w . ja va 2 s . com } return; } if (!bp.handleActivityResult(requestCode, resultCode, data)) super.onActivityResult(requestCode, resultCode, data); }
From source file:android.support.v17.leanback.app.SearchSupportFragment.java
/** * Sets the text of the search query based on the {@link RecognizerIntent#EXTRA_RESULTS} in * the given intent, and optionally submit the query. If more than one result is present * in the results list, the first will be used. * * @param intent Intent received from a speech recognition service. * @param submit Whether to submit the query. *//*from www .j a va 2 s .c o m*/ public void setSearchQuery(Intent intent, boolean submit) { ArrayList<String> matches = intent.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); if (matches != null && matches.size() > 0) { setSearchQuery(matches.get(0), submit); } }
From source file:com.example.michel.facetrack.FaceTrackerActivity.java
/** * Callback for speech recognition activity * *//*from ww w .ja v a2 s . c om*/ @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); System.out.println("hello #"); switch (requestCode) { case SPEECH_RECOGNITION_CODE: { if (resultCode == RESULT_OK && null != data) { ArrayList<String> result = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); updateState(result.get(0)); } else { updateState(""); } break; } } }