Example usage for android.speech RecognizerIntent EXTRA_PROMPT

List of usage examples for android.speech RecognizerIntent EXTRA_PROMPT

Introduction

On this page you can find example usages of android.speech RecognizerIntent EXTRA_PROMPT.

Prototype

String EXTRA_PROMPT

Document

Optional text prompt to show to the user when asking them to speak.
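
Before the full examples, here is a minimal sketch of the typical pattern: set EXTRA_PROMPT on an ACTION_RECOGNIZE_SPEECH intent and launch the built-in recognition UI. The request code constant and the prompt text below are illustrative only, not taken from the examples that follow.

// Minimal sketch (illustrative names): show the system recognition dialog with a custom prompt.
private static final int REQUEST_SPEECH = 1001; // illustrative request code

private void promptForSpeech() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // EXTRA_PROMPT: optional text shown to the user while the recognizer is listening
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Please speak now");
    try {
        startActivityForResult(intent, REQUEST_SPEECH);
    } catch (ActivityNotFoundException e) {
        // No activity on this device can handle ACTION_RECOGNIZE_SPEECH
    }
}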

Usage

From source file:com.ct.speech.HintReceiver.java

/**
 * Fire an intent to start the speech recognition activity (Google speech recognizer).
 *
 * @param args
 *            Argument array with the following string args:
 *            [request code][number of matches][prompt string][language]
 */

private void startSpeechRecognitionActivity(JSONArray args) {
    // int reqCode = 42; // Hitchhiker? // global now
    int maxMatches = 2;
    String prompt = "";
    String language = "";
    try {
        if (args.length() > 0) {
            // Request code - passed back to the caller on a successful
            // operation
            String temp = args.getString(0);
            reqCode = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Maximum number of matches, 0 means the recognizer decides
            String temp = args.getString(1);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 2) {
            // Optional text prompt
            prompt = args.getString(2);
        }
        if (args.length() > 3) {
            // Optional language specified
            language = args.getString(3);
        }
    } catch (Exception e) {
        Log.e(TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }
    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra("calling_package", "com.ct.BasicAppFrame");
    // If specific language
    if (!language.equals("")) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    }
    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    if (prompt.length() > 0)
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    // ctx.startActivityForResult(this, intent, reqCode); //removed to try
    // using recognizer directly
    try {
        this.ctx.runOnUiThread(new Runnable() {
            public void run() {
                final SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer((Context) ctx);
                RecognitionListener listener = new RecognitionListener() {
                    @Override
                    public void onResults(Bundle results) {
                        //closeRecordedFile();
                        sendBackResults(results);
                        ArrayList<String> voiceResults = results
                                .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                        if (voiceResults == null) {
                            Log.e(TAG, "No voice results");
                        } else {
                            // Log.d(TAG, "Printing matches: ");
                            for (@SuppressWarnings("unused")
                            String match : voiceResults) {
                                // Log.d(TAG, match);
                            }
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onReadyForSpeech(Bundle params) {
                        // Log.d(TAG, "Ready for speech");
                    }

                    @Override
                    public void onError(int error) {
                        Log.d(TAG, "Error listening for speech: " + error);
                        if (error == SpeechRecognizer.ERROR_NO_MATCH) {
                            sendBackResults(NO_MATCH);
                        } else if (error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT) {
                            sendBackResults(NO_INPUT);
                        } else {
                            speechFailure("unknown error");
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onBeginningOfSpeech() {
                        // Log.d(TAG, "Speech starting");
                        setStartOfSpeech();
                    }

                    @Override
                    //doesn't fire in Android after Ice Cream Sandwich
                    public void onBufferReceived(byte[] buffer) {
                    }

                    @Override
                    public void onEndOfSpeech() {
                        setEndOfSpeech();
                    }

                    @Override
                    public void onEvent(int eventType, Bundle params) {
                        // TODO Auto-generated method stub

                    }

                    @Override
                    public void onPartialResults(Bundle partialResults) {
                        // TODO Auto-generated method stub

                    }

                    @Override
                    public void onRmsChanged(float rmsdB) {
                        // TODO Auto-generated method stub

                    }
                };
                recognizer.setRecognitionListener(listener);
                Log.d(TAG, "starting speech recognition activity");
                recognizer.startListening(intent);
            }
        });
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:com.justplay1.shoppist.features.search.SearchFragment.java

// Note: despite the name, this launches speech recognition (speech-to-text), not text-to-speech.
private void startTextToSpeech(String prompt, int requestCode) {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    try {
        startActivityForResult(intent, requestCode);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getContext(), getString(R.string.recognition_not_present), Toast.LENGTH_SHORT).show();
    }
}
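
The examples on this page launch the recognizer but do not show how the recognized text comes back. Below is a rough Activity-style sketch of handling the result; the REQUEST_SPEECH constant is assumed to be the request code passed to startActivityForResult, and EXTRA_RESULTS holds the candidate transcriptions, typically ordered by decreasing confidence.

// Sketch (assumed names): receive the recognition result for a request started with startActivityForResult.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SPEECH && resultCode == RESULT_OK && data != null) {
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (matches != null && !matches.isEmpty()) {
            String bestMatch = matches.get(0); // first entry is usually the highest-confidence result
            // use bestMatch here
        }
    }
}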

From source file:de.dfki.iui.mmir.plugins.speech.android.AndroidSpeechRecognizer.java

private void _startSpeechRecognitionActivity(JSONArray args, CallbackContext callbackContext,
        boolean isWithEndOfSpeechDetection) {
    int maxMatches = 0;
    String prompt = "";//TODO remove? (not used when ASR is directly used as service here...)
    String language = Locale.getDefault().toString();
    boolean isIntermediate = false;

    try {
        if (args.length() > 0) {
            // Optional language specified
            language = args.getString(0);
        }
        if (args.length() > 1) {
            isIntermediate = args.getBoolean(1);
        }
        if (args.length() > 2) {
            // Maximum number of matches, 0 means that the recognizer "decides"
            String temp = args.getString(2);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 3) {
            // Optional text prompt
            prompt = args.getString(3);
        }

        //TODO if ... withoutEndOfSpeechDetection = ...
    } catch (Exception e) {
        Log.e(PLUGIN_NAME, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    // Create the intent and set parameters
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);

    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    if (!isWithEndOfSpeechDetection) {

        // try to simulate start/stop-recording behavior (without end-of-speech detection) 

        //NOTE these settings do not seem to have any effect for the default Google Recognizer on API level > 16

        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 10000L);
        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 6000L);
    }

    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);

    if (!prompt.equals(""))
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);

    if (isIntermediate)
        intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);

    //NOTE the extra package seems to be required for older Android versions, but not since API level 17(?)
    if (SDK_VERSION <= Build.VERSION_CODES.JELLY_BEAN)
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, cordova.getActivity().getPackageName());

    synchronized (speechLock) {

        if (speech != null) {
            speech.destroy();
        }
        speech = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity());

        disableSoundFeedback();

        ++recCounter;
        currentRecognizer = new ASRHandler(recCounter, enableMicLevelsListeners, callbackContext, this);
        currentRecognizer.setHapticPrompt(
                (Vibrator) this.cordova.getActivity().getSystemService(Context.VIBRATOR_SERVICE));
        speech.setRecognitionListener(currentRecognizer);
        speech.startListening(intent);

    }
}

From source file:com.nicefontaine.seanachie.ui.imagestory.ImageStoryFragment.java

private Intent getSpeechIntent(String text) {
    return new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
            .putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
            .putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault())
            .putExtra(RecognizerIntent.EXTRA_PROMPT, text);
}

From source file:com.pixplicity.castdemo.MainActivity.java

/**
 * Android voice recognition
 */
private void startVoiceRecognitionActivity() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en-US");
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.message_to_cast));
    startActivityForResult(intent, REQUEST_SPEECH_RECOGNITION);
}

From source file:com.google.sample.cast.refplayer.chatting.MainActivity.java

private void startVoiceRecognitionActivity() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.message_to_cast));
    startActivityForResult(intent, REQUEST_CODE);
}

From source file:com.eng.arab.translator.androidtranslator.activity.NumberViewActivity.java

private void setupFloatingSearch() {
    mSearchView.setOnHomeActionClickListener(new FloatingSearchView.OnHomeActionClickListener() {
        @Override
        public void onHomeClicked() {

        }
    });

    mSearchView.setOnQueryChangeListener(new FloatingSearchView.OnQueryChangeListener() {

        @Override
        public void onSearchTextChanged(String oldQuery, final String newQuery) {

            if (!oldQuery.equals("") && newQuery.equals("")) {
                mSearchView.clearSuggestions();
            } else {

                //this shows the top left circular progress
                //you can call it wherever you want, but
                //it makes sense to do it when loading something in
                //the background.
                mSearchView.showProgress();

                //simulates a query call to a data source
                //with a new query.
                NumberDataHelper.findSuggestions(getApplicationContext(), newQuery, 5,
                        FIND_SUGGESTION_SIMULATED_DELAY, new NumberDataHelper.OnFindSuggestionsListener() {

                            @Override
                            public void onResults(List<NumberSuggestion> results) {

                                //this will swap the data and
                                //render the collapse/expand animations as necessary
                                mSearchView.swapSuggestions(results);

                                //let the users know that the background
                                //process has completed
                                mSearchView.hideProgress();
                            }
                        });
            }

            Log.d(TAG, "onSearchTextChanged()");
        }
    });

    mSearchView.setOnSearchListener(new FloatingSearchView.OnSearchListener() {
        @Override
        public void onSuggestionClicked(final SearchSuggestion searchSuggestion) {

            NumberSuggestion numberSuggestion = (NumberSuggestion) searchSuggestion;
            NumberDataHelper.findNumbers(getApplicationContext(), numberSuggestion.getWord(),
                    new NumberDataHelper.OnFindNumberListener() {

                        @Override
                        public void onResults(List<NumberWrapper> results) {
                            mSearchResultsAdapter.swapData(results);
                        }

                    });
            Log.d(TAG, "onSuggestionClicked()");

            mLastQuery = searchSuggestion.getWord();
        }

        @Override
        public void onSearchAction(String query) {
            mLastQuery = query;

            NumberDataHelper.findNumbers(getApplicationContext(), query,
                    new NumberDataHelper.OnFindNumberListener() {

                        @Override
                        public void onResults(List<NumberWrapper> results) {
                            mSearchResultsAdapter.swapData(results);
                        }

                    });
            Log.d(TAG, "onSearchAction()");
        }
    });

    mSearchView.setOnFocusChangeListener(new FloatingSearchView.OnFocusChangeListener() {
        @Override
        public void onFocus() {

            //show suggestions when search bar gains focus (typically history suggestions)
            //                NumberDataHelper cdh = new NumberDataHelper();
            //                cdh.setContext(getApplicationContext());
            mSearchView.swapSuggestions(NumberDataHelper.getHistory(getApplicationContext(), 5));

            Log.d(TAG, "onFocus()");
        }

        @Override
        public void onFocusCleared() {

            //set the title of the bar so that when focus is returned a new query begins
            mSearchView.setSearchBarTitle(mLastQuery);

            //you can also set setSearchText(...) to keep the query there when not focused and when focus returns
            //mSearchView.setSearchText(searchSuggestion.getWord());

            Log.d(TAG, "onFocusCleared()");
        }
    });

    //handle menu clicks the same way as you would
    //in a regular activity
    mSearchView.setOnMenuItemClickListener(new FloatingSearchView.OnMenuItemClickListener() {
        @Override
        public void onActionMenuItemSelected(MenuItem item) {

            if (item.getItemId() == R.id.action_refresh_list) {

                /*mIsDarkSearchTheme = true;
                        
                //demonstrate setting colors for items
                mSearchView.setBackgroundColor(Color.parseColor("#787878"));
                mSearchView.setViewTextColor(Color.parseColor("#e9e9e9"));
                mSearchView.setHintTextColor(Color.parseColor("#e9e9e9"));
                mSearchView.setActionMenuOverflowColor(Color.parseColor("#e9e9e9"));
                mSearchView.setMenuItemIconColor(Color.parseColor("#e9e9e9"));
                mSearchView.setLeftActionIconColor(Color.parseColor("#e9e9e9"));
                mSearchView.setClearBtnColor(Color.parseColor("#e9e9e9"));
                mSearchView.setDividerColor(Color.parseColor("#BEBEBE"));
                mSearchView.setLeftActionIconColor(Color.parseColor("#e9e9e9"));*/
                populateCardList();
            } else if (item.getItemId() == R.id.action_voice_rec) {
                Intent voiceRecognize = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
                        getClass().getPackage().getName());
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                        RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "ar-EG");
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say the letter in ARABIC...");
                /*voiceRecognize.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true); */
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 100);
                startActivityForResult(voiceRecognize, REQUEST_CODE);
            } else {

                //just print action
                //Toast.makeText(getApplicationContext().getApplicationContext(), item.getTitle(),
                //        Toast.LENGTH_SHORT).show();
            }

        }
    });

    //use this listener to listen to menu clicks when app:floatingSearch_leftAction="showHome"
    mSearchView.setOnHomeActionClickListener(new FloatingSearchView.OnHomeActionClickListener() {
        @Override
        public void onHomeClicked() {
            startActivity(new Intent(NumberViewActivity.this, MainActivity.class)
                    .addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP));
            overridePendingTransition(R.anim.left_to_right, R.anim.right_to_left);
            Log.d(TAG, "onHomeClicked()");
        }
    });

    /*
     * Here you have access to the left icon and the text of a given suggestion
     * item as it is bound to the suggestion list. You can use this
     * callback to change some properties of the left icon and the text. For example, you
     * can load the left icon images using your favorite image loading library, or change text color.
     *
     *
     * Important:
     * Keep in mind that the suggestion list is a RecyclerView, so views are reused for different
     * items in the list.
     */
    mSearchView.setOnBindSuggestionCallback(new SearchSuggestionsAdapter.OnBindSuggestionCallback() {
        @Override
        public void onBindSuggestion(View suggestionView, ImageView leftIcon, TextView textView,
                SearchSuggestion item, int itemPosition) {
            NumberSuggestion numberSuggestion = (NumberSuggestion) item;

            String textColor = mIsDarkSearchTheme ? "#ffffff" : "#000000";
            String textLight = mIsDarkSearchTheme ? "#bfbfbf" : "#787878";

            if (numberSuggestion.getIsHistory()) {
                leftIcon.setImageDrawable(
                        ResourcesCompat.getDrawable(getResources(), R.drawable.ic_history_black_24dp, null));

                Util.setIconColor(leftIcon, Color.parseColor(textColor));
                leftIcon.setAlpha(.36f);
            } else {
                leftIcon.setImageDrawable(ResourcesCompat.getDrawable(getResources(),
                        R.drawable.ic_lightbulb_outline_black_24dp, null));

                Util.setIconColor(leftIcon, Color.parseColor(textColor));
                leftIcon.setAlpha(.36f);
                /*leftIcon.setAlpha(0.0f);
                leftIcon.setImageDrawable(null);*/
            }

            textView.setTextColor(Color.parseColor(textColor));
            String text = numberSuggestion.getWord().replaceFirst(mSearchView.getQuery(),
                    "<font color=\"" + textLight + "\">" + mSearchView.getQuery() + "</font>");
            textView.setText(Html.fromHtml(text));
        }

    });

    //listen for when suggestion list expands/shrinks in order to move down/up the
    //search results list
    mSearchView.setOnSuggestionsListHeightChanged(new FloatingSearchView.OnSuggestionsListHeightChanged() {
        @Override
        public void onSuggestionsListHeightChanged(float newHeight) {
            mSearchResultsList.setTranslationY(newHeight);
        }
    });
}

From source file:org.bishoph.oxdemo.OXDemo.java

public void startVoiceRecognitionActivity() {
    Log.v("OXDemo", "startVoiceRecognitionActivity init...");
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    PackageManager pm = getPackageManager();
    List<ResolveInfo> activities = pm.queryIntentActivities(intent, 0);
    if (activities.size() > 0) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speech recognition demo");
        startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
        Log.v("OXDemo", "...done");
    } else {
        // createTask(folder_id,
        // "Auto creation  "+getSimpleRandomString());
        Log.v("OXDemo", "No voice recognition!");
    }
}

From source file:com.neighbor.ex.tong.ui.activity.MainActivity2Activity.java

private void promptSpeechInput() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getResources().getString(R.string.voice_prompt));
    startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
}

From source file:onion.chat.MainActivity.java

private void listen() {
    //inform();
    PackageManager pm = getPackageManager();
    List<ResolveInfo> activities = pm
            .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
    if (activities.size() == 0) {
        Toast.makeText(this, "Voice recognizer not present", Toast.LENGTH_SHORT).show();
    } else {
        try {
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

            intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass().getPackage().getName());

            intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Its Ur's");

            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
            startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
        } catch (Exception e) {
            String toSpeak = "Oops your device doesn't support Voice recognition";
            Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
            t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        }
    }
}