Example usage for android.speech RecognizerIntent EXTRA_CALLING_PACKAGE

List of usage examples for android.speech RecognizerIntent EXTRA_CALLING_PACKAGE

Introduction

On this page you can find example usage for android.speech RecognizerIntent EXTRA_CALLING_PACKAGE.

Prototype

public static final String EXTRA_CALLING_PACKAGE

Document

The extra key used in an intent to the speech recognizer for voice search.
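
Before the project examples, here is a minimal sketch of the typical pattern; it is not taken from any of the projects below. The calling package is attached to a recognizer intent so the speech service can identify the requesting app. The method name startVoiceRecognition and the request code REQUEST_SPEECH are hypothetical.

private static final int REQUEST_SPEECH = 1234; // hypothetical request code

private void startVoiceRecognition() {
    // Assumed context: this method lives in an Activity.
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // Identify the calling app to the recognizer.
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getPackageName());
    startActivityForResult(intent, REQUEST_SPEECH);
}

In the examples below the extra is usually filled with getPackageName(); one project sets it only on older API levels, where it appears to still matter.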

Usage

From source file:com.telepromptu.TeleprompterService.java

private void startListening() {
    if (speechRecognizer == null) {
        speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
        speechRecognizer.setRecognitionListener(new DictationListener());
        Intent speechIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        speechIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
        speechIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
        //          speechIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS,true);
        //          speechIntent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 300000);
        //          speechIntent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 300000);
        speechRecognizer.startListening(speechIntent);
    }
}

From source file:de.dfki.iui.mmir.plugins.speech.android.AndroidSpeechRecognizer.java

private void _startSpeechRecognitionActivity(JSONArray args, CallbackContext callbackContext,
        boolean isWithEndOfSpeechDetection) {
    int maxMatches = 0;
    String prompt = "";//TODO remove? (not used when ASR is directly used as service here...)
    String language = Locale.getDefault().toString();
    boolean isIntermediate = false;

    try {
        if (args.length() > 0) {
            // Optional language specified
            language = args.getString(0);
        }
        if (args.length() > 1) {
            isIntermediate = args.getBoolean(1);
        }
        if (args.length() > 2) {
            // Maximum number of matches, 0 means that the recognizer "decides"
            String temp = args.getString(2);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 3) {
            // Optional text prompt
            prompt = args.getString(3);
        }

        //TODO if ... withoutEndOfSpeechDetection = ...
    } catch (Exception e) {
        Log.e(PLUGIN_NAME, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    // Create the intent and set parameters
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);

    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    if (!isWithEndOfSpeechDetection) {

        // try to simulate start/stop-recording behavior (without end-of-speech detection) 

        //NOTE these settings do not seem to have any effect for the default Google recognizer at API level > 16

        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 10000L);
        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 6000L);
    }

    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);

    if (!prompt.equals(""))
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);

    if (isIntermediate)
        intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);

    //NOTE the calling-package extra seems to be required for older Android versions, but not since API level 17(?)
    if (SDK_VERSION <= Build.VERSION_CODES.JELLY_BEAN)
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, cordova.getActivity().getPackageName());

    synchronized (speechLock) {

        if (speech != null) {
            speech.destroy();
        }
        speech = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity());

        disableSoundFeedback();

        ++recCounter;
        currentRecognizer = new ASRHandler(recCounter, enableMicLevelsListeners, callbackContext, this);
        currentRecognizer.setHapticPrompt(
                (Vibrator) this.cordova.getActivity().getSystemService(Context.VIBRATOR_SERVICE));
        speech.setRecognitionListener(currentRecognizer);
        speech.startListening(intent);

    }
}

From source file:com.eng.arab.translator.androidtranslator.activity.NumberViewActivity.java

private void setupFloatingSearch() {
    mSearchView.setOnHomeActionClickListener(new FloatingSearchView.OnHomeActionClickListener() {
        @Override
        public void onHomeClicked() {

        }
    });

    mSearchView.setOnQueryChangeListener(new FloatingSearchView.OnQueryChangeListener() {

        @Override
        public void onSearchTextChanged(String oldQuery, final String newQuery) {

            if (!oldQuery.equals("") && newQuery.equals("")) {
                mSearchView.clearSuggestions();
            } else {

                //this shows the top left circular progress
                //you can call it where ever you want, but
                //it makes sense to do it when loading something in
                //the background.
                mSearchView.showProgress();

                //simulates a query call to a data source
                //with a new query.
                NumberDataHelper.findSuggestions(getApplicationContext(), newQuery, 5,
                        FIND_SUGGESTION_SIMULATED_DELAY, new NumberDataHelper.OnFindSuggestionsListener() {

                            @Override
                            public void onResults(List<NumberSuggestion> results) {

                                //this will swap the data and
                                //render the collapse/expand animations as necessary
                                mSearchView.swapSuggestions(results);

                                //let the users know that the background
                                //process has completed
                                mSearchView.hideProgress();
                            }
                        });
            }

            Log.d(TAG, "onSearchTextChanged()");
        }
    });

    mSearchView.setOnSearchListener(new FloatingSearchView.OnSearchListener() {
        @Override
        public void onSuggestionClicked(final SearchSuggestion searchSuggestion) {

            NumberSuggestion numberSuggestion = (NumberSuggestion) searchSuggestion;
            NumberDataHelper.findNumbers(getApplicationContext(), numberSuggestion.getWord(),
                    new NumberDataHelper.OnFindNumberListener() {

                        @Override
                        public void onResults(List<NumberWrapper> results) {
                            mSearchResultsAdapter.swapData(results);
                        }

                    });
            Log.d(TAG, "onSuggestionClicked()");

            mLastQuery = searchSuggestion.getWord();
        }

        @Override
        public void onSearchAction(String query) {
            mLastQuery = query;

            NumberDataHelper.findNumbers(getApplicationContext(), query,
                    new NumberDataHelper.OnFindNumberListener() {

                        @Override
                        public void onResults(List<NumberWrapper> results) {
                            mSearchResultsAdapter.swapData(results);
                        }

                    });
            Log.d(TAG, "onSearchAction()");
        }
    });

    mSearchView.setOnFocusChangeListener(new FloatingSearchView.OnFocusChangeListener() {
        @Override
        public void onFocus() {

            //show suggestions when search bar gains focus (typically history suggestions)
            //                NumberDataHelper cdh = new NumberDataHelper();
            //                cdh.setContext(getApplicationContext());
            mSearchView.swapSuggestions(NumberDataHelper.getHistory(getApplicationContext(), 5));

            Log.d(TAG, "onFocus()");
        }

        @Override
        public void onFocusCleared() {

            //set the title of the bar so that when focus is returned a new query begins
            mSearchView.setSearchBarTitle(mLastQuery);

            //you can also call setSearchText(...) to keep the query there when not focused and when focus returns
            //mSearchView.setSearchText(searchSuggestion.getWord());

            Log.d(TAG, "onFocusCleared()");
        }
    });

    //handle menu clicks the same way as you would
    //in a regular activity
    mSearchView.setOnMenuItemClickListener(new FloatingSearchView.OnMenuItemClickListener() {
        @Override
        public void onActionMenuItemSelected(MenuItem item) {

            if (item.getItemId() == R.id.action_refresh_list) {

                /*mIsDarkSearchTheme = true;
                        
                //demonstrate setting colors for items
                mSearchView.setBackgroundColor(Color.parseColor("#787878"));
                mSearchView.setViewTextColor(Color.parseColor("#e9e9e9"));
                mSearchView.setHintTextColor(Color.parseColor("#e9e9e9"));
                mSearchView.setActionMenuOverflowColor(Color.parseColor("#e9e9e9"));
                mSearchView.setMenuItemIconColor(Color.parseColor("#e9e9e9"));
                mSearchView.setLeftActionIconColor(Color.parseColor("#e9e9e9"));
                mSearchView.setClearBtnColor(Color.parseColor("#e9e9e9"));
                mSearchView.setDividerColor(Color.parseColor("#BEBEBE"));
                mSearchView.setLeftActionIconColor(Color.parseColor("#e9e9e9"));*/
                populateCardList();
            } else if (item.getItemId() == R.id.action_voice_rec) {
                Intent voiceRecognize = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
                        getClass().getPackage().getName());
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                        RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "ar-EG");
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say the letter in ARABIC...");
                /*voiceRecognize.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true); */
                voiceRecognize.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 100);
                startActivityForResult(voiceRecognize, REQUEST_CODE);
            } else {

                //just print action
                //Toast.makeText(getApplicationContext().getApplicationContext(), item.getTitle(),
                //        Toast.LENGTH_SHORT).show();
            }

        }
    });

    //use this listener to listen to menu clicks when app:floatingSearch_leftAction="showHome"
    mSearchView.setOnHomeActionClickListener(new FloatingSearchView.OnHomeActionClickListener() {
        @Override
        public void onHomeClicked() {
            startActivity(new Intent(NumberViewActivity.this, MainActivity.class)
                    .addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP));
            overridePendingTransition(R.anim.left_to_right, R.anim.right_to_left);
            Log.d(TAG, "onHomeClicked()");
        }
    });

    /*
     * Here you have access to the left icon and the text of a given suggestion
     * item as it is bound to the suggestion list. You can use this
     * callback to change some properties of the left icon and the text. For example, you
     * can load the left icon images using your favorite image loading library, or change text color.
     *
     *
     * Important:
     * Keep in mind that the suggestion list is a RecyclerView, so views are reused for different
     * items in the list.
     */
    mSearchView.setOnBindSuggestionCallback(new SearchSuggestionsAdapter.OnBindSuggestionCallback() {
        @Override
        public void onBindSuggestion(View suggestionView, ImageView leftIcon, TextView textView,
                SearchSuggestion item, int itemPosition) {
            NumberSuggestion numberSuggestion = (NumberSuggestion) item;

            String textColor = mIsDarkSearchTheme ? "#ffffff" : "#000000";
            String textLight = mIsDarkSearchTheme ? "#bfbfbf" : "#787878";

            if (numberSuggestion.getIsHistory()) {
                leftIcon.setImageDrawable(
                        ResourcesCompat.getDrawable(getResources(), R.drawable.ic_history_black_24dp, null));

                Util.setIconColor(leftIcon, Color.parseColor(textColor));
                leftIcon.setAlpha(.36f);
            } else {
                leftIcon.setImageDrawable(ResourcesCompat.getDrawable(getResources(),
                        R.drawable.ic_lightbulb_outline_black_24dp, null));

                Util.setIconColor(leftIcon, Color.parseColor(textColor));
                leftIcon.setAlpha(.36f);
                /*leftIcon.setAlpha(0.0f);
                leftIcon.setImageDrawable(null);*/
            }

            textView.setTextColor(Color.parseColor(textColor));
            String text = numberSuggestion.getWord().replaceFirst(mSearchView.getQuery(),
                    "<font color=\"" + textLight + "\">" + mSearchView.getQuery() + "</font>");
            textView.setText(Html.fromHtml(text));
        }

    });

    //listen for when suggestion list expands/shrinks in order to move down/up the
    //search results list
    mSearchView.setOnSuggestionsListHeightChanged(new FloatingSearchView.OnSuggestionsListHeightChanged() {
        @Override
        public void onSuggestionsListHeightChanged(float newHeight) {
            mSearchResultsList.setTranslationY(newHeight);
        }
    });
}

From source file:com.mhennessy.mapfly.MainActivity.java

public void setMapLocationBasedOnSpeech() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "com.mhennessy.mapfly");

    SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer(this.getApplicationContext());

    // Stop flying so that messages can be displayed to the user without
    // being overwritten by pitch/roll info.
    setFlyingEnabled(false);
    RecognitionListener listener = new RecognitionListener() {
        @Override
        public void onResults(Bundle results) {
            ArrayList<String> voiceResults = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            if (voiceResults == null) {
                Log.e(TAG, "No voice results");
            } else {
                Log.d(TAG, "Printing matches: ");
                for (String match : voiceResults) {
                    Log.d(TAG, match);
                }
                String bestMatch = voiceResults.get(0);
                setMapLocation(bestMatch);
            }
        }

        @Override
        public void onReadyForSpeech(Bundle params) {
            setTitle("Say something!");
            Log.d(TAG, "Ready for speech");
        }

        @Override
        public void onError(int error) {
            setTitle("Speech Error");
            Log.d(TAG, "Error listening for speech: " + error);
        }

        @Override
        public void onBeginningOfSpeech() {
            Log.d(TAG, "Speech starting");
        }

        @Override
        public void onBufferReceived(byte[] buffer) {
            // no-op
        }

        @Override
        public void onEndOfSpeech() {
            // no-op
        }

        @Override
        public void onEvent(int eventType, Bundle params) {
            // no-op
        }

        @Override
        public void onPartialResults(Bundle partialResults) {
            // no-op
        }

        @Override
        public void onRmsChanged(float rmsdB) {
            // no-op
        }
    };
    recognizer.setRecognitionListener(listener);
    recognizer.startListening(intent);
}

From source file:onion.chat.MainActivity.java

private void listen() {
    //inform();
    PackageManager pm = getPackageManager();
    List<ResolveInfo> activities = pm
            .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
    if (activities.size() == 0) {
        Toast.makeText(this, "Voice recognizer not present", Toast.LENGTH_SHORT).show();
    } else {
        try {
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

            intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass().getPackage().getName());

            intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Its Ur's");

            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
            startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
        } catch (Exception e) {
            String toSpeak = "Oops your device doesn't support Voice recognition";
            Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
            t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        }
    }
}

From source file:nl.hnogames.domoticz.SpeechSettingsActivity.java

private void startRecognition() {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
        if (PermissionsUtil.canAccessAudioState(this)) {
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getPackageName());
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());

            speechRecognizer.startListening(intent);
            listeningSpeechRecognition = true;
            playRecognitionAnimation();
        } else {
            permissionHelper.request(PermissionsUtil.INITIAL_AUDIO_PERMS);
        }
    } else {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getPackageName());
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
        speechRecognizer.startListening(intent);

        listeningSpeechRecognition = true;
        playRecognitionAnimation();
    }
}

From source file:com.wizardsofm.deskclock.alarms.AlarmActivity.java

public void startListening() {

    makeText(this, getResources().getString(R.string.prompt_voice_commands), Toast.LENGTH_SHORT).show();

    speech = SpeechRecognizer.createSpeechRecognizer(this);
    speech.setRecognitionListener(this);

    intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en");
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
    //        intent.putExtra("android.speech.extra.DICTATION_MODE", true);
    speech.startListening(intent);

    resumeCounting = true;
    keepListening();

}

From source file:com.delexus.imitationzhihu.MySearchView.java

/**
 * Create and return an Intent that can launch the voice search activity, perform a specific
 * voice transcription, and forward the results to the searchable activity.
 *
 * @param baseIntent The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search activity
 */
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system.   We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search.  We always build it here (even if empty)
    // because the voice search activity will always need to insert "QUERY" into
    // it anyway.
    Bundle queryExtras = new Bundle();
    if (mAppSearchData != null) {
        queryExtras.putParcelable(SearchManager.APP_DATA, mAppSearchData);
    }

    // Now build the intent to launch the voice search.  Add all necessary
    // extras to launch the voice recognizer, and then all the necessary extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    Resources resources = getResources();
    if (searchable.getVoiceLanguageModeId() != 0) {
        languageModel = resources.getString(searchable.getVoiceLanguageModeId());
    }
    if (searchable.getVoicePromptTextId() != 0) {
        prompt = resources.getString(searchable.getVoicePromptTextId());
    }
    if (searchable.getVoiceLanguageId() != 0) {
        language = resources.getString(searchable.getVoiceLanguageId());
    }
    if (searchable.getVoiceMaxResults() != 0) {
        maxResults = searchable.getVoiceMaxResults();
    }

    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}

From source file:android.support.v7.widget.SearchView.java

/**
 * Create and return an Intent that can launch the voice search activity for web search.
 */
@TargetApi(Build.VERSION_CODES.FROYO)
private Intent createVoiceWebSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    Intent voiceIntent = new Intent(baseIntent);
    ComponentName searchActivity = searchable.getSearchActivity();
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());
    return voiceIntent;
}

From source file:io.github.vomitcuddle.SearchViewAllowEmpty.SearchView.java

/**
 * Create and return an Intent that can launch the voice search activity, perform a specific
 * voice transcription, and forward the results to the searchable activity.
 *
 * @param baseIntent The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search activity
 */
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system.   We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search.  We always build it here (even if empty)
    // because the voice search activity will always need to insert "QUERY" into
    // it anyway.
    Bundle queryExtras = new Bundle();
    if (mAppSearchData != null) {
        queryExtras.putParcelable(SearchManager.APP_DATA, mAppSearchData);
    }

    // Now build the intent to launch the voice search.  Add all necessary
    // extras to launch the voice recognizer, and then all the necessary extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    Resources resources = getResources();
    if (searchable.getVoiceLanguageModeId() != 0) {
        languageModel = resources.getString(searchable.getVoiceLanguageModeId());
    }
    if (searchable.getVoicePromptTextId() != 0) {
        prompt = resources.getString(searchable.getVoicePromptTextId());
    }
    if (searchable.getVoiceLanguageId() != 0) {
        language = resources.getString(searchable.getVoiceLanguageId());
    }
    if (searchable.getVoiceMaxResults() != 0) {
        maxResults = searchable.getVoiceMaxResults();
    }
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}