Example usage for android.speech RecognizerIntent EXTRA_LANGUAGE_MODEL

Introduction

This page lists example usages of android.speech.RecognizerIntent.EXTRA_LANGUAGE_MODEL, collected from open-source projects.

Prototype

public static final String EXTRA_LANGUAGE_MODEL

Document

Informs the recognizer which speech model to prefer when performing ACTION_RECOGNIZE_SPEECH. This is a required extra for that action; the standard values are LANGUAGE_MODEL_FREE_FORM and LANGUAGE_MODEL_WEB_SEARCH.
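
A minimal, self-contained sketch that sets the extra and reads the transcriptions back through onActivityResult (the activity name and request code below are illustrative):

import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.speech.RecognizerIntent;

import java.util.ArrayList;

public class SpeechDemoActivity extends Activity {
    private static final int REQUEST_SPEECH = 100; // arbitrary request code

    private void startRecognition() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        // EXTRA_LANGUAGE_MODEL is a required extra for ACTION_RECOGNIZE_SPEECH.
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        try {
            startActivityForResult(intent, REQUEST_SPEECH);
        } catch (ActivityNotFoundException e) {
            // No speech recognition activity is available on this device.
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_SPEECH && resultCode == RESULT_OK && data != null) {
            ArrayList<String> matches =
                    data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            // matches.get(0), if present, is the recognizer's best transcription.
        }
    }
}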

Usage

From source file: me.willowcheng.makerthings.ui.OpenHABMainActivity.java

private void launchVoiceRecognition() {
    Intent callbackIntent = new Intent(this, OpenHABVoiceService.class);
    callbackIntent.putExtra(OpenHABVoiceService.OPENHAB_BASE_URL_EXTRA, openHABBaseUrl);
    PendingIntent openhabPendingIntent = PendingIntent.getService(this, 0, callbackIntent, 0);

    Intent speechIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // Display a hint to the user about what to say.
    speechIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.info_voice_input));
    speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    speechIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    speechIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, openhabPendingIntent);

    startActivity(speechIntent);
}
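
This example forwards the transcriptions to OpenHABVoiceService through EXTRA_RESULTS_PENDINGINTENT instead of reading them back with onActivityResult. A hedged sketch of how such a service could pick up the forwarded results, assuming the recognizer attaches them under RecognizerIntent.EXTRA_RESULTS (illustrative, not the actual openHAB implementation):

import android.app.IntentService;
import android.content.Intent;
import android.speech.RecognizerIntent;

import java.util.ArrayList;

public class OpenHABVoiceService extends IntentService {
    public static final String OPENHAB_BASE_URL_EXTRA = "openHABBaseUrl";

    public OpenHABVoiceService() {
        super("OpenHABVoiceService");
    }

    @Override
    protected void onHandleIntent(Intent intent) {
        // The fired PendingIntent carries the recognizer output; this sketch
        // assumes the transcriptions arrive under EXTRA_RESULTS.
        ArrayList<String> results =
                intent.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        String baseUrl = intent.getStringExtra(OPENHAB_BASE_URL_EXTRA);
        if (results != null && !results.isEmpty()) {
            String command = results.get(0);
            // Send `command` to the openHAB server at `baseUrl`.
        }
    }
}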

From source file: com.androzic.vnspeech.MapFragment.java

private void onTalk() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // EXTRA_LANGUAGE expects a BCP 47 language tag string such as "vi-VN".
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "vi-VN");
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Đọc lệnh"); // Vietnamese: "Read a command"
    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        // "This device does not support speech recognition."
        Toast.makeText(getContext(), "Thiết bị này không hỗ trợ nhận dạng giọng nói.",
                Toast.LENGTH_SHORT).show();
    }
}
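
The transcriptions come back through the fragment's onActivityResult. A minimal handler for the request above might look like this (REQ_CODE_SPEECH_INPUT mirrors the example; the actual Androzic handler may differ):

@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQ_CODE_SPEECH_INPUT && resultCode == Activity.RESULT_OK && data != null) {
        ArrayList<String> result =
                data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (result != null && !result.isEmpty()) {
            String spokenCommand = result.get(0);
            // Interpret the spoken command here.
        }
    }
}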

From source file: io.github.vomitcuddle.SearchViewAllowEmpty.SearchView.java

/**
 * Create and return an Intent that can launch the voice search activity, perform a specific
 * voice transcription, and forward the results to the searchable activity.
 *
 * @param baseIntent The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search activity
 */
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system.   We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search.  We always build it here (even if empty)
    // because the voice search activity will always need to insert "QUERY" into
    // it anyway.
    Bundle queryExtras = new Bundle();
    if (mAppSearchData != null) {
        queryExtras.putParcelable(SearchManager.APP_DATA, mAppSearchData);
    }

    // Now build the intent to launch the voice search.  Add all necessary
    // extras to launch the voice recognizer, and then all the necessary extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    Resources resources = getResources();
    if (searchable.getVoiceLanguageModeId() != 0) {
        languageModel = resources.getString(searchable.getVoiceLanguageModeId());
    }
    if (searchable.getVoicePromptTextId() != 0) {
        prompt = resources.getString(searchable.getVoicePromptTextId());
    }
    if (searchable.getVoiceLanguageId() != 0) {
        language = resources.getString(searchable.getVoiceLanguageId());
    }
    if (searchable.getVoiceMaxResults() != 0) {
        maxResults = searchable.getVoiceMaxResults();
    }
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}
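
When the transcription finishes, the recognizer fires the PendingIntent, so the searchable activity receives an ordinary ACTION_SEARCH intent whose query string was inserted by the voice search activity (the "QUERY" mentioned in the comment above). A hedged sketch of the receiving side, with an illustrative activity name:

import android.app.Activity;
import android.app.SearchManager;
import android.content.Intent;
import android.os.Bundle;

public class ExampleSearchableActivity extends Activity {
    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        if (Intent.ACTION_SEARCH.equals(intent.getAction())) {
            // The voice search activity inserts the transcription as the query.
            String query = intent.getStringExtra(SearchManager.QUERY);
            // Any APP_DATA bundle placed in queryExtras above is forwarded as well.
            Bundle appData = intent.getBundleExtra(SearchManager.APP_DATA);
            // Run the search with `query`.
        }
    }
}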

From source file: android.support.v7.widget.SearchView.java

/**
 * Create and return an Intent that can launch the voice search activity, perform a specific
 * voice transcription, and forward the results to the searchable activity.
 *
 * @param baseIntent The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search activity
 */
@TargetApi(Build.VERSION_CODES.FROYO)
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system.   We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search.  We always build it here (even if empty)
    // because the voice search activity will always need to insert "QUERY" into
    // it anyway.
    Bundle queryExtras = new Bundle();
    if (mAppSearchData != null) {
        queryExtras.putParcelable(SearchManager.APP_DATA, mAppSearchData);
    }

    // Now build the intent to launch the voice search.  Add all necessary
    // extras to launch the voice recognizer, and then all the necessary extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    if (Build.VERSION.SDK_INT >= 8) {
        Resources resources = getResources();
        if (searchable.getVoiceLanguageModeId() != 0) {
            languageModel = resources.getString(searchable.getVoiceLanguageModeId());
        }
        if (searchable.getVoicePromptTextId() != 0) {
            prompt = resources.getString(searchable.getVoicePromptTextId());
        }
        if (searchable.getVoiceLanguageId() != 0) {
            language = resources.getString(searchable.getVoiceLanguageId());
        }
        if (searchable.getVoiceMaxResults() != 0) {
            maxResults = searchable.getVoiceMaxResults();
        }
    }
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}

From source file: cm.aptoide.com.actionbarsherlock.widget.SearchView.java

/**
 * Create and return an Intent that can launch the voice search activity, perform a specific
 * voice transcription, and forward the results to the searchable activity.
 *
 * @param baseIntent The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search activity
 */
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system.   We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search.  We always build it here (even if empty)
    // because the voice search activity will always need to insert "QUERY" into
    // it anyway.
    Bundle queryExtras = new Bundle();

    // Now build the intent to launch the voice search.  Add all necessary
    // extras to launch the voice recognizer, and then all the necessary extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    Resources resources = getResources();
    if (searchable.getVoiceLanguageModeId() != 0) {
        languageModel = resources.getString(searchable.getVoiceLanguageModeId());
    }
    if (searchable.getVoicePromptTextId() != 0) {
        prompt = resources.getString(searchable.getVoicePromptTextId());
    }
    if (searchable.getVoiceLanguageId() != 0) {
        language = resources.getString(searchable.getVoiceLanguageId());
    }
    if (searchable.getVoiceMaxResults() != 0) {
        maxResults = searchable.getVoiceMaxResults();
    }
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}

From source file: com.example.navigationsearchview.NavigationSearchView.java

/**
 * Create and return an Intent that can launch the voice search activity,
 * perform a specific voice transcription, and forward the results to the
 * searchable activity.
 *
 * @param baseIntent
 *            The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search
 *         activity
 */
@TargetApi(Build.VERSION_CODES.FROYO)
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system. We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search. We always build it here (even if
    // empty)
    // because the voice search activity will always need to insert "QUERY"
    // into
    // it anyway.
    Bundle queryExtras = new Bundle();
    if (mAppSearchData != null) {
        queryExtras.putParcelable(SearchManager.APP_DATA, mAppSearchData);
    }

    // Now build the intent to launch the voice search. Add all necessary
    // extras to launch the voice recognizer, and then all the necessary
    // extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's
    // metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    if (Build.VERSION.SDK_INT >= 8) {
        Resources resources = getResources();
        if (searchable.getVoiceLanguageModeId() != 0) {
            languageModel = resources.getString(searchable.getVoiceLanguageModeId());
        }
        if (searchable.getVoicePromptTextId() != 0) {
            prompt = resources.getString(searchable.getVoicePromptTextId());
        }
        if (searchable.getVoiceLanguageId() != 0) {
            language = resources.getString(searchable.getVoiceLanguageId());
        }
        if (searchable.getVoiceMaxResults() != 0) {
            maxResults = searchable.getVoiceMaxResults();
        }
    }
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}

From source file: org.botlibre.sdk.activity.ChatActivity.java

@TargetApi(23)
private void beginListening() {
    lastReply = System.currentTimeMillis();
    setStreamVolume();
    debug("beginListening:");

    try {
        if (!MainActivity.handsFreeSpeech) {
            return;
        }
        muteMicBeep(true);
        isListening = true;

        if (!MainActivity.listenInBackground) {
            muteMicBeep(false);
            return;
        }

    } catch (Exception ignore) {
        Log.e("Error", "BeginListening");
    }

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
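    // Note: this example passes a language code to EXTRA_LANGUAGE_MODEL, which normally
    // takes a model constant such as LANGUAGE_MODEL_FREE_FORM or LANGUAGE_MODEL_WEB_SEARCH.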
    if (MainActivity.offlineSpeech) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);

        if (!this.failedOfflineLanguage) {
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US");
            // intent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, true);
        }
        intent.putExtra(RecognizerIntent.EXTRA_PREFER_OFFLINE, true);
    } else {
        if (MainActivity.voice.language != null && !MainActivity.voice.language.isEmpty()) {
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);
            if (!this.failedOfflineLanguage) {
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, MainActivity.voice.language);
            }
        } else {
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, "en");
            if (!this.failedOfflineLanguage) {
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en");
            }
        }
    }

    try {
        Log.d("BeginListening", "StartListening");
        this.speech.startListening(intent);
        setMicIcon(true, false);
    } catch (ActivityNotFoundException a) {
        Log.d("BeginListening", "CatchError: " + a.getMessage());
        Toast t = Toast.makeText(getApplicationContext(), "Your device doesn't support Speech to Text",
                Toast.LENGTH_SHORT);
        t.show();
    }
}
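
Unlike the other examples on this page, this intent is handed to a SpeechRecognizer instance (this.speech) rather than launched as an activity. A hedged sketch of how such a recognizer is typically created and how its results arrive (the wiring below is illustrative, not the Bot Libre source):

private SpeechRecognizer speech;

private void initSpeechRecognizer() {
    speech = SpeechRecognizer.createSpeechRecognizer(this);
    speech.setRecognitionListener(new RecognitionListener() {
        @Override
        public void onResults(Bundle results) {
            ArrayList<String> matches =
                    results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            if (matches != null && !matches.isEmpty()) {
                // Forward matches.get(0) to the chat engine.
            }
        }
        // Remaining RecognitionListener callbacks left empty for brevity.
        @Override public void onReadyForSpeech(Bundle params) {}
        @Override public void onBeginningOfSpeech() {}
        @Override public void onRmsChanged(float rmsdB) {}
        @Override public void onBufferReceived(byte[] buffer) {}
        @Override public void onEndOfSpeech() {}
        @Override public void onError(int error) {}
        @Override public void onPartialResults(Bundle partialResults) {}
        @Override public void onEvent(int eventType, Bundle params) {}
    });
}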

From source file: com.tandong.sa.sherlock.widget.SearchView.java

/**
 * Create and return an Intent that can launch the voice search activity,
 * perform a specific voice transcription, and forward the results to the
 * searchable activity.
 * 
 * @param baseIntent
 *            The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search
 *         activity
 */
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system. We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search. We always build it here (even if
    // empty)
    // because the voice search activity will always need to insert "QUERY"
    // into
    // it anyway.
    Bundle queryExtras = new Bundle();

    // Now build the intent to launch the voice search. Add all necessary
    // extras to launch the voice recognizer, and then all the necessary
    // extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's
    // metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    Resources resources = getResources();
    if (searchable.getVoiceLanguageModeId() != 0) {
        languageModel = resources.getString(searchable.getVoiceLanguageModeId());
    }
    if (searchable.getVoicePromptTextId() != 0) {
        prompt = resources.getString(searchable.getVoicePromptTextId());
    }
    if (searchable.getVoiceLanguageId() != 0) {
        language = resources.getString(searchable.getVoiceLanguageId());
    }
    if (searchable.getVoiceMaxResults() != 0) {
        maxResults = searchable.getVoiceMaxResults();
    }
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}

From source file: com.rfo.basic.Run.java

public static Intent buildVoiceRecognitionIntent() {
    if (sttPrompt == null) {
        sttPrompt = sttDefaultPrompt;
    }
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, sttPrompt);
    return intent;
}
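
A caller then launches the returned intent and reads the results back through the activity-result mechanism. An illustrative caller (the request code is an assumption, not part of the BASIC! source):

private static final int VOICE_RECOGNITION_REQUEST = 1;

private void startVoiceRecognition(Activity activity) {
    Intent intent = buildVoiceRecognitionIntent();
    try {
        activity.startActivityForResult(intent, VOICE_RECOGNITION_REQUEST);
    } catch (ActivityNotFoundException e) {
        // No speech recognizer is installed; fall back to keyboard input.
    }
}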