Example usage for android.speech RecognizerIntent LANGUAGE_MODEL_FREE_FORM

List of usage examples for android.speech RecognizerIntent LANGUAGE_MODEL_FREE_FORM

Introduction

On this page you can find example usage of android.speech RecognizerIntent LANGUAGE_MODEL_FREE_FORM.

Prototype

String LANGUAGE_MODEL_FREE_FORM

To view the source code for android.speech RecognizerIntent LANGUAGE_MODEL_FREE_FORM, click the Source link.

Document

Use a language model based on free-form speech recognition.

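A typical caller passes this constant as the value of RecognizerIntent.EXTRA_LANGUAGE_MODEL on a RecognizerIntent.ACTION_RECOGNIZE_SPEECH intent and reads the transcriptions back in onActivityResult. The following is a minimal sketch of that round trip inside an Activity; the request code and the result handling are illustrative and are not taken from the projects listed under Usage.

private static final int REQUEST_SPEECH = 1; // illustrative request code

private void startFreeFormRecognition() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // LANGUAGE_MODEL_FREE_FORM requests a model tuned for general dictation
    // rather than web-search style queries (LANGUAGE_MODEL_WEB_SEARCH).
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    startActivityForResult(intent, REQUEST_SPEECH);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SPEECH && resultCode == RESULT_OK && data != null) {
        // EXTRA_RESULTS holds the candidate transcriptions, best match first.
        ArrayList<String> results = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (results != null && !results.isEmpty()) {
            String spokenText = results.get(0);
            // ... use spokenText
        }
    }
}
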
Usage

From source file: com.example.michel.facetrack.FaceTrackerActivity.java

/**
 * Starts the speech-to-text intent. This opens the Google speech recognition dialog to listen for speech input.
 */
private void startSpeechToText() {
    Log.e("start speech to text", " start speech to text");
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak something...");
    try {
        startActivityForResult(intent, SPEECH_RECOGNITION_CODE);
        System.out.println("hello 2");
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getApplicationContext(), "Sorry! Speech recognition is not supported in this device.",
                Toast.LENGTH_SHORT).show();
    }
}

From source file: android.support.v17.leanback.app.SearchSupportFragment.java

/**
 * Returns an intent that can be used to request speech recognition.
 * Built from the base {@link RecognizerIntent#ACTION_RECOGNIZE_SPEECH} plus
 * extras:
 *
 * <ul>
 * <li>{@link RecognizerIntent#EXTRA_LANGUAGE_MODEL} set to
 * {@link RecognizerIntent#LANGUAGE_MODEL_FREE_FORM}</li>
 * <li>{@link RecognizerIntent#EXTRA_PARTIAL_RESULTS} set to true</li>
 * <li>{@link RecognizerIntent#EXTRA_PROMPT} set to the search bar hint text</li>
 * </ul>
 *
 * For handling the intent returned from the service, see
 * {@link #setSearchQuery(Intent, boolean)}.
 */
public Intent getRecognizerIntent() {
    Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    if (mSearchBar != null && mSearchBar.getHint() != null) {
        recognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, mSearchBar.getHint());
    }
    recognizerIntent.putExtra(EXTRA_LEANBACK_BADGE_PRESENT, mBadgeDrawable != null);
    return recognizerIntent;
}
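
The javadoc above points to setSearchQuery(Intent, boolean) for handling the recognizer's result. A hedged sketch of that hand-off from a hosting activity, where the request code and the mSearchFragment field are illustrative:

private static final int REQUEST_SPEECH = 16; // illustrative request code
private SearchSupportFragment mSearchFragment; // assumed to be attached elsewhere

private void startRecognition() {
    // Launch the recognizer with the fragment's pre-configured intent
    // (free-form language model, partial results, search-bar prompt).
    startActivityForResult(mSearchFragment.getRecognizerIntent(), REQUEST_SPEECH);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SPEECH && resultCode == RESULT_OK) {
        // Hand the recognizer's result intent back to the fragment; "true" also submits the query.
        mSearchFragment.setSearchQuery(data, true);
    }
}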

From source file: br.liveo.searchliveo.SearchCardLiveo.java

private void startVoice(EditText editText) {
    ((InputMethodManager) mContext.getSystemService(Context.INPUT_METHOD_SERVICE))
            .hideSoftInputFromWindow(editText.getWindowToken(), 0);

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, mContext.getString(R.string.searchview_voice));
    try {
        mContext.startActivityForResult(intent, REQUEST_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(mContext.getApplicationContext(), R.string.not_supported, Toast.LENGTH_SHORT).show();
    }
}

From source file: com.wizardsofm.deskclock.alarms.AlarmActivity.java

void listenForCommand() {

    //        if (speech == null) {
    //            speech = SpeechRecognizer.createSpeechRecognizer(this);
    //            speech.setRecognitionListener(MainActivity.this);
    //        }
    //        speech = SpeechRecognizer.createSpeechRecognizer(this);
    //        speech.setRecognitionListener(this);

    i = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    i.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    i.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    i.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say something");
    //        i.putExtra("android.speech.extra.DICTATION_MODE", true);
    //        i.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    i.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS, 5000);
    try {
        startActivityForResult(i, 100);

        new CountDownTimer(5000, 1000) {

            public void onTick(long millisUntilFinished) {
                //do nothing, just let it tick
            }

            public void onFinish() {
                if (!alarmStopped) {
                    listenForCommand();
                }
            }
        }.start();

        //            speech.startListening(i);
    } catch (Exception e) {
        // Speech recognition could not be started; silently ignore and leave the alarm running.
    }
}
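
No onActivityResult is shown for this snippet; a hedged sketch of how the recognized phrase might be checked for a stop command follows. The request code 100 matches the call above, but the keyword check and the use of alarmStopped are illustrative guesses, not the project's actual handling.

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == 100 && resultCode == RESULT_OK && data != null) {
        ArrayList<String> results = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (results != null && !results.isEmpty()) {
            String command = results.get(0).toLowerCase(Locale.getDefault());
            // Illustrative keyword check: stop the restart loop once a "stop" command is heard.
            if (command.contains("stop")) {
                alarmStopped = true;
                // ... dismiss the alarm here
            }
        }
    }
}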

From source file: com.eveningoutpost.dexdrip.Home.java

public void promptSpeechNoteInput(View abc) {

    if (recognitionRunning)
        return;
    recognitionRunning = true;

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    // intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US"); // debug voice
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.speak_your_note_text));

    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_NOTE_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getApplicationContext(), getString(R.string.speech_recognition_is_not_supported),
                Toast.LENGTH_LONG).show();
    }

}

From source file: com.arlib.floatingsearchview.FloatingSearchView.java

private Intent createVoiceRecIntent(Activity activity, String hint) {

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, hint);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);

    return intent;
}

From source file: com.eveningoutpost.dexdrip.Home.java

/**
 * Shows the Google speech input dialog.
 */
private void promptSpeechInput() {

    if (recognitionRunning)
        return;
    recognitionRunning = true;

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    // intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US"); // debug voice
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.speak_your_treatment));

    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getApplicationContext(), R.string.speech_recognition_is_not_supported, Toast.LENGTH_LONG)
                .show();
    }

}

From source file: radu.pidroid.Controller.java

public void startVoiceRecognition() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    startActivityForResult(intent, SPEECH_RECOGNITION_REQUEST_CODE);
}

From source file: com.delexus.imitationzhihu.MySearchView.java

/**
 * Create and return an Intent that can launch the voice search activity, perform a specific
 * voice transcription, and forward the results to the searchable activity.
 *
 * @param baseIntent The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search activity
 */
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system.   We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search.  We always build it here (even if empty)
    // because the voice search activity will always need to insert "QUERY" into
    // it anyway.
    Bundle queryExtras = new Bundle();
    if (mAppSearchData != null) {
        queryExtras.putParcelable(SearchManager.APP_DATA, mAppSearchData);
    }

    // Now build the intent to launch the voice search.  Add all necessary
    // extras to launch the voice recognizer, and then all the necessary extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    Resources resources = getResources();
    if (searchable.getVoiceLanguageModeId() != 0) {
        languageModel = resources.getString(searchable.getVoiceLanguageModeId());
    }
    if (searchable.getVoicePromptTextId() != 0) {
        prompt = resources.getString(searchable.getVoicePromptTextId());
    }
    if (searchable.getVoiceLanguageId() != 0) {
        language = resources.getString(searchable.getVoiceLanguageId());
    }
    if (searchable.getVoiceMaxResults() != 0) {
        maxResults = searchable.getVoiceMaxResults();
    }

    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}
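
Because the recognizer fires the supplied PendingIntent when it finishes, the result reaches the searchable activity as an ACTION_SEARCH intent rather than through onActivityResult. A sketch of the receiving side, where the activity name is hypothetical and, as the comment above notes, the voice search activity inserts the transcription under SearchManager.QUERY:

// Hypothetical searchable activity that receives the forwarded voice query.
public class MySearchableActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        handleIntent(getIntent());
    }

    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        handleIntent(intent);
    }

    private void handleIntent(Intent intent) {
        if (Intent.ACTION_SEARCH.equals(intent.getAction())) {
            // The transcribed query is delivered under SearchManager.QUERY, alongside
            // any APP_DATA bundle that was attached via EXTRA_RESULTS_PENDINGINTENT_BUNDLE.
            String query = intent.getStringExtra(SearchManager.QUERY);
            // ... run the search with the query
        }
    }
}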

From source file: org.openhab.habdroid.ui.OpenHABMainActivity.java

private void launchVoiceRecognition() {
    Intent callbackIntent = new Intent(this, OpenHABVoiceService.class);
    callbackIntent.putExtra(OpenHABVoiceService.OPENHAB_BASE_URL_EXTRA, openHABBaseUrl);
    PendingIntent openhabPendingIntent = PendingIntent.getService(this, 0, callbackIntent, 0);

    Intent speechIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // Display a hint to the user about what to say.
    speechIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.info_voice_input));
    speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    speechIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    speechIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, openhabPendingIntent);

    try {
        startActivity(speechIntent);
    } catch (ActivityNotFoundException e) {
        // Speech not installed?
        Intent browserIntent = new Intent(Intent.ACTION_VIEW,
                Uri.parse("https://market.android.com/details?id=com.google.android.voicesearch"));
        startActivity(browserIntent);
    }
}
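
With EXTRA_RESULTS_PENDINGINTENT the transcriptions are delivered to OpenHABVoiceService instead of coming back through onActivityResult. A hedged sketch of the service side, assuming the recognizer attaches the results under RecognizerIntent.EXTRA_RESULTS; the handling shown here is illustrative and not the actual OpenHABVoiceService implementation:

// Illustrative service-side handling of the forwarded recognition results.
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    if (intent != null && intent.hasExtra(RecognizerIntent.EXTRA_RESULTS)) {
        ArrayList<String> results = intent.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        // OPENHAB_BASE_URL_EXTRA is the extra attached to the callback intent above.
        String baseUrl = intent.getStringExtra(OPENHAB_BASE_URL_EXTRA);
        if (results != null && !results.isEmpty()) {
            String voiceCommand = results.get(0);
            // ... send voiceCommand to the openHAB server at baseUrl
        }
    }
    return START_NOT_STICKY;
}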