Example usage for android.speech RecognizerIntent EXTRA_LANGUAGE_MODEL

List of usage examples for android.speech RecognizerIntent EXTRA_LANGUAGE_MODEL

Introduction

This page collects example usages of the android.speech.RecognizerIntent field EXTRA_LANGUAGE_MODEL.

Prototype

String EXTRA_LANGUAGE_MODEL

Document

Informs the recognizer which speech model to prefer when performing ACTION_RECOGNIZE_SPEECH.
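
The extra takes one of the language-model constants defined on RecognizerIntent: LANGUAGE_MODEL_FREE_FORM for general dictation and LANGUAGE_MODEL_WEB_SEARCH for shorter, query-like phrases. A minimal sketch of supplying it follows; the method name and request code are illustrative only, not taken from the examples below.

private static final int REQ_SPEECH = 1; // arbitrary request code for this sketch

private void startFreeFormRecognition(Activity activity) {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // Prefer the dictation model; LANGUAGE_MODEL_WEB_SEARCH suits search-style input instead.
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    activity.startActivityForResult(intent, REQ_SPEECH);
}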

Usage

From source file:br.liveo.searchliveo.SearchCardLiveo.java

private void startVoice(EditText editText) {
    ((InputMethodManager) mContext.getSystemService(Context.INPUT_METHOD_SERVICE))
            .hideSoftInputFromWindow(editText.getWindowToken(), 0);

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, mContext.getString(R.string.searchview_voice));
    try {
        mContext.startActivityForResult(intent, REQUEST_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(mContext.getApplicationContext(), R.string.not_supported, Toast.LENGTH_SHORT).show();
    }
}
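
The method above only launches the recognizer; the transcription comes back through onActivityResult. A minimal sketch of the receiving side in the hosting Activity, assuming the same REQUEST_CODE_SPEECH_INPUT constant (the handling shown is illustrative, not taken from SearchCardLiveo):

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_CODE_SPEECH_INPUT && resultCode == Activity.RESULT_OK && data != null) {
        // The recognizer returns candidate transcriptions, best match first.
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (matches != null && !matches.isEmpty()) {
            String spokenText = matches.get(0);
            // e.g. put the recognized text into the search field
        }
    }
}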

From source file:com.wizardsofm.deskclock.alarms.AlarmActivity.java

void listenForCommand() {

    //        if (speech == null) {
    //            speech = SpeechRecognizer.createSpeechRecognizer(this);
    //            speech.setRecognitionListener(MainActivity.this);
    //        }
    //        speech = SpeechRecognizer.createSpeechRecognizer(this);
    //        speech.setRecognitionListener(this);

    i = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    i.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    i.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    i.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say something");
    //        i.putExtra("android.speech.extra.DICTATION_MODE", true);
    //        i.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    i.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS, 5000);
    try {
        startActivityForResult(i, 100);

        new CountDownTimer(5000, 1000) {

            public void onTick(long millisUntilFinished) {
                //do nothing, just let it tick
            }

            public void onFinish() {
                if (!alarmStopped) {
                    listenForCommand();
                }
            }
        }.start();

        //            speech.startListening(i);
    } catch (Exception e) {
        // no recognizer available or the activity could not be started; give up silently
    }
}

From source file:com.eveningoutpost.dexdrip.Home.java

public void promptSpeechNoteInput(View abc) {

    if (recognitionRunning)
        return;
    recognitionRunning = true;

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    // intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US"); // debug voice
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.speak_your_note_text));

    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_NOTE_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getApplicationContext(), getString(R.string.speech_recognition_is_not_supported),
                Toast.LENGTH_LONG).show();
    }

}

From source file:com.arlib.floatingsearchview.FloatingSearchView.java

private Intent createVoiceRecIntent(Activity activity, String hint) {

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, hint);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);

    return intent;
}
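
The intent built here still has to be launched for a result by the hosting Activity; a minimal usage sketch (the caller and request code are hypothetical, not part of FloatingSearchView):

// Hypothetical caller: launch the recognizer and receive the single best match in onActivityResult.
Intent voiceIntent = createVoiceRecIntent(hostActivity, "Speak now");
hostActivity.startActivityForResult(voiceIntent, VOICE_REC_REQUEST); // arbitrary request code constant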

From source file:com.eveningoutpost.dexdrip.Home.java

/**
 * Showing the Google speech input dialog
 */
private void promptSpeechInput() {

    if (recognitionRunning)
        return;
    recognitionRunning = true;

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    // intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US"); // debug voice
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.speak_your_treatment));

    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getApplicationContext(), R.string.speech_recognition_is_not_supported, Toast.LENGTH_LONG)
                .show();
    }

}

From source file:com.google.android.apps.paco.ExperimentExecutorCustomRendering.java

public void startSpeechRecognition(SpeechRecognitionListener listener) {
    speechRecognitionListeners.add(listener);
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // EXTRA_LANGUAGE_MODEL expects a language-model constant; the default locale belongs in EXTRA_LANGUAGE.
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());

    try {
        startActivityForResult(intent, RESULT_SPEECH);
    } catch (ActivityNotFoundException a) {
        Toast t = Toast.makeText(getApplicationContext(),
                R.string.oops_your_device_doesn_t_support_speech_to_text, Toast.LENGTH_SHORT);
        t.show();
    }
}

From source file:radu.pidroid.Controller.java

public void startVoiceRecognition() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    startActivityForResult(intent, SPEECH_RECOGNITION_REQUEST_CODE);
}

From source file:com.delexus.imitationzhihu.MySearchView.java

/**
 * Create and return an Intent that can launch the voice search activity, perform a specific
 * voice transcription, and forward the results to the searchable activity.
 *
 * @param baseIntent The voice app search intent to start from
 * @return A completely-configured intent ready to send to the voice search activity
 */
private Intent createVoiceAppSearchIntent(Intent baseIntent, SearchableInfo searchable) {
    ComponentName searchActivity = searchable.getSearchActivity();

    // create the necessary intent to set up a search-and-forward operation
    // in the voice search system.   We have to keep the bundle separate,
    // because it becomes immutable once it enters the PendingIntent
    Intent queryIntent = new Intent(Intent.ACTION_SEARCH);
    queryIntent.setComponent(searchActivity);
    PendingIntent pending = PendingIntent.getActivity(getContext(), 0, queryIntent,
            PendingIntent.FLAG_ONE_SHOT);

    // Now set up the bundle that will be inserted into the pending intent
    // when it's time to do the search.  We always build it here (even if empty)
    // because the voice search activity will always need to insert "QUERY" into
    // it anyway.
    Bundle queryExtras = new Bundle();
    if (mAppSearchData != null) {
        queryExtras.putParcelable(SearchManager.APP_DATA, mAppSearchData);
    }

    // Now build the intent to launch the voice search.  Add all necessary
    // extras to launch the voice recognizer, and then all the necessary extras
    // to forward the results to the searchable activity
    Intent voiceIntent = new Intent(baseIntent);

    // Add all of the configuration options supplied by the searchable's metadata
    String languageModel = RecognizerIntent.LANGUAGE_MODEL_FREE_FORM;
    String prompt = null;
    String language = null;
    int maxResults = 1;

    Resources resources = getResources();
    if (searchable.getVoiceLanguageModeId() != 0) {
        languageModel = resources.getString(searchable.getVoiceLanguageModeId());
    }
    if (searchable.getVoicePromptTextId() != 0) {
        prompt = resources.getString(searchable.getVoicePromptTextId());
    }
    if (searchable.getVoiceLanguageId() != 0) {
        language = resources.getString(searchable.getVoiceLanguageId());
    }
    if (searchable.getVoiceMaxResults() != 0) {
        maxResults = searchable.getVoiceMaxResults();
    }

    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,
            searchActivity == null ? null : searchActivity.flattenToShortString());

    // Add the values that configure forwarding the results
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, queryExtras);

    return voiceIntent;
}
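
The voice search activity performs the transcription and then fires the supplied PendingIntent with the recognized text inserted as the standard search query, so the searchable activity receives it like any other ACTION_SEARCH launch. A minimal sketch of that receiving side (illustrative only, not taken from MySearchView):

@Override
protected void onNewIntent(Intent intent) {
    super.onNewIntent(intent);
    if (Intent.ACTION_SEARCH.equals(intent.getAction())) {
        // The transcribed speech arrives as the standard search query extra.
        String query = intent.getStringExtra(SearchManager.QUERY);
        // Anything placed in the queryExtras bundle above is also available here, e.g. APP_DATA.
        Bundle appData = intent.getBundleExtra(SearchManager.APP_DATA);
        // perform the search with the spoken query...
    }
}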

From source file:org.openhab.habdroid.ui.OpenHABMainActivity.java

private void launchVoiceRecognition() {
    Intent callbackIntent = new Intent(this, OpenHABVoiceService.class);
    callbackIntent.putExtra(OpenHABVoiceService.OPENHAB_BASE_URL_EXTRA, openHABBaseUrl);
    PendingIntent openhabPendingIntent = PendingIntent.getService(this, 0, callbackIntent, 0);

    Intent speechIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // Display a hint to the user about what to say.
    speechIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.info_voice_input));
    speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    speechIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    speechIntent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, openhabPendingIntent);

    try {
        startActivity(speechIntent);
    } catch (ActivityNotFoundException e) {
        // Speech not installed?
        Intent browserIntent = new Intent(Intent.ACTION_VIEW,
                Uri.parse("https://market.android.com/details?id=com.google.android.voicesearch"));
        startActivity(browserIntent);
    }
}
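
Because the results here are delivered through the PendingIntent rather than back to an activity, the callback service reads them from the intent it is started with. A minimal sketch, assuming the callback is an IntentService and that the recognizer merges its matches into the fired intent under RecognizerIntent.EXTRA_RESULTS (both are assumptions; this is not taken from OpenHABVoiceService):

@Override
protected void onHandleIntent(Intent intent) {
    // Matches are assumed to arrive in the fired intent's extras, best match first.
    ArrayList<String> matches = intent.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
    if (matches != null && !matches.isEmpty()) {
        String command = matches.get(0);
        // forward the recognized command to the openHAB server...
    }
}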

From source file:com.tandong.sa.sherlock.widget.SearchView.java

public SearchView(Context context, AttributeSet attrs) {
    super(context, attrs);
    this.c = context;

    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.FROYO) {
        throw new IllegalStateException("SearchView is API 8+ only.");
    }

    LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    inflater.inflate(getResources().getIdentifier("abs__search_view", "layout", context.getPackageName()), this,
            true);
    // inflater.inflate(R.layout.abs__search_view, this, true);

    mSearchButton = findViewById(
            getResources().getIdentifier("abs__search_button", "id", context.getPackageName()));
    // mSearchButton = findViewById(R.id.abs__search_button);
    mQueryTextView = (SearchAutoComplete) findViewById(
            getResources().getIdentifier("abs__search_src_text", "id", context.getPackageName()));
    // mQueryTextView = (SearchAutoComplete)
    // findViewById(R.id.abs__search_src_text);
    mQueryTextView.setSearchView(this);

    mSearchEditFrame = findViewById(
            getResources().getIdentifier("abs__search_edit_frame", "id", context.getPackageName()));
    // mSearchEditFrame = findViewById(R.id.abs__search_edit_frame);
    mSearchPlate = findViewById(
            getResources().getIdentifier("abs__search_plate", "id", context.getPackageName()));
    // mSearchPlate = findViewById(R.id.abs__search_plate);
    mSubmitArea = findViewById(
            getResources().getIdentifier("abs__submit_area", "id", context.getPackageName()));
    // mSubmitArea = findViewById(R.id.abs__submit_area);
    mSubmitButton = findViewById(
            getResources().getIdentifier("abs__search_go_btn", "id", context.getPackageName()));
    // mSubmitButton = findViewById(R.id.abs__search_go_btn);
    mCloseButton = (ImageView) findViewById(
            getResources().getIdentifier("abs__search_close_btn", "id", context.getPackageName()));
    // mCloseButton = (ImageView) findViewById(R.id.abs__search_close_btn);
    mVoiceButton = findViewById(
            getResources().getIdentifier("abs__search_voice_btn", "id", context.getPackageName()));
    // mVoiceButton = findViewById(R.id.abs__search_voice_btn);
    mSearchHintIcon = (ImageView) findViewById(
            getResources().getIdentifier("abs__search_mag_icon", "id", context.getPackageName()));
    // mSearchHintIcon = (ImageView)
    // findViewById(R.id.abs__search_mag_icon);

    mSearchButton.setOnClickListener(mOnClickListener);
    mCloseButton.setOnClickListener(mOnClickListener);
    mSubmitButton.setOnClickListener(mOnClickListener);
    mVoiceButton.setOnClickListener(mOnClickListener);
    mQueryTextView.setOnClickListener(mOnClickListener);

    mQueryTextView.addTextChangedListener(mTextWatcher);
    mQueryTextView.setOnEditorActionListener(mOnEditorActionListener);
    mQueryTextView.setOnItemClickListener(mOnItemClickListener);
    mQueryTextView.setOnItemSelectedListener(mOnItemSelectedListener);
    mQueryTextView.setOnKeyListener(mTextKeyListener);
    // Inform any listener of focus changes
    mQueryTextView.setOnFocusChangeListener(new OnFocusChangeListener() {

        public void onFocusChange(View v, boolean hasFocus) {
            if (mOnQueryTextFocusChangeListener != null) {
                mOnQueryTextFocusChangeListener.onFocusChange(SearchView.this, hasFocus);
            }
        }
    });

    TypedArray a = context.obtainStyledAttributes(attrs,
            new int[] {
                    getResources().getIdentifier("SherlockSearchView", "styleable", context.getPackageName()) },
            0, 0);
    // R.styleable.SherlockSearchView, 0, 0);
    setIconifiedByDefault(a.getBoolean(getResources().getIdentifier("SherlockSearchView_iconifiedByDefault",
            "styleable", context.getPackageName()), true));
    // R.styleable.SherlockSearchView_iconifiedByDefault, true));
    int maxWidth = a.getDimensionPixelSize(getResources().getIdentifier("SherlockSearchView_android_maxWidth",
            "styleable", context.getPackageName()), -1);
    // R.styleable.SherlockSearchView_android_maxWidth, -1);
    if (maxWidth != -1) {
        setMaxWidth(maxWidth);
    }
    CharSequence queryHint = a.getText(getResources().getIdentifier("SherlockSearchView_queryHint", "styleable",
            context.getPackageName()));
    // .getText(R.styleable.SherlockSearchView_queryHint);
    if (!TextUtils.isEmpty(queryHint)) {
        setQueryHint(queryHint);
    }
    int imeOptions = a.getInt(getResources().getIdentifier("SherlockSearchView_android_imeOptions", "styleable",
            context.getPackageName()), -1);
    // R.styleable.SherlockSearchView_android_imeOptions, -1);
    if (imeOptions != -1) {
        setImeOptions(imeOptions);
    }
    int inputType = a.getInt(getResources().getIdentifier("SherlockSearchView_android_inputType", "styleable",
            context.getPackageName()), -1);
    // R.styleable.SherlockSearchView_android_inputType, -1);
    if (inputType != -1) {
        setInputType(inputType);
    }

    a.recycle();

    boolean focusable = true;

    a = context.obtainStyledAttributes(attrs,
            new int[] { getResources().getIdentifier("SherlockView", "styleable", context.getPackageName()) },
            0,
            // a = context.obtainStyledAttributes(attrs,
            // R.styleable.SherlockView, 0,
            0);
    focusable = a.getBoolean(
            getResources().getIdentifier("SherlockView_android_focusable", "styleable",
                    context.getPackageName()),
            // focusable =
            // a.getBoolean(R.styleable.SherlockView_android_focusable,
            focusable);
    a.recycle();
    setFocusable(focusable);

    // Save voice intent for later queries/launching
    mVoiceWebSearchIntent = new Intent(RecognizerIntent.ACTION_WEB_SEARCH);
    mVoiceWebSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    mVoiceWebSearchIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);

    mVoiceAppSearchIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    mVoiceAppSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

    mDropDownAnchor = findViewById(mQueryTextView.getDropDownAnchor());
    if (mDropDownAnchor != null) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mDropDownAnchor.addOnLayoutChangeListener(new OnLayoutChangeListener() {
                @Override
                public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft,
                        int oldTop, int oldRight, int oldBottom) {
                    adjustDropDownSizeAndPosition();
                }
            });
        } else {
            mDropDownAnchor.getViewTreeObserver()
                    .addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
                        @Override
                        public void onGlobalLayout() {
                            adjustDropDownSizeAndPosition();
                        }
                    });
        }
    }

    updateViewsVisibility(mIconifiedByDefault);
    updateQueryHint();
}