Example usage for android.speech RecognizerIntent EXTRA_PROMPT

List of usage examples for android.speech RecognizerIntent EXTRA_PROMPT

Introduction

In this page you can find the example usage for android.speech RecognizerIntent EXTRA_PROMPT.

Prototype

String EXTRA_PROMPT

To view the source code for android.speech RecognizerIntent EXTRA_PROMPT, click the source link below.

Click Source Link

Document

Optional text prompt to show to the user when asking them to speak.

Usage

From source file:root.magicword.MagicWord.java

/**
 * Launches the platform speech recognizer with a free-form language model,
 * prompting the user to "Say the magic word" and requesting up to 100
 * candidate transcripts. The result arrives in onActivityResult under
 * SPEECH_REQUEST_CODE.
 */
private void sendRecognizeIntent() {
    final Intent recognizer = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognizer.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognizer.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say the magic word");
    recognizer.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 100);
    startActivityForResult(recognizer, SPEECH_REQUEST_CODE);
}

From source file:org.gots.ui.NewSeedActivity.java

/**
 * Click dispatcher for the seed-editing screen: barcode scanning, adding the
 * seed to stock, updating an existing seed, and the four voice-dictation
 * buttons (growth, diseases, environment, harvest).
 *
 * @param v the clicked view; dispatched on its resource id
 */
@Override
public void onClick(View v) {
    // Snapshot the free-text description fields into the seed model before
    // any of the actions below persist or upload it.
    newSeed.setDescriptionDiseases(descriptionDiseases.getText().toString());
    newSeed.setDescriptionCultivation(descriptionEnvironment.getText().toString());
    newSeed.setDescriptionHarvest(descriptionHarvest.getText().toString());
    newSeed.setDescriptionGrowth(descriptionGrowth.getText().toString());
    Intent intent; // shared by the voice-recognition cases below
    switch (v.getId()) {
    case R.id.imageBarCode:
        scanBarCode();
        break;

    case R.id.buttonStock:
        // Create the seed (with its picture, if any) off the UI thread, add
        // it to the current garden's stock, then close this screen.
        if (validateSeed()) {
            new AsyncTask<Void, Integer, Void>() {
                @Override
                protected Void doInBackground(Void... params) {
                    if (picturePath != null) {
                        // Scale the chosen picture down to 100x100 and PNG-encode it.
                        ByteArrayOutputStream bos = new ByteArrayOutputStream();
                        Bitmap bitmap = FileUtilities.decodeScaledBitmapFromSdCard(picturePath, 100, 100);
                        bitmap.compress(CompressFormat.PNG, 0 /* ignored for PNG */, bos);
                        byte[] bitmapdata = bos.toByteArray();

                        // Write the bytes to a file named after the variety
                        // (lowercased, whitespace stripped).
                        // NOTE(review): the stream is never closed and IO
                        // errors are only printed — consider try-with-resources.
                        FileOutputStream fos;
                        try {
                            fos = new FileOutputStream(new File(gotsPrefs.getGotsExternalFileDir(),
                                    newSeed.getVariety().toLowerCase().replaceAll("\\s", "")));
                            fos.write(bitmapdata);
                        } catch (FileNotFoundException e) {
                            e.printStackTrace();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                        newSeed = seedManager.createSeed(newSeed, new File(picturePath));
                    } else
                        newSeed = seedManager.createSeed(newSeed, null);
                    // seedManager.attach
                    seedManager.addToStock(newSeed, gardenManager.getCurrentGarden());
                    return null;
                }

                protected void onPostExecute(Void result) {
                    // Tell listeners to refresh the seed list, then close this screen.
                    getApplicationContext().sendBroadcast(new Intent(BroadCastMessages.SEED_DISPLAYLIST));
                    NewSeedActivity.this.finish();

                };
            }.execute();

        }
        break;

    case R.id.buttonModify:
        // Persist changes to an existing seed off the UI thread, then close.
        if (validateSeed()) {

            new AsyncTask<Void, Integer, Void>() {
                @Override
                protected Void doInBackground(Void... params) {
                    seedManager.updateSeed(newSeed);
                    return null;
                }

                protected void onPostExecute(Void result) {
                    getApplicationContext().sendBroadcast(new Intent(BroadCastMessages.SEED_DISPLAYLIST));
                    NewSeedActivity.this.finish();

                };
            }.execute();

        }
        break;

    // The four voice buttons launch the same recognizer dialog, differing
    // only in the request code that routes the transcript to a field.
    case R.id.IdSeedDescriptionCultureVoice:
        intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speech recognition demo");
        startActivityForResult(intent, REQUEST_GROWTH);
        break;
    case R.id.IdSeedDescriptionEnnemiVoice:
        intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speech recognition demo");
        startActivityForResult(intent, REQUEST_DISEASES);
        break;
    case R.id.IdSeedDescriptionEnvironmentVoice:
        intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speech recognition demo");
        startActivityForResult(intent, REQUEST_ENVIRONMENT);
        break;
    case R.id.IdSeedDescriptionHarvestVoice:
        intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speech recognition demo");
        startActivityForResult(intent, REQUEST_HARVEST);
        break;

    // case R.id.buttonCatalogue:
    // if (validateSeed()) {
    // seedManager.createSeed(newSeed);
    // finish();
    // }
    // break;
    default:
        break;
    }

}

From source file:com.eugene.fithealthmaingit.UI.ChooseAddMealSearchFragment.java

/**
 * Hides the soft keyboard attached to the search field, then launches the
 * platform speech recognizer in the default locale with the prompt
 * "Say Something". Shows a "Not Supported" toast when no recognizer
 * activity is installed.
 */
private void promptSpeechInput() {
    InputMethodManager keyboard =
            (InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
    keyboard.hideSoftInputFromWindow(mEtSearch.getWindowToken(), 0);

    Intent speechIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    speechIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    speechIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say Something");
    try {
        startActivityForResult(speechIntent, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getActivity().getApplicationContext(), "Not Supported", Toast.LENGTH_SHORT).show();
    }
}

From source file:com.meetingninja.csse.MainActivity.java

/**
 * Action-bar dispatcher. The navigation-drawer toggle gets first chance to
 * consume the event; otherwise the selected item id decides the action
 * (refresh, new meeting/note, logout, settings, or voice navigation).
 *
 * @param item the selected action-bar item
 * @return true if the event was handled here or by the drawer toggle
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (drawerToggle.onOptionsItemSelected(item)) {
        return true;
    }

    // Handle other action bar items...
    switch (item.getItemId()) {
    case R.id.action_refresh:
        // Refresh whichever page the session marks as currently shown.
        switch (DrawerLabel.values()[session.getPage()]) {
        case MEETINGS:
            Toast.makeText(this, "Refreshing Meetings", Toast.LENGTH_SHORT).show();
            frag_meetings.fetchMeetings();
            // frag_meetings.populateList();
            return true;
        case NOTES:
            Toast.makeText(this, "Refreshing Notes", Toast.LENGTH_SHORT).show();
            // notesFrag.fetchNotes();
            frag_notes.populateList();
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }

    case R.id.action_new_meeting:
        frag_meetings.editMeeting(null);
        return true;
    case R.id.action_new_note:
        // Open the note editor in "create" mode; result handled via request code 3.
        Intent createNote = new Intent(this, EditNoteActivity.class);
        createNote.putExtra(Note.CREATE_NOTE, true);
        startActivityForResult(createNote, 3);
        return true;
    case R.id.action_logout:
        logout();
        return true;
    case R.id.action_settings:
        return true;
    case R.id.action_speak:
        // NOTE(review): "en-US" looks like an IETF language tag but is set as
        // EXTRA_LANGUAGE_MODEL (which expects LANGUAGE_MODEL_* constants) —
        // likely EXTRA_LANGUAGE was intended; confirm before changing.
        Intent i = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        i.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, "en-US");
        i.putExtra(RecognizerIntent.EXTRA_PROMPT, "Go to...");
        try {
            startActivityForResult(i, VOICE_RECOGNITION_REQUEST_CODE);

        } catch (Exception e) {
            Toast.makeText(this, "Error initializing speech to text engine.", Toast.LENGTH_LONG).show();
        }
        return true;
    default:
        return super.onOptionsItemSelected(item);
    }

}

From source file:com.eugene.fithealthmaingit.UI.NavFragments.FragmentSearch.java

/**
 * Dismisses the soft keyboard for the given field, then opens the platform
 * speech recognizer in the default locale with the prompt "Say Something".
 * Falls back to a "Not Supported" toast when no recognizer activity exists.
 *
 * @param e the field whose keyboard should be hidden
 */
private void promptSpeechInput(EditText e) {
    InputMethodManager imm =
            (InputMethodManager) getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
    imm.hideSoftInputFromWindow(e.getWindowToken(), 0);

    Intent recognize = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    recognize.putExtra(RecognizerIntent.EXTRA_PROMPT, "Say Something");
    try {
        startActivityForResult(recognize, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getActivity().getApplicationContext(), "Not Supported", Toast.LENGTH_SHORT).show();
    }
}

From source file:com.example.castCambot.MainActivity.java

/**
  * Android voice recognition/*from  w  ww .ja  v  a  2  s  .com*/
  */
private void startVoiceRecognitionActivity() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.message_to_cast));
    startActivityForResult(intent, REQUEST_CODE);
}

From source file:root.gast.playground.speech.SpeechRecognitionPlay.java

/**
 * Creates the {@link RecognizerIntent} to send, assembled from the many
 * speech-recognition preferences: base intent (web search vs. hands-free vs.
 * blank), language model, language, prompt, result limits, optional silence
 * timers, and optional PendingIntent result delivery.
 *
 * @return the fully configured recognizer intent
 */
private Intent readRecognizerIntentFromPreferences() {
    Intent intentToSend;

    //web search handling
    // NOTE(review): this `preferences` helper appears to take resource ids
    // for both the key and the default value — semantics assumed; confirm.
    boolean isWebSearchAction = preferences.getBoolean(this, R.string.pref_websearch,
            R.string.pref_websearch_default);

    boolean isHandsFreeAction = preferences.getBoolean(this, R.string.pref_handsfree,
            R.string.pref_handsfree_default);

    if (isWebSearchAction) {
        intentToSend = RecognizerIntentFactory.getWebSearchRecognizeIntent();
        final boolean ADD_ORIGIN = true;
        // EXTRA_ORIGIN is only available from API 14 (ICS) onward.
        if (ADD_ORIGIN && Build.VERSION.SDK_INT >= 14) {
            intentToSend.putExtra(RecognizerIntent.EXTRA_ORIGIN, true);
        }
    } else {
        // Hands-free recognition requires API 16 (Jelly Bean).
        if (isHandsFreeAction && Build.VERSION.SDK_INT >= 16) {
            intentToSend = RecognizerIntentFactory.getHandsFreeRecognizeIntent();
        } else {
            intentToSend = RecognizerIntentFactory.getBlankRecognizeIntent();
        }
    }

    //language model: free-form dictation or web-search-biased
    boolean isFreeFormModel = preferences.getBoolean(this, R.string.pref_languagemodel,
            R.string.pref_languagemodel_default);
    if (isFreeFormModel) {
        intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    } else {
        intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
    }

    //common extras
    String language = preferences.getString(getResources().getString(R.string.pref_language),
            getResources().getString(R.string.pref_language_default));
    intentToSend.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    // The prompt combines the fixed resource text with the user's target phrase.
    String prompt = getResources().getString(R.string.speech_prompt) + ": "
            + whatYouAreTryingToSay.getText().toString();
    intentToSend.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    intentToSend.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS,
            preferences.getInt(this, R.string.pref_maxresults, R.string.pref_maxresults_default));
    intentToSend.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS,
            preferences.getBoolean(this, R.string.pref_partial, R.string.pref_partial_default));

    // Silence-timing extras are only attached when a value was specified.
    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS,
            R.string.pref_complete_silence, R.string.pref_complete_silence_default, intentToSend);
    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS,
            R.string.pref_minimum_input_length, R.string.pref_minimum_input_length_default, intentToSend);
    setIfValueSpecified(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS,
            R.string.pref_possibly_complete_silence_length,
            R.string.pref_possibly_complete_silence_length_default, intentToSend);

    //pendingIntent handling
    // NOTE(review): the default here is R.string.pref_withpendingintent itself,
    // not a *_default resource like every other lookup — possibly a typo; confirm.
    boolean doPending = preferences.getBoolean(this, R.string.pref_withpendingintent,
            R.string.pref_withpendingintent);
    if (doPending) {
        Intent pendingIntentSource = new Intent(this, SpeechRecognitionResultsActivity.class);
        PendingIntent pi = PendingIntent.getActivity(this, 0, pendingIntentSource, 0);

        Bundle extraInfoBundle = new Bundle();
        // pass in what you are trying to say so the results activity can
        // show it
        extraInfoBundle.putString(SpeechRecognitionResultsActivity.WHAT_YOU_ARE_TRYING_TO_SAY_INTENT_INPUT,
                whatYouAreTryingToSay.getText().toString());
        // set the variables in the intent this is sending
        intentToSend.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pi);
        intentToSend.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT_BUNDLE, extraInfoBundle);
    }

    Log.d(TAG, "sending recognizer intent: " + intentToSend.getExtras().toString());
    return intentToSend;
}

From source file:com.example.michel.facetrack.FaceTrackerActivity.java

/**
 * Start speech to text intent. This opens up Google Speech Recognition API
 * dialog box to listen to the speech input; the transcript (or cancellation)
 * is delivered to onActivityResult under SPEECH_RECOGNITION_CODE. Shows a
 * toast when no recognizer activity is installed on the device.
 */
private void startSpeechToText() {
    // Informational trace, not an error — downgraded from Log.e to Log.d.
    Log.d("start speech to text", " start speech to text");
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    // NOTE(review): EXTRA_LANGUAGE is documented as an IETF language-tag
    // String; passing a Locale object relies on its serialized form — confirm
    // the recognizer honors it, or use Locale.getDefault().toString().
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak something...");
    try {
        // Removed leftover debug output: System.out.println("hello 2").
        startActivityForResult(intent, SPEECH_RECOGNITION_CODE);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getApplicationContext(), "Sorry! Speech recognition is not supported in this device.",
                Toast.LENGTH_SHORT).show();
    }
}

From source file:android.support.v17.leanback.app.SearchSupportFragment.java

/**
 * Returns an intent that can be used to request speech recognition.
 * Built from the base {@link RecognizerIntent#ACTION_RECOGNIZE_SPEECH} plus
 * extras:/*from  w  w w.j  av  a2 s  .c  o m*/
 *
 * <ul>
 * <li>{@link RecognizerIntent#EXTRA_LANGUAGE_MODEL} set to
 * {@link RecognizerIntent#LANGUAGE_MODEL_FREE_FORM}</li>
 * <li>{@link RecognizerIntent#EXTRA_PARTIAL_RESULTS} set to true</li>
 * <li>{@link RecognizerIntent#EXTRA_PROMPT} set to the search bar hint text</li>
 * </ul>
 *
 * For handling the intent returned from the service, see
 * {@link #setSearchQuery(Intent, boolean)}.
 */
public Intent getRecognizerIntent() {
    Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    if (mSearchBar != null && mSearchBar.getHint() != null) {
        recognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, mSearchBar.getHint());
    }
    recognizerIntent.putExtra(EXTRA_LEANBACK_BADGE_PRESENT, mBadgeDrawable != null);
    return recognizerIntent;
}

From source file:br.liveo.searchliveo.SearchCardLiveo.java

/**
 * Dismisses the soft keyboard for the given field and starts the speech
 * recognizer in the default locale, prompting with the localized
 * searchview_voice string. Shows a localized "not supported" toast when no
 * recognizer activity is installed.
 *
 * @param editText the field whose keyboard should be hidden
 */
private void startVoice(EditText editText) {
    InputMethodManager imm =
            (InputMethodManager) mContext.getSystemService(Context.INPUT_METHOD_SERVICE);
    imm.hideSoftInputFromWindow(editText.getWindowToken(), 0);

    Intent recognize = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognize.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    recognize.putExtra(RecognizerIntent.EXTRA_PROMPT, mContext.getString(R.string.searchview_voice));
    try {
        mContext.startActivityForResult(recognize, REQUEST_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(mContext.getApplicationContext(), R.string.not_supported, Toast.LENGTH_SHORT).show();
    }
}