Example usage for android.speech RecognizerIntent ACTION_RECOGNIZE_SPEECH

List of usage examples for android.speech RecognizerIntent ACTION_RECOGNIZE_SPEECH

Introduction

On this page you can find example usages for android.speech RecognizerIntent ACTION_RECOGNIZE_SPEECH.

Prototype

String ACTION_RECOGNIZE_SPEECH

To view the source code for android.speech RecognizerIntent ACTION_RECOGNIZE_SPEECH, use the source link below.

Click Source Link

Document

Starts an activity that will prompt the user for speech and send it through a speech recognizer.

Usage

From source file:net.olejon.mdapp.ClinicalTrialsActivity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final int itemId = item.getItemId();

    if (itemId == android.R.id.home) {
        // Up navigation back to the parent activity.
        NavUtils.navigateUpFromSameTask(this);
        return true;
    }

    if (itemId == R.id.clinicaltrials_menu_voice_search) {
        try {
            // Launch the platform speech recognizer for an English (US) query;
            // the transcription comes back through onActivityResult.
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US");
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
        } catch (Exception e) {
            // No speech recognizer on this device: inform the user instead of crashing.
            new MaterialDialog.Builder(mContext).title(getString(R.string.device_not_supported_dialog_title))
                    .content(getString(R.string.device_not_supported_dialog_message))
                    .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                    .contentColorRes(R.color.black).positiveColorRes(R.color.dark_blue).show();
        }
        return true;
    }

    if (itemId == R.id.clinicaltrials_menu_clear_recent_searches) {
        clearRecentSearches();
        return true;
    }

    return super.onOptionsItemSelected(item);
}

From source file:com.ct.speech.HintReceiver.java

/**
 * Fire an intent to start the speech recognition activity.
 *
 * @param args
 *            Argument array with the following string args: [req
 *            code][number of matches][prompt string] Google speech
 *            recognizer
 */

private void startSpeechRecognitionActivity(JSONArray args) {
    // int reqCode = 42; // Hitchhiker? // global now
    int maxMatches = 2;
    String prompt = "";
    String language = "";
    // Parse the optional positional arguments; any parse failure is logged
    // and the defaults above are kept for the remaining parameters.
    try {
        if (args.length() > 0) {
            // Request code - passed back to the caller on a successful
            // operation
            String temp = args.getString(0);
            reqCode = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Maximum number of matches, 0 means the recognizer decides
            String temp = args.getString(1);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 2) {
            // Optional text prompt
            prompt = args.getString(2);
        }
        if (args.length() > 3) {
            // Optional language specified
            language = args.getString(3);
        }
    } catch (Exception e) {
        Log.e(TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }
    // Build the recognizer intent from the parsed arguments.
    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra("calling_package", "com.ct.BasicAppFrame");
    // If specific language
    if (!language.equals("")) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    }
    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    if (!(prompt.length() == 0))
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    // ctx.startActivityForResult(this, intent, reqCode); //removed to try
    // using recognizer directly
    try {
        // The recognizer is driven directly as a service (no UI activity);
        // it is created and started on the UI thread.
        this.ctx.runOnUiThread(new Runnable() {
            public void run() {
                final SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer((Context) ctx);
                RecognitionListener listener = new RecognitionListener() {
                    @Override
                    public void onResults(Bundle results) {
                        //closeRecordedFile();
                        // Forward the complete result bundle to the caller.
                        sendBackResults(results);
                        ArrayList<String> voiceResults = results
                                .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                        if (voiceResults == null) {
                            Log.e(TAG, "No voice results");
                        } else {
                            // Log.d(TAG, "Printing matches: ");
                            for (@SuppressWarnings("unused")
                            String match : voiceResults) {
                                // Log.d(TAG, match);
                            }
                        }
                        // Release the recognizer once a final result has arrived.
                        recognizer.destroy();
                    }

                    @Override
                    public void onReadyForSpeech(Bundle params) {
                        // Log.d(TAG, "Ready for speech");
                    }

                    @Override
                    public void onError(int error) {
                        Log.d(TAG, "Error listening for speech: " + error);
                        // Map the two expected error codes to dedicated results;
                        // everything else is reported as a generic failure.
                        if (error == SpeechRecognizer.ERROR_NO_MATCH) {
                            sendBackResults(NO_MATCH);
                        } else if (error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT) {
                            sendBackResults(NO_INPUT);
                        } else {
                            speechFailure("unknown error");
                        }
                        // Release the recognizer on the error path as well.
                        recognizer.destroy();
                    }

                    @Override
                    public void onBeginningOfSpeech() {
                        // Log.d(TAG, "Speech starting");
                        setStartOfSpeech();
                    }

                    @Override
                    //doesn't fire in Android after Ice Cream Sandwich
                    public void onBufferReceived(byte[] buffer) {
                    }

                    @Override
                    public void onEndOfSpeech() {
                        setEndOfSpeech();
                    }

                    @Override
                    public void onEvent(int eventType, Bundle params) {
                        // TODO Auto-generated method stub

                    }

                    @Override
                    public void onPartialResults(Bundle partialResults) {
                        // TODO Auto-generated method stub

                    }

                    @Override
                    public void onRmsChanged(float rmsdB) {
                        // TODO Auto-generated method stub

                    }
                };
                recognizer.setRecognitionListener(listener);
                Log.d(TAG, "starting speech recognition activity");
                recognizer.startListening(intent);
            }
        });
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:atlc.granadaaccessibilityranking.VoiceActivity.java

/**
 * Starts speech recognition after checking the ASR parameters.
 *
 * @param language Language used for speech recognition (e.g. Locale.ENGLISH)
 * @param languageModel Type of language model used (free form or web search)
 * @param maxResults Maximum number of recognition results (must be >= 0)
 * @throws Exception if the language model is not one of the two supported
 *                   models or maxResults is negative
 */
public void listen(final Locale language, final String languageModel, final int maxResults) throws Exception {
    checkASRPermission();

    if ((languageModel.equals(RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
            || languageModel.equals(RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH)) && (maxResults >= 0)) {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

        // Specify the calling package to identify the application
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, ctx.getPackageName());
        //Caution: be careful not to use: getClass().getPackage().getName());

        // Specify language model
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);

        // Specify how many results to receive. Results listed in order of confidence
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);

        // EXTRA_LANGUAGE expects an IETF BCP 47 language-tag *string* such as
        // "en-US". Passing the Locale object itself stores a Serializable
        // extra the recognizer ignores, so the requested language was never
        // applied; convert it to its language tag instead.
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language.toLanguageTag());

        myASR.startListening(intent);

    } else {
        Log.e(LOGTAG, "Invalid params to listen method");
        throw new Exception("Invalid params to listen method"); //If the input parameters are not valid, it throws an exception
    }

}

From source file:com.example.h156252.connected_cars.CarGrid.java

/**
 * Shows the Google speech-input dialog; the transcription is delivered to
 * onActivityResult under REQ_CODE_SPEECH_INPUT.
 */
private void promptSpeechInput() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // EXTRA_LANGUAGE must be a BCP 47 language-tag string (e.g. "en-US");
    // the original passed the Locale object itself, which the recognizer
    // ignores as an unreadable Serializable extra.
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault().toLanguageTag());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Whats your message?!");
    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        // No speech-recognition activity installed on this device.
        Toast.makeText(getApplicationContext(), "Speech not supported", Toast.LENGTH_SHORT).show();
    }
}

From source file:com.jaspersoft.android.jaspermobile.activities.library.LibraryPageFragment.java

/** Launches the platform voice recognizer and records the analytics event. */
private void initVoiceRecognition() {
    Intent voiceIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH)
            .putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
            .putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.voice_command_title));
    startActivityForResult(voiceIntent, VOICE_COMMAND);
    analytics.sendEvent(Analytics.EventCategory.CATALOG.getValue(), Analytics.EventAction.CLICKED.getValue(),
            Analytics.EventLabel.VOICE_COMMANDS.getValue());
}

From source file:com.telepromptu.TeleprompterService.java

/**
 * Lazily creates the speech recognizer and starts free-form dictation.
 * Calls made while a recognizer instance already exists are no-ops.
 */
private void startListening() {
    if (speechRecognizer != null) {
        return; // already listening
    }
    speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
    speechRecognizer.setRecognitionListener(new DictationListener());

    Intent recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
    recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    speechRecognizer.startListening(recognizerIntent);
}

From source file:net.olejon.mdapp.NasjonaleRetningslinjerActivity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final int itemId = item.getItemId();

    if (itemId == android.R.id.home) {
        // Up navigation back to the parent activity.
        NavUtils.navigateUpFromSameTask(this);
        return true;
    }

    if (itemId == R.id.nasjonale_retningslinjer_menu_voice_search) {
        try {
            // Launch the platform speech recognizer for a Norwegian query;
            // the transcription comes back through onActivityResult.
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "nb-NO");
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
        } catch (Exception e) {
            // No speech recognizer on this device: inform the user instead of crashing.
            new MaterialDialog.Builder(mContext).title(getString(R.string.device_not_supported_dialog_title))
                    .content(getString(R.string.device_not_supported_dialog_message))
                    .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                    .contentColorRes(R.color.black).positiveColorRes(R.color.dark_blue).show();
        }
        return true;
    }

    if (itemId == R.id.nasjonale_retningslinjer_menu_clear_recent_searches) {
        clearRecentSearches();
        return true;
    }

    return super.onOptionsItemSelected(item);
}

From source file:net.olejon.mdapp.PoisoningsActivity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    final int itemId = item.getItemId();

    if (itemId == android.R.id.home) {
        // Up navigation back to the parent activity.
        NavUtils.navigateUpFromSameTask(this);
        return true;
    }

    if (itemId == R.id.poisonings_menu_call) {
        try {
            // Open the dialer pre-filled with the poison-center phone number.
            startActivity(new Intent(Intent.ACTION_DIAL, Uri.parse("tel:+4722591300")));
        } catch (Exception e) {
            showDeviceNotSupportedDialog();
        }
        return true;
    }

    if (itemId == R.id.poisonings_menu_voice_search) {
        try {
            // Launch the platform speech recognizer for a Norwegian query;
            // the transcription comes back through onActivityResult.
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "nb-NO");
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
        } catch (Exception e) {
            showDeviceNotSupportedDialog();
        }
        return true;
    }

    if (itemId == R.id.poisonings_menu_clear_recent_searches) {
        clearRecentSearches();
        return true;
    }

    return super.onOptionsItemSelected(item);
}

/** Shows the "device not supported" dialog used by both failure paths above. */
private void showDeviceNotSupportedDialog() {
    new MaterialDialog.Builder(mContext).title(getString(R.string.device_not_supported_dialog_title))
            .content(getString(R.string.device_not_supported_dialog_message))
            .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
            .contentColorRes(R.color.black).positiveColorRes(R.color.dark_blue).show();
}

From source file:de.dfki.iui.mmir.plugins.speech.android.AndroidSpeechRecognizer.java

/**
 * Configures and starts Android's SpeechRecognizer directly as a service
 * (no recognition UI).
 *
 * @param args JSON argument array: [0] language tag (optional, defaults to
 *             the device locale), [1] report intermediate results (boolean,
 *             optional), [2] max matches (optional, 0 lets the recognizer
 *             decide), [3] prompt text (optional, unused in service mode)
 * @param callbackContext Cordova callback that receives results and errors
 * @param isWithEndOfSpeechDetection when false, long silence timeouts are
 *             requested to approximate manual start/stop recording
 */
private void _startSpeechRecognitionActivity(JSONArray args, CallbackContext callbackContext,
        boolean isWithEndOfSpeechDetection) {
    int maxMatches = 0;
    String prompt = "";//TODO remove? (not used when ASR is directly used as service here...)
    String language = Locale.getDefault().toString();
    boolean isIntermediate = false;

    // Parse the optional positional arguments; on failure, log and keep the
    // defaults for whatever could not be read.
    try {
        if (args.length() > 0) {
            // Optional language specified
            language = args.getString(0);
        }
        if (args.length() > 1) {
            isIntermediate = args.getBoolean(1);
        }
        if (args.length() > 2) {
            // Maximum number of matches, 0 means that the recognizer "decides"
            String temp = args.getString(2);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 3) {
            // Optional text prompt
            prompt = args.getString(3);
        }

        //TODO if ... withoutEndOfSpeechDetection = ...
    } catch (Exception e) {
        Log.e(PLUGIN_NAME, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    // Create the intent and set parameters
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);

    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    if (!isWithEndOfSpeechDetection) {

        // Try to simulate start/stop-recording behavior (without end-of-speech
        // detection).
        // NOTE these settings do not seem to have any effect for the default
        //      Google Recognizer on API level > 16.
        // (The original set EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS
        //  twice — once as a primitive long and once via the deprecated
        //  new Long(10000), the second overwriting the first with the same
        //  value; a single putExtra with a long literal is equivalent.)
        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS, 10000L);
        intent.putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS, 6000L);
    }

    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);

    if (!prompt.equals(""))
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);

    if (isIntermediate)
        intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);

    //NOTE the extra package seems to be required for older Android versions, but not since API level 17(?)
    if (SDK_VERSION <= Build.VERSION_CODES.JELLY_BEAN)
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, cordova.getActivity().getPackageName());

    // Replace any previous recognizer under the lock, then start listening.
    synchronized (speechLock) {

        if (speech != null) {
            speech.destroy();
        }
        speech = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity());

        disableSoundFeedback();

        ++recCounter;
        currentRecognizer = new ASRHandler(recCounter, enableMicLevelsListeners, callbackContext, this);
        currentRecognizer.setHapticPrompt(
                (Vibrator) this.cordova.getActivity().getSystemService(Context.VIBRATOR_SERVICE));
        speech.setRecognitionListener(currentRecognizer);
        speech.startListening(intent);

    }
}

From source file:com.justplay1.shoppist.features.search.SearchFragment.java

/**
 * Launches the system speech recognizer (web-search language model) and
 * returns the transcription through onActivityResult under the given code.
 *
 * @param prompt      Text shown in the recognizer dialog
 * @param requestCode Request code passed back to onActivityResult
 */
private void startTextToSpeech(String prompt, int requestCode) {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
    // EXTRA_LANGUAGE expects a BCP 47 language-tag string (e.g. "en-US");
    // passing the Locale object stored an ignored Serializable extra.
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault().toLanguageTag());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    try {
        startActivityForResult(intent, requestCode);
    } catch (ActivityNotFoundException a) {
        // No speech-recognition activity available on this device.
        Toast.makeText(getContext(), getString(R.string.recognition_not_present), Toast.LENGTH_SHORT).show();
    }
}