Example usage for android.speech RecognizerIntent EXTRA_LANGUAGE

List of usage examples for android.speech RecognizerIntent EXTRA_LANGUAGE

Introduction

On this page you can find example usages of android.speech RecognizerIntent EXTRA_LANGUAGE.

Prototype

String EXTRA_LANGUAGE

Document

Optional IETF language tag (as defined by BCP 47), for example "en-US".
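
For reference, a minimal sketch of how this extra is typically combined with ACTION_RECOGNIZE_SPEECH; it assumes it is called from inside an Activity, and the request code and the "en-US" tag are illustrative choices rather than part of the constant's documentation.

private static final int SPEECH_REQUEST_CODE = 100; // arbitrary request code

private void startVoiceInput() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // EXTRA_LANGUAGE takes an IETF BCP 47 language tag, e.g. "en-US"
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US");
    startActivityForResult(intent, SPEECH_REQUEST_CODE);
}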

Usage

From source file:com.application.akscorp.yandextranslator2017.TranslateScreen.java

/**
 * Starts speech-to-text input. The result is processed in StartScreen.
 */
private void promptSpeechInput(Locale locale) {
    try {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, locale.getLanguage());
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, LanguageWork.GetResourceString(context, "say"));
        getActivity().startActivityForResult(intent, 200);
    } catch (ActivityNotFoundException a) {
        Logs.SaveLog(context, a);
        Toast.makeText(context, LanguageWork.GetResourceString(context, "forbidden_option"), Toast.LENGTH_SHORT)
                .show();
    }
}
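
Note that this example passes locale.getLanguage(), which returns only the two-letter ISO 639 code (for instance "en" or "nb"). EXTRA_LANGUAGE is documented as a full BCP 47 tag; a hedged alternative, assuming API level 21 or higher, is Locale.toLanguageTag():

// Alternative sketch (not from the original source, requires API 21+):
// toLanguageTag() yields a full BCP 47 tag such as "nb-NO", matching the
// documented format of EXTRA_LANGUAGE.
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, locale.toLanguageTag());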

From source file:net.olejon.mdapp.Icd10Activity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
    case android.R.id.home: {
        NavUtils.navigateUpFromSameTask(this);
        return true;
    }
    case R.id.icd10_menu_voice_search: {
        try {
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "nb-NO");
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
        } catch (Exception e) {
            new MaterialDialog.Builder(mContext).title(getString(R.string.device_not_supported_dialog_title))
                    .content(getString(R.string.device_not_supported_dialog_message))
                    .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                    .contentColorRes(R.color.black).positiveColorRes(R.color.dark_blue).show();
        }

        return true;
    }
    default: {
        return super.onOptionsItemSelected(item);
    }
    }
}

From source file:org.apache.cordova.nodialogspeechrecognizer.SpeechRecognizer.java

/**
 * Fire an intent to start the speech recognition activity.
 *
 * @param args
 *          Argument array with the following string args: [number of
 *          matches][prompt string][language]
 */
private void startSpeechRecognitionActivity(JSONArray args) {
    int maxMatches = 1;
    String prompt = "";
    String language = Locale.getDefault().toString();

    try {
        if (args.length() > 0) {
            // Maximum number of matches, 0 means the recognizer decides
            String temp = args.getString(0);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Optional text prompt
            prompt = args.getString(1);
        }
        if (args.length() > 2) {
            // Optional language specified
            language = args.getString(2);
        }
    } catch (Exception e) {
        Log.e(LOG_TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    // Create the intent and set parameters
    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, cordova.getActivity().getPackageName());
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    if (!prompt.equals(""))
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    try {
        Handler loopHandler = new Handler(Looper.getMainLooper());
        loopHandler.post(new Runnable() {

            @Override
            public void run() {
                speech.startListening(intent);
            }

        });
    } catch (Exception e) {
        Log.e("error", "er", e);
    }
    // cordova.startActivityForResult(this, intent, REQUEST_CODE);
}

From source file:com.phonegap.plugins.speech.XSpeechRecognizer.java

/**
 * Fire an intent to start the speech recognition activity.
 *
 * @param args Argument array with the following string args: [number of matches][language]
 */
private void startSpeechRecognitionActivity(JSONArray args) {

    int maxMatches = 0;
    String language = Locale.getDefault().toString();

    try {
        if (args.length() > 0) {
            // Maximum number of matches, 0 means the recognizer decides
            String temp = args.getString(0);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Language
            language = args.getString(1);
        }
    } catch (Exception e) {
        Log.e(TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }

    // Create the intent and set parameters
    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE,"voice.recognition.test");
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);

    Handler loopHandler = new Handler(Looper.getMainLooper());
    loopHandler.post(new Runnable() {

        @Override
        public void run() {
            recognizer.startListening(intent);
        }

    });
}

From source file:net.olejon.mdapp.Icd10ChapterActivity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
    case android.R.id.home: {
        NavUtils.navigateUpFromSameTask(this);
        return true;
    }
    case R.id.icd10_chapter_menu_voice_search: {
        try {
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "nb-NO");
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
        } catch (Exception e) {
            new MaterialDialog.Builder(mContext).title(getString(R.string.device_not_supported_dialog_title))
                    .content(getString(R.string.device_not_supported_dialog_message))
                    .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                    .contentColorRes(R.color.black).positiveColorRes(R.color.dark_blue).show();
        }

        return true;
    }
    default: {
        return super.onOptionsItemSelected(item);
    }
    }
}

From source file:net.olejon.mdapp.DiseasesAndTreatmentsActivity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
    case android.R.id.home: {
        NavUtils.navigateUpFromSameTask(this);
        return true;
    }
    case R.id.diseases_and_treatments_menu_voice_search: {
        String language = (mSearchLanguage.equals("")) ? "en-US" : "nb-NO";

        try {
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
        } catch (Exception e) {
            new MaterialDialog.Builder(mContext).title(getString(R.string.device_not_supported_dialog_title))
                    .content(getString(R.string.device_not_supported_dialog_message))
                    .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                    .contentColorRes(R.color.black).positiveColorRes(R.color.dark_blue).show();
        }

        return true;
    }
    case R.id.diseases_and_treatments_menu_clear_recent_searches: {
        clearRecentSearches();
        return true;
    }
    default: {
        return super.onOptionsItemSelected(item);
    }
    }
}

From source file:net.olejon.mdapp.ClinicalTrialsActivity.java

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
    case android.R.id.home: {
        NavUtils.navigateUpFromSameTask(this);
        return true;
    }
    case R.id.clinicaltrials_menu_voice_search: {
        try {
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "en-US");
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
            startActivityForResult(intent, VOICE_SEARCH_REQUEST_CODE);
        } catch (Exception e) {
            new MaterialDialog.Builder(mContext).title(getString(R.string.device_not_supported_dialog_title))
                    .content(getString(R.string.device_not_supported_dialog_message))
                    .positiveText(getString(R.string.device_not_supported_dialog_positive_button))
                    .contentColorRes(R.color.black).positiveColorRes(R.color.dark_blue).show();
        }

        return true;
    }
    case R.id.clinicaltrials_menu_clear_recent_searches: {
        clearRecentSearches();
        return true;
    }
    default: {
        return super.onOptionsItemSelected(item);
    }
    }
}

From source file:com.ct.speech.HintReceiver.java

/**
 * Fire an intent to start the speech recognition activity.
 *
 * @param args
 *            Argument array with the following string args: [req
 *            code][number of matches][prompt string][language], passed to
 *            the Google speech recognizer
 */

private void startSpeechRecognitionActivity(JSONArray args) {
    // int reqCode = 42; // Hitchhiker? // global now
    int maxMatches = 2;
    String prompt = "";
    String language = "";
    try {
        if (args.length() > 0) {
            // Request code - passed back to the caller on a successful
            // operation
            String temp = args.getString(0);
            reqCode = Integer.parseInt(temp);
        }
        if (args.length() > 1) {
            // Maximum number of matches, 0 means the recognizer decides
            String temp = args.getString(1);
            maxMatches = Integer.parseInt(temp);
        }
        if (args.length() > 2) {
            // Optional text prompt
            prompt = args.getString(2);
        }
        if (args.length() > 3) {
            // Optional language specified
            language = args.getString(3);
        }
    } catch (Exception e) {
        Log.e(TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
    }
    final Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra("calling_package", "com.ct.BasicAppFrame");
    // If specific language
    if (!language.equals("")) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
    }
    if (maxMatches > 0)
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
    if (!(prompt.length() == 0))
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
    // ctx.startActivityForResult(this, intent, reqCode); //removed to try
    // using recognizer directly
    try {
        this.ctx.runOnUiThread(new Runnable() {
            public void run() {
                final SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer((Context) ctx);
                RecognitionListener listener = new RecognitionListener() {
                    @Override
                    public void onResults(Bundle results) {
                        //closeRecordedFile();
                        sendBackResults(results);
                        ArrayList<String> voiceResults = results
                                .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
                        if (voiceResults == null) {
                            Log.e(TAG, "No voice results");
                        } else {
                            // Log.d(TAG, "Printing matches: ");
                            for (@SuppressWarnings("unused")
                            String match : voiceResults) {
                                // Log.d(TAG, match);
                            }
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onReadyForSpeech(Bundle params) {
                        // Log.d(TAG, "Ready for speech");
                    }

                    @Override
                    public void onError(int error) {
                        Log.d(TAG, "Error listening for speech: " + error);
                        if (error == SpeechRecognizer.ERROR_NO_MATCH) {
                            sendBackResults(NO_MATCH);
                        } else if (error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT) {
                            sendBackResults(NO_INPUT);
                        } else {
                            speechFailure("unknown error");
                        }
                        recognizer.destroy();
                    }

                    @Override
                    public void onBeginningOfSpeech() {
                        // Log.d(TAG, "Speech starting");
                        setStartOfSpeech();
                    }

                    @Override
                    //doesn't fire in Android after Ice Cream Sandwich
                    public void onBufferReceived(byte[] buffer) {
                    }

                    @Override
                    public void onEndOfSpeech() {
                        setEndOfSpeech();
                    }

                    @Override
                    public void onEvent(int eventType, Bundle params) {
                        // TODO Auto-generated method stub

                    }

                    @Override
                    public void onPartialResults(Bundle partialResults) {
                        // TODO Auto-generated method stub

                    }

                    @Override
                    public void onRmsChanged(float rmsdB) {
                        // TODO Auto-generated method stub

                    }
                };
                recognizer.setRecognitionListener(listener);
                Log.d(TAG, "starting speech recognition activity");
                recognizer.startListening(intent);
            }
        });
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:com.example.h156252.connected_cars.CarGrid.java

/**
 * Shows the Google speech input dialog.
 */
private void promptSpeechInput() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "What's your message?");
    try {
        startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
    } catch (ActivityNotFoundException a) {
        Toast.makeText(getApplicationContext(), "Speech not supported", Toast.LENGTH_SHORT).show();
    }
}
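
This example also passes a Locale object rather than the documented String tag to EXTRA_LANGUAGE, and, like several examples above, it launches the recognizer with startActivityForResult without showing where the result is consumed. A minimal sketch of the receiving side, assuming the same REQ_CODE_SPEECH_INPUT constant used above (not taken from the original source), could look like this:

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQ_CODE_SPEECH_INPUT && resultCode == RESULT_OK && data != null) {
        // The recognizer returns its matches, best first, in EXTRA_RESULTS
        ArrayList<String> results =
                data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (results != null && !results.isEmpty()) {
            String spokenText = results.get(0);
            // use spokenText, e.g. display it or send it as the message
        }
    }
}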

From source file:atlc.granadaaccessibilityranking.VoiceActivity.java

/**
 * Starts speech recognition after checking the ASR parameters
 *
 * @param language Language used for speech recognition (e.g. Locale.ENGLISH)
 * @param languageModel Type of language model used (free form or web search)
 * @param maxResults Maximum number of recognition results
 * @throws Exception if the specified language is not available or the other parameters are not valid
 */
public void listen(final Locale language, final String languageModel, final int maxResults) throws Exception {
    checkASRPermission();

    if ((languageModel.equals(RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
            || languageModel.equals(RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH)) && (maxResults >= 0)) {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);

        // Specify the calling package to identify the application
        intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, ctx.getPackageName());
        //Caution: be careful not to use: getClass().getPackage().getName());

        // Specify language model
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, languageModel);

        // Specify how many results to receive. Results listed in order of confidence
        intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxResults);

        // Specify recognition language
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);

        myASR.startListening(intent);

    } else {
        Log.e(LOGTAG, "Invalid params to listen method");
        throw new Exception("Invalid params to listen method"); //If the input parameters are not valid, it throws an exception
    }

}
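
A hypothetical call to this method, with illustrative arguments, might look like the following. Note that the method passes the Locale object itself to EXTRA_LANGUAGE; the documented value is a BCP 47 String such as the result of language.toLanguageTag().

// Hypothetical usage (arguments are illustrative, not from the original source):
listen(Locale.ENGLISH, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM, 5);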