List of usage examples for android.speech SpeechRecognizer createSpeechRecognizer
public static SpeechRecognizer createSpeechRecognizer(final Context context)
From source file:MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // Disable the mic button and warn the user when no installed activity
    // can service RecognizerIntent.ACTION_RECOGNIZE_SPEECH.
    PackageManager packageManager = getPackageManager();
    List<ResolveInfo> recognizers = packageManager
            .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
    if (recognizers.size() == 0) {
        findViewById(R.id.imageButton).setEnabled(false);
        Toast.makeText(this, "Speech Recognition Not Supported", Toast.LENGTH_LONG).show();
    }

    SpeechRecognizer speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
}
From source file:com.github.zagum.speechrecognitionview.sample.MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    // Builds the demo screen: a RecognitionProgressView driven by a
    // SpeechRecognizer, plus "listen" and "reset" buttons.
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // Bar colors and per-bar maximum heights (in dp) for the progress view.
    int[] colors = { ContextCompat.getColor(this, R.color.color1), ContextCompat.getColor(this, R.color.color2),
            ContextCompat.getColor(this, R.color.color3), ContextCompat.getColor(this, R.color.color4),
            ContextCompat.getColor(this, R.color.color5) };
    int[] heights = { 60, 76, 58, 80, 55 };

    speechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);

    final RecognitionProgressView recognitionProgressView = (RecognitionProgressView) findViewById(
            R.id.recognition_view);
    recognitionProgressView.setSpeechRecognizer(speechRecognizer);
    recognitionProgressView.setRecognitionListener(new RecognitionListenerAdapter() {
        @Override
        public void onResults(Bundle results) {
            showResults(results);
        }
    });
    recognitionProgressView.setColors(colors);
    recognitionProgressView.setBarMaxHeightsInDp(heights);
    recognitionProgressView.play();

    Button listen = (Button) findViewById(R.id.listen);
    Button reset = (Button) findViewById(R.id.reset);

    listen.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Request the microphone permission on demand; otherwise start
            // recognition immediately.
            if (ContextCompat.checkSelfPermission(MainActivity.this,
                    Manifest.permission.RECORD_AUDIO) != PackageManager.PERMISSION_GRANTED) {
                requestPermission();
            } else {
                // NOTE(review): startRecognition() is invoked here AND again
                // 50 ms later via postDelayed — confirm the double call is
                // intentional (e.g. a workaround) and not a copy/paste slip.
                startRecognition();
                recognitionProgressView.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        startRecognition();
                    }
                }, 50);
            }
        }
    });

    reset.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Stop whatever the view is doing and restart its animation.
            recognitionProgressView.stop();
            recognitionProgressView.play();
        }
    });
}
From source file:com.annuletconsulting.homecommand.node.MainFragment.java
@Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { speechRecognizer = SpeechRecognizer.createSpeechRecognizer(getActivity()); speechRecognizer.setRecognitionListener(new RecognitionListener() { @Override//from w ww.jav a 2s . c om public void onReadyForSpeech(Bundle params) { for (String key : params.keySet()) Log.d(TAG, (String) params.get(key)); } @Override public void onBeginningOfSpeech() { Log.d(TAG, "Begin"); ignore = false; } @Override public void onRmsChanged(float rmsdB) { // Log.d(TAG, "Rms changed: "+rmsdB); } @Override public void onBufferReceived(byte[] buffer) { Log.d(TAG, "Buffer Received: " + buffer.toString()); } @Override public void onEndOfSpeech() { Log.d(TAG, "Endofspeech()"); } @Override public void onError(int error) { Log.d(TAG, "error: " + error); } @Override public void onResults(Bundle results) { Log.d(TAG, "onResults()"); for (String key : results.keySet()) { Log.d(TAG, key + ": " + results.get(key).toString()); // Iterator<String> it = ((ArrayList<String>) // results.get(key)).listIterator(); // while (it.hasNext()) // Log.d(TAG, it.next()); } if (!ignore) sendToServer(results.getStringArrayList(RESULTS_KEY).get(0)); } @Override public void onPartialResults(Bundle partialResults) { // Log.d(TAG, "onPartialResults()"); // String firstWord = partialResults.getStringArrayList(RESULTS_KEY).get(0).split(" ")[0]; // Log.d(TAG, firstWord); // if (firstWord.length() > 0 && !firstWord.equalsIgnoreCase("computer") && !firstWord.equalsIgnoreCase("android")) { // Log.d(TAG, "Killing this Recognition."); // ignore = true; // stopRecognizing(); // startListening(); // } } @Override public void onEvent(int eventType, Bundle params) { Log.d(TAG, "onEvent() type: " + eventType); for (String key : params.keySet()) Log.d(TAG, (String) params.get(key)); } }); View v = inflater.inflate(R.layout.main_fragment, null); button = (Button) v.findViewById(R.id.listen_button); 
button.setBackgroundResource(R.drawable.stopped); button.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { toggleListenMode(); } }); instance = this; return v; }
From source file:com.phonegap.plugins.speech.XSpeechRecognizer.java
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) { this.callbackContext = callbackContext; Boolean isValidAction = true; if (ACTION_SPEECH_RECOGNIZE_START.equals(action)) { Handler loopHandler = new Handler(Looper.getMainLooper()); loopHandler.post(new Runnable() { @Override//from w ww.java 2s . c o m public void run() { recognizer = SpeechRecognizer.createSpeechRecognizer(cordova.getActivity().getBaseContext()); recognizer.setRecognitionListener(new listener()); } }); startSpeechRecognitionActivity(args); } else if (ACTION_GET_SUPPORTED_LANGUAGES.equals(action)) { getSupportedLanguages(); } else if (ACTION_SPEECH_RECOGNIZE_STOP.equals(action)) { stopSpeechRecognitionActivity(); } else { this.callbackContext.error("Unknown action: " + action); isValidAction = false; } return isValidAction; }
From source file:com.vyasware.vaani.MainActivity.java
@Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); returnedText = (TextView) findViewById(R.id.textView1); outputText = (TextView) findViewById(R.id.textView2); progressBar = (ProgressBar) findViewById(R.id.progressBar1); toggleButton = (ToggleButton) findViewById(R.id.toggleButton1); progressBar.setVisibility(View.INVISIBLE); speech = SpeechRecognizer.createSpeechRecognizer(this); speech.setRecognitionListener(this); recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); // recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, // "en"); recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName()); recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "hi-IN"); recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "hi-IN"); recognizerIntent.putExtra(RecognizerIntent.EXTRA_ONLY_RETURN_LANGUAGE_PREFERENCE, "hi-IN"); recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM); recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 5); tts = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() { @Override/*from w ww . ja v a 2 s . 
c om*/ public void onInit(int status) { if (status != TextToSpeech.ERROR) { tts.setLanguage(new Locale("hi_IN")); tts.setSpeechRate(0.9f); } } }); returnedText.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(Intent.ACTION_WEB_SEARCH); intent.putExtra(SearchManager.QUERY, returnedText.getText()); if (intent.resolveActivity(getPackageManager()) != null) startActivity(intent); } }); toggleButton.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (isChecked) { progressBar.setVisibility(View.VISIBLE); progressBar.setIndeterminate(true); speech.startListening(recognizerIntent); outputText.setText(""); } else { progressBar.setIndeterminate(false); progressBar.setVisibility(View.INVISIBLE); speech.stopListening(); } } }); }
From source file:com.wordpress.httpstheredefiningproductions.phonefinder.recorder.java
@Override
public void onCreate() {
    // Component start-up: grab the system services this recorder needs and
    // build a reusable free-form speech-recognition intent.
    super.onCreate();
    //get the things above linked up to actual things in the app
    v = (Vibrator) this.getSystemService(Context.VIBRATOR_SERVICE);
    mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    mSpeechRecognizer = SpeechRecognizer.createSpeechRecognizer(this);
    mSpeechRecognizer.setRecognitionListener(new SpeechRecognitionListener());
    mSpeechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // Identify this app to the recognition service.
    mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
}
From source file:atlc.granadaaccessibilityranking.VoiceActivity.java
/** * Creates the speech recognizer and text-to-speech synthesizer instances * @see RecognitionListener.java//from www . j ava2s . c o m * @param ctx context of the interaction * */ public void initSpeechInputOutput(Activity ctx) { this.ctx = ctx; PackageManager packManager = ctx.getPackageManager(); setTTS(); // Find out whether speech recognition is supported List<ResolveInfo> intActivities = packManager .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0); if (intActivities.size() != 0 || "generic".equals(Build.BRAND.toLowerCase(Locale.US))) { myASR = SpeechRecognizer.createSpeechRecognizer(ctx); myASR.setRecognitionListener(this); } else myASR = null; }
From source file:conversandroid.RichASR.java
/** * Creates the speech recognizer instance if it is available * */// w w w . j ava 2 s. c o m public void initASR() { // find out whether speech recognition is supported List<ResolveInfo> intActivities = this.getPackageManager() .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0); //Speech recognition does not currently work on simulated devices if ("generic".equals(Build.BRAND.toLowerCase())) { Log.e(LOGTAG, "ASR is not supported on virtual devices"); } else { if (intActivities.size() != 0) { myASR = SpeechRecognizer.createSpeechRecognizer(getApplicationContext()); myASR.setRecognitionListener(this); } } Log.i(LOGTAG, "ASR initialized"); }
From source file:com.ksutopia.bbtalks.plugins.P201SpeechToText.java
/** * Fire an intent to start the speech recognition activity. *//from w w w .j a va 2s . c o m * @param args Argument array with the following string args: [req code][number of matches][prompt string] */ private void startSpeechRecognitionActivity(JSONArray args) { int maxMatches = 0; String prompt = ""; String language = Locale.getDefault().toString(); try { if (args.length() > 0) { // Maximum number of matches, 0 means the recognizer decides String temp = args.getString(0); maxMatches = Integer.parseInt(temp); } if (args.length() > 1) { // Optional text prompt prompt = args.getString(1); } if (args.length() > 2) { // Optional language specified language = args.getString(2); } } catch (Exception e) { Log.e(LOG_TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString())); } // Create the intent and set parameters speech = SpeechRecognizer.createSpeechRecognizer(context); recognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH); recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM); recognizerIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName()); recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "en"); recognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH); if (maxMatches > 0) recognizerIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches); if (!prompt.equals("")) recognizerIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt); speech.setRecognitionListener(listener); speech.startListening(recognizerIntent); }
From source file:org.botlibre.sdk.activity.MicConfiguration.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    // Configures the microphone test screen: wires up the widgets, restores
    // the persisted offline-speech/debug flags, creates a recognizer for the
    // hands-free test, and installs listeners for the mode spinner and the
    // two checkboxes.
    super.onCreate(savedInstanceState);

    //set up the view
    setContentView(R.layout.activity_mic_configuration);
    btn = (Button) findViewById(R.id.btnNextTest);
    txt = (TextView) findViewById(R.id.txtMicSt);
    editTextForGoogle = (EditText) findViewById(R.id.googleText);
    editTextSpeech = (EditText) findViewById(R.id.txtSpeech);
    spinOption = (Spinner) findViewById(R.id.spinOptions);
    play = (Button) findViewById(R.id.btnPlayBack);
    micButton = (ImageButton) findViewById(R.id.micButton);
    ckOfflineSpeech = (CheckBox) findViewById(R.id.ckOfflineSpeech);
    ckDebug = (CheckBox) findViewById(R.id.ckDebug);

    //Load data: reflect the current global flags in the checkboxes.
    ckOfflineSpeech.setChecked(MainActivity.offlineSpeech);
    ckDebug.setChecked(ChatActivity.DEBUG);

    //disabling buttons for recording sound (nothing recorded yet to play back)
    play.setEnabled(false);

    //file saved: destination for the test recording
    outputFile = Environment.getExternalStorageDirectory().getAbsolutePath() + "/myrec.3gp";

    //setting the adapter for the dropdown menu
    ArrayAdapter<String> adapter = new ArrayAdapter<String>(this, android.R.layout.simple_spinner_dropdown_item,
            new String[] { "Hands Free", "Google", "Test Mic" });
    spinOption.setAdapter(adapter);

    //Creating Speech (Hands Free)
    speech = SpeechRecognizer.createSpeechRecognizer(this);
    speech.setRecognitionListener(this);

    // Spinner selection toggles which of the three test panels is visible:
    // 0 = Hands Free, 1 = Google, 2 = Test Mic.
    spinOption.setOnItemSelectedListener(new OnItemSelectedListener() {
        @Override
        public void onItemSelected(AdapterView<?> arg0, View arg1, int pos, long arg3) {
            txt.setText("Status: OFF");
            spinOption.setSelection(pos);
            LinearLayout test1 = (LinearLayout) findViewById(R.id.test1);
            LinearLayout test2 = (LinearLayout) findViewById(R.id.test2);
            LinearLayout test3 = (LinearLayout) findViewById(R.id.test3);
            switch (pos) {
            case 0:
                test1.setVisibility(View.VISIBLE);
                test2.setVisibility(View.GONE);
                test3.setVisibility(View.GONE);
                break;
            case 1:
                test1.setVisibility(View.GONE);
                test3.setVisibility(View.GONE);
                test2.setVisibility(View.VISIBLE);
                break;
            case 2:
                // The mic test reads back the recorded file, so ask for
                // storage permission before showing the panel.
                ActivityCompat.requestPermissions(MicConfiguration.this,
                        new String[] { Manifest.permission.READ_EXTERNAL_STORAGE }, 2);
                test1.setVisibility(View.GONE);
                test3.setVisibility(View.VISIBLE);
                test2.setVisibility(View.GONE);
                break;
            }
        }

        @Override
        public void onNothingSelected(AdapterView<?> arg0) {
        }
    });

    //CheckBox OfflineSpeech: persist the flag and mirror it into the global.
    // NOTE(review): commit() writes synchronously on the caller's thread;
    // apply() would avoid that — confirm before changing.
    ckOfflineSpeech.setOnCheckedChangeListener(new OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            SharedPreferences.Editor cookies = MainActivity.current.getPreferences(MODE_PRIVATE).edit();
            if (isChecked) {
                MainActivity.offlineSpeech = true;
                cookies.putBoolean("offlineSpeech", true);
                cookies.commit();
                return;
            }
            //else
            MainActivity.offlineSpeech = false;
            cookies.putBoolean("offlineSpeech", false);
            cookies.commit();
        }
    });

    //CheckBox Debugger: persist the flag and mirror it into the global.
    ckDebug.setOnCheckedChangeListener(new OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
            SharedPreferences.Editor cookies = MainActivity.current.getPreferences(MODE_PRIVATE).edit();
            if (isChecked) {
                ChatActivity.DEBUG = true;
                cookies.putBoolean("debug", true);
                cookies.commit();
                return;
            }
            //else
            ChatActivity.DEBUG = false;
            cookies.putBoolean("debug", false);
            cookies.commit();
        }
    });
}