List of usage examples for android.speech.tts TextToSpeech QUEUE_FLUSH
int QUEUE_FLUSH
Queue mode in which all entries in the playback queue (media to be played and text to be synthesized) are dropped and replaced by the new entry.
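Before the examples, here is a minimal sketch of the usual QUEUE_FLUSH pattern: create the engine, wait for onInit, then speak. It is not taken from the examples below; the class name SpeakDemoActivity, the field tts, and the utterance id "demo-utterance" are illustrative assumptions.

import android.app.Activity;
import android.os.Build;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;

public class SpeakDemoActivity extends Activity implements TextToSpeech.OnInitListener {

    private TextToSpeech tts;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // The engine is not usable until onInit reports SUCCESS.
        tts = new TextToSpeech(this, this);
    }

    @Override
    public void onInit(int status) {
        if (status != TextToSpeech.SUCCESS) {
            return;
        }
        // QUEUE_FLUSH drops anything still queued and starts this utterance immediately.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            tts.speak("Text to speech is ready", TextToSpeech.QUEUE_FLUSH, null, "demo-utterance");
        } else {
            tts.speak("Text to speech is ready", TextToSpeech.QUEUE_FLUSH, null);
        }
    }

    @Override
    protected void onDestroy() {
        tts.shutdown();
        super.onDestroy();
    }
}

On API 21 and later this uses the Bundle-based speak(CharSequence, int, Bundle, String) overload; older releases fall back to the earlier speak(String, int, HashMap) overload, as several of the examples below also do.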
From source file:in.codehex.arrow.MainActivity.java
/**
 * Obtain the direction from the google directions API
 */
private void processDirection() {
    textToSpeech.speak("Obtaining direction. Please wait.", TextToSpeech.QUEUE_FLUSH, null, null);
    String url = null;
    try {
        url = Config.URL_API_DIRECTIONS + "origin=" + lat + "," + lng
                + "&destination=" + URLEncoder.encode(destination, "utf-8")
                + "&mode=walking" + "&key=" + Config.BROWSER_KEY;
        System.out.println(url);
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }
    final StringRequest stringRequest = new StringRequest(Request.Method.GET, url,
            new Response.Listener<String>() {
                @Override
                public void onResponse(String response) {
                    dataPreferences.edit().putString(Config.KEY_PREF_DATA, response).apply();
                    processDirectionData(response);
                }
            }, new Response.ErrorListener() {
                @Override
                public void onErrorResponse(VolleyError error) {
                    textToSpeech.speak("Network error! Please check your internet connection!",
                            TextToSpeech.QUEUE_FLUSH, null, null);
                    error.printStackTrace();
                }
            });
    AppController.getInstance().addToRequestQueue(stringRequest, "direction");
}
From source file:com.vyasware.vaani.MainActivity.java
private void doMsg(String noun) {
    System.out.println("msg");
    Cursor cur = null;
    ContentResolver cr = null;
    try {
        cr = getContentResolver();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    try {
        cur = cr.query(ContactsContract.Contacts.CONTENT_URI, null, null, null, null);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    try {
        boolean called = false;
        if (cur.getCount() > 0) {
            while (cur.moveToNext()) {
                String id = cur.getString(cur.getColumnIndex(ContactsContract.Contacts._ID));
                String name = cur.getString(cur.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME));
                // Log.i("Names", name);
                if (Integer.parseInt(
                        cur.getString(cur.getColumnIndex(ContactsContract.Contacts.HAS_PHONE_NUMBER))) > 0) {
                    // Query phone here. Covered next
                    Cursor phones = getContentResolver().query(
                            ContactsContract.CommonDataKinds.Phone.CONTENT_URI, null,
                            ContactsContract.CommonDataKinds.Phone.CONTACT_ID + " = " + id, null, null);
                    while (phones.moveToNext()) {
                        String phoneNumberX = phones.getString(
                                phones.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER));
                        // Log.i("Number", phoneNumber);
                        boolean a = (name.equalsIgnoreCase(noun));
                        System.out.println(name + " " + a + " " + phoneNumberX);
                        if (a)
                            System.out.println(phoneNumberX);
                        if (a) {
                            String b = "sms:";
                            String smsUri = b + phoneNumberX;
                            Intent smsIntent = new Intent(Intent.ACTION_VIEW);
                            smsIntent.setData(Uri.parse(smsUri));
                            called = true;
                            startActivity(smsIntent);
                            tts.speak(noun + " ? !", TextToSpeech.QUEUE_FLUSH, null);
                        }
                    }
                    phones.close();
                }
            }
            if (!called)
                tts.speak(noun + " ? !", TextToSpeech.QUEUE_FLUSH, null);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:onion.chat.MainActivity.java
private void listen() {
    //inform();
    PackageManager pm = getPackageManager();
    List<ResolveInfo> activities = pm
            .queryIntentActivities(new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
    if (activities.size() == 0) {
        Toast.makeText(this, "Voice recognizer not present", Toast.LENGTH_SHORT).show();
    } else {
        try {
            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, getClass().getPackage().getName());
            intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Its Ur's");
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
            startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
        } catch (Exception e) {
            String toSpeak = "Oops your device doesn't support Voice recognition";
            Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
            t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        }
    }
}
From source file:com.ola.insta.BookingAcivity.java
private void speakOut(String textToSpeak) {
    mTextToSpeech.speak(textToSpeak, TextToSpeech.QUEUE_FLUSH, null);
}
From source file:root.magicword.MagicWord.java
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == SPEECH_REQUEST_CODE) {
        if (resultCode == RESULT_OK) {
            ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            if (matches.size() == 0) {
                tts.speak("Heard nothing", TextToSpeech.QUEUE_FLUSH, null);
            } else {
                mostLikelyThingHeard = matches.get(0);
                String magicWord = this.getResources().getString(R.string.magicword);
                if (mostLikelyThingHeard.equals(magicWord)) {
                    tts.speak("You said the magic word!", TextToSpeech.QUEUE_FLUSH, null);
                } else {
                    tts.speak("The magic word is " + mostLikelyThingHeard + " try again",
                            TextToSpeech.QUEUE_FLUSH, null);
                }
            }
            // result.setText("heard: " + matches);
            result.setText("heard: " + mostLikelyThingHeard);
        } else {
            Log.d(TAG, "result NOT ok");
        }
    }
    super.onActivityResult(requestCode, resultCode, data);
}
From source file:com.vyasware.vaani.MainActivity.java
private void doCall(String noun) {
    System.out.println("call");
    System.out.println(noun);
    Cursor cur = null;
    ContentResolver cr = null;
    try {
        cr = getContentResolver();
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    try {
        cur = cr.query(ContactsContract.Contacts.CONTENT_URI, null, null, null, null);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    try {
        boolean called = false;
        if (cur.getCount() > 0) {
            while (cur.moveToNext()) {
                String id = cur.getString(cur.getColumnIndex(ContactsContract.Contacts._ID));
                String name = cur.getString(cur.getColumnIndex(ContactsContract.Contacts.DISPLAY_NAME));
                // Log.i("Names", name);
                if (Integer.parseInt(
                        cur.getString(cur.getColumnIndex(ContactsContract.Contacts.HAS_PHONE_NUMBER))) > 0) {
                    // Query phone here. Covered next
                    Cursor phones = getContentResolver().query(
                            ContactsContract.CommonDataKinds.Phone.CONTENT_URI, null,
                            ContactsContract.CommonDataKinds.Phone.CONTACT_ID + " = " + id, null, null);
                    while (phones.moveToNext()) {
                        String phoneNumberX = phones.getString(
                                phones.getColumnIndex(ContactsContract.CommonDataKinds.Phone.NUMBER));
                        // Log.i("Number", phoneNumber);
                        boolean a = (name.equalsIgnoreCase(noun));
                        System.out.println(name + " " + a + " " + phoneNumberX);
                        if (a)
                            System.out.println(phoneNumberX);
                        if (a) {
                            String b = "tel:";
                            String phoneCallUri = b + phoneNumberX;
                            Intent phoneCallIntent = new Intent(Intent.ACTION_CALL);
                            phoneCallIntent.setData(Uri.parse(phoneCallUri));
                            called = true;
                            startActivity(phoneCallIntent);
                        }
                    }
                    phones.close();
                }
            }
            if (!called)
                tts.speak(noun + " ? !", TextToSpeech.QUEUE_FLUSH, null);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    // tts.speak(noun + " ? !", TextToSpeech.QUEUE_FLUSH, null);
}
From source file:in.codehex.arrow.MainActivity.java
/**
 * Parse the JSON object and create an array list of the instructions
 *
 * @param response the json response from the google directions API
 */
private void processDirectionData(String response) {
    directionItemList.clear();
    try {
        JSONObject json = new JSONObject(response);
        JSONArray array = json.getJSONArray("routes");
        JSONObject routes = array.getJSONObject(0);
        JSONArray legs = routes.getJSONArray("legs");
        JSONObject jsonObject = legs.getJSONObject(0);
        String source = jsonObject.getString("start_address");
        String destination = jsonObject.getString("end_address");
        JSONArray steps = jsonObject.getJSONArray("steps");
        for (int i = 0; i < steps.length(); i++) {
            JSONObject object = steps.getJSONObject(i);
            JSONObject distance = object.getJSONObject("distance");
            String mDistance = distance.getString("text");
            JSONObject duration = object.getJSONObject("duration");
            String mDuration = duration.getString("text");
            String mInstruction = object.getString("html_instructions");
            String data = "Instruction: " + Html.fromHtml(mInstruction) + ". Distance: " + mDistance
                    + ". Duration: " + mDuration + ".";
            JSONObject startLocation = object.getJSONObject("start_location");
            double startLat = startLocation.getDouble("lat");
            double startLng = startLocation.getDouble("lng");
            JSONObject endLocation = object.getJSONObject("end_location");
            double endLat = endLocation.getDouble("lat");
            double endLng = endLocation.getDouble("lng");
            JSONObject polyline = object.getJSONObject("polyline");
            String points = polyline.getString("points");
            directionItemList.add(new DirectionItem(data, startLat, startLng, endLat, endLng, points, false));
            updateRecentSearch(source, destination);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (directionItemList.isEmpty())
        textToSpeech.speak("No direction instruction is available", TextToSpeech.QUEUE_FLUSH, null, null);
}
From source file:it.iziozi.iziozi.gui.IOBoardActivity.java
public void tapOnSpeakableButton(final IOSpeakableImageButton spkBtn, final Integer level) {
    if (IOGlobalConfiguration.isEditing) {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        LayoutInflater inflater = getLayoutInflater();
        View layoutView = inflater.inflate(R.layout.editmode_alertview, null);
        builder.setTitle(getString(R.string.choose));
        builder.setView(layoutView);

        final AlertDialog dialog = builder.create();

        final Switch matrioskaSwitch = (Switch) layoutView.findViewById(R.id.editModeAlertToggleBoard);
        Button editPictoButton = (Button) layoutView.findViewById(R.id.editModeAlertActionPicture);
        final Button editBoardButton = (Button) layoutView.findViewById(R.id.editModeAlertActionBoard);

        matrioskaSwitch.setChecked(spkBtn.getIsMatrioska());
        editBoardButton.setEnabled(spkBtn.getIsMatrioska());

        matrioskaSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                spkBtn.setIsMatrioska(isChecked);
                editBoardButton.setEnabled(isChecked);
            }
        });

        editPictoButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                //spkBtn.showInsertDialog();
                Intent cIntent = new Intent(getApplicationContext(), IOCreateButtonActivity.class);
                cIntent.putExtra(BUTTON_INDEX,
                        mActualLevel.getBoardAtIndex(mActualIndex).getButtons().indexOf(spkBtn));
                cIntent.putExtra(BUTTON_TEXT, spkBtn.getSentence());
                cIntent.putExtra(BUTTON_TITLE, spkBtn.getmTitle());
                cIntent.putExtra(BUTTON_IMAGE_FILE, spkBtn.getmImageFile());
                cIntent.putExtra(BUTTON_AUDIO_FILE, spkBtn.getAudioFile());
                startActivityForResult(cIntent, CREATE_BUTTON_CODE);
                matrioskaSwitch.setOnCheckedChangeListener(null);
                dialog.dismiss();
            }
        });

        editBoardButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                IOLevel nestedBoard = spkBtn.getLevel();
                pushLevel(nestedBoard);
                matrioskaSwitch.setOnCheckedChangeListener(null);
                dialog.dismiss();
            }
        });

        dialog.show();
    } else {
        if (IOGlobalConfiguration.isScanMode) {
            IOSpeakableImageButton scannedButton = mActualLevel.getBoardAtIndex(mActualIndex).getButtons()
                    .get(mActualScanIndex);
            if (scannedButton.getAudioFile() != null && scannedButton.getAudioFile().length() > 0) {
                final MediaPlayer mPlayer = new MediaPlayer();
                try {
                    mPlayer.setDataSource(scannedButton.getAudioFile());
                    mPlayer.prepare();
                    mPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                        @Override
                        public void onCompletion(MediaPlayer mp) {
                            mPlayer.release();
                        }
                    });
                    mPlayer.start();
                } catch (IOException e) {
                    Log.e("playback_debug", "prepare() failed");
                }
            } else if (mCanSpeak) {
                Log.d("speakable_debug", "should say: " + scannedButton.getSentence());
                if (scannedButton.getSentence().isEmpty())
                    tts.speak(getResources().getString(R.string.tts_nosentence), TextToSpeech.QUEUE_FLUSH, null);
                else
                    tts.speak(scannedButton.getSentence(), TextToSpeech.QUEUE_FLUSH, null);
            } else {
                Toast.makeText(this, getResources().getString(R.string.tts_notinitialized), Toast.LENGTH_LONG)
                        .show();
            }
            if (scannedButton.getIsMatrioska() && null != scannedButton.getLevel()) {
                pushLevel(scannedButton.getLevel());
            }
        } else {
            if (spkBtn.getAudioFile() != null && spkBtn.getAudioFile().length() > 0) {
                final MediaPlayer mPlayer = new MediaPlayer();
                try {
                    mPlayer.setDataSource(spkBtn.getAudioFile());
                    mPlayer.prepare();
                    mPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                        @Override
                        public void onCompletion(MediaPlayer mp) {
                            mPlayer.release();
                        }
                    });
                    mPlayer.start();
                } catch (IOException e) {
                    Log.e("playback_debug", "prepare() failed");
                }
            } else if (mCanSpeak) {
                Log.d("speakable_debug", "should say: " + spkBtn.getSentence());
                if (spkBtn.getSentence().isEmpty())
                    tts.speak(getResources().getString(R.string.tts_nosentence), TextToSpeech.QUEUE_FLUSH, null);
                else
                    tts.speak(spkBtn.getSentence(), TextToSpeech.QUEUE_FLUSH, null);
            } else {
                Toast.makeText(this, getResources().getString(R.string.tts_notinitialized), Toast.LENGTH_LONG)
                        .show();
            }
            if (spkBtn.getIsMatrioska() && null != spkBtn.getLevel()) {
                pushLevel(spkBtn.getLevel());
            }
        }
    }
}
From source file:com.application.akscorp.yandextranslator2017.TranslateScreen.java
/**
 * @param data   the text to speak
 * @param locale the language to use for speech synthesis
 */
private void TextToSpeech(String data, Locale locale) {
    try {
        //data = MyUtility.StringNormalize(data);
        String utteranceId = this.hashCode() + "";
        SpeechEngine.setLanguage(locale);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            SpeechEngine.speak(data, TextToSpeech.QUEUE_FLUSH, null, utteranceId);
        } else {
            HashMap<String, String> map = new HashMap<>();
            map.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "MessageId");
            SpeechEngine.speak(data, TextToSpeech.QUEUE_FLUSH, map);
        }
    } catch (Exception e) {
        Logs.SaveLog(context, e);
        Toast.makeText(context, LanguageWork.GetResourceString(context, "forbidden_option"), Toast.LENGTH_LONG)
                .show();
    }
}
From source file:divya.myvision.TessActivity.java
/**
 * onTap is called to capture the first TextBlock under the tap location and return it to
 * the Initializing Activity.
 *
 * @param rawX - the raw position of the tap
 * @param rawY - the raw position of the tap.
 * @return true if the activity is ending.
 */
private boolean onTap(float rawX, float rawY) {
    String ocrText;
    tts.stop();
    ocrText = mGraphicOverlay.getAllText();
    if (ocrText != null && ocrText.length() > 0) {
        // Log.i("DATA_READ", ocrText);
        tts.speak(ocrText, TextToSpeech.QUEUE_FLUSH, null, "DEFAULT");
    } else {
        if (tts.isSpeaking()) {
            tts.stop();
        } else {
            tts.speak("Sorry, no text found", TextToSpeech.QUEUE_FLUSH, null, "DEFAULT");
        }
    }
    return ocrText != null;
}