List of usage examples for android.speech.tts TextToSpeech SUCCESS
int SUCCESS — denotes a successful operation; this is the status value passed to TextToSpeech.OnInitListener.onInit(int) when the engine initializes correctly.
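Before the examples taken from real projects, here is a minimal, self-contained sketch of the common pattern: create the TextToSpeech with an OnInitListener and compare the reported status against TextToSpeech.SUCCESS before using the engine. The class and field names (TtsDemoActivity, mTts) are illustrative only and do not come from any of the projects listed below.

import java.util.Locale;

import android.app.Activity;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;
import android.util.Log;

// Illustrative sketch; names are hypothetical, not taken from the projects below.
public class TtsDemoActivity extends Activity implements TextToSpeech.OnInitListener {

    private static final String TAG = "TtsDemoActivity";
    private TextToSpeech mTts;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Initialization is asynchronous; the result is delivered to onInit().
        mTts = new TextToSpeech(this, this);
    }

    @Override
    public void onInit(int status) {
        if (status == TextToSpeech.SUCCESS) {
            // Engine is ready; configure it before speaking.
            int result = mTts.setLanguage(Locale.US);
            if (result == TextToSpeech.LANG_MISSING_DATA
                    || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                Log.e(TAG, "US English is not available on this device.");
            } else {
                mTts.speak("Text to speech is ready.", TextToSpeech.QUEUE_FLUSH, null, "demo-utterance");
            }
        } else {
            // Any status other than SUCCESS means the engine could not be initialized.
            Log.e(TAG, "TextToSpeech initialization failed with status " + status);
        }
    }

    @Override
    protected void onDestroy() {
        // Release the engine when the Activity goes away.
        if (mTts != null) {
            mTts.shutdown();
        }
        super.onDestroy();
    }
}

The examples below show how different open-source projects apply the same SUCCESS check.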
From source file:com.yangtsaosoftware.pebblemessenger.services.MessageProcessingService.java
@Override
public void onInit(int i) {
    if (i == TextToSpeech.SUCCESS) {
        myTTSisOK = true;
    } else {
        myTTSisOK = false;
        myTTS.shutdown();
    }
}
From source file:com.google.fpl.gim.examplegame.MainService.java
@Override
public void onCreate() {
    // The service is being created.
    Utils.logDebug(TAG, "onCreate");

    IntentFilter intentFilter = new IntentFilter();
    intentFilter.addAction(CHOICE_NOTIFICATION_ACTION_1);
    intentFilter.addAction(CHOICE_NOTIFICATION_ACTION_2);
    intentFilter.addAction(CHOICE_NOTIFICATION_ACTION_3);
    registerReceiver(mReceiver, intentFilter);

    mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);

    // Determines the behavior for handling Audio Focus surrender.
    mAudioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() {
        @Override
        public void onAudioFocusChange(int focusChange) {
            if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT
                    || focusChange == AudioManager.AUDIOFOCUS_LOSS) {
                if (mTextToSpeech.isSpeaking()) {
                    mTextToSpeech.setOnUtteranceProgressListener(null);
                    mTextToSpeech.stop();
                }
                if (mMediaPlayer.isPlaying()) {
                    mMediaPlayer.stop();
                }
                // Abandon Audio Focus, if it's requested elsewhere.
                mAudioManager.abandonAudioFocus(mAudioFocusChangeListener);
                // Restart the current moment if AudioFocus was lost. Since AudioFocus is only
                // requested away from this application if this application was using it,
                // only Moments that play sound will restart in this way.
                if (mMission != null) {
                    mMission.restartMoment();
                }
            }
        }
    };

    // Asynchronously prepares the TextToSpeech.
    mTextToSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            if (status == TextToSpeech.SUCCESS) {
                // Check if language is available.
                switch (mTextToSpeech.isLanguageAvailable(DEFAULT_TEXT_TO_SPEECH_LOCALE)) {
                case TextToSpeech.LANG_AVAILABLE:
                case TextToSpeech.LANG_COUNTRY_AVAILABLE:
                case TextToSpeech.LANG_COUNTRY_VAR_AVAILABLE:
                    Utils.logDebug(TAG, "TTS locale supported.");
                    mTextToSpeech.setLanguage(DEFAULT_TEXT_TO_SPEECH_LOCALE);
                    mIsTextToSpeechReady = true;
                    break;
                case TextToSpeech.LANG_MISSING_DATA:
                    Utils.logDebug(TAG, "TTS missing data, ask for install.");
                    Intent installIntent = new Intent();
                    installIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
                    startActivity(installIntent);
                    break;
                default:
                    Utils.logDebug(TAG, "TTS locale not supported.");
                    break;
                }
            }
        }
    });

    mMediaPlayer = new MediaPlayer();
}
From source file:com.bdcorps.videonews.MainActivity.java
@Override
public void onInit(int status) {
    // status can be either TextToSpeech.SUCCESS or TextToSpeech.ERROR
    if (status == TextToSpeech.SUCCESS) {
        // Set preferred language to US English.
        // Note that a language may not be available, and the result will indicate this.
        int result = mTts.setLanguage(Locale.US);
        mTts.setOnUtteranceCompletedListener(this);
        if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
            // Language data is missing or the language is not supported.
            Log.e("404", "Language is not available.");
        }
    } else {
        // Initialization failed.
        Log.e("404", "Could not initialize TextToSpeech.");
        // The engine data may not be installed, so prompt the user to install it.
        Intent installIntent = new Intent();
        installIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
        startActivity(installIntent);
    }
}
From source file:com.example.activitydemo.app.service.GameService.java
@Override
public void onInit(int result) {
    Log.d(TAG, "TTS Engine " + (result == 0 ? "Successful" : "Error"));
    if (result == TextToSpeech.SUCCESS) {
        mTextToSpeechWorks = true;
    }
}
From source file:finalproject.ece558.edu.pdx.ece.brailleblackjack.PlayBlackJackGameFragment.java
/**
 * Check accessibility and set flags. Set up the Android TextToSpeech engine and the
 * Google MessageApi client.
 *
 * The Android Developers website was used to aid in creating the MessageApi client:
 * http://developer.android.com/training/wearables/data-layer/events.html
 *
 * A tutorial from tutorialspoint.com was used to aid with Android TextToSpeech:
 * http://www.tutorialspoint.com/android/android_text_to_speech.htm
 *
 * @param savedInstanceState
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    context = this.getActivity();

    /* Check if Accessibility is on to speak upon certain events */
    am = (AccessibilityManager) context.getSystemService(Context.ACCESSIBILITY_SERVICE);
    isAccessibilityEnabled = am.isEnabled();
    isExploreByTouchEnabled = am.isTouchExplorationEnabled();

    /* Set up the TTS API */
    textToSpeech = new TextToSpeech(context, new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            if (status == TextToSpeech.SUCCESS) {
                int result = textToSpeech.setLanguage(Locale.US);
                if (result == TextToSpeech.LANG_MISSING_DATA
                        || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                    Log.e("error", "Language chosen is not supported");
                } else {
                    if (isAccessibilityEnabled) {
                        if (!first_draw_spoken) {
                            String speak = "You drew " + player_left_card.getCardDescription()
                                    + " and " + player_right_card.getCardDescription()
                                    + "\n Dealer drew " + dealer_right_card.getCardDescription();
                            convertTextToSpeech(speak);
                            first_draw_spoken = true;
                        }
                    }
                }
            } else {
                Log.e("error", "Initialization Failed!");
            }
        }
    });

    Log.d(TAG, "Attempting to connect to Google Api Client");
    mGoogleApiClient = new GoogleApiClient.Builder(context)
            .addApi(Wearable.API)
            .addConnectionCallbacks(this)
            .addOnConnectionFailedListener(this)
            .build();
    Log.d(TAG, "Connected to Google Api Client");

    // Initialize state flags
    button_hit_state = true;
    button_stand_state = true;
    button_hint_state = true;
    button_start_over_state = true;
    first_draw_spoken = false;
}
From source file:com.yangtsaosoftware.pebblemessenger.activities.SetupFragment.java
@Override
public void onInit(int i) {
    if (i == TextToSpeech.SUCCESS) {
        SpannableStringBuilder ssb = new SpannableStringBuilder();
        ssb.append(textInfo.getText());
        ssb.append(myTTS.getDefaultEngine() + " ");
        ssb.append(greenText(R.string.setup_tts_default_locale));
        ssb.append(myTTS.getLanguage().getLanguage());
        ssb.append('\n');
        textInfo.setText(ssb);
        svMyview.fullScroll(View.FOCUS_DOWN);
    } else {
        myTTS.shutdown();
        myTTS = null;
    }
}
From source file:com.rnd.snapsplit.view.OcrCaptureFragment.java
/**
 * Initializes the UI and creates the detector pipeline.
 */
// @Override
// public void onActivityResult(int requestCode, int resultCode, Intent data) {
//     super.onActivityResult(requestCode, resultCode, data);
//
//     if (requestCode == TAKE_PHOTO_CODE && resultCode == RESULT_OK) {
//         Toast.makeText(getContext(), "pic saved", Toast.LENGTH_LONG).show();
//         Log.d("CameraDemo", "Pic saved");
//     }
// }
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    final View view = inflater.inflate(R.layout.view_ocr_capture, container, false);
    final Activity activity = getActivity();
    final Context context = getContext();

    ((Toolbar) activity.findViewById(R.id.tool_bar_hamburger))
            .setBackgroundColor(ContextCompat.getColor(context, android.R.color.transparent));

    final String dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES)
            + "/picFolder/";
    File newdir = new File(dir);
    newdir.mkdirs();

    mPreview = (CameraSourcePreview) view.findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) view.findViewById(R.id.graphicOverlay);

    StrictMode.VmPolicy.Builder builder = new StrictMode.VmPolicy.Builder();
    StrictMode.setVmPolicy(builder.build());

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    // createNewThread();
    // t.start();

    final ImageView upArrow = (ImageView) view.findViewById(R.id.arrow_up);
    upArrow.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (rotationAngle == 0) { // arrow up
                // mCameraSource.takePicture(null, mPicture);
                // mGraphicOverlay.clear();
                // mGraphicOverlay.amountItem = null;
                onPause();
                // shouldContinue = false;
                // mCamera.takePicture(null, null, mPicture);
                File pictureFile = getOutputMediaFile();
                if (pictureFile == null) {
                    return;
                }
                try {
                    FileOutputStream fos = new FileOutputStream(pictureFile);
                    Bitmap receiptBitmap = byteStreamToBitmap(mCameraSource.mostRecentBitmap);
                    receiptBitmap.compress(Bitmap.CompressFormat.JPEG, 80, fos);
                    picPath = pictureFile.getAbsolutePath();
                    // fos.write(mCameraSource.mostRecentBitmap);
                    fos.close();
                } catch (FileNotFoundException e) {
                } catch (IOException e) {
                }

                upArrow.animate().rotation(180).setDuration(500).start();

                TextView amount = (TextView) view.findViewById(R.id.text_amount_value);
                if (mGraphicOverlay.amountItem == null) {
                    amount.setText("0.00");
                } else {
                    amount.setText(String.format("%.2f", mGraphicOverlay.amountItemAfterFormat));
                }
                TextView desc = (TextView) view.findViewById(R.id.text_name_value);
                desc.setText(mGraphicOverlay.description);

                RelativeLayout box = (RelativeLayout) view.findViewById(R.id.recognition_box);
                box.setVisibility(View.VISIBLE);
                Animation slide_up = AnimationUtils.loadAnimation(activity.getApplicationContext(),
                        R.anim.slide_up);
                box.startAnimation(slide_up);
                rotationAngle = 180;
            } else {
                // t.interrupt();
                // t = null;
                RelativeLayout box = (RelativeLayout) view.findViewById(R.id.recognition_box);
                Animation slide_down = AnimationUtils.loadAnimation(activity.getApplicationContext(),
                        R.anim.slide_down);
                upArrow.animate().rotation(0).setDuration(500).start();
                box.startAnimation(slide_down);
                box.setVisibility(View.INVISIBLE);
                // shouldContinue = true;
                mGraphicOverlay.amountItem = null;
                mGraphicOverlay.amountItemAfterFormat = 0f;
                mGraphicOverlay.description = "";
                onResume();
                // createNewThread();
                // t.start();
                rotationAngle = 0;
            }
        }
    });

    ImageView addButton = (ImageView) view.findViewById(R.id.add_icon);
    addButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // takePicture();
            EditText description = (EditText) view.findViewById(R.id.text_name_value);
            EditText amount = (EditText) view.findViewById(R.id.text_amount_value);
            float floatAmount = Float.parseFloat(amount.getText().toString());
            Summary t = new Summary(description.getText().toString(), floatAmount);
            Bundle bundle = new Bundle();
            bundle.putSerializable("splitTransaction", t);
            // ByteArrayOutputStream stream = new ByteArrayOutputStream();
            // mCameraSource.mostRecentBitmap.compress(Bitmap.CompressFormat.PNG, 80, stream);
            // byte[] byteArray = stream.toByteArray();
            // Bitmap receiptBitmap = byteStreamToBitmap(mCameraSource.mostRecentBitmap);
            // bundle.putParcelable("receiptPicture", receiptBitmap);
            bundle.putString("receiptPicture", picPath);
            FriendsSelectionFragment fragment = new FriendsSelectionFragment();
            fragment.setArguments(bundle);
            ((Toolbar) activity.findViewById(R.id.tool_bar_hamburger)).setVisibility(View.INVISIBLE);
            getActivity().getSupportFragmentManager().beginTransaction()
                    .add(R.id.fragment_holder, fragment, "FriendsSelectionFragment")
                    .addToBackStack(null)
                    .commit();
        }
    });

    // Check for the camera permission before accessing the camera. If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(context, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(context, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(context, new ScaleListener());

    // Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom",
    //         Snackbar.LENGTH_LONG)
    //         .show();

    // Set up the Text To Speech engine.
    TextToSpeech.OnInitListener listener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("OnInitListener", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("OnInitListener", "Error starting the text to speech engine.");
            }
        }
    };
    tts = new TextToSpeech(activity.getApplicationContext(), listener);

    return view;
}
From source file:com.ola.insta.BookingAcivity.java
@Override
public void onInit(int status) {
    if (status == TextToSpeech.SUCCESS) {
        int result = mTextToSpeech.setLanguage(Locale.US);
        // mTextToSpeech.setPitch(5); // set pitch level
        mTextToSpeech.setSpeechRate(0.8f); // set speech speed rate
        if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
            Log.e("TextToSpeech", "Language is not supported");
        }
        speakOut("Tap on mike and say; CHALLO NEEKLLO!");
    } else {
        Log.e("TextToSpeech", "Initialization Failed");
    }
}
From source file:at.the.gogo.windig.activities.WindigActivity.java
@Override
public void onInit(final int status) {
    if (status == TextToSpeech.SUCCESS) {
        // wanted & installed
        CoreInfoHolder.getInstance().setSpeakit(wantToUseTTS);
    }
    // if (CoreInfoHolder.getInstance().isSpeakit()) {
    //     SpeakItOut.speak(getText(R.string.tts_welcome).toString());
    // }
}
From source file:root.magicword.MagicWord.java
@Override
public void onInit(int status) {
    if (status == TextToSpeech.SUCCESS) {
        speak.setEnabled(true);
    } else {
        // failed to init
        finish();
    }
}