List of usage examples for android.speech.tts TextToSpeech QUEUE_FLUSH
int QUEUE_FLUSH
To view the source code for android.speech.tts TextToSpeech QUEUE_FLUSH, click the Source Link.
From source file:org.digitalcampus.oppia.activity.CourseActivity.java
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1) @SuppressLint("NewApi") public void onInit(int status) { // check for successful instantiation if (status == TextToSpeech.SUCCESS) { ttsRunning = true;/*from w w w . j a v a2 s . co m*/ ((WidgetFactory) apAdapter.getItem(currentActivityNo)).setReadAloud(true); supportInvalidateOptionsMenu(); HashMap<String, String> params = new HashMap<String, String>(); params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, TAG); myTTS.speak(((WidgetFactory) apAdapter.getItem(currentActivityNo)).getContentToRead(), TextToSpeech.QUEUE_FLUSH, params); myTTS.setOnUtteranceProgressListener(new UtteranceProgressListener() { @Override public void onDone(String utteranceId) { Log.d(TAG, "Finished reading"); CourseActivity.this.ttsRunning = false; myTTS = null; } @Override public void onError(String utteranceId) { } @Override public void onStart(String utteranceId) { } }); } else { // TTS not installed so show message Toast.makeText(this, this.getString(R.string.error_tts_start), Toast.LENGTH_LONG).show(); } }
From source file:com.vyasware.vaani.MainActivity.java
/** Speaks one of three canned greetings chosen uniformly at random. */
private void doGreet() {
    final String[] greetings = { "?, !", "?, ? ", "?. ?! ? ? ?!" };
    // Pick an index in [0, 3) at random.
    final int choice = new Random().nextInt(greetings.length);
    // Interrupt anything currently being spoken and say the greeting.
    tts.speak(greetings[choice], TextToSpeech.QUEUE_FLUSH, null);
}
From source file:com.bellman.bible.service.device.speak.TextToSpeechController.java
/** * flush cached text//from ww w. java 2 s .co m */ private void clearTtsQueue() { Log.d(TAG, "Stop TTS"); // Don't forget to shutdown! if (isSpeaking()) { Log.d(TAG, "Flushing speech"); // flush remaining text mTts.speak(" ", TextToSpeech.QUEUE_FLUSH, null); } mSpeakTextProvider.reset(); isSpeaking = false; }
From source file:com.projecttango.examples.java.pointcloud.PointCloudActivity.java
/**
 * Set up the callback listeners for the Tango service and obtain other parameters required
 * after Tango connection.
 * Listen to updates from the Point Cloud and Tango Events and Pose.
 */
private void startupTango() {
    // Track the device pose relative to where the service started.
    ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<TangoCoordinateFramePair>();
    framePairs.add(new TangoCoordinateFramePair(TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
            TangoPoseData.COORDINATE_FRAME_DEVICE));
    mTango.connectListener(framePairs, new OnTangoUpdateListener() {
        @Override
        public void onPoseAvailable(TangoPoseData pose) {
            // Passing in the pose data to UX library produce exceptions.
            if (mTangoUx != null) {
                mTangoUx.updatePoseStatus(pose.statusCode);
            }
        }

        @Override
        public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
            // We are not using onXyzIjAvailable for this app.
        }

        @Override
        public void onPointCloudAvailable(TangoPointCloudData pointCloud) {
            if (mTangoUx != null) {
                mTangoUx.updatePointCloud(pointCloud);
            }
            mPointCloudManager.updatePointCloud(pointCloud);

            final double currentTimeStamp = pointCloud.timestamp;
            // Elapsed time (ms) since the previous point cloud and since the last spoken alert.
            final double pointCloudFrameDelta = (currentTimeStamp - mPointCloudPreviousTimeStamp)
                    * SECS_TO_MILLISECS;
            final double ttsAlertTimeDelta = (currentTimeStamp - ttsPreviousAlertTimeStamp)
                    * SECS_TO_MILLISECS;
            mPointCloudPreviousTimeStamp = currentTimeStamp;
            final double averageDepth = getAveragedDepth(pointCloud.points, pointCloud.numPoints);

            // Throttle UI refreshes to roughly one per UPDATE_INTERVAL_MS.
            mPointCloudTimeToNextUpdate -= pointCloudFrameDelta;
            if (mPointCloudTimeToNextUpdate < 0.0) {
                mPointCloudTimeToNextUpdate = UPDATE_INTERVAL_MS;
                final String pointCountString = Integer.toString(pointCloud.numPoints);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        mPointCountTextView.setText(pointCountString);
                        mAverageZTextView.setText(FORMAT_THREE_DECIMAL.format(averageDepth));
                    }
                });
            }

            // Alert thresholds: depths in metres, rate limit in milliseconds.
            double MIN_TRACKING_METERS = 0.50;
            double ARM_LENGTH_METERS = 1.42;
            double WAIT_TIME_MILLISECS = 5000.0; // five seconds
            // double MIN_X_METERS = -0.05;
            // double MAX_X_METERS = 0.05
            // double MIN_Y_METERS = -0.5
            // double MAX_Y_METERS = 0.5;
            // Speak a warning when the averaged depth is within arm's length,
            // at most once per WAIT_TIME_MILLISECS and never over an in-progress utterance.
            if (MIN_TRACKING_METERS <= averageDepth && averageDepth <= ARM_LENGTH_METERS
                    && ttsAlertTimeDelta >= WAIT_TIME_MILLISECS) {
                if (!tts.isSpeaking()) {
                    ttsPreviousAlertTimeStamp = currentTimeStamp;
                    String warning = "There is an object ahead of you within arms length.";
                    tts.speak(warning, TextToSpeech.QUEUE_FLUSH, null);
                }
            }

            // Debug output of the averaged lateral position of the cloud.
            float[] averagedXY = getAveragedXY(pointCloud.points, pointCloud.numPoints);
            double averagedX = averagedXY[0];
            double averagedY = averagedXY[1];
            System.out.println("avg (x,y) : " + "(" + averagedX + ", " + averagedY + ")");
        }

        @Override
        public void onTangoEvent(TangoEvent event) {
            if (mTangoUx != null) {
                mTangoUx.updateTangoEvent(event);
            }
        }

        @Override
        public void onFrameAvailable(int cameraId) {
            // We are not using onFrameAvailable for this application.
        }
    });
}
From source file:me.hammarstrom.imagerecognition.activities.MainActivity.java
/**
 * Turns a Cloud Vision batch response into on-screen ScoreViews / face overlays
 * and speaks a summary of detected labels and faces via TextToSpeech.
 *
 * @param response the Vision API response; only the first inner response is read.
 */
private void convertResponseToString(BatchAnnotateImagesResponse response) {
    Log.d(TAG, ":: " + response.getResponses().toString());
    List<FaceAnnotation> faces = response.getResponses().get(0).getFaceAnnotations();
    List<EntityAnnotation> labels = response.getResponses().get(0).getLabelAnnotations();

    // Label string to be populated with data for TextToSpeech
    String label = "";

    if (labels != null && labels.size() > 0) {
        label = "The image may contain ";
        List<Animator> scoreViewAnimations = new ArrayList<>();
        List<Animator> scoreAlphaAnimations = new ArrayList<>();
        List<Animator> showScoreAnimations = new ArrayList<>();

        for (EntityAnnotation l : labels) {
            // Skip low-confidence labels.
            if (l.getScore() < 0.6f) {
                continue;
            }

            // Add label description (ex. laptop, desk, person, etc.)
            label += l.getDescription() + ", ";

            /**
             * Create a new {@link ScoreView} and populate it with label description and score
             */
            ScoreView scoreView = new ScoreView(MainActivity.this);
            int padding = (int) DeviceDimensionsHelper.convertDpToPixel(8, this);
            scoreView.setPadding(padding, padding, padding, padding);
            scoreView.setScore(l.getScore());
            scoreView.setLabelPosition(ScoreView.LABEL_POSITION_RIGHT);
            scoreView.setLabelText(l.getDescription());
            // Start invisible and off-screen left; the animations below slide it in.
            scoreView.setAlpha(0f);
            scoreView.setTranslationX((DeviceDimensionsHelper.getDisplayWidth(this) / 2) * -1);

            // Add ScoreView to result layout
            mScoreResultLayout.addView(scoreView);

            // Create animations to used to show the ScoreView in a nice way
            ObjectAnimator animator = ObjectAnimator.ofFloat(scoreView, "translationX",
                    (DeviceDimensionsHelper.getDisplayWidth(this) / 2) * -1, 0f);
            animator.setInterpolator(new OvershootInterpolator());
            scoreViewAnimations.add(animator);

            ObjectAnimator alphaAnimator = ObjectAnimator.ofFloat(scoreView, "alpha", 0f, 1f);
            scoreAlphaAnimations.add(alphaAnimator);

            // Get the animation to show the actual score from ScoreView object
            showScoreAnimations.addAll(scoreView.getShowScoreAnimationsList());
        }

        // Set reset button visibility to visible
        mButtonReset.setVisibility(View.VISIBLE);

        // Setup and play the animations: slide+fade each ScoreView in sequence,
        // then reveal all scores together.
        AnimatorSet translationSet = new AnimatorSet();
        translationSet.playSequentially(scoreViewAnimations);
        translationSet.setDuration(300);

        AnimatorSet alphaSet = new AnimatorSet();
        alphaSet.playSequentially(scoreAlphaAnimations);
        alphaSet.setDuration(300);

        AnimatorSet showScoreSet = new AnimatorSet();
        showScoreSet.playTogether(showScoreAnimations);

        showLoading(false);

        AnimatorSet set = new AnimatorSet();
        set.play(translationSet).with(alphaSet).before(showScoreSet);
        set.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                super.onAnimationEnd(animation);
                mButtonReset.animate().alpha(1f).start();
            }
        });
        set.start();
    } else {
        // No labels: just show the reset button immediately.
        mButtonReset.setVisibility(View.VISIBLE);
        mButtonReset.setAlpha(1f);
    }

    // Handle detected faces
    String facesFound = "";
    if (faces != null && faces.size() > 0) {
        FaceGraphicOverlay faceGraphicOverlay = new FaceGraphicOverlay(MainActivity.this);
        faceGraphicOverlay.addFaces(faces);
        faceGraphicOverlay.setTag("faceOverlay");
        mCameraPreviewLayout.addView(faceGraphicOverlay);
        facesFound = FaceFoundHelper.getFacesFoundString(this, faces);
    }

    // Add the detected image data to TTS engine:
    // QUEUE_FLUSH interrupts any current speech; the face summary is appended after it.
    mTts.speak(label, TextToSpeech.QUEUE_FLUSH, null);
    mTts.speak(facesFound, TextToSpeech.QUEUE_ADD, null);
}
From source file:in.codehex.arrow.MainActivity.java
/**
 * Wires up the voice-interaction objects after a successful login check:
 * TTS engine, utterance-completion handling, tap-to-speak listener, and
 * location services (Play Services client, location request, GPS check).
 */
private void prepareObjects() {
    if (loginCheck()) {
        name = userPreferences.getString(Config.KEY_PREF_NAME, Config.PREF_DEFAULT_NAME);
        checkTts();
        textToSpeech = new TextToSpeech(this, this);
        textToSpeech.setOnUtteranceProgressListener(new UtteranceProgressListener() {
            @Override
            public void onStart(String utteranceId) {
            }

            @Override
            public void onDone(String utteranceId) {
                // Both the initial prompt and the confirmation prompt are followed
                // by listening for the user's speech input (merged fall-through
                // replaces the original's two identical branches).
                switch (utteranceId) {
                case Config.UTTERANCE_ID_INITIAL:
                case Config.UTTERANCE_ID_CONFIRMATION:
                    promptSpeechInput();
                    break;
                }
            }

            @Override
            public void onError(String utteranceId) {
            }
        });
        mainLayout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Tapping the main layout (re)starts the voice interaction.
                textToSpeech.speak(getString(R.string.prompt_speech_input_initial),
                        TextToSpeech.QUEUE_FLUSH, null, Config.UTTERANCE_ID_INITIAL);
            }
        });
        if (checkPlayServices())
            buildGoogleApiClient();
        createLocationRequest();
        if (!isGPSEnabled(getApplicationContext()))
            showAlertGPS();
    }
}
From source file:me.hammarstrom.imagerecognition.activities.MainActivity.java
/**
 * Reset the camera preview
 */
private void resetPreview() {
    /**
     * TODO
     *
     * Implement animation to fade out/translate ScoreViews
     *
     */
    // Check if TTS still is active, then stop and say that we are resetting.
    if (mTts.isSpeaking()) {
        mTts.stop();
        mTts.speak(getString(R.string.tts_reset), TextToSpeech.QUEUE_FLUSH, null);
    }

    // Fade out the processing layout, then tear down result views once the
    // animation has finished (order matters: views are removed in onAnimationEnd).
    mProcessingLayout.animate().alpha(0f).setDuration(200).setListener(new AnimatorListenerAdapter() {
        @Override
        public void onAnimationEnd(Animator animation) {
            super.onAnimationEnd(animation);
            mProcessingLayout.setVisibility(View.GONE);
            // Restore alpha so the layout is ready for its next fade-in.
            mProcessingLayout.setAlpha(1f);

            // Remove all child views (ScoreViews)
            mScoreResultLayout.removeAllViews();

            // Hide the reset button
            mButtonReset.setAlpha(0f);
            mButtonReset.setVisibility(View.GONE);

            // Start camera preview and set click listener
            mCameraPreviewLayout.setOnClickListener(MainActivity.this);
            mCamera.startPreview();
            // Drop the face overlay added during the previous recognition pass.
            mCameraPreviewLayout.removeView(mCameraPreviewLayout.findViewWithTag("faceOverlay"));
        }
    }).start();
}
From source file:com.microsoft.AzureIntelligentServicesExample.MainActivity.java
/**
 * Speaks the supplied sentence, interrupting any utterance already in progress.
 *
 * @param sent the text to hand to the TTS engine
 */
private void speakOut(String sent) {
    // QUEUE_FLUSH drops anything queued so this sentence is spoken immediately.
    tts.speak(sent, TextToSpeech.QUEUE_FLUSH, null);
}
From source file:com.microsoft.mimickeralarm.mimics.MimicWithForecastFragment.java
/**
 * Speaks {@code str} using the Lollipop (API 21+) speak() overload, which
 * takes the utterance id as a parameter instead of a params bundle.
 *
 * @param str the text to speak
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void ttsGreater21(String str) {
    // Use this fragment instance's identity hash as a unique utterance id.
    String utteranceId = String.valueOf(this.hashCode());
    myTTS.speak(str, TextToSpeech.QUEUE_FLUSH, null, utteranceId);
}
From source file:onion.chat.MainActivity.java
private void inform() { t1 = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() { @Override//from w w w.java 2 s . c o m public void onInit(int status) { if (status != TextToSpeech.ERROR) { t1.setLanguage(Locale.US); } } }); String toSpeak = "Welcome to Its Ur's an Instant Messaging app!"; Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show(); t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null); }