Example usage for android.speech.tts TextToSpeech QUEUE_ADD

List of usage examples for android.speech.tts TextToSpeech QUEUE_ADD

Introduction

On this page you can find example usage for android.speech.tts TextToSpeech QUEUE_ADD.

Prototype

int QUEUE_ADD

Use the link below to view the source code for android.speech.tts TextToSpeech QUEUE_ADD.

Click Source Link

Document

Queue mode where the new entry is added at the end of the playback queue.

Usage

From source file:com.abid_mujtaba.fetchheaders.MainActivity.java

private void speak(Account account) // Method for applying TextToSpeech to an account
{
    // Engine may not have finished initializing; skip silently rather than crash.
    if (mTTS != null) {
        // NOTE(review): the surrounding periods presumably make the engine pause
        // around the account name — confirm against the TTS engine in use.
        String msg = String.format(".Account %s.", account.name());

        // QUEUE_ADD appends to the playback queue, so successive accounts are
        // read out in order instead of interrupting each other.
        mTTS.speak(msg, TextToSpeech.QUEUE_ADD, null);
    }
}

From source file:com.example.activitydemo.app.service.GameService.java

/**
 * Queues the given text for speech playback.
 *
 * Does nothing unless the TextToSpeech engine both initialized successfully
 * and is currently enabled. QUEUE_ADD appends to the end of the playback
 * queue, so earlier utterances are not interrupted.
 *
 * @param text the text to be spoken aloud
 */
public void sayString(String text) {
    if (!mTextToSpeechWorks || !mTextToSpeechEnabled) {
        return; // engine unavailable or muted — silently drop the request
    }
    mTextToSpeech.speak(text, TextToSpeech.QUEUE_ADD, null);
}

From source file:net.bible.service.device.speak.TextToSpeechController.java

/**
 * Queues a chunk of text for playback, tagging it with a unique utterance id
 * so completion callbacks can be correlated with this request.
 */
private void speakString(String text) {
    // Guard: the engine may not exist yet (or was shut down).
    if (mTts == null) {
        Log.e(TAG, "Error: attempt to speak when tts is null.  Text:" + text);
        return;
    }

    // An utterance id must be supplied or OnUtteranceCompleted is never invoked.
    String utteranceId = UTTERANCE_PREFIX + uniqueUtteranceNo++;
    HashMap<String, String> ttsParams = new HashMap<String, String>();
    ttsParams.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, utteranceId);

    Log.d(TAG, "do speak substring of length:" + text.length() + " utteranceId:" + utteranceId);
    // QUEUE_ADD appends; a flush is handled elsewhere by clearing the text queue.
    mTts.speak(text, TextToSpeech.QUEUE_ADD, ttsParams);

    mSpeakTiming.started(utteranceId, text.length());
    isSpeaking = true;
}

From source file:com.bellman.bible.service.device.speak.TextToSpeechController.java

/**
 * Hands one chunk of text to the TTS engine, attaching a fresh utterance id
 * so that the completion listener can identify it later.
 */
private void speakString(String text) {
    if (mTts == null) {
        // Engine not available — log and bail instead of throwing an NPE.
        Log.e(TAG, "Error: attempt to speak when tts is null.  Text:" + text);
        return;
    }

    // Without an utterance id, OnUtteranceCompleted is never called back.
    String newUtteranceId = UTTERANCE_PREFIX + uniqueUtteranceNo++;
    HashMap<String, String> speakParams = new HashMap<String, String>();
    speakParams.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, newUtteranceId);

    Log.d(TAG, "do speak substring of length:" + text.length() + " utteranceId:" + newUtteranceId);
    // Append to the queue; flushing is implemented elsewhere by emptying the text queue.
    mTts.speak(text, TextToSpeech.QUEUE_ADD, speakParams);

    mSpeakTiming.started(newUtteranceId, text.length());
    isSpeaking = true;
}

From source file:com.med.fast.ocr.OcrCaptureActivity.java

/**
 * onTap is called to speak the tapped TextBlock, if any, out loud.
 *
 * @param rawX - the raw position of the tap
 * @param rawY - the raw position of the tap.
 * @return true if the tap was on a TextBlock
 *///from w  w  w  .j av a2s . c  om
/**
 * Speaks the tapped TextBlock, if any, out loud.
 *
 * @param rawX the raw x position of the tap
 * @param rawY the raw y position of the tap
 * @return true if the tap landed on a TextBlock
 */
private boolean onTap(float rawX, float rawY) {
    OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
    if (graphic == null) {
        // Tap missed every detected graphic entirely.
        Log.d(TAG, "no text detected");
        return false;
    }

    TextBlock text = graphic.getTextBlock();
    if (text != null && text.getValue() != null) {
        Log.d(TAG, "text data is being spoken! " + text.getValue());
        // QUEUE_ADD appends this utterance behind anything already speaking.
        tts.speak(text.getValue(), TextToSpeech.QUEUE_ADD, null, "DEFAULT");
    } else {
        Log.d(TAG, "text data is null");
    }
    return text != null;
}

From source file:ocr.OcrCaptureActivity.java

/**
 * onTap is called to speak the tapped TextBlock, if any, out loud.
 *
 * @param rawX - the raw position of the tap
 * @param rawY - the raw position of the tap.
 * @return true if the tap was on a TextBlock
 *//*from  w  w  w.  j  av a  2 s  . c  om*/
/**
 * Reads the tapped TextBlock aloud, if the tap hit one.
 *
 * @param rawX the raw x position of the tap
 * @param rawY the raw y position of the tap
 * @return true if the tap was on a TextBlock
 */
private boolean onTap(float rawX, float rawY) {
    OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
    if (graphic == null) {
        Log.d(TAG, "no text detected"); // tap hit empty space
        return false;
    }

    TextBlock text = graphic.getTextBlock();
    if (text == null || text.getValue() == null) {
        // Graphic exists but carries no usable text payload.
        Log.d(TAG, "text data is null");
        return text != null;
    }

    Log.d(TAG, "text data is being spoken! " + text.getValue());
    // Append behind any in-progress speech rather than interrupting it.
    tts.speak(text.getValue(), TextToSpeech.QUEUE_ADD, null, "DEFAULT");
    return true;
}

From source file:me.hammarstrom.imagerecognition.activities.MainActivity.java

/**
 * Turns a Cloud Vision batch response into on-screen score views, entry
 * animations, a face overlay, and two spoken summaries (labels then faces).
 *
 * NOTE(review): assumes the batch contains at least one response —
 * get(0) would throw on an empty list; confirm against the caller.
 */
private void convertResponseToString(BatchAnnotateImagesResponse response) {
    Log.d(TAG, ":: " + response.getResponses().toString());
    List<FaceAnnotation> faces = response.getResponses().get(0).getFaceAnnotations();
    List<EntityAnnotation> labels = response.getResponses().get(0).getLabelAnnotations();

    // Label string to be populated with data for TextToSpeech
    String label = "";
    if (labels != null && labels.size() > 0) {
        label = "The image may contain ";
        // Three parallel animation lists: slide-in, fade-in, and score reveal.
        List<Animator> scoreViewAnimations = new ArrayList<>();
        List<Animator> scoreAlphaAnimations = new ArrayList<>();
        List<Animator> showScoreAnimations = new ArrayList<>();

        for (EntityAnnotation l : labels) {
            // Skip low-confidence labels entirely.
            if (l.getScore() < 0.6f) {
                continue;
            }

            // Add label description (ex. laptop, desk, person, etc.)
            label += l.getDescription() + ", ";

            /**
             * Create a new {@link ScoreView} and populate it with label description and score
             */
            ScoreView scoreView = new ScoreView(MainActivity.this);
            int padding = (int) DeviceDimensionsHelper.convertDpToPixel(8, this);
            scoreView.setPadding(padding, padding, padding, padding);
            scoreView.setScore(l.getScore());
            scoreView.setLabelPosition(ScoreView.LABEL_POSITION_RIGHT);
            scoreView.setLabelText(l.getDescription());
            // Start invisible and off-screen left so the animations can bring it in.
            scoreView.setAlpha(0f);
            scoreView.setTranslationX((DeviceDimensionsHelper.getDisplayWidth(this) / 2) * -1);

            // Add ScoreView to result layout
            mScoreResultLayout.addView(scoreView);

            // Create animations to used to show the ScoreView in a nice way
            ObjectAnimator animator = ObjectAnimator.ofFloat(scoreView, "translationX",
                    (DeviceDimensionsHelper.getDisplayWidth(this) / 2) * -1, 0f);
            animator.setInterpolator(new OvershootInterpolator());
            scoreViewAnimations.add(animator);

            ObjectAnimator alphaAnimator = ObjectAnimator.ofFloat(scoreView, "alpha", 0f, 1f);
            scoreAlphaAnimations.add(alphaAnimator);

            // Get the animation to show the actual score from ScoreView object
            showScoreAnimations.addAll(scoreView.getShowScoreAnimationsList());
        }

        // Set reset button visibility to visible
        mButtonReset.setVisibility(View.VISIBLE);

        // Setup and play the animations: views slide in one after another...
        AnimatorSet translationSet = new AnimatorSet();
        translationSet.playSequentially(scoreViewAnimations);
        translationSet.setDuration(300);

        // ...fading in on the same schedule...
        AnimatorSet alphaSet = new AnimatorSet();
        alphaSet.playSequentially(scoreAlphaAnimations);
        alphaSet.setDuration(300);

        // ...then all score reveals run together at the end.
        AnimatorSet showScoreSet = new AnimatorSet();
        showScoreSet.playTogether(showScoreAnimations);

        showLoading(false);

        AnimatorSet set = new AnimatorSet();
        set.play(translationSet).with(alphaSet).before(showScoreSet);
        set.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                super.onAnimationEnd(animation);
                // Fade the reset button in only once everything has settled.
                mButtonReset.animate().alpha(1f).start();
            }
        });
        set.start();
    } else {
        // No labels: show the reset button immediately, fully opaque.
        mButtonReset.setVisibility(View.VISIBLE);
        mButtonReset.setAlpha(1f);
    }

    // Handle detected faces
    String facesFound = "";
    if (faces != null && faces.size() > 0) {
        // Draw face boxes on top of the camera preview.
        FaceGraphicOverlay faceGraphicOverlay = new FaceGraphicOverlay(MainActivity.this);
        faceGraphicOverlay.addFaces(faces);
        faceGraphicOverlay.setTag("faceOverlay");
        mCameraPreviewLayout.addView(faceGraphicOverlay);

        facesFound = FaceFoundHelper.getFacesFoundString(this, faces);
    }

    // Add the detected image data to TTS engine: FLUSH drops any queued
    // speech before the labels, then the faces summary is appended after.
    mTts.speak(label, TextToSpeech.QUEUE_FLUSH, null);
    mTts.speak(facesFound, TextToSpeech.QUEUE_ADD, null);
}

From source file:argusui.com.argus.OcrCaptureActivity.java

/**
 * onTap is called to capture the first TextBlock under the tap location and return it to
 * the Initializing Activity.//from ww w  . ja  v a 2 s.com
 *
 * @param rawX - the raw position of the tap
 * @param rawY - the raw position of the tap.
 * @return true if the activity is ending.
 */
/**
 * Captures the first TextBlock under the tap location and speaks it.
 *
 * @param rawX the raw x position of the tap
 * @param rawY the raw y position of the tap
 * @return true if the tap landed on a TextBlock
 */
private boolean onTap(float rawX, float rawY) {
    OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
    if (graphic == null) {
        // Nothing under the finger at all.
        Log.d(TAG, "no text detected");
        return false;
    }

    TextBlock text = graphic.getTextBlock();
    if (text != null && text.getValue() != null) {
        Log.d(TAG, "text data is being spoken! " + text.getValue());
        // Queue behind any utterance already playing.
        tts.speak(text.getValue(), TextToSpeech.QUEUE_ADD, null, "DEFAULT");
    } else {
        Log.d(TAG, "text data is null");
    }
    return text != null;
}

From source file:spinc.spmmvp.google_vision.ocrRead_Complete.OcrCaptureActivity.java

/**
 * onTap is called to speak the tapped TextBlock, if any, out loud.
 *
 * @param rawX - the raw position of the tap
 * @param rawY - the raw position of the tap.
 * @return true if the tap was on a TextBlock
 *//*w w w .  j a  va2s .  c  om*/
/**
 * Speaks the tapped TextBlock, if any, out loud.
 *
 * @param rawX the raw x position of the tap
 * @param rawY the raw y position of the tap
 * @return true if the tap was on a TextBlock
 */
private boolean onTap(float rawX, float rawY) {
    OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
    if (graphic == null) {
        Log.d(TAG, "no text detected"); // no graphic under the tap
        return false;
    }

    TextBlock text = graphic.getTextBlock();
    if (text == null || text.getValue() == null) {
        // Graphic found, but it holds no readable text.
        Log.d(TAG, "text data is null");
        return text != null;
    }

    Log.d(TAG, "text data is being spoken! " + text.getValue());
    // QUEUE_ADD keeps earlier speech intact and appends this one.
    tts.speak(text.getValue(), TextToSpeech.QUEUE_ADD, null, "DEFAULT");
    return true;
}

From source file:com.google.android.gms.samples.vision.ocrreader.OcrCaptureActivity.java

/**
 * onTap is called to speak the tapped TextBlock, if any, out loud.
 *
 * @param rawX - the raw position of the tap
 * @param rawY - the raw position of the tap.
 * @return true if the tap was on a TextBlock
 *///from ww  w .j  a  v a  2  s  . c  o  m
/**
 * Speaks the tapped TextBlock out loud and forwards its text to the
 * MoneyDisplay activity.
 *
 * @param rawX the raw x position of the tap
 * @param rawY the raw y position of the tap
 * @return true if the tap was on a TextBlock
 */
private boolean onTap(float rawX, float rawY) {
    OcrGraphic graphic = mGraphicOverlay.getGraphicAtLocation(rawX, rawY);
    if (graphic == null) {
        Log.d(TAG, "no text detected");
        return false;
    }

    TextBlock text = graphic.getTextBlock();
    if (text == null || text.getValue() == null) {
        Log.d(TAG, "text data is null");
        return text != null;
    }

    Log.d(TAG, "text data is being spoken! " + text.getValue());
    // Append the utterance, then hand the recognized text to MoneyDisplay.
    tts.speak(text.getValue(), TextToSpeech.QUEUE_ADD, null, "DEFAULT");

    Intent intent = new Intent(this, MoneyDisplay.class);

    String message = text.getValue();
    intent.putExtra(EXTRA_MESSAGE, message);
    startActivity(intent);
    return true;
}