Example usage for android.speech.tts TextToSpeech QUEUE_FLUSH

List of usage examples for android.speech.tts TextToSpeech QUEUE_FLUSH

Introduction

In this page you can find the example usage for android.speech.tts TextToSpeech QUEUE_FLUSH.

Prototype

int QUEUE_FLUSH

To view the source code for android.speech.tts TextToSpeech QUEUE_FLUSH, use the source link below.

Click Source Link

Document

Queue mode where all entries in the playback queue (media to be played and text to be synthesized) are dropped and replaced by the new entry.

Usage

From source file:com.theultimatelabs.scale.ScaleActivity.java

/**
 * Handles the result of the speech-recognition activity: matches the spoken
 * phrase against the known density/volume/weight tables and updates the
 * unit-conversion ratio and label accordingly.
 */
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    Log.v(TAG, "GOT SPEECH RESULT " + resultCode + " req: " + requestCode);

    if (resultCode == RESULT_OK) {
        // All candidate transcriptions returned by the recognizer.
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);

        Log.i(TAG, "Check for density");
        StringBuilder densityName = new StringBuilder();
        double density = findMatch(matches, mDensitiesJson, densityName);

        Log.i(TAG, "Check for volume");
        StringBuilder volumeName = new StringBuilder();
        double volume = findMatch(matches, mVolumesJson, volumeName);

        Log.i(TAG, "Check for weight");
        StringBuilder weightName = new StringBuilder();
        double weight = findMatch(matches, mWeightsJson, weightName);

        if (density != 0 && volume != 0) {
            // "volume of substance" form, e.g. "cup of water": ratio converts
            // grams to the requested unit (1000 g/kg divided out).
            mUnitsRatio = 1000.0 / density / volume;
            mUnitsText = String.format("%s of %s", volumeName, densityName);
        } else if (weight != 0) {
            // Plain weight unit.
            mUnitsRatio = 1.0 / weight;
            mUnitsText = String.format("%s", weightName);
        } else {
            Toast.makeText(this, "Does not compute", Toast.LENGTH_LONG).show();
            mTts.speak("Does not compute", TextToSpeech.QUEUE_FLUSH, null);
        }

        // NOTE(review): when no match was found, the previous mUnitsText /
        // mUnitsRatio values are still re-persisted below — confirm that
        // keeping the stale units is the intended behavior.
        Editor settingsEditor = mSettings.edit();
        mUnitsView.setText(mUnitsText);
        settingsEditor.putString("unitsText", mUnitsText);
        settingsEditor.putFloat("unitsRatio", (float) mUnitsRatio);
        settingsEditor.commit();

    }

    super.onActivityResult(requestCode, resultCode, data);
    // startActivity(new Intent(Intent.ACTION_VIEW,
    // Uri.parse("http://www.youtube.com/watch?v=2qBgMmRMpOo")));
}

From source file:com.example.app_2.activities.ImageGridActivity.java

/**
 * Speaks {@code text} immediately, discarding anything already queued in the
 * TTS engine (QUEUE_FLUSH).
 *
 * @param text the text to synthesize
 */
public void speakOut(String text) {
    tts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
}

From source file:org.tlhInganHol.android.klingonassistant.EntryActivity.java

/**
 * Handles the options-menu actions: switching the entry into a floating
 * window, or speaking the entry name via TTS.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    if (item.getItemId() == R.id.float_mode) {
        // Minimize the app and cause it to "float".
        Log.d(TAG, "Show floating window.");
        StandOutWindow.show(this, FloatingWindow.class, StandOutWindow.DEFAULT_ID);
        String query;
        if (mParentQuery != null && !mParentQuery.equals("") && mParentQuery.indexOf('*') == -1) {
            // If we have the parent query, it overrides this entry.
            query = mParentQuery;
        } else {
            // Otherwise, just use this entry's name.
            query = mEntryName;
        }
        // Strip any ":pos" style suffix from the query before forwarding it.
        int colonLoc = query.indexOf(':');
        if (colonLoc != -1) {
            query = query.substring(0, colonLoc);
        }
        if (!query.equals("")) {
            // If we have a non-empty query, pass it along.
            Bundle data = new Bundle();
            data.putString("query", query);
            StandOutWindow.sendData(getBaseContext(), FloatingWindow.class, StandOutWindow.DEFAULT_ID,
                    DATA_CHANGED_QUERY, data, FloatingWindow.class, StandOutWindow.DEFAULT_ID);
        }

        // Broadcast the kill order to finish all non-floating activities.
        Log.d(TAG, "Broadcast kill order to non-floating window.");
        Intent intent = new Intent(ACTION_KILL);
        intent.setType(KILL_TYPE);
        sendBroadcast(intent);
        return true;
    } else if (item.getItemId() == R.id.speak) {
        // TTS: speak the entry name, flushing anything already queued.
        if (mEntryName != null) {
            // Log.d(TAG, "Speaking");
            // Toast.makeText(getBaseContext(), mEntryName, Toast.LENGTH_LONG).show();
            mTts.speak(mEntryName, TextToSpeech.QUEUE_FLUSH, null);
        }
    }
    return super.onOptionsItemSelected(item);
}

From source file:com.example.michel.facetrack.FaceTrackerActivity.java

/**
 * Advances the voice-guided photo-capture state machine by one step based on
 * the latest speech/vision result string.
 *
 * States: START (interpret intention) -> S_CONFIRMATION / P_CONFIRMATION
 * (selfie / portrait framing feedback) -> REQUEST_COMMENT -> ADD_COMMENT ->
 * exit.
 *
 * NOTE(review): this method blocks its calling thread in several busy-wait
 * loops on {@code sixSecondFlag} (see below) — confirm it is never invoked on
 * the UI thread.
 */
private void updateState(String response) {
    String toSpeak;

    if (response.isEmpty()) {
        // Nothing recognized: re-prompt and restart speech-to-text.
        toSpeak = "Please say your intention.";
        mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        waitStartSTT(4000);
    }

    switch (state) {
    case START:
        try {
            // Send the utterance to the NLU backend to extract the intent.
            PostNLU.Intention intention = PostNLU.post(response);

            if (intention.intent == PostNLU.Intent.TAKE && intention.photoType == PostNLU.PhotoType.SELFIE) {
                state = State.S_CONFIRMATION;
                toSpeak = "Hold the camera at eye level and arms length away.";
                mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
                // NOTE(review): spawns a thread solely to set sixSecondFlag
                // after a delay, then spin-waits on it below — this burns CPU
                // and blocks the caller; a plain sleep or Handler.postDelayed
                // would be safer. Left untouched pending thread-model review.
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        try {
                            sleep(5000);
                            sixSecondFlag = true;
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                };
                thread.start();
                while (sixSecondFlag != true) {

                }
                sixSecondFlag = false;
            } else {
                // Non-selfie photo: portrait-style framing instructions.
                toSpeak = "Hold the camera at eye level.";
                mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
                state = State.P_CONFIRMATION;
                // Same delayed-flag + busy-wait pattern as above (4 s here).
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        try {
                            sleep(4000);
                            sixSecondFlag = true;
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                };
                thread.start();
                while (sixSecondFlag != true) {

                }
                sixSecondFlag = false;
            }
        } catch (IOException e) {
            e.printStackTrace();
            toSpeak = "Error interpreting what you said. Please say it again.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
            waitStartSTT(4000);
        }
        break;
    case S_CONFIRMATION:
        // Selfie framing feedback. Note the instructions are mirrored
        // relative to P_CONFIRMATION (front camera flips left/right).
        if (response.equals("tiltr")) {
            toSpeak = "Tilt camera slightly to the left.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (response.equals("tiltl")) {
            toSpeak = "Tilt camera slightly to the right.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (response.equals("close")) {
            toSpeak = "Move camera slightly farther away from yourself.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (response.equals("far")) {
            toSpeak = "Move camera slightly closer towards yourself.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (response.equals("good")) {
            // Framing accepted: capture, upload, then ask for a comment.
            state = State.REQUEST_COMMENT;
            mCameraSource.takePicture(new CameraSource.ShutterCallback() {
                @Override
                public void onShutter() {

                }
            }, new CameraSource.PictureCallback() {
                @Override
                public void onPictureTaken(byte[] bytes) {
                    String file_timestamp = Long.toString(System.currentTimeMillis());
                    Log.e("File: ", Environment.getExternalStorageDirectory() + "/" + file_timestamp + ".jpg");
                    final File file = new File(
                            Environment.getExternalStorageDirectory() + "/" + file_timestamp + ".jpg");
                    try {
                        save(bytes, file);
                        Toast.makeText(FaceTrackerActivity.this, "Saved to "
                                + Environment.getExternalStorageDirectory() + "/" + file_timestamp + ".jpg",
                                Toast.LENGTH_SHORT).show();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }

                // Writes the JPEG bytes to disk, then uploads to Azure and
                // queries the Emotion API with the resulting blob URL.
                private void save(byte[] bytes, final File file) throws IOException {
                    OutputStream output = null;
                    try {
                        output = new FileOutputStream(file);
                        output.write(bytes);
                    } finally {
                        if (null != output) {
                            output.close();
                        }
                    }
                    sendPhotoToAzure(file); // Sending a blob (photo) to the Azure Storage
                    String photo_url = "https://blindspot.blob.core.windows.net/image/" + file.getName();
                    Log.e("Photo_url : ", photo_url);
                    Float happiness = getHappiness(photo_url); // Call the Microsoft's Emotion API using the photo url
                    Log.e("Happiness: ", Float.toString(happiness));
                }
            });
            toSpeak = "Picture taken. Do you want to add a comment?";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
            waitStartSTT(4000);
        }
        break;
    case P_CONFIRMATION:
        // Portrait (rear-camera) framing feedback — directions un-mirrored.
        if (response.equals("tiltr")) {
            toSpeak = "Tilt camera slightly to the right.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (response.equals("tiltl")) {
            toSpeak = "Tilt camera slightly to the left.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (response.equals("close")) {
            toSpeak = "Move camera slightly closer towards yourself.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (response.equals("far")) {
            toSpeak = "Move camera slightly farther away from yourself.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (response.equals("good")) {
            state = State.REQUEST_COMMENT;
            toSpeak = "Picture taken. Do you want to add a comment?";
            // NOTE(review): this capture callback duplicates the one in
            // S_CONFIRMATION verbatim — candidate for extraction.
            mCameraSource.takePicture(new CameraSource.ShutterCallback() {
                @Override
                public void onShutter() {

                }
            }, new CameraSource.PictureCallback() {
                @Override
                public void onPictureTaken(byte[] bytes) {
                    String file_timestamp = Long.toString(System.currentTimeMillis());
                    Log.e("File: ", Environment.getExternalStorageDirectory() + "/" + file_timestamp + ".jpg");
                    final File file = new File(
                            Environment.getExternalStorageDirectory() + "/" + file_timestamp + ".jpg");
                    try {
                        save(bytes, file);
                        Toast.makeText(FaceTrackerActivity.this, "Saved to "
                                + Environment.getExternalStorageDirectory() + "/" + file_timestamp + ".jpg",
                                Toast.LENGTH_SHORT).show();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }

                private void save(byte[] bytes, final File file) throws IOException {
                    OutputStream output = null;
                    try {
                        output = new FileOutputStream(file);
                        output.write(bytes);
                    } finally {
                        if (null != output) {
                            output.close();
                        }
                    }
                    sendPhotoToAzure(file); // Sending a blob (photo) to the Azure Storage
                    String photo_url = "https://blindspot.blob.core.windows.net/image/" + file.getName();
                    Log.e("Photo_url : ", photo_url);
                    Float happiness = getHappiness(photo_url); // Call the Microsoft's Emotion API using the photo url
                    Log.e("Happiness: ", Float.toString(happiness));
                }
            });
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
            waitStartSTT(4000);
        }
        break;
    case REQUEST_COMMENT:
        if (response.equalsIgnoreCase("yes")) {
            state = State.ADD_COMMENT;
            toSpeak = "Record comment now.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
            waitStartSTT(2000);
        } else if (response.equalsIgnoreCase("no")) {
            toSpeak = "Storage complete. Goodbye.";
            // exitListener terminates the app once the farewell is spoken.
            mTTS.setOnUtteranceProgressListener(exitListener);
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else {
            toSpeak = "Error interpreting what you said. Please say it again.";
            mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
            waitStartSTT(4000);
        }
        break;
    case ADD_COMMENT:
        toSpeak = "Storage complete. Goodbye";
        mTTS.setOnUtteranceProgressListener(exitListener);
        mTTS.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        // Same delayed-flag + busy-wait pattern; gives TTS ~2 s to finish
        // before the hard exit below.
        Thread thread = new Thread() {
            @Override
            public void run() {
                try {
                    sleep(2000);
                    sixSecondFlag = true;
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        thread.start();
        while (sixSecondFlag != true) {

        }
        System.exit(0);
        break;
    case DONE:
        // Terminal state: nothing to do.
        break;
    default:
        //should not be here
    }
}

From source file:onion.chat.MainActivity.java

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (resultCode != RESULT_OK)
        return;/*w ww  . j  a v  a2  s.  co m*/
    if (requestCode == REQUEST_QR) {
        Bitmap bitmap = (Bitmap) data.getExtras().get("data");
        int width = bitmap.getWidth(), height = bitmap.getHeight();
        int[] pixels = new int[width * height];
        bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
        bitmap.recycle();
        RGBLuminanceSource source = new RGBLuminanceSource(width, height, pixels);
        BinaryBitmap bBitmap = new BinaryBitmap(new HybridBinarizer(source));
        MultiFormatReader reader = new MultiFormatReader();
        try {
            Result result = reader.decode(bBitmap);
            String str = result.getText();
            Log.i("ID", str);

            String[] tokens = str.split(" ", 3);

            if (tokens.length < 2 || !tokens[0].equals("Its Ur's")) {
                snack(getString(R.string.qr_invalid));
                return;
            }

            String id = tokens[1].toLowerCase();

            if (id.length() != 16) {
                snack(getString(R.string.qr_invalid));
                return;
            }

            if (db.hasContact(id)) {
                snack(getString(R.string.contact_already_added));
                return;
            }

            String name = "";
            if (tokens.length > 2) {
                name = tokens[2];
            }

            addContact(id, name);

            return;

        } catch (Exception ex) {
            snack(getString(R.string.qr_invalid));
            ex.printStackTrace();
        }
    } else if (requestCode == VOICE_RECOGNITION_REQUEST_CODE) {
        if (resultCode == RESULT_OK) {
            ArrayList<String> textMatchList = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            String[] textTyped = new String[textMatchList.size()];
            String typeText = "";
            for (int i = 0; i < textMatchList.size(); i++) {
                textTyped[i] = textMatchList.get(i);
                typeText += textTyped[i];
            }
            if (!textMatchList.isEmpty()) {

                if (textMatchList.get(0).contains("open") || textMatchList.get(0).contains("OPEN")) {
                    String contName = "";
                    if (typeText.contains("open chat")) {
                        if (textMatchList.size() >= 2) {
                            contName = textMatchList.get(2);
                        }
                        if (contName != "") {
                            startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("chat:" + contName),
                                    getApplicationContext(), ChatActivity.class));
                        }
                    }
                    listen();
                } else if (textMatchList.get(0).contains("password")
                        || textMatchList.get(0).contains("PASSWORD")) {
                    String password = "password";

                    if (textMatchList.size() >= 2) {

                        password = textMatchList.get(0).replaceFirst("password ", "");

                    } else if (textMatchList.size() >= 1) {

                        password = textMatchList.get(0).replaceFirst("password ", "");

                    }
                    db.setPassword(password);
                    update();
                    String toSpeak = "password changed successfully to " + password;
                    Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
                    listen();

                } else if (textMatchList.get(0).contains("change") || textMatchList.get(0).contains("CHANGE")) {
                    String name = "";
                    if (textMatchList.size() >= 2) {
                        name = textMatchList.get(2);
                    }
                    db.setName(name);
                    update();
                    snack(getString(R.string.snack_alias_changed));
                    String toSpeak = "Alias changed to " + name;
                    Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
                    t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
                    listen();
                } else if (textMatchList.get(0).contains("tell") || textMatchList.get(0).contains("tell")) {
                    String id1 = "";
                    id1 = (tor.getID());
                    Toast.makeText(getApplicationContext(), id1, Toast.LENGTH_SHORT).show();
                    //t1.speak(id1, TextToSpeech.QUEUE_FLUSH, null);
                    listen();
                } else if (textMatchList.get(0).contains("enter") || textMatchList.get(0).contains("ENTER")) {
                    String id1 = "Yet to come";
                    Toast.makeText(getApplicationContext(), id1, Toast.LENGTH_SHORT).show();
                    //t1.speak(id1, TextToSpeech.QUEUE_FLUSH, null);
                    listen();
                } else if (textMatchList.get(0).contains("HELP") || textMatchList.get(0).contains("help")) {
                    String toSpeak = "Voice Commands that can be used are 1 Open chat with contact name 2 change with alias name 3 tell";
                    Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
                    t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
                    listen();
                }

                else if (textMatchList.get(0).contains("close") || textMatchList.get(0).contains("CLOSE")) {
                    String toSpeak = "Closing Voice command";
                    Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
                    t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
                }

                else {
                    // poString
                    String toSpeak = "I Can't Understand";
                    Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
                    // t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
                    listen();
                }

            }
            //Result code for various error.
        } else if (resultCode == RecognizerIntent.RESULT_AUDIO_ERROR) {

            String toSpeak = "Audio Error";
            Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
            t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (resultCode == RecognizerIntent.RESULT_CLIENT_ERROR) {

            String toSpeak = "Client Error";
            Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
            t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (resultCode == RecognizerIntent.RESULT_NETWORK_ERROR) {

            String toSpeak = "Network Error";
            Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
            t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (resultCode == RecognizerIntent.RESULT_NO_MATCH) {

            String toSpeak = "No Match";
            Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
            t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        } else if (resultCode == RecognizerIntent.RESULT_SERVER_ERROR) {

            String toSpeak = "Server Error";
            Toast.makeText(getApplicationContext(), toSpeak, Toast.LENGTH_SHORT).show();
            t1.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, null);
        }

    }

}

From source file:com.gelakinetic.mtgfam.fragments.LifeCounterFragment.java

/**
 * This is called every time an utterance is completed, as well as when the m9000Player finishes shouting.
 * It polls an item out of the LinkedList and speaks it, or returns audio focus to the system.
 *
 * @param key A key to determine what was just uttered. This is ignored
 */// w  w w. j  a  va 2s .c om
/**
 * This is called every time an utterance is completed, as well as when the
 * m9000Player finishes shouting. It polls an item out of the LinkedList and
 * speaks it, or returns audio focus to the system.
 *
 * @param key A key to determine what was just uttered. This is ignored
 */
@Override
public void onUtteranceCompleted(String key) {
    if (mVocalizations.size() > 0) {
        String toSpeak = mVocalizations.poll();
        if (toSpeak.equals(OVER_9000_KEY)) {
            try {
                m9000Player.stop();
                m9000Player.prepare();
                m9000Player.start();
            } catch (IOException e) {
                /* If the media was not played, fall back to TTSing "over 9000" */
                ttsAnnounce(getString(R.string.life_counter_over_9000));
            }
        } else {
            ttsAnnounce(toSpeak);
        }
    } else {
        mAudioManager.abandonAudioFocus(this);
    }
}

/**
 * Speaks the given text on the music stream with the LIFE_ANNOUNCE utterance
 * id, showing the TTS error dialog if the engine rejects the request.
 * (Extracted: this params-build + speak + error-dialog sequence was
 * duplicated verbatim in both branches of onUtteranceCompleted.)
 *
 * @param toSpeak the text to hand to the TTS engine
 */
private void ttsAnnounce(String toSpeak) {
    HashMap<String, String> ttsParams = new HashMap<>();
    ttsParams.put(TextToSpeech.Engine.KEY_PARAM_STREAM, String.valueOf(AudioManager.STREAM_MUSIC));
    ttsParams.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, LIFE_ANNOUNCE);

    if (mTts.speak(toSpeak, TextToSpeech.QUEUE_FLUSH, ttsParams) == TextToSpeech.ERROR) {
        FamiliarActivity activity = getFamiliarActivity();
        if (activity != null) {
            activity.showTtsDialog();
        }
    }
}

From source file:com.hughes.android.dictionary.DictionaryActivity.java

/**
 * Builds the per-row context menu: add-to-wordlist, share, copy, and — when
 * applicable — search-for-selection and speak entries.
 */
@Override
public void onCreateContextMenu(ContextMenu menu, View v, ContextMenuInfo menuInfo) {
    AdapterContextMenuInfo adapterContextMenuInfo = (AdapterContextMenuInfo) menuInfo;
    final RowBase row = (RowBase) getListAdapter().getItem(adapterContextMenuInfo.position);

    final android.view.MenuItem addToWordlist = menu.add(getString(R.string.addToWordList, wordList.getName()));
    addToWordlist.setOnMenuItemClickListener(new android.view.MenuItem.OnMenuItemClickListener() {
        public boolean onMenuItemClick(android.view.MenuItem item) {
            onAppendToWordList(row);
            return false;
        }
    });

    // Share the row's token and raw text via a standard ACTION_SEND chooser.
    final android.view.MenuItem share = menu.add("Share");
    share.setOnMenuItemClickListener(new android.view.MenuItem.OnMenuItemClickListener() {
        public boolean onMenuItemClick(android.view.MenuItem item) {
            Intent shareIntent = new Intent(android.content.Intent.ACTION_SEND);
            shareIntent.setType("text/plain");
            shareIntent.putExtra(android.content.Intent.EXTRA_SUBJECT, row.getTokenRow(true).getToken());
            shareIntent.putExtra(android.content.Intent.EXTRA_TEXT, row.getRawText(saveOnlyFirstSubentry));
            startActivity(shareIntent);
            return false;
        }
    });

    final android.view.MenuItem copy = menu.add(android.R.string.copy);
    copy.setOnMenuItemClickListener(new android.view.MenuItem.OnMenuItemClickListener() {
        public boolean onMenuItemClick(android.view.MenuItem item) {
            onCopy(row);
            return false;
        }
    });

    // Only offered when the user long-pressed on a text selection.
    if (selectedSpannableText != null) {
        final String selectedText = selectedSpannableText;
        final android.view.MenuItem searchForSelection = menu
                .add(getString(R.string.searchForSelection, selectedSpannableText));
        searchForSelection.setOnMenuItemClickListener(new android.view.MenuItem.OnMenuItemClickListener() {
            public boolean onMenuItemClick(android.view.MenuItem item) {
                jumpToTextFromHyperLink(selectedText, selectedSpannableIndex);
                return false;
            }
        });
        // Rats, this won't be shown:
        //searchForSelection.setIcon(R.drawable.abs__ic_search);
    }

    // TTS entry: speak either the row's token or the selected text, after
    // switching the engine to the matching index's language.
    if ((row instanceof TokenRow || selectedSpannableText != null) && ttsReady) {
        final android.view.MenuItem speak = menu.add(R.string.speak);
        final String textToSpeak = row instanceof TokenRow ? ((TokenRow) row).getToken()
                : selectedSpannableText;
        updateTTSLanguage(row instanceof TokenRow ? indexIndex : selectedSpannableIndex);
        speak.setOnMenuItemClickListener(new android.view.MenuItem.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(android.view.MenuItem item) {
                textToSpeech.speak(textToSpeak, TextToSpeech.QUEUE_FLUSH, new HashMap<String, String>());
                return false;
            }
        });
    }
}

From source file:org.de.jmg.learn._MainActivity.java

/**
 * Speaks text {@code t} in locale {@code l}, handling cloze-marked text by
 * splitting on the cloze token and inserting a 2.5 s pause between segments.
 *
 * @param t        the text to speak
 * @param l        the locale for the TTS engine; "_off" disables speech
 * @param ID       the utterance id reported back by the engine
 * @param blnFlush true to flush the TTS queue before speaking, false to append
 */
public void speak(String t, Locale l, String ID, boolean blnFlush) {
    try {
        // TTS globally disabled, or this locale explicitly muted.
        if (!_main.blnTextToSpeech || l.toString().equalsIgnoreCase("_off"))
            return;
        // Idiom fix: Java-style array declaration (was `String ts[]`).
        String[] ts = (" " + t + " ").split(getString(R.string.cloze));
        if (!t.equalsIgnoreCase(getString(R.string.cloze)) && ts.length > 1) {
            // Cloze text: speak each segment, pausing between them. Only the
            // first segment honors the caller's flush flag; the rest queue.
            for (int i = 0; i < ts.length; i++) {
                String s = ts[i];
                // Idiom fix: was `(i == 0) ? blnFlush : false`.
                speak(s, l, ID, i == 0 && blnFlush);
                if (i < ts.length - 1) {
                    if (Build.VERSION.SDK_INT < 21) {
                        HashMap<String, String> h = new HashMap<>();
                        h.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "cloze");
                        //noinspection deprecation
                        _main.tts.playSilence(2500, TextToSpeech.QUEUE_ADD, h);
                    } else {
                        _main.tts.playSilentUtterance(2500, TextToSpeech.QUEUE_ADD, "cloze");
                    }
                    //speak(getString(R.string.cloze), Locale.getDefault(), ID, false);
                }
            }
        } else {
            // Plain text: set the language, falling back to US English.
            int res = _main.tts.setLanguage(l);
            if (res < 0) {
                if (_main.tts.setLanguage(Locale.US) < 0)
                    return;
            }
            int flags;
            if (blnFlush) {
                flags = TextToSpeech.QUEUE_FLUSH;
            } else {
                flags = TextToSpeech.QUEUE_ADD;
            }
            if (Build.VERSION.SDK_INT < 21) {
                // Pre-Lollipop API takes the utterance id inside the params map.
                HashMap<String, String> h = new HashMap<>();
                h.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, ID);

                //noinspection deprecation
                _main.tts.speak(t, flags, h);
            } else {
                _main.tts.speak(t, flags, null, ID);
            }
        }
    } catch (Exception ex) {
        lib.setgstatus("speak: " + t + " " + l.toString());
        lib.ShowException(_main, ex);
    }
}

From source file:org.botlibre.sdk.activity.ChatActivity.java

/**
 * Processes a chat response from the bot server: silences any in-progress
 * speech, updates the status (emote/action/pose) line, appends the message
 * to the chat list, renders it in the response WebView, and then plays the
 * server speech audio, the talking-avatar video, and/or device TTS depending
 * on the sound/video settings. List and view mutations are dispatched via
 * {@code post()}/{@code runOnUiThread()}.
 *
 * @param response the parsed chat response; {@code response.message} may be
 *                 null, in which case a synthetic "ready" entry is shown and
 *                 listening resumes immediately
 */
public void response(final ChatResponse response) {
    // Silence whichever output channels are active. Guard each member with
    // its own null check: the original combined test
    // (speechPlayer != null || tts != null) dereferenced both members when
    // only one was non-null, and an exception from tts.stop() prevented
    // speechPlayer.pause() from ever running.
    if (tts != null) {
        try {
            tts.stop();
        } catch (Exception exception) {
            Log.e("RESPONSE", "Error: " + exception.getMessage());
        }
    }
    if (speechPlayer != null) {
        try {
            speechPlayer.pause();
        } catch (Exception exception) {
            Log.e("RESPONSE", "Error: " + exception.getMessage());
        }
    }
    // Keep the mic off while the bot speaks so the recognizer does not pick
    // up the bot's own voice; also needed when calling "sleep", otherwise
    // the mic is never released.
    try {
        stopListening();
        this.response = response;

        // Build the status line from emote, action and pose, space-separated.
        String status = "";
        if (response.emote != null && !response.emote.equals("NONE")) {
            status = status + response.emote.toLowerCase();
        }
        if (response.action != null) {
            if (!status.isEmpty()) {
                status = status + " ";
            }
            status = status + response.action;
        }
        if (response.pose != null) {
            if (!status.isEmpty()) {
                status = status + " ";
            }
            status = status + response.pose;
        }

        if (response.command != null) {
            // Constructing a Command executes it as a side effect; the
            // instance itself is not needed (the original stored it in an
            // unused local variable).
            JSONObject jsonObject = response.getCommand();
            new Command(this, jsonObject);
        }

        TextView statusView = (TextView) findViewById(R.id.statusText);
        statusView.setText(status);

        final String text = response.message;
        final ListView list = (ListView) findViewById(R.id.chatList);
        if (text == null) {
            // No message text: append a synthetic "ready" marker to the chat
            // list and resume listening immediately.
            list.post(new Runnable() {
                @Override
                public void run() {
                    ChatResponse ready = new ChatResponse();
                    ready.message = "ready";
                    messages.add(ready);
                    ((ChatListAdapter) list.getAdapter()).notifyDataSetChanged();
                    list.invalidateViews();
                    if (list.getCount() > 2) {
                        list.setSelection(list.getCount() - 2);
                    }
                    beginListening();
                }
            });
            return;
        }
        // Append the response to the chat list on the UI thread.
        list.post(new Runnable() {
            @Override
            public void run() {
                messages.add(response);
                ((ChatListAdapter) list.getAdapter()).notifyDataSetChanged();
                list.invalidateViews();
                if (list.getCount() > 2) {
                    list.setSelection(list.getCount() - 2);
                }
            }
        });

        // Render the message (with auto-linked URLs and, if it contains
        // markup, linked postbacks) in the response WebView.
        WebView responseView = (WebView) findViewById(R.id.responseText);
        String html = Utils.linkHTML(text);
        if (html.contains("<") && html.contains(">")) {
            html = linkPostbacks(html);
        }
        responseView.loadDataWithBaseURL(null, html, "text/html", "utf-8", null);

        // Speak only when there is text and either the device voice is
        // selected or the server supplied speech audio.
        boolean talk = (text.trim().length() > 0) && (MainActivity.deviceVoice
                || (this.response.speech != null && this.response.speech.length() > 0));
        if (MainActivity.sound && talk) {
            if (!MainActivity.disableVideo && !videoError && this.response.isVideo()
                    && this.response.isVideoTalk()) {
                // Talking avatar video: loop it, and once the video is
                // prepared start either the server voice audio or device TTS.
                videoView.setOnPreparedListener(new OnPreparedListener() {
                    @Override
                    public void onPrepared(MediaPlayer mp) {
                        try {
                            mp.setLooping(true);
                            if (!MainActivity.deviceVoice) {
                                // Server-provided voice audio; when it ends,
                                // cycle back to the idle video and resume
                                // listening (unless music is playing).
                                speechPlayer = playAudio(response.speech, false, false, false);
                                speechPlayer.setOnCompletionListener(new OnCompletionListener() {
                                    @Override
                                    public void onCompletion(MediaPlayer mp) {
                                        mp.release();
                                        videoView.post(new Runnable() {
                                            public void run() {
                                                cycleVideo(response);
                                            }
                                        });
                                        runOnUiThread(new Runnable() {
                                            public void run() {
                                                if (!music) {
                                                    beginListening();
                                                }
                                            }
                                        });
                                    }
                                });

                                speechPlayer.start();
                            } else {
                                // Device TTS with HTML tags stripped out.
                                HashMap<String, String> params = new HashMap<String, String>();
                                params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "id");

                                tts.speak(Utils.stripTags(text), TextToSpeech.QUEUE_FLUSH, params);
                            }
                        } catch (Exception exception) {
                            Log.wtf(exception.getMessage(), exception);
                        }
                    }
                });
                playVideo(this.response.avatarTalk, false);
            } else {
                // No talking video: play the server audio or speak via TTS.
                // (The original wrote "else if (talk)" here, which is always
                // true since the enclosing branch already requires talk.)
                if (!MainActivity.deviceVoice) {
                    // Server-provided voice audio.
                    playAudio(this.response.speech, false, false, true);
                } else {
                    HashMap<String, String> params = new HashMap<String, String>();
                    params.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "id");

                    this.tts.speak(Utils.stripTags(text), TextToSpeech.QUEUE_FLUSH, params);
                }
            }
        } else if (talk && (!MainActivity.disableVideo && !videoError && this.response.isVideo()
                && this.response.avatarTalk != null)) {
            // Sound is off but a talking video exists: play it once, then
            // cycle back to the idle video; resume listening right away.
            videoView.setOnPreparedListener(new OnPreparedListener() {
                @Override
                public void onPrepared(MediaPlayer mp) {
                    mp.setLooping(false);
                }
            });
            videoView.setOnCompletionListener(new OnCompletionListener() {
                @Override
                public void onCompletion(MediaPlayer mp) {
                    videoView.setOnCompletionListener(null);
                    cycleVideo(response);
                }
            });
            playVideo(this.response.avatarTalk, false);
            runOnUiThread(new Runnable() {
                public void run() {
                    beginListening();
                }
            });
        } else {
            // Nothing to speak or show: just resume listening.
            runOnUiThread(new Runnable() {
                public void run() {
                    beginListening();
                }
            });
        }
    } catch (Exception exception) {
        Log.wtf(exception.getMessage(), exception);
    }
    if (micLastStat) {
        MainActivity.listenInBackground = true;
    }
}

From source file:com.lofland.housebot.BotController.java

/**
 * Speaks the contents of the input text field aloud, interrupting any
 * utterance already in progress, then clears the field.
 */
private void speakOut() {
    String text = txtText.getText().toString();
    // Robustness: only flush and speak when there is something to say.
    // QUEUE_FLUSH drops the whole playback queue, so speaking a blank
    // string would silently cut off an in-progress utterance for nothing.
    if (!text.trim().isEmpty()) {
        // NOTE(review): speak(String, int, HashMap) is deprecated since
        // API 21; consider the SDK_INT-branched pattern with
        // speak(CharSequence, int, Bundle, String) used elsewhere in this
        // file — confirm the project's minSdkVersion first.
        tts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
    }
    txtText.setText("");
}