Example usage for android.speech.tts TextToSpeech TextToSpeech

List of usage examples for android.speech.tts TextToSpeech TextToSpeech

Introduction

This page shows example usage of the android.speech.tts TextToSpeech constructor, TextToSpeech(Context, OnInitListener).

Prototype

public TextToSpeech(Context context, OnInitListener listener) 

Document

The constructor for the TextToSpeech class, using the default TTS engine.
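
A minimal, self-contained sketch of this constructor in an Activity is shown below. The class name, utterance text, and utterance ID are illustrative placeholders rather than taken from the examples that follow; the engine is created in onCreate() and released in onDestroy().

import java.util.Locale;

import android.app.Activity;
import android.os.Bundle;
import android.speech.tts.TextToSpeech;

public class TtsSketchActivity extends Activity {

    private TextToSpeech mTts;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Create a TextToSpeech instance backed by the default engine.
        // onInit() is called asynchronously once the engine is ready.
        mTts = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
            @Override
            public void onInit(int status) {
                if (status == TextToSpeech.SUCCESS) {
                    mTts.setLanguage(Locale.US);
                    // The four-argument speak() requires API 21+; older code uses the
                    // deprecated speak(String, int, HashMap) form seen in the examples below.
                    mTts.speak("Text to speech is ready.", TextToSpeech.QUEUE_FLUSH, null, "sketchUtteranceId");
                }
            }
        });
    }

    @Override
    protected void onDestroy() {
        // Release engine resources when the Activity goes away.
        if (mTts != null) {
            mTts.stop();
            mTts.shutdown();
        }
        super.onDestroy();
    }
}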

Usage

From source file:com.projecttango.examples.java.pointcloud.PointCloudActivity.java

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_point_cloud);

    mPointCountTextView = (TextView) findViewById(R.id.point_count_textview);
    mAverageZTextView = (TextView) findViewById(R.id.average_z_textview);
    mSurfaceView = (RajawaliSurfaceView) findViewById(R.id.gl_surface_view);

    mPointCloudManager = new TangoPointCloudManager();
    mTangoUx = setupTangoUxAndLayout();
    mRenderer = new PointCloudRajawaliRenderer(this);
    setupRenderer();

    /* Setup tts */
    tts = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            if (status == TextToSpeech.SUCCESS) {
                tts.setLanguage(Locale.US);
                tts.speak("ICU helper initialized.", TextToSpeech.QUEUE_FLUSH, null);
            }
        }
    });

    DisplayManager displayManager = (DisplayManager) getSystemService(DISPLAY_SERVICE);
    if (displayManager != null) {
        displayManager.registerDisplayListener(new DisplayManager.DisplayListener() {
            @Override
            public void onDisplayAdded(int displayId) {

            }

            @Override
            public void onDisplayChanged(int displayId) {
                synchronized (this) {
                    setDisplayRotation();
                }
            }

            @Override
            public void onDisplayRemoved(int displayId) {
            }
        }, null);
    }
}

From source file:treehou.se.habit.gcm.GcmIntentService.java

private void sendNotification(final String msg, int notificationId) {
    if (mNotificationManager == null) {
        mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE);
    }

    Intent notificationIntent = new Intent(this, MainActivity.class);
    notificationIntent.setAction("org.openhab.notification.selected");
    notificationIntent.setFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
    notificationIntent.putExtra("notificationId", notificationId);

    PendingIntent pendingNotificationIntent = PendingIntent.getActivity(getApplicationContext(), 0,
            notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);

    // Check if notification should be spoken

    Log.d(TAG, "Message " + Constants.PREF_REGISTRATION_SERVER + notificationId);

    /*getSharedPreferences(Constants.PREF_REGISTRATION_SERVER + notificationId, MODE_PRIVATE);
    SharedPreferences preferences = getSharedPreferences(Constants.PREFERENCE_SERVER, Context.MODE_PRIVATE);
    long serverId = preferences.getLong(Constants.PREF_REGISTRATION_SERVER+notificationId,-1);
            
    if(serverId < 0){
    return;
    }
            
    Server server = Server.load(Server.class, serverId);*/

    NotificationSettingsDB notificationSettings = NotificationSettingsDB.loadGlobal(getApplicationContext());
    if (notificationSettings.notificationToSpeach()) {
        textToSpeech = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {
            @Override
            public void onInit(int status) {
                if (status != TextToSpeech.ERROR) {
                    textToSpeech.setLanguage(Locale.getDefault());
                    textToSpeech.speak(msg, TextToSpeech.QUEUE_FLUSH, null);
                }
            }
        });
    }

    /*NotificationDB notification = new NotificationDB(msg);
    notification.save();
    List<NotificationDB> notifications = new Select().all().from(NotificationDB.class).execute();*/
    //TODO create inbox style

    String replyLabel = getString(R.string.notification_title);
    RemoteInput remoteInput = new RemoteInput.Builder(VoiceActionService.EXTRA_VOICE_REPLY).setLabel(replyLabel)
            .build();

    Intent replyIntent = new Intent(this, VoiceActionService.class);
    PendingIntent replyPendingIntent = PendingIntent.getService(this, 0, replyIntent, 0);

    // Create the reply action and add the remote input
    NotificationCompat.Action action = new NotificationCompat.Action.Builder(R.drawable.action_voice_light,
            getString(R.string.voice_command), replyPendingIntent).addRemoteInput(remoteInput).build();

    Uri alarmSound = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION);
    NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(this)
            .setSmallIcon(R.drawable.ic_notification).setContentTitle(getString(R.string.notification_title))
            .setAutoCancel(true).setSound(alarmSound)
            .extend(new NotificationCompat.WearableExtender().addAction(action)).setContentText(msg);

    mBuilder.setContentIntent(pendingNotificationIntent);

    mNotificationManager.notify(notificationId, mBuilder.build());
}

From source file:com.hichinaschool.flashcards.anki.ReadText.java

public static void initializeTts(Context context) {
    mReviewer = context;
    mTts = new TextToSpeech(context, new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            // Configure the engine only if initialization succeeded.
            if (status == TextToSpeech.SUCCESS) {
                int result = mTts.setLanguage(Locale.US);
                if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                    Log.e(AnkiDroidApp.TAG, "US language data is missing or not supported");
                } else {
                    Log.e(AnkiDroidApp.TAG, "TTS initialized and set to US");
                }
            } else {
                Log.e(AnkiDroidApp.TAG, "Initialization of TTS failed");
            }
            AnkiDroidApp.getCompat().setTtsOnUtteranceProgressListener(mTts);
        }
    });
    mTtsParams = new HashMap<String, String>();
    mTtsParams.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "stringId");
}

From source file:com.bdcorps.videonews.MainActivity.java

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    Intent intent = getIntent();
    topicCode = intent.getStringExtra("topicCode"); //if it's a string you stored.

    //CCT Connection
    mConnection = new CustomTabsServiceConnection() {
        @Override
        public void onCustomTabsServiceConnected(ComponentName componentName,
                CustomTabsClient customTabsClient) {
            mClient = customTabsClient;
            mCustomTabsSession = getSession();
            mClient.warmup(0);
        }

        @Override
        public void onServiceDisconnected(ComponentName componentName) {
            mClient = null;
            mCustomTabsSession = null;
        }
    };

    //Bind CCT Service
    String packageName = "com.android.chrome";
    CustomTabsClient.bindCustomTabsService(this, packageName, mConnection);

    text = (TextView) findViewById(R.id.textview);
    img = (ImageView) findViewById(R.id.imageview);
    titleTextView = (TextView) findViewById(R.id.title_text_view);

    mTts = new TextToSpeech(this, this);

    Button b1 = (Button) findViewById(R.id.button);
    b1.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            grabnews(topicCode);
        }

    });

    Button b2 = (Button) findViewById(R.id.button2);
    b2.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            speak(text.getText().toString());
        }
    });

    Button b3 = (Button) findViewById(R.id.button3);
    b3.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            article++;
            grabnews(topicCode);
        }
    });

    text.addTextChangedListener(new TextWatcher() {

        @Override
        public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {

        }

        @Override
        public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {

        }

        @Override
        public void afterTextChanged(Editable editable) {
            Log.i("SSS", "text on board is =" + editable.toString());
            speak(text.getText().toString());
        }

    });
}

From source file:me.hammarstrom.imagerecognition.activities.MainActivity.java

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    bindViews();

    mToolbar.setTitle("");
    setSupportActionBar(mToolbar);
    mCameraPreviewLayout.setOnClickListener(this);
    mButtonReset.setOnClickListener(this);

    //        mGestureDetector = new GestureDetectorCompat(this, new CameraPreviewGestureListener());
    //        mCameraPreviewLayout.setOnTouchListener(new View.OnTouchListener() {
    //            @Override
    //            public boolean onTouch(View v, MotionEvent event) {
    //                return mGestureDetector.onTouchEvent(event);
    //            }
    //        });

    mTts = new TextToSpeech(getApplicationContext(), new TextToSpeech.OnInitListener() {

        @Override
        public void onInit(int status) {
            if (status != TextToSpeech.ERROR) {
                mTts.setLanguage(Locale.UK);
            }
        }
    });
}

From source file:com.altcanvas.twitspeak.TwitSpeakActivity.java

public void init() {
    mTts = new TextToSpeech(this, this);
    mTts.setOnUtteranceCompletedListener(this);
}

From source file:com.med.fast.ocr.OcrCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.ocr_capture);

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // TODO: Set up the Text To Speech engine.
    TextToSpeech.OnInitListener listener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("TTS", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("TTS", "Error starting the text to speech engine.");
            }
        }
    };
    tts = new TextToSpeech(this.getApplicationContext(), listener);
}

From source file:com.gelakinetic.mtgfam.fragments.LifeCounterFragment.java

/**
 * When the fragment is created, set up the TTS engine, AudioManager, and MediaPlayer for life total vocalization
 *
 * @param savedInstanceState If the fragment is being re-created from a previous saved state, this is the state.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    mTtsInit = false;
    mTts = new TextToSpeech(getActivity(), this);
    mTts.setOnUtteranceCompletedListener(this);

    mAudioManager = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);

    m9000Player = MediaPlayer.create(getActivity(), R.raw.over_9000);
    if (m9000Player != null) {
        m9000Player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
            public void onCompletion(MediaPlayer mp) {
                onUtteranceCompleted(LIFE_ANNOUNCE);
            }
        });
    }
}

From source file:com.nbplus.vbroadlauncher.service.BroadcastChatHeadService.java

private void createBroadcastChatHead(Intent intent) {
    if (intent == null) {
        return;
    }

    mBroadcastData = intent.getParcelableExtra(Constants.EXTRA_BROADCAST_PAYLOAD_DATA);
    if (mBroadcastData == null) {
        Log.d(TAG, "Broadcast data is not found!!!");
        return;
    }
    long mBroadcastIndex = intent.getLongExtra(Constants.EXTRA_BROADCAST_PAYLOAD_INDEX, -1);
    String pushType = mBroadcastData.getServiceType();
    if (!Constants.PUSH_PAYLOAD_TYPE_NORMAL_BROADCAST.equals(pushType)
            && !Constants.PUSH_PAYLOAD_TYPE_REALTIME_BROADCAST.equals(pushType)
            && !Constants.PUSH_PAYLOAD_TYPE_TEXT_BROADCAST.equals(pushType)) {
        Log.d(TAG, "This is not broadcast push type !!!");
        return;
    }
    if (mIsPlaying && mChatHead != null) {
        removeChatHead(true);
    }

    mIsPlaying = true;

    int layout = -1;
    if (Constants.PUSH_PAYLOAD_TYPE_TEXT_BROADCAST.equals(pushType)) {
        layout = R.layout.fragment_text_broadcast;
    } else {
        layout = R.layout.fragment_audio_broadcast;
    }
    mChatHead = inflater.inflate(layout, null);

    if (Constants.PUSH_PAYLOAD_TYPE_TEXT_BROADCAST.equals(mBroadcastData.getServiceType())) {
        // Text broadcast: show the message in a scrollable TextView and read it aloud via TTS.
        mTextView = (TextView) mChatHead.findViewById(R.id.broadcast_text);
        mTextView.setText(mBroadcastData.getMessage());
        mTextView.setVerticalScrollBarEnabled(true);
        mTextView.setHorizontalScrollBarEnabled(false);
        mTextView.setMovementMethod(new ScrollingMovementMethod());

        mText2SpeechHandler = new TextToSpeechHandler(this, this);
        mText2Speech = new TextToSpeech(this, this);
    } else {
        // Audio / realtime broadcast: render the broadcast page in a transparent WebView.
        mWebView = (WebView) mChatHead.findViewById(R.id.webview);
        mWebViewClient = new RealtimeBroadcastWebViewClient(this, mWebView, this);
        mWebViewClient.setBackgroundTransparent();

        String url = mBroadcastData.getMessage();
        if (url.indexOf("?") > 0) {
            if (!url.contains("UUID=")) {
                url += ("&UUID=" + LauncherSettings.getInstance(this).getDeviceID());
            }
            if (!url.contains("APPID=")) {
                url += ("&APPID=" + getApplicationContext().getPackageName());
            }
        } else {
            if (!url.contains("UUID=")) {
                url += ("?UUID=" + LauncherSettings.getInstance(this).getDeviceID());
            }
            if (!url.contains("APPID=")) {
                if (!url.contains("UUID=")) {
                    url += ("?APPID=" + getApplicationContext().getPackageName());
                } else {
                    url += ("&APPID=" + getApplicationContext().getPackageName());
                }
            }
        }
        mWebViewClient.loadUrl(url);
    }
    mChatHead.setTag(mBroadcastIndex);

    //        mChatHead.findViewById(R.id.btn_dismiss).setOnClickListener(new View.OnClickListener() {
    //            @Override
    //            public void onClick(View v) {
    //                removeChatHead();
    //            }
    //        });

    /**
     * To create an overlay view, when setting up the LayoutParams DON'T set the type to
     * TYPE_SYSTEM_OVERLAY. Instead set it to TYPE_PHONE.
     *
     * Use the following flags:
     *   FLAG_NOT_TOUCH_MODAL
     *   FLAG_WATCH_OUTSIDE_TOUCH
     *
     * FLAG_NOT_TOUCH_MODAL in particular is important: without it, focus is given to the
     * overlay and soft-key (home, menu, etc.) presses are not passed to the activity below.
     */
    int flag = WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY;

    final WindowManager.LayoutParams params = new WindowManager.LayoutParams(
            WindowManager.LayoutParams.MATCH_PARENT, WindowManager.LayoutParams.MATCH_PARENT, flag, 0,
            PixelFormat.TRANSLUCENT);

    params.gravity = Gravity.CENTER;

    /**
     * do not use...
    mChatHead.findViewById(R.id.txt_title).setOnTouchListener(new View.OnTouchListener() {
    private int initialX;
    private int initialY;
    private float initialTouchX;
    private float initialTouchY;
            
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                initialX = params.x;
                initialY = params.y;
                initialTouchX = event.getRawX();
                initialTouchY = event.getRawY();
                return true;
            case MotionEvent.ACTION_UP:
                return true;
            case MotionEvent.ACTION_MOVE:
                params.x = initialX + (int) (event.getRawX() - initialTouchX);
                params.y = initialY + (int) (event.getRawY() - initialTouchY);
                windowManager.updateViewLayout(mChatHead, params);
                return true;
        }
        return false;
    }
    });
    */
    addChatHead(mChatHead, params);
}

From source file:ocr.OcrCaptureActivity.java

/**
 * Initializes the UI and creates the detector pipeline.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.activity_ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Set good defaults for capturing text.
    boolean autoFocus = true;
    boolean useFlash = false;

    // Check for the camera permission before accessing the camera.  If the
    // permission is not granted yet, request permission.
    int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
    if (rc == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // TODO: Set up the Text To Speech engine.
    TextToSpeech.OnInitListener listener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status == TextToSpeech.SUCCESS) {
                Log.d("TTS", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            } else {
                Log.d("TTS", "Error starting the text to speech engine.");
            }
        }
    };
    tts = new TextToSpeech(this.getApplicationContext(), listener);

}