Usage examples for the android.speech.tts.TextToSpeech constructor
public TextToSpeech(Context context, OnInitListener listener)
From source file:in.rade.armud.armudclient.MainActivity.java
/**
 * Activity entry point: binds the location/MUD UI widgets, resets session state,
 * initialises the TTS engine, and kicks off the GoogleApiClient / wear-listener
 * plumbing. Auto-logs in if a saved login string exists in preferences.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main_activity);
    // Locate the UI widgets.
    mLatitudeTextView = (TextView) findViewById(R.id.latitude_text);
    mLongitudeTextView = (TextView) findViewById(R.id.longitude_text);
    mLastUpdateTimeTextView = (TextView) findViewById(R.id.last_update_time_text);
    mLocInfoTextView = (TextView) findViewById(R.id.locinfo_text);
    mSubmitNameButton = (Button) findViewById(R.id.enterButton);
    mCharacterNameEdit = (EditText) findViewById(R.id.editName);
    // Set labels.
    mLatitudeLabel = getResources().getString(R.string.latitude_label);
    mLongitudeLabel = getResources().getString(R.string.longitude_label);
    mLastUpdateTimeLabel = getResources().getString(R.string.last_update_time_label);
    // Reset session/connection state to defaults for a fresh activity instance.
    mRequestingLocationUpdates = true;
    mLastUpdateTime = "";
    mLocInfoLabel = "";
    mConnected = false;
    amWaitingOnSubmit = false;
    mLoggedIn = false;
    mSubmitSuccess = false;
    mLatestLocAccuracy = 30; // initial assumed accuracy in meters — TODO confirm units
    mRoomArrivalTime = 0;
    mCurrentRoom = "";
    // Initialise TTS engine; this activity is its OnInitListener.
    tts = new TextToSpeech(this, this);
    tts.setSpeechRate(1.2f); // slightly faster than default speech
    // Update values using data stored in the Bundle (restores state after rotation).
    updateValuesFromBundle(savedInstanceState);
    // Kick off the process of building a GoogleApiClient and requesting the
    // LocationServices API.
    buildGoogleApiClient();
    // Receive gesture data broadcast by the watch-listener service.
    LocalBroadcastManager.getInstance(this).registerReceiver(mMsgFromWearReceiver,
            new IntentFilter(Globals.COMMAND_PATH));
    mReceiverRegistered = true;
    startService(new Intent(this, PhoneDataLayerListenerService.class));
    client.connect();
    mPrefs = getPreferences(MODE_PRIVATE);
    // If a login string was saved previously, log in automatically and hide the
    // manual name-entry UI.
    if (mPrefs.contains("LOGIN_STRING")) {
        mLoginString = mPrefs.getString("LOGIN_STRING", "connect test test");
        Log.d("login string", mLoginString);
        logintoMUD();
        mSubmitNameButton.setVisibility(View.GONE);
        mCharacterNameEdit.setVisibility(View.GONE);
        mLatitudeTextView.setText("");
    }
}
From source file:com.codebutler.farebot.activity.CardInfoActivity.java
/**
 * Loads the scanned card from the content provider off the UI thread, parses its
 * transit data, then populates the action bar tabs (balance, history,
 * subscriptions, info) and optionally speaks the balance via TTS.
 */
@Override
protected void onCreate(final Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_card_info);
    final ViewPager viewPager = (ViewPager) findViewById(R.id.pager);
    mTabsAdapter = new TabPagerAdapter(this, viewPager);
    final ActionBar actionBar = getActionBar();
    actionBar.setDisplayHomeAsUpEnabled(true);
    actionBar.setTitle(R.string.loading);
    new AsyncTask<Void, Void, Void>() {
        // Captured in doInBackground, consumed in onPostExecute.
        private Exception mException;
        public boolean mSpeakBalanceEnabled;

        @Override
        protected Void doInBackground(Void... voids) {
            try {
                // The intent data URI points at the stored card row.
                Uri uri = getIntent().getData();
                Cursor cursor = getContentResolver().query(uri, null, null, null, null);
                startManagingCursor(cursor);
                cursor.moveToFirst();
                String data = cursor.getString(cursor.getColumnIndex(CardsTableColumns.DATA));
                mCard = Card.fromXml(FareBotApplication.getInstance().getSerializer(), data);
                mTransitData = mCard.parseTransitData();
                SharedPreferences prefs =
                        PreferenceManager.getDefaultSharedPreferences(CardInfoActivity.this);
                mSpeakBalanceEnabled = prefs.getBoolean("pref_key_speak_balance", false);
            } catch (Exception ex) {
                // Defer error handling to onPostExecute so UI work stays on the UI thread.
                mException = ex;
            }
            return null;
        }

        @Override
        protected void onPostExecute(Void aVoid) {
            findViewById(R.id.loading).setVisibility(View.GONE);
            findViewById(R.id.pager).setVisibility(View.VISIBLE);
            if (mException != null) {
                // If the card itself failed to load, show the error and leave;
                // if only transit parsing failed, fall back to the advanced view.
                if (mCard == null) {
                    Utils.showErrorAndFinish(CardInfoActivity.this, mException);
                } else {
                    Log.e("CardInfoActivity", "Error parsing transit data", mException);
                    showAdvancedInfo(mException);
                    finish();
                }
                return;
            }
            if (mTransitData == null) {
                showAdvancedInfo(new UnsupportedCardException());
                finish();
                return;
            }
            // Prefer the transit serial number; fall back to the raw tag ID in hex.
            String titleSerial = (mTransitData.getSerialNumber() != null)
                    ? mTransitData.getSerialNumber()
                    : Utils.getHexString(mCard.getTagId(), "");
            actionBar.setTitle(mTransitData.getCardName() + " " + titleSerial);
            Bundle args = new Bundle();
            args.putString(AdvancedCardInfoActivity.EXTRA_CARD,
                    mCard.toXml(FareBotApplication.getInstance().getSerializer()));
            args.putParcelable(EXTRA_TRANSIT_DATA, mTransitData);
            if (mTransitData instanceof UnauthorizedClassicTransitData) {
                // Locked card: single "unauthorized" tab, nothing else to show.
                mTabsAdapter.addTab(actionBar.newTab(), UnauthorizedCardFragment.class, args);
                return;
            }
            // Add one tab per available data category.
            if (mTransitData.getBalanceString() != null) {
                mTabsAdapter.addTab(actionBar.newTab().setText(R.string.balance),
                        CardBalanceFragment.class, args);
            }
            if (mTransitData.getTrips() != null || mTransitData.getRefills() != null) {
                mTabsAdapter.addTab(actionBar.newTab().setText(R.string.history),
                        CardTripsFragment.class, args);
            }
            if (mTransitData.getSubscriptions() != null) {
                mTabsAdapter.addTab(actionBar.newTab().setText(R.string.subscriptions),
                        CardSubscriptionsFragment.class, args);
            }
            if (mTransitData.getInfo() != null) {
                mTabsAdapter.addTab(actionBar.newTab().setText(R.string.info),
                        CardInfoFragment.class, args);
            }
            // Only show tab navigation when there is more than one tab.
            if (mTabsAdapter.getCount() > 1) {
                actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
            }
            if (mTransitData.hasUnknownStations()) {
                findViewById(R.id.need_stations).setVisibility(View.VISIBLE);
            }
            // Speak the balance only if both the preference and the launching
            // intent request it.
            boolean speakBalanceRequested = getIntent().getBooleanExtra(SPEAK_BALANCE_EXTRA, false);
            if (mSpeakBalanceEnabled && speakBalanceRequested) {
                mTTS = new TextToSpeech(CardInfoActivity.this, mTTSInitListener);
            }
            if (savedInstanceState != null) {
                viewPager.setCurrentItem(savedInstanceState.getInt(KEY_SELECTED_TAB, 0));
            }
        }
    }.execute();
}
From source file:com.application.akscorp.yandextranslator2017.TranslateScreen.java
/**
 * Initialises the translate-screen components: the TTS engine (UK English),
 * the exchange/copy/speak button listeners, the input field, and the
 * translation list adapter.
 */
private void StartInit() {
    // Init speech object; language is set once the engine reports successful init.
    SpeechEngine = new TextToSpeech(context, new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            if (status != TextToSpeech.ERROR) {
                SpeechEngine.setLanguage(Locale.UK);
            }
        }
    });
    // Swap source/target languages.
    ImageButton ExchangeBtn = (ImageButton) TranslateScreen.findViewById(R.id.exchange_languages);
    ExchangeBtn.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            ExchangeTranslateLanguages();
        }
    });
    // Copy the current translation to the clipboard.
    CopyButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            CopyTextToClipBoard(GetTranslate());
        }
    });
    // Toggle speech: stop if already speaking, otherwise speak the translation
    // in the currently selected target language.
    SpeakButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (SpeechEngine.isSpeaking())
                SpeechEngine.stop();
            else {
                try {
                    TextToSpeech(GetTranslate(), new Locale(LangsCodeSpinner[ToLang]));
                } catch (Exception e) {
                    // Speaking can fail for unsupported locales; log and notify.
                    Logs.SaveLog(context, e);
                    Toast.makeText(context,
                            LanguageWork.GetResourceString(context, "forbidden_option"),
                            Toast.LENGTH_SHORT).show();
                }
            }
        }
    });
    editTextInputFrazeForTranslate =
            (EditTextWithButtons) TranslateScreen.findViewById(R.id.translate_input);
    AddToFavourite = (ImageButton) TranslateScreen.findViewById(R.id.add_favourite_button);
    TranslateAdapter = new TranslateListViewAdapter(context);
    TranslateList.setAdapter(TranslateAdapter);
    MyUtility.setListViewHeightBasedOnChildren(TranslateList);
    TranslateButtonController();
    InitLanguagesTranslateList();
}
From source file:com.eng.arab.translator.androidtranslator.ShowDetailsMonth.java
public void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == MY_DATA_CHECK_CODE) { if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) { //the user has the necessary data - create the TTS myTTS = new TextToSpeech(getApplicationContext(), this); } else {//from w w w . j ava2 s.c o m //no data - install it now Intent installTTSIntent = new Intent(); installTTSIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA); startActivity(installTTSIntent); } } }
From source file:com.jtxdriggers.android.ventriloid.VentriloidService.java
@Override public void onCreate() { stopForeground(true);/*from www .j a v a 2 s. c o m*/ new Thread(new Runnable() { @Override public void run() { Looper.prepare(); handler = new Handler(); Looper.loop(); } }).start(); items = new ItemData(this); player = new Player(this); recorder = new Recorder(this); prefs = PreferenceManager.getDefaultSharedPreferences(this); if (prefs.getString("notification_type", "Text to Speech").equals("Text to Speech")) { ttsActive = true; tts = new TextToSpeech(VentriloidService.this, new TextToSpeech.OnInitListener() { @Override public void onInit(int status) { if (status == TextToSpeech.SUCCESS) ttsActive = true; else { ttsActive = false; handler.post(new Runnable() { @Override public void run() { Toast.makeText(getApplicationContext(), "TTS Initialization faled.", Toast.LENGTH_SHORT).show(); } }); } } }); } else if (prefs.getString("notification_type", "Text to Speech").equals("Ringtone")) ringtoneActive = true; registerReceiver(activityReceiver, new IntentFilter(ACTIVITY_RECEIVER)); nm = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); tm = (TelephonyManager) getSystemService(TELEPHONY_SERVICE); am = (AudioManager) getSystemService(Context.AUDIO_SERVICE); voiceActivation = prefs.getBoolean("voice_activation", false); threshold = voiceActivation ? 55.03125 : -1; vibrator = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE); vibrate = prefs.getBoolean("vibrate", true); queue = new ConcurrentLinkedQueue<VentriloEventData>(); //VentriloInterface.debuglevel(65535); new Thread(eventHandler).start(); }
From source file:com.google.fpl.gim.examplegame.MainService.java
@Override public void onCreate() { // The service is being created. Utils.logDebug(TAG, "onCreate"); IntentFilter intentFilter = new IntentFilter(); intentFilter.addAction(CHOICE_NOTIFICATION_ACTION_1); intentFilter.addAction(CHOICE_NOTIFICATION_ACTION_2); intentFilter.addAction(CHOICE_NOTIFICATION_ACTION_3); registerReceiver(mReceiver, intentFilter); mAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE); // Determines the behavior for handling Audio Focus surrender. mAudioFocusChangeListener = new AudioManager.OnAudioFocusChangeListener() { @Override//from ww w .jav a 2s.c o m public void onAudioFocusChange(int focusChange) { if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT || focusChange == AudioManager.AUDIOFOCUS_LOSS) { if (mTextToSpeech.isSpeaking()) { mTextToSpeech.setOnUtteranceProgressListener(null); mTextToSpeech.stop(); } if (mMediaPlayer.isPlaying()) { mMediaPlayer.stop(); } // Abandon Audio Focus, if it's requested elsewhere. mAudioManager.abandonAudioFocus(mAudioFocusChangeListener); // Restart the current moment if AudioFocus was lost. Since AudioFocus is only // requested away from this application if this application was using it, // only Moments that play sound will restart in this way. if (mMission != null) { mMission.restartMoment(); } } } }; // Asynchronously prepares the TextToSpeech. mTextToSpeech = new TextToSpeech(this, new TextToSpeech.OnInitListener() { @Override public void onInit(int status) { if (status == TextToSpeech.SUCCESS) { // Check if language is available. 
switch (mTextToSpeech.isLanguageAvailable(DEFAULT_TEXT_TO_SPEECH_LOCALE)) { case TextToSpeech.LANG_AVAILABLE: case TextToSpeech.LANG_COUNTRY_AVAILABLE: case TextToSpeech.LANG_COUNTRY_VAR_AVAILABLE: Utils.logDebug(TAG, "TTS locale supported."); mTextToSpeech.setLanguage(DEFAULT_TEXT_TO_SPEECH_LOCALE); mIsTextToSpeechReady = true; break; case TextToSpeech.LANG_MISSING_DATA: Utils.logDebug(TAG, "TTS missing data, ask for install."); Intent installIntent = new Intent(); installIntent.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA); startActivity(installIntent); break; default: Utils.logDebug(TAG, "TTS local not supported."); break; } } } }); mMediaPlayer = new MediaPlayer(); }
From source file:org.botlibre.sdk.activity.ChatActivity.java
/**
 * Chat screen start-up: requests the microphone permission, wires the TTS and
 * speech-recognition engines, binds every view/listener (speak button, layout
 * toggles, gesture detectors, avatar web view), builds the ChatConfig, and
 * starts a background thread that waits for TTS init before issuing the first
 * chat request.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_chat);
    // Remove flag button if a single bot app.
    if (MainActivity.launchType == LaunchType.Bot) {
        //findViewById(R.id.flagButton).setVisibility(View.GONE);
    }
    // Permission required for speech recognition.
    ActivityCompat.requestPermissions(ChatActivity.this,
            new String[] { Manifest.permission.RECORD_AUDIO }, 1);
    // Set/save the current volume from the device.
    setStreamVolume();
    // Music volume is enabled.
    muteMicBeep(false);
    // For "scream" issue: remember the listen-in-background state.
    micLastStat = MainActivity.listenInBackground;
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
    this.instance = (InstanceConfig) MainActivity.instance;
    if (this.instance == null) {
        return;
    }
    /*if (MainActivity.showAds) {
        AdView mAdView = (AdView) findViewById(R.id.adView);
        AdRequest adRequest = new AdRequest.Builder().build();
        mAdView.loadAd(adRequest);
    } else {
        AdView mAdView = (AdView) findViewById(R.id.adView);
        mAdView.setVisibility(View.GONE);
    }*/
    setTitle(this.instance.name);
    ((TextView) findViewById(R.id.title)).setText(this.instance.name);
    HttpGetImageAction.fetchImage(this, this.instance.avatar, findViewById(R.id.icon));
    // TTS: this activity is the OnInitListener; ttsInit flips true in onInit.
    ttsInit = false;
    tts = new TextToSpeech(this, this);
    if (!MainActivity.handsFreeSpeech) {
        setMicIcon(false, false);
    } else if (!MainActivity.listenInBackground) {
        setMicIcon(false, false);
    }
    // Last state will be saved for the MIC.
    if (MainActivity.listenInBackground && MainActivity.handsFreeSpeech) {
        microphoneThread(thread);
    }
    speech = SpeechRecognizer.createSpeechRecognizer(this);
    speech.setRecognitionListener(this);
    // Bind the layout views.
    scrollView = findViewById(R.id.chatList);
    menuMLayout = (LinearLayout) findViewById(R.id.menuMLayout);
    chatCLayout = (LinearLayout) findViewById(R.id.chatCLayout);
    responseLayout = (LinearLayout) findViewById(R.id.responseLayout);
    chatToolBar = (LinearLayout) findViewById(R.id.chatToolBar);
    videoView = (VideoView) findViewById(R.id.videoView);
    resetVideoErrorListener();
    videoError = false;
    imageView = (ImageView) findViewById(R.id.imageView);
    videoLayout = findViewById(R.id.videoLayout);
    textView = (EditText) findViewById(R.id.messageText);
    // Submit the chat message when the keyboard action key is pressed.
    textView.setOnEditorActionListener(new OnEditorActionListener() {
        @Override
        public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
            submitChat();
            return false;
        }
    });
    if (MainActivity.translate) {
        findViewById(R.id.yandex).setVisibility(View.VISIBLE);
    } else {
        findViewById(R.id.yandex).setVisibility(View.GONE);
    }
    Spinner emoteSpin = (Spinner) findViewById(R.id.emoteSpin);
    emoteSpin.setAdapter(
            new EmoteSpinAdapter(this, R.layout.emote_list, Arrays.asList(EmotionalState.values())));
    ListView list = (ListView) findViewById(R.id.chatList);
    list.setAdapter(new ChatListAdapter(this, R.layout.chat_list, this.messages));
    list.setTranscriptMode(ListView.TRANSCRIPT_MODE_ALWAYS_SCROLL);
    // Speak button: toggles background listening in hands-free mode, otherwise
    // launches the platform speech-to-text dialog.
    ImageButton button = (ImageButton) findViewById(R.id.speakButton);
    button.setOnClickListener(new View.OnClickListener() {
        @TargetApi(23)
        @Override
        public void onClick(View v) {
            if (MainActivity.handsFreeSpeech) {
                // Set the current volume to the setting.
                setStreamVolume();
                // If it's ON or OFF — switching back and forth.
                MainActivity.listenInBackground = !MainActivity.listenInBackground;
                // Saving the boolean data of MainActivity.listenInBackground.
                SharedPreferences.Editor cookies =
                        MainActivity.current.getPreferences(Context.MODE_PRIVATE).edit();
                cookies.putBoolean("listenInBackground", MainActivity.listenInBackground);
                cookies.commit();
                if (MainActivity.listenInBackground) {
                    micLastStat = true;
                    try {
                        microphoneThread(thread);
                    } catch (Exception ignore) {
                    }
                    beginListening();
                } else {
                    micLastStat = false;
                    microphoneThread(thread);
                    stopListening();
                }
            } else {
                Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);
                try {
                    startActivityForResult(intent, RESULT_SPEECH);
                    textView.setText("");
                } catch (ActivityNotFoundException a) {
                    Toast t = Toast.makeText(getApplicationContext(),
                            "Your device doesn't support Speech to Text", Toast.LENGTH_SHORT);
                    t.show();
                }
            }
        }
    });
    // Clicking the image cycles through four layout-visibility states.
    imageView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (stateLayouts == 4) {
                stateLayouts = 0;
            }
            switch (stateLayouts) {
            case 0:
                scrollView.setVisibility(View.VISIBLE);
                chatCLayout.setVisibility(View.VISIBLE);
                menuMLayout.setVisibility(View.VISIBLE);
                responseLayout.setVisibility(View.VISIBLE);
                chatToolBar.setVisibility(View.VISIBLE);
                break;
            case 1:
                scrollView.setVisibility(View.GONE);
                break;
            case 2:
                responseLayout.setVisibility(View.GONE);
                chatToolBar.setVisibility(View.GONE);
                break;
            case 3:
                menuMLayout.setVisibility(View.GONE);
                chatCLayout.setVisibility(View.GONE);
                break;
            }
            stateLayouts++;
        }
    });
    // Same visibility cycling for clicks on the video layout.
    videoLayout.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (stateLayouts == 4) {
                stateLayouts = 0;
            }
            switch (stateLayouts) {
            case 0:
                scrollView.setVisibility(View.VISIBLE);
                chatCLayout.setVisibility(View.VISIBLE);
                menuMLayout.setVisibility(View.VISIBLE);
                responseLayout.setVisibility(View.VISIBLE);
                chatToolBar.setVisibility(View.VISIBLE);
                break;
            case 1:
                scrollView.setVisibility(View.GONE);
                break;
            case 2:
                responseLayout.setVisibility(View.GONE);
                chatToolBar.setVisibility(View.GONE);
                break;
            case 3:
                menuMLayout.setVisibility(View.GONE);
                chatCLayout.setVisibility(View.GONE);
                break;
            }
            stateLayouts++;
        }
    });
    // Double-tap on the chat list toggles between image and video avatar views.
    GestureDetector.SimpleOnGestureListener listener = new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onDoubleTapEvent(MotionEvent event) {
            if (event.getAction() == MotionEvent.ACTION_UP) {
                boolean isVideo = !MainActivity.disableVideo && !videoError && response != null
                        && response.isVideo();
                View imageView = findViewById(R.id.imageView);
                View videoLayout = findViewById(R.id.videoLayout);
                if (imageView.getVisibility() == View.VISIBLE) {
                    imageView.setVisibility(View.GONE);
                } else if (!isVideo) {
                    imageView.setVisibility(View.VISIBLE);
                }
                if (videoLayout.getVisibility() == View.VISIBLE) {
                    videoLayout.setVisibility(View.GONE);
                } else if (isVideo) {
                    videoLayout.setVisibility(View.VISIBLE);
                }
                return true;
            }
            return false;
        }
    };
    final GestureDetector detector = new GestureDetector(this, listener);
    findViewById(R.id.chatList).setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return detector.onTouchEvent(event);
        }
    });
    // Second detector: double-tap toggles the avatar layout visibility.
    listener = new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onDoubleTapEvent(MotionEvent event) {
            if (event.getAction() == MotionEvent.ACTION_UP) {
                View avatarLayout = findViewById(R.id.avatarLayout);
                if (avatarLayout.getVisibility() == View.VISIBLE) {
                    avatarLayout.setVisibility(View.GONE);
                } else {
                    avatarLayout.setVisibility(View.VISIBLE);
                }
                return true;
            }
            return false;
        }
    };
    final GestureDetector detector2 = new GestureDetector(this, listener);
    /*findViewById(R.id.responseText).setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return detector2.onTouchEvent(event);
        }
    });*/
    // The response area is a WebView hosting the avatar; JS bridge is "Android".
    WebView responseView = (WebView) findViewById(R.id.responseText);
    responseView.getSettings().setJavaScriptEnabled(true);
    responseView.getSettings().setDomStorageEnabled(true);
    responseView.addJavascriptInterface(new WebAppInterface(this), "Android");
    findViewById(R.id.responseImageView).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            View avatarLayout = findViewById(R.id.avatarLayout);
            if (avatarLayout.getVisibility() == View.VISIBLE) {
                avatarLayout.setVisibility(View.GONE);
            } else {
                avatarLayout.setVisibility(View.VISIBLE);
            }
        }
    });
    HttpGetImageAction.fetchImage(this, instance.avatar, this.imageView);
    HttpGetImageAction.fetchImage(this, instance.avatar,
            (ImageView) findViewById(R.id.responseImageView));
    // Build the chat configuration from the global MainActivity settings.
    final ChatConfig config = new ChatConfig();
    config.instance = instance.id;
    config.avatar = this.avatarId;
    if (MainActivity.translate && MainActivity.voice != null) {
        config.language = MainActivity.voice.language;
    }
    if (MainActivity.disableVideo) {
        config.avatarFormat = "image";
    } else {
        config.avatarFormat = MainActivity.webm ? "webm" : "mp4";
    }
    config.avatarHD = MainActivity.hd;
    config.speak = !MainActivity.deviceVoice;
    // This is required because of a bug in TextToSpeech that prevents onInit
    // being called if an AsyncTask is called... Poll up to 5 seconds for TTS
    // init before firing the first chat request.
    Thread thread1 = new Thread() {
        public void run() {
            for (int count = 0; count < 5; count++) {
                if (ttsInit) {
                    break;
                }
                try {
                    Thread.sleep(1000);
                } catch (Exception exception) {
                }
            }
            HttpAction action = new HttpChatAction(ChatActivity.this, config);
            action.execute();
        }
    };
    thread1.start();
}
From source file:org.digitalcampus.oppia.activity.CourseActivity.java
@Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == TTS_CHECK) { if (resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS) { // the user has the necessary data - create the TTS myTTS = new TextToSpeech(this, this); }//from www . j a v a2 s . c o m } super.onActivityResult(requestCode, resultCode, data); }
From source file:finalproject.ece558.edu.pdx.ece.brailleblackjack.PlayBlackJackGameFragment.java
/**
 * Check accessibility and set flags. Set up the Android TextToSpeech engine and
 * the Google MessageApi client.
 *
 * Android Developers website was used to aid in creating the MessageApi client:
 * http://developer.android.com/training/wearables/data-layer/events.html
 *
 * A tutorial from tutorialspoint.com was used to aid with Android TextToSpeech:
 * http://www.tutorialspoint.com/android/android_text_to_speech.htm
 *
 * @param savedInstanceState previously saved fragment state, if any
 */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    context = this.getActivity();
    /* Check if accessibility is on to speak upon certain events. */
    am = (AccessibilityManager) context.getSystemService(Context.ACCESSIBILITY_SERVICE);
    isAccessibilityEnabled = am.isEnabled();
    isExploreByTouchEnabled = am.isTouchExplorationEnabled();
    /* Set up the TTS API; on success, announce the opening deal once when
       accessibility is enabled. */
    textToSpeech = new TextToSpeech(context, new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(int status) {
            if (status == TextToSpeech.SUCCESS) {
                int result = textToSpeech.setLanguage(Locale.US);
                if (result == TextToSpeech.LANG_MISSING_DATA
                        || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                    Log.e("error", "Language chosen is not supported");
                } else {
                    if (isAccessibilityEnabled) {
                        // Speak the initial cards only once per game.
                        if (!first_draw_spoken) {
                            String speak = "You drew " + player_left_card.getCardDescription()
                                    + " and " + player_right_card.getCardDescription()
                                    + "\n Dealer drew " + dealer_right_card.getCardDescription();
                            convertTextToSpeech(speak);
                            first_draw_spoken = true;
                        }
                    }
                }
            } else {
                Log.e("error", "Initialization Failed!");
            }
        }
    });
    Log.d(TAG, "Attempting to connect to Google Api Client");
    mGoogleApiClient = new GoogleApiClient.Builder(context).addApi(Wearable.API)
            .addConnectionCallbacks(this).addOnConnectionFailedListener(this).build();
    Log.d(TAG, "Connected to Google Api Client");
    // Initialize state flags.
    button_hit_state = true;
    button_stand_state = true;
    button_hint_state = true;
    button_start_over_state = true;
    first_draw_spoken = false;
}
From source file:com.morlunk.mumbleclient.service.PlumbleService.java
@Override public void onCreate() { super.onCreate(); // Register for notification actions IntentFilter notificationIntentFilter = new IntentFilter(); notificationIntentFilter.addAction(BROADCAST_MUTE); notificationIntentFilter.addAction(BROADCAST_DEAFEN); notificationIntentFilter.addAction(BROADCAST_TOGGLE_OVERLAY); registerReceiver(mNotificationReceiver, notificationIntentFilter); registerReceiver(mTalkReceiver, new IntentFilter(BROADCAST_TALK)); try {/* w w w .ja v a 2 s .co m*/ getBinder().registerObserver(mObserver); } catch (RemoteException e) { e.printStackTrace(); } // Register for preference changes mSettings = Settings.getInstance(this); mPTTSoundEnabled = mSettings.isPttSoundEnabled(); SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this); preferences.registerOnSharedPreferenceChangeListener(this); // Instantiate overlay view mChannelOverlay = new PlumbleOverlay(this); mHotCorner = new PlumbleHotCorner(this, mSettings.getHotCornerGravity(), mHotCornerListener); // Set up TTS if (mSettings.isTextToSpeechEnabled()) mTTS = new TextToSpeech(this, mTTSInitListener); }