List of usage examples for android.speech RecognizerIntent ACTION_RECOGNIZE_SPEECH
String ACTION_RECOGNIZE_SPEECH
To view the source code for android.speech RecognizerIntent ACTION_RECOGNIZE_SPEECH, click the Source Link on each example.
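The examples below all launch the same recognizer activity. A minimal sketch of the full round trip (launching recognition and reading the results back in onActivityResult) looks roughly like the following; it assumes an Activity, and REQUEST_SPEECH is a hypothetical request code, not taken from any of the examples:

private static final int REQUEST_SPEECH = 1; // hypothetical request code

private void startSpeechRecognition() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak now");
    try {
        startActivityForResult(intent, REQUEST_SPEECH);
    } catch (ActivityNotFoundException e) {
        // No activity on this device can handle ACTION_RECOGNIZE_SPEECH.
    }
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SPEECH && resultCode == RESULT_OK && data != null) {
        // EXTRA_RESULTS holds the recognition hypotheses, best match first.
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (matches != null && !matches.isEmpty()) {
            String bestMatch = matches.get(0);
            // Use bestMatch here.
        }
    }
}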
From source file:com.google.sample.cast.refplayer.chatting.MainActivity.java
private void startVoiceRecognitionActivity() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getString(R.string.message_to_cast));
    startActivityForResult(intent, REQUEST_CODE);
}
From source file:com.mhennessy.mapfly.MainActivity.java
public void setMapLocationBasedOnSpeech() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "com.mhennessy.mapfly");

    SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer(this.getApplicationContext());

    // Stop flying so that messages can be displayed to the user without
    // being overwritten by pitch/roll info.
    setFlyingEnabled(false);

    RecognitionListener listener = new RecognitionListener() {
        @Override
        public void onResults(Bundle results) {
            ArrayList<String> voiceResults = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            if (voiceResults == null) {
                Log.e(TAG, "No voice results");
            } else {
                Log.d(TAG, "Printing matches: ");
                for (String match : voiceResults) {
                    Log.d(TAG, match);
                }
                String bestMatch = voiceResults.get(0);
                setMapLocation(bestMatch);
            }
        }

        @Override
        public void onReadyForSpeech(Bundle params) {
            setTitle("Say something!");
            Log.d(TAG, "Ready for speech");
        }

        @Override
        public void onError(int error) {
            setTitle("Speech Error");
            Log.d(TAG, "Error listening for speech: " + error);
        }

        @Override
        public void onBeginningOfSpeech() {
            Log.d(TAG, "Speech starting");
        }

        @Override
        public void onBufferReceived(byte[] buffer) {
            // no-op
        }

        @Override
        public void onEndOfSpeech() {
            // no-op
        }

        @Override
        public void onEvent(int eventType, Bundle params) {
            // no-op
        }

        @Override
        public void onPartialResults(Bundle partialResults) {
            // no-op
        }

        @Override
        public void onRmsChanged(float rmsdB) {
            // no-op
        }
    };
    recognizer.setRecognitionListener(listener);
    recognizer.startListening(intent);
}
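The SpeechRecognizer path used above records audio inside the app, so it needs the RECORD_AUDIO permission declared in the manifest and, on Android 6.0+, requested at runtime. A minimal sketch of the runtime check, assuming the AndroidX core library; REQUEST_RECORD_AUDIO is a hypothetical request code not taken from the example:

private static final int REQUEST_RECORD_AUDIO = 2; // hypothetical request code

private void ensureAudioPermission() {
    // Ask for RECORD_AUDIO before calling SpeechRecognizer.startListening().
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[] { Manifest.permission.RECORD_AUDIO }, REQUEST_RECORD_AUDIO);
    }
}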
From source file:org.bishoph.oxdemo.OXDemo.java
public void startVoiceRecognitionActivity() {
    Log.v("OXDemo", "startVoiceRecognitionActivity init...");
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    PackageManager pm = getPackageManager();
    List<ResolveInfo> activities = pm.queryIntentActivities(intent, 0);
    if (activities.size() > 0) {
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speech recognition demo");
        startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
        Log.v("OXDemo", "...done");
    } else {
        // createTask(folder_id, "Auto creation " + getSimpleRandomString());
        Log.v("OXDemo", "No voice recognition!");
    }
}
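Querying the PackageManager as above checks whether any activity can handle ACTION_RECOGNIZE_SPEECH. A related, though not identical, check is SpeechRecognizer.isRecognitionAvailable(), which reports whether a recognition service is installed; a sketch of that variant, reusing the constants from the example above:

if (SpeechRecognizer.isRecognitionAvailable(this)) {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speech recognition demo");
    startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
} else {
    Log.v("OXDemo", "No voice recognition!");
}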
From source file:org.alfresco.mobile.android.application.fragments.user.UserSearchFragment.java
private void speechToText() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, Locale.getDefault());
    try {
        startActivityForResult(intent, RequestCode.TEXT_TO_SPEECH);
    } catch (ActivityNotFoundException a) {
        AlfrescoNotificationManager.getInstance(getActivity()).showToast(R.string.file_editor_error_speech);
    }
}
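Note that this example passes a Locale to EXTRA_LANGUAGE_MODEL, which is documented to take one of the LANGUAGE_MODEL_* string constants. The more conventional form puts the model constant in EXTRA_LANGUAGE_MODEL and a language tag in EXTRA_LANGUAGE, roughly as in this sketch:

Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
// BCP 47 tag such as "en-US"; Locale.toLanguageTag() requires API 21+.
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault().toLanguageTag());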
From source file:org.botlibre.sdk.activity.ChatActivity.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_chat);

    // Remove flag button if a single bot app.
    if (MainActivity.launchType == LaunchType.Bot) {
        //findViewById(R.id.flagButton).setVisibility(View.GONE);
    }

    // Permission required.
    ActivityCompat.requestPermissions(ChatActivity.this, new String[] { Manifest.permission.RECORD_AUDIO }, 1);

    // Set/save the current volume from the device.
    setStreamVolume();

    // Music volume is enabled.
    muteMicBeep(false);

    // For "scream" issue.
    micLastStat = MainActivity.listenInBackground;

    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    this.instance = (InstanceConfig) MainActivity.instance;
    if (this.instance == null) {
        return;
    }

    /*if (MainActivity.showAds) {
        AdView mAdView = (AdView) findViewById(R.id.adView);
        AdRequest adRequest = new AdRequest.Builder().build();
        mAdView.loadAd(adRequest);
    } else {
        AdView mAdView = (AdView) findViewById(R.id.adView);
        mAdView.setVisibility(View.GONE);
    }*/

    setTitle(this.instance.name);
    ((TextView) findViewById(R.id.title)).setText(this.instance.name);
    HttpGetImageAction.fetchImage(this, this.instance.avatar, findViewById(R.id.icon));

    ttsInit = false;
    tts = new TextToSpeech(this, this);

    if (!MainActivity.handsFreeSpeech) {
        setMicIcon(false, false);
    } else if (!MainActivity.listenInBackground) {
        setMicIcon(false, false);
    }

    // Last time will be saved for the MIC.
    if (MainActivity.listenInBackground && MainActivity.handsFreeSpeech) {
        microphoneThread(thread);
    }

    speech = SpeechRecognizer.createSpeechRecognizer(this);
    speech.setRecognitionListener(this);

    // ScrollView and related layouts.
    scrollView = findViewById(R.id.chatList);
    menuMLayout = (LinearLayout) findViewById(R.id.menuMLayout);
    chatCLayout = (LinearLayout) findViewById(R.id.chatCLayout);
    responseLayout = (LinearLayout) findViewById(R.id.responseLayout);
    chatToolBar = (LinearLayout) findViewById(R.id.chatToolBar);
    videoView = (VideoView) findViewById(R.id.videoView);
    resetVideoErrorListener();
    videoError = false;
    imageView = (ImageView) findViewById(R.id.imageView);
    videoLayout = findViewById(R.id.videoLayout);
    textView = (EditText) findViewById(R.id.messageText);
    textView.setOnEditorActionListener(new OnEditorActionListener() {
        @Override
        public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
            submitChat();
            return false;
        }
    });

    if (MainActivity.translate) {
        findViewById(R.id.yandex).setVisibility(View.VISIBLE);
    } else {
        findViewById(R.id.yandex).setVisibility(View.GONE);
    }

    Spinner emoteSpin = (Spinner) findViewById(R.id.emoteSpin);
    emoteSpin.setAdapter(new EmoteSpinAdapter(this, R.layout.emote_list, Arrays.asList(EmotionalState.values())));

    ListView list = (ListView) findViewById(R.id.chatList);
    list.setAdapter(new ChatListAdapter(this, R.layout.chat_list, this.messages));
    list.setTranscriptMode(ListView.TRANSCRIPT_MODE_ALWAYS_SCROLL);

    ImageButton button = (ImageButton) findViewById(R.id.speakButton);
    button.setOnClickListener(new View.OnClickListener() {
        @TargetApi(23)
        @Override
        public void onClick(View v) {
            if (MainActivity.handsFreeSpeech) {
                // Set the current volume to the setting.
                setStreamVolume();
                // Toggle listening on or off.
                MainActivity.listenInBackground = !MainActivity.listenInBackground;
                // Save the boolean value of MainActivity.listenInBackground.
                SharedPreferences.Editor cookies = MainActivity.current.getPreferences(Context.MODE_PRIVATE).edit();
                cookies.putBoolean("listenInBackground", MainActivity.listenInBackground);
                cookies.commit();
                if (MainActivity.listenInBackground) {
                    micLastStat = true;
                    try {
                        microphoneThread(thread);
                    } catch (Exception ignore) {
                    }
                    beginListening();
                } else {
                    micLastStat = false;
                    microphoneThread(thread);
                    stopListening();
                }
            } else {
                Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
                intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, MainActivity.voice.language);
                try {
                    startActivityForResult(intent, RESULT_SPEECH);
                    textView.setText("");
                } catch (ActivityNotFoundException a) {
                    Toast t = Toast.makeText(getApplicationContext(),
                            "Your device doesn't support Speech to Text", Toast.LENGTH_SHORT);
                    t.show();
                }
            }
        }
    });

    // Toggle layout visibility when the image is clicked.
    imageView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (stateLayouts == 4) {
                stateLayouts = 0;
            }
            switch (stateLayouts) {
            case 0:
                scrollView.setVisibility(View.VISIBLE);
                chatCLayout.setVisibility(View.VISIBLE);
                menuMLayout.setVisibility(View.VISIBLE);
                responseLayout.setVisibility(View.VISIBLE);
                chatToolBar.setVisibility(View.VISIBLE);
                break;
            case 1:
                scrollView.setVisibility(View.GONE);
                break;
            case 2:
                responseLayout.setVisibility(View.GONE);
                chatToolBar.setVisibility(View.GONE);
                break;
            case 3:
                menuMLayout.setVisibility(View.GONE);
                chatCLayout.setVisibility(View.GONE);
                break;
            }
            stateLayouts++;
        }
    });

    // Toggle layout visibility when the video is clicked.
    videoLayout.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (stateLayouts == 4) {
                stateLayouts = 0;
            }
            switch (stateLayouts) {
            case 0:
                scrollView.setVisibility(View.VISIBLE);
                chatCLayout.setVisibility(View.VISIBLE);
                menuMLayout.setVisibility(View.VISIBLE);
                responseLayout.setVisibility(View.VISIBLE);
                chatToolBar.setVisibility(View.VISIBLE);
                break;
            case 1:
                scrollView.setVisibility(View.GONE);
                break;
            case 2:
                responseLayout.setVisibility(View.GONE);
                chatToolBar.setVisibility(View.GONE);
                break;
            case 3:
                menuMLayout.setVisibility(View.GONE);
                chatCLayout.setVisibility(View.GONE);
                break;
            }
            stateLayouts++;
        }
    });

    GestureDetector.SimpleOnGestureListener listener = new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onDoubleTapEvent(MotionEvent event) {
            if (event.getAction() == MotionEvent.ACTION_UP) {
                boolean isVideo = !MainActivity.disableVideo && !videoError && response != null && response.isVideo();
                View imageView = findViewById(R.id.imageView);
                View videoLayout = findViewById(R.id.videoLayout);
                if (imageView.getVisibility() == View.VISIBLE) {
                    imageView.setVisibility(View.GONE);
                } else if (!isVideo) {
                    imageView.setVisibility(View.VISIBLE);
                }
                if (videoLayout.getVisibility() == View.VISIBLE) {
                    videoLayout.setVisibility(View.GONE);
                } else if (isVideo) {
                    videoLayout.setVisibility(View.VISIBLE);
                }
                return true;
            }
            return false;
        }
    };
    final GestureDetector detector = new GestureDetector(this, listener);
    findViewById(R.id.chatList).setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return detector.onTouchEvent(event);
        }
    });

    listener = new GestureDetector.SimpleOnGestureListener() {
        @Override
        public boolean onDoubleTapEvent(MotionEvent event) {
            if (event.getAction() == MotionEvent.ACTION_UP) {
                View avatarLayout = findViewById(R.id.avatarLayout);
                if (avatarLayout.getVisibility() == View.VISIBLE) {
                    avatarLayout.setVisibility(View.GONE);
                } else {
                    avatarLayout.setVisibility(View.VISIBLE);
                }
                return true;
            }
            return false;
        }
    };
    final GestureDetector detector2 = new GestureDetector(this, listener);
    /*findViewById(R.id.responseText).setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return detector2.onTouchEvent(event);
        }
    });*/

    WebView responseView = (WebView) findViewById(R.id.responseText);
    responseView.getSettings().setJavaScriptEnabled(true);
    responseView.getSettings().setDomStorageEnabled(true);
    responseView.addJavascriptInterface(new WebAppInterface(this), "Android");

    findViewById(R.id.responseImageView).setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            View avatarLayout = findViewById(R.id.avatarLayout);
            if (avatarLayout.getVisibility() == View.VISIBLE) {
                avatarLayout.setVisibility(View.GONE);
            } else {
                avatarLayout.setVisibility(View.VISIBLE);
            }
        }
    });

    HttpGetImageAction.fetchImage(this, instance.avatar, this.imageView);
    HttpGetImageAction.fetchImage(this, instance.avatar, (ImageView) findViewById(R.id.responseImageView));

    final ChatConfig config = new ChatConfig();
    config.instance = instance.id;
    config.avatar = this.avatarId;
    if (MainActivity.translate && MainActivity.voice != null) {
        config.language = MainActivity.voice.language;
    }
    if (MainActivity.disableVideo) {
        config.avatarFormat = "image";
    } else {
        config.avatarFormat = MainActivity.webm ? "webm" : "mp4";
    }
    config.avatarHD = MainActivity.hd;
    config.speak = !MainActivity.deviceVoice;

    // This is required because of a bug in TextToSpeech that prevents onInit being called if an AsyncTask is called...
    Thread thread1 = new Thread() {
        public void run() {
            for (int count = 0; count < 5; count++) {
                if (ttsInit) {
                    break;
                }
                try {
                    Thread.sleep(1000);
                } catch (Exception exception) {
                }
            }
            HttpAction action = new HttpChatAction(ChatActivity.this, config);
            action.execute();
        }
    };
    thread1.start();
}
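The hands-free branch above relies on beginListening() and stopListening() helpers that are not part of this excerpt. A minimal sketch of what they might look like, driving the SpeechRecognizer created in onCreate; the extras shown are assumptions for illustration, not the actual ChatActivity code:

private void beginListening() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
    // Results arrive through the RecognitionListener callbacks registered on 'speech'.
    speech.startListening(intent);
}

private void stopListening() {
    speech.stopListening();
}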
From source file:com.neighbor.ex.tong.ui.activity.MainActivity2Activity.java
private void promptSpeechInput() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_PROMPT, getResources().getString(R.string.voice_prompt));
    startActivityForResult(intent, REQ_CODE_SPEECH_INPUT);
}
From source file:com.delexus.imitationzhihu.MySearchView.java
public MySearchView(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);

    final TintTypedArray a = TintTypedArray.obtainStyledAttributes(context, attrs, R.styleable.MySearchView,
            defStyleAttr, 0);

    final LayoutInflater inflater = LayoutInflater.from(context);
    inflater.inflate(R.layout.search_view, this);

    mSearchSrcTextView = (SearchAutoComplete) findViewById(R.id.my_search_src_text);
    mSearchSrcTextView.setSearchView(this);

    mNavigationButton = (ImageView) findViewById(R.id.my_search_button);
    mGoButton = (ImageView) findViewById(R.id.my_search_go_btn);
    mCloseButton = (ImageView) findViewById(R.id.my_search_close_btn);
    mVoiceButton = (ImageView) findViewById(R.id.my_search_voice_btn);

    // Set up icons and backgrounds.
    mNavigationButton.setImageDrawable(a.getDrawable(R.styleable.MySearchView_navigateIcon));
    mGoButton.setImageDrawable(a.getDrawable(R.styleable.MySearchView_goIcon));
    mCloseButton.setImageDrawable(a.getDrawable(R.styleable.MySearchView_closeIcon));
    mVoiceButton.setImageDrawable(a.getDrawable(R.styleable.MySearchView_voiceIcon));
    mSearchHintIcon = a.getDrawable(R.styleable.MySearchView_searchHintIcon);

    mNavigationButton.setOnClickListener(mOnClickListener);
    mCloseButton.setOnClickListener(mOnClickListener);
    mGoButton.setOnClickListener(mOnClickListener);
    mVoiceButton.setOnClickListener(mOnClickListener);
    mSearchSrcTextView.setOnClickListener(mOnClickListener);

    mSearchSrcTextView.addTextChangedListener(mTextWatcher);
    mSearchSrcTextView.setOnEditorActionListener(mOnEditorActionListener);
    mSearchSrcTextView.setOnKeyListener(mTextKeyListener);

    // Inform any listener of focus changes
    mSearchSrcTextView.setOnFocusChangeListener(new OnFocusChangeListener() {
        @Override
        public void onFocusChange(View v, boolean hasFocus) {
            if (mOnQueryTextFocusChangeListener != null) {
                mOnQueryTextFocusChangeListener.onFocusChange(MySearchView.this, hasFocus);
            }
        }
    });

    setIconifiedByDefault(a.getBoolean(R.styleable.MySearchView_iconifiedByDefault, true));

    final int maxWidth = a.getDimensionPixelSize(R.styleable.MySearchView_android_maxWidth, -1);
    if (maxWidth != -1) {
        setMaxWidth(maxWidth);
    }

    mDefaultQueryHint = a.getText(R.styleable.MySearchView_defaultQueryHint);
    mQueryHint = a.getText(R.styleable.MySearchView_queryHint);

    final int imeOptions = a.getInt(R.styleable.MySearchView_android_imeOptions, -1);
    if (imeOptions != -1) {
        setImeOptions(imeOptions);
    }

    final int inputType = a.getInt(R.styleable.MySearchView_android_inputType, -1);
    if (inputType != -1) {
        setInputType(inputType);
    }

    boolean focusable = true;
    focusable = a.getBoolean(R.styleable.MySearchView_android_inputType, focusable);
    setFocusable(focusable);

    a.recycle();

    // Save voice intent for later queries/launching
    mVoiceWebSearchIntent = new Intent(RecognizerIntent.ACTION_WEB_SEARCH);
    mVoiceWebSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    mVoiceWebSearchIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);

    mVoiceAppSearchIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    mVoiceAppSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

    updateViewsVisibility(mIconifiedByDefault);
    updateQueryHint();
}
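The constructor only stores mVoiceAppSearchIntent; when the voice button is tapped, a copy is typically completed with recognition extras and launched, along the lines of the framework SearchView. A sketch of such a helper; the method name and the specific extras are assumptions, not part of MySearchView above:

private void launchVoiceAppSearch() {
    Intent voiceIntent = new Intent(mVoiceAppSearchIntent);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    voiceIntent.putExtra(RecognizerIntent.EXTRA_PROMPT, mQueryHint);
    // Only the best match is needed to fill in a query.
    voiceIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 1);
    // FLAG_ACTIVITY_NEW_TASK was already added in the constructor.
    getContext().startActivity(voiceIntent);
}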
From source file:in.codehex.arrow.MainActivity.java
/**
 * Prompt the user to say something
 */
private void promptSpeechInput() {
    textToSpeech.stop();
    intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, Locale.getDefault());
    try {
        startActivityForResult(intent, Config.REQUEST_SPEECH_INPUT);
    } catch (ActivityNotFoundException e) {
        e.printStackTrace();
    }
}
From source file:io.github.vomitcuddle.SearchViewAllowEmpty.SearchView.java
public SearchView(Context context, AttributeSet attrs) {
    super(context, attrs);

    LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    inflater.inflate(R.layout.cba_search_view, this, true);

    mSearchButton = findViewById(R.id.search_button);
    mQueryTextView = (SearchAutoComplete) findViewById(R.id.search_src_text);
    mQueryTextView.setSearchView(this);

    mSearchEditFrame = findViewById(R.id.search_edit_frame);
    mSearchPlate = findViewById(R.id.search_plate);
    mSubmitArea = findViewById(R.id.submit_area);
    mSubmitButton = findViewById(R.id.search_go_btn);
    mCloseButton = (ImageView) findViewById(R.id.search_close_btn);
    mVoiceButton = findViewById(R.id.search_voice_btn);
    mSearchHintIcon = (ImageView) findViewById(R.id.search_mag_icon);

    mSearchButton.setOnClickListener(mOnClickListener);
    mCloseButton.setOnClickListener(mOnClickListener);
    mSubmitButton.setOnClickListener(mOnClickListener);
    mVoiceButton.setOnClickListener(mOnClickListener);
    mQueryTextView.setOnClickListener(mOnClickListener);

    mQueryTextView.addTextChangedListener(mTextWatcher);
    mQueryTextView.setOnEditorActionListener(mOnEditorActionListener);
    mQueryTextView.setOnItemClickListener(mOnItemClickListener);
    mQueryTextView.setOnItemSelectedListener(mOnItemSelectedListener);
    mQueryTextView.setOnKeyListener(mTextKeyListener);

    // Inform any listener of focus changes
    mQueryTextView.setOnFocusChangeListener(new OnFocusChangeListener() {
        public void onFocusChange(View v, boolean hasFocus) {
            if (mOnQueryTextFocusChangeListener != null) {
                mOnQueryTextFocusChangeListener.onFocusChange(SearchView.this, hasFocus);
            }
        }
    });

    TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.SearchView, 0, 0);
    setIconifiedByDefault(a.getBoolean(R.styleable.SearchView_iconifiedByDefault, true));
    int maxWidth = a.getDimensionPixelSize(R.styleable.SearchView_android_maxWidth, -1);
    if (maxWidth != -1) {
        setMaxWidth(maxWidth);
    }
    CharSequence queryHint = a.getText(R.styleable.SearchView_queryHint);
    if (!TextUtils.isEmpty(queryHint)) {
        setQueryHint(queryHint);
    }
    int imeOptions = a.getInt(R.styleable.SearchView_android_imeOptions, -1);
    if (imeOptions != -1) {
        setImeOptions(imeOptions);
    }
    int inputType = a.getInt(R.styleable.SearchView_android_inputType, -1);
    if (inputType != -1) {
        setInputType(inputType);
    }
    a.recycle();

    boolean focusable = true;
    a = context.obtainStyledAttributes(attrs, R.styleable.View, 0, 0);
    focusable = a.getBoolean(R.styleable.View_android_focusable, focusable);
    a.recycle();
    setFocusable(focusable);

    // Save voice intent for later queries/launching
    mVoiceWebSearchIntent = new Intent(RecognizerIntent.ACTION_WEB_SEARCH);
    mVoiceWebSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    mVoiceWebSearchIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);

    mVoiceAppSearchIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    mVoiceAppSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

    mDropDownAnchor = findViewById(mQueryTextView.getDropDownAnchor());
    if (mDropDownAnchor != null) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            addOnLayoutChangeListenerToDropDownAnchorSDK11();
        } else {
            addOnLayoutChangeListenerToDropDownAnchorBase();
        }
    }

    updateViewsVisibility(mIconifiedByDefault);
    updateQueryHint();
}
From source file:cm.aptoide.com.actionbarsherlock.widget.SearchView.java
public SearchView(Context context, AttributeSet attrs) {
    super(context, attrs);

    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.FROYO) {
        throw new IllegalStateException("SearchView is API 8+ only.");
    }

    LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
    inflater.inflate(R.layout.abs__search_view, this, true);

    mSearchButton = findViewById(R.id.abs__search_button);
    mQueryTextView = (SearchAutoComplete) findViewById(R.id.abs__search_src_text);
    mQueryTextView.setSearchView(this);

    mSearchEditFrame = findViewById(R.id.abs__search_edit_frame);
    mSearchPlate = findViewById(R.id.abs__search_plate);
    mSubmitArea = findViewById(R.id.abs__submit_area);
    mSubmitButton = findViewById(R.id.abs__search_go_btn);
    mCloseButton = (ImageView) findViewById(R.id.abs__search_close_btn);
    mVoiceButton = findViewById(R.id.abs__search_voice_btn);
    mSearchHintIcon = (ImageView) findViewById(R.id.abs__search_mag_icon);

    mSearchButton.setOnClickListener(mOnClickListener);
    mCloseButton.setOnClickListener(mOnClickListener);
    mSubmitButton.setOnClickListener(mOnClickListener);
    mVoiceButton.setOnClickListener(mOnClickListener);
    mQueryTextView.setOnClickListener(mOnClickListener);

    mQueryTextView.addTextChangedListener(mTextWatcher);
    mQueryTextView.setOnEditorActionListener(mOnEditorActionListener);
    mQueryTextView.setOnItemClickListener(mOnItemClickListener);
    mQueryTextView.setOnItemSelectedListener(mOnItemSelectedListener);
    mQueryTextView.setOnKeyListener(mTextKeyListener);

    // Inform any listener of focus changes
    mQueryTextView.setOnFocusChangeListener(new OnFocusChangeListener() {
        public void onFocusChange(View v, boolean hasFocus) {
            if (mOnQueryTextFocusChangeListener != null) {
                mOnQueryTextFocusChangeListener.onFocusChange(SearchView.this, hasFocus);
            }
        }
    });

    TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.SherlockSearchView, 0, 0);
    setIconifiedByDefault(a.getBoolean(R.styleable.SherlockSearchView_iconifiedByDefault, true));
    int maxWidth = a.getDimensionPixelSize(R.styleable.SherlockSearchView_android_maxWidth, -1);
    if (maxWidth != -1) {
        setMaxWidth(maxWidth);
    }
    CharSequence queryHint = a.getText(R.styleable.SherlockSearchView_queryHint);
    if (!TextUtils.isEmpty(queryHint)) {
        setQueryHint(queryHint);
    }
    int imeOptions = a.getInt(R.styleable.SherlockSearchView_android_imeOptions, -1);
    if (imeOptions != -1) {
        setImeOptions(imeOptions);
    }
    int inputType = a.getInt(R.styleable.SherlockSearchView_android_inputType, -1);
    if (inputType != -1) {
        setInputType(inputType);
    }
    a.recycle();

    boolean focusable = true;
    a = context.obtainStyledAttributes(attrs, R.styleable.SherlockView, 0, 0);
    focusable = a.getBoolean(R.styleable.SherlockView_android_focusable, focusable);
    a.recycle();
    setFocusable(focusable);

    // Save voice intent for later queries/launching
    mVoiceWebSearchIntent = new Intent(RecognizerIntent.ACTION_WEB_SEARCH);
    mVoiceWebSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    mVoiceWebSearchIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);

    mVoiceAppSearchIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    mVoiceAppSearchIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);

    mDropDownAnchor = findViewById(mQueryTextView.getDropDownAnchor());
    if (mDropDownAnchor != null) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mDropDownAnchor.addOnLayoutChangeListener(new OnLayoutChangeListener() {
                @Override
                public void onLayoutChange(View v, int left, int top, int right, int bottom,
                        int oldLeft, int oldTop, int oldRight, int oldBottom) {
                    adjustDropDownSizeAndPosition();
                }
            });
        } else {
            mDropDownAnchor.getViewTreeObserver()
                    .addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
                        @Override
                        public void onGlobalLayout() {
                            adjustDropDownSizeAndPosition();
                        }
                    });
        }
    }

    updateViewsVisibility(mIconifiedByDefault);
    updateQueryHint();
}