Java tutorial

This tutorial's MainActivity combines the Firebase FriendlyChat sample with the Microsoft Cognitive Services (Project Oxford) speech recognition client: speech captured from the microphone (or read from a bundled wave file) is transcribed by the Speech API and pushed to the Firebase Realtime Database as chat messages, which are rendered in a RecyclerView with left/right message bubbles.
/*
 * Copyright (c) Microsoft. All rights reserved.
 * Licensed under the MIT license.
 *
 * Project Oxford: http://ProjectOxford.ai
 *
 * ProjectOxford SDK GitHub:
 * https://github.com/Microsoft/ProjectOxford-ClientSDK
 *
 * Copyright (c) Microsoft Corporation
 * All rights reserved.
 *
 * MIT License:
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

package com.igo.SpeechReco;

import android.app.AlertDialog;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.speech.tts.TextToSpeech;
import android.support.annotation.NonNull;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.text.Editable;
import android.text.InputFilter;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.RadioGroup;
import android.widget.TextView;
import android.widget.Toast;

import com.bumptech.glide.Glide;
import com.firebase.ui.database.FirebaseRecyclerAdapter;
import com.google.android.gms.auth.api.Auth;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;

import com.microsoft.bing.speech.SpeechClientStatus;
import com.microsoft.cognitiveservices.speechrecognition.DataRecognitionClient;
import com.microsoft.cognitiveservices.speechrecognition.ISpeechRecognitionServerEvents;
import com.microsoft.cognitiveservices.speechrecognition.MicrophoneRecognitionClient;
import com.microsoft.cognitiveservices.speechrecognition.RecognitionResult;
import com.microsoft.cognitiveservices.speechrecognition.RecognitionStatus;
import com.microsoft.cognitiveservices.speechrecognition.SpeechRecognitionMode;
import com.microsoft.cognitiveservices.speechrecognition.SpeechRecognitionServiceFactory;

import de.hdodenhof.circleimageview.CircleImageView;

import java.io.InputStream;
import java.util.concurrent.TimeUnit;
public class MainActivity extends AppCompatActivity
        implements ISpeechRecognitionServerEvents, GoogleApiClient.OnConnectionFailedListener {

    // ViewHolder for messages written by other users (left-aligned bubbles).
    public static class MessageViewHolderLeft extends RecyclerView.ViewHolder {
        public TextView messageTextView;
        public TextView messengerTextView;
        public CircleImageView messengerImageView;

        public MessageViewHolderLeft(View v) {
            super(v);
            messageTextView = (TextView) itemView.findViewById(R.id.messageTextViewLeft);
            messengerTextView = (TextView) itemView.findViewById(R.id.messengerTextViewLeft);
            messengerImageView = (CircleImageView) itemView.findViewById(R.id.messengerImageViewLeft);
        }
    }

    // ViewHolder for messages written by the signed-in user (right-aligned bubbles).
    public static class MessageViewHolderRight extends RecyclerView.ViewHolder {
        public TextView messageTextView;
        public TextView messengerTextView;
        public CircleImageView messengerImageView;

        public MessageViewHolderRight(View v) {
            super(v);
            messageTextView = (TextView) itemView.findViewById(R.id.messageTextViewRight);
            messengerTextView = (TextView) itemView.findViewById(R.id.messengerTextViewRight);
            messengerImageView = (CircleImageView) itemView.findViewById(R.id.messengerImageViewRight);
        }
    }

    private static final String TAG = "MainActivity";
    public static final String MESSAGES_CHILD = "messages";
    public static final int DEFAULT_MSG_LENGTH_LIMIT = 1000;
    public static final String ANONYMOUS = "anonymous";

    private String mUsername;
    private String mPhotoUrl;
    private SharedPreferences mSharedPreferences;
    private GoogleApiClient mGoogleApiClient;
    private Button mSendButton;
    private RecyclerView mMessageRecyclerView;
    private LinearLayoutManager mLinearLayoutManager;
    private ProgressBar mProgressBar;
    private EditText mMessageEditText;
    private FirebaseAuth mFirebaseAuth;
    private FirebaseUser mFirebaseUser;
    private DatabaseReference mFirebaseDatabaseReference;
    private FirebaseRecyclerAdapter<FriendlyMessage, RecyclerView.ViewHolder> mFirebaseAdapter;

    //private static final String TAG = "ChatActivity";
    private ChatArrayAdapter chatArrayAdapter;
    private ListView listView;
    private EditText chatText;
    private Button buttonSend;
    TextToSpeech tts;

    int m_waitSeconds = 0;
    DataRecognitionClient dataClient = null;
    MicrophoneRecognitionClient micClient = null;
    FinalResponseStatus isReceivedResponse = FinalResponseStatus.NotReceived;
    EditText _logText;
    TextView _tview;
    RadioGroup _radioGroup;
    Button _buttonSelectMode;
    Button _startButton;

    public enum FinalResponseStatus { NotReceived, OK, Timeout }

    /**
     * Gets the primary subscription key.
     */
    public String getPrimaryKey() {
        return this.getString(R.string.primaryKey);
    }

    /**
     * Gets the LUIS application identifier.
     * @return The LUIS application identifier.
     */
    private String getLuisAppId() {
        return this.getString(R.string.luisAppID);
    }

    /**
     * Gets the LUIS subscription identifier.
     * @return The LUIS subscription identifier.
     */
    private String getLuisSubscriptionID() {
        return this.getString(R.string.luisSubscriptionID);
    }

    /**
     * Gets a value indicating whether or not to use the microphone.
     * @return true if [use microphone]; otherwise, false.
     */
    private Boolean getUseMicrophone() {
        return true;
    }

    /**
     * Gets a value indicating whether LUIS results are desired.
     * @return true if LUIS results are to be returned; otherwise, false.
     */
    private Boolean getWantIntent() {
        return false;
    }

    /**
     * Gets the current speech recognition mode.
     * @return The speech recognition mode.
     */
    private SpeechRecognitionMode getMode() {
        return SpeechRecognitionMode.LongDictation;
    }
    /**
     * Gets the default locale.
     * @return The default locale.
     */
    private String getDefaultLocale() {
        return "es-MX";
    }

    /**
     * Gets the short wave file path.
     * @return The short wave file.
     */
    private String getShortWaveFile() {
        return "whatstheweatherlike.wav";
    }

    /**
     * Gets the long wave file path.
     * @return The long wave file.
     */
    private String getLongWaveFile() {
        return "batman.wav";
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);

        // Default username is anonymous until the signed-in user is known.
        mUsername = ANONYMOUS;

        // Initialize Firebase Auth.
        mFirebaseAuth = FirebaseAuth.getInstance();
        mFirebaseUser = mFirebaseAuth.getCurrentUser();
        if (mFirebaseUser == null) {
            // Not signed in, launch the Sign In activity.
            startActivity(new Intent(this, SignInActivity.class));
            finish();
            return;
        } else {
            mUsername = mFirebaseUser.getDisplayName();
            if (mFirebaseUser.getPhotoUrl() != null) {
                mPhotoUrl = mFirebaseUser.getPhotoUrl().toString();
            }
        }

        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .enableAutoManage(this /* FragmentActivity */, this /* OnConnectionFailedListener */)
                .addApi(Auth.GOOGLE_SIGN_IN_API)
                .build();

        // Initialize ProgressBar and RecyclerView.
        mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
        mMessageRecyclerView = (RecyclerView) findViewById(R.id.messageRecyclerView);
        mLinearLayoutManager = new LinearLayoutManager(this);
        mLinearLayoutManager.setStackFromEnd(true);
        mMessageRecyclerView.setLayoutManager(mLinearLayoutManager);

        // Bind new child entries under "messages" to the RecyclerView.
        mFirebaseDatabaseReference = FirebaseDatabase.getInstance().getReference();
        mFirebaseAdapter = new FirebaseRecyclerAdapter<FriendlyMessage, RecyclerView.ViewHolder>(
                FriendlyMessage.class,
                R.layout.item_message_left,
                RecyclerView.ViewHolder.class,
                mFirebaseDatabaseReference.child(MESSAGES_CHILD)) {

            @Override
            protected void populateViewHolder(final RecyclerView.ViewHolder viewHolder,
                                              final FriendlyMessage friendlyMessage, int position) {
                mProgressBar.setVisibility(ProgressBar.INVISIBLE);
                if (friendlyMessage.getName().equals(mUsername)) {
                    // Message sent by the current user: right-aligned bubble.
                    MessageViewHolderRight viewHolderR = (MessageViewHolderRight) viewHolder;
                    viewHolderR.messageTextView.setText(friendlyMessage.getText());
                    viewHolderR.messengerTextView.setText(friendlyMessage.getName());
                    if (friendlyMessage.getPhotoUrl() == null) {
                        viewHolderR.messengerImageView.setImageDrawable(ContextCompat.getDrawable(
                                MainActivity.this, R.drawable.ic_account_circle_black_36dp));
                    } else {
                        Glide.with(MainActivity.this)
                                .load(friendlyMessage.getPhotoUrl())
                                .into(viewHolderR.messengerImageView);
                    }
                } else {
                    // Message sent by another user: left-aligned bubble.
                    MessageViewHolderLeft viewHolderL = (MessageViewHolderLeft) viewHolder;
                    viewHolderL.messageTextView.setText(friendlyMessage.getText());
                    viewHolderL.messengerTextView.setText(friendlyMessage.getName());
                    if (friendlyMessage.getPhotoUrl() == null) {
                        viewHolderL.messengerImageView.setImageDrawable(ContextCompat.getDrawable(
                                MainActivity.this, R.drawable.ic_account_circle_black_36dp));
                    } else {
                        Glide.with(MainActivity.this)
                                .load(friendlyMessage.getPhotoUrl())
                                .into(viewHolderL.messengerImageView);
                    }
                }
            }

            @Override
            public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
                switch (viewType) {
                    case 1:
                        View userType1 = LayoutInflater.from(parent.getContext())
                                .inflate(R.layout.item_message_left, parent, false);
                        return new MessageViewHolderLeft(userType1);
                    case 2:
                        View userType2 = LayoutInflater.from(parent.getContext())
                                .inflate(R.layout.item_message_right, parent, false);
                        return new MessageViewHolderRight(userType2);
                }
                return super.onCreateViewHolder(parent, viewType);
            }

            @Override
            public int getItemViewType(int position) {
                // View type 2 = the current user's own messages, view type 1 = everyone else's.
                FriendlyMessage user = getItem(position);
                if (user.getName().equals(mUsername)) {
                    return 2;
                } else {
                    return 1;
                }
            }
        };

        mFirebaseAdapter.registerAdapterDataObserver(new RecyclerView.AdapterDataObserver() {
            @Override
            public void onItemRangeInserted(int positionStart, int itemCount) {
                super.onItemRangeInserted(positionStart, itemCount);
                int friendlyMessageCount = mFirebaseAdapter.getItemCount();
                int lastVisiblePosition = mLinearLayoutManager.findLastCompletelyVisibleItemPosition();
                // If the recycler view is initially being loaded or the
                // user is at the bottom of the list, scroll to the bottom
                // of the list to show the newly added message.
                if (lastVisiblePosition == -1 ||
                        (positionStart >= (friendlyMessageCount - 1) &&
                                lastVisiblePosition == (positionStart - 1))) {
                    mMessageRecyclerView.scrollToPosition(positionStart);
                }
            }
        });

        mMessageRecyclerView.setLayoutManager(mLinearLayoutManager);
        mMessageRecyclerView.setAdapter(mFirebaseAdapter);

        mMessageEditText = (EditText) findViewById(R.id.messageEditText);
        mMessageEditText.setFilters(new InputFilter[]{new InputFilter.LengthFilter(
                mSharedPreferences.getInt(CodelabPreferences.FRIENDLY_MSG_LENGTH, DEFAULT_MSG_LENGTH_LIMIT))});
        mMessageEditText.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
            }

            @Override
            public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
                // Only enable the Send button when there is non-whitespace text to send.
                if (charSequence.toString().trim().length() > 0) {
                    mSendButton.setEnabled(true);
                } else {
                    mSendButton.setEnabled(false);
                }
            }

            @Override
            public void afterTextChanged(Editable editable) {
            }
        });

        mSendButton = (Button) findViewById(R.id.sendButton);
        mSendButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Push the typed message to Firebase and clear the input field.
                FriendlyMessage friendlyMessage = new FriendlyMessage(
                        mMessageEditText.getText().toString(), mUsername, mPhotoUrl);
                mFirebaseDatabaseReference.child(MESSAGES_CHILD).push().setValue(friendlyMessage);
                mMessageEditText.setText("");
            }
        });

        //********************************************************************************

        Toolbar myToolbar = (Toolbar) findViewById(R.id.my_toolbar);
        setSupportActionBar(myToolbar);

        /*tts = new TextToSpeech(MainActivity.this, new TextToSpeech.OnInitListener() {
            @Override
            public void onInit(int status) {
                if (status == TextToSpeech.SUCCESS) {
                    int result = tts.setLanguage(new Locale("es", "ES"));
                    if (result == TextToSpeech.LANG_MISSING_DATA || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                        Log.e("error", "This language is not supported");
                    } else {
                        ConvertTextToSpeech();
                    }
                } else {
                    Log.e("error", "Initialization failed!");
                }
            }
        });*/

        //buttonSend = (Button) findViewById(R.id.send);
        //listView = (ListView) findViewById(R.id.msgview);
        //chatArrayAdapter = new ChatArrayAdapter(getApplicationContext(), R.layout.right);
        //listView.setAdapter(chatArrayAdapter);

        /*chatText = (EditText) findViewById(R.id.msg);
        chatText.setOnKeyListener(new View.OnKeyListener() {
            public boolean onKey(View v, int keyCode, KeyEvent event) {
                if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER)) {
                    ConvertTextToSpeech();
                    return sendChatMessage(false, chatText.getText().toString());
                }
                return false;
            }
        });

        buttonSend.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                ConvertTextToSpeech();
                sendChatMessage(false, chatText.getText().toString());
            }
        });*/

        //listView.setTranscriptMode(AbsListView.TRANSCRIPT_MODE_ALWAYS_SCROLL);
        //listView.setAdapter(chatArrayAdapter);

        // To scroll the list view to the bottom on data change:
        /*chatArrayAdapter.registerDataSetObserver(new DataSetObserver() {
            @Override
            public void onChanged() {
                super.onChanged();
                listView.setSelection(chatArrayAdapter.getCount() - 1);
            }
        });*/

        //this._logText = (EditText) findViewById(R.id.editText1);
        //this._radioGroup = (RadioGroup) findViewById(R.id.groupMode);
        //this._buttonSelectMode = (Button) findViewById(R.id.buttonSelectMode);
        this._startButton = (Button) findViewById(R.id.button1);

        // Warn if the Speech API subscription key has not been filled in yet.
        if (getString(R.string.primaryKey).startsWith("Please")) {
            new AlertDialog.Builder(this)
                    .setTitle(getString(R.string.add_subscription_key_tip_title))
                    .setMessage(getString(R.string.add_subscription_key_tip))
                    .setCancelable(false)
                    .show();
        }

        // Set up the buttons.
        final MainActivity This = this;
        this._startButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0) {
                This.StartButton_Click(arg0);
            }
        });

        /*this._buttonSelectMode.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0) {
                This.ShowMenu(This._radioGroup.getVisibility() == View.INVISIBLE);
            }
        });*/

        /*this._radioGroup.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(RadioGroup rGroup, int checkedId) {
                This.RadioButton_Click(rGroup, checkedId);
            }
        });*/

        //this.ShowMenu(true);
    }

    /*
    @Override
    protected void onPause() {
        if (tts != null) {
            tts.stop();
            tts.shutdown();
        }
        super.onPause();
    }
    */

    /*private void ConvertTextToSpeech() {
        String speak = chatText.getText().toString();
        tts.speak(speak, TextToSpeech.QUEUE_FLUSH, null);
    }*/

    private boolean sendChatMessage(boolean isMic, String mytext) {
        if (isMic) {
            //chatArrayAdapter.add(new ChatMessage(false, mytext));
        } else {
            //chatArrayAdapter.add(new ChatMessage(true, mytext));
            //chatText.setText("");
        }
        return true;
    }

    /*private void ShowMenu(boolean show) {
        if (show) {
            //this._radioGroup.setVisibility(View.VISIBLE);
            this._logText.setVisibility(View.INVISIBLE);
        } else {
            //this._radioGroup.setVisibility(View.INVISIBLE);
            this._logText.setText("");
            this._logText.setVisibility(View.VISIBLE);
        }
    }*/

    /**
     * Handles the Click event of the _startButton control.
     */
    private void StartButton_Click(View arg0) {
        this._startButton.setEnabled(false);
        this._startButton.setText("Reconociendo");
        // Wait up to 20 seconds for a short phrase, 100 seconds for long dictation.
        this.m_waitSeconds = this.getMode() == SpeechRecognitionMode.ShortPhrase ? 20 : 100;
        //this.ShowMenu(false);
        this.LogRecognitionStart();

        if (this.getUseMicrophone()) {
            // Recognize speech captured from the device microphone.
            if (this.micClient == null) {
                if (this.getWantIntent()) {
                    this.WriteLine("--- Start microphone dictation with Intent detection ----");
                    this.micClient = SpeechRecognitionServiceFactory.createMicrophoneClientWithIntent(
                            this,
                            this.getDefaultLocale(),
                            this,
                            this.getPrimaryKey(),
                            this.getLuisAppId(),
                            this.getLuisSubscriptionID());
                } else {
                    this.micClient = SpeechRecognitionServiceFactory.createMicrophoneClient(
                            this,
                            this.getMode(),
                            this.getDefaultLocale(),
                            this,
                            this.getPrimaryKey());
                }
            }
            this.micClient.startMicAndRecognition();
        } else {
            // Recognize speech from a bundled wave file instead of the microphone.
            if (null == this.dataClient) {
                if (this.getWantIntent()) {
                    this.dataClient = SpeechRecognitionServiceFactory.createDataClientWithIntent(
                            this,
                            this.getDefaultLocale(),
                            this,
                            this.getPrimaryKey(),
                            this.getLuisAppId(),
                            this.getLuisSubscriptionID());
                } else {
                    this.dataClient = SpeechRecognitionServiceFactory.createDataClient(
                            this,
                            this.getMode(),
                            this.getDefaultLocale(),
                            this,
                            this.getPrimaryKey());
                }
            }
            this.SendAudioHelper((this.getMode() == SpeechRecognitionMode.ShortPhrase)
                    ? this.getShortWaveFile()
                    : this.getLongWaveFile());
        }
    }

    /**
     * Logs the recognition start.
     */
    private void LogRecognitionStart() {
        String recoSource;
        if (this.getUseMicrophone()) {
            recoSource = "microphone";
        } else if (this.getMode() == SpeechRecognitionMode.ShortPhrase) {
            recoSource = "short wav file";
        } else {
            recoSource = "long wav file";
        }
        this.WriteLine("\n--- Start speech recognition using " + recoSource + " with "
                + this.getMode() + " mode in " + this.getDefaultLocale() + " language ----\n\n");
    }

    private void SendAudioHelper(String filename) {
        RecognitionTask doDataReco = new RecognitionTask(this.dataClient, this.getMode(), filename);
        try {
            doDataReco.execute().get(m_waitSeconds, TimeUnit.SECONDS);
        } catch (Exception e) {
            doDataReco.cancel(true);
            isReceivedResponse = FinalResponseStatus.Timeout;
        }
    }

    public void onFinalResponseReceived(final RecognitionResult response) {
        boolean isFinalDictationMessage = this.getMode() == SpeechRecognitionMode.LongDictation
                && (response.RecognitionStatus == RecognitionStatus.EndOfDictation
                || response.RecognitionStatus == RecognitionStatus.DictationEndSilenceTimeout
                || response.RecognitionStatus == RecognitionStatus.InitialSilenceTimeout);
        if (null != this.micClient && this.getUseMicrophone()
                && ((this.getMode() == SpeechRecognitionMode.ShortPhrase) || isFinalDictationMessage)) {
            // We got the final result, so we can end the mic recognition. No need to do this
            // for dataClient, since we already called endAudio() on it as soon as we were done
            // sending all the data.
            this.micClient.endMicAndRecognition();
        }

        if (isFinalDictationMessage) {
            this._startButton.setEnabled(true);
            this._startButton.setText("Comenzar");
            this.isReceivedResponse = FinalResponseStatus.OK;
        }

        if (!isFinalDictationMessage) {
            /*this.WriteLine("********* Final n-BEST Results *********");
            for (int i = 0; i < response.Results.length; i++) {
                this.WriteLine("[" + i + "]" + " Confidence=" + response.Results[i].Confidence
                        + " Text=\"" + response.Results[i].DisplayText + "\"");
            }
            this.WriteLine();*/
            if (response.Results.length > 0) {
                // Post the top recognition hypothesis to Firebase as a chat message.
                FriendlyMessage friendlyMessage = new FriendlyMessage(
                        response.Results[0].DisplayText, mUsername, mPhotoUrl);
                mFirebaseDatabaseReference.child(MESSAGES_CHILD).push().setValue(friendlyMessage);
                //mMessageEditText.setText("");
                sendChatMessage(true, response.Results[0].DisplayText);
            }
        }
    }

    /**
     * Called when a final response is received and its intent is parsed.
     */
    public void onIntentReceived(final String payload) {
        this.WriteLine("--- Intent received by onIntentReceived() ---");
        this.WriteLine(payload);
        this.WriteLine();
    }

    public void onPartialResponseReceived(final String response) {
        /*this.WriteLine("--- Partial result received by onPartialResponseReceived() ---");
        this.WriteLine(response);
        this.WriteLine();*/
        return;
    }

    public void onError(final int errorCode, final String response) {
        this._startButton.setEnabled(true);
        Log.e(TAG, "Error code: " + SpeechClientStatus.fromInt(errorCode) + " " + errorCode);
        Log.e(TAG, "Error text: " + response);
    }

    /**
     * Called when the microphone status has changed.
     * @param recording The current recording state.
     */
    public void onAudioEvent(boolean recording) {
        /*this.WriteLine("--- Microphone status change received by onAudioEvent() ---");
        this.WriteLine("********* Microphone status: " + recording + " *********");
        if (recording) {
            this.WriteLine("Please start speaking.");
        }
        WriteLine();*/
        if (!recording) {
            this.micClient.endMicAndRecognition();
            this._startButton.setEnabled(true);
        }
    }

    @Override
    public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
        // An unresolvable error has occurred and Google APIs (including Sign-In) will not
        // be available.
        Log.d(TAG, "onConnectionFailed:" + connectionResult);
        Toast.makeText(this, "Google Play Services error.", Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onStart() {
        super.onStart();
        // Check if user is signed in.
        // TODO: Add code to check if user is signed in.
    }

    /**
     * Writes the line.
     */
    private void WriteLine() {
        this.WriteLine("");
    }

    /**
     * Writes the line.
     * @param text The line to write.
     */
    private void WriteLine(String text) {
        //this._logText.append(text + "\n");
    }

    /**
     * Handles the Click event of the RadioButton control.
     * @param rGroup The radio grouping.
     * @param checkedId The checkedId.
     */
    private void RadioButton_Click(RadioGroup rGroup, int checkedId) {
        // Reset everything.
        if (this.micClient != null) {
            this.micClient.endMicAndRecognition();
            try {
                this.micClient.finalize();
            } catch (Throwable throwable) {
                throwable.printStackTrace();
            }
            this.micClient = null;
        }

        if (this.dataClient != null) {
            try {
                this.dataClient.finalize();
            } catch (Throwable throwable) {
                throwable.printStackTrace();
            }
            this.dataClient = null;
        }

        //this.ShowMenu(false);
        this._startButton.setEnabled(true);
    }

    /*
     * Speech recognition with data (for example from a file or audio source).
     * The data is broken up into buffers and each buffer is sent to the Speech Recognition Service.
     * No modification is done to the buffers, so the user can apply their
     * own VAD (Voice Activity Detection) or silence detection.
     *
     * @param dataClient
     * @param recoMode
     * @param filename
     */
    private class RecognitionTask extends AsyncTask<Void, Void, Void> {
        DataRecognitionClient dataClient;
        SpeechRecognitionMode recoMode;
        String filename;

        RecognitionTask(DataRecognitionClient dataClient, SpeechRecognitionMode recoMode, String filename) {
            this.dataClient = dataClient;
            this.recoMode = recoMode;
            this.filename = filename;
        }

        @Override
        protected Void doInBackground(Void... params) {
            // Note: for wave files we can send data from the file straight to the server.
            // If you do not have a wave file and instead have raw data (for example audio coming
            // over Bluetooth), then before sending any audio data you must first send a
            // SpeechAudioFormat descriptor that describes the layout and format of your raw
            // audio, via DataRecognitionClient's sendAudioFormat() method.
            // String filename = recoMode == SpeechRecognitionMode.ShortPhrase ? "whatstheweatherlike.wav" : "batman.wav";
            InputStream fileStream = null;
            try {
                fileStream = getAssets().open(filename);
                int bytesRead;
                byte[] buffer = new byte[1024];
                do {
                    // Read the next chunk of audio data into the byte buffer.
                    bytesRead = fileStream.read(buffer);
                    if (bytesRead > -1) {
                        // Send the audio data to the service.
                        dataClient.sendAudio(buffer, bytesRead);
                    }
                } while (bytesRead > 0);
            } catch (Throwable throwable) {
                throwable.printStackTrace();
            } finally {
                // Signal that no more audio is coming, then release the stream.
                dataClient.endAudio();
                if (fileStream != null) {
                    try {
                        fileStream.close();
                    } catch (Throwable t) {
                        t.printStackTrace();
                    }
                }
            }
            return null;
        }
    }
}
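The activity above refers to two helper classes that live elsewhere in the project and are not shown in this file: the FriendlyMessage model that is pushed to and read from the Realtime Database, and CodelabPreferences, whose FRIENDLY_MSG_LENGTH key caps the message length in onCreate(). It also expects primaryKey, luisAppID, luisSubscriptionID, and the add_subscription_key_tip strings to be defined in res/values/strings.xml. Below is a minimal sketch of what the two classes might look like, inferred only from how they are used above; the field names, the no-argument constructor (which Firebase's data mapper needs for deserialization), and the preference key value are assumptions, and each class would go in its own .java file.

// Hypothetical sketch of the FriendlyMessage model used above; the real class in your
// project may differ. Firebase's mapper needs a public no-argument constructor and
// getters matching the stored fields.
public class FriendlyMessage {

    private String text;
    private String name;
    private String photoUrl;

    // Required by Firebase for deserialization.
    public FriendlyMessage() {
    }

    public FriendlyMessage(String text, String name, String photoUrl) {
        this.text = text;
        this.name = name;
        this.photoUrl = photoUrl;
    }

    public String getText() {
        return text;
    }

    public String getName() {
        return name;
    }

    public String getPhotoUrl() {
        return photoUrl;
    }
}

// Hypothetical sketch of CodelabPreferences: the only member this tutorial relies on is
// the preference key read in onCreate() to cap the message length. The key's string
// value here is an assumption.
public class CodelabPreferences {
    public static final String FRIENDLY_MSG_LENGTH = "friendly_msg_length";
}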