List of usage examples for android.media MediaPlayer create
public static MediaPlayer create(Context context, int resid)
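The examples below all follow the same lifecycle: create() returns a player that is already prepared (or null if setup fails), so callers start() it directly and release() it when playback is done. A minimal sketch of that pattern — context, R.raw.sample, and the listener are placeholders, not code from any source file listed here:

    // Sketch of the typical create()/start()/release() lifecycle (placeholder names)
    MediaPlayer player = MediaPlayer.create(context, R.raw.sample);
    if (player != null) {   // create() returns null when the player cannot be set up
        player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
            @Override
            public void onCompletion(MediaPlayer mp) {
                mp.release();   // free native resources once the clip finishes
            }
        });
        player.start();         // no prepare() call needed; create() already prepared the player
    }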
From source file:com.example.android.bangla.FamilyFragment.java
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View rootView = inflater.inflate(R.layout.word_list, container, false);

    // Create and setup the AudioManager to request audio focus
    mAudioManager = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);

    // Create a list of words
    final ArrayList<Word> words = new ArrayList<Word>();
    words.add(new Word(R.string.family_father, R.string.bangla_family_father, R.drawable.family_father, R.raw.family_father));
    words.add(new Word(R.string.family_mother, R.string.bangla_family_mother, R.drawable.family_mother, R.raw.family_mother));
    words.add(new Word(R.string.family_son, R.string.bangla_family_son, R.drawable.family_son, R.raw.family_son));
    words.add(new Word(R.string.family_daughter, R.string.bangla_family_daughter, R.drawable.family_daughter, R.raw.family_daughter));
    words.add(new Word(R.string.family_older_brother, R.string.bangla_family_older_brother, R.drawable.family_older_brother, R.raw.family_older_brother));
    words.add(new Word(R.string.family_younger_brother, R.string.bangla_family_younger_brother, R.drawable.family_younger_brother, R.raw.family_younger_brother));
    words.add(new Word(R.string.family_older_sister, R.string.bangla_family_older_sister, R.drawable.family_older_sister, R.raw.family_older_sister));
    words.add(new Word(R.string.family_younger_sister, R.string.bangla_family_younger_sister, R.drawable.family_younger_sister, R.raw.family_younger_sister));
    words.add(new Word(R.string.family_grandmother, R.string.bangla_family_grandmother, R.drawable.family_grandmother, R.raw.family_grandmother));
    words.add(new Word(R.string.family_grandfather, R.string.bangla_family_grandfather, R.drawable.family_grandfather, R.raw.family_grandfather));

    WordAdapter adapter = new WordAdapter(getActivity(), words, R.color.category_family);
    ListView listView = (ListView) rootView.findViewById(R.id.list);
    listView.setAdapter(adapter);

    // Set a click listener to play the audio when the list item is clicked on
    listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> adapterView, View view, int position, long l) {
            releaseMediaPlayer();
            Word word = words.get(position);
            int result = mAudioManager.requestAudioFocus(mOnAudioFocusChangeListener,
                    AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);
            if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                mMediaPlayer = MediaPlayer.create(getActivity(), word.getAudioResourceId());
                mMediaPlayer.start();
                mMediaPlayer.setOnCompletionListener(mCompletionListener);
            }
        }
    });
    return rootView;
}
From source file:net.ddns.mlsoftlaberge.trycorder.TryviscamFragment.java
private void buttonbad() {
    MediaPlayer mediaPlayer = MediaPlayer.create(getActivity().getBaseContext(), R.raw.denybeep1);
    mediaPlayer.start(); // no need to call prepare(); create() does that for you
}
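The snippet above never releases the player, so each button press leaks a MediaPlayer instance. A common fix (a sketch added here, not part of the original source) is to release it when the beep finishes:

    mediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
        @Override
        public void onCompletion(MediaPlayer mp) {
            mp.release(); // avoid keeping one native player alive per click
        }
    });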
From source file:com.HskPackage.HskNamespace.HSK1ProjectActivity.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.main);

    final TextView codice = (TextView) findViewById(R.id.codice);
    final Button carattere = (Button) findViewById(R.id.carattere);
    final TextView fonetica = (TextView) findViewById(R.id.fonetica);
    final TextView significato = (TextView) findViewById(R.id.significato);

    /*********** CREATE A DATABASE ******************************************************/
    final String DB_PATH = "/data/data/com.HskPackage.HskNamespace/";
    final String DB_NAME = "chineseX.db";
    SQLiteDatabase db = null;
    boolean exists = (new File(DB_PATH + DB_NAME)).exists();
    AssetManager assetManager = getAssets();
    if (!exists) {
        try {
            InputStream in = assetManager.open(DB_NAME);
            OutputStream out = new FileOutputStream(DB_PATH + DB_NAME);
            copyFile(in, out);
            in.close();
            out.flush();
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        File dbFile = new File(DB_PATH + DB_NAME);
        db = SQLiteDatabase.openOrCreateDatabase(dbFile, null);
    } else {
        File dbFile = new File(DB_PATH + DB_NAME);
        db = SQLiteDatabase.openOrCreateDatabase(dbFile, null);
    }

    final Integer valore = 1;
    //String query = "SELECT * FROM chineseX";
    String query = "SELECT * FROM chineseX where _id = ? ";
    String[] selectionArgs = { valore.toString() };
    Cursor cursor = null;
    cursor = db.rawQuery(query, selectionArgs);
    //cursor = db.rawQuery(query, null);
    int count = cursor.getCount();
    System.out.println("il numero di dati contenuti nel database " + count);
    while (cursor.moveToNext()) {
        long id = cursor.getLong(0);
        System.out.println("Questo l'ID ====>" + id);
        scodice = cursor.getString(1);
        codice.setText(scodice);
        System.out.println("Questo il codice ====>" + codice);
        scarattere = cursor.getString(2);
        carattere.setText(scarattere);
        System.out.println("Questo il carattere ====>" + carattere);
        sfonetica = cursor.getString(3);
        fonetica.setText(sfonetica);
        System.out.println("Questo il fonet ====>" + fonetica);
        ssignificato = cursor.getString(4);
        significato.setText("?? - Visualizza Significato");
        System.out.println("Questo il carattere ====>" + ssignificato);
    }
    // end of database work
    db.close();

    // set up sound button
    final MediaPlayer mpButtonClick = MediaPlayer.create(this, R.raw.hangout_ringtone);
    //final MediaPlayer chword001 = MediaPlayer.create(this, R.raw.ayi001);

    /*
    // set up change Images
    miaImmagine = (ImageView) findViewById(R.id.Image);  // declare the ImageView object
    miaImmagine.setImageResource(R.drawable.uno1);       // associate the image with picture one
    // set a click listener on the image
    miaImmagine.setOnClickListener(new OnClickListener() {
        public void onClick(View arg0) {
            //chword001.start();
        }
    });
    */

    final Intent first = new Intent(this, Activity2.class);
    final Intent immagine = new Intent(this, Activity3.class);
    /*
     * An Intent is defined in the javadoc of the android.content.Intent class as an
     * "abstract description of an operation to be performed".
     * This is an EXPLICIT intent because we know the recipient.
     * We pass as parameters the current context and the class that identifies the destination activity.
     * It is important that the class is registered in AndroidManifest.xml.
     */
    Button b = (Button) this.findViewById(R.id.button1);
    Button b2 = (Button) this.findViewById(R.id.button2);
    Button b3 = (Button) this.findViewById(R.id.carattere);
    b.setOnClickListener(new OnClickListener() {
        public void onClick(View arg0) {
            Integer valore = 1;
            valore = valore + 1;
            if (valore >= 153) {
                valore = 1;
            }
            System.out.println("AVANTI" + valore);
            first.putExtra("AVANTI", valore);
            startActivity(first);
            finish();
            mpButtonClick.start();
        }
    });
    b2.setOnClickListener(new OnClickListener() {
        public void onClick(View arg0) {
            Integer valore = 153;
            System.out.println("AVANTI == >" + valore);
            first.putExtra("AVANTI", valore);
            startActivity(first);
            finish();
            mpButtonClick.start();
        }
    });
    b3.setOnClickListener(new OnClickListener() {
        public void onClick(View arg0) {
            Integer valore = 1;
            System.out.println("AVANTI" + valore);
            immagine.putExtra("AVANTI", valore);
            startActivity(immagine);
            finish();
            mpButtonClick.start();
        }
    });
}
From source file:edu.sfsu.csc780.chathub.ui.MainActivity.java
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    DesignUtils.applyColorfulTheme(this);
    setContentView(R.layout.activity_main);
    final Context context = this;
    soundPreference = this.getString(R.string.play_sounds);
    mSentSound = MediaPlayer.create(context, R.raw.sentmessage);
    mStartRecordSound = MediaPlayer.create(context, R.raw.recording_beep);
    mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);

    // Set default username is anonymous.
    mUsername = ANONYMOUS;

    // Initialize Auth
    mAuth = FirebaseAuth.getInstance();
    mUser = mAuth.getCurrentUser();
    if (mUser == null) {
        startActivity(new Intent(this, SignInActivity.class));
        finish();
        return;
    } else {
        mUsername = mUser.getDisplayName();
        if (mUser.getPhotoUrl() != null) {
            mPhotoUrl = mUser.getPhotoUrl().toString();
        }
    }

    mGoogleApiClient = new GoogleApiClient.Builder(this)
            .enableAutoManage(this /* FragmentActivity */, this /* OnConnectionFailedListener */)
            .addApi(Auth.GOOGLE_SIGN_IN_API).build();

    // Initialize ProgressBar and RecyclerView.
    mProgressBar = (ProgressBar) findViewById(R.id.progressBar);
    mMessageRecyclerView = (RecyclerView) findViewById(R.id.messageRecyclerView);
    mLinearLayoutManager = new LinearLayoutManager(this);
    mLinearLayoutManager.setStackFromEnd(true);
    mMessageRecyclerView.setLayoutManager(mLinearLayoutManager);

    mFirebaseAdapter = MessageUtil.getFirebaseAdapter(this, this, /* MessageLoadListener */
            mLinearLayoutManager, mMessageRecyclerView, mImageClickListener);
    mMessageRecyclerView.setAdapter(mFirebaseAdapter);
    mProgressBar.setVisibility(ProgressBar.INVISIBLE);
    DesignUtils.setBackground(this);

    mMessageEditText = (EditText) findViewById(R.id.messageEditText);
    mMessageEditText.setFilters(new InputFilter[] { new InputFilter.LengthFilter(MSG_LENGTH_LIMIT) });
    mMessageEditText.addTextChangedListener(new TextWatcher() {
        @Override
        public void beforeTextChanged(CharSequence charSequence, int i, int i1, int i2) {
        }

        @Override
        public void onTextChanged(CharSequence charSequence, int i, int i1, int i2) {
            if (charSequence.toString().trim().length() > 0) {
                mSendButton.setEnabled(true);
            } else {
                mSendButton.setEnabled(false);
            }
        }

        @Override
        public void afterTextChanged(Editable editable) {
        }
    });

    mSendButton = (FloatingActionButton) findViewById(R.id.sendButton);
    mSendButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            // Send messages on click.
            //mMessageRecyclerView.scrollToPosition(0);
            ChatMessage chatMessage = new ChatMessage(mMessageEditText.getText().toString(), mUsername, mPhotoUrl);
            MessageUtil.send(chatMessage);
            if (mSharedPreferences.getBoolean(soundPreference, true)) {
                mSentSound.start();
            }
            mMessageEditText.setText("");
        }
    });

    mImageButton = (ImageButton) findViewById(R.id.shareImageButton);
    mImageButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            pickImage();
        }
    });

    mLocationButton = (ImageButton) findViewById(R.id.locationButton);
    mLocationButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            //mLocationButton.setEnabled(false);
            featureFlag = 1;
            loadMap();
        }
    });

    mCameraButton = (ImageButton) findViewById(R.id.cameraButton);
    mCameraButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            dispatchTakePhotoIntent();
        }
    });

    mVideoButton = (ImageButton) findViewById(R.id.videoButton);
    mVideoButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            dispatchRecordVideoIntent();
        }
    });

    mVoiceMessageButton = (ImageButton) findViewById(R.id.shareVoiceMessage);
    mVoiceMessageButton.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View view, MotionEvent event) {
            switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                if (mSharedPreferences.getBoolean(soundPreference, true)) {
                    mStartRecordSound.start();
                }
                while (mStartRecordSound.isPlaying())
                    ;
                startRecording();
                break;
            case MotionEvent.ACTION_UP:
                stopRecording();
                if (mSharedPreferences.getBoolean(soundPreference, true)) {
                    mSentSound.start();
                }
                break;
            }
            return false;
        }
    });

    // Initialize the Accelerometer for Shake function
    mSensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
    // Use the accelerometer.
    mAccelerometer = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
    mShakeDetector = new ShakeDetector();
    shakePreference = this.getString(R.string.shake_change_background);
    if (mSharedPreferences.getBoolean(shakePreference, true)) {
        // Shake to change bg
        mSensorManager.registerListener(mShakeDetector, mAccelerometer, SensorManager.SENSOR_DELAY_UI);
        mShakeDetector.setOnShakeListener(new ShakeDetector.OnShakeListener() {
            @Override
            public void onShake(int count) {
                // if (count > 2)
                DesignUtils.setRandomBackground(context);
            }
        });
    }
}
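One detail worth noting in the example above: the loop while (mStartRecordSound.isPlaying()) ; busy-waits on the UI thread until the record beep finishes. A non-blocking alternative (a sketch that assumes startRecording() may safely be called from a completion callback, which the source does not state) chains the recording off the player's completion listener:

    case MotionEvent.ACTION_DOWN:
        if (mSharedPreferences.getBoolean(soundPreference, true)) {
            mStartRecordSound.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                @Override
                public void onCompletion(MediaPlayer mp) {
                    startRecording(); // start only after the beep, without blocking the UI thread
                }
            });
            mStartRecordSound.start();
        } else {
            startRecording();
        }
        break;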
From source file:com.hybris.mobile.lib.location.geofencing.service.GeofencingIntentService.java
/**
 * Send a notification when a geofence is triggered
 *
 * @param geofence the geofence triggered
 * @param notification the notification object
 * @param geofenceTransition the geofence transition type
 */
protected void sendNotification(Geofence geofence, GeofenceObject.Notification notification, int geofenceTransition) {
    if (notification != null) {
        // Notification
        String notificationContentTitle = notification.getNotificationTitle();
        String notificationContentText = notification.getNotificationText();
        int notificationIconResId = notification.getNotificationIconResId();

        Notification.Builder builder = new Notification.Builder(this);
        builder.setContentTitle(notificationContentTitle).setContentText(notificationContentText);
        if (notificationIconResId > 0) {
            builder.setSmallIcon(notificationIconResId);
        }

        try {
            // Intent on click on the notification
            if (StringUtils.isNotBlank(notification.getIntentClassDestination())) {
                Class<?> intentClassDestination = Class.forName(notification.getIntentClassDestination());

                // Create an explicit content Intent that starts the Activity defined in intentClassDestination
                Intent notificationIntent = new Intent(this, intentClassDestination);

                // Geofence Id to pass to the activity in order to retrieve the object
                if (notification.getIntentBundle() != null) {
                    GeofenceObject.IntentBundle intentBundle = notification.getIntentBundle();
                    notificationIntent.putExtra(intentBundle.getKeyName(), intentBundle.getBundle());

                    // Easter egg :)
                    if (intentBundle.getBundle().getBoolean(GeofencingConstants.EXTRA_PLAY_SOUND)) {
                        MediaPlayer mediaPlayer;
                        if (geofenceTransition == Geofence.GEOFENCE_TRANSITION_ENTER) {
                            Log.d(TAG, "Playing entering geofence sound");
                            mediaPlayer = MediaPlayer.create(getApplicationContext(), R.raw.entering_geofence);
                        } else {
                            Log.d(TAG, "Playing exiting geofence sound");
                            mediaPlayer = MediaPlayer.create(getApplicationContext(), R.raw.leaving_geofence);
                        }
                        mediaPlayer.start();
                    }
                }

                PendingIntent notificationPendingIntent = PendingIntent.getActivity(this,
                        geofence.getRequestId().hashCode(), notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);
                builder.setContentIntent(notificationPendingIntent);
            }
        } catch (ClassNotFoundException e) {
            Log.e(TAG, "Unable to find class " + notification.getIntentClassDestination() + "." + e.getLocalizedMessage());
        }

        // Constructing the Notification and setting the flag to auto remove the notification when the user clicks on it
        Notification notificationView;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            notificationView = builder.build();
        } else {
            notificationView = builder.getNotification();
        }
        notificationView.flags = Notification.FLAG_AUTO_CANCEL;
        notificationView.defaults = Notification.DEFAULT_ALL;

        // Get an instance of the Notification manager
        NotificationManager mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);

        // Issue the notification
        mNotificationManager.notify(UUID.randomUUID().toString().hashCode(), notificationView);
    } else {
        Log.e(TAG, "Notification empty for Geofence " + geofence);
    }
}
From source file:mozilla.voicejam.singwithme.CameraActivity.java
void play() {
    Uri uri = Uri.parse(filePath);
    MediaPlayer mediaPlayer = MediaPlayer.create(this, uri);
    mediaPlayer.start();
    mediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
        @Override
        public void onCompletion(MediaPlayer MP) {
            MP.release();
        }
    });
}
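Unlike most examples on this page, this one uses the create(Context context, Uri uri) overload rather than the raw-resource variant shown at the top. With arbitrary URIs the factory returns null when the data source cannot be opened, so a guard (an assumption added here, not in the original source) keeps start() from throwing a NullPointerException:

    // Defensive variant (sketch): create(Context, Uri) returns null if the source cannot be opened
    MediaPlayer mediaPlayer = MediaPlayer.create(this, Uri.parse(filePath));
    if (mediaPlayer == null) {
        return; // creation failed; nothing to play
    }
    mediaPlayer.start();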
From source file:com.handmark.pulltorefresh.library.internal.LoadingLayout.java
public void refreshing() {
    if (hasPlayedSound) {
        hasPlayedSound = false;
        boolean isMuted = false;
        switch (audioManager.getRingerMode()) {
        case AudioManager.RINGER_MODE_NORMAL:
            isMuted = false;
            break;
        case AudioManager.RINGER_MODE_SILENT:
            isMuted = true;
            break;
        case AudioManager.RINGER_MODE_VIBRATE:
            isMuted = true;
            break;
        }
        if (mPreferences.getBoolean(PREFERENCE_KEY_SOUND_NAVIGATION, true)) {
            if (!isMuted) {
                if (mPlayer != null) {
                    if (mPlayer.isPlaying()) {
                        mPlayer.stop();
                    }
                    mPlayer.release();
                }
                mPlayer = MediaPlayer.create(mContext, R.raw.release);
                mPlayer.start();
            }
        }
    }
    mHeaderText.setText(Html.fromHtml(mRefreshingLabel));
    mHeaderArrow.setVisibility(View.INVISIBLE);
    mHeaderProgress.setVisibility(View.VISIBLE);
    mSubHeaderText.setVisibility(View.GONE);
}
From source file:com.cw.litenote.note.NoteUi.java
public NoteUi(AppCompatActivity activity, ViewPager viewPager, int position) {
    System.out.println("NoteUi / constructor");
    pager = viewPager;
    act = activity;
    mPosition = position;

    DB_page db_page = new DB_page(act, TabsHost.getCurrentPageTableId());
    setNotesCnt(db_page.getNotesCount(true));

    String pictureUri = db_page.getNotePictureUri(position, true);
    String linkUri = db_page.getNoteLinkUri(position, true);

    String tagStr = "current" + position + "pictureView";
    ViewGroup pictureGroup = (ViewGroup) pager.findViewWithTag(tagStr);
    if (pictureGroup != null) {
        setPictureView_listeners(act, pager, pictureUri, linkUri, pictureGroup);

        TextView picView_footer = (TextView) (pictureGroup.findViewById(R.id.image_footer));
        Button picView_back_button = (Button) (pictureGroup.findViewById(R.id.image_view_back));
        Button picView_viewMode_button = (Button) (pictureGroup.findViewById(R.id.image_view_mode));
        TextView videoView_currPosition = (TextView) (pictureGroup.findViewById(R.id.video_current_pos));
        SeekBar videoView_seekBar = (SeekBar) (pictureGroup.findViewById(R.id.video_seek_bar));
        TextView videoView_fileLength = (TextView) (pictureGroup.findViewById(R.id.video_file_length));

        // show back button
        if (Note.isPictureMode())
            picView_back_button.setVisibility(View.VISIBLE);
        else
            picView_back_button.setVisibility(View.GONE);

        // Show picture title
        TextView picView_title;
        picView_title = (TextView) (pictureGroup.findViewById(R.id.image_title));
        String pictureName;
        if (!Util.isEmptyString(pictureUri))
            pictureName = Util.getDisplayNameByUriString(pictureUri, act);
        else if (Util.isYouTubeLink(linkUri))
            pictureName = linkUri;
        else
            pictureName = "";

        if (!Util.isEmptyString(pictureName)) {
            picView_title.setVisibility(View.VISIBLE);
            picView_title.setText(pictureName);
        } else
            picView_title.setVisibility(View.INVISIBLE);

        // show footer
        if (Note.isPictureMode()) {
            picView_footer.setVisibility(View.VISIBLE);
            picView_footer.setText((pager.getCurrentItem() + 1) + "/" + pager.getAdapter().getCount());
        } else
            picView_footer.setVisibility(View.GONE);

        // for video
        if (UtilVideo.hasVideoExtension(pictureUri, act)) {
            if (!UtilVideo.hasMediaControlWidget)
                NoteUi.updateVideoPlayButtonState(pager, getFocus_notePos());
            else
                show_picViewUI_previous_next(false, 0);
        }

        // set image view buttons (View Mode, Previous, Next) visibility
        if (Note.isPictureMode()) {
            picView_viewMode_button.setVisibility(View.VISIBLE);
            // show previous/next buttons for image, not for video
            if (UtilVideo.hasVideoExtension(pictureUri, act) && !UtilVideo.hasMediaControlWidget) {   // for video
                System.out.println("NoteUi / constructor / for video");
                show_picViewUI_previous_next(true, position);
            } else if (UtilImage.hasImageExtension(pictureUri, act) && (UtilVideo.mVideoView == null)) {   // for image
                System.out.println("NoteUi / constructor / for image");
                show_picViewUI_previous_next(true, position);
            }
        } else {
            show_picViewUI_previous_next(false, 0);
            picView_viewMode_button.setVisibility(View.GONE);
        }

        // show seek bar for video only
        if (!UtilVideo.hasMediaControlWidget) {
            if (UtilVideo.hasVideoExtension(pictureUri, act)) {
                MediaPlayer mp = MediaPlayer.create(act, Uri.parse(pictureUri));
                if (mp != null) {
                    videoFileLength_inMilliSeconds = mp.getDuration();
                    mp.release();
                }
                primaryVideoSeekBarProgressUpdater(pager, position, UtilVideo.mPlayVideoPosition, pictureUri);
            } else {
                videoView_currPosition.setVisibility(View.GONE);
                videoView_seekBar.setVisibility(View.GONE);
                videoView_fileLength.setVisibility(View.GONE);
            }
        }

        showSeekBarProgress = true;
    }
}
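In the example above, MediaPlayer.create() is used only to read the clip duration and is then released immediately. An alternative that avoids preparing a full player just for a metadata lookup is MediaMetadataRetriever; the sketch below is a substitute suggested here (reusing the act, pictureUri, and videoFileLength_inMilliSeconds names from the source), not code from the project itself:

    // Alternative duration lookup (sketch): read metadata without constructing a MediaPlayer
    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    try {
        retriever.setDataSource(act, Uri.parse(pictureUri));
        String durationMs = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
        if (durationMs != null) {
            videoFileLength_inMilliSeconds = Integer.parseInt(durationMs);
        }
    } catch (Exception e) {
        // unable to read metadata; keep the previous value
    } finally {
        try {
            retriever.release();
        } catch (Exception ignored) {
            // release() declares a checked exception on newer SDK levels
        }
    }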
From source file:com.example.android.gft.ColorsFragment.java
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View rootView = inflater.inflate(R.layout.word_list, container, false);

    // Create and setup the {@link AudioManager} to request audio focus
    mAudioManager = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);

    // Create a list of words
    final ArrayList<Word> words = new ArrayList<Word>();
    words.add(new Word(R.string.color_red, R.string.miwok_color_red, R.drawable.color_red, R.raw.color_red));
    words.add(new Word(R.string.color_mustard_yellow, R.string.miwok_color_mustard_yellow, R.drawable.color_mustard_yellow, R.raw.color_mustard_yellow));
    words.add(new Word(R.string.color_dusty_yellow, R.string.miwok_color_dusty_yellow, R.drawable.color_dusty_yellow, R.raw.color_dusty_yellow));
    words.add(new Word(R.string.color_green, R.string.miwok_color_green, R.drawable.color_green, R.raw.color_green));
    words.add(new Word(R.string.color_brown, R.string.miwok_color_brown, R.drawable.color_brown, R.raw.color_brown));
    words.add(new Word(R.string.color_gray, R.string.miwok_color_gray, R.drawable.color_gray, R.raw.color_gray));
    words.add(new Word(R.string.color_black, R.string.miwok_color_black, R.drawable.color_black, R.raw.color_black));
    words.add(new Word(R.string.color_white, R.string.miwok_color_white, R.drawable.color_white, R.raw.color_white));

    // Create a {@link WordAdapter}, whose data source is a list of {@link Word}s. The
    // adapter knows how to create list items for each item in the list.
    WordAdapter adapter = new WordAdapter(getActivity(), words, R.color.category_colors);

    // Find the {@link ListView} object in the view hierarchy of the {@link Activity}.
    // There should be a {@link ListView} with the view ID called list, which is declared in the
    // word_list.xml layout file.
    ListView listView = (ListView) rootView.findViewById(R.id.list);

    // Make the {@link ListView} use the {@link WordAdapter} we created above, so that the
    // {@link ListView} will display list items for each {@link Word} in the list.
    listView.setAdapter(adapter);

    // Set a click listener to play the audio when the list item is clicked on
    listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> adapterView, View view, int position, long l) {
            // Release the media player if it currently exists because we are about to
            // play a different sound file
            releaseMediaPlayer();

            // Get the {@link Word} object at the given position the user clicked on
            Word word = words.get(position);

            // Request audio focus in order to play the audio file. The app needs to play a
            // short audio file, so we will request audio focus with a short amount of time
            // with AUDIOFOCUS_GAIN_TRANSIENT.
            int result = mAudioManager.requestAudioFocus(mOnAudioFocusChangeListener,
                    AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);

            if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                // We have audio focus now.

                // Create and setup the {@link MediaPlayer} for the audio resource associated
                // with the current word
                mMediaPlayer = MediaPlayer.create(getActivity(), word.getAudioResourceId());

                // Start the audio file
                mMediaPlayer.start();

                // Setup a listener on the media player, so that we can stop and release the
                // media player once the sound has finished playing.
                mMediaPlayer.setOnCompletionListener(mCompletionListener);
            }
        }
    });

    return rootView;
}
From source file:com.example.android.gft.PhrasesFragment.java
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View rootView = inflater.inflate(R.layout.word_list, container, false);

    // Create and setup the {@link AudioManager} to request audio focus
    mAudioManager = (AudioManager) getActivity().getSystemService(Context.AUDIO_SERVICE);

    // Create a list of words
    final ArrayList<Word> words = new ArrayList<Word>();
    words.add(new Word(R.string.phrase_where_are_you_going, R.string.miwok_phrase_where_are_you_going, R.raw.phrase_where_are_you_going));
    words.add(new Word(R.string.phrase_what_is_your_name, R.string.miwok_phrase_what_is_your_name, R.raw.phrase_what_is_your_name));
    words.add(new Word(R.string.phrase_my_name_is, R.string.miwok_phrase_my_name_is, R.raw.phrase_my_name_is));
    words.add(new Word(R.string.phrase_how_are_you_feeling, R.string.miwok_phrase_how_are_you_feeling, R.raw.phrase_how_are_you_feeling));
    words.add(new Word(R.string.phrase_im_feeling_good, R.string.miwok_phrase_im_feeling_good, R.raw.phrase_im_feeling_good));
    words.add(new Word(R.string.phrase_are_you_coming, R.string.miwok_phrase_are_you_coming, R.raw.phrase_are_you_coming));
    words.add(new Word(R.string.phrase_yes_im_coming, R.string.miwok_phrase_yes_im_coming, R.raw.phrase_yes_im_coming));
    words.add(new Word(R.string.phrase_im_coming, R.string.miwok_phrase_im_coming, R.raw.phrase_im_coming));
    words.add(new Word(R.string.phrase_lets_go, R.string.miwok_phrase_lets_go, R.raw.phrase_lets_go));
    words.add(new Word(R.string.phrase_come_here, R.string.miwok_phrase_come_here, R.raw.phrase_come_here));

    // Create a {@link WordAdapter}, whose data source is a list of {@link Word}s. The
    // adapter knows how to create list items for each item in the list.
    WordAdapter adapter = new WordAdapter(getActivity(), words, R.color.category_phrases);

    // Find the {@link ListView} object in the view hierarchy of the {@link Activity}.
    // There should be a {@link ListView} with the view ID called list, which is declared in the
    // word_list.xml layout file.
    ListView listView = (ListView) rootView.findViewById(R.id.list);

    // Make the {@link ListView} use the {@link WordAdapter} we created above, so that the
    // {@link ListView} will display list items for each {@link Word} in the list.
    listView.setAdapter(adapter);

    // Set a click listener to play the audio when the list item is clicked on
    listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> adapterView, View view, int position, long l) {
            // Release the media player if it currently exists because we are about to
            // play a different sound file
            releaseMediaPlayer();

            // Get the {@link Word} object at the given position the user clicked on
            Word word = words.get(position);

            // Request audio focus in order to play the audio file. The app needs to play a
            // short audio file, so we will request audio focus with a short amount of time
            // with AUDIOFOCUS_GAIN_TRANSIENT.
            int result = mAudioManager.requestAudioFocus(mOnAudioFocusChangeListener,
                    AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT);

            if (result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                // We have audio focus now.

                // Create and setup the {@link MediaPlayer} for the audio resource associated
                // with the current word
                mMediaPlayer = MediaPlayer.create(getActivity(), word.getAudioResourceId());

                // Start the audio file
                mMediaPlayer.start();

                // Setup a listener on the media player, so that we can stop and release the
                // media player once the sound has finished playing.
                mMediaPlayer.setOnCompletionListener(mCompletionListener);
            }
        }
    });

    return rootView;
}