List of usage examples for android.content.Intent.getStringArrayListExtra(String)
public ArrayList<String> getStringArrayListExtra(String name)
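getStringArrayListExtra(String) returns null when no ArrayList<String> was stored under the given name, so callers normally pair it with putStringArrayListExtra(String, ArrayList<String>) on the sending side and a null check on the receiving side. A minimal sketch of both sides; the key "names" and the DetailActivity target are illustrative and not taken from the projects below:

// Requires java.util.ArrayList, java.util.Arrays and android.content.Intent
// Sending side: attach the list before starting the target activity
ArrayList<String> names = new ArrayList<>(Arrays.asList("Alice", "Bob"));
Intent intent = new Intent(this, DetailActivity.class);   // DetailActivity is illustrative
intent.putStringArrayListExtra("names", names);
startActivity(intent);

// Receiving side (e.g. in DetailActivity.onCreate): the extra may be absent, so guard against null
ArrayList<String> received = getIntent().getStringArrayListExtra("names");
if (received != null && !received.isEmpty()) {
    // use the list
}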
From source file: com.learnncode.mediachooser.activity.DirectoryListActivity.java

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == MediaChooser.REQ_PICK_IMAGE) {
        if (resultCode == MediaChooser.RESULT_CLOSE_ALL) {
            setResult(resultCode, null);
            finish();
            return;
        } else if (resultCode == MediaChooser.RESULT_OK) {
        }
        if (data == null || data.getStringArrayListExtra("list") == null
                || data.getStringArrayListExtra("list").size() == 0) {
            System.out.println("------------?null------------------");
        } else {
            // System.out.println("-----------??-------");
            setResult(resultCode, data);
            finish();
        }
    }
}
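The "list" extra consumed above is attached by the image-picking screen before it calls setResult. A minimal sketch of that producing side, reusing the key and result constant from the example; the selectedMediaPaths field is hypothetical:

// Inside the picking activity; selectedMediaPaths (ArrayList<String>) is a hypothetical field
Intent result = new Intent();
result.putStringArrayListExtra("list", selectedMediaPaths);
setResult(MediaChooser.RESULT_OK, result);
finish();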
From source file: com.rfo.basic.Web.java

public void onActivityResult(int requestCode, int resultCode, Intent data) {
    switch (requestCode) {
        case Run.VOICE_RECOGNITION_REQUEST_CODE:
            if (resultCode == RESULT_OK) {
                Run.sttResults = new ArrayList<String>();
                Run.sttResults = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            }
            Run.sttDone = true;
    }
}
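RecognizerIntent.EXTRA_RESULTS is only filled in when the result comes from a speech-recognition activity started with RecognizerIntent.ACTION_RECOGNIZE_SPEECH. A minimal launch sketch, assuming the same request-code constant as above; the prompt text is illustrative:

// Launch speech recognition so that EXTRA_RESULTS arrives in onActivityResult
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
        RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak now");  // prompt text is illustrative
startActivityForResult(intent, Run.VOICE_RECOGNITION_REQUEST_CODE);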
From source file: com.pixplicity.castdemo.MainActivity.java

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == REQUEST_SPEECH_RECOGNITION && resultCode == RESULT_OK) {
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (matches.size() > 0) {
            String message = matches.get(0);
            if (!CastProxy.getChannel().sendMessage(message)) {
                mEtMessage.setText(message);
            }
        }
    }
    super.onActivityResult(requestCode, resultCode, data);
}
From source file: tech.salroid.filmy.activities.MainActivity.java

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == MaterialSearchView.REQUEST_VOICE && resultCode == RESULT_OK) {
        ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        if (matches != null && matches.size() > 0) {
            String searchWrd = matches.get(0);
            if (!TextUtils.isEmpty(searchWrd)) {
                materialSearchView.setQuery(searchWrd, false);
            }
        }
        return;
    }
    super.onActivityResult(requestCode, resultCode, data);
}
From source file: com.todoroo.astrid.voice.VoiceInputAssistant.java

/**
 * Can also be called from Fragment.onActivityResult to simply get the string result
 * of the speech to text, or null if it couldn't be processed. Convenient when you
 * don't have a bunch of UI elements to hook into.
 * @param activityRequestCode
 * @param resultCode
 * @param data
 * @return the recognized phrase, or null if nothing usable was recognized
 */
public String getActivityResult(int activityRequestCode, int resultCode, Intent data) {
    if (activityRequestCode == this.requestCode) {
        if (resultCode == Activity.RESULT_OK) {
            // Fill the quickAddBox-view with the string the recognizer thought it could have heard
            ArrayList<String> match = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            // make sure we only do this if there is SomeThing (tm) returned
            if (match != null && match.size() > 0 && match.get(0).length() > 0) {
                String recognizedSpeech = match.get(0);
                recognizedSpeech = recognizedSpeech.substring(0, 1).toUpperCase()
                        + recognizedSpeech.substring(1).toLowerCase();
                return recognizedSpeech;
            }
        }
    }
    return null;
}
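A caller only has to forward its own onActivityResult arguments to this helper. A sketch of that call site, assuming a hypothetical voiceInputAssistant field created with the matching request code and a hypothetical quickAddBox EditText:

@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    // voiceInputAssistant is a hypothetical field holding the assistant instance
    String recognized = voiceInputAssistant.getActivityResult(requestCode, resultCode, data);
    if (recognized != null) {
        quickAddBox.setText(recognized);  // quickAddBox is illustrative
    }
}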
From source file: com.aqtx.app.main.activity.MainActivity.java

@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == Activity.RESULT_OK) {
        if (requestCode == REQUEST_CODE_NORMAL) {
            final ArrayList<String> selected = data.getStringArrayListExtra(ContactSelectActivity.RESULT_DATA);
            if (selected != null && !selected.isEmpty()) {
                TeamCreateHelper.createNormalTeam(MainActivity.this, selected, false, null);
            } else {
                Toast.makeText(MainActivity.this, "??", Toast.LENGTH_SHORT).show();
            }
        } else if (requestCode == REQUEST_CODE_ADVANCED) {
            final ArrayList<String> selected = data.getStringArrayListExtra(ContactSelectActivity.RESULT_DATA);
            TeamCreateHelper.createAdvancedTeam(MainActivity.this, selected);
        }
    }
}
From source file: com.open.file.manager.CutCopyService.java

@Override
protected void onHandleIntent(Intent intent) {
    progressview = new RemoteViews(getApplicationContext().getPackageName(), R.layout.progressbarlayout);
    currentaction = intent.getIntExtra("action", 0);
    filelist = intent.getStringArrayListExtra("filelist");
    targetfolder = new File(intent.getStringExtra("targetfolder"));
    tree = new FileCopyTree(filelist, targetfolder);
    duplicates = null;
    currentfileind = 0;
    progressbytes = 0;
    progresspercent = 0;
    totalbytes = tree.size;
    mHandler = new dupresponcehandler(this);
    if (tree.duplicates.size() != 0) {
        sendDuplicateMessage();
    }
    id = 1;
    completeid = (completeid + 2) % Integer.MAX_VALUE;
    cutcopymanager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
    cutcopybuilder = new NotificationCompat.Builder(this);
    // cutcopybuilder.setProgress(100, 0, false);
    Intent notificationIntent = new Intent(this, MainActivity.class);
    notificationIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
    contentIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0);
    cutcopybuilder.setContent(new RemoteViews(getApplicationContext().getPackageName(), R.layout.progressbarlayout));
    cutcopybuilder.setSmallIcon(R.drawable.notifyicon);
    actiongerund = getResources().getString(actioning[currentaction]);
    cutcopybuilder.setContentTitle(actiongerund + " files");
    cutcopybuilder.setContentIntent(contentIntent);
    cutcopynotification = cutcopybuilder.build();
    cutcopynotification.contentView = progressview;
    cutcopynotification.contentView.setProgressBar(R.id.progressBar, 100, 0, false);
    cutcopynotification.contentView.setTextViewText(R.id.progresstext, actiongerund + " files");
    cutcopymanager.notify(id, cutcopynotification);
    startForeground(id, cutcopynotification);
    performCutCopy();
}
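The extras read above ("action", "filelist", "targetfolder") must be attached by whatever component starts the service. A minimal sketch of that calling side from an Activity, using the same keys; the file paths, target folder and action value are illustrative:

// Start the copy service with the extras it expects
ArrayList<String> filesToCopy = new ArrayList<>();
filesToCopy.add("/sdcard/DCIM/photo1.jpg");
filesToCopy.add("/sdcard/DCIM/photo2.jpg");

Intent serviceIntent = new Intent(this, CutCopyService.class);
serviceIntent.putExtra("action", 0);  // indexes the service's actioning[] array; 0 is illustrative
serviceIntent.putStringArrayListExtra("filelist", filesToCopy);
serviceIntent.putExtra("targetfolder", "/sdcard/Backup");
startService(serviceIntent);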
From source file: com.example.h156252.connected_cars.CarGrid.java

/**
 * Receiving speech input
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    switch (requestCode) {
        case REQ_CODE_SPEECH_INPUT: {
            if (resultCode == RESULT_OK && null != data) {
                ArrayList<String> result = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
                message_to_be_sent = result.get(0);
                /* Toast.makeText(getApplicationContext(),
                        "Sending message: " + message_to_be_sent.toUpperCase(),
                        Toast.LENGTH_SHORT).show(); */
                new VoiceTask().execute("http://connect-car.au-syd.mybluemix.net/api/Items/" + receiver_id);
            }
            break;
        }
    }
}
From source file: com.ferid.app.notetake.MainActivity.java

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == SPEECH_REQUEST_CODE && resultCode == RESULT_OK) {
        ArrayList<String> results = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
        String spokenText = results.get(0);
        // capitalize the first letter
        spokenText = capitalizeFirstLetter(spokenText);
        // now append the spoken text
        appendNote(spokenText);
    }
    super.onActivityResult(requestCode, resultCode, data);
}
From source file: conversandroid.SimpleASR.java

/**
 * Shows the formatted best of N best recognition results (N-best list) from
 * best to worst in the <code>ListView</code>.
 * For each match, it will render the recognized phrase and the confidence with
 * which it was recognized.
 */
@SuppressLint("InlinedApi")
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == ASR_CODE) {
        if (resultCode == RESULT_OK) {
            if (data != null) {
                // Retrieves the N-best list and the confidences from the ASR result
                ArrayList<String> nBestList = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
                float[] nBestConfidences = null;
                if (Build.VERSION.SDK_INT >= 14) {
                    // Confidence scores are supported only from API level 14
                    nBestConfidences = data.getFloatArrayExtra(RecognizerIntent.EXTRA_CONFIDENCE_SCORES);
                }

                // Creates a collection of strings, each one with a recognition result and its confidence,
                // following the structure "Phrase matched (conf: 0.5)"
                ArrayList<String> nBestView = new ArrayList<String>();
                for (int i = 0; i < nBestList.size(); i++) {
                    if (nBestConfidences != null && nBestConfidences[i] >= 0) {
                        nBestView.add(nBestList.get(i) + " (conf: "
                                + String.format("%.2f", nBestConfidences[i]) + ")");
                    } else {
                        nBestView.add(nBestList.get(i) + " (no confidence value available)");
                    }
                }

                // Includes the collection in the ListView of the GUI
                setListView(nBestView);

                Log.i(LOGTAG, "There were : " + nBestView.size() + " recognition results");
            }
        } else {
            // Reports a recognition error in the log
            Log.e(LOGTAG, "Recognition was not successful");
        }

        // Enable the button again
        Button speak = (Button) findViewById(R.id.speech_btn);
        speak.setEnabled(true);
    }
}
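The size of the N-best list returned in EXTRA_RESULTS can be suggested when launching the recognizer via RecognizerIntent.EXTRA_MAX_RESULTS; the confidence scores read above are returned by the recognizer on API 14+ without being requested explicitly. A launch sketch, assuming the same ASR_CODE constant as in the example:

// Ask the recognizer for up to five hypotheses (the count 5 is illustrative)
Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
        RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 5);
startActivityForResult(intent, ASR_CODE);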