List of usage examples for android.speech SpeechRecognizer RESULTS_RECOGNITION
String RESULTS_RECOGNITION: the key used to retrieve the ArrayList<String> of recognition results (the n-best hypotheses, most likely candidate first) from the Bundle passed to RecognitionListener.onResults() and onPartialResults().
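The examples below all follow the same basic pattern: inside a RecognitionListener callback, read the n-best list out of the results Bundle with SpeechRecognizer.RESULTS_RECOGNITION. A minimal sketch of that pattern, with class and method names (ResultsOnlyListener, handleBestMatch) that are illustrative and not taken from any of the projects listed:

// Minimal sketch: read the n-best list from the results Bundle using
// SpeechRecognizer.RESULTS_RECOGNITION. handleBestMatch is a placeholder.
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.SpeechRecognizer;
import java.util.ArrayList;

public abstract class ResultsOnlyListener implements RecognitionListener {
    @Override
    public void onResults(Bundle results) {
        ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        if (matches != null && !matches.isEmpty()) {
            handleBestMatch(matches.get(0)); // hypotheses are ordered best-first
        }
    }

    protected abstract void handleBestMatch(String bestMatch);
}

The first element is the most likely candidate, which is why most of the snippets below act on matches.get(0).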
From source file:com.baidu.android.voicedemo.ApiActivity.java
@Override
public void onResults(Bundle results) {
    long end2finish = System.currentTimeMillis() - speechEndTime;
    status = STATUS_None;
    ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    print("results: " + Arrays.toString(nbest.toArray(new String[nbest.size()])));
    String json_res = results.getString("origin_result");
    try {
        print("origin_result=\n" + new JSONObject(json_res).toString(4));
    } catch (Exception e) {
        print("origin_result=[warning: bad json]\n" + json_res);
    }
    btn.setText("");
    String strEnd2Finish = "";
    if (end2finish < 60 * 1000) {
        strEnd2Finish = "(waited " + end2finish + "ms)";
    }
    txtResult.setText(nbest.get(0) + strEnd2Finish);
}
From source file:com.ct.speech.HintReceiver.java
public void sendBackResults(Bundle results) {
    ArrayList<String> voiceResults = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    speechResults(reqCode, voiceResults);
}
From source file:cn.laojing.smarthome.VoiceActivity.java
@Override
public void onPartialResults(Bundle partialResults) {
    ArrayList<String> nbest = partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    if (nbest.size() > 0) {
        print("~" + Arrays.toString(nbest.toArray(new String[0])));
    }
}
From source file:com.mhennessy.mapfly.MainActivity.java
public void setMapLocationBasedOnSpeech() {
    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    intent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, "com.mhennessy.mapfly");
    SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer(this.getApplicationContext());

    // Stop flying so that messages can be displayed to the user without
    // being overwritten by pitch/roll info.
    setFlyingEnabled(false);

    RecognitionListener listener = new RecognitionListener() {
        @Override
        public void onResults(Bundle results) {
            ArrayList<String> voiceResults = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            if (voiceResults == null) {
                Log.e(TAG, "No voice results");
            } else {
                Log.d(TAG, "Printing matches: ");
                for (String match : voiceResults) {
                    Log.d(TAG, match);
                }
                String bestMatch = voiceResults.get(0);
                setMapLocation(bestMatch);
            }
        }

        @Override
        public void onReadyForSpeech(Bundle params) {
            setTitle("Say something!");
            Log.d(TAG, "Ready for speech");
        }

        @Override
        public void onError(int error) {
            setTitle("Speech Error");
            Log.d(TAG, "Error listening for speech: " + error);
        }

        @Override
        public void onBeginningOfSpeech() {
            Log.d(TAG, "Speech starting");
        }

        @Override
        public void onBufferReceived(byte[] buffer) {
            // no-op
        }

        @Override
        public void onEndOfSpeech() {
            // no-op
        }

        @Override
        public void onEvent(int eventType, Bundle params) {
            // no-op
        }

        @Override
        public void onPartialResults(Bundle partialResults) {
            // no-op
        }

        @Override
        public void onRmsChanged(float rmsdB) {
            // no-op
        }
    };

    recognizer.setRecognitionListener(listener);
    recognizer.startListening(intent);
}
From source file:cn.laojing.smarthome.VoiceActivity.java
@Override
public void onResults(Bundle results) {
    long end2finish = System.currentTimeMillis() - speechEndTime;
    ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    print("results: " + Arrays.toString(nbest.toArray(new String[nbest.size()])));
    mListViewAdapter.setCmds(nbest.get(0));
    mVoiceControl.runCommand(nbest.get(0));
}
From source file:nl.hnogames.domoticz.SpeechSettingsActivity.java
private void showSpeechResults(Bundle results) {
    ArrayList<String> matches = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    //Toast.makeText(this, matches.get(0), Toast.LENGTH_LONG).show();
    processResult(matches.get(0).toLowerCase());
}
From source file:cn.jasonlv.siri.activity.MainActivity.java
@Override
public void onResults(Bundle results) {
    long end2finish = System.currentTimeMillis() - speechEndTime;
    status = STATUS_None;
    ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    print("results: " + Arrays.toString(nbest.toArray(new String[nbest.size()])));
    String json_res = results.getString("origin_result");
    Log.e(LOG_TAG, json_res);

    View inputPanel = getLayoutInflater().inflate(R.layout.input_layout, null);
    TextView inputTextView = (TextView) inputPanel.findViewById(R.id.input_text);
    inputTextView.setText(nbest.get(0));
    inputPanel.setFocusable(true);
    inputPanel.setFocusableInTouchMode(true);
    container.addView(inputPanel);

    //View fragmentContainer = getLayoutInflater().inflate(R.layout.fragment_container_layout, null);
    //container.addView(fragmentContainer);
    FrameLayout fragmentContainer = new FrameLayout(this);
    fragmentContainer.setId(fragmentConatainerId);
    fragmentContainer.setFocusable(true);
    fragmentContainer.setFocusableInTouchMode(true);
    container.addView(fragmentContainer);
    onProcessingResult(nbest, json_res, fragmentConatainerId);
    fragmentConatainerId++;

    scrollView.post(new Runnable() {
        @Override
        public void run() {
            scrollView.fullScroll(View.FOCUS_DOWN);
        }
    });
}
From source file:com.glabs.homegenie.util.VoiceControl.java
@Override
public void onResults(Bundle results) {
    if ((results != null) && results.containsKey(SpeechRecognizer.RESULTS_RECOGNITION)) {
        List<String> heard = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        float[] scores = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
        String msg = "";
        for (String s : heard) {
            // Only LENGTH_SHORT/LENGTH_LONG are valid Toast durations; the original passed 20000.
            Toast.makeText(_hgcontext.getApplicationContext(), "Executing: " + s, Toast.LENGTH_LONG).show();
            interpretInput(s);
            // msg += s;
            break;
        }
    }
}
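The HomeGenie snippet above also pulls SpeechRecognizer.CONFIDENCE_SCORES out of the same Bundle but never reads the array. If the scores are wanted, they can be paired index by index with the result list; a hedged sketch (ScoredResultsLogger is a hypothetical helper, not part of the HomeGenie source):

// Hypothetical helper, not part of the HomeGenie source: pair each
// recognition hypothesis with its confidence score.
import android.os.Bundle;
import android.speech.SpeechRecognizer;
import android.util.Log;
import java.util.ArrayList;

final class ScoredResultsLogger {
    static void log(Bundle results) {
        ArrayList<String> heard = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        float[] scores = results.getFloatArray(SpeechRecognizer.CONFIDENCE_SCORES);
        if (heard == null) return;
        for (int i = 0; i < heard.size(); i++) {
            // CONFIDENCE_SCORES may be null or shorter than the result list on some engines.
            float score = (scores != null && i < scores.length) ? scores[i] : -1f;
            Log.d("SpeechDemo", heard.get(i) + " (confidence " + score + ")");
        }
    }
}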
From source file:org.botlibre.sdk.activity.MicConfiguration.java
@Override
public void onResults(Bundle results) {
    List<String> text = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    editTextSpeech.setText(text.get(0));
}
From source file:com.example.haizhu.myvoiceassistant.ui.RobotChatActivity.java
public void initListener() {
    recognitionListener = new RecognitionListener() {
        @Override
        public void onReadyForSpeech(Bundle bundle) { }

        @Override
        public void onBeginningOfSpeech() { }

        @Override
        public void onRmsChanged(float v) {
            final int VTAG = 0xFF00AA01;
            Integer rawHeight = (Integer) speechWave.getTag(VTAG);
            if (rawHeight == null) {
                rawHeight = speechWave.getLayoutParams().height;
                speechWave.setTag(VTAG, rawHeight);
            }
            RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) speechWave.getLayoutParams();
            params.height = (int) (rawHeight * v * 0.01);
            params.height = Math.max(params.height, speechWave.getMeasuredWidth());
            speechWave.setLayoutParams(params);
        }

        @Override
        public void onBufferReceived(byte[] bytes) { }

        @Override
        public void onEndOfSpeech() { }

        @Override
        public void onError(int error) {
            StringBuilder sb = new StringBuilder();
            switch (error) {
            case SpeechRecognizer.ERROR_AUDIO:
                sb.append("audio recording error");
                break;
            case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
                sb.append("no speech input");
                break;
            case SpeechRecognizer.ERROR_CLIENT:
                sb.append("client side error");
                break;
            case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
                sb.append("insufficient permissions");
                break;
            case SpeechRecognizer.ERROR_NETWORK:
                sb.append("network error");
                break;
            case SpeechRecognizer.ERROR_NO_MATCH:
                sb.append("no recognition match");
                break;
            case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
                sb.append("recognizer busy");
                break;
            case SpeechRecognizer.ERROR_SERVER:
                sb.append("server error");
                break;
            case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
                sb.append("network timeout");
                break;
            }
            sb.append(":" + error);
            print("error: " + sb.toString());
            Message message = mhandler.obtainMessage(RECOGNIZE_ERROR);
            message.obj = sb.toString();
            mhandler.sendMessage(message);
        }

        @Override
        public void onResults(Bundle results) {
            ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            print("results: " + Arrays.toString(nbest.toArray(new String[nbest.size()])));
            String json_res = results.getString("origin_result");
            String results_nlu_json = results.getString("results_nlu");
            Message message = mhandler.obtainMessage(RECOGNIZE_SUCESS);
            message.obj = results_nlu_json;
            mhandler.sendMessage(message);
            // parse the NLU result and update the UI
            ResultsAnalysisManager.analyseResult(results_nlu_json);
            print("result_nlu=\n" + results_nlu_json);
            try {
                print("origin_result=\n" + new JSONObject(json_res).toString(4));
            } catch (Exception e) {
                print("origin_result=[warning: bad json]\n" + json_res);
            }
        }

        @Override
        public void onPartialResults(Bundle bundle) { }

        @Override
        public void onEvent(int eventType, Bundle bundle) {
            switch (eventType) {
            case 11:
                String reason = bundle.get("reason") + "";
                print("EVENT_ERROR, " + reason);
                Message message = mhandler.obtainMessage(RECOGNIZE_ERROR);
                message.obj = reason;
                mhandler.sendMessage(message);
                break;
            }
        }
    };

    speechSynthesizerListener = new SpeechSynthesizerListener() {
        @Override public void onStartWorking(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechStart(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onNewDataArrive(SpeechSynthesizer speechSynthesizer, byte[] bytes, boolean b) { }
        @Override public void onBufferProgressChanged(SpeechSynthesizer speechSynthesizer, int i) { }
        @Override public void onSpeechProgressChanged(SpeechSynthesizer speechSynthesizer, int i) { }
        @Override public void onSpeechPause(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechResume(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onCancel(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSynthesizeFinish(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechFinish(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onError(SpeechSynthesizer speechSynthesizer, SpeechError speechError) { }
    };
}