List of usage examples for android.speech SpeechRecognizer ERROR_AUDIO
int ERROR_AUDIO
Audio recording error. This constant is delivered to RecognitionListener.onError() when the recognizer fails while capturing audio.
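Before the examples from real projects, here is a minimal, self-contained sketch of how ERROR_AUDIO is typically handled. The class name ErrorAudioDemo, the startListening helper, and the log tag are hypothetical; the sketch assumes the RECORD_AUDIO permission has already been granted and that it runs on a device with a recognition service installed.

import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.util.Log;

public class ErrorAudioDemo {
    private static final String TAG = "ErrorAudioDemo";

    // Creates a recognizer, registers a listener, and starts listening.
    // onError() distinguishes ERROR_AUDIO from the other error codes.
    public static SpeechRecognizer startListening(Context context) {
        SpeechRecognizer recognizer = SpeechRecognizer.createSpeechRecognizer(context);
        recognizer.setRecognitionListener(new RecognitionListener() {
            @Override public void onError(int error) {
                if (error == SpeechRecognizer.ERROR_AUDIO) {
                    // Audio recording error: the microphone input could not be read.
                    Log.e(TAG, "Audio recording error");
                } else {
                    Log.e(TAG, "Recognition error: " + error);
                }
            }
            // The remaining callbacks are no-ops in this sketch.
            @Override public void onReadyForSpeech(Bundle params) { }
            @Override public void onBeginningOfSpeech() { }
            @Override public void onRmsChanged(float rmsdB) { }
            @Override public void onBufferReceived(byte[] buffer) { }
            @Override public void onEndOfSpeech() { }
            @Override public void onResults(Bundle results) { }
            @Override public void onPartialResults(Bundle partialResults) { }
            @Override public void onEvent(int eventType, Bundle params) { }
        });
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        recognizer.startListening(intent);
        return recognizer;
    }
}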
From source file: sandra.examples.oneshot.voicelaunch.VoiceLaunch.java
/**
 * Provides feedback to the user (by means of a Toast and a synthesized message)
 * when the ASR encounters an error.
 */
@Override
public void processAsrError(int errorCode) {
    String errorMessage;
    switch (errorCode) {
    case SpeechRecognizer.ERROR_AUDIO:
        errorMessage = "Audio recording error";
        break;
    case SpeechRecognizer.ERROR_CLIENT:
        errorMessage = "Client side error";
        break;
    case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
        errorMessage = "Insufficient permissions";
        break;
    case SpeechRecognizer.ERROR_NETWORK:
        errorMessage = "Network related error";
        break;
    case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
        errorMessage = "Network operation timeout";
        break;
    case SpeechRecognizer.ERROR_NO_MATCH:
        errorMessage = "No recognition result matched";
        break;
    case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
        errorMessage = "RecognitionServiceBusy";
        break;
    case SpeechRecognizer.ERROR_SERVER:
        errorMessage = "Server sends error status";
        break;
    case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
        errorMessage = "No speech input";
        break;
    default:
        errorMessage = "ASR error";
        break;
    }
    Toast.makeText(this, errorMessage, Toast.LENGTH_LONG).show();
    changeButtonAppearanceToDefault();
    try {
        myTts.speak(errorMessage, "EN");
    } catch (Exception e) {
        Log.i(LOGTAG, "Selected language not available, using the device's default");
    }
    Log.e(LOGTAG, "Error when attempting listen: " + errorMessage);
}
From source file: com.example.haizhu.myvoiceassistant.ui.RobotChatActivity.java
public void initListener() {
    recognitionListener = new RecognitionListener() {
        @Override public void onReadyForSpeech(Bundle bundle) { }

        @Override public void onBeginningOfSpeech() { }

        @Override
        public void onRmsChanged(float v) {
            // Scale the wave view's height with the current input volume.
            final int VTAG = 0xFF00AA01;
            Integer rawHeight = (Integer) speechWave.getTag(VTAG);
            if (rawHeight == null) {
                rawHeight = speechWave.getLayoutParams().height;
                speechWave.setTag(VTAG, rawHeight);
            }
            RelativeLayout.LayoutParams params =
                    (RelativeLayout.LayoutParams) speechWave.getLayoutParams();
            params.height = (int) (rawHeight * v * 0.01);
            params.height = Math.max(params.height, speechWave.getMeasuredWidth());
            speechWave.setLayoutParams(params);
        }

        @Override public void onBufferReceived(byte[] bytes) { }

        @Override public void onEndOfSpeech() { }

        @Override
        public void onError(int error) {
            StringBuilder sb = new StringBuilder();
            switch (error) {
            case SpeechRecognizer.ERROR_AUDIO:
                sb.append("Audio recording error");
                break;
            case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
                sb.append("No speech input");
                break;
            case SpeechRecognizer.ERROR_CLIENT:
                sb.append("Client side error");
                break;
            case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
                sb.append("Insufficient permissions");
                break;
            case SpeechRecognizer.ERROR_NETWORK:
                sb.append("Network error");
                break;
            case SpeechRecognizer.ERROR_NO_MATCH:
                sb.append("No recognition result matched");
                break;
            case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
                sb.append("RecognitionService busy");
                break;
            case SpeechRecognizer.ERROR_SERVER:
                sb.append("Server error");
                break;
            case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
                sb.append("Network operation timed out");
                break;
            }
            sb.append(":" + error);
            print(sb.toString());
            Message message = mhandler.obtainMessage(RECOGNIZE_ERROR);
            message.obj = sb.toString();
            mhandler.sendMessage(message);
        }

        @Override
        public void onResults(Bundle results) {
            ArrayList<String> nbest = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            print("Recognition results: " + Arrays.toString(nbest.toArray(new String[nbest.size()])));
            String json_res = results.getString("origin_result");
            String results_nlu_json = results.getString("results_nlu");
            Message message = mhandler.obtainMessage(RECOGNIZE_SUCESS);
            message.obj = results_nlu_json;
            mhandler.sendMessage(message);
            // Update the UI with the NLU result.
            ResultsAnalysisManager.analyseResult(results_nlu_json);
            print("result_nlu=\n" + results_nlu_json);
            try {
                print("origin_result=\n" + new JSONObject(json_res).toString(4));
            } catch (Exception e) {
                print("origin_result=[warning: bad json]\n" + json_res);
            }
        }

        @Override public void onPartialResults(Bundle bundle) { }

        @Override
        public void onEvent(int eventType, Bundle bundle) {
            switch (eventType) {
            case 11:
                String reason = bundle.get("reason") + "";
                print("EVENT_ERROR, " + reason);
                Message message = mhandler.obtainMessage(RECOGNIZE_ERROR);
                message.obj = reason;
                mhandler.sendMessage(message);
                break;
            }
        }
    };

    speechSynthesizerListener = new SpeechSynthesizerListener() {
        @Override public void onStartWorking(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechStart(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onNewDataArrive(SpeechSynthesizer speechSynthesizer, byte[] bytes, boolean b) { }
        @Override public void onBufferProgressChanged(SpeechSynthesizer speechSynthesizer, int i) { }
        @Override public void onSpeechProgressChanged(SpeechSynthesizer speechSynthesizer, int i) { }
        @Override public void onSpeechPause(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechResume(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onCancel(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSynthesizeFinish(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onSpeechFinish(SpeechSynthesizer speechSynthesizer) { }
        @Override public void onError(SpeechSynthesizer speechSynthesizer, SpeechError speechError) { }
    };
}
From source file: com.onebus.view.MainActivity.java
public void onError(int error) {
    status = STATUS_None;
    switch (error) {
    case SpeechRecognizer.ERROR_AUDIO:
        Toast.makeText(getApplicationContext(), "Audio recording error", Toast.LENGTH_SHORT).show();
        break;
    case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
        Toast.makeText(getApplicationContext(), "No speech input", Toast.LENGTH_SHORT).show();
        break;
    case SpeechRecognizer.ERROR_CLIENT:
        Toast.makeText(getApplicationContext(), "Client side error", Toast.LENGTH_SHORT).show();
        break;
    case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
        Toast.makeText(getApplicationContext(), "Insufficient permissions", Toast.LENGTH_SHORT).show();
        break;
    case SpeechRecognizer.ERROR_NETWORK:
        Toast.makeText(getApplicationContext(), "Network error", Toast.LENGTH_SHORT).show();
        break;
    case SpeechRecognizer.ERROR_NO_MATCH:
        Toast.makeText(getApplicationContext(), "No recognition result matched", Toast.LENGTH_SHORT).show();
        break;
    case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
        Toast.makeText(getApplicationContext(), "RecognitionService busy", Toast.LENGTH_SHORT).show();
        break;
    case SpeechRecognizer.ERROR_SERVER:
        Toast.makeText(getApplicationContext(), "Server error", Toast.LENGTH_SHORT).show();
        break;
    case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
        Toast.makeText(getApplicationContext(), "Network operation timed out", Toast.LENGTH_SHORT).show();
        break;
    }
}
From source file: com.cwp.cmoneycharge.activity.AddPayActivity.java
@Override
public void onError(int error) {
    // Recognition error: build a readable message and show it in a Toast.
    status = STATUS_None;
    StringBuilder sb = new StringBuilder();
    switch (error) {
    case SpeechRecognizer.ERROR_AUDIO:
        sb.append("Audio recording error");
        break;
    case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
        sb.append("No speech input");
        break;
    case SpeechRecognizer.ERROR_CLIENT:
        sb.append("Client side error");
        break;
    case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
        sb.append("Insufficient permissions");
        break;
    case SpeechRecognizer.ERROR_NETWORK:
        sb.append("Network error");
        break;
    case SpeechRecognizer.ERROR_NO_MATCH:
        sb.append("No recognition result matched");
        break;
    case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
        sb.append("RecognitionService busy");
        break;
    case SpeechRecognizer.ERROR_SERVER:
        sb.append("Server error");
        break;
    case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
        sb.append("Network operation timed out");
        break;
    }
    sb.append(":" + error);
    Toast.makeText(AddPayActivity.this, sb.toString(), Toast.LENGTH_SHORT).show();
}
From source file: org.botlibre.sdk.activity.ChatActivity.java
@Override
public void onError(int error) {
    debug("onError:" + error);
    Log.d("onError Info", "ChatActivity on error executes here!");
    try {
        isRecording = false;
        lastReply = System.currentTimeMillis();
        this.speech.destroy();
        this.speech = SpeechRecognizer.createSpeechRecognizer(this);
        this.speech.setRecognitionListener(this);
        setMicIcon(false, false);
        muteMicBeep(false);
        setStreamVolume();
        if (error == SpeechRecognizer.ERROR_AUDIO) {
            Log.d("System.out", "Error: Audio recording error");
        } else if (error == SpeechRecognizer.ERROR_CLIENT) {
            Log.d("System.out", "Error: Other client side error");
            restartListening();
        } else if (error == SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS) {
            Log.d("System.out", "Error: Insufficient permissions");
        } else if (error == SpeechRecognizer.ERROR_NETWORK) {
            Log.d("System.out", "Error: Other network error");
        } else if (error == SpeechRecognizer.ERROR_NETWORK_TIMEOUT) {
            Log.d("System.out", "Error: Network operation timed out");
        } else if (error == SpeechRecognizer.ERROR_NO_MATCH) {
            Log.d("System.out", "Error: No recognition result matched");
            restartListening();
        } else if (error == SpeechRecognizer.ERROR_RECOGNIZER_BUSY) {
            Log.d("System.out", "Error: Recognition service busy");
            restartListening();
        } else if (error == SpeechRecognizer.ERROR_SERVER) {
            Log.d("System.out", "Error: Server error");
            failedOfflineLanguage = true;
            restartListening();
        } else if (error == SpeechRecognizer.ERROR_SPEECH_TIMEOUT) {
            Log.d("System.out", "Error: No speech input");
            isListening = true;
            restartListening();
        }
    } catch (Exception e) {
        Log.e("micError", e.getMessage());
    }
}