如何在android中使用google语音识别器识别单词

如何在android中使用google语音识别器识别单词,android,speech-recognition,Android,Speech Recognition,Logcat 我的应用程序会识别一个词,例如mobile,然后它会做一些事情。我发现使用谷歌语音识别器很简单。如下图所示: public abstract class SpeechRecognizerResult extends Activity implements SpeechRecognizerManager.OnResultListener { private final String TAG = getClass().getSimpleName(); private SpeechRec

Logcat

我的应用程序会识别一个词,例如mobile,然后它会做一些事情。我发现使用
谷歌语音识别器
很简单。如下图所示:

// Concrete (not abstract): an abstract Activity cannot be instantiated by the
// framework, which is exactly the launch failure discussed in the comments.
public class SpeechRecognizerResult extends Activity implements SpeechRecognizerManager.OnResultListener {

    // Log tag derived from the runtime class name.
    private final String TAG = getClass().getSimpleName();
    private SpeechRecognizerManager mSpeechRecognizerManager;
    private TextView txt_result;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.recognizer_listener);

        mSpeechRecognizerManager = new SpeechRecognizerManager(this);
        mSpeechRecognizerManager.setOnResultListner(this);

        txt_result = (TextView) findViewById(R.id.text_result);
    }

    @Override
    protected void onDestroy() {
        // Release the platform SpeechRecognizer; without this the recognizer's
        // service connection leaks when the Activity is torn down.
        if (mSpeechRecognizerManager != null) {
            mSpeechRecognizerManager.destroy();
        }
        super.onDestroy();
    }

    /**
     * Receives the candidate transcriptions for one utterance and reacts to
     * the first candidate that equals "mobile".
     *
     * @param commands recognizer hypotheses, best match first
     */
    @Override
    public void OnResult(ArrayList<String> commands) {
        for (String command : commands) {
            if (command.equals("mobile")) {
                Toast.makeText(this, "You said:" + command, Toast.LENGTH_SHORT).show();
                txt_result.setText(command);
                return;
            }
        }
    }
}
public abstract class SpeechRecognizerResult extends Activity implements SpeechRecognizerManager.OnResultListener {
    private final String TAG = getClass().getSimpleName();
    private SpeechRecognizerManager mSpeechRecognizerManager;
    private TextView txt_result;

    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.recognizer_listener);
        mSpeechRecognizerManager = new SpeechRecognizerManager(this);
        mSpeechRecognizerManager.setOnResultListner(this);
        txt_result = (TextView) findViewById(R.id.text_result);
    }

    @Override
    public void OnResult(ArrayList<String> commands) {
        for (String command : commands) {
            if (command.equals("mobile")) {
                Toast.makeText(this, "You said:" + command, Toast.LENGTH_SHORT).show();
                txt_result.setText(command);
                return;
            }
        }
    }
}

public class SpeechRecognizerManager {
    private Context mContext;
    protected android.speech.SpeechRecognizer mGoogleSpeechRecognizer;
    protected Intent mSpeechRecognizerIntent;
    private OnResultListener mOnResultListener;

    public SpeechRecognizerManager(Context context) {
        this.mContext = context;
        initGoogleSpeechRecognizer();
    }

    private void initGoogleSpeechRecognizer() {
        mGoogleSpeechRecognizer = android.speech.SpeechRecognizer
                .createSpeechRecognizer(mContext);
        mGoogleSpeechRecognizer.setRecognitionListener(new GoogleRecognitionListener());
        mSpeechRecognizerIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        mSpeechRecognizerIntent.putExtra(RecognizerIntent.EXTRA_CONFIDENCE_SCORES, true);
    }

    public void destroy() {
        if (mGoogleSpeechRecognizer != null) {
            mGoogleSpeechRecognizer.cancel();
            mGoogleSpeechRecognizer.destroy();
        }
    }

    protected class GoogleRecognitionListener implements
            android.speech.RecognitionListener {
        private final String TAG = GoogleRecognitionListener.class
                .getSimpleName();

        @Override
        public void onBeginningOfSpeech() {
        }

        @Override
        public void onEndOfSpeech() {
        }

        @Override
        public void onReadyForSpeech(Bundle params) {
        }

        @Override
        public void onRmsChanged(float rmsdB) {
        }

        @Override
        public void onBufferReceived(byte[] buffer) {
        }

        @Override
        public void onError(int error) {
            Log.e(TAG, "onError:" + error);
        }

        @Override
        public void onPartialResults(Bundle partialResults) {
            Log.d(TAG, "onPartialResultsheard:");
        }

        @Override
        public void onResults(Bundle results) {
            if ((results != null)
                    && results
                    .containsKey(android.speech.SpeechRecognizer.RESULTS_RECOGNITION)) {
                ArrayList<String> heard = results
                        .getStringArrayList(android.speech.SpeechRecognizer.RESULTS_RECOGNITION);
                float[] scores = results
                        .getFloatArray(android.speech.SpeechRecognizer.CONFIDENCE_SCORES);
                for (int i = 0; i < ...  // (code truncated in the original page; the full listing appears below)
  • 我应该如何解决问题
  • 我如何继续识别(找到第一个后)

将您的SpeechRecognizerResult类公开,而不是抽象的。@brandall。噢!非常感谢。但现在我说mobile时什么也没发生。为什么?您在哪里调用
startListening()
?我在您的代码中看不到它。@brandall。我希望在设备锁定时看到它(尽管应用程序在后台运行),那么我应该把它放在哪里?一个锁屏小部件
// Concrete (not abstract): an abstract Activity cannot be launched by the
// framework, which is the fix suggested in the comments above.
public class SpeechRecognizerResult extends Activity implements SpeechRecognizerManager.OnResultListener {

    // Log tag derived from the runtime class name.
    private final String TAG = getClass().getSimpleName();
    private SpeechRecognizerManager mSpeechRecognizerManager;
    private TextView txt_result;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.recognizer_listener);

        mSpeechRecognizerManager = new SpeechRecognizerManager(this);
        mSpeechRecognizerManager.setOnResultListner(this);

        txt_result = (TextView) findViewById(R.id.text_result);
    }

    @Override
    protected void onDestroy() {
        // Release the platform SpeechRecognizer; without this the recognizer's
        // service connection leaks when the Activity is torn down.
        if (mSpeechRecognizerManager != null) {
            mSpeechRecognizerManager.destroy();
        }
        super.onDestroy();
    }

    /**
     * Receives the candidate transcriptions for one utterance and reacts to
     * the first candidate that equals "mobile".
     *
     * @param commands recognizer hypotheses, best match first
     */
    @Override
    public void OnResult(ArrayList<String> commands) {
        for (String command : commands) {
            if (command.equals("mobile")) {
                Toast.makeText(this, "You said:" + command, Toast.LENGTH_SHORT).show();
                txt_result.setText(command);
                return;
            }
        }
    }
}
// Wraps the platform android.speech.SpeechRecognizer: creates it, configures
// the recognition intent, and forwards results to a registered OnResultListener.
public class SpeechRecognizerManager {
// Context used to create the platform recognizer.
private Context mContext;
// Platform recognizer instance; released in destroy().
protected android.speech.SpeechRecognizer mGoogleSpeechRecognizer;
// Recognition intent (free-form language model, confidence scores requested).
protected Intent mSpeechRecognizerIntent;
// Client callback receiving the candidate transcriptions; may be null.
private OnResultListener mOnResultListener;


/**
 * Creates a manager bound to the given context and immediately wires up the
 * underlying Google speech recognizer.
 *
 * @param context context used to create the platform recognizer
 */
public SpeechRecognizerManager(Context context) {
    mContext = context;
    initGoogleSpeechRecognizer();
}


/**
 * Builds the platform SpeechRecognizer, attaches our RecognitionListener,
 * and prepares the recognition intent (free-form model, confidence scores
 * requested).
 */
private void initGoogleSpeechRecognizer() {
    mGoogleSpeechRecognizer = android.speech.SpeechRecognizer.createSpeechRecognizer(mContext);
    mGoogleSpeechRecognizer.setRecognitionListener(new GoogleRecognitionListener());

    Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
    intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
            RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
    // Ask the recognizer to include per-result confidence values.
    intent.putExtra(RecognizerIntent.EXTRA_CONFIDENCE_SCORES, true);
    mSpeechRecognizerIntent = intent;
}


/**
 * Cancels any in-flight recognition and releases the platform recognizer.
 * Safe to call more than once.
 */
public void destroy() {
    if (mGoogleSpeechRecognizer != null) {
        mGoogleSpeechRecognizer.cancel();
        mGoogleSpeechRecognizer.destroy();
        // Drop the reference so a repeated destroy() is a no-op and the
        // released recognizer cannot be used by mistake.
        mGoogleSpeechRecognizer = null;
    }
}



/**
 * Bridges android.speech.RecognitionListener callbacks to the
 * OnResultListener registered on the manager.
 */
protected class GoogleRecognitionListener implements
        android.speech.RecognitionListener {

    private final String TAG = GoogleRecognitionListener.class
            .getSimpleName();

    @Override
    public void onBeginningOfSpeech() {
    }

    @Override
    public void onEndOfSpeech() {
    }

    @Override
    public void onReadyForSpeech(Bundle params) {
    }

    @Override
    public void onRmsChanged(float rmsdB) {
    }

    @Override
    public void onBufferReceived(byte[] buffer) {
    }

    @Override
    public void onError(int error) {
        // 'error' is one of the SpeechRecognizer.ERROR_* codes.
        Log.e(TAG, "onError:" + error);
    }

    @Override
    public void onPartialResults(Bundle partialResults) {
        Log.d(TAG, "onPartialResultsheard:");
    }

    /**
     * Final recognition results for one utterance. Logs each hypothesis
     * with its confidence (when available) and forwards the list to the
     * registered listener.
     */
    @Override
    public void onResults(Bundle results) {
        if (results == null
                || !results.containsKey(android.speech.SpeechRecognizer.RESULTS_RECOGNITION)) {
            return;
        }

        ArrayList<String> heard = results
                .getStringArrayList(android.speech.SpeechRecognizer.RESULTS_RECOGNITION);
        if (heard == null) {
            return;
        }

        // The confidence array is optional: the platform may return null or
        // fewer entries than results, so guard before indexing (the original
        // code crashed with a NullPointerException here).
        float[] scores = results
                .getFloatArray(android.speech.SpeechRecognizer.CONFIDENCE_SCORES);

        for (int i = 0; i < heard.size(); i++) {
            float confidence = (scores != null && i < scores.length) ? scores[i] : -1f;
            Log.d(TAG, "onResultsheard:" + heard.get(i)
                    + " confidence:" + confidence);
        }

        // Hand the candidate transcriptions to the registered listener.
        if (mOnResultListener != null) {
            mOnResultListener.OnResult(heard);
        }
    }

    @Override
    public void onEvent(int eventType, Bundle params) {
    }

}



    /**
     * Registers the callback that will receive recognition results.
     * (Method name kept as-is — "Listner" — because existing callers use it.)
     *
     * @param onResultListener callback to notify, or null to clear it
     */
    public void setOnResultListner(OnResultListener onResultListener) {
        this.mOnResultListener = onResultListener;
    }

public interface OnResultListener{
    public void OnResult(ArrayList<String> commands);
}