Google recognizer and pocketsphinx in two different classes, how to loop them?

Submitted 2019-12-12 01:42:20

Question


Yesterday I asked a simplified version of this problem, but I think it was too simplified.

What my program should do is listen for a keyword and, when it hears it, listen to what I say next (like talking to Siri or Google Now by saying "Siri" or "OK Google").

I'm using pocketsphinx for the keyword and the Google SpeechRecognizer for the longer parts. It works, but only once. The pocketsphinx recognizer lives in MainActivity and the Google recognizer is in a separate class (Jarvis).

The program starts with the pocketsphinx listener. When it hears the KEYPHRASE, it starts the Google listener by calling jarvis.startListening() (via the next() method). The problem: when the Google listener is done, I never get back from the Jarvis class to MainActivity to call next() again.

(When the Google recognizer is done, the last thing it does runs in onResults() in the Jarvis class, but from there I can't call the next() method of the MainActivity class.)

MainActivity

package com.example.superuser.jarvis;

import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;

import java.io.File;
import java.io.IOException;

import edu.cmu.pocketsphinx.Assets;
import edu.cmu.pocketsphinx.Hypothesis;
import edu.cmu.pocketsphinx.SpeechRecognizer;
import edu.cmu.pocketsphinx.SpeechRecognizerSetup;

import static android.widget.Toast.makeText;
import static edu.cmu.pocketsphinx.SpeechRecognizerSetup.defaultSetup;



public class MainActivity extends Activity implements edu.cmu.pocketsphinx.RecognitionListener {

    private String LOG_TAG = "Jarvis_hears_anything";
    private TextView tv;
    private Jarvis jarvis;
    private boolean wannahearjarvis = false;

    /* Named searches allow to quickly reconfigure the decoder */
    private static final String KWS_SEARCH = "wakeup";

    /* Keyword we are looking for to activate menu */
    private static final String KEYPHRASE = "jarvis";

    private edu.cmu.pocketsphinx.SpeechRecognizer recognizer;
    //private HashMap<String, Integer> captions;


    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        final Button button = (Button) findViewById(R.id.b1);
        tv = (TextView) findViewById(R.id.tv1);
        //captions = new HashMap<String, Integer>();
        //captions.put(KWS_SEARCH, R.string.kws_caption);
        jarvis = new Jarvis(getApplicationContext());
        new AsyncTask<Void, Void, Exception>() {
            @Override
            protected Exception doInBackground(Void... params) {
                try {
                    Assets assets = new Assets(MainActivity.this);
                    File assetDir = assets.syncAssets();
                    setupRecognizer(assetDir);
                } catch (IOException e) {
                    return e;
                }
                return null;
            }

            @Override
            protected void onPostExecute(Exception result) {
                if (result != null) {
                    ((TextView) findViewById(R.id.tv1))
                            .setText("Failed to init recognizer " + result);
                } else {
                    //switchSearch(KWS_SEARCH);
                    recognizer.startListening(KWS_SEARCH);
                }
            }
        }.execute();

        button.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Toast.makeText(getApplicationContext(), "geht", Toast.LENGTH_SHORT).show();
            }
        });


    }

    public void next(){
        if (wannahearjarvis){
            recognizer.startListening(KWS_SEARCH);
            wannahearjarvis = false;
        }
        else{
            jarvis.startListening();
            wannahearjarvis = true;
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        recognizer.cancel();
        recognizer.shutdown();
    }

    /**
     * In partial result we get quick updates about current hypothesis. In
     * keyword spotting mode we can react here, in other modes we need to wait
     * for final result in onResult.
     */
    @Override
    public void onPartialResult(Hypothesis hypothesis) {
        if (hypothesis == null)
            return;

        String text = hypothesis.getHypstr();
        if (text.equals(KEYPHRASE)){
            tv.append("found");
            recognizer.stop();
            //switchSearch(KWS_SEARCH);

        }
        else {
            //((TextView) findViewById(R.id.tv1)).append(text+"PR");
            //Log.i(LOG_TAG, text+"PR");
        }
    }

    /**
     * This callback is called when we stop the recognizer.
     */
    @Override
    public void onResult(Hypothesis hypothesis) {
        //((TextView) findViewById(R.id.tv1)).setText("");
        ((TextView) findViewById(R.id.tv1)).append("oR");
        if (hypothesis != null) {
            String text = hypothesis.getHypstr();
            makeText(getApplicationContext(), text, Toast.LENGTH_SHORT).show();
        }

        next();
    }

    @Override
    public void onBeginningOfSpeech() {
    }

    /**
     * We stop recognizer here to get a final result
     */
    @Override
    public void onEndOfSpeech() {
        if (!recognizer.getSearchName().equals(KWS_SEARCH)){
            tv.append("fuck");
        }
            //switchSearch(KWS_SEARCH);
    }

    /*private void switchSearch(String searchName) {
        recognizer.stop();

        // If we are not spotting, start listening with timeout (10000 ms or 10 seconds).
        if (searchName.equals(KWS_SEARCH))
            recognizer.startListening(searchName);
        else
            recognizer.startListening(searchName, 10000);

        //String caption = getResources().getString(captions.get(searchName));

        //((TextView) findViewById(R.id.tv1)).setText(caption);
        //((TextView) findViewById(R.id.tv1)).append(caption);
    }*/

    private void setupRecognizer(File assetsDir) throws IOException {
        // The recognizer can be configured to perform multiple searches
        // of different kind and switch between them

        recognizer = defaultSetup()
                .setAcousticModel(new File(assetsDir, "en-us-ptm"))
                .setDictionary(new File(assetsDir, "cmudict-en-us.dict"))

                        // To disable logging of raw audio comment out this call (takes a lot of space on the device)
                .setRawLogDir(assetsDir)

                        // Threshold to tune for keyphrase to balance between false alarms and misses
                .setKeywordThreshold(1e-20f)

                        // Use context-independent phonetic search, context-dependent is too slow for mobile
                .setBoolean("-allphone_ci", true)

                .getRecognizer();
        recognizer.addListener(this);

        /** In your application you might not need to add all those searches.
         * They are added here for demonstration. You can leave just one.
         */

        // Create keyword-activation search.
        recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);

    }

    @Override
    public void onError(Exception error) {
        ((TextView) findViewById(R.id.tv1)).setText(error.getMessage());
    }

    @Override
    public void onTimeout() {
        //switchSearch(KWS_SEARCH);
    }
}

Jarvis

package com.example.superuser.jarvis;

import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.widget.Toast;

import java.util.ArrayList;

// Wrapper around Android's SpeechRecognizer (the Google recognizer); results arrive in onResults()
public class Jarvis implements RecognitionListener {

    private AudioManager audiom;
    private SpeechRecognizer speech;
    private Intent recogIntent;
    private Toast m;
    private Context c;
    private String text;


    public Jarvis(Context context){
        speech = SpeechRecognizer.createSpeechRecognizer(context);
        speech.setRecognitionListener(this);
        recogIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        recogIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE, "de");
        //recogIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, context.getPackageName());
        m = new Toast(context);
        c=context;
    }

    public void startListening(){
        speech.startListening(recogIntent);
    }

    public void destroy(){
        speech.stopListening();
        speech.cancel();
        speech.destroy();
    }


    @Override
    public void onReadyForSpeech(Bundle params) {

    }

    @Override
    public void onBeginningOfSpeech() {

    }

    @Override
    public void onRmsChanged(float rmsdB) {

    }

    @Override
    public void onBufferReceived(byte[] buffer) {

    }

    @Override
    public void onEndOfSpeech() {

    }

    @Override
    public void onError(int error) {

    }

    @Override
    public void onResults(Bundle results) {
        ArrayList<String> matches = results
                .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        Toast.makeText(c, matches.get(0), Toast.LENGTH_LONG).show();
        speech.cancel();
        // tried:
        //   MainActivity m = new MainActivity();
        //   m.next();
        // but got a NullPointerException

    }

    @Override
    public void onPartialResults(Bundle partialResults) {

    }

    @Override
    public void onEvent(int eventType, Bundle params) {

    }
}

Answer 1:


You can store a reference to the main activity in a field of the Jarvis object:

class Jarvis {
    ....
    private MainActivity m;
    ....
    public Jarvis(MainActivity m) {
         this.m = m;
    }
    ....
    public void onResults(Bundle results) {
       ....
       m.next();
    }
    ....
}

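On the caller side, a minimal sketch of how this would wire up, assuming the constructor change above and the next() method from the question (the Toast now uses the activity as its Context, which is fine since an Activity is a Context):

// In MainActivity.onCreate(): pass the activity itself instead of the application context
jarvis = new Jarvis(this);

// In Jarvis.onResults(): hand control back to the activity so the loop continues
@Override
public void onResults(Bundle results) {
    ArrayList<String> matches = results
            .getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
    Toast.makeText(m, matches.get(0), Toast.LENGTH_LONG).show();
    speech.cancel();
    m.next();   // wannahearjarvis is true at this point, so next() restarts the KWS_SEARCH
}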
You can also send intents to the main activity as described here. This might be overkill in your case though.
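For the intent route, a rough sketch using LocalBroadcastManager from the support library (the action string is illustrative, and you would also need the BroadcastReceiver, IntentFilter and LocalBroadcastManager imports):

// In Jarvis.onResults(): announce that the Google recognizer has finished
LocalBroadcastManager.getInstance(c)
        .sendBroadcast(new Intent("com.example.superuser.jarvis.GOOGLE_DONE"));

// In MainActivity.onCreate(): resume keyword spotting whenever that broadcast arrives
LocalBroadcastManager.getInstance(this).registerReceiver(new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        next();
    }
}, new IntentFilter("com.example.superuser.jarvis.GOOGLE_DONE"));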



Source: https://stackoverflow.com/questions/36403686/google-recognizer-and-pocketsphinx-in-two-different-classes-how-to-loop-them
