Implementing Nuance Speech Recognition in Swift, cannot listen to onResult, onError… events

谎友^ 2020-12-20 08:57

I have two parts in my speech recognition project with Nuance: the .h file of a module (Objective-C) and a ViewController (Swift).

I want to set up a Spee…

3 Answers
  • 2020-12-20 08:59

    Here's what I've got. The Bridging Header:

    #import <SpeechKit/SpeechKit.h>
    #import "NuanceHeader.h"
    

    NuanceHeader.h:

    #import <Foundation/Foundation.h>
    @interface NuanceHeader : NSObject
    @end
    

    NuanceHeader.m

    #import "NuanceHeader.h"
    const unsigned char SpeechKitApplicationKey[] = {...};
    @implementation NuanceHeader
    @end
    

    When it comes to the UIViewController that uses all this:

    import UIKit

    class MyViewController: UIViewController, SpeechKitDelegate, SKRecognizerDelegate
    {
        var voiceSearch: SKRecognizer?
    
        override func viewDidLoad()
        {
           super.viewDidLoad()

           //Setup SpeechKit
           SpeechKit.setupWithID("...", host: "sandbox.nmdp.nuancemobility.net", port: 443, useSSL: false, delegate: self)
        }
    
        func someAction()
        {
            self.voiceSearch = SKRecognizer(type: SKSearchRecognizerType, detection: UInt(SKLongEndOfSpeechDetection), language:"eng-USA", delegate: self)
    
        }
    
        func recognizerDidBeginRecording(recognizer: SKRecognizer!)
        {
            //The recording has started
        }
    
        func recognizerDidFinishRecording(recognizer: SKRecognizer!)
        {
            //The recording has stopped
        }
    
        func recognizer(recognizer: SKRecognizer!, didFinishWithResults results: SKRecognition!)
        {
            //The voice recognition process has understood something
        }
    
        func recognizer(recognizer: SKRecognizer!, didFinishWithError error: NSError!, suggestion: String!)
        {
           //an error has occurred
        }
    }
    

    There is nothing else to it; check every step, this part is pretty straightforward.
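
    If the result and error delegate methods never seem to fire, it is worth checking that the recording actually ends: with SKLongEndOfSpeechDetection the recognizer keeps listening until it detects a long pause, so stopping it explicitly can help. A minimal sketch, assuming the MyViewController above and the stopRecording method of the SpeechKit 1.x SKRecognizer (the button actions themselves are hypothetical):

    @IBAction func startTapped(sender: AnyObject)
    {
        someAction()                      // creates the recognizer and starts listening
    }

    @IBAction func stopTapped(sender: AnyObject)
    {
        // Ending the recording is what leads to didFinishWithResults / didFinishWithError
        self.voiceSearch?.stopRecording()
    }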

  • 2020-12-20 09:08

    Since things have changed a bit I thought I would add my 2 cents:

    var listening = false
    var transaction: SKTransaction?
    var session: SKSession?
    
    override func viewDidLoad() {
        super.viewDidLoad()
    
        session = SKSession(URL: NSURL(string: serverURL), appToken: appKey)
    
        let audioFormat = SKPCMFormat()
        audioFormat.sampleFormat = .SignedLinear16;
        audioFormat.sampleRate = 16000;
        audioFormat.channels = 1;
    
        print("\(NSHomeDirectory())/start.mp3")
    
        // Attach them to the session
        session!.startEarcon = SKAudioFile(URL: NSURL(fileURLWithPath: "\(NSHomeDirectory())/start.mp3"), pcmFormat: audioFormat)
        session!.endEarcon = SKAudioFile(URL: NSURL(fileURLWithPath: "\(NSHomeDirectory())/stop.mp3"), pcmFormat: audioFormat)
    
    }
    
    @IBAction func speechButtonDidClick(sender: AnyObject) {
    
        if listening == false {
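            // Not currently listening: start a new dictation transaction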
            transaction = session?.recognizeWithType(SKTransactionSpeechTypeDictation,
                                                        detection: .Short,
                                                        language: "eng-USA",
                                                        delegate: self)
    
        }else{
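            // Already listening: end the recording so the recognition / service-response callbacks can fire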
            transaction?.stopRecording()
        }
    }
    
    // SKTransactionDelegate
    func transactionDidBeginRecording(transaction: SKTransaction!) {
        messageText.text = "listening"
        listening = true
        indicator.startAnimating()
        startPollingVolume()
    }
    func transactionDidFinishRecording(transaction: SKTransaction!) {
        messageText.text = "stopped"
        listening = false
        indicator.stopAnimating()
        stopPollingVolume()
    }
    
    func transaction(transaction: SKTransaction!, didReceiveRecognition recognition: SKRecognition!) {
    
        print("got something")
    
        //Take the best result
        if recognition.text != nil{
            speechTextField.text = recognition.text
        }
    }
    func transaction(transaction: SKTransaction!, didReceiveServiceResponse response: [NSObject : AnyObject]!) {
        print ("service response")
        print(response)
    }
    func transaction(transaction: SKTransaction!, didFinishWithSuggestion suggestion: String!) {
    }
    func transaction(transaction: SKTransaction!, didFailWithError error: NSError!, suggestion: String!) {
        print ("error")
        print(error)
    }
    
    
    var timer = NSTimer()
    var interval = 0.01;
    
    func startPollingVolume() {
        timer = NSTimer.scheduledTimerWithTimeInterval(interval,
                                                       target: self,
                                                       selector: #selector(ViewController.pollVolume),
                                                       userInfo: nil,
                                                       repeats: true)
    }
    
    func pollVolume() {
        if transaction != nil{
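            // Map the reported audio level onto the progress bar's 0–1 range (divided by 90 here)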
            let volumeLevel:Float = transaction!.audioLevel
            audioLevelIndicator.progress = volumeLevel / 90
        }
    }
    
    func stopPollingVolume() {
        timer.invalidate()
        audioLevelIndicator.progress = 0
    }
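
    The snippet above assumes a surrounding view controller, credentials and a few outlets that are not shown. A rough sketch of those declarations, purely for orientation (the credential values and outlet names are placeholders, and SpeechKit can be exposed either as a module import or through a bridging header as in the first answer):

    import UIKit
    import SpeechKit   // or expose SpeechKit via a bridging header instead

    class ViewController: UIViewController, SKTransactionDelegate {

        // Placeholder credentials issued by the Nuance developer portal
        let serverURL = "<server URL from the Nuance portal>"
        let appKey = "<application key from the Nuance portal>"

        // Placeholder outlets referenced by the snippet above
        @IBOutlet weak var messageText: UILabel!
        @IBOutlet weak var speechTextField: UITextField!
        @IBOutlet weak var indicator: UIActivityIndicatorView!
        @IBOutlet weak var audioLevelIndicator: UIProgressView!

        // viewDidLoad, speechButtonDidClick, the SKTransactionDelegate methods and the
        // volume-polling helpers from the snippet above go here
    }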
    

    hope this helps someone!

  • 2020-12-20 09:11

    Try let objCDelegate = self as SKRecognizerDelegate and then pass objCDelegate as the delegate parameter.
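
    For illustration, applying that to the first answer's setup code might look like this (sketch only):

    let objCDelegate = self as SKRecognizerDelegate
    self.voiceSearch = SKRecognizer(type: SKSearchRecognizerType,
                                    detection: UInt(SKLongEndOfSpeechDetection),
                                    language: "eng-USA",
                                    delegate: objCDelegate)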
