WebRTC iOS: Record Remote Audio stream using WebRTC


Question


I am working on an audio-streaming application with recording functionality for the receiver.

I got stuck at the point where the user wants to record the audio stream on the receiver side.

Below is my code.

Initialisation

var engine = AVAudioEngine()
var recordingFile: AVAudioFile?
var audioPlayer: AVAudioPlayer?
let player = AVAudioPlayerNode()
var isRecording: Bool = false

Initialise AudioEngine

func initializeAudioEngine() {

        // inputNode is the device's hardware (microphone) input.
        let input = self.engine.inputNode
        let format = input.inputFormat(forBus: 0)

        self.engine.attach(self.player)
        let mainMixerNode = self.engine.mainMixerNode
        self.engine.connect(input, to: mainMixerNode, format: format)
        self.engine.prepare()

        do {
            try self.engine.start()
            self.startRecording()
        } catch {
            print("START FAILED", error)
        }
    }
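One thing not shown above is the AVAudioSession configuration; the engine's inputNode only delivers audio once the session allows recording. A minimal sketch, assuming a .playAndRecord category with Bluetooth/wired-headset options (this helper is not taken from the original code):

func configureSessionForRecording() throws {
    // Assumed helper, not part of the original question.
    let session = AVAudioSession.sharedInstance()
    // .playAndRecord lets the app play the remote stream while capturing input;
    // .allowBluetooth / .defaultToSpeaker are assumptions for earphone/headset use.
    try session.setCategory(.playAndRecord,
                            mode: .voiceChat,
                            options: [.allowBluetooth, .defaultToSpeaker])
    try session.setActive(true)
}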

Start Recording

func startRecording() {

        self.createRecordingFile()
        self.isRecording = true

        // Tap the main mixer: every buffer that reaches the mixer is appended to the recording file.
        self.engine.mainMixerNode.installTap(onBus: 0,
                                             bufferSize: 1024,
                                             format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { (buffer, time) in
            do {
                try self.recordingFile?.write(from: buffer)
            } catch {
                print("RECORD ERROR", error)
            }
        }
    }

Create Buffer

private func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
        guard let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") else {
            return nil
        }
        do {
            let file = try AVAudioFile(forReading: fileURL)
            guard let buffer = AVAudioPCMBuffer(pcmFormat: file.processingFormat,
                                                frameCapacity: AVAudioFrameCount(file.length)) else {
                return nil
            }
            try file.read(into: buffer)
            return buffer
        } catch {
            print("ERROR reading file", error)
            return nil
        }
    }
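A buffer created this way could then be scheduled on the player node attached in initializeAudioEngine(); a minimal usage sketch, with "clip" as a hypothetical file name (the player node would also need to be connected to the mixer, which the code above does not show):

if let buffer = createBuffer(forFileNamed: "clip") {        // "clip.caf" is a hypothetical bundle resource
    player.scheduleBuffer(buffer, completionHandler: nil)   // queue the whole buffer on the player node
    player.play()
}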

Stop Recording

func stopRecording() {
   self.engine.mainMixerNode.removeTap(onBus: 0)
}

I am trying to record using earphones, but it is not working.


Answer 1:


WebRTC does not expose any internal API on iOS to start or stop recording. We can try using AVAudioSession and AVAudioRecorder instead.

First, set up the audio session:

func setUpAudioSession() -> Bool {
      let audiosession = AVAudioSession.sharedInstance()
      do {
        try audiosession.setCategory(.playAndRecord, mode: .default)
      } catch {
         print("--> \(error.localizedDescription)")
      }
      do {
        try audiosession.setActive(true)
      } catch {
        print("--> \(error.localizedDescription)")
      }
      return audiosession.isInputAvailable
}
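Before any of this can capture audio, the app also needs microphone permission; a minimal sketch, assuming NSMicrophoneUsageDescription is already set in Info.plist (this gate is my addition, not part of the answer):

AVAudioSession.sharedInstance().requestRecordPermission { granted in
    DispatchQueue.main.async {
        if granted {
            // Only start the recorder once the user has allowed microphone access.
            _ = self.startRecording()
        } else {
            print("Microphone permission denied")
        }
    }
}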

After setting up the audio session, start recording as below:

func startRecording() -> Bool {
      var settings: [String: Any] = [:]
      settings[AVFormatIDKey] = Int(kAudioFormatLinearPCM)
      settings[AVSampleRateKey] = 8000.0
      settings[AVNumberOfChannelsKey] = 1
      settings[AVLinearPCMBitDepthKey] = 16
      settings[AVLinearPCMIsBigEndianKey] = false
      settings[AVLinearPCMIsFloatKey] = false
      settings[AVEncoderAudioQualityKey] = AVAudioQuality.max.rawValue

      // Build a path in the Documents directory where the recorded file will be saved automatically.
      let searchPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
      guard let documentPath = searchPaths.first else { return false }
      let pathToSave = "\(documentPath)/\(dateString)" // dateString: a timestamp-based file name defined elsewhere
      let url = URL(fileURLWithPath: pathToSave)

      recorder = try? AVAudioRecorder(url: url, settings: settings)
      // Initialize delegate, metering, etc.
      recorder?.delegate = self
      recorder?.isMeteringEnabled = true
      recorder?.prepareToRecord()
      if let recordIs = recorder {
          return recordIs.record()
      }
      return false
}
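Because the recorder's delegate is set to self, the containing class has to conform to AVAudioRecorderDelegate; a minimal sketch, using a hypothetical RecorderViewController as the containing class:

extension RecorderViewController: AVAudioRecorderDelegate {
    // Called when the recorder stops, either via stopRecording() or an interruption.
    func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        print("Recording finished, success: \(flag)")
    }
}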

Play recorded file

func playRecordingFile() {
    // Get the path of the file recorded in the previous method.
    let searchPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
    guard let documentPath = searchPaths.first else { return }
    let fileManager = FileManager.default
    guard let recordedSounds = try? fileManager.contentsOfDirectory(atPath: documentPath),
          let firstSound = recordedSounds.first else { return }

    let selectedSound = "\(documentPath)/\(firstSound)"
    let url = URL(fileURLWithPath: selectedSound)

    try? AVAudioSession.sharedInstance().setCategory(.playback, mode: .default)
    player = try? AVAudioPlayer(contentsOf: url) // keep the player in a property so it is not deallocated mid-playback
    player?.delegate = self
    player?.prepareToPlay()
    player?.play()
}
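Similarly, since the player's delegate is set to self, the class would also conform to AVAudioPlayerDelegate; a minimal sketch for the same hypothetical class:

extension RecorderViewController: AVAudioPlayerDelegate {
    // Called when playback of the recorded file reaches the end.
    func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        print("Playback finished, success: \(flag)")
    }
}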

Stop recording

func stopRecording() {
   recorder?.stop()
 }

Pause recording

func pauseRecording() {
    recorder?.pause()
}




Answer 2:


It will work because once you set up

let audiosession = AVAudioSession.sharedInstance()

with the AVAudioSessionCategoryPlayAndRecord category and call

try audiosession.setActive(true)

the session is ready to capture whatever audio reaches the device, so the AVAudioRecorder from Answer 1 can record it.
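Putting the two answers together, an assumed call sequence on the receiver side could look like this, using the methods from Answer 1 (the 10-second delay is only an example):

if setUpAudioSession() {                  // .playAndRecord category, session active, input available
    _ = startRecording()                  // write linear-PCM audio into the Documents directory
    DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
        self.stopRecording()              // finish the AVAudioRecorder
        self.playRecordingFile()          // play back a recorded file from Documents
    }
}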



Source: https://stackoverflow.com/questions/59667496/webrtc-ios-record-remote-audio-stream-using-webrtc
