AVAssetWriter Unable to record audio with video | Crashing

Submitted by 回眸只為那壹抹淺笑 on 2021-01-05 08:55:53

Question


I am trying to capture video/audio frames from CMSampleBuffer but am completely failing to obtain a proper video recording.

Expected output: a video file in .mp4 format containing both audio (from the mic) and video frames.

Current output: an empty directory, or a video file without audio.

Crash on run: Media type of sample buffer must match receiver's media type ("soun")

I have tried almost everything available online to troubleshoot this. I have a deadline coming up and I am pulling my hair out trying to figure out what exactly is going on. Any help/pointers are highly appreciated.

Below is the source.

CameraController.swift

class CameraController: UIViewController, SFrameCaptureDelegate {
    
    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
        
        assetWriter = AssetManager(filename: UUID().uuidString.appending(".mp4"))
        frameBuffer.delegate = self
        frameBuffer.startSession()
        
    }
    
    
    var previewView: PreviewView = {
        let instance = PreviewView()
        return instance
    }()
    
    var frameBuffer = FrameCapture(type: .AudioVideo)
    
    var captureButton: UIButton = {
        let instance = UIButton()
        instance.setTitle("Capture", for: .normal)
        instance.backgroundColor = .white
        return instance
    }()
 
    // if the user is recording the frames from the phone
    var frameCaptureRunning = false
    
    var assetWriter : AssetManager!
    
    var videoDirectoryPath = SFileManager.shared.getDocumentDirectory()
    
    func setupUI() {
        
        view.addSubview(previewView)
        previewView.top(to: view)
        previewView.left(to: view)
        previewView.right(to: view)
        previewView.height(view.frame.height)
        
        previewView.session = frameBuffer.session
        
        
        view.addSubview(captureButton)
        captureButton.size(CGSize(width: 100, height: 100))
        captureButton.centerX(to: view)
        captureButton.bottom(to: view, offset: -20)
        captureButton.addTarget(self, action: #selector(startpic), for: .touchDown)
        captureButton.addTarget(self, action: #selector(stopic), for: .touchUpInside)
    }
    
    @objc func startpic() {
        frameCaptureRunning = true
        assetWriter.isRecording = true
    }
    
    @objc func stopic() {
        frameCaptureRunning = false
        assetWriter.isRecording = false
        assetWriter.finish {
            DispatchQueue.main.async {
                let activity = UIActivityViewController(activityItems: [self.assetWriter.url!], applicationActivities: nil)
                self.present(activity, animated: true, completion: nil)
            }
            print("This -- ",self.assetWriter.url.path)
            do {
                let attr = try FileManager.default.attributesOfItem(atPath: self.assetWriter.url.path)
                let fileSize = attr[FileAttributeKey.size] as! UInt64
                print("H264 file size = \(fileSize)")

                DispatchQueue.main.async {
                    let player = AVPlayer(url: self.assetWriter.url)
                    let playerLayer = AVPlayerLayer(player: player)
                    playerLayer.videoGravity = .resizeAspectFill
                    playerLayer.frame = self.view.bounds
                    playerLayer.backgroundColor = UIColor.red.cgColor
                    self.view.layer.addSublayer(playerLayer)
                    player.play()
                }
            }catch{
                print("issues with finishing")
            }
        }
        
        
    }
    
    func capturedFrame(buffers: CMSampleBuffer) {
        
        if !frameCaptureRunning { return }
        assetWriter.write(buffer: buffers)
        
    }
    
}

FrameCapture.swift

protocol SFrameCaptureDelegate: class {
    func capturedFrame(buffers: CMSampleBuffer)
}

class FrameCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    
    init(type: SessionType) {
        super.init()
        print("SFC - Frame Buffers initialized with Config - ", type.self)
        sessionType = type
    }
    
    func startSession() {
        print("SFC - Frame Buffers Session Starting")
        sessionQueue.async {
            self.configureSession(type: self.sessionType)
            self.session.startRunning()
        }
    }
    
    weak var delegate: SFrameCaptureDelegate?
    
    enum SessionSetupResult {
        case success
        case notAuthorized
        case configurationFailed
    }
    
    enum SessionType {
        case Audio
        case Video
        case AudioVideo
    }
    
    let session = AVCaptureSession()
    let sessionQueue = DispatchQueue(label: "sessionQueue", qos: .userInitiated)
    let videoQueue = DispatchQueue(label: "videoQueue", qos: .userInitiated)
    let audioQueue = DispatchQueue(label: "audioQueue", qos: .userInitiated)
    var setupResult: SessionSetupResult = .success
    var sessionType: SessionType = .Video
    
    @objc dynamic var videoDeviceInput: AVCaptureDeviceInput!
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()
    var photoQualityPrioritizationMode: AVCapturePhotoOutput.QualityPrioritization = .balanced
    
    // MARK: Session configuration
    func configureSession(type: SessionType) {
        
        if setupResult != .success { return }
        
        session.beginConfiguration()
        session.sessionPreset = .high
        
        do {
            var defaultVideoDevice: AVCaptureDevice?
            
            if let dualCameraDevice = AVCaptureDevice.default(.builtInDualWideCamera, for: .video, position: .back) {
                defaultVideoDevice = dualCameraDevice
            } else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back){
                defaultVideoDevice = backCameraDevice
            } else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front){
                defaultVideoDevice = frontCameraDevice
            }
            
            guard let videoDevice = defaultVideoDevice else {
                print("CAM - Camera unavailable")
                setupResult = .configurationFailed
                session.commitConfiguration()
                return
            }
            
            let videoInputDevice = try AVCaptureDeviceInput(device: videoDevice)
            
            if session.canAddInput(videoInputDevice) {
                session.addInput(videoInputDevice)
                videoDeviceInput = videoInputDevice
            } else {
                print("CAM - Couldn't add input to the session")
                setupResult = .configurationFailed
                session.commitConfiguration()
                return
            }
        } catch {
            print("CAM - Couldn't create device input. Error - ", error.localizedDescription)
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
        
        if sessionType == .AudioVideo {
            do {
                let audioDevice = AVCaptureDevice.default(for: .audio)
                let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice!)
                print("SFC - in audio device input")
                if session.canAddInput(audioDeviceInput) {
                    session.addInput(audioDeviceInput)
                } else { print("CAM - Couldn't add audio input device to session.") }
            } catch { print("couldn't create audio input device. Error - ",error.localizedDescription) }
        }
        
        
        videoOutput.setSampleBufferDelegate(self, queue: videoQueue)
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
            photoQualityPrioritizationMode = .balanced
        } else {
            print("Could not add photo output to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        }
        
        if sessionType == .AudioVideo {
            audioOutput.setSampleBufferDelegate(self, queue: audioQueue)
            if session.canAddOutput(audioOutput) {
                session.addOutput(audioOutput)
            } else {
                print("Couldn't add audio output")
                setupResult = .configurationFailed
                session.commitConfiguration()
            }
        }
        
        videoOutput.connections.first?.videoOrientation = .portrait
        videoOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA ]
        videoOutput.alwaysDiscardsLateVideoFrames = true
        
        session.commitConfiguration()

    }
    
    // MARK: Sample buffer delegate
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        self.delegate?.capturedFrame(buffers: sampleBuffer)
    }
    
}

AssetManager.swift

class AssetManager: NSObject {
    
    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    var url: URL!
    
    let writerQueue = DispatchQueue(label: "writerQueue", qos: .utility)
    
    var isRecording = false
    var video_frames_written = false
    
    init(filename: String) {
        super.init()
        self.videoDirectory.appendPathComponent(filename)
        self.url = self.videoDirectory
        
    }

    private var videoDirectory = SFileManager.shared.getDocumentDirectory()

    
    private func setupWriter() {
        
        SFileManager.shared.clearPreviousFiles(withPath: videoDirectory.path)
        SFileManager.shared.createNewDirectory(withPath: videoDirectory.path)
        printLog(item: self.videoDirectory)
        
        
        self.assetWriter = try? AVAssetWriter(outputURL: self.videoDirectory, fileType: AVFileType.mp4)
        
        let videoOutputSettings = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoHeightKey: 1280,
            AVVideoWidthKey:720
        ] as [String : Any]
        
        
        self.videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
        self.videoInput?.expectsMediaDataInRealTime = true
        if let videoInput = self.videoInput, (self.assetWriter?.canAdd(videoInput))! {
            self.assetWriter?.add(videoInput)
        }
        
        
        let audioOutputSettings = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44100,
            AVEncoderBitRateKey: 64000
        ] as [String: Any]
        
        
        self.audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        self.audioInput?.expectsMediaDataInRealTime = true
        if let audioInput = self.audioInput, (self.assetWriter?.canAdd(audioInput))! {
            self.assetWriter?.add(audioInput)
            printDone(item: "Asset writer added, \(String(describing: self.audioInput))")
        } else {
            printError(item: "No audio Input")
        }
        
        
    }
    
    
    public func write(buffer: CMSampleBuffer) {
        writerQueue.sync {
            
            if assetWriter == nil { self.setupWriter() }

            if self.assetWriter?.status == .unknown {
                self.assetWriter?.startWriting()
                self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(buffer))
                printDone(item: "Started AssetWriter")
            }

            if self.assetWriter?.status == .failed {
                printError(item: "Asset Writer Failed with Error: \(String(describing: self.assetWriter?.error))")
            }

            if CMSampleBufferDataIsReady(buffer) {

                if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
                    videoInput.append(buffer)
                }
                
                if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
                    audioInput.append(buffer) // Crashes at this line
                }
            }
        }
    }
    
    public func finish(completion: @escaping (() -> Void)) {
        writerQueue.async {
            self.assetWriter?.finishWriting(completionHandler: { [self] in
                printDone(item: "Finished Writing")
                completion()
            })
        }
    }
}


Answer 1:


You are writing a video buffer to your audioInput, and depending on how the buffers arrive, you might also be writing an audio buffer to your videoInput.

In your case, each CMSampleBuffer contains either audio or video, so you need to append audio buffers only to audioInput and video buffers only to videoInput.

You can distinguish the two kinds of buffer either by comparing the output parameter in captureOutput(_:didOutput:from:) against your audioOutput and videoOutput, or by looking at the buffer's CMSampleBufferGetFormatDescription() and its CMFormatDescriptionGetMediaType(), though the latter is more involved.
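
For illustration, here is a minimal sketch of the first approach. The extra isVideo parameter threaded through SFrameCaptureDelegate, capturedFrame, and AssetManager.write is an assumption made for this sketch; it is not in the original code.

// FrameCapture.swift — report which output delivered the buffer.
protocol SFrameCaptureDelegate: class {
    func capturedFrame(buffers: CMSampleBuffer, isVideo: Bool)
}

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // videoOutput is the AVCaptureVideoDataOutput added to the session above,
    // so comparing against it tells us whether this buffer is video or audio.
    delegate?.capturedFrame(buffers: sampleBuffer, isVideo: output == videoOutput)
}

// CameraController.swift — forward the flag to the writer.
func capturedFrame(buffers: CMSampleBuffer, isVideo: Bool) {
    if !frameCaptureRunning { return }
    assetWriter.write(buffer: buffers, isVideo: isVideo)
}

// AssetManager.swift — append each buffer only to the input of its own media type.
public func write(buffer: CMSampleBuffer, isVideo: Bool) {
    writerQueue.sync {
        if assetWriter == nil { self.setupWriter() }

        if self.assetWriter?.status == .unknown {
            self.assetWriter?.startWriting()
            self.assetWriter?.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(buffer))
        }

        guard CMSampleBufferDataIsReady(buffer) else { return }

        if isVideo {
            if let videoInput = self.videoInput, videoInput.isReadyForMoreMediaData {
                videoInput.append(buffer)
            }
        } else {
            if let audioInput = self.audioInput, audioInput.isReadyForMoreMediaData {
                audioInput.append(buffer)
            }
        }
    }
}

// Alternative (second approach): decide from the buffer itself rather than
// from the output object that delivered it.
func isVideoBuffer(_ buffer: CMSampleBuffer) -> Bool {
    guard let description = CMSampleBufferGetFormatDescription(buffer) else { return false }
    return CMFormatDescriptionGetMediaType(description) == kCMMediaType_Video
}

Either way, the point is the same: each AVAssetWriterInput must only ever receive buffers of its own media type, which is exactly what the "Media type of sample buffer must match receiver's media type" crash is complaining about.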



Source: https://stackoverflow.com/questions/65000687/avassetwriter-unable-to-record-audio-with-video-crashing
