Question
I'm using Aperture to record audio and video from the screen. We need to lower the video bitrate, so I'm trying to rewrite it to record the video with AVAssetWriter. My implementation is based on the CustomCamera project and is almost working. The problem is with the video: after a few seconds it freezes, although the audio keeps working. Could you help me, please? I don't know where the problem is; maybe it's an issue with the buffers, or one of my variables is being released too early. Thanks.
Here is the code:
//
// ViewController.swift
// CustomCamera
//
// Created by Taras Chernyshenko on 6/27/17.
// Copyright © 2017 Taras Chernyshenko. All rights reserved.
//
import AVFoundation
import Photos
class NewRecorder: NSObject,
    AVCaptureAudioDataOutputSampleBufferDelegate,
    AVCaptureVideoDataOutputSampleBufferDelegate {

    private var session: AVCaptureSession = AVCaptureSession()
    private var deviceInput: AVCaptureScreenInput?
    private var previewLayer: AVCaptureVideoPreviewLayer?
    private var videoOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
    private var audioOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()
    //private var videoDevice: AVCaptureDevice = AVCaptureScreenInput(displayID: 69731840) //AVCaptureDevice.default(for: AVMediaType.video)!
    private var audioConnection: AVCaptureConnection?
    private var videoConnection: AVCaptureConnection?
    private var assetWriter: AVAssetWriter?
    private var audioInput: AVAssetWriterInput?
    private var videoInput: AVAssetWriterInput?
    private var fileManager: FileManager = FileManager()
    private var recordingURL: URL?
    private var isCameraRecording: Bool = false
    private var isRecordingSessionStarted: Bool = false
    private var recordingQueue = DispatchQueue(label: "recording.queue")
    func setup() {
        self.session.sessionPreset = AVCaptureSession.Preset.high
        self.recordingURL = URL(fileURLWithPath: "\(NSTemporaryDirectory() as String)/file.mp4")
        if self.fileManager.isDeletableFile(atPath: self.recordingURL!.path) {
            _ = try? self.fileManager.removeItem(atPath: self.recordingURL!.path)
        }
        self.assetWriter = try? AVAssetWriter(outputURL: self.recordingURL!,
                                              fileType: AVFileType.mp4)
        self.assetWriter!.movieFragmentInterval = kCMTimeInvalid
        self.assetWriter!.shouldOptimizeForNetworkUse = true
        let audioSettings = [
            AVFormatIDKey : kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey : 2,
            AVSampleRateKey : 44100.0,
            AVEncoderBitRateKey: 192000
        ] as [String : Any]
        let videoSettings = [
            AVVideoCodecKey : AVVideoCodecType.h264,
            AVVideoWidthKey : 1920,
            AVVideoHeightKey : 1080
            /*AVVideoCompressionPropertiesKey: [
                AVVideoAverageBitRateKey: NSNumber(value: 5000000)
            ]*/
        ] as [String : Any]
        self.videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,
                                             outputSettings: videoSettings)
        self.audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio,
                                             outputSettings: audioSettings)
        self.videoInput?.expectsMediaDataInRealTime = true
        self.audioInput?.expectsMediaDataInRealTime = true
        if self.assetWriter!.canAdd(self.videoInput!) {
            self.assetWriter?.add(self.videoInput!)
        }
        if self.assetWriter!.canAdd(self.audioInput!) {
            self.assetWriter?.add(self.audioInput!)
        }
        //self.deviceInput = try? AVCaptureDeviceInput(device: self.videoDevice)
        self.deviceInput = AVCaptureScreenInput(displayID: 724042646)
        self.deviceInput!.minFrameDuration = CMTimeMake(1, Int32(30))
        self.deviceInput!.capturesCursor = true
        self.deviceInput!.capturesMouseClicks = true
        if self.session.canAddInput(self.deviceInput!) {
            self.session.addInput(self.deviceInput!)
        }
        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
        //important line of code that will do the trick
        //self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        //let rootLayer = self.view.layer
        //rootLayer.masksToBounds = true
        //self.previewLayer?.frame = CGRect(x: 0, y: 0, width: 1920, height: 1080)
        //rootLayer.insertSublayer(self.previewLayer!, at: 0)
        self.session.startRunning()
        DispatchQueue.main.async {
            self.session.beginConfiguration()
            if self.session.canAddOutput(self.videoOutput) {
                self.session.addOutput(self.videoOutput)
            }
            self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)
            /*if self.videoConnection?.isVideoStabilizationSupported == true {
                self.videoConnection?.preferredVideoStabilizationMode = .auto
            }*/
            self.session.commitConfiguration()
            let audioDevice = AVCaptureDevice.default(for: AVMediaType.audio)
            let audioIn = try? AVCaptureDeviceInput(device: audioDevice!)
            if self.session.canAddInput(audioIn!) {
                self.session.addInput(audioIn!)
            }
            if self.session.canAddOutput(self.audioOutput) {
                self.session.addOutput(self.audioOutput)
            }
            self.audioConnection = self.audioOutput.connection(with: AVMediaType.audio)
        }
    }
    func startRecording() {
        if self.assetWriter?.startWriting() != true {
            print("error: \(self.assetWriter?.error.debugDescription ?? "")")
        }
        self.videoOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
        self.audioOutput.setSampleBufferDelegate(self, queue: self.recordingQueue)
    }
    func stopRecording() {
        self.videoOutput.setSampleBufferDelegate(nil, queue: nil)
        self.audioOutput.setSampleBufferDelegate(nil, queue: nil)
        self.assetWriter?.finishWriting {
            print("Saved in folder \(self.recordingURL!)")
            exit(0)
        }
    }
    func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        if !self.isRecordingSessionStarted {
            let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            self.assetWriter?.startSession(atSourceTime: presentationTime)
            self.isRecordingSessionStarted = true
        }
        let description = CMSampleBufferGetFormatDescription(sampleBuffer)!
        if CMFormatDescriptionGetMediaType(description) == kCMMediaType_Audio {
            if self.audioInput!.isReadyForMoreMediaData {
                //print("appendSampleBuffer audio")
                self.audioInput?.append(sampleBuffer)
            }
        } else {
            if self.videoInput!.isReadyForMoreMediaData {
                //print("appendSampleBuffer video")
                if !self.videoInput!.append(sampleBuffer) {
                    print("Error writing video buffer")
                }
            }
        }
    }
}
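Since the stated goal is to lower the video bitrate, here is a minimal sketch of what the video settings could look like with the compression-properties block (commented out in setup() above) enabled. The 2 Mbps average bitrate, the keyframe interval of 30 frames, and the H.264 High profile are illustrative assumptions, not values taken from the original project:

// Sketch: video output settings with an explicit bitrate cap.
// The numbers below (2 Mbps average, keyframe every 30 frames) are assumed examples.
let lowBitrateVideoSettings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecType.h264,
    AVVideoWidthKey: 1920,
    AVVideoHeightKey: 1080,
    AVVideoCompressionPropertiesKey: [
        AVVideoAverageBitRateKey: NSNumber(value: 2_000_000),
        AVVideoMaxKeyFrameIntervalKey: NSNumber(value: 30),
        AVVideoProfileLevelKey: AVVideoProfileLevelH264HighAutoLevel
    ]
]
let videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,
                                    outputSettings: lowBitrateVideoSettings)
videoInput.expectsMediaDataInRealTime = true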
Answer 1:
So I fixed it by moving this code

    if self.session.canAddInput(self.deviceInput!) {
        self.session.addInput(self.deviceInput!)
    }
to just after the call to self.session.beginConfiguration(), so the configuration block now reads:
    self.session.beginConfiguration()
    if self.session.canAddInput(self.deviceInput!) {
        self.session.addInput(self.deviceInput!)
    }
    if self.session.canAddOutput(self.videoOutput) {
        self.session.addOutput(self.videoOutput)
    }
    self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)
    self.session.commitConfiguration()
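For context, this block lives inside the DispatchQueue.main.async closure in setup(). Below is a sketch of that closure with the fix applied; folding the audio input and output into the same begin/commit batch is my own assumption rather than part of the answer, but it keeps every session change in one atomic configuration pass:

DispatchQueue.main.async {
    self.session.beginConfiguration()
    // The screen input is now added inside the configuration batch;
    // in the question above, adding it outside the batch is what made
    // the video freeze after a few seconds.
    if self.session.canAddInput(self.deviceInput!) {
        self.session.addInput(self.deviceInput!)
    }
    if self.session.canAddOutput(self.videoOutput) {
        self.session.addOutput(self.videoOutput)
    }
    self.videoConnection = self.videoOutput.connection(with: AVMediaType.video)
    // Assumption: the audio input and output can join the same batch.
    if let audioDevice = AVCaptureDevice.default(for: AVMediaType.audio),
        let audioIn = try? AVCaptureDeviceInput(device: audioDevice),
        self.session.canAddInput(audioIn) {
        self.session.addInput(audioIn)
    }
    if self.session.canAddOutput(self.audioOutput) {
        self.session.addOutput(self.audioOutput)
    }
    self.audioConnection = self.audioOutput.connection(with: AVMediaType.audio)
    self.session.commitConfiguration()
}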
Source: https://stackoverflow.com/questions/48569738/swift-4-avfoundation-screen-and-audio-recording-using-avassetwriter-on-mac-os