iOS how to correctly handle orientation when capturing video using AVAssetWriter

爱⌒轻易说出口 提交于 2019-12-06 07:16:57

Video orientation is handled by the AVAssetWriterInput.transform property. It looks like the getVideoTransform() implementation is not correct — CGAffineTransform expects the rotation angle to be in radians, so it needs to be changed to something like this:

/// Maps the current device orientation to the rotation transform (in radians)
/// that should be assigned to `AVAssetWriterInput.transform` so the written
/// movie displays upright. Unknown/face orientations fall back to portrait.
private func getVideoTransform() -> CGAffineTransform {
    let angle: CGFloat
    switch UIDevice.current.orientation {
    case .portraitUpsideDown:
        angle = .pi
    case .landscapeLeft:
        angle = .pi / 2
    case .landscapeRight:
        angle = -.pi / 2
    default:
        // .portrait and any other orientation: no rotation needed.
        angle = 0
    }
    return angle == 0 ? .identity : CGAffineTransform(rotationAngle: angle)
}

From Apple Technical Q&A: https://developer.apple.com/library/archive/qa/qa1744/_index.html

If you are using an AVAssetWriter object to write a movie file, you can use the transform property of the associated AVAssetWriterInput to specify the output file orientation. This will write a display transform property into the output file as the preferred transformation of the visual media data for display purposes. See the AVAssetWriterInput.h interface file for the details.

I have found a solution to my problem. The solution is to export the video using AVAssetExportSession to handle setting the video size, and then handle the rotation at the time of export rather than during recording. I still have an issue where I need to fix the scale factor to go from my original video size to a smaller 640x480 resolution, but at least I solved my rotation issues. Please see the updated code below.

import UIKit
import AVFoundation

/// Records video+audio with AVAssetWriter, then re-exports the recording via
/// AVAssetExportSession to apply rotation/scaling (orientation is fixed at
/// export time, not at capture time).
/// NOTE(review): uses pre-Swift-4 AVFoundation constant names
/// (AVMediaTypeVideo, AVFileTypeMPEG4, ...) — presumably targets Swift 3 / iOS 10-era SDK; confirm.
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    //MARK: - Outlet

    @IBOutlet weak var previewView: UIView!
    @IBOutlet var playStopButton: UIButton!

    //MARK: - Private Variables

    // Shared concurrent background queue for session config and both
    // sample-buffer delegate callbacks (video and audio).
    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
    private let captureSession = AVCaptureSession()

    // Lazily-built URL of the raw recording (<documents>/recording/video.mp4).
    var outputUrl: URL {
        get {

            if let url = _outputUrl {
                return url
            }

            _outputUrl = outputDirectory.appendingPathComponent("video.mp4")
            return _outputUrl!
        }
    }

    private var _outputUrl: URL?

    // Lazily-built URL of the re-encoded export (<documents>/recording/video_encoded.mp4).
    var exportUrl: URL {
        get {

            if let url = _exportUrl {
                return url
            }

            _exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4")
            return _exportUrl!
        }
    }

    private var _exportUrl: URL?

    // Lazily-built directory that holds both files above.
    var outputDirectory: URL {
        get {

            if let url = _outputDirectory {
                return url
            }

            _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
            return _outputDirectory!
        }
    }

    private var _outputDirectory: URL?

    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var audioOutput: AVCaptureAudioDataOutput?

    // isRecording: UI-level state; isWriting: writer session started (set in delegate).
    private var isRecording = false
    private var isWriting = false

    // Target size for the recorded file; export preset must match.
    private var videoSize = CGSize(width: 640, height: 480)
    private var exportPreset = AVAssetExportPreset640x480

    //MARK: - View Life-cycle

    override func viewDidLoad() {
        super.viewDidLoad()

        // Configure the capture session off the main thread; hop back to
        // main (synchronously) only to attach the preview layer.
        videoQueue.async {

            do {

                try self.configureCaptureSession()

                DispatchQueue.main.sync {
                    self.configurePreview()
                }

            } catch {

                DispatchQueue.main.async {
                    self.showAlert("Unable to configure capture session")
                }
            }
        }
    }

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
        return .portrait
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    //MARK: - Capture Session

    /// Wires camera + microphone inputs and data outputs into the session,
    /// then configures the shared audio session, preferring the front-facing
    /// built-in mic data source when one is available.
    private func configureCaptureSession() throws {

        do {

            // configure capture devices
            let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

            let camInput = try AVCaptureDeviceInput(device: camDevice)
            let micInput = try AVCaptureDeviceInput(device: micDevice)

            if captureSession.canAddInput(camInput) {
                captureSession.addInput(camInput)
            }

            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }

            // configure audio/video output
            videoOutput = AVCaptureVideoDataOutput()
            videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
            videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            // NOTE(review): addOutput is not guarded by canAddOutput here — confirm intended.
            if let v = videoOutput {
                captureSession.addOutput(v)
            }

            audioOutput = AVCaptureAudioDataOutput()
            audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let a = audioOutput {
                captureSession.addOutput(a)
            }

            // configure audio session
            let audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try audioSession.setActive(true)

            var micPort: AVAudioSessionPortDescription?

            // Find the built-in microphone port among available inputs.
            if let inputs = audioSession.availableInputs {
                for port in inputs {
                    if port.portType == AVAudioSessionPortBuiltInMic {
                        micPort = port
                        break;
                    }
                }
            }

            // Prefer the built-in mic when it has a front-oriented data source.
            if let port = micPort, let dataSources = port.dataSources {

                for source in dataSources {
                    if source.orientation == AVAudioSessionOrientationFront {
                        try audioSession.setPreferredInput(port)
                        break
                    }
                }
            }

        } catch {
            print("Failed to configure audio/video capture session")
            throw error
        }
    }

    /// (Re)creates the asset writer plus its H.264 video and AAC audio inputs.
    /// Discards any previous writer state, so each recording starts fresh.
    private func configureAssetWriter() throws {

        prepareVideoFile()

        do {

            if assetWriter != nil {
                assetWriter = nil
                videoInput = nil
                audioInput = nil
            }

            assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

            guard let writer = assetWriter else {
                print("Asset writer not created")
                return
            }

            let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                 AVVideoWidthKey: NSNumber(value: Float(videoSize.width)),
                                 AVVideoHeightKey: NSNumber(value: Float(videoSize.height))]

            // expectsMediaDataInRealTime is required for live capture sources.
            videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            videoInput?.expectsMediaDataInRealTime = true

            // NOTE(review): channelLayout is built but never referenced by
            // audioSettings below — dead code, or a missing
            // AVChannelLayoutKey entry; confirm intent.
            var channelLayout = AudioChannelLayout()
            memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;

            let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                                AVSampleRateKey: 44100,
                                                AVNumberOfChannelsKey: 2]

            audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
            audioInput?.expectsMediaDataInRealTime = true

            guard let vi = videoInput else {
                print("Video input not configured")
                return
            }

            guard let ai = audioInput else {
                print("Audio input not configured")
                return
            }

            if writer.canAdd(vi) {
                writer.add(vi)
            }

            if writer.canAdd(ai) {
                writer.add(ai)
            }

        } catch {
            print("Failed to configure asset writer")
            throw error
        }
    }

    /// Removes a stale recording file and ensures the output directory exists.
    private func prepareVideoFile() {

        if FileManager.default.fileExists(atPath: outputUrl.path) {

            do {
                try FileManager.default.removeItem(at: outputUrl)
            } catch {
                print("Unable to remove file at URL \(outputUrl)")
            }
        }

        if !FileManager.default.fileExists(atPath: outputDirectory.path) {

            do {
                try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
            } catch {
                print("Unable to create directory at URL \(outputDirectory)")
            }
        }
    }

    /// Attaches a live preview layer for the capture session (main thread only).
    private func configurePreview() {

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.frame = previewView.bounds
            previewView.layer.addSublayer(previewLayer)
        }
    }

    /// Returns videoSize oriented to match the current device orientation
    /// (landscape → wide, everything else → tall), swapping width/height if needed.
    private func getVideoSize() -> CGSize {

        if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {

            if videoSize.width > videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }

        } else {

            if videoSize.width < videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }
        }
    }

    //MARK: - Controls

    /// Creates the writer and starts the session on the video queue; UI state
    /// is flipped immediately (optimistically) on the calling thread.
    private func startRecording() {

        videoQueue.async {

            do {
                try self.configureAssetWriter()
                self.captureSession.startRunning()

            } catch {
                print("Unable to start recording")
                DispatchQueue.main.async { self.showAlert("Unable to start recording") }
            }
        }

        isRecording = true
        playStopButton.setTitle("Stop Recording", for: .normal)
        print("Recording did start")
    }

    private func stopRecording() {

        if !isRecording {
            return
        }

        videoQueue.async {

            // NOTE(review): finishWriting is asynchronous — its completion
            // handler fires later, but export() below is called immediately,
            // so the export may read a not-yet-finalized file. Moving the
            // export call into the completion handler would be safer; confirm.
            self.assetWriter?.finishWriting {
                print("Asset writer did finish writing")
                self.isWriting = false
            }

            self.captureSession.stopRunning()

            do {
                try self.export()
            } catch {
                print("Export failed")
                DispatchQueue.main.async { self.showAlert("Unable to export video") }
            }
        }

        isRecording = false

        playStopButton.setTitle("Start Recording", for: .normal)
        print("Recording did stop")
    }

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    /// Shared delegate callback for BOTH video and audio buffers (they arrive
    /// on videoQueue). Starts the writer session on the first buffer, then
    /// routes each buffer to the matching writer input.
    /// NOTE(review): pre-Swift-4 delegate signature — presumably intentional for this SDK; confirm.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

        guard let w = assetWriter else {
            print("Asset writer not configured")
            return
        }

        guard let vo = videoOutput else {
            print("Video output not configured")
            return
        }

        guard let ao = audioOutput else {
            print("Audio output not configured")
            return
        }

        guard let vi = videoInput else {
            print("Video input not configured")
            return
        }

        guard let ai = audioInput else {
            print("Audio input not configured")
            return
        }

        let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        print("Writer status \(w.status.rawValue)")

        if let e = w.error {
            print("Writer error \(e)")
            stopRecording()
            return
        }

        switch w.status {

        case .unknown:

            // First buffer: start writing, anchored at this buffer's timestamp.
            if !isWriting {
                isWriting = true
                w.startWriting()
                w.startSession(atSourceTime: st)
            }

            // This first buffer itself is intentionally dropped (early return).
            return

        case .completed:
            print("Video writing completed")
            return

        case .cancelled:
            print("Video writing cancelled")
            return

        case .failed:
            print("Video writing failed")
            return

        default:
            print("Video is writing")
        }

        // .writing: dispatch the buffer to the input that matches its output.
        if vo == captureOutput {

            if !vi.append(sampleBuffer) {
                print("Unable to write to video buffer")
            }

        } else if ao == captureOutput {

            if !ai.append(sampleBuffer) {
                print("Unable to write to audio buffer")
            }
        }
    }

    //MARK: - Export

    /// Builds a composition that rotates portrait footage upright for export.
    /// Landscape is detected from the asset's preferredTransform (the two
    /// 90°-rotation matrices); portrait footage gets a 90° rotation plus a
    /// translation to keep the frame on-screen.
    private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? {

        guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
            print("Unable to get video tracks")
            return nil
        }

        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = videoSize

        // Frame duration derived from the source track's nominal frame rate.
        let seconds: Float64 = Float64(1.0 / videoTrack.nominalFrameRate)
        videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600);

        let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

        var transforms = asset.preferredTransform

        var isPortrait = true;

        // NOTE(review): the flag is named isPortrait but these matrices are
        // the ±90° rotations; the naming/meaning looks inverted — verify
        // against actual recordings.
        if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0)
        || (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0) {
            isPortrait = false;
        }

        if isPortrait {
            // degreesToRadians is presumably a numeric extension defined
            // elsewhere in the project — not visible in this file.
            transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0.degreesToRadians)))
            transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0))
        }

        layerInst.setTransform(transforms, at: kCMTimeZero)

        let inst = AVMutableVideoCompositionInstruction()
        inst.backgroundColor = UIColor.black.cgColor
        inst.layerInstructions = [layerInst]
        inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)

        videoComposition.instructions = [inst]

        return videoComposition

    }

    /// Re-encodes the raw recording into exportUrl at the preset size,
    /// applying the rotation composition from getVideoComposition(asset:videoSize:).
    private func export() throws {

        let videoAsset = AVURLAsset(url: outputUrl)

        if FileManager.default.fileExists(atPath: exportUrl.path) {
            try FileManager.default.removeItem(at: exportUrl)
        }

        let videoSize = getVideoSize()

        guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else {
            print("Unable to create encoder")
            return
        }

        guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else {
            print("Unable to create video composition")
            return
        }

        encoder.videoComposition = vidcomp
        encoder.outputFileType = AVFileTypeMPEG4  // MP4 format
        encoder.outputURL = exportUrl
        encoder.shouldOptimizeForNetworkUse = true

        // NOTE(review): completion handler prints success unconditionally —
        // encoder.status/error should be checked here before claiming success.
        encoder.exportAsynchronously(completionHandler: {
            print("Video exported successfully")
        })
    }

    //MARK: Helpers

    /// Returns the app's Documents directory URL.
    private func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }

    //MARK: Actions

    @IBAction func startStopTapped(sender: AnyObject) {

        if isRecording {
            stopRecording()
        } else {
            startRecording()
        }
    }
}

Just swap the width and height in the writer settings.

And don't forget about HEVC:

assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

https://developer.apple.com/videos/play/wwdc2017/503 https://developer.apple.com/videos/play/wwdc2017/511

// Configures the asset-writer video input from the capture output's
// recommended settings, preferring HEVC when available (iOS 11+), and
// swaps width/height in the settings to fix portrait orientation.
// Returns YES on failure, NO on success (same contract as before).
- (BOOL)configureWriterInput {
    AVFileType fileType = AVFileTypeQuickTimeMovie;
    NSDictionary<NSString *, id> *settings;

    // Prefer HEVC when the output reports it for this container (iOS 11+).
    if (@available(iOS 11.0, *)) {
        NSArray<AVVideoCodecType> *codecs =
            [self.sessionOutput availableVideoCodecTypesForAssetWriterWithOutputFileType:fileType];

        if ([codecs containsObject:AVVideoCodecTypeHEVC]) {
            settings = [self.sessionOutput recommendedVideoSettingsForVideoCodecType:AVVideoCodecTypeHEVC
                                                           assetWriterOutputFileType:fileType];
        } else {
            settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:fileType];
        }
    } else {
        settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:fileType];
    }

    if (![writer canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]) {
        return YES; // failure: recommended settings rejected by the writer
    }

    // Swap width and height to fix orientation, but only if the writer
    // also accepts the swapped settings; otherwise keep the originals.
    if (settings[AVVideoHeightKey] && settings[AVVideoWidthKey]) {
        NSMutableDictionary<NSString *, id> *swapped = [settings mutableCopy];
        swapped[AVVideoHeightKey] = settings[AVVideoWidthKey];
        swapped[AVVideoWidthKey] = settings[AVVideoHeightKey];

        if ([writer canApplyOutputSettings:swapped forMediaType:AVMediaTypeVideo]) {
            settings = swapped;
        }
    }

    writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                     outputSettings:settings];

    // Alternative (kept from the original, disabled): drive orientation via
    // writerInput.transform derived from the connection's videoOrientation.
    // AVCaptureConnection *con =
    //     [self.sessionOutput connectionWithMediaType:AVMediaTypeVideo];
    // const AVCaptureVideoOrientation o = con.videoOrientation;
    // writerInput.transform = [[self class] configureOrientationTransform:o];

    if ([writer canAddInput:writerInput]) {
        [writer addInput:writerInput];
        return NO; // success
    }
    return YES; // failure: input rejected
}
易学教程内所有资源均来自网络或用户发布的内容,如有违反法律规定的内容欢迎反馈
该文章没有解决你所遇到的问题?点击提问,说说你的问题,让更多的人一起探讨吧!