Swift 3: How to export video with text using AVVideoComposition


Here is the full working code I used in my project. It shows a CATextLayer at the bottom (0,0) of the video, and when the export session finishes it swaps the exported file into the player item. I used a small Objective-C model class to detect the video orientation. Please test on a device; AVPlayer will not render the text layer properly in the simulator.
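
    // The snippet below assumes these properties exist on the view controller:
    // currentAsset (AVAsset), playerView, player (AVPlayer), progressSlider,
    // syncLayer (CALayer) and lblIntro.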

    let composition = AVMutableComposition.init()

    let videoComposition = AVMutableVideoComposition()
    videoComposition.frameDuration = CMTimeMake(1, 30)
    videoComposition.renderScale  = 1.0
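    // 30 fps output; renderSize is assigned below once the source orientation is known.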

    let compositionCommentaryTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)


    let compositionVideoTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)


    let clipVideoTrack: AVAssetTrack = self.currentAsset.tracks(withMediaType: AVMediaTypeVideo)[0]

    // Use .first so an asset without an audio track doesn't crash on [0].
    let audioTrack: AVAssetTrack? = self.currentAsset.tracks(withMediaType: AVMediaTypeAudio).first

    if let audioTrack = audioTrack {
        try? compositionCommentaryTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.currentAsset.duration), of: audioTrack, at: kCMTimeZero)
    }

    try? compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.currentAsset.duration), of: clipVideoTrack, at: kCMTimeZero)

    let orientation = VideoModel.videoOrientation(self.currentAsset)
    var isPortrait = false

    switch orientation {
    case .landscapeRight:
        isPortrait = false
    case .landscapeLeft:
        isPortrait = false
    case .portrait:
        isPortrait = true
    case .portraitUpsideDown:
        isPortrait = true
    }

    var naturalSize = clipVideoTrack.naturalSize

    if isPortrait
    {
        naturalSize = CGSize.init(width: naturalSize.height, height: naturalSize.width)
    }

    videoComposition.renderSize = naturalSize

    // Rotate the source into its display orientation; the renderSize above
    // already accounts for portrait (width/height swapped).
    let scale = CGFloat(1.0)

    var transform = CGAffineTransform.init(scaleX: scale, y: scale)

    switch orientation {
    case .landscapeRight:
        break
    case .landscapeLeft:
        transform = transform.translatedBy(x: naturalSize.width, y: naturalSize.height)
        transform = transform.rotated(by: .pi)
    case .portrait:
        transform = transform.translatedBy(x: naturalSize.width, y: 0)
        transform = transform.rotated(by: .pi / 2)
    case .portraitUpsideDown:
        break
    }

    let frontLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack!)
    frontLayerInstruction.setTransform(transform, at: kCMTimeZero)

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    mainInstruction.layerInstructions = [frontLayerInstruction]
    videoComposition.instructions = [mainInstruction]

    let parentLayer = CALayer.init()
    parentLayer.frame = CGRect.init(x: 0, y: 0, width: naturalSize.width, height: naturalSize.height)

    let videoLayer = CALayer.init()
    videoLayer.frame = parentLayer.frame
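    // videoLayer is where the composition tool renders the video frames;
    // the text layer created below is composited on top of it.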


    let layer = CATextLayer()
    layer.string = "HELLO ALL"
    layer.foregroundColor = UIColor.white.cgColor
    layer.backgroundColor = UIColor.orange.cgColor
    layer.fontSize = 32
    layer.frame = CGRect.init(x: 0, y: 0, width: 300, height: 100)
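    // In the video composition, the layer origin is the bottom-left corner,
    // so a frame at (0, 0) places the text at the bottom of the exported video.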

    // Scale the text layer's frame from on-screen (player view) points to the
    // video's pixel coordinates. Note that rct is only computed here; assign it
    // back (layer.frame = rct) if you want the text sized relative to the player view.
    var rct = layer.frame

    let widthScale = self.playerView.frame.size.width/naturalSize.width

    rct.size.width /= widthScale
    rct.size.height /= widthScale
    rct.origin.x /= widthScale
    rct.origin.y /= widthScale



    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(layer)

    videoComposition.animationTool = AVVideoCompositionCoreAnimationTool.init(postProcessingAsVideoLayer: videoLayer, in: parentLayer)
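    // The animation tool renders each video frame into videoLayer and then
    // composites parentLayer (video + text) into the exported output.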

    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let videoPath = documentsPath+"/cropEditVideo.mov"

    let fileManager = FileManager.default

    if fileManager.fileExists(atPath: videoPath)
    {
        try! fileManager.removeItem(atPath: videoPath)
    }

    print("video path \(videoPath)")

    var exportSession = AVAssetExportSession.init(asset: composition, presetName: AVAssetExportPresetHighestQuality)
    exportSession?.videoComposition = videoComposition
    exportSession?.outputFileType = AVFileTypeQuickTimeMovie
    exportSession?.outputURL = URL.init(fileURLWithPath: videoPath)
    var exportProgress: Float = 0
    let queue = DispatchQueue(label: "Export Progress Queue")
    queue.async(execute: {() -> Void in
        // Poll progress once a second until the completion handler releases the session.
        while let session = exportSession {
            exportProgress = session.progress
            print("current progress == \(exportProgress)")
            sleep(1)
        }
    })

    exportSession?.exportAsynchronously(completionHandler: {


        if exportSession?.status == AVAssetExportSessionStatus.failed
        {
            print("Failed \(String(describing: exportSession?.error))")
            // Release the session here too so the progress loop stops on failure.
            exportSession = nil
        }
        else if exportSession?.status == AVAssetExportSessionStatus.completed
        {
            exportSession = nil

            let asset = AVAsset.init(url: URL.init(fileURLWithPath: videoPath))
            DispatchQueue.main.async {
                let item = AVPlayerItem.init(asset: asset)


                self.player.replaceCurrentItem(with: item)

                let assetDuration = CMTimeGetSeconds(composition.duration)
                self.progressSlider.maximumValue = Float(assetDuration)

                self.syncLayer.removeFromSuperlayer()
                self.lblIntro.isHidden = true

                self.player.play()
                //                    let url =  URL.init(fileURLWithPath: videoPath)
                //                    let activityVC = UIActivityViewController(activityItems: [url], applicationActivities: [])
                //                    self.present(activityVC, animated: true, completion: nil)
            }

        }
    })

Below is the code of my VideoModel class. Note that videoOrientation: is a class method, so it can be called as VideoModel.videoOrientation(_:) from Swift:

+ (AVCaptureVideoOrientation)videoOrientation:(AVAsset *)asset
{
    AVCaptureVideoOrientation result = 0;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        // Portrait
        if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0)
        {
            result = AVCaptureVideoOrientationPortrait;
        }
        // PortraitUpsideDown
        if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0)  {

            result = AVCaptureVideoOrientationPortraitUpsideDown;
        }
        // LandscapeRight
        if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0)
        {
            result = AVCaptureVideoOrientationLandscapeRight;
        }
        // LandscapeLeft
        if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0)
        {
            result = AVCaptureVideoOrientationLandscapeLeft;
        }
    }
    return result;
}
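
If you would rather keep everything in Swift, a roughly equivalent helper could look like this. This is an untested sketch of the same preferredTransform checks; it returns .landscapeRight as the fallback where the Objective-C version returns 0.

    import AVFoundation

    func videoOrientation(for asset: AVAsset) -> AVCaptureVideoOrientation {
        guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
            return .landscapeRight // no video track; the Objective-C version returns 0 here
        }
        let t = videoTrack.preferredTransform
        if t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0 {
            return .portrait
        } else if t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0 {
            return .portraitUpsideDown
        } else if t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0 {
            return .landscapeLeft
        }
        return .landscapeRight // identity transform or anything unrecognised
    }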

Let me know if you need any more help with this.
