Swift 3: How to add a watermark to a video? AVVideoCompositionCoreAnimationTool iOS 10 issue

This solution works for me. It is super easy and super fast, and it sidesteps the AVVideoCompositionCoreAnimationTool issue on iOS 10 by compositing the watermark with a Core Image filter instead:

import AVFoundation
import CoreImage
import UIKit

func addWatermark(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
    let asset = AVAsset(url: inputURL)

    // NOTE: as in the original answer, a composition is assembled here, but the
    // filter and the export at the end actually run on the original asset.
    let mixComposition = AVMutableComposition()
    let videoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
    let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration)
    let compositionVideoTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))!

    do {
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
    } catch {
        print(error)
    }

    // Composite the watermark over every frame with Core Image rather than
    // AVVideoCompositionCoreAnimationTool (which misbehaves on iOS 10).
    let watermarkFilter = CIFilter(name: "CISourceOverCompositing")!
    let watermarkImage = CIImage(image: UIImage(named: "waterMark")!)!
    let videoComposition = AVVideoComposition(asset: asset) { filteringRequest in
        let source = filteringRequest.sourceImage.clampedToExtent()
        watermarkFilter.setValue(source, forKey: kCIInputBackgroundImageKey)
        // Core Image's origin is bottom-left, so this pins the watermark to the
        // bottom-right corner with a 2 pt inset.
        let transform = CGAffineTransform(translationX: filteringRequest.sourceImage.extent.width - watermarkImage.extent.width - 2, y: 0)
        watermarkFilter.setValue(watermarkImage.transformed(by: transform), forKey: kCIInputImageKey)
        filteringRequest.finish(with: watermarkFilter.outputImage!, context: nil)
    }
    }

    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset640x480) else {
        handler(nil)

        return
    }

    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.videoComposition = videoComposition
    exportSession.exportAsynchronously { () -> Void in
        handler(exportSession)
    }
}

Sorry that I haven't posted how to call this function. It's been a while since I touched that code, so I hope this will be enough:

let outputURL = URL(fileURLWithPath: "TempPath")
let inputURL = URL(fileURLWithPath: "VideoWithWatermarkPath")
addWatermark(inputURL: inputURL, outputURL: outputURL, handler: { (exportSession) in
    guard let session = exportSession else {
        // Error 
        return
    }
    switch session.status {
    case .completed:
        guard NSData(contentsOf: outputURL) != nil else {
            // Error: the exported file is missing or unreadable
            return
        }

        // Now you can find the video with the watermark at outputURL

    default:
        // Error: the export failed or was cancelled
        break
    }
})
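
For completeness, here is a minimal sketch of how the pieces could fit together end to end. It is a sketch, not part of the original answer: the "source.mp4" input is a placeholder for a video already in the app sandbox, and saving to the photo library assumes the NSPhotoLibraryUsageDescription key is set in Info.plist.

import AVFoundation
import Photos

// Placeholder input: substitute the URL of a video in your app's sandbox.
let inputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("source.mp4")
// Write the result to a unique file in the temporary directory.
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("watermarked-\(UUID().uuidString).mp4")

addWatermark(inputURL: inputURL, outputURL: outputURL) { exportSession in
    guard let session = exportSession, session.status == .completed else {
        print("Export failed: \(String(describing: exportSession?.error))")
        return
    }
    // Requires NSPhotoLibraryUsageDescription in Info.plist.
    PHPhotoLibrary.shared().performChanges({
        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
    }) { saved, error in
        print(saved ? "Saved to the photo library" : "Save failed: \(String(describing: error))")
    }
}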

I got this answer from here and it works for me; see if it works for you too. Note that this is the original pre-Swift 3 code; a Swift 3 translation follows below.

import UIKit
import AssetsLibrary
import AVFoundation

enum QUWatermarkPosition {
    case TopLeft
    case TopRight
    case BottomLeft
    case BottomRight
    case Default
}

class QUWatermarkManager: NSObject {

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status: status, session: session, outputURL: outputURL)
        }
    }

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status: status, session: session, outputURL: outputURL)
        }
    }

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {

        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), { () -> Void in
            var mixComposition = AVMutableComposition()

            var compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            var clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
            do {
                try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), ofTrack: clipVideoTrack, atTime: kCMTimeZero)
            } catch {
                print(error)
            }
            compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform

            let videoSize = clipVideoTrack.naturalSize

            var parentLayer = CALayer()
            var videoLayer = CALayer()
            parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
            videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
            parentLayer.addSublayer(videoLayer)

            if text != nil {
                var titleLayer = CATextLayer()
                titleLayer.backgroundColor = UIColor.redColor().CGColor
                titleLayer.string = text
                titleLayer.font = "Helvetica"
                titleLayer.fontSize = 15
                titleLayer.alignmentMode = kCAAlignmentCenter
                titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height)
                parentLayer.addSublayer(titleLayer)
            } else if name != nil {
                var watermarkImage = UIImage(named: name)
                var imageLayer = CALayer()
                imageLayer.contents = watermarkImage?.CGImage

                var xPosition : CGFloat = 0.0
                var yPosition : CGFloat = 0.0
                let imageSize : CGFloat = 57.0

                switch (position) {
                case .TopLeft:
                    xPosition = 0
                    yPosition = 0
                    break
                case .TopRight:
                    xPosition = videoSize.width - imageSize
                    yPosition = 0
                    break
                case .BottomLeft:
                    xPosition = 0
                    yPosition = videoSize.height - imageSize
                    break
                case .BottomRight, .Default:
                    xPosition = videoSize.width - imageSize
                    yPosition = videoSize.height - imageSize
                    break
                }


                imageLayer.frame = CGRectMake(xPosition, yPosition, imageSize, imageSize)
                imageLayer.opacity = 0.65
                parentLayer.addSublayer(imageLayer)
            }

            var videoComp = AVMutableVideoComposition()
            videoComp.renderSize = videoSize
            videoComp.frameDuration = CMTimeMake(1, 30)
            videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

            var instruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)

            let layerInstruction = self.videoCompositionInstructionForTrack(compositionVideoTrack, asset: videoAsset)

            instruction.layerInstructions = [layerInstruction]
            videoComp.instructions = [instruction]

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
            var dateFormatter = NSDateFormatter()
            dateFormatter.dateStyle = .LongStyle
            dateFormatter.timeStyle = .ShortStyle
            let date = dateFormatter.stringFromDate(NSDate())
            let savePath = documentDirectory.stringByAppendingPathComponent("watermarkVideo-\(date).mov")
            let url = NSURL(fileURLWithPath: savePath)

            let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
            exporter.outputURL = url
            exporter.outputFileType = AVFileTypeQuickTimeMovie
            exporter.shouldOptimizeForNetworkUse = true
            exporter.videoComposition = videoComp

            exporter.exportAsynchronouslyWithCompletionHandler() {
                dispatch_async(dispatch_get_main_queue(), { () -> Void in
                    if exporter.status == AVAssetExportSessionStatus.Completed {
                        let outputURL = exporter.outputURL
                        if flag {
                            // Save to library
                            let library = ALAssetsLibrary()
                            if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
                                library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
                                    completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
                                        completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL)
                                })
                            }
                        } else {
                            completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL)
                        }

                    } else {
                        // Error
                        completion!(status: exporter.status, session: exporter, outputURL: nil)
                    }
                })
            }
        })
    }


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.Up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .Right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .Left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .Up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .Down
        }
        return (assetOrientation, isPortrait)
    }

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack

        var transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform)

        var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
            instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
                atTime: kCMTimeZero)
        } else {
            let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
            var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
            if assetInfo.orientation == .Down {
                let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
                let windowBounds = UIScreen.mainScreen().bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
                concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
            }
            instruction.setTransform(concat, atTime: kCMTimeZero)
        }

        return instruction
    }
}

The answer of @User511, updated for Swift 3:

import UIKit
import AVFoundation
import Photos

enum QUWatermarkPosition {
    case TopLeft
    case TopRight
    case BottomLeft
    case BottomRight
    case Default
}

class QUWatermarkManager: NSObject {

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status, session, outputURL)
        }
    }

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status, session, outputURL)
        }
    }

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async {

            let mixComposition = AVMutableComposition()

            let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
            do {
                try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
            }
            catch {
                print(error.localizedDescription)
            }

            let videoSize = clipVideoTrack.naturalSize

            let parentLayer = CALayer()
            let videoLayer = CALayer()
            parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            parentLayer.addSublayer(videoLayer)

            if text != nil {
                let titleLayer = CATextLayer()
                titleLayer.backgroundColor = UIColor.red.cgColor
                titleLayer.string = text
                titleLayer.font = "Helvetica" as CFTypeRef
                titleLayer.fontSize = 15
                titleLayer.alignmentMode = kCAAlignmentCenter
                titleLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
                parentLayer.addSublayer(titleLayer)
            } else if name != nil {
                let watermarkImage = UIImage(named: name)
                let imageLayer = CALayer()
                imageLayer.contents = watermarkImage?.cgImage

                var xPosition : CGFloat = 0.0
                var yPosition : CGFloat = 0.0
                let imageSize : CGFloat = 57.0

                switch (position) {
                case .TopLeft:
                    xPosition = 0
                    yPosition = 0
                    break
                case .TopRight:
                    xPosition = videoSize.width - imageSize
                    yPosition = 0
                    break
                case .BottomLeft:
                    xPosition = 0
                    yPosition = videoSize.height - imageSize
                    break
                case .BottomRight, .Default:
                    xPosition = videoSize.width - imageSize
                    yPosition = videoSize.height - imageSize
                    break
                }


                imageLayer.frame = CGRect(x: xPosition, y: yPosition, width: imageSize, height: imageSize)
                imageLayer.opacity = 0.65
                parentLayer.addSublayer(imageLayer)
            }

            let videoComp = AVMutableVideoComposition()
            videoComp.renderSize = videoSize
            videoComp.frameDuration = CMTimeMake(1, 30)
            videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

            let instruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)

            let layerInstruction = self.videoCompositionInstructionForTrack(track: compositionVideoTrack, asset: videoAsset)

            instruction.layerInstructions = [layerInstruction]
            videoComp.instructions = [instruction]

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] 
            let dateFormatter = DateFormatter()
            dateFormatter.dateStyle = .long
            dateFormatter.timeStyle = .short
            let date = dateFormatter.string(from: Date())
            let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mov")

            let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
            exporter?.outputURL = url
            exporter?.outputFileType = AVFileTypeQuickTimeMovie
            exporter?.shouldOptimizeForNetworkUse = true
            exporter?.videoComposition = videoComp

            exporter?.exportAsynchronously() {
                DispatchQueue.main.async {

                    if exporter?.status == AVAssetExportSessionStatus.completed {
                        let outputURL = exporter?.outputURL
                        if flag {
                            // Save to the photo library. ALAssetsLibrary is deprecated,
                            // so the Photos framework is used instead.
                            if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL!.path) {
                                PHPhotoLibrary.shared().performChanges({
                                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
                                }) { saved, error in
                                    if !saved {
                                        print("Could not save to the photo library: \(String(describing: error))")
                                    }
                                    // The export itself succeeded either way, so hand back the URL.
                                    completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                                }
                            } else {
                                // Not compatible with the photo library; still report success.
                                completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                            }
                        } else {
                            completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                        }

                    } else {
                        // Error
                        completion!(exporter?.status, exporter, nil)
                    }
                }
            }
        }
    }


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform: transform)

        var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor),
                                     at: kCMTimeZero)
        } else {
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
            if assetInfo.orientation == .down {
                let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                let windowBounds = UIScreen.main.bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
            }
            instruction.setTransform(concat, at: kCMTimeZero)
        }

        return instruction
    }
}
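
Neither answer shows a call site for the manager. As a hedged sketch of invoking the Swift 3 version (the video URL and the "waterMark" image name are assumptions, not part of the original code):

import AVFoundation

// Placeholder input: substitute the URL of a video in your app's sandbox.
let videoURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("source.mp4")
let videoAsset = AVAsset(url: videoURL)

QUWatermarkManager().watermark(video: videoAsset,
                               imageName: "waterMark", // assumed to be in the asset catalog
                               saveToLibrary: true,
                               watermarkPosition: .BottomRight) { status, session, outputURL in
    if status == .completed, let url = outputURL {
        print("Watermarked video written to \(url)")
    } else {
        print("Export failed: \(String(describing: session?.error))")
    }
}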