Swift: Merge AVAsset videos array

忘了有多久 2021-02-04 20:18

I want to merge the AVAsset array arrayVideos into one single video and save it to the camera roll. Raywenderlich.com has a great tutorial where two videos are merged into one.

3 Answers
  • 2021-02-04 20:36

    You need to track the total time for all of the assets and update it for each video.

    The code in your question was overwriting atTimeM using only the previous video's duration. That's why only the first and last videos made it into the result.

    It will look something like this:

    ...
    var totalTime: CMTime = CMTimeMake(0, 1) // timescale must be non-zero
    
    func mergeVideoArray() {
    
        let mixComposition = AVMutableComposition()
        for videoAsset in arrayVideos {
            let videoTrack = 
                mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, 
                                               preferredTrackID: Int32(kCMPersistentTrackID_Invalid))          
            do {
                if videoAsset == arrayVideos.first {
                    atTimeM = kCMTimeZero
                } else {
                    atTimeM = totalTime // <-- Use the total time for all the videos seen so far.
                }
                try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), 
                                               of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], 
                                               at: atTimeM)  
                videoSize = videoTrack.naturalSize
            } catch let error as NSError {
                print("error: \(error)")
            }
        totalTime = CMTimeAdd(totalTime, videoAsset.duration) // <-- Update the total time for all videos.
    ...
    

    You can remove the use of lastAsset.
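
    For reference, here is the same loop in current Swift spelling (Swift 4.2 and later renamed the kCMTime constants and the timing constructors); a minimal sketch, assuming arrayVideos is an [AVAsset]:

    import AVFoundation

    var totalTime = CMTime.zero
    let mixComposition = AVMutableComposition()

    for videoAsset in arrayVideos {
        // addMutableTrack returns an optional in Swift 4 and later.
        guard let videoTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                              preferredTrackID: kCMPersistentTrackID_Invalid),
              let sourceTrack = videoAsset.tracks(withMediaType: .video).first else { continue }
        do {
            // Insert each clip at the running total of all previous durations.
            try videoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: videoAsset.duration),
                                           of: sourceTrack,
                                           at: totalTime)
        } catch {
            print("error: \(error)")
        }
        totalTime = CMTimeAdd(totalTime, videoAsset.duration)
    }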

  • 2021-02-04 20:42

    Swift 4

    Use it like this:

    MeargeVide.mergeVideoArray(arrayVideos: arrayAsset) { (urlMeargeVide, error) in
        debugPrint("url", urlMeargeVide ?? "")
        debugPrint("error", error ?? "")
    }
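
    Here arrayAsset is simply an array of AVAsset. One hypothetical way to build it from file URLs (videoURLs is an assumed [URL] pointing at your clips):

    let videoURLs: [URL] = [] // fill with the URLs of your clips
    let arrayAsset = videoURLs.map { AVAsset(url: $0) }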
    

    A complete class that handles orientation and merges multiple clips into a single video:

    import AVFoundation
    import UIKit

    class MeargeVide {
    
       static func orientationFromTransform(_ transform: CGAffineTransform)
            -> (orientation: UIImageOrientation, isPortrait: Bool) {
                var assetOrientation = UIImageOrientation.up
                var isPortrait = false
                if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
                    assetOrientation = .right
                    isPortrait = true
                } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
                    assetOrientation = .left
                    isPortrait = true
                } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
                    assetOrientation = .up
                } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
                    assetOrientation = .down
                }
                return (assetOrientation, isPortrait)
        }
    
        static  func videoCompositionInstruction(_ track: AVCompositionTrack, asset: AVAsset)
            -> AVMutableVideoCompositionLayerInstruction {
                let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
                let assetTrack = asset.tracks(withMediaType: .video)[0]
    
                let transform = assetTrack.preferredTransform
                let assetInfo = orientationFromTransform(transform)
    
                var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
                if assetInfo.isPortrait {
                    scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
                    let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                    instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor), at: kCMTimeZero)
                } else {
                    let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                    var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
                        .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
                    if assetInfo.orientation == .down {
                        let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                        let windowBounds = UIScreen.main.bounds
                        let yFix = assetTrack.naturalSize.height + windowBounds.height
                        let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                        concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
                    }
                    instruction.setTransform(concat, at: kCMTimeZero)
                }
    
                return instruction
        }
    
        class func mergeVideoArray(arrayVideos:[AVAsset], callBack:@escaping (_ urlGet:URL?,_ errorGet:Error?) -> Void){
    
        var layerInstructionsArray = [AVVideoCompositionLayerInstruction]()
        // Running total of all durations inserted so far (timescale must be non-zero).
        var completeTrackDuration: CMTime = CMTimeMake(0, 1)
    
            let mixComposition = AVMutableComposition.init()
            for videoAsset in arrayVideos{
    
                let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
                do {
                // Insert each clip at the end of everything inserted so far.
                try videoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                                of: videoAsset.tracks(withMediaType: AVMediaType.video)[0],
                                                at: completeTrackDuration)
                } catch let error as NSError {
                    print("error: \(error)")
                }
    
            completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)

            let instruction = self.videoCompositionInstruction(videoTrack!, asset: videoAsset)
            // Hide this clip at its end time in the composition timeline, not at
            // videoAsset.duration, so it does not cover the clips that follow it.
            instruction.setOpacity(0.0, at: completeTrackDuration)

            layerInstructionsArray.append(instruction)
            }
    
    
            let mainInstruction = AVMutableVideoCompositionInstruction()
            mainInstruction.layerInstructions = layerInstructionsArray
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, completeTrackDuration)
    
            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(1, 30)
            mainComposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
    
            let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
            let dateFormatter = DateFormatter()
            dateFormatter.dateStyle = .long
            dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        // Use an .mp4 extension to match the outputFileType below.
        let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mp4")
        let url = URL(fileURLWithPath: savePath)
    
        guard let exporter = AVAssetExportSession(asset: mixComposition,
                                                  presetName: AVAssetExportPresetHighestQuality) else {
            callBack(nil, nil)
            return
        }
        exporter.outputURL = url
        exporter.outputFileType = AVFileType.mp4
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = mainComposition
        exporter.exportAsynchronously {
            DispatchQueue.main.async {
                callBack(exporter.outputURL, exporter.error)
            }
        }
        } 
    }
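
    The question also asks for the merged file to end up in the camera roll, and the export above only writes to the app's documents directory. A minimal sketch of that extra step with PHPhotoLibrary (assumes NSPhotoLibraryAddUsageDescription is set in Info.plist; the saveToCameraRoll name is just for illustration):

    import Photos

    func saveToCameraRoll(_ url: URL) {
        PHPhotoLibrary.requestAuthorization { status in
            guard status == .authorized else { return }
            PHPhotoLibrary.shared().performChanges({
                // Creates a new video asset in the user's photo library.
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
            }) { saved, error in
                debugPrint("saved:", saved, "error:", error ?? "")
            }
        }
    }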
    
  • 2021-02-04 20:56

    You don't need atTimeM at all: completeTrackDuration already marches along the composition and marks exactly where the next piece should be inserted. So replace

    if videoAsset == arrayVideos.first {
        atTimeM = kCMTimeZero
    } else {
        atTimeM = lastAsset!.duration
    }
    try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                   of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                   at: atTimeM)
    

    with

    try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                   of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                   at: completeTrackDuration)
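
    With that change the whole loop reduces to something like the sketch below (same Swift 3-era API as the question; note that completeTrackDuration must be created with a non-zero timescale, e.g. CMTimeMake(0, 1)):

    var completeTrackDuration = CMTimeMake(0, 1)
    let mixComposition = AVMutableComposition()

    for videoAsset in arrayVideos {
        let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                        preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            // The running duration is exactly where the next clip belongs.
            try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                           of: videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                           at: completeTrackDuration)
        } catch let error as NSError {
            print("error: \(error)")
        }
        completeTrackDuration = CMTimeAdd(completeTrackDuration, videoAsset.duration)
    }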
    