Video rotated after applying AVVideoComposition

Submitted by 女生的网名这么多〃 on 2019-11-30 20:10:41
Dhiru

The problem is with the renderSize of your AVVideoComposition. You should apply a transform (i.e., a rotate-and-translate transform) through an AVMutableVideoCompositionLayerInstruction.

I have done it in Objective-C and am posting my code below; you can convert the syntax to Swift.

Objective-C

//------------------------------------
//      FIXING ORIENTATION
//------------------------------------


AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));

AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];

AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_  = UIImageOrientationUp;
BOOL  isFirstAssetPortrait_  = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
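// Inspect preferredTransform to infer how the clip was recorded:
// non-zero b/c entries mean a ±90° rotation (portrait capture),
// a/d of ±1 mean landscape, right-side-up or upside-down.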
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)  {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)  {FirstAssetOrientation_ =  UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)   {FirstAssetOrientation_ =  UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_){
    FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height;
    CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
    CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
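// Hide the first clip once it ends so the second clip shows through.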
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];



AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation SecondAssetOrientation_  = UIImageOrientationUp;
BOOL  isSecondAssetPortrait_  = NO;
CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform;
if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0)  {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0)  {SecondAssetOrientation_ =  UIImageOrientationLeft; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0)   {SecondAssetOrientation_ =  UIImageOrientationUp;}
if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;}
CGFloat SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_){
    SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.height;
    CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
}else{
    CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:firstAsset.duration];
}


MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, SecondlayerInstruction, nil];

AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 480.0);


// Now you have an orientation-fixed instruction layer.
// Add this composition to your video.
// If you want to export the video, you can do it as below:

NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_merged_video-%d.mp4", arc4random() % 1000]];


NSURL *url = [NSURL fileURLWithPath:myPathDocs];

// Create the exporter.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                  presetName:AVAssetExportPreset640x480];
exporter.outputURL = url;
exporter.videoComposition = MainCompositionInst;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        [[AppDelegate Getdelegate] hideIndicator];
        [self exportDidFinish:exporter];
    });
}];

For a Swift version of the same approach, see the sketch below.
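This is a minimal sketch, not a drop-in implementation: it assumes a single video track, uses the Swift 3 era API names that appear elsewhere on this page, and relies on preferredTransform already encoding the rotation and translation needed to display the track upright.

import AVFoundation

// A minimal sketch, assuming a single video track and that `asset`
// is your source AVAsset.
func makeUprightVideoComposition(for asset: AVAsset) -> AVMutableVideoComposition? {
    guard let track = asset.tracks(withMediaType: AVMediaTypeVideo).first else { return nil }

    let t = track.preferredTransform
    // Portrait capture swaps the axes, so a and d are both zero.
    let isPortrait = (t.a == 0 && t.d == 0)
    let naturalSize = track.naturalSize
    let renderSize = isPortrait
        ? CGSize(width: naturalSize.height, height: naturalSize.width)
        : naturalSize

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    layerInstruction.setTransform(t, at: kCMTimeZero)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
    instruction.layerInstructions = [layerInstruction]

    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [instruction]
    videoComposition.frameDuration = CMTimeMake(1, 30)
    videoComposition.renderSize = renderSize
    return videoComposition
}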

In addition, you can try rotating your video layer by applying a rotation transform to it:

#define degreeToRadian(x) (M_PI * (x) / 180.0)

[_playerLayer setAffineTransform:CGAffineTransformMakeRotation(degreeToRadian(degree))];

If you are trying to play an AVMutableComposition, you should set the AVMutableCompositionTrack's preferredTransform from the AVAssetTrack's preferredTransform.

let asset = AVAsset(url: url!)

let composition = AVMutableComposition()
let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)

let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first

try? compositionTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: videoTrack!, at: kCMTimeZero)

compositionTrack.preferredTransform = (videoTrack?.preferredTransform)!

let playerItem = AVPlayerItem(asset: composition)
let filter = CIFilter(name: "CIColorInvert")
playerItem.videoComposition = AVVideoComposition(asset: composition, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
            filter?.setValue(request.sourceImage, forKey: kCIInputImageKey)
            request.finish(with: (filter?.outputImage)!, context: nil)
        })
// ... the rest of the code

Instead of assuming that the image will be filtered, first check whether filteredImage is nil. If it is not, call request.finish(with: filteredImage, context: nil).

However, if it is nil, you must call request.finish(with: someError).

This is as per the docs.

What worked for me in the end:

private func filterVideo(with filter: Filter?) {
    guard let player = playerLayer?.player, let playerItem = player.currentItem else { return }

    let videoComposition = AVVideoComposition(asset: playerItem.asset, applyingCIFiltersWithHandler: { (request) in
        if let filter = filter {
            if let filteredImage = filter.filterImage(request.sourceImage) {
                let output = filteredImage.cropping(to: request.sourceImage.extent)
                request.finish(with: output, context: nil)
            } else {
                printError("Image not filtered")
                request.finish(with: RenderError.couldNotFilter)
            }
        } else {
            let output = request.sourceImage.cropping(to: request.sourceImage.extent)
            request.finish(with: output, context: nil)
        }
    })

    playerItem.videoComposition = videoComposition
}
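The RenderError used above is a custom error type that the answer does not define; a minimal definition might be:

enum RenderError: Error {
    case couldNotFilter
}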

This is the filterImage function of Filter, which is just a nice little wrapper for CIFilter:

func filterImage(_ ciImage: CIImage) -> CIImage? {
    guard let filter = ciFilter else { return nil }
    filter.setDefaults()
    filter.setValue(ciImage, forKey: kCIInputImageKey)
    guard let filteredImageData = filter.value(forKey: kCIOutputImageKey) as? CIImage else { return nil }
    return filteredImageData
}
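For completeness, here is a minimal sketch of the Filter wrapper itself. This is hypothetical; only the filterImage(_:) method appears in the original answer.

import CoreImage

// A minimal sketch of the Filter wrapper (hypothetical — only
// filterImage(_:) appears in the original answer).
struct Filter {
    let ciFilter: CIFilter?

    init(name: String) {
        ciFilter = CIFilter(name: name)
    }

    func filterImage(_ ciImage: CIImage) -> CIImage? {
        guard let filter = ciFilter else { return nil }
        filter.setDefaults()
        filter.setValue(ciImage, forKey: kCIInputImageKey)
        return filter.value(forKey: kCIOutputImageKey) as? CIImage
    }
}

// Usage: filterVideo(with: Filter(name: "CISepiaTone"))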
sibin

Try the code below, which worked for me.

// Grab the source track from your AVURLAsset, for example.
let assetV = yourAVAsset.tracks(withMediaType: AVMediaTypeVideo).last

// Grab the video track from the AVMutableComposition you already made.
let compositionV = yourComposition.tracks(withMediaType: AVMediaTypeVideo).last

// Apply the original transform.
if assetV != nil && compositionV != nil {
    compositionV?.preferredTransform = (assetV?.preferredTransform)!
}

And then go ahead and export your video...
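A minimal export sketch under the same assumptions (Swift 3 era APIs; `composition` is the AVMutableComposition from above, and the output URL is illustrative):

import AVFoundation

// A minimal export sketch. `composition` is your AVMutableComposition with
// preferredTransform already copied over from the source track.
func export(_ composition: AVMutableComposition, to outputURL: URL) {
    guard let exporter = AVAssetExportSession(asset: composition,
                                              presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.outputURL = outputURL
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.shouldOptimizeForNetworkUse = true
    exporter.exportAsynchronously {
        DispatchQueue.main.async {
            // Inspect exporter.status and exporter.error here.
            print("Export finished with status \(exporter.status.rawValue)")
        }
    }
}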
