I am assembling a bunch of video clips filmed on the iPhone in portrait mode. To assemble them I am taking a straightforward approach: I use an AVURLAsset to get hold of each clip's video and audio tracks, then insert those tracks into an AVMutableComposition.
Here's some code that adjusts the rotation using the preferred transform:
// our composition, with one video and one audio track
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

// loop through all the clips, adding them to our tracks and inserting composition instructions
NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey: @YES };
CMTime insertionPoint = kCMTimeZero;
NSMutableArray *instructions = [NSMutableArray array];
NSError *error = nil;

for (NSURL *outputFileURL in self.movieFileURLs) {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:outputFileURL options:options];

    // insert this clip's video track into our composition
    NSArray *videoAssetTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    AVAssetTrack *videoAssetTrack = (videoAssetTracks.count > 0 ? [videoAssetTracks objectAtIndex:0] : nil);
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:insertionPoint error:&error];

    // create a layer instruction at the start of this clip to apply the preferred transform to correct orientation issues
    AVMutableVideoCompositionLayerInstruction *instruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
    [instruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];

    // create the composition instruction for the range of this clip
    AVMutableVideoCompositionInstruction *videoTrackInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    videoTrackInstruction.timeRange = CMTimeRangeMake(insertionPoint, asset.duration);
    videoTrackInstruction.layerInstructions = @[instruction];
    [instructions addObject:videoTrackInstruction];

    // insert this clip's audio track into our composition
    NSArray *audioAssetTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    AVAssetTrack *audioAssetTrack = (audioAssetTracks.count > 0 ? [audioAssetTracks objectAtIndex:0] : nil);
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssetTrack atTime:insertionPoint error:&error];

    // advance the insertion point to the end of the clip
    insertionPoint = CMTimeAdd(insertionPoint, asset.duration);
}
// create our video composition which will be assigned to the player item
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = instructions;
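// drive the frame rate and output size from the composition's video track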
videoComposition.frameDuration = CMTimeMake(1, videoTrack.naturalTimeScale);
videoComposition.renderSize = videoTrack.naturalSize;
_videoComposition = videoComposition;
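For reference, the player-side wiring looks roughly like this (a minimal sketch: `composition` and `_videoComposition` are the objects built above, and the AVPlayer setup here is only illustrative of how the composition gets assigned to the player item):

// wrap the composition in a player item and attach the video composition
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:composition];
playerItem.videoComposition = _videoComposition;

// play it back (the AVPlayerLayer displaying this player is set up elsewhere)
AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
[player play];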