I have just made a simple app which uses QTKit to merge two videos together.
Here is the problem:
I have just discovered that if I t
To do this, it's easiest to use the AVFoundation framework instead of QTKit, as editing/manipulating the video is much easier (imo), though it seems to be a bit slower and the code may be less compact.
I'd suggest starting by reading the AVFoundation programming guide at https://developer.apple.com/library/mac/documentation/AudioVideo/Conceptual/AVFoundationPG/AVFoundationPG.pdf
But here's a basic example to get you started. Before starting, make sure you have linked against the AVFoundation and CoreMedia frameworks.
In your header, besides any other methods or ivars that you may have, add an `AVAssetExportSession *exporter` ivar and an `NSTimer *timer` ivar, as well as a method declaration `- (void)monitorProgress;`.
Your implementation file would then include the following methods (assuming you're triggering the method with an IBAction called doIt). And don't forget to `#import <AVFoundation/AVFoundation.h>` and `#import <CoreMedia/CoreMedia.h>`:
// Concatenates a list of movies end-to-end into a single composition and
// exports it asynchronously as a QuickTime file. Progress is polled by a
// repeating timer that calls -monitorProgress.
- (IBAction)doIt:(id)sender {
    // Initial array of movie URLs, appended in order.
    NSArray *myMovieURLs = @[[NSURL fileURLWithPath:@"/path/to/first.mov"],
                             [NSURL fileURLWithPath:@"/path/to/second.mov"]];

    // Create the composition & A/V tracks.
    AVMutableComposition *comp = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack =
        [comp addMutableTrackWithMediaType:AVMediaTypeVideo
                          preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack =
        [comp addMutableTrackWithMediaType:AVMediaTypeAudio
                          preferredTrackID:kCMPersistentTrackID_Invalid];

    // Running insertion point: each movie starts where the previous one ended.
    CMTime startTime = kCMTimeZero;

    for (NSURL *movieURL in myMovieURLs) {
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:movieURL options:nil];
        CMTimeRange assetRange = CMTimeRangeMake(kCMTimeZero, [asset duration]);

        // Guard against assets with no video track — indexing an empty
        // track array would crash.
        NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        if ([videoTracks count] == 0) {
            NSLog(@"Skipping %@: no video track", movieURL);
            continue;
        }

        NSError *insertError = nil;
        BOOL success = [compositionVideoTrack insertTimeRange:assetRange
                                                      ofTrack:[videoTracks objectAtIndex:0]
                                                       atTime:startTime
                                                        error:&insertError];
        if (!success) {
            NSLog(@"Failed to insert video from %@: %@",
                  movieURL, [insertError localizedDescription]);
            continue;
        }

        // Audio is optional — inserting from an empty track array would crash.
        NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
        if ([audioTracks count] > 0) {
            success = [compositionAudioTrack insertTimeRange:assetRange
                                                     ofTrack:[audioTracks objectAtIndex:0]
                                                      atTime:startTime
                                                       error:&insertError];
            if (!success) {
                NSLog(@"Failed to insert audio from %@: %@",
                      movieURL, [insertError localizedDescription]);
            }
        }

        // Advance the insertion point to the end of the clip just added.
        startTime = CMTimeAdd(startTime, [asset duration]);
    }

    // Set the output URL.
    NSURL *outputURL = [NSURL fileURLWithPath:@"/path/to/output.mov"];

    /* Create the exporter.
       The preset type is up to you to choose. If you wanted, you could check the
       asset's size or other values above and base your preset on that. Use
       exportPresetsCompatibleWithAsset: to get a list of presets that are
       compatible with a specific asset. */
    NSLog(@"Compat presets you could use: %@",
          [AVAssetExportSession exportPresetsCompatibleWithAsset:comp]);
    exporter = [[AVAssetExportSession alloc] initWithAsset:comp
                                                presetName:AVAssetExportPreset640x480];
    [exporter setOutputURL:outputURL];
    [exporter setOutputFileType:AVFileTypeQuickTimeMovie];
    [exporter exportAsynchronouslyWithCompletionHandler:^(void) {
        switch ([exporter status]) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export finished: %@", outputURL);
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export failed: %@", [[exporter error] localizedDescription]);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export canceled");
                break;
            default:
                break;
        }
    }];

    // Simple repeating timer that logs progress; -monitorProgress invalidates
    // it once the export session is no longer running.
    timer = [NSTimer scheduledTimerWithTimeInterval:5
                                             target:self
                                           selector:@selector(monitorProgress)
                                           userInfo:nil
                                            repeats:YES];
}
// Logs export progress and invalidates the polling timer once the export
// session has finished for any reason.
- (void)monitorProgress {
    AVAssetExportSessionStatus status = [exporter status];
    // Key off status rather than (progress == 1.0): a failed or cancelled
    // export never reaches progress 1.0, and exact float equality is fragile —
    // either way the repeating timer would otherwise fire (and retain its
    // target) forever.
    if (status != AVAssetExportSessionStatusWaiting &&
        status != AVAssetExportSessionStatusExporting) {
        [timer invalidate];
    }
    NSLog(@"Progress: %f", [exporter progress] * 100);
}