I am trying to implement functionality like the following:
Final Recorded Video = "video captured from the front camera" + "audio recorded from the microphone"
// Add some extra code to the following methods. 1st method: play the movie and prepare the audio recorder.
- (void)playMovieAtURL:(NSURL *)theURL
{
    [player play];

    // Configure the shared audio session so we can play and record at the same time.
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    NSError *err = nil;
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&err];
    if (err)
    {
        NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        return;
    }

    err = nil; // reset *before* the next call so the check below reflects -setActive:error:
    [audioSession setActive:YES error:&err];
    if (err)
    {
        NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        return;
    }
    // Recorder settings: IMA4 ADPCM, 16 kHz, mono.
    recordSetting = [[NSMutableDictionary alloc] init];
    [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatAppleIMA4] forKey:AVFormatIDKey];
    [recordSetting setValue:[NSNumber numberWithFloat:16000.0] forKey:AVSampleRateKey];
    [recordSetting setValue:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey];

    recorderFilePath = [NSString stringWithFormat:@"%@/MySound.caf", DOCUMENTS_FOLDER];
    NSLog(@"recorderFilePath: %@", recorderFilePath);
    audio_url = [NSURL fileURLWithPath:recorderFilePath];

    // Remove any leftover recording from a previous run.
    err = nil;
    NSFileManager *fm = [NSFileManager defaultManager];
    if ([fm fileExistsAtPath:[audio_url path]])
    {
        [fm removeItemAtPath:[audio_url path] error:&err];
    }

    err = nil;
    recorder = [[AVAudioRecorder alloc] initWithURL:audio_url settings:recordSetting error:&err];
    if (!recorder)
    {
        NSLog(@"recorder: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        UIAlertView *alert =
            [[UIAlertView alloc] initWithTitle:@"Warning"
                                       message:[err localizedDescription]
                                      delegate:nil
                             cancelButtonTitle:@"OK"
                             otherButtonTitles:nil];
        [alert show];
        return;
    }
    // Prepare to record.
    [recorder setDelegate:self];
    [recorder prepareToRecord];
    recorder.meteringEnabled = YES;

    BOOL audioHWAvailable = audioSession.inputAvailable;
    if (!audioHWAvailable)
    {
        UIAlertView *cantRecordAlert =
            [[UIAlertView alloc] initWithTitle:@"Warning"
                                       message:@"Audio input hardware not available"
                                      delegate:nil
                             cancelButtonTitle:@"OK"
                             otherButtonTitles:nil];
        [cantRecordAlert show];
        return;
    }
}
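Note that playMovieAtURL: above only prepares the recorder, it never actually starts capturing. A minimal sketch of how I would start the two recordings together (startVideoRecording is just a name I made up; it assumes the same recorder, movieFileOutput and outputURL variables used in these methods):
// Hypothetical helper that starts the video and audio capture together.
- (void)startVideoRecording
{
    // Start writing the camera feed to disk; the recording delegate is told when the file is finalized.
    [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];

    // Start the microphone capture that was prepared in playMovieAtURL:.
    if (![recorder record])
    {
        NSLog(@"Could not start audio recording");
    }
}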
// 2nd method: stop playback and recording, then merge the audio and video into one composition.
- (void)stopVideoRecording
{
    [player.view removeFromSuperview];
    [player stop];

    // NOTE: -stopRecording is asynchronous; see the delegate sketch below for where
    // the movie file is actually guaranteed to be finished on disk.
    [movieFileOutput stopRecording];

    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_url options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:outputURL options:nil];

    mixComposition = [AVMutableComposition composition];

    // Audio track from the separately recorded audio file.
    AVMutableCompositionTrack *compositionCommentaryTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                        ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                         atTime:kCMTimeZero error:nil];

    // Video track from the camera recording.
    AVMutableCompositionTrack *compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                   ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                    atTime:kCMTimeZero error:nil];

    // The export session is created here, but the export itself (output URL, file type,
    // exportAsynchronouslyWithCompletionHandler:) is shown in the second snippet further down.
    AVAssetExportSession *_assetExport =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetPassthrough];

    // Preserve the original orientation of the recorded video.
    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform];
}
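Because -stopRecording finishes asynchronously, the movie file may not be fully written at the point where the composition is built above. A safer place to do the merge is the file output's recording delegate callback; a rough sketch (mergeAudio:withVideo: is a hypothetical helper that would wrap the composition/export code):
// AVCaptureFileOutputRecordingDelegate: called once the movie file has been fully written.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error
{
    if (error)
    {
        NSLog(@"Video recording failed: %@", error);
        return;
    }
    // The video file is complete here, so it is safe to merge it with the recorded audio.
    [self mergeAudio:audio_url withVideo:outputFileURL]; // hypothetical helper
}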
// Finally, play back the merged composition. Keep a strong reference to the player (e.g. a property) so it is not deallocated while playing.
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
AVPlayer *player1 = [AVPlayer playerWithPlayerItem:playerItem];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player1];
[playerLayer setFrame:CGRectMake(0, 0, 320, 480)];
[[[self view] layer] addSublayer:playerLayer];
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[player1 play];
player1.actionAtItemEnd = AVPlayerActionAtItemEndNone;
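Since actionAtItemEnd is AVPlayerActionAtItemEndNone, the player just sits on the last frame when playback finishes. If you want the merged video to loop, one option is to observe the end-of-item notification and seek back to the start. This is only a sketch; playerItemDidReachEnd: is a selector name I made up, and it assumes player1 and playerItem are kept alive as mentioned above:
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(playerItemDidReachEnd:)
                                             name:AVPlayerItemDidPlayToEndTimeNotification
                                           object:playerItem];

// Rewind the item; with AVPlayerActionAtItemEndNone the player keeps playing, so playback loops.
- (void)playerItemDidReachEnd:(NSNotification *)notification
{
    AVPlayerItem *item = [notification object];
    [item seekToTime:kCMTimeZero];
}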
The following standalone snippet merges a recorded audio file and a recorded video file and exports the result; I think this may help you:
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioUrl options:nil];
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];

AVMutableComposition *mixComposition = [AVMutableComposition composition];

AVMutableCompositionTrack *compositionCommentaryTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                    ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:kCMTimeZero error:nil];

AVMutableCompositionTrack *compositionVideoTrack =
    [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                preferredTrackID:kCMPersistentTrackID_Invalid];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                               ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                atTime:kCMTimeZero error:nil];
AVAssetExportSession *_assetExport =
    [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                     presetName:AVAssetExportPresetPassthrough];

NSString *videoName = @"export.mov";
NSString *exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];

// Remove any previous export at the same path.
if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath])
{
    [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil];
}

_assetExport.outputFileType = AVFileTypeQuickTimeMovie; // i.e. @"com.apple.quicktime-movie"
NSLog(@"file type %@", _assetExport.outputFileType);
_assetExport.outputURL = exportUrl;
_assetExport.shouldOptimizeForNetworkUse = YES;

[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    // your completion code here
}];
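Inside the completion handler it is worth checking whether the export actually succeeded before using the file; a minimal sketch of what could go in place of the placeholder comment:
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    switch (_assetExport.status)
    {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Export finished: %@", exportUrl);
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Export failed: %@", _assetExport.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Export cancelled");
            break;
        default:
            break;
    }
}];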
Courtesy: https://stackoverflow.com/a/3456565/1865424
You can also check the following code for recording video from the front camera:
// Requires #import <MobileCoreServices/MobileCoreServices.h> for kUTTypeMovie.
- (IBAction)cameraLibraryButtonClick:(id)sender
{
    if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera])
    {
        UIImagePickerController *videoRecorder = [[UIImagePickerController alloc] init];
        videoRecorder.delegate = self;

        // Query the media types the camera supports (not the picker's default source type).
        NSArray *sourceTypes =
            [UIImagePickerController availableMediaTypesForSourceType:UIImagePickerControllerSourceTypeCamera];
        NSLog(@"Available media types for the camera = %@", sourceTypes);
        if (![sourceTypes containsObject:(NSString *)kUTTypeMovie])
        {
            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:nil
                                                            message:@"This device does not support video recording."
                                                           delegate:nil
                                                  cancelButtonTitle:@"OK"
                                                  otherButtonTitles:nil];
            [alert show];
            [alert release];
            return;
        }

        videoRecorder.cameraDevice = UIImagePickerControllerCameraDeviceFront;
        videoRecorder.sourceType = UIImagePickerControllerSourceTypeCamera;
        videoRecorder.mediaTypes = [NSArray arrayWithObject:(NSString *)kUTTypeMovie];
        videoRecorder.videoQuality = UIImagePickerControllerQualityTypeLow;
        videoRecorder.videoMaximumDuration = 120; // seconds

        self.imagePicker = videoRecorder;
        [videoRecorder release];

        // presentModalViewController:animated: is deprecated; use
        // presentViewController:animated:completion: on newer SDKs.
        [self presentModalViewController:self.imagePicker animated:YES];
        newMedia = YES;
    }
    else
    {
        [self displaysorceError];
    }
}
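To actually get the recorded movie back from the picker you also need the UIImagePickerControllerDelegate callback; a rough sketch (here I only log the URL, but this is where you would feed it into the merge code above):
- (void)imagePickerController:(UIImagePickerController *)picker
didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    NSString *mediaType = [info objectForKey:UIImagePickerControllerMediaType];
    if ([mediaType isEqualToString:(NSString *)kUTTypeMovie])
    {
        // File URL of the movie the user just recorded.
        NSURL *recordedVideoURL = [info objectForKey:UIImagePickerControllerMediaURL];
        NSLog(@"Recorded video at %@", recordedVideoURL);
    }
    [picker dismissModalViewControllerAnimated:YES];
}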
Courtesy: https://stackoverflow.com/a/14154289/1865424
If this doesn't work for you, let me know, but I think it will help you out.