Video recording in iOS programmatically

有刺的猬 · 2021-02-03 12:44

I am trying to implement functionality like the one below:

Final Recorded Video = "Capture a video from the front camera + Record an audio"
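
The answer below merges a separately recorded audio file with a movie captured by an AVCaptureMovieFileOutput, but it never shows the capture-session setup itself. Here is a minimal sketch of that part, assuming ivars named `session` and `movieFileOutput` (the latter matching the name used in the answer) and a hypothetical `setupCaptureSession` method:

    // Sketch: front camera feeding an AVCaptureMovieFileOutput.
    // `session` is an assumed ivar; `movieFileOutput` matches the name used in the answer below.
    - (void)setupCaptureSession
    {
        session = [[AVCaptureSession alloc] init];
        session.sessionPreset = AVCaptureSessionPresetMedium;

        // Pick the front-facing camera.
        AVCaptureDevice *frontCamera = nil;
        for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
            if (device.position == AVCaptureDevicePositionFront) {
                frontCamera = device;
                break;
            }
        }

        NSError *error = nil;
        AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
        if (videoInput && [session canAddInput:videoInput]) {
            [session addInput:videoInput];
        }

        movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ([session canAddOutput:movieFileOutput]) {
            [session addOutput:movieFileOutput];
        }

        [session startRunning];
    }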

2 Answers
  •  悲&欢浪女
    2021-02-03 13:16

    // Add some extra code for the following methods. 1st method:

    - (void)playMovieAtURL:(NSURL *)theURL
    {
        [player play];

        // Configure the shared audio session so playback and recording can run together.
        AVAudioSession *audioSession = [AVAudioSession sharedInstance];
        NSError *err = nil;
        [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&err];
        if (err)
        {
            NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
            return;
        }

        err = nil;
        [audioSession setActive:YES error:&err];
        if (err)
        {
            NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
            return;
        }

        // Recorder settings: IMA4-compressed mono audio at 16 kHz.
        recordSetting = [[NSMutableDictionary alloc] init];
        [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatAppleIMA4] forKey:AVFormatIDKey];
        [recordSetting setValue:[NSNumber numberWithFloat:16000.0] forKey:AVSampleRateKey];
        [recordSetting setValue:[NSNumber numberWithInt:1] forKey:AVNumberOfChannelsKey];

        recorderFilePath = [NSString stringWithFormat:@"%@/MySound.caf", DOCUMENTS_FOLDER];
        NSLog(@"recorderFilePath: %@", recorderFilePath);
        audio_url = [NSURL fileURLWithPath:recorderFilePath];

        // Remove any leftover recording from a previous run.
        err = nil;
        NSData *audioData = [NSData dataWithContentsOfFile:[audio_url path] options:0 error:&err];
        if (audioData)
        {
            NSFileManager *fm = [NSFileManager defaultManager];
            [fm removeItemAtPath:[audio_url path] error:&err];
        }

        err = nil;
        recorder = [[AVAudioRecorder alloc] initWithURL:audio_url settings:recordSetting error:&err];
        if (!recorder)
        {
            NSLog(@"recorder: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
            UIAlertView *alert =
            [[UIAlertView alloc] initWithTitle:@"Warning"
                                       message:[err localizedDescription]
                                      delegate:nil
                             cancelButtonTitle:@"OK"
                             otherButtonTitles:nil];
            [alert show];
            return;
        }

        // Prepare to record.
        [recorder setDelegate:self];
        [recorder prepareToRecord];
        recorder.meteringEnabled = YES;

        BOOL audioHWAvailable = audioSession.inputAvailable;
        if (!audioHWAvailable)
        {
            UIAlertView *cantRecordAlert =
            [[UIAlertView alloc] initWithTitle:@"Warning"
                                       message:@"Audio input hardware not available"
                                      delegate:nil
                             cancelButtonTitle:@"OK"
                             otherButtonTitles:nil];
            [cantRecordAlert show];
            return;
        }
    }
    

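    // Note: the 1st method only *prepares* the AVAudioRecorder; nothing above actually starts
    // recording. Below is a sketch (not from the original answer) of a companion method that
    // starts the movie-file recording and the audio recording together. `session` and
    // `movieFileOutput` come from the capture-session sketch above; `outputURL` matches the
    // ivar the 2nd method reads, and the file name "MyVideo.mov" is just an example.

    - (void)startVideoRecording
    {
        NSString *videoPath = [NSString stringWithFormat:@"%@/MyVideo.mov", DOCUMENTS_FOLDER];
        [[NSFileManager defaultManager] removeItemAtPath:videoPath error:nil];
        outputURL = [NSURL fileURLWithPath:videoPath];

        // Start writing the movie file; self must adopt AVCaptureFileOutputRecordingDelegate.
        [movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];

        // Start the audio recording prepared in the 1st method.
        [recorder record];
    }
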
    // 2nd method

    - (void)stopVideoRecording
    {
        [player.view removeFromSuperview];
        [player stop];
        [recorder stop];
        [movieFileOutput stopRecording];

        // Load the recorded audio and video files as assets.
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_url options:nil];
        AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:outputURL options:nil];

        mixComposition = [AVMutableComposition composition];

        // Add the recorded audio as one track of the composition.
        AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                            preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                                            ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                             atTime:kCMTimeZero error:nil];

        // Add the recorded video as the other track.
        AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                       preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                       ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                        atTime:kCMTimeZero error:nil];

        // Keep the camera's orientation in the merged video.
        AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        [compositionVideoTrack setPreferredTransform:videoTrack.preferredTransform];

        // Export the merged composition to a new movie file (example path).
        AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                              presetName:AVAssetExportPresetPassthrough];
        NSString *mergedPath = [NSString stringWithFormat:@"%@/Merged.mov", DOCUMENTS_FOLDER];
        [[NSFileManager defaultManager] removeItemAtPath:mergedPath error:nil];
        _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
        _assetExport.outputURL = [NSURL fileURLWithPath:mergedPath];
        [_assetExport exportAsynchronouslyWithCompletionHandler:^{
            if (_assetExport.status != AVAssetExportSessionStatusCompleted) {
                NSLog(@"Export failed: %@", _assetExport.error);
            }
        }];
    }
    
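    // Note (not part of the original answer): -stopRecording finishes asynchronously, so the
    // movie file at outputURL may not be complete when stopVideoRecording returns. A more
    // robust variant moves the merge into the AVCaptureFileOutputRecordingDelegate callback,
    // e.g. by splitting the composition/export code above into a hypothetical -mergeAudioAndVideo:

    - (void)captureOutput:(AVCaptureFileOutput *)captureOutput
        didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
        fromConnections:(NSArray *)connections
        error:(NSError *)error
    {
        if (error) {
            NSLog(@"Movie recording failed: %@", error);
            return;
        }
        outputURL = outputFileURL;
        // [self mergeAudioAndVideo];   // run the composition + export once the file is complete
    }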

    // Finally, play the merged video.

    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
    AVPlayer *player1 = [AVPlayer playerWithPlayerItem:playerItem];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player1];
    [playerLayer setFrame:CGRectMake(0, 0, 320, 480)];
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [[[self view] layer] addSublayer:playerLayer];
    player1.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    [player1 play];
    
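    // Optional (not from the original answer): with AVPlayerActionAtItemEndNone the player simply
    // stays on the last frame. To loop the merged clip, observe the end-of-playback notification
    // and seek back to the start. `playerItemDidReachEnd:` is a hypothetical selector name.

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemDidReachEnd:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:playerItem];

    - (void)playerItemDidReachEnd:(NSNotification *)notification
    {
        AVPlayerItem *item = notification.object;
        [item seekToTime:kCMTimeZero];
    }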
