Question
I am trying to capture video and audio from the iPhone camera and write them out to a video file with AVAssetWriter, but the output file only contains the first video frame together with the audio. I have inspected the AVCaptureSession delegate method,
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
and it seems the delegate receives only one video sample buffer at the very beginning, and from then on receives audio sample buffers only, as in the following log:
- Video SampleBuffer captured!
- Audio SampleBuffer captured!
- Audio SampleBuffer captured!
- Audio SampleBuffer captured!
Here is the code where I set up the audio/video inputs and outputs:
// Init video and audio capture device components
NSError *error = nil;
// Setup the video input
videoDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];
// Create a device input with the device and add it to the session.
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
// Setup the video output
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
videoOutput.alwaysDiscardsLateVideoFrames = NO;
videoOutput.minFrameDuration = CMTimeMake(20, 600);
videoOutput.videoSettings =
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
// Setup the audio input
audioDevice = [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error ];
// Setup the audio output
audioOutput = [[AVCaptureAudioDataOutput alloc] init];
// Create the session
captureSession = [[AVCaptureSession alloc] init];
[captureSession addInput:videoInput];
[captureSession addInput:audioInput];
[captureSession addOutput:videoOutput];
[captureSession addOutput:audioOutput];
captureSession.sessionPreset = AVCaptureSessionPreset640x480;
// Setup the queue
dispatch_queue_t videoBufferQueue = dispatch_queue_create("videoBufferQueue", NULL);
// dispatch_queue_t audioBufferQueue = dispatch_get_global_queue("audioBufferQueue",0);
[videoOutput setSampleBufferDelegate:self queue:videoBufferQueue];
[audioOutput setSampleBufferDelegate:self queue:videoBufferQueue];
dispatch_release(videoBufferQueue);
// dispatch_release(audioBufferQueue);
Here is the code where I set up the AVAssetWriter and the AVAssetWriterInputs:
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
// Add video input
NSDictionary *videoCompressionProps = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithDouble:128.0*1024.0], AVVideoAverageBitRateKey,
nil ];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:480], AVVideoWidthKey,
[NSNumber numberWithInt:320], AVVideoHeightKey,
//videoCompressionProps, AVVideoCompressionPropertiesKey,
nil];
videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings];
NSParameterAssert(videoWriterInput);
videoWriterInput.expectsMediaDataInRealTime = YES;
// Add the audio input
AudioChannelLayout acl;
bzero( &acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary* audioOutputSettings = nil;
audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
[ NSNumber numberWithInt:kAudioFormatAppleLossless ], AVFormatIDKey,
[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
[ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
[ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
[ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
nil ];
audioWriterInput = [AVAssetWriterInput
assetWriterInputWithMediaType: AVMediaTypeAudio
outputSettings: audioOutputSettings ];
audioWriterInput.expectsMediaDataInRealTime = YES;
NSError *error = nil;
NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:videoURL];
unlink([betaCompressionDirectory UTF8String]);
videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
fileType:AVFileTypeQuickTimeMovie
error:&error];
if(error)
NSLog(@"error = %@", [error localizedDescription]);
// add input
[videoWriter addInput:videoWriterInput];
[videoWriter addInput:audioWriterInput];
The code that starts the capture:
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
//[NSNumber numberWithInt:kCVPixelFormatType_32ARGB],
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange],
kCVPixelBufferPixelFormatTypeKey, nil];
adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary] retain];
NSLog(@"Adaptor init finished. Going to start capture Session...");
/*We start the capture*/
[self.captureSession startRunning];
Code from the AVCaptureSession delegate's captureOutput method:
lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if( !CMSampleBufferDataIsReady(sampleBuffer) )
{
    NSLog( @"sample buffer is not ready. Skipping sample" );
    return;
}
if( isRecording == YES )
{
    switch (videoWriter.status) {
        case AVAssetWriterStatusUnknown:
            NSLog(@"First time execute");
            if (CMTimeCompare(lastSampleTime, kCMTimeZero) == 0) {
                lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            }
            [videoWriter startWriting];
            [videoWriter startSessionAtSourceTime:lastSampleTime];
            // Break if not ready, otherwise fall through.
            if (videoWriter.status != AVAssetWriterStatusWriting) {
                break;
            }
        case AVAssetWriterStatusWriting:
            if( captureOutput == audioOutput ) {
                NSLog(@"Audio Buffer capped!");
                if( ![audioWriterInput isReadyForMoreMediaData] ) {
                    break;
                }
                @try {
                    if( ![audioWriterInput appendSampleBuffer:sampleBuffer] ) {
                        NSLog(@"Audio Writing Error");
                    } else {
                        [NSThread sleepForTimeInterval:0.03];
                    }
                }
                @catch (NSException *e) {
                    NSLog(@"Audio Exception: %@", [e reason]);
                }
            }
            else if( captureOutput == videoOutput ) {
                NSLog(@"Video Buffer capped!");
                if( ![videoWriterInput isReadyForMoreMediaData] ) {
                    break;
                }
                @try {
                    CVImageBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
                    CMTime frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                    if (buffer)
                    {
                        if ([videoWriterInput isReadyForMoreMediaData])
                            if (![adaptor appendPixelBuffer:buffer withPresentationTime:frameTime]) //CMTimeMake(frame, fps)
                                NSLog(@"FAIL");
                            else {
                                [NSThread sleepForTimeInterval:0.03];
                                // NSLog(@"Success:%d, Time diff with Zero: ", frame);
                                // CMTimeShow(frameTime);
                            }
                        else
                            NSLog(@"video writer input not ready for more data, skipping frame");
                    }
                    frame++;
                }
                @catch (NSException *e) {
                    NSLog(@"Video Exception Exception: %@", [e reason]);
                }
            }
            break;
        case AVAssetWriterStatusCompleted:
            return;
        case AVAssetWriterStatusFailed:
            NSLog(@"Critical Error Writing Queues");
            // bufferWriter->writer_failed = YES;
            // _broadcastError = YES;
            return;
        case AVAssetWriterStatusCancelled:
            break;
        default:
            break;
    }
}
Answer 1:
The capture session stops delivering audio sample buffers when handling the video output takes too much time; that is what happened in my case. Video and audio output buffers arrive on the same queue, so you have to finish handling both before the next buffer comes in.
Most likely this line is the reason: [NSThread sleepForTimeInterval:0.03];
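A minimal sketch of that change, assuming everything else in the question stays as posted: the AVAssetWriterStatusWriting branch just appends and returns, with the sleepForTimeInterval: calls removed, so the shared delegate queue is free to hand over the next video or audio buffer.
case AVAssetWriterStatusWriting:
    if (captureOutput == audioOutput && [audioWriterInput isReadyForMoreMediaData]) {
        // Append the audio buffer and return right away; never sleep on the delegate queue.
        if (![audioWriterInput appendSampleBuffer:sampleBuffer]) {
            NSLog(@"Audio Writing Error");
        }
    }
    else if (captureOutput == videoOutput && [videoWriterInput isReadyForMoreMediaData]) {
        CVImageBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CMTime frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        // Append the video frame and return right away as well.
        if (buffer && ![adaptor appendPixelBuffer:buffer withPresentationTime:frameTime]) {
            NSLog(@"FAIL");
        }
        frame++;
    }
    break;
If a single serial queue still cannot keep up with both appends, the commented-out audioBufferQueue in the question hints at the other option: give each data output its own serial queue via setSampleBufferDelegate:queue:, at the cost of having to make sure startWriting and startSessionAtSourceTime: run only once.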
Source: https://stackoverflow.com/questions/9257052/avcapturesession-only-got-video-buffer