Question
I'm trying to adapt the iOS broadcast extension feature to record the video instead of live streaming it.
It seems possible, because you can get the pixel buffers in the processSampleBuffer:withType: method.
I wrote the code below, but it fails when appending the buffer.
I'm familiar with neither AVAssetWriter nor App Extension programming, so I can't figure out what's wrong here.
Am I doing something we're not supposed to do in an extension? Or is my usage of AVAssetWriter wrong?
Any idea is helpful. Thanks!
//
// SampleHandler.m
// The main class of the App Extension
//
#import "SampleHandler.h"
#import "VideoExporter.h"
#import <AVFoundation/AVFoundation.h>
@implementation SampleHandler {
    VideoExporter *exporter;
    NSDate *startDate;
}
- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *,NSObject *> *)setupInfo {
    // User has requested to start the broadcast. Setup info from the UI extension can be supplied, but it is optional.
    [self setupWriter];
}
- (void)setupWriter {
    NSFileManager *fm = [NSFileManager defaultManager];
    // Set the video path inside the shared app-group container
    NSURL *containerURL = [fm containerURLForSecurityApplicationGroupIdentifier:@"group.com.mycompany"];
    NSURL *libraryURL = [containerURL URLByAppendingPathComponent:@"Library" isDirectory:YES];
    NSURL *cachesURL = [libraryURL URLByAppendingPathComponent:@"Caches" isDirectory:YES];
    NSURL *outVideoURL = [cachesURL URLByAppendingPathComponent:@"output.mov"];
    if ([fm fileExistsAtPath:[outVideoURL path]]) {
        [fm removeItemAtPath:[outVideoURL path] error:nil];
    }
    exporter = [[VideoExporter alloc] initWithOutputURL:outVideoURL size:CGSizeMake(1280, 720) frameRate:30];
    exporter.delegate = self;
    [exporter beginExport];
    startDate = [NSDate date];
}
- (void)broadcastPaused {
    // User has requested to pause the broadcast. Samples will stop being delivered.
}
- (void)broadcastResumed {
    // User has requested to resume the broadcast. Sample delivery will resume.
}
- (void)broadcastFinished {
    // User has requested to finish the broadcast.
    NSLog(@"User requested finish writing");
    [exporter finishWriting];
}
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Handle video sample buffer
            [exporter addCMSampleBuffer:sampleBuffer];
            break;
        case RPSampleBufferTypeAudioApp:
            // Handle audio sample buffer for app audio
            break;
        case RPSampleBufferTypeAudioMic:
            // Handle audio sample buffer for mic audio
            break;
        default:
            break;
    }
}
@end
//
// VideoExporter.m
// Helper class to write the video
//
#import "VideoExporter.h"
@implementation VideoExporter
@synthesize width, height;
@synthesize framesPerSecond;
@synthesize outputURL;
@synthesize delegate;
- (instancetype)initWithOutputURL:(NSURL *)aURL size:(CGSize)size frameRate:(uint64_t)fps {
    if ((self = [super init])) {
        width = (int)round(size.width);
        height = (int)round(size.height);
        framesPerSecond = fps;
        outputURL = aURL;
    }
    return self;
}
- (void)beginExport {
    NSError *error = nil;
    writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                       fileType:AVFileTypeQuickTimeMovie
                                          error:&error];
    NSAssert(writer != nil, @"Writer should not be nil");
    NSDictionary *outSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecTypeH264, AVVideoCodecKey,
                                 [NSNumber numberWithInt:width], AVVideoWidthKey,
                                 [NSNumber numberWithInt:height], AVVideoHeightKey, nil];
    writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:outSettings];
    NSDictionary *pixelAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey, nil];
    adaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput sourcePixelBufferAttributes:pixelAttributes];
    [writer addInput:writerInput];
    BOOL started = [writer startWriting];
    NSAssert(started, @"Should start writing!");
    [writer startSessionAtSourceTime:kCMTimeZero];
}
- (void)addCMSampleBuffer:(CMSampleBufferRef)buf {
    if (writer.status == AVAssetWriterStatusUnknown) {
        NSLog(@"Writer status unknown!");
    }
    [self appendCMSampleBuffer:buf];
}
- (void)finishWriting {
    [writerInput markAsFinished];
    // Block until the asynchronous finish completes; the extension process may be
    // suspended once broadcastFinished returns, so wait here synchronously.
    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
    int64_t longDuration = 1000;
    CMTime cmTime = CMTimeMake(longDuration, 1); // end the session well past the last frame
    [writer endSessionAtSourceTime:cmTime];
    [writer finishWritingWithCompletionHandler:^{
        // Call delegate method here
        dispatch_semaphore_signal(semaphore);
    }];
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
}
#pragma mark - Private -
- (void)appendCMSampleBuffer:(CMSampleBufferRef)bufferRef {
    if (![writerInput isReadyForMoreMediaData]) {
        NSLog(@"WriterInput not ready! status = %ld, error=%@", (long)writer.status, writer.error);
        return;
    }
    // The adaptor expects a CVPixelBufferRef, so extract the image buffer from
    // the sample buffer rather than casting the CMSampleBufferRef itself.
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(bufferRef);
    BOOL success = [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMake(frameCount++, (int32_t)framesPerSecond)];
    if (success == NO) {
        NSLog(@"Append buffer failed! status = %ld, error=%@", (long)writer.status, writer.error); // Always gets here
    }
}
@end
Answer 1:
I tested this behavior on iOS 13.7 (17H35).
It seems that AVAssetWriter requires Foreground Privileges, which the extension doesn't have. (source)
The startWriting method returns false, and assetWriter.error equals:
Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={
NSLocalizedFailureReason=An unknown error occurred (-17508),
NSLocalizedDescription=The operation could not be completed,
NSUnderlyingError=0x282a80120 {
Error Domain=NSOSStatusErrorDomain Code=-17508 "(null)"
}
}
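To see this failure for yourself, you can replace the NSAssert in the question's beginExport with an explicit check and log the writer's error. A minimal sketch, assuming writer is the AVAssetWriter instance from the question's code:

BOOL started = [writer startWriting];
if (!started) {
    // Inside a broadcast extension this logs the -11800 / -17508
    // AVFoundationErrorDomain error shown above.
    NSLog(@"startWriting failed: status = %ld, error = %@",
          (long)writer.status, writer.error);
}

Since NSAssert can be compiled out of release builds, an explicit check like this is also the more reliable way to surface writer errors.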
Source: https://stackoverflow.com/questions/45583928/recording-video-with-ios-broadcasting-extension