Question
I am working on an iOS app that uses AVAudioEngine for various things, including recording audio to a file, applying effects to that audio using audio units, and playing back the audio with the effect applied. I also use a tap to write the output to a file; this writes to the file in real time, as the audio plays back.
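Roughly, my tap setup looks like this (a minimal sketch; engine is the AVAudioEngine I set up below, and outputURL is an illustrative file URL, with error handling omitted):

AVAudioFormat *tapFormat = [engine.mainMixerNode outputFormatForBus:0];
NSError *fileError = nil;
AVAudioFile *tapFile = [[AVAudioFile alloc] initForWriting:outputURL
                                                  settings:tapFormat.settings
                                                     error:&fileError];
[engine.mainMixerNode installTapOnBus:0
                           bufferSize:4096
                               format:tapFormat
                                block:^(AVAudioPCMBuffer *buffer, AVAudioTime *when) {
    // Called once per render cycle, in real time, as audio passes through the mixer.
    NSError *writeError = nil;
    [tapFile writeFromBuffer:buffer error:&writeError];
}];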
Is it possible to set up an AVAudioEngine graph that reads from a file, processes the sound with an audio unit, and outputs to a file, but faster than real time (i.e., as fast as the hardware can process it)? The use case would be to output a few minutes of audio with effects applied, and I certainly wouldn't want to wait a few minutes for it to be processed.
Edit: here's the code I'm using to set up the AVAudioEngine graph and play a sound file:
AVAudioEngine* engine = [[AVAudioEngine alloc] init];
AVAudioPlayerNode* player = [[AVAudioPlayerNode alloc] init];
[engine attachNode:player];
self.player = player;
self.engine = engine;

if (!self.distortionEffect) {
    self.distortionEffect = [[AVAudioUnitDistortion alloc] init];
    [self.engine attachNode:self.distortionEffect];
    [self.engine connect:self.player to:self.distortionEffect format:[self.distortionEffect outputFormatForBus:0]];
    AVAudioMixerNode* mixer = [self.engine mainMixerNode];
    [self.engine connect:self.distortionEffect to:mixer format:[mixer outputFormatForBus:0]];
}

[self.distortionEffect loadFactoryPreset:AVAudioUnitDistortionPresetDrumsBitBrush];

NSError* error;
if (![self.engine startAndReturnError:&error]) {
    NSLog(@"error: %@", error);
} else {
    NSURL* fileURL = [[NSBundle mainBundle] URLForResource:@"test2" withExtension:@"mp3"];
    AVAudioFile* file = [[AVAudioFile alloc] initForReading:fileURL error:&error];
    if (error) {
        NSLog(@"error: %@", error);
    } else {
        [self.player scheduleFile:file atTime:nil completionHandler:nil];
        [self.player play];
    }
}
The above code plays the sound in the test2.mp3 file, with the AVAudioUnitDistortionPresetDrumsBitBrush distortion preset applied, in real time.
I then modified the above code by adding these lines after [self.player play]:
[self.engine stop];
[self renderAudioAndWriteToFile];
I modified the renderAudioAndWriteToFile method that Vladimir provided so that instead of allocating a new AVAudioEngine in the first line, it simply uses the self.engine that was already set up.
However, in renderAudioAndWriteToFile, it's logging "Can not render audio unit" because AudioUnitRender is returning a status of kAudioUnitErr_Uninitialized.
Edit 2: I should mention that I'm perfectly happy to convert the AVAudioEngine code I posted to use the C APIs if that would make things easier. However, I would want the code to produce the same output as the AVAudioEngine code (including the use of the factory preset shown above).
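For what it's worth, I'd expect setting the factory preset through the C API to look roughly like this (a sketch only: distortionUnit is a hypothetical AudioUnit of type kAudioUnitType_Effect / kAudioUnitSubType_Distortion, and I'm assuming the AVAudioUnitDistortionPreset enum values map to the underlying factory preset numbers):

AUPreset preset;
preset.presetNumber = 0;    // assumption: AVAudioUnitDistortionPresetDrumsBitBrush == 0
preset.presetName = NULL;
OSStatus presetStatus = AudioUnitSetProperty(distortionUnit,
                                             kAudioUnitProperty_PresentPreset,
                                             kAudioUnitScope_Global,
                                             0,
                                             &preset,
                                             sizeof(preset));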
Answer 1:
- Configure your engine and player node.
- Call the play method on your player node.
- Pause your engine. Pausing, rather than stopping, keeps the engine's audio units initialized; stopping the engine is what causes AudioUnitRender to return kAudioUnitErr_Uninitialized, as in your edit.
- Get the audio unit from your AVAudioOutputNode (audioEngine.outputNode) via its audioUnit property.
- Render from the audio unit with AudioUnitRender in a loop, and write the audio buffer list to a file with Extended Audio File Services.
Example:
Audio engine configuration
- (void)configureAudioEngine {
    self.engine = [[AVAudioEngine alloc] init];
    self.playerNode = [[AVAudioPlayerNode alloc] init];
    [self.engine attachNode:self.playerNode];

    AVAudioUnitDistortion *distortionEffect = [[AVAudioUnitDistortion alloc] init];
    [self.engine attachNode:distortionEffect];
    [self.engine connect:self.playerNode to:distortionEffect format:[distortionEffect outputFormatForBus:0]];

    self.mixer = [self.engine mainMixerNode];
    [self.engine connect:distortionEffect to:self.mixer format:[self.mixer outputFormatForBus:0]];
    [distortionEffect loadFactoryPreset:AVAudioUnitDistortionPresetDrumsBitBrush];

    NSError* error;
    if (![self.engine startAndReturnError:&error])
        NSLog(@"Can't start engine: %@", error);
    else
        [self scheduleFileToPlay];
}
- (void)scheduleFileToPlay {
    NSError* error;
    NSURL *fileURL = [[NSBundle mainBundle] URLForResource:@"filename" withExtension:@"m4a"];
    self.file = [[AVAudioFile alloc] initForReading:fileURL error:&error];
    if (self.file)
        [self.playerNode scheduleFile:self.file atTime:nil completionHandler:nil];
    else
        NSLog(@"Can't read file: %@", error);
}
Rendering methods
- (void)renderAudioAndWriteToFile {
    [self.playerNode play];
    // Pause (don't stop) so the engine's audio units stay initialized.
    [self.engine pause];

    AVAudioOutputNode *outputNode = self.engine.outputNode;
    AudioStreamBasicDescription const *audioDescription = [outputNode outputFormatForBus:0].streamDescription;
    NSString *path = [self filePath];
    ExtAudioFileRef audioFile = [self createAndSetupExtAudioFileWithASBD:audioDescription andFilePath:path];
    if (!audioFile)
        return;

    AVURLAsset *asset = [AVURLAsset assetWithURL:self.file.url];
    NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
    NSUInteger lengthInFrames = duration * audioDescription->mSampleRate;

    const NSUInteger kBufferLength = 4096;
    AudioBufferList *bufferList = AEAllocateAndInitAudioBufferList(*audioDescription, kBufferLength);
    AudioTimeStamp timeStamp;
    memset(&timeStamp, 0, sizeof(timeStamp));
    timeStamp.mFlags = kAudioTimeStampSampleTimeValid;

    // Pull audio from the output unit one buffer at a time, as fast as it renders.
    OSStatus status = noErr;
    for (NSUInteger i = kBufferLength; i < lengthInFrames; i += kBufferLength) {
        status = [self renderToBufferList:bufferList writeToFile:audioFile bufferLength:kBufferLength timeStamp:&timeStamp];
        if (status != noErr)
            break;
    }

    // Render whatever partial buffer remains at the end of the file.
    if (status == noErr && timeStamp.mSampleTime < lengthInFrames) {
        NSUInteger restBufferLength = (NSUInteger)(lengthInFrames - timeStamp.mSampleTime);
        AudioBufferList *restBufferList = AEAllocateAndInitAudioBufferList(*audioDescription, restBufferLength);
        status = [self renderToBufferList:restBufferList writeToFile:audioFile bufferLength:restBufferLength timeStamp:&timeStamp];
        AEFreeAudioBufferList(restBufferList);
    }

    AEFreeAudioBufferList(bufferList);
    ExtAudioFileDispose(audioFile);

    if (status != noErr)
        NSLog(@"An error has occurred");
    else
        NSLog(@"Finished writing to file at path: %@", path);
}
- (NSString *)filePath {
    NSArray *documentsFolders =
        NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *fileName = [NSString stringWithFormat:@"%@.m4a", [[NSUUID UUID] UUIDString]];
    NSString *path = [documentsFolders[0] stringByAppendingPathComponent:fileName];
    return path;
}
- (ExtAudioFileRef)createAndSetupExtAudioFileWithASBD:(AudioStreamBasicDescription const *)audioDescription
                                          andFilePath:(NSString *)path {
    AudioStreamBasicDescription destinationFormat;
    memset(&destinationFormat, 0, sizeof(destinationFormat));
    destinationFormat.mChannelsPerFrame = audioDescription->mChannelsPerFrame;
    destinationFormat.mSampleRate = audioDescription->mSampleRate;
    destinationFormat.mFormatID = kAudioFormatMPEG4AAC;

    ExtAudioFileRef audioFile;
    OSStatus status = ExtAudioFileCreateWithURL(
        (__bridge CFURLRef)[NSURL fileURLWithPath:path],
        kAudioFileM4AType,
        &destinationFormat,
        NULL,
        kAudioFileFlags_EraseFile,
        &audioFile
    );
    if (status != noErr) {
        NSLog(@"Can not create ext audio file");
        return NULL;
    }

    UInt32 codecManufacturer = kAppleSoftwareAudioCodecManufacturer;
    status = ExtAudioFileSetProperty(
        audioFile, kExtAudioFileProperty_CodecManufacturer, sizeof(UInt32), &codecManufacturer
    );
    status = ExtAudioFileSetProperty(
        audioFile, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), audioDescription
    );
    // An initial async write with 0 frames and a NULL buffer primes the
    // asynchronous write mechanism.
    status = ExtAudioFileWriteAsync(audioFile, 0, NULL);
    if (status != noErr) {
        NSLog(@"Can not setup ext audio file");
        return NULL;
    }
    return audioFile;
}
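Note the split between the two formats here: the destination format written to disk is AAC, while kExtAudioFileProperty_ClientDataFormat is set to the engine's output format (linear PCM), so Extended Audio File Services converts PCM to AAC internally as each buffer is written.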
- (OSStatus)renderToBufferList:(AudioBufferList *)bufferList
                   writeToFile:(ExtAudioFileRef)audioFile
                  bufferLength:(NSUInteger)bufferLength
                     timeStamp:(AudioTimeStamp *)timeStamp {
    [self clearBufferList:bufferList];
    AudioUnit outputUnit = self.engine.outputNode.audioUnit;
    OSStatus status = AudioUnitRender(outputUnit, 0, timeStamp, 0, (UInt32)bufferLength, bufferList);
    if (status != noErr) {
        NSLog(@"Can not render audio unit");
        return status;
    }
    // No hardware clock is driving the render, so advance the sample time manually.
    timeStamp->mSampleTime += bufferLength;
    status = ExtAudioFileWrite(audioFile, (UInt32)bufferLength, bufferList);
    if (status != noErr)
        NSLog(@"Can not write audio to file");
    return status;
}
- (void)clearBufferList:(AudioBufferList *)bufferList {
    for (int bufferIndex = 0; bufferIndex < bufferList->mNumberBuffers; bufferIndex++) {
        memset(bufferList->mBuffers[bufferIndex].mData, 0, bufferList->mBuffers[bufferIndex].mDataByteSize);
    }
}
I used some helper functions from TheAmazingAudioEngine framework:
AudioBufferList *AEAllocateAndInitAudioBufferList(AudioStreamBasicDescription audioFormat, int frameCount) {
    int numberOfBuffers = audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved ? audioFormat.mChannelsPerFrame : 1;
    int channelsPerBuffer = audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved ? 1 : audioFormat.mChannelsPerFrame;
    int bytesPerBuffer = audioFormat.mBytesPerFrame * frameCount;
    // AudioBufferList is a variable-length struct declaring one AudioBuffer,
    // so allocate extra space for the remaining (numberOfBuffers - 1) buffers.
    AudioBufferList *audio = malloc(sizeof(AudioBufferList) + (numberOfBuffers - 1) * sizeof(AudioBuffer));
    if (!audio) {
        return NULL;
    }
    audio->mNumberBuffers = numberOfBuffers;
    for (int i = 0; i < numberOfBuffers; i++) {
        if (bytesPerBuffer > 0) {
            audio->mBuffers[i].mData = calloc(bytesPerBuffer, 1);
            if (!audio->mBuffers[i].mData) {
                for (int j = 0; j < i; j++) free(audio->mBuffers[j].mData);
                free(audio);
                return NULL;
            }
        } else {
            audio->mBuffers[i].mData = NULL;
        }
        audio->mBuffers[i].mDataByteSize = bytesPerBuffer;
        audio->mBuffers[i].mNumberChannels = channelsPerBuffer;
    }
    return audio;
}

void AEFreeAudioBufferList(AudioBufferList *bufferList) {
    for (int i = 0; i < bufferList->mNumberBuffers; i++) {
        if (bufferList->mBuffers[i].mData) free(bufferList->mBuffers[i].mData);
    }
    free(bufferList);
}
Source: https://stackoverflow.com/questions/30679061/can-i-use-avaudioengine-to-read-from-a-file-process-with-an-audio-unit-and-writ