Question
I hope someone can help me. I am new to Objective-C and OSX, and I am trying to play audio data that I receive over a socket through my audio queue. I found this answer, https://stackoverflow.com/a/30318859/4274654, which partly addresses my issue by using a circular buffer.
However, when I run my project it returns an error (OSStatus) -10865, which is why the code logs "Error enabling AudioUnit output bus". The failing call is:
status = AudioUnitSetProperty(_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &one, sizeof(one));
Here is my code:
Test.h
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import "TPCircularBuffer.h"
@interface Test : Communicator
@property (nonatomic) AudioComponentInstance audioUnit;
@property (nonatomic) TPCircularBuffer circularBuffer;
-(TPCircularBuffer *) outputShouldUseCircularBuffer;
-(void) start;
@end
Test.m
#import "Test.h"
#define kOutputBus 0
#define kInputBus 1
@implementation Test {
    BOOL stopped;
}
static OSStatus OutputRenderCallback(void *inRefCon,
                                     AudioUnitRenderActionFlags *ioActionFlags,
                                     const AudioTimeStamp *inTimeStamp,
                                     UInt32 inBusNumber,
                                     UInt32 inNumberFrames,
                                     AudioBufferList *ioData) {

    Test *output = (__bridge Test *)inRefCon;

    TPCircularBuffer *circularBuffer = [output outputShouldUseCircularBuffer];
    if (!circularBuffer) {
        SInt16 *left = (SInt16 *)ioData->mBuffers[0].mData;
        for (int i = 0; i < inNumberFrames; i++) {
            left[i] = 0;
        }
        return noErr;
    }

    int32_t bytesToCopy = ioData->mBuffers[0].mDataByteSize;
    SInt16 *outputBuffer = ioData->mBuffers[0].mData;

    uint32_t availableBytes;
    SInt16 *sourceBuffer = TPCircularBufferTail(circularBuffer, &availableBytes);

    int32_t amount = MIN(bytesToCopy, availableBytes);
    memcpy(outputBuffer, sourceBuffer, amount);
    TPCircularBufferConsume(circularBuffer, amount);

    return noErr;
}
- (void)start
{
    [self circularBuffer:&_circularBuffer withSize:24576*5];
    stopped = NO;
    [self setupAudioUnit];
    // [super setup:@"http://localhost" port:5321];
}
- (void)setupAudioUnit
{
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);

    OSStatus status;
    status = AudioComponentInstanceNew(comp, &_audioUnit);
    if (status != noErr)
    {
        NSLog(@"Error creating AudioUnit instance");
    }

    // Enable input and output on AURemoteIO
    // Input is enabled on the input scope of the input element
    // Output is enabled on the output scope of the output element
    UInt32 one = 1;
    status = AudioUnitSetProperty(_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &one, sizeof(one));
    if (status != noErr)
    {
        NSLog(@"Error enabling AudioUnit output bus");
    }

    // Explicitly set the input and output client formats
    // sample rate = 44100, num channels = 1, format = 16 bit signed integer
    AudioStreamBasicDescription audioFormat = [self getAudioDescription];
    status = AudioUnitSetProperty(_audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &audioFormat, sizeof(audioFormat));
    if (status != noErr)
    {
        NSLog(@"Error setting audio format");
    }

    AURenderCallbackStruct renderCallback;
    renderCallback.inputProc = OutputRenderCallback;
    renderCallback.inputProcRefCon = (__bridge void *)(self);

    status = AudioUnitSetProperty(_audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &renderCallback, sizeof(renderCallback));
    if (status != noErr)
    {
        NSLog(@"Error setting rendering callback");
    }

    // Initialize the AURemoteIO instance
    status = AudioUnitInitialize(_audioUnit);
    if (status != noErr)
    {
        NSLog(@"Error initializing audio unit");
    }
}
- (AudioStreamBasicDescription)getAudioDescription {
    AudioStreamBasicDescription audioDescription = {0};
    audioDescription.mFormatID = kAudioFormatLinearPCM;
    audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
    audioDescription.mChannelsPerFrame = 1;
    audioDescription.mBytesPerPacket = sizeof(SInt16) * audioDescription.mChannelsPerFrame;
    audioDescription.mFramesPerPacket = 1;
    audioDescription.mBytesPerFrame = sizeof(SInt16) * audioDescription.mChannelsPerFrame;
    audioDescription.mBitsPerChannel = 8 * sizeof(SInt16);
    audioDescription.mSampleRate = 44100.0;
    return audioDescription;
}
- (void)circularBuffer:(TPCircularBuffer *)circularBuffer withSize:(int)size {
    TPCircularBufferInit(circularBuffer, size);
}

- (void)appendDataToCircularBuffer:(TPCircularBuffer *)circularBuffer
               fromAudioBufferList:(AudioBufferList *)audioBufferList {
    TPCircularBufferProduceBytes(circularBuffer,
                                 audioBufferList->mBuffers[0].mData,
                                 audioBufferList->mBuffers[0].mDataByteSize);
}

- (void)freeCircularBuffer:(TPCircularBuffer *)circularBuffer {
    TPCircularBufferClear(circularBuffer);
    TPCircularBufferCleanup(circularBuffer);
}

- (TPCircularBuffer *)outputShouldUseCircularBuffer
{
    return &_circularBuffer;
}
- (void)stop
{
    OSStatus status = AudioOutputUnitStop(_audioUnit);
    if (status != noErr)
    {
        NSLog(@"Error stopping audio unit");
    }

    TPCircularBufferClear(&_circularBuffer);
    _audioUnit = nil;
    stopped = YES;
}
- (void)stream:(NSStream *)stream handleEvent:(NSStreamEvent)event {
    switch (event) {
        case NSStreamEventOpenCompleted:
            NSLog(@"Stream opened");
            break;
        case NSStreamEventHasBytesAvailable:
            if (stream == [super inputStream]) {
                NSLog(@"NSStreamEventHasBytesAvailable");
                uint8_t buffer[1024];
                NSUInteger len;
                while ([[super inputStream] hasBytesAvailable]) {
                    len = [[super inputStream] read:buffer maxLength:sizeof(buffer)];
                    if (len > 0) {
                        // converting buffer to byte data
                        NSString *output = [[NSString alloc] initWithBytes:buffer length:len encoding:NSASCIIStringEncoding];
                        if (nil != output) {
                            //NSLog(@"server overideddddd said: %@", output);
                        }
                        NSData *data0 = [[NSData alloc] initWithBytes:buffer length:len];
                        if (nil != data0) {
                            SInt16 *byteData = (SInt16 *)malloc(len);
                            memcpy(byteData, [data0 bytes], len);
                            double sum = 0.0;
                            for (int i = 0; i < len/2; i++) {
                                sum += byteData[i] * byteData[i];
                            }
                            Byte *soundData = (Byte *)malloc(len);
                            memcpy(soundData, [data0 bytes], len);
                            if (soundData)
                            {
                                AudioBufferList *theDataBuffer = (AudioBufferList *)malloc(sizeof(AudioBufferList) * 1);
                                theDataBuffer->mNumberBuffers = 1;
                                theDataBuffer->mBuffers[0].mDataByteSize = (UInt32)len;
                                theDataBuffer->mBuffers[0].mNumberChannels = 1;
                                theDataBuffer->mBuffers[0].mData = (SInt16 *)soundData;
                                NSLog(@"soundData here");
                                [self appendDataToCircularBuffer:&_circularBuffer fromAudioBufferList:theDataBuffer];
                            }
                        }
                    }
                }
            }
            break;
        case NSStreamEventErrorOccurred:
            NSLog(@"Can't connect to server");
            break;
        case NSStreamEventEndEncountered:
            [stream close];
            [stream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
            break;
        default:
            NSLog(@"Unknown event");
    }
    [super stream:stream handleEvent:event];
}
@end
I would highly appreciate it if anyone has an example of playing buffers received from a socket server through an audio queue, so that I can listen to the sound as it arrives from the server.
Thanks
Answer 1:
Your code asks for a kAudioUnitSubType_VoiceProcessingIO audio unit, but kAudioUnitSubType_RemoteIO would be a more suitable iOS audio unit for simply playing back buffers of audio samples.
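A minimal sketch of that change, assuming the rest of your setupAudioUnit stays as it is (note that kAudioUnitSubType_RemoteIO exists only on iOS; on macOS the analogous output unit would be kAudioUnitSubType_DefaultOutput):

    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    // RemoteIO instead of VoiceProcessingIO (kAudioUnitSubType_DefaultOutput on macOS)
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    OSStatus status = AudioComponentInstanceNew(comp, &_audioUnit);

On RemoteIO, output on bus 0 is enabled by default, so the kAudioOutputUnitProperty_EnableIO call that currently returns -10865 should not be needed for playback-only use.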
Also, your code does not appear to select an appropriate audio session category and activate it before playing audio. See Apple's documentation on doing this: https://developer.apple.com/library/content/documentation/Audio/Conceptual/AudioSessionProgrammingGuide/Introduction/Introduction.html
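For example, a minimal sketch of that session setup (assuming AVFoundation and playback-only use; run this before starting the audio unit):

    #import <AVFoundation/AVFoundation.h>

    NSError *error = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];

    // Playback is enough for output-only audio; PlayAndRecord would be needed if microphone input were added
    if (![session setCategory:AVAudioSessionCategoryPlayback error:&error]) {
        NSLog(@"Error setting audio session category: %@", error);
    }
    if (![session setActive:YES error:&error]) {
        NSLog(@"Error activating audio session: %@", error);
    }

AVAudioSession is an iOS API; on macOS there is no audio session to configure.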
Source: https://stackoverflow.com/questions/50045009/how-to-play-pcm-audio-buffer-from-a-socket-server-using-audio-unit-circular-buff