How to play NSData audio buffers received from a WebSocket?

寵の児 提交于 2019-12-04 06:02:20

问题


I'm creating a call app in Objective-C. My problem is with sending and receiving the audio stream. I record audio into buffers, convert each buffer to NSData, and send it (base64-encoded) over SocketRocket — that part works fine. But after receiving the NSData back from the server, I don't know how to play those audio buffers.

my code:

viewController.h

#import <UIKit/UIKit.h>
#import <AudioToolbox/AudioQueue.h>
#import <AudioToolbox/AudioFile.h>
#import <SocketRocket/SocketRocket.h>
#import <AVFoundation/AVFoundation.h>
#import <AVKit/AVKit.h>

#define NUM_BUFFERS 3
#define SAMPLERATE 16000

//Struct defining recording state, shared between the controller and the
//C input callback (passed as the inUserData pointer).
typedef struct {
    AudioStreamBasicDescription dataFormat;            // PCM format being captured
    AudioQueueRef               queue;                 // input (recording) audio queue
    AudioQueueBufferRef         buffers[NUM_BUFFERS];  // rotating capture buffers
    AudioFileID                 audioFile;             // AIFF file the packets are written to
    SInt64                      currentPacket;         // next packet index to write
    bool                        recording;             // cleared to make the callback stop
} RecordState;


//Struct defining playback state, shared between the controller and the
//C output callback (passed as the inUserData pointer).
typedef struct {
    AudioStreamBasicDescription dataFormat;            // PCM format being played
    AudioQueueRef               queue;                 // output (playback) audio queue
    AudioQueueBufferRef         buffers[NUM_BUFFERS];  // rotating playback buffers
    AudioFileID                 audioFile;             // audio file packets are read from
    SInt64                      currentPacket;         // next packet index to read
    bool                        playing;               // cleared when playback finishes
} PlayState;

// Owns one AudioQueue-based recorder and one player, and streams the
// recorded PCM (base64-encoded) to a websocket server via SocketRocket.
@interface ViewController : UIViewController <SRWebSocketDelegate> {
    RecordState recordState;  // capture-side queue state (see RecordState)
    PlayState playState;      // playback-side queue state (see PlayState)
    CFURLRef fileURL;         // on-disk AIFF file the recorder writes to
}

// The SocketRocket connection used to ship base64 audio chunks.
@property (nonatomic, strong) SRWebSocket * webSocket;

@property (weak, nonatomic) IBOutlet UITextView *textView;
@property (weak, nonatomic) IBOutlet UIImageView *imageView;



@end

viewController.m

#import "ViewController.h"
#import <MediaPlayer/MediaPlayer.h>

id thisClass;

//Declare C callback functions handed to Audio Queue Services
//(definitions are below, inside this file).

// Called by the input queue each time a capture buffer fills.
void AudioInputCallback(void * inUserData,  // Custom audio metadata (a RecordState *)
                        AudioQueueRef inAQ,
                        AudioQueueBufferRef inBuffer,
                        const AudioTimeStamp * inStartTime,
                        UInt32 inNumberPacketDescriptions,  // FIX: was the typo "isNumber..."; now matches the definition
                        const AudioStreamPacketDescription * inPacketDescs);

// Called by the output queue each time it needs a buffer refilled.
void AudioOutputCallback(void * inUserData,  // a PlayState *
                         AudioQueueRef outAQ,
                         AudioQueueBufferRef outBuffer);


@interface ViewController ()



@end

@implementation ViewController{
    // NOTE(review): documentsDirectory is never assigned or read anywhere in
    // this file; it appears to be dead state that could be removed.
    NSString  *documentsDirectory ;

}

// NOTE(review): explicit @synthesize is unnecessary under modern clang
// auto-synthesis; kept as-is (it backs each property with an ivar of the
// same name rather than the default underscore-prefixed one).
@synthesize webSocket;
@synthesize textView;
@synthesize imageView;



// Takes a filled buffer, appends it to the AIFF file, forwards the raw
// bytes to the websocket (base64-encoded), and re-enqueues the buffer so
// the queue can fill it again.
void AudioInputCallback(void * inUserData,
                        AudioQueueRef inAQ,
                        AudioQueueBufferRef inBuffer,
                        const AudioTimeStamp * inStartTime,
                        UInt32 inNumberPacketDescriptions,
                        const AudioStreamPacketDescription * inPacketDescs)
{
    RecordState * recordState = (RecordState*)inUserData;
    if (!recordState->recording)
    {
        // BUG FIX: the original fell through and kept writing/sending even
        // after recording had been stopped. Bail out without re-enqueuing.
        printf("Not recording, returning\n");
        return;
    }

    printf("Writing buffer %lld\n", recordState->currentPacket);
    OSStatus status = AudioFileWritePackets(recordState->audioFile,
                                            false,
                                            inBuffer->mAudioDataByteSize,
                                            inPacketDescs,
                                            recordState->currentPacket,
                                            &inNumberPacketDescriptions,
                                            inBuffer->mAudioData);

    if (status == 0)
    {
        recordState->currentPacket += inNumberPacketDescriptions;

        // BUG FIX: the original used mAudioDataByteSize * NUM_BUFFERS as the
        // length, reading past the end of this buffer's storage. Only the
        // bytes captured in *this* buffer are valid.
        NSData * audioData = [NSData dataWithBytes:inBuffer->mAudioData
                                            length:inBuffer->mAudioDataByteSize];

        NSString *base64Encoded = [audioData base64EncodedStringWithOptions:0];

        // thisClass is the ViewController instance (set in viewDidLoad).
        [thisClass sendAudioToSocketAsData:base64Encoded];
    }

    // Hand the drained buffer back to the queue for reuse.
    AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}


// Fills an empty buffer with the next run of packets from the open audio
// file and re-enqueues it; when the file is exhausted the queue is stopped
// and the buffer released.
void AudioOutputCallback(void * inUserData,
                         AudioQueueRef outAQ,
                         AudioQueueBufferRef outBuffer) {
    PlayState * playState = (PlayState *) inUserData;
    if (!playState->playing) {
        printf("Not playing, returning\n");
        return;
    }

    printf("Queuing buffer %lld for playback\n", playState->currentPacket);

    UInt32 bytesRead = 0;
    UInt32 numPackets = SAMPLERATE * NUM_BUFFERS;

    // BUG FIX: the original passed an *uninitialized*
    // AudioStreamPacketDescription pointer here, which is undefined
    // behavior. Constant-bit-rate linear PCM needs no packet descriptions,
    // so NULL is correct both for the read and for the enqueue below.
    OSStatus status;
    status = AudioFileReadPackets(playState->audioFile,
                                  false,
                                  &bytesRead,
                                  NULL,
                                  playState->currentPacket,
                                  &numPackets,
                                  outBuffer->mAudioData);

    if (numPackets) {
        // Got data: hand the buffer to the hardware and advance the cursor.
        outBuffer->mAudioDataByteSize = bytesRead;
        status = AudioQueueEnqueueBuffer(playState->queue, outBuffer, 0, NULL);
        playState->currentPacket += numPackets;
    } else {
        // End of file: stop playback, close the file, release this buffer.
        if (playState->playing) {
            AudioQueueStop(playState->queue, false);
            AudioFileClose(playState->audioFile);
            playState->playing = false;
        }

        AudioQueueFreeBuffer(playState->queue, outBuffer);
    }
}



/// Fills *format with the format used for both capture and playback:
/// 16 kHz, mono, 16-bit signed integer linear PCM. Samples are big-endian
/// and packed, which matches the AIFF container the recorder writes.
- (void) setupAudioFormat:(AudioStreamBasicDescription *) format {
    AudioStreamBasicDescription description = {0};

    description.mFormatID         = kAudioFormatLinearPCM;
    description.mSampleRate       = SAMPLERATE;
    description.mChannelsPerFrame = 1;
    description.mBitsPerChannel   = 16;
    description.mFramesPerPacket  = 1;
    description.mBytesPerFrame    = 2;   // 1 channel x 16 bits
    description.mBytesPerPacket   = 2;   // 1 frame per packet
    description.mReserved         = 0;
    description.mFormatFlags      = kLinearPCMFormatFlagIsBigEndian
                                  | kLinearPCMFormatFlagIsSignedInteger
                                  | kLinearPCMFormatFlagIsPacked;

    *format = description;
}

- (void)viewDidLoad {
    [super viewDidLoad];

    // Build the on-disk path for the AIFF recording and keep it as a CFURL
    // for the AudioFile APIs.
    char recordingPath[256];
    [self getFilename:recordingPath maxLength:sizeof(recordingPath)];
    fileURL = CFURLCreateFromFileSystemRepresentation(NULL,
                                                      (UInt8 *)recordingPath,
                                                      strlen(recordingPath),
                                                      false);

    // Start idle; the record button flips this on.
    recordState.recording = false;

    // Give the C audio callbacks a way back to this controller.
    thisClass = self;
}

// Creates the input audio queue, primes its buffers, opens the AIFF output
// file, and starts capturing. On any failure the partial setup is torn
// down via stopRecordingInQueue.
- (void) startRecordingInQueue {
    // Describe the capture format (16 kHz mono 16-bit big-endian PCM).
    [self setupAudioFormat:&recordState.dataFormat];

    recordState.currentPacket = 0;

    OSStatus status;

    // AudioInputCallback fires on this run loop whenever a buffer fills.
    status = AudioQueueNewInput(&recordState.dataFormat, AudioInputCallback, &recordState, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &recordState.queue);
    if(status == 0) {
        //Prime recording buffers with empty data
        // NOTE(review): SAMPLERATE is used as the buffer size in BYTES here
        // (16000 bytes = 0.5 s of 16-bit mono audio) — confirm that's intended.
        for (int i=0; i < NUM_BUFFERS; i++) {
            AudioQueueAllocateBuffer(recordState.queue, SAMPLERATE, &recordState.buffers[i]);
            AudioQueueEnqueueBuffer(recordState.queue, recordState.buffers[i], 0, NULL);
        }

        // The AIFF file the input callback appends captured packets to.
        status = AudioFileCreateWithURL(fileURL, kAudioFileAIFFType, &recordState.dataFormat, kAudioFileFlags_EraseFile, &recordState.audioFile);
        if (status == 0) {
            recordState.recording = true;
            status = AudioQueueStart(recordState.queue, NULL);
            if(status == 0) {
                NSLog(@"-----------Recording--------------");
                NSLog(@"File URL : %@", fileURL);
            }
        }
    }

    // Any failure above: tear everything down.
    if (status != 0) {
        [self stopRecordingInQueue];
    }
}

/// Stops capture and releases the queue, its buffers, and the output file.
- (void) stopRecordingInQueue {
    // Clear the flag first so the input callback stops writing/sending.
    recordState.recording = false;

    AudioQueueStop(recordState.queue, true);
    for (int bufferIndex = 0; bufferIndex < NUM_BUFFERS; bufferIndex++) {
        AudioQueueFreeBuffer(recordState.queue, recordState.buffers[bufferIndex]);
    }
    AudioQueueDispose(recordState.queue, true);
    AudioFileClose(recordState.audioFile);

    NSLog(@"---Idle------");
    NSLog(@"File URL : %@", fileURL);
}



/// IBAction wired to the record button.
- (IBAction)startRecordingAudio:(id)sender {
    NSLog(@"starting recording tapped");
    [self startRecordingInQueue];
}
/// IBAction wired to the stop button.
- (IBAction)stopRecordingAudio:(id)sender {
    NSLog(@"stop recording tapped");
    [self stopRecordingInQueue];
}




/// Writes the UTF-8 path of the recording file ("<Documents>/recording.aif")
/// into `buffer`. Returns YES on success, NO if the buffer is too small.
- (BOOL) getFilename:(char *) buffer maxLength:(int) maxBufferLength {

    NSArray * paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString * docDir = [paths objectAtIndex:0];

    // BUG FIX: stringByAppendingString: produced ".../Documentsrecording.aif"
    // (no path separator). stringByAppendingPathComponent: inserts the "/".
    NSString * file = [docDir stringByAppendingPathComponent:@"recording.aif"];
    return [file getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];

}


/// Forwards one base64-encoded audio chunk to the server.
/// NOTE(review): despite the NSData-ish name, the parameter is the base64
/// NSString produced in AudioInputCallback. Messaging a nil webSocket is a
/// safe no-op, so no connection guard is needed here.
- (void) sendAudioToSocketAsData:(NSString *) audioData {
    SRWebSocket *socket = self.webSocket;
    [socket send:audioData];
}

/// IBAction wired to the connect button.
- (IBAction)connectToSocketTapped:(id)sender {
    [self startStreaming];
}

/// Entry point for beginning the streaming session; currently this only
/// opens the websocket connection.
- (void) startStreaming {
    [self connectToSocket];
}


/// Builds an SRWebSocket against the (local test) server, assigns ourselves
/// as delegate, and opens the connection. Delegate callbacks below report
/// open/close/fail and deliver incoming messages.
- (void) connectToSocket {
    // Handshake request. The commented alternatives were used for testing.
    NSURL *serverURL = [NSURL URLWithString:@"ws://localhost:8000"];
    NSURLRequest *request = [NSURLRequest requestWithURL:serverURL];
//    NSURLRequest *request = [NSURLRequest requestWithURL:[NSURL URLWithString:@"ws://demos.kaazing.com/echo"]];
//    NSURLRequest *request = [NSURLRequest requestWithURL:[NSURL URLWithString:@"http://online.attacker-site.com"]];

    // Create the socket, assign the delegate, and open it.
    SRWebSocket *socket = [[SRWebSocket alloc] initWithURLRequest:request];
    socket.delegate = self;
    self.webSocket = socket;

    [socket open];
}


///--------------------------------------
#pragma mark - SRWebSocketDelegate
///--------------------------------------

/// SRWebSocketDelegate: the handshake completed and the socket is ready.
- (void)webSocketDidOpen:(SRWebSocket *)webSocket {
    NSLog(@"Websocket Connected");
}

/// SRWebSocketDelegate: the connection failed. Drop our reference so a
/// later connect attempt builds a fresh socket.
- (void) webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
    NSLog(@":( Websocket Failed With Error %@", error);

    self.webSocket = nil;
}

/// SRWebSocketDelegate: an audio chunk arrived from the server. Decodes the
/// base64 payload back into raw PCM and plays it through an output
/// AudioQueue that is created lazily on the first message.
- (void) webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {

    // Decode the base64 string produced in AudioInputCallback back into
    // raw 16 kHz mono 16-bit PCM bytes.
    NSData *decodedData = [[NSData alloc] initWithBase64EncodedString:message options:0];
    if (decodedData.length == 0) {
        NSLog(@"Received message that is not valid base64 audio, ignoring");
        return;
    }

    // Lazily build an output queue the first time audio arrives, using the
    // same ASBD the recorder uses so the bytes are interpreted identically.
    if (playState.queue == NULL) {
        [self setupAudioFormat:&playState.dataFormat];
        OSStatus newOutputStatus = AudioQueueNewOutput(&playState.dataFormat,
                                                       AudioOutputCallback,
                                                       &playState,
                                                       CFRunLoopGetCurrent(),
                                                       kCFRunLoopCommonModes,
                                                       0,
                                                       &playState.queue);
        if (newOutputStatus != 0) {
            NSLog(@"AudioQueueNewOutput failed: %d", (int)newOutputStatus);
            return;
        }
        // NOTE(review): playState.playing deliberately stays false — the
        // file-driven AudioOutputCallback must not try to read from the
        // (unopened) playState.audioFile when these streamed buffers drain.
        AudioQueueStart(playState.queue, NULL);
    }

    // Copy the received PCM into a fresh queue buffer and enqueue it for
    // immediate playback.
    AudioQueueBufferRef buffer = NULL;
    OSStatus status = AudioQueueAllocateBuffer(playState.queue,
                                               (UInt32)decodedData.length,
                                               &buffer);
    if (status == 0 && buffer != NULL) {
        memcpy(buffer->mAudioData, decodedData.bytes, decodedData.length);
        buffer->mAudioDataByteSize = (UInt32)decodedData.length;
        AudioQueueEnqueueBuffer(playState.queue, buffer, 0, NULL);
        // TODO(review): drained buffers are not freed here because the shared
        // AudioOutputCallback returns early when playing == false; a
        // dedicated streaming callback should AudioQueueFreeBuffer them.
    }
}

/// SRWebSocketDelegate: the connection closed (by either side). Drop our
/// reference so a later connect attempt builds a fresh socket.
- (void)webSocket:(SRWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean {
    NSLog(@"WebSocket closed");

    self.webSocket = nil;
}

/// SRWebSocketDelegate: a pong arrived in response to one of our pings.
- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload {
    NSLog(@"WebSocket received pong");
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Nothing cached here that could be released and recreated on demand.
}


@end

来源:https://stackoverflow.com/questions/48762974/how-to-play-nsdata-audio-buffering-receive-from-web-socket

易学教程内所有资源均来自网络或用户发布的内容,如有违反法律规定的内容欢迎反馈
该文章没有解决你所遇到的问题?点击提问,说说你的问题,让更多的人一起探讨吧!