Question
I would like to know how to get the bytes of an AVAudioCompressedBuffer and then reconstruct an AVAudioCompressedBuffer from the bytes.
The code below takes an AVAudioPCMBuffer, compresses it with AVAudioConverter into an AVAudioCompressedBuffer (iLBC), extracts the bytes, converts the bytes back into an AVAudioCompressedBuffer (iLBC), decompresses that back to an AVAudioPCMBuffer, and plays the buffer. Something goes wrong in converting the AVAudioCompressedBuffer to bytes and back; if I skip that round trip, the audio plays as expected.
The contents of compressedBuffer and compressedBuffer2 even appear to match, which leads me to believe that the mechanism for getting the bytes is not exactly right.
// Do iLBC Compression
let compressedBuffer: AVAudioCompressedBuffer = self.compress(inBuffer: buffer)
// packetCapacity: 4, maximumPacketSize: 38
// self.player.scheduleBuffer(self.uncompress(inBuffer: compressedBuffer)) // This works perfectly
// Convert Buffer to Byte Array
let pointer1: UnsafeMutablePointer<UInt8> = compressedBuffer.data.bindMemory(to: UInt8.self, capacity: 152)
var audioByteArray = [UInt8](repeating: 0, count: 152)
pointer1.withMemoryRebound(to: UInt8.self, capacity: 152) { srcByteData in
    audioByteArray.withUnsafeMutableBufferPointer {
        $0.baseAddress!.initialize(from: srcByteData, count: 152)
    }
}
// Convert Byte Array to Buffer
let compressedBuffer2: AVAudioCompressedBuffer = AVAudioCompressedBuffer(format: AVAudioFormat.init(streamDescription: &self.descriptor)!, packetCapacity: 4, maximumPacketSize: 38)
let destination = compressedBuffer2.data
audioByteArray.withUnsafeBufferPointer {
    let src = UnsafeRawPointer($0.baseAddress!).bindMemory(to: UInt8.self, capacity: 152)
    destination.copyMemory(from: src, byteCount: 152)
}
// Do iLBC Decompression
let uncompressedBuffer: AVAudioPCMBuffer = self.uncompress(inBuffer: compressedBuffer2)
// Works perfectly with inBuffer: compressedBuffer
// Play Buffer
self.player.scheduleBuffer(uncompressedBuffer)
// Plays fine when 'let uncompressedBuffer: AVAudioPCMBuffer = self.uncompress(inBuffer: compressedBuffer)'
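The two buffers can be compared along these lines (a minimal sketch, assuming the buffers created above; it checks the raw data bytes as well as the buffer metadata):
// Compare raw bytes (152 = 4 * 38) and the buffer bookkeeping
let bytesMatch = memcmp(compressedBuffer.data, compressedBuffer2.data, 152) == 0
print("bytes match: \(bytesMatch)")
print("byteLength: \(compressedBuffer.byteLength) vs \(compressedBuffer2.byteLength)")
print("packetCount: \(compressedBuffer.packetCount) vs \(compressedBuffer2.packetCount)")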
Compress and Uncompress functions
let format = AVAudioFormat.init(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: 16000, channels: 1, interleaved: false)
var compressedFormatDescriptor = AudioStreamBasicDescription(mSampleRate: 8000, mFormatID: kAudioFormatiLBC, mFormatFlags: 0, mBytesPerPacket: 0, mFramesPerPacket: 0, mBytesPerFrame: 0, mChannelsPerFrame: 1, mBitsPerChannel: 0, mReserved: 0)
func compress(inBuffer: AVAudioPCMBuffer) -> AVAudioCompressedBuffer {
    let inputFormat = inBuffer.format
    let converter = AVAudioConverter(from: inputFormat, to: self.compressedFormat!)
    let outBuffer = AVAudioCompressedBuffer(format: self.compressedFormat!, packetCapacity: 4, maximumPacketSize: 38)
    let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
        outStatus.pointee = AVAudioConverterInputStatus.haveData
        return inBuffer
    }
    var error: NSError?
    converter!.convert(to: outBuffer, error: &error, withInputFrom: inputBlock)
    return outBuffer
}
func uncompress(inBuffer: AVAudioCompressedBuffer) -> AVAudioPCMBuffer {
    let inputFormat = inBuffer.format
    let outputFormat = format
    let converter = AVAudioConverter(from: inputFormat, to: outputFormat!)
    let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
        outStatus.pointee = AVAudioConverterInputStatus.haveData
        return inBuffer
    }
    var error: NSError?
    let outBuffer: AVAudioPCMBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 1600)!
    converter!.convert(to: outBuffer, error: &error, withInputFrom: inputBlock)
    return outBuffer
}
Answer 1:
This is what I ended up doing.
The main thing was setting byteLength and packetCount on the second AVAudioCompressedBuffer.
iLBC has a fixed frame size: 304 bits per frame in its 20ms mode, i.e. 50 frames per second.
304 bits * 50 = 15200 bits/s = 15.2 kbit/s
The AVAudioNodeTapBlock for an AVAudioInputNode fires every 100ms, which is 5 iLBC frames (packets) per tap.
Each packet is 304 bits / 8 = 38 bytes, so for one tap byteLength = 5 * 38 = 190 bytes and packetCount = 5.
That works out to 5 packets * 38 bytes * 10 taps/s * 8 bits/byte = 15200 bits/s = 15.2 kbit/s.
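As a quick check of that arithmetic (a minimal sketch; the constant names here are illustrative and not part of the code below):
let bitsPerFrame = 304                                  // one 20ms iLBC frame
let bytesPerPacket = bitsPerFrame / 8                   // 38
let packetsPerTap = 5                                   // 100ms tap / 20ms frames
let byteLengthPerTap = packetsPerTap * bytesPerPacket   // 190
let bitsPerSecond = byteLengthPerTap * 10 * 8           // 15200 bits/s = 15.2 kbit/s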
The full code below shows everything written out.
So, what happens is:
1) Convert AVAudioPCMBuffer from microphone to AVAudioCompressedBuffer (iLBC)
2) Convert AVAudioCompressedBuffer (iLBC) to [UInt8] (5 * 38 = 190 bytes).
3) Convert [UInt8] to AVAudioCompressedBuffer (iLBC)
4) Convert AVAudioCompressedBuffer (iLBC) to AVAudioPCMBuffer
5) Play AVAudioPCMBuffer
// Compress to iLBC
let packetCapacity = 5
let maximumPacketSize = 38
let capacity = packetCapacity * maximumPacketSize // 190
var descriptor = AudioStreamBasicDescription.init(mSampleRate: 8000, mFormatID: kAudioFormatiLBC, mFormatFlags: 0, mBytesPerPacket: 0, mFramesPerPacket: 0, mBytesPerFrame: 0, mChannelsPerFrame: 1, mBitsPerChannel: 0, mReserved: 0)
let ilbcformat = AVAudioFormat(streamDescription: &descriptor)!
let compressor: AVAudioConverter = AVAudioConverter.init(from: self.format!, to: ilbcformat)!
let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
    outStatus.pointee = AVAudioConverterInputStatus.haveData
    return buffer
}
let compressedBuffer: AVAudioCompressedBuffer = AVAudioCompressedBuffer(format: ilbcformat, packetCapacity: 5, maximumPacketSize: 38)
compressor.convert(to: compressedBuffer, error: nil, withInputFrom: inputBlock)
// Convert to Bytes
let compressedBufferPointer = compressedBuffer.data.bindMemory(to: UInt8.self, capacity: 190)
var compressedBytes: [UInt8] = [UInt8].init(repeating: 0, count: 190)
compressedBufferPointer.withMemoryRebound(to: UInt8.self, capacity: 190) { sourceBytes in
    compressedBytes.withUnsafeMutableBufferPointer {
        $0.baseAddress!.initialize(from: sourceBytes, count: 190)
    }
}
// Convert to buffer
let compressedBuffer2: AVAudioCompressedBuffer = AVAudioCompressedBuffer.init(format: ilbcformat, packetCapacity: 5, maximumPacketSize: 38)
compressedBuffer2.byteLength = 190
compressedBuffer2.packetCount = 5
compressedBytes.withUnsafeMutableBufferPointer {
    compressedBuffer2.data.copyMemory(from: $0.baseAddress!, byteCount: 190)
}
// Uncompress to PCM
let uncompressor: AVAudioConverter = AVAudioConverter.init(from: ilbcformat, to: self.format!)!
let inputBlock2: AVAudioConverterInputBlock = { inNumPackets, outStatus in
    outStatus.pointee = AVAudioConverterInputStatus.haveData
    return compressedBuffer2
}
let uncompressedBuffer: AVAudioPCMBuffer = AVAudioPCMBuffer.init(pcmFormat: self.format!, frameCapacity: 4410)!
uncompressor.convert(to: uncompressedBuffer, error: nil, withInputFrom: inputBlock2)
// Play Buffer
self.player.scheduleBuffer(uncompressedBuffer)
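For reuse, the byte extraction and reconstruction can be wrapped into small helpers using Data (a sketch based on the approach above, not part of the original answer; the helper names are illustrative and it assumes the iLBC format and sizes used here):
import AVFoundation

// Copy the compressed payload out of the buffer (byteLength bytes).
func compressedBytes(from buffer: AVAudioCompressedBuffer) -> [UInt8] {
    return [UInt8](Data(bytes: buffer.data, count: Int(buffer.byteLength)))
}

// Rebuild a compressed buffer from raw bytes. Restoring byteLength and
// packetCount is what lets the converter decode it again.
func makeCompressedBuffer(from bytes: [UInt8], format: AVAudioFormat,
                          packetCount: AVAudioPacketCount, maximumPacketSize: Int) -> AVAudioCompressedBuffer {
    let buffer = AVAudioCompressedBuffer(format: format,
                                         packetCapacity: packetCount,
                                         maximumPacketSize: maximumPacketSize)
    buffer.byteLength = UInt32(bytes.count)
    buffer.packetCount = packetCount
    bytes.withUnsafeBufferPointer {
        buffer.data.copyMemory(from: $0.baseAddress!, byteCount: bytes.count)
    }
    return buffer
}

// Usage with the variables above:
// let payload = compressedBytes(from: compressedBuffer)   // 190 bytes per 100ms tap
// let rebuilt = makeCompressedBuffer(from: payload, format: ilbcformat,
//                                    packetCount: 5, maximumPacketSize: 38)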
Source: https://stackoverflow.com/questions/50906173/avaudiocompressedbuffer-to-uint8-array-and-vice-versa