Question
I'm trying to play music from a byte array, coming from the network, in pcmInt16 data format.
// formats
let format1 = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatFloat32, sampleRate: 48000, channels: 1, interleaved: false)!
let format2 = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16, sampleRate: 48000, channels: 1, interleaved: false)!
// byte array buffer
var byteArray: [Int16]! // one packet size is 512
...
// 1. create engine, attach and connect the player node, then start
engine.attach(playerNode)
engine.connect(playerNode, to: engine.mainMixerNode, format: format1)
engine.prepare()
try! engine.start()
// 2. fill byteArray with the music stream (Int16, 48 kHz)
...
// 3.
let len = 512
let pcmBuffer = AVAudioPCMBuffer(pcmFormat: format2, frameCapacity: AVAudioFrameCount(len))!
// HERE
// How do I copy the first 512 samples from byteArray into pcmBuffer?
playerNode.scheduleBuffer(pcmBuffer, completionHandler: nil)
How do I copy the first 512 samples from byteArray into the buffer? I tried something like this, but it doesn't work: memcpy(pcmBuffer.audioBufferList.pointee.mBuffers.mData, byteArray[0..<512], len * 2)
Answer 1:
The AVAudioMixerNode is good for sample rate conversions, but for broad format changes like Int16 to Float, you're probably better off converting yourself. For performance, I suggest vDSP from the Accelerate framework.
import Cocoa
import AVFoundation
import Accelerate
import PlaygroundSupport
let bufferSize = 512
let bufferByteSize = MemoryLayout<Float>.size * bufferSize
var pcmInt16Data: [Int16] = []
var pcmFloatData = [Float](repeating: 0.0, count: bufferSize) // allocate once and reuse
// one buffer of noise as an example
for _ in 0..<bufferSize {
    let value = Int16.random(in: Int16.min...Int16.max)
    pcmInt16Data.append(value)
}
let engine = AVAudioEngine()
let player = AVAudioPlayerNode()
let audioFormat = AVAudioFormat(standardFormatWithSampleRate: 48_000.0, channels: 1)!
let mixer = engine.mainMixerNode
engine.attach(player)
engine.connect(player, to: mixer, format: audioFormat)
engine.prepare()
do {
    try engine.start()
} catch {
    print("Error info: \(error)")
}
player.play()
if let buffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: UInt32(bufferSize)) {
    let monoChannel = buffer.floatChannelData![0]
    // Int16 ranges from -32768 to 32767 -- we want to convert and scale these to Float values between -1.0 and 1.0
    var scale = Float(Int16.max) + 1.0
    vDSP_vflt16(pcmInt16Data, 1, &pcmFloatData, 1, vDSP_Length(bufferSize)) // Int16 to Float
    vDSP_vsdiv(pcmFloatData, 1, &scale, &pcmFloatData, 1, vDSP_Length(bufferSize)) // divide by scale
    memcpy(monoChannel, pcmFloatData, bufferByteSize)
    buffer.frameLength = UInt32(bufferSize)
    player.scheduleBuffer(buffer, completionHandler: nil) // load more buffers in the completionHandler
}
PlaygroundPage.current.needsIndefiniteExecution = true
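The comment on scheduleBuffer above points at the usual streaming pattern: schedule the next buffer from the completion handler of the current one. Below is a minimal sketch of that chain, reusing audioFormat, bufferSize, bufferByteSize, and player from above; nextPacket() is a hypothetical stand-in for however your network code delivers the next 512 Int16 samples (returning nil when the stream ends).
func scheduleNextBuffer() {
    guard let packet: [Int16] = nextPacket(), // hypothetical network read
          let buffer = AVAudioPCMBuffer(pcmFormat: audioFormat, frameCapacity: UInt32(bufferSize)) else { return }
    var floatData = [Float](repeating: 0.0, count: bufferSize)
    var scale = Float(Int16.max) + 1.0
    vDSP_vflt16(packet, 1, &floatData, 1, vDSP_Length(bufferSize))           // Int16 to Float
    vDSP_vsdiv(floatData, 1, &scale, &floatData, 1, vDSP_Length(bufferSize)) // scale to -1.0...1.0
    memcpy(buffer.floatChannelData![0], floatData, bufferByteSize)
    buffer.frameLength = UInt32(bufferSize)
    player.scheduleBuffer(buffer) {
        scheduleNextBuffer() // the handler fires on a background thread once this buffer is consumed
    }
}
Call scheduleNextBuffer() once to prime the chain; scheduling two or three buffers up front gives the network some headroom.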
If instead you'd like to play an AVAudioFile, use the AVAudioPlayerNode.scheduleFile() and .scheduleSegment() methods rather than trying to read the Int16 data directly from a WAV/AIFF. You'll want to pay attention to the AVAudioFile.processingFormat property and use that for the format of the connection from the player to the mixer.
import Cocoa
import PlaygroundSupport
import AVFoundation
let engine = AVAudioEngine()
let player = AVAudioPlayerNode()
let playEntireFile = true
func playLocalFile() {
    // file needs to be in ~/Documents/Shared Playground Data
    let localURL = playgroundSharedDataDirectory.appendingPathComponent("MyAwesomeMixtape6.aiff")
    guard let audioFile = try? AVAudioFile(forReading: localURL) else { return }
    let audioFormat = audioFile.processingFormat
    let mixer = engine.mainMixerNode
    engine.attach(player)
    engine.connect(player, to: mixer, format: audioFormat)
    engine.prepare()
    do {
        try engine.start()
    } catch {
        print("Error info: \(error)")
    }
    player.play()
    if playEntireFile {
        player.scheduleFile(audioFile, at: nil, completionHandler: nil)
    } else { // play a segment
        let startTimeSeconds = 5.0
        let durationSeconds = 2.0
        let sampleRate = audioFormat.sampleRate
        let startFramePosition = startTimeSeconds * sampleRate
        let durationFrameCount = durationSeconds * sampleRate
        player.scheduleSegment(audioFile, startingFrame: AVAudioFramePosition(startFramePosition), frameCount: AVAudioFrameCount(durationFrameCount), at: nil, completionHandler: nil)
    }
}
playLocalFile()
PlaygroundPage.current.needsIndefiniteExecution = true
For remote files, try AVPlayer.
import Cocoa
import AVFoundation
import PlaygroundSupport
var player: AVPlayer?
func playRemoteFile() {
    guard let remoteURL = URL(string: "https://ondemand.npr.org/anon.npr-mp3/npr/me/2020/03/20200312_me_singapore_wins_praise_for_its_covid-19_strategy_the_us_does_not.mp3"
    ) else { return }
    player = AVPlayer(url: remoteURL)
    player?.play()
}
playRemoteFile()
PlaygroundPage.current.needsIndefiniteExecution = true
Answer 2:
First of all, it is better not to use Implicitly Unwrapped Optionals when you can avoid them:
var byteArray: [Int16] = [] // one packet size is 512
As far as I can see from the code shown, there is no need to make byteArray Optional.
As for "How to set the first 512 data from byteArray?":
Your code would work with a little modification:
pcmBuffer.frameLength = AVAudioFrameCount(len)
memcpy(pcmBuffer.audioBufferList.pointee.mBuffers.mData, byteArray, len * 2)
Or you can work with int16ChannelData:
if let channelData = pcmBuffer.int16ChannelData {
    memcpy(channelData[0], byteArray, len * MemoryLayout<Int16>.stride)
    pcmBuffer.frameLength = AVAudioFrameCount(len)
} else {
    print("bad format")
}
You may want to load parts of byteArray beyond the first packet, but that is another issue.
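For completeness, here is a minimal sketch of copying a later packet, assuming byteArray holds consecutive 512-sample packets and reusing pcmBuffer and len from the question; packetIndex is a hypothetical example value:
let packetIndex = 1 // hypothetical: the second 512-sample packet
byteArray.withUnsafeBufferPointer { src in
    if let base = src.baseAddress, let channelData = pcmBuffer.int16ChannelData {
        // pointer arithmetic advances by Int16 elements; memcpy still takes a byte count
        memcpy(channelData[0], base + packetIndex * len, len * MemoryLayout<Int16>.stride)
        pcmBuffer.frameLength = AVAudioFrameCount(len)
    }
}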
Source: https://stackoverflow.com/questions/60641585/schedule-buffer-with-avaudiopcmbuffer-int16-data