I'm trying to create an experimental application that streams audio in real time from client 1 to client 2.
So, following some tutorials and questions on the same subject, I used WebRTC and BinaryJS. So far this is what I have:
1- Client 1 and Client 2 have connected to BinaryJS to send/receive data chunks.
2- Client 1 uses WebRTC to record audio and gradually sends it to BinaryJS.
3- Client 2 receives the chunks and tries to play them.
Well, I'm getting an error in the last part. This is the error message I get:

```
Uncaught RangeError: Source is too large
    at Float32Array.set (native)
```
And this is the code:
Client 1
```javascript
var WSClient;
var AudioStream;

function load(){
    var session = {
        audio: true,
        video: false
    };
    navigator.getUserMedia(session, startRecording, onError);

    WSClient = new BinaryClient('ws://localhost:9001');
    WSClient.on('open', function(){
        console.log('client opened');
        // note: AudioStream only exists once the connection has opened
        AudioStream = WSClient.createStream();
    });
}

function startRecording(stream){
    var context = new AudioContext();
    var audio_input = context.createMediaStreamSource(stream);
    var buffer_size = 2048;

    var recorder = context.createScriptProcessor(buffer_size, 1, 1);
    recorder.onaudioprocess = function(e){
        console.log('chunk');
        var left = e.inputBuffer.getChannelData(0);
        AudioStream.write(left);
    };

    audio_input.connect(recorder);
    recorder.connect(context.destination);
}
```
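(Aside: since client 2 below builds an `Int16Array` from the incoming data, the usual pattern in similar tutorials is to convert the `Float32` samples to 16-bit PCM before writing them to the stream. A minimal sketch of that conversion, assuming both sides agree on 16-bit PCM; `floatTo16BitPCM` is a hypothetical helper name:)

```javascript
// Hypothetical helper: clamp Float32 samples to [-1, 1] and scale to Int16.
function floatTo16BitPCM(float32Samples){
    var int16 = new Int16Array(float32Samples.length);
    for(var i = 0; i < float32Samples.length; i++){
        var s = Math.max(-1, Math.min(1, float32Samples[i]));
        int16[i] = s < 0 ? s * 0x8000 : s * 0x7FFF;
    }
    return int16;
}

// Inside onaudioprocess, instead of writing the floats directly:
// AudioStream.write(floatTo16BitPCM(left));
```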
Client 2
```javascript
var WSClient;
var audioContext;
var sourceNode;

function load(){
    audioContext = new AudioContext();
    sourceNode = audioContext.createBufferSource();
    sourceNode.connect(audioContext.destination);
    sourceNode.start(0);

    WSClient = new BinaryClient('ws://localhost:9001');
    WSClient.on('open', function(){
        console.log('client opened');
    });
    WSClient.on('stream', function(stream, meta){
        // collect stream data
        stream.on('data', function(data){
            console.log('received chunk');
            var integers = new Int16Array(data);
            var audioBuffer = audioContext.createBuffer(1, 2048, 4410);
            audioBuffer.getChannelData(0).set(integers); // apparently this is where the error occurs
            sourceNode.buffer = audioBuffer;
        });
    });
}
```
Server
```javascript
var wav = require('wav');
var binaryjs = require('binaryjs');
var binaryjs_server = binaryjs.BinaryServer;

var server = binaryjs_server({port: 9001});

server.on('connection', function(client){
    console.log('server connected');
    var file_writer = null;

    client.on('stream', function(stream, meta){
        console.log('streaming', server.clients);
        // forward the stream to every other connected client
        for(var id in server.clients){
            if(server.clients.hasOwnProperty(id)){
                var otherClient = server.clients[id];
                if(otherClient != client){
                    var send = otherClient.createStream(meta);
                    stream.pipe(send);
                }
            }
        }
    });

    client.on('close', function(stream){
        console.log('client closed');
        if(file_writer != null) file_writer.end();
    });
});
```
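(The `wav` module is required and `file_writer` is declared but never assigned, so `file_writer.end()` can never fire. If the intent was to also record the incoming audio to disk, a sketch using the `wav` package's `FileWriter` might look like this; the filename and format options are assumptions:)

```javascript
client.on('stream', function(stream, meta){
    // Assumed: persist the raw stream as a WAV file alongside forwarding it.
    file_writer = new wav.FileWriter('recording.wav', {
        channels: 1,
        sampleRate: 44100,   // must match what client 1 actually records
        bitDepth: 16         // assumes chunks are 16-bit PCM
    });
    stream.pipe(file_writer);
});
```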
The error occurs on this line:

```javascript
audioBuffer.getChannelData(0).set(integers);
```
So I have two questions:

1- Is it possible to send the chunks I capture in client 1 and then play them back in client 2?
2- What is the deal with the error I'm getting?
Thanks guys!
@edit 1

Since I'm piecing together code snippets from other questions, I'm still trying to understand all of it. I commented out the line in client 2's code that creates an `Int16Array`, and now I get a different error (but I don't know which version of the code is more correct):

```
Uncaught DOMException: Failed to set the 'buffer' property on 'AudioBufferSourceNode': Cannot set buffer after it has been already been set
```

Probably because I'm setting it every time I get a new chunk of data.
The DOMException about `AudioBufferSourceNode` means you need to create a new `AudioBufferSourceNode` for every new `AudioBuffer` that you're creating. So something like:

```javascript
sourceNode = new AudioBufferSourceNode(audioContext, {buffer: audioBuffer});
```

And an `AudioBuffer` holds `Float32Array`s. You need to convert your `Int16Array` to a `Float32Array` before assigning it to an `AudioBuffer`; dividing every sample by 32768 is probably good enough. (That mismatch is presumably also where the original RangeError comes from: 2048 Float32 samples are 8192 bytes, so viewing them as an `Int16Array` gives 4096 values, which doesn't fit a 2048-sample channel buffer.)
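Putting both fixes together, the receive handler might look like the sketch below. It assumes the incoming chunks really are 16-bit PCM, and that the 4410 sample rate in the original code was a typo for 44100:

```javascript
WSClient.on('stream', function(stream, meta){
    stream.on('data', function(data){
        // Interpret the chunk as 16-bit PCM and rescale to floats in [-1, 1]
        var integers = new Int16Array(data);
        var floats = new Float32Array(integers.length);
        for(var i = 0; i < integers.length; i++){
            floats[i] = integers[i] / 32768;
        }

        var audioBuffer = audioContext.createBuffer(1, floats.length, 44100);
        audioBuffer.getChannelData(0).set(floats);

        // A fresh AudioBufferSourceNode per chunk: its buffer can only be set once
        var node = new AudioBufferSourceNode(audioContext, {buffer: audioBuffer});
        node.connect(audioContext.destination);
        node.start(0);
    });
});
```

Starting every chunk immediately will still produce audible gaps or overlaps between chunks; a common refinement is to schedule each buffer at a running offset based on `audioContext.currentTime`.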
Source: https://stackoverflow.com/questions/47274120/how-to-play-audio-stream-chunks-recorded-with-webrtc