Question
Here's my code to capture a video stream and encode it to WebM (using https://github.com/GoogleChromeLabs/webm-wasm):
async function captureWebm() {
  console.log("Started webm capture");

  // Load the wasm module into the worker and wait until it reports back.
  worker.postMessage("./webm-wasm.wasm");
  await nextEvent(worker, "message");
  console.log("webm worker loaded");

  // Configure the encoder: 1/30 timebase (30 fps), realtime mode.
  worker.postMessage({
    width: w,
    height: h,
    timebaseNum: 1,
    timebaseDen: 30,
    bitrate: 1500,
    realtime: true
  });

  // Grab a frame from the <video> element on every animation frame and
  // transfer its RGBA pixel buffer to the worker.
  let encodeWebm = async function () {
    mCtx.drawImage(player, 0, 0, w, h);
    const imageData = mCtx.getImageData(0, 0, w, h);
    const buffer = imageData.data.buffer;
    worker.postMessage(buffer, [buffer]);
    requestAnimationFrame(encodeWebm);
  };
  requestAnimationFrame(encodeWebm);
}
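For context, the variables used above (worker, player, mCtx, w, h) and the nextEvent helper are set up roughly as in the webm-wasm demos; the exact names and sizes below are just my assumptions:

// Rough sketch of the setup the capture code assumes (names and sizes are mine).
const player = document.querySelector("video"); // the <video> element being captured
const w = 640;
const h = 480;

const mCanvas = document.createElement("canvas");
mCanvas.width = w;
mCanvas.height = h;
const mCtx = mCanvas.getContext("2d");

// webm-worker.js is the worker script shipped with webm-wasm.
const worker = new Worker("./webm-worker.js");

// Resolves the next time `target` fires an event called `name`
// (the same small helper the webm-wasm demos use).
function nextEvent(target, name) {
  return new Promise(resolve => {
    target.addEventListener(name, resolve, { once: true });
  });
}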
And here's the listener:
let queue = [];

worker.onmessage = ev => {
  if (!ev.data) {
    console.log('End of stream');
  }
  if (ev.data instanceof ArrayBuffer && ev.data.byteLength !== undefined) {
    const arrayBuffer = ev.data;
    queue.push(arrayBuffer);
  }
};
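For completeness, when I want to stop capturing I post null to the worker, which (as far as I understand webm-wasm) signals end of stream and is why the listener checks for a falsy ev.data:

// My understanding of the webm-wasm protocol: posting `null` tells the encoder
// there are no more frames, and the worker eventually replies with a falsy
// message, which the listener above logs as "End of stream".
function stopWebmCapture() {
  worker.postMessage(null);
}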
And finally, the part that builds and sends the video:
setInterval(function () {
  let webm = buildWebmVideoFromArrayOfBuffer();
  queue = [];
  socket.send(webm);
}, 500);
Without using MediaSource, how can I build a WebM video from this array of buffers? My goal is to build a WebM video every 500 ms.
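For illustration, the most naive buildWebmVideoFromArrayOfBuffer I can think of just wraps the queued chunks in a Blob, but I doubt that every 500 ms batch is a playable standalone WebM (I suspect only the first batch carries the WebM/EBML header):

// Hypothetical, naive attempt: concatenate the chunks received in the last
// 500 ms into one Blob. The Blob constructor copies the buffers, so clearing
// `queue` right afterwards is safe, but later batches probably lack the header.
function buildWebmVideoFromArrayOfBuffer() {
  return new Blob(queue, { type: "video/webm" });
}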
Source: https://stackoverflow.com/questions/61303618/create-webm-video-blob-from-series-of-arraybuffer