MediaStream Capture Canvas and Audio Simultaneously


Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?

Yes, you can do it using the MediaStream.addTrack() method.

But Firefox's MediaRecorder will only pick up the initial stream's tracks until this bug has been fixed.
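As a minimal sketch of the idea (assuming a canvasStream and an audioStream already exist; how to obtain them is shown below):

// minimal sketch, assuming canvasStream and audioStream already exist
canvasStream.addTrack(audioStream.getAudioTracks()[0]);
// canvasStream now carries both the canvas video track and the audio track
var recorder = new MediaRecorder(canvasStream);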


OP already knows how to get all of this, but here is a reminder for future readers:

  • To get a video stream track from the canvas, you can call the canvas.captureStream(framerate) method.

  • To get an audio stream track from a video element, you can use the Web Audio API and its createMediaStreamDestination method. This will return a MediaStreamAudioDestinationNode (dest) whose stream property holds our audio stream. You'll then have to connect a MediaElementSource created from your video element to this dest. If you need to add more audio tracks to this stream, you should connect all of these sources to dest, as shown in the sketch after this list.
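A minimal sketch of these two bullet points (canvas and videoEl are placeholders for your own elements):

// a 30 fps video track captured from the canvas
var canvasStream = canvas.captureStream(30);

// an audio track routed from a media element through the Web Audio API
var audioCtx = new AudioContext();
var dest = audioCtx.createMediaStreamDestination(); // dest.stream is our audio stream
var sourceNode = audioCtx.createMediaElementSource(videoEl);
sourceNode.connect(dest); // any additional audio sources would also connect to dest
var audioStream = dest.stream;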

Now that we've got two streams, one for the canvas video and one for the audio, we can use canvasStream.addTrack(audioStream.getAudioTracks()[0]) just before initializing our new MediaRecorder(canvasStream).

Here is a complete example that will currently only work in Chrome, and probably soon in Firefox, once they have fixed the bug:

var cStream,
  aStream,
  vid,
  recorder,
  analyser,
  dataArray,
  bufferLength,
  chunks = [];

function clickHandler() {

  this.textContent = 'stop recording';
  // capture the canvas at 30 fps, then add the audio track to this stream
  cStream = canvas.captureStream(30);
  cStream.addTrack(aStream.getAudioTracks()[0]);

  recorder = new MediaRecorder(cStream);
  recorder.start();

  recorder.ondataavailable = saveChunks;

  recorder.onstop = exportStream;

  this.onclick = stopRecording;

}

function exportStream(e) {

  if (chunks.length) {

    var blob = new Blob(chunks);
    var vidURL = URL.createObjectURL(blob);
    var vid = document.createElement('video');
    vid.controls = true;
    vid.src = vidURL;
    vid.onended = function() {
      URL.revokeObjectURL(vidURL);
    };
    document.body.insertBefore(vid, canvas);

  } else {

    document.body.insertBefore(document.createTextNode('no data saved'), canvas);

  }
}

function saveChunks(e) {

  // only store non-empty chunks
  e.data.size && chunks.push(e.data);

}

function stopRecording() {

  vid.pause();
  this.parentNode.removeChild(this);
  recorder.stop();

}

function initAudioStream(evt) {

  var audioCtx = new AudioContext();
  // create a stream from our AudioContext
  var dest = audioCtx.createMediaStreamDestination();
  aStream = dest.stream;
  // connect our video element's output to the stream
  var sourceNode = audioCtx.createMediaElementSource(this);
  sourceNode.connect(dest);
  // start the video
  this.play();

  // just for the fancy canvas drawings
  analyser = audioCtx.createAnalyser();
  sourceNode.connect(analyser);

  analyser.fftSize = 2048;
  bufferLength = analyser.frequencyBinCount;
  dataArray = new Uint8Array(bufferLength);
  analyser.getByteTimeDomainData(dataArray);

  // output to our headphones
  sourceNode.connect(audioCtx.destination);

  startCanvasAnim();

  rec.onclick = clickHandler;
  rec.disabled = false;
}

var loadVideo = function() {

  vid = document.createElement('video');
  // 'anonymous' CORS mode is required here: a MediaElementSource created
  // from CORS-restricted media would only output silence
  vid.crossOrigin = 'anonymous';
  vid.oncanplay = initAudioStream;
  vid.src = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';

}

function startCanvasAnim() {
  // from MDN https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
  var canvasCtx = canvas.getContext('2d');

  canvasCtx.fillStyle = 'rgb(200, 200, 200)';
  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = 'rgb(0, 0, 0)';

  var draw = function() {

    // queue the next frame
    requestAnimationFrame(draw);

    analyser.getByteTimeDomainData(dataArray);

    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
    canvasCtx.beginPath();

    var sliceWidth = canvas.width * 1.0 / bufferLength;
    var x = 0;

    for (var i = 0; i < bufferLength; i++) {

      var v = dataArray[i] / 128.0;
      var y = v * canvas.height / 2;

      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }

      x += sliceWidth;
    }

    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();

  };

  draw();

}

loadVideo();
<canvas id="canvas" width="500" height="200"></canvas>
<button id="rec" disabled>record</button>

PS: Since the FF team seems to be taking some time to fix the bug, here is a quick fix to make it work on FF too.

You can also mix two tracks by using the new MediaStream([track1, track2]) constructor.
However, Chrome currently prefixes this constructor, but since it does support addTrack, it's not really needed, and we can come up with something as ugly as:

// use the constructor where it's available (FF); otherwise fall back to
// the cStream we already called addTrack() on (Chrome)
var mixedStream = 'MediaStream' in window ?
  new MediaStream([cStream.getVideoTracks()[0], aStream.getAudioTracks()[0]]) :
  cStream;
recorder = new MediaRecorder(mixedStream);

Working fiddle for both FF and Chrome.

Kaiido's demo is brilliant. For those just looking for the tl;dr code to add an audio stream to their existing canvas stream:

let videoOrAudioElement = /* your audio source element */;
// get the audio track:
let ctx = new AudioContext();
let dest = ctx.createMediaStreamDestination();
let sourceNode = ctx.createMediaElementSource(videoOrAudioElement);
sourceNode.connect(dest);
sourceNode.connect(ctx.destination);
let audioTrack = dest.stream.getAudioTracks()[0];
// add it to your canvas stream:
canvasStream.addTrack(audioTrack);
// use your canvas stream like you would normally:
let recorder = new MediaRecorder(canvasStream);
// ...
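Note that sourceNode is connected twice: to dest, which feeds the track being recorded, and to ctx.destination, which keeps the audio audible through the speakers. Drop the second connection if you want the recording to happen silently.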