Access microphone from a browser - JavaScript

Asked by 猫巷女王i on 2020-11-27 04:13

Is it possible to access the microphone (built-in or auxiliary) from a browser using client-side JavaScript?

Ideally, it would store the recorded audio in the browser.

2 Answers
  • 2020-11-27 04:35

    Here we capture microphone audio as a Web Audio API event loop buffer using getUserMedia(). Time-domain and frequency-domain snippets of each audio event loop buffer are printed to the browser console (open it with F12 or Ctrl+Shift+I).

    <html><head><meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
    <title>capture microphone audio into buffer</title>
    
    <script type="text/javascript">
    
    
      var webaudio_tooling_obj = function () {
    
        var audioContext = new AudioContext();
    
        console.log("audio is starting up ...");
    
        var BUFF_SIZE = 16384;
    
        var audioInput = null,
            microphone_stream = null,
            gain_node = null,
            script_processor_node = null,
            script_processor_fft_node = null,
            analyserNode = null;
    
        // navigator.getUserMedia is the legacy, vendor-prefixed API;
        // newer browsers expose navigator.mediaDevices.getUserMedia instead
        navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                                 navigator.mozGetUserMedia || navigator.msGetUserMedia;
    
        if (navigator.getUserMedia){
    
            navigator.getUserMedia({audio:true}, 
              function(stream) {
                  start_microphone(stream);
              },
              function(e) {
                alert('Error capturing audio.');
              }
            );
    
        } else { alert('getUserMedia not supported in this browser.'); }
    
        // ---
    
        function show_some_data(given_typed_array, num_row_to_display, label) {
    
            var size_buffer = given_typed_array.length;
            var index = 0;
            var max_index = num_row_to_display;
    
            console.log("__________ " + label);
    
            for (; index < max_index && index < size_buffer; index += 1) {
    
                console.log(given_typed_array[index]);
            }
        }
    
        function process_microphone_buffer(event) { // invoked by event loop
    
            var i, N, inp, microphone_output_buffer;
    
            microphone_output_buffer = event.inputBuffer.getChannelData(0); // just mono - 1 channel for now
    
            // microphone_output_buffer  <-- this buffer contains current gulp of data size BUFF_SIZE
    
            show_some_data(microphone_output_buffer, 5, "from getChannelData");
        }
    
        function start_microphone(stream){
    
          gain_node = audioContext.createGain();
          gain_node.connect( audioContext.destination );
    
          microphone_stream = audioContext.createMediaStreamSource(stream);
          microphone_stream.connect(gain_node); 
    
          script_processor_node = audioContext.createScriptProcessor(BUFF_SIZE, 1, 1);
          script_processor_node.onaudioprocess = process_microphone_buffer;

          microphone_stream.connect(script_processor_node);

          // some browsers only fire onaudioprocess when the node is connected
          // downstream; its output buffer stays silent, so this adds no echo
          script_processor_node.connect(audioContext.destination);
    
          // --- enable volume control for output speakers
    
          document.getElementById('volume').addEventListener('change', function() {
    
              var curr_volume = this.value;
              gain_node.gain.value = parseFloat(curr_volume); // slider values arrive as strings
    
              console.log("curr_volume ", curr_volume);
          });
    
          // --- setup FFT
    
          script_processor_fft_node = audioContext.createScriptProcessor(2048, 1, 1);
          script_processor_fft_node.connect(gain_node);
    
          analyserNode = audioContext.createAnalyser();
          analyserNode.smoothingTimeConstant = 0;
          analyserNode.fftSize = 2048;
    
          microphone_stream.connect(analyserNode);
    
          analyserNode.connect(script_processor_fft_node);
    
          script_processor_fft_node.onaudioprocess = function() {
    
            // copy the current frequency-domain data, one byte per bin
            var array = new Uint8Array(analyserNode.frequencyBinCount);
            analyserNode.getByteFrequencyData(array);

            // MediaStreamAudioSourceNode has no playbackState property,
            // so simply log a few bins from every callback
            show_some_data(array, 5, "from fft");
          };
        }
    
      }(); //  webaudio_tooling_obj = function()
    
    
    
    </script>
    
    </head>
    <body>
    
        <p>Volume</p>
        <input id="volume" type="range" min="0" max="1" step="0.1" value="0.5"/>
    
    </body>
    </html>
    

    Since this code exposes the microphone data as a buffer, you could add the ability to stream it over WebSockets, or simply aggregate each event loop buffer into one large buffer and then download that buffer to a file.
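
    As a minimal sketch of the "aggregate then download" idea (not part of the code above): each onaudioprocess callback pushes a copy of its chunk into an array, and a helper wraps the collected samples in a 16-bit mono WAV file and offers it as a download. The names recorded_chunks and download_recording are illustrative.

        // illustrative sketch, not from the original answer
        var recorded_chunks = []; // each entry is one Float32Array of BUFF_SIZE samples

        function process_microphone_buffer(event) { // drop-in for the handler above
            // copy the chunk - the underlying buffer is reused between callbacks
            recorded_chunks.push(new Float32Array(event.inputBuffer.getChannelData(0)));
        }

        function download_recording(sample_rate) {
            // flatten all chunks into one Float32Array
            var total = recorded_chunks.reduce(function (sum, c) { return sum + c.length; }, 0);
            var samples = new Float32Array(total);
            var offset = 0;
            recorded_chunks.forEach(function (chunk) {
                samples.set(chunk, offset);
                offset += chunk.length;
            });

            // build a minimal mono 16-bit PCM WAV file around the samples
            var buffer = new ArrayBuffer(44 + samples.length * 2);
            var view = new DataView(buffer);
            function write_string(pos, str) {
                for (var i = 0; i < str.length; i++) view.setUint8(pos + i, str.charCodeAt(i));
            }
            write_string(0, 'RIFF');
            view.setUint32(4, 36 + samples.length * 2, true);
            write_string(8, 'WAVE');
            write_string(12, 'fmt ');
            view.setUint32(16, 16, true);                 // fmt chunk size
            view.setUint16(20, 1, true);                  // PCM
            view.setUint16(22, 1, true);                  // mono
            view.setUint32(24, sample_rate, true);
            view.setUint32(28, sample_rate * 2, true);    // byte rate
            view.setUint16(32, 2, true);                  // block align
            view.setUint16(34, 16, true);                 // bits per sample
            write_string(36, 'data');
            view.setUint32(40, samples.length * 2, true);
            for (var i = 0; i < samples.length; i++) {
                var s = Math.max(-1, Math.min(1, samples[i]));
                view.setInt16(44 + i * 2, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
            }

            var a = document.createElement('a');
            a.href = URL.createObjectURL(new Blob([buffer], { type: 'audio/wav' }));
            a.download = 'recording.wav';
            a.click();
        }

    A call such as download_recording(audioContext.sampleRate) would then save everything captured so far.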

    Notice the call to

        var audioContext = new AudioContext();
    

    which indicates it is using the Web Audio API, which is baked into all modern browsers (including mobile browsers) and provides an extremely powerful audio platform; tapping into the mic is only a tiny fragment of it. Note that CPU usage jumps in this demo because it writes each event loop buffer to the browser console log, which is for testing only, so actual use is far less resource intensive even when you modify this to stream the audio elsewhere (a sketch follows).
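
    As a rough illustration of that streaming variant, the sketch below forwards each event loop buffer over a WebSocket; the ws://localhost:8080/audio endpoint and the stream_microphone_buffer name are placeholders, not part of the original answer.

        // illustrative sketch: push each chunk to a WebSocket server
        var socket = new WebSocket('ws://localhost:8080/audio'); // placeholder endpoint
        socket.binaryType = 'arraybuffer';

        function stream_microphone_buffer(event) {
            var samples = event.inputBuffer.getChannelData(0); // mono Float32Array
            if (socket.readyState === WebSocket.OPEN) {
                // send a copy of the raw 32-bit float samples; the server
                // decides how to decode or transcode them
                socket.send(new Float32Array(samples).buffer);
            }
        }

        // wire it up in place of (or alongside) process_microphone_buffer:
        // script_processor_node.onaudioprocess = stream_microphone_buffer;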

    Links to some Web Audio API documentation

    • Basic concepts behind Web Audio API
    • SO wiki on Web Audio API
    • nice Web Audio API demos ... some with github links
  • 2020-11-27 04:35

    Yes, you can.

    Using the getUserMedia() API, you can capture raw audio input from your microphone.

    https://nusofthq.com/blog/recording-mp3-using-only-html5-and-javascript-recordmp3-js/
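
    For a modern take on the same idea, navigator.mediaDevices.getUserMedia can be paired with MediaRecorder to capture and save microphone audio in a few lines. This is a hedged sketch, not code from the linked article:

        // minimal sketch using the promise-based API and MediaRecorder
        navigator.mediaDevices.getUserMedia({ audio: true })
          .then(function (stream) {
              var recorder = new MediaRecorder(stream);
              var chunks = [];

              recorder.ondataavailable = function (e) { chunks.push(e.data); };

              recorder.onstop = function () {
                  // the browser picks the container/codec, typically audio/webm
                  var blob = new Blob(chunks, { type: recorder.mimeType });
                  var audio = new Audio(URL.createObjectURL(blob));
                  audio.play(); // or upload the blob, or offer it as a download
              };

              recorder.start();
              setTimeout(function () { recorder.stop(); }, 5000); // record 5 seconds
          })
          .catch(function (err) {
              console.error('Could not access the microphone:', err);
          });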
