How to get microphone volume using AudioWorklet

I'm interested in getting a continuous reading of the microphone volume in JavaScript. Many of the existing solutions to this on StackOverflow (see here, here, and here) use BaseAudioContext.createScriptProcessor(), which has been deprecated since 2014.

I'd like to use future-proof code in my project, so can anyone share a modern, minimal example of how to read the microphone volume using the new AudioWorkletNode?

asked Jul 02 '20 by mrossman

1 Answer

Let's take a look at a few points you need to know:

  • The main reason for this change is to avoid latency: the audio processing now runs on its own thread, the audio rendering thread (AudioWorkletGlobalScope), instead of the main thread.
  • The new approach has two parts: AudioWorkletProcessor and AudioWorkletNode.
  • AudioWorkletNode requires at least two things: an AudioContext object and the processor name as a string. The processor definition is loaded and registered through the audio context's audioWorklet.addModule() call.
  • Worklet APIs, including AudioWorklet, are only available in a secure context (HTTPS). localhost counts as secure, which is handy for development, but it's something to keep in mind.
  • We need to communicate from the AudioWorkletProcessor back to the AudioWorkletNode, at minimum to pass the current value (in this example, the volume) so we can act on it. This is done through the node's MessagePort.
  • We need access to the computer's microphone, which we request with navigator.mediaDevices.getUserMedia (the old callback-based navigator.getUserMedia is deprecated). A condensed sketch of how these pieces fit together follows this list.
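
Here is a condensed sketch of the pattern (the names 'vumeter' and 'vumeter-processor.js' refer to the full implementation below; the sketch assumes it runs inside a module script or an async function so that await is allowed):

// Minimal sketch of the main-thread side
const context = new AudioContext()

// Load and register the processor defined in vumeter-processor.js
await context.audioWorklet.addModule('vumeter-processor.js')

// Route the microphone into the worklet node
const stream = await navigator.mediaDevices.getUserMedia({ audio: true })
const source = context.createMediaStreamSource(stream)

// 'vumeter' is the name registered with registerProcessor()
const vumeterNode = new AudioWorkletNode(context, 'vumeter')

// The processor posts the measured volume through its MessagePort
vumeterNode.port.onmessage = event => console.log(event.data.volume)

source.connect(vumeterNode).connect(context.destination)

And here is the full index.js:
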
/** Will hold the AudioContext object */
let audioContext
// List of colors for the LEDs
const ledColor = [
    "#064dac",
    "#064dac",
    "#064dac",
    "#06ac5b",
    "#15ac06",
    "#4bac06",
    "#80ac06",
    "#acaa06",
    "#ac8b06",
    "#ac5506",
]
let isFirstClick = true
let listening = false

function onMicrophoneDenied(error) {
    console.log('Microphone access denied:', error)
}

/**
 * This method updates the LEDs
 * depending on the detected volume
 *
 * @param {number} vol volume level detected from the microphone
 */
function leds(vol) {
    let allLeds = [...document.getElementsByClassName('led')]
    let litLeds = allLeds.slice(0, Math.round(vol))

    // Reset every LED to its "off" style
    for (let i = 0; i < allLeds.length; i++) {
        allLeds[i].style.boxShadow = "-2px -2px 4px 0px #a7a7a73d, 2px 2px 4px 0px #0a0a0e5e";
        allLeds[i].style.height = "22px"
    }

    // Light up the LEDs corresponding to the current volume
    for (let i = 0; i < litLeds.length; i++) {
        litLeds[i].style.boxShadow = `5px 2px 5px 0px #0a0a0e5e inset, -2px -2px 1px 0px #a7a7a73d inset, -2px -2px 30px 0px ${ledColor[i]} inset`;
        litLeds[i].style.height = "25px"
    }
}

/**
 * Sets up the communication between the
 * microphone, the AudioWorkletNode and the AudioWorkletProcessor
 *
 * @param {MediaStream} stream If the user grants access to the microphone,
 * this receives the MediaStream object needed for this implementation
 */
async function onMicrophoneGranted(stream) {
    // Instantiate everything only the first
    // time the button is pressed
    if (isFirstClick) {
        // Initialize AudioContext object
        audioContext = new AudioContext()

        // Adding an AudioWorkletProcessor
        // from another script with addModule method
        await audioContext.audioWorklet.addModule('vumeter-processor.js')

        // Creating a MediaStreamSource object
        // from the MediaStream granted
        // by the user
        let microphone = audioContext.createMediaStreamSource(stream)

        // Creating the AudioWorkletNode, passing the
        // context and the name of the processor
        // registered in vumeter-processor.js
        const node = new AudioWorkletNode(audioContext, 'vumeter')

        // Listen for messages posted from the
        // AudioWorkletProcessor's process method;
        // this is where we receive the volume level
        node.port.onmessage = event => {
            let _volume = 0
            let _sensitivity = 5 // Scaling factor for the LED display
            if (event.data.volume)
                _volume = event.data.volume;
            leds((_volume * 100) / _sensitivity)
        }

        // Connect the microphone to the
        // AudioWorkletNode and then to the
        // audioContext's destination
        microphone.connect(node).connect(audioContext.destination)

        isFirstClick = false
    }

    // Toggle the button state and
    // suspend or resume the audio processing
    let audioButton = document.getElementsByClassName('audio-control')[0]
    if (listening) {
        audioContext.suspend()
        audioButton.style.boxShadow = "-2px -2px 4px 0px #a7a7a73d, 2px 2px 4px 0px #0a0a0e5e"
        audioButton.style.fontSize = "25px"
    } else {
        audioContext.resume()
        audioButton.style.boxShadow = "5px 2px 5px 0px #0a0a0e5e inset, -2px -2px 1px 0px #a7a7a73d inset"
        audioButton.style.fontSize = "24px"
    }

    listening = !listening
}

function activeSound () {
    // Ask the user for permission
    // to use the microphone
    navigator.mediaDevices.getUserMedia({ audio: true, video: false })
        .then(onMicrophoneGranted)
        .catch(onMicrophoneDenied)
}

document.getElementById('audio').addEventListener('click', () => {
    activeSound()
})
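
Note that connecting the node to audioContext.destination doesn't play anything back: the processor never writes to its outputs, so the connection just keeps the node in the rendering graph. Also, since the processor (shown next) listens for an updateIntervalInMS message, you can tune how often the volume is reported; for example, right after creating the node:

// Ask the processor to report the volume every 100 ms instead of the default 25 ms
node.port.postMessage({ updateIntervalInMS: 100 })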

Next is vumeter-processor.js, the processor where the microphone volume is actually calculated:

const SMOOTHING_FACTOR = 0.8;
const MINIMUM_VALUE = 0.00001;

// This is how an AudioWorkletProcessor is registered:
// it needs a name, in this case "vumeter",
// which is the name the AudioWorkletNode refers to
registerProcessor('vumeter', class extends AudioWorkletProcessor {

  _volume
  _updateIntervalInMS
  _nextUpdateFrame

  constructor () {
    super();
    this._volume = 0;
    this._updateIntervalInMS = 25;
    this._nextUpdateFrame = this._updateIntervalInMS;
    this.port.onmessage = event => {
      if (event.data.updateIntervalInMS)
        this._updateIntervalInMS = event.data.updateIntervalInMS;
    }
  }

  get intervalInFrames () {
    return this._updateIntervalInMS / 1000 * sampleRate;
  }

  process (inputs, outputs, parameters) {
    const input = inputs[0];

    // Note that the input will be down-mixed to mono; however, if no inputs are
    // connected then zero channels will be passed in.
    if (input.length > 0) {
      const samples = input[0];
      let sum = 0;
      let rms = 0;

      // Calculate the sum of the squared samples.
      for (let i = 0; i < samples.length; ++i)
        sum += samples[i] * samples[i];

      // Calculate the RMS level and update the volume.
      rms = Math.sqrt(sum / samples.length);
      this._volume = Math.max(rms, this._volume * SMOOTHING_FACTOR);

      // Update and sync the volume property with the main thread.
      this._nextUpdateFrame -= samples.length;
      if (this._nextUpdateFrame < 0) {
        this._nextUpdateFrame += this.intervalInFrames;
        this.port.postMessage({volume: this._volume});
      }
    }
    
    return true;
  }
});
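
A quick note on the math: for each render quantum (128 samples) the processor computes the RMS level, rms = sqrt(sum(samples[i]^2) / N), and keeps this._volume = max(rms, previousVolume * SMOOTHING_FACTOR). The reported value therefore rises immediately when the signal gets louder and decays gradually when it gets quieter, which keeps the LED display from flickering, and it is posted to the main thread roughly every _updateIntervalInMS milliseconds (25 ms by default).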

Finally, this is the HTML that displays the detected volume:

<div class="container">
    <span>Microphone</span>
    <div class="volumen-wrapper">
        <div class="led"></div>
        <div class="led"></div>
        <div class="led"></div>
        <div class="led"></div>
        <div class="led"></div>
                
        <div class="led"></div>
        <div class="led"></div>
        <div class="led"></div>
        <div class="led"></div>
        <div class="led"></div>
    </div>

    <div class="control-audio-wrapper">
        <div id="audio" class="audio-control">&#127908;</div>
    </div>
</div>
<script type="module" src="./index.js"></script>
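
Note that the styling for .led, .volumen-wrapper and .audio-control isn't reproduced here (it lives in the CodePen demo); the script only toggles inline boxShadow and height values on those elements, so any basic styling for a row of LED blocks and a button will work.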

This is the result: [screenshot of the LED volume meter]

Here is my implementation on CodePen.

Sources:

  • Enter Audio Worklet
  • Web Audio API
  • w3.org/webaudio
answered Oct 10 '22 by forgived