Open In App

How to make an audio visualizer with the HTML Canvas API?

In this article, we will see how to make an audio visualizer using the canvas API. Canvas API is used to draw graphics on the HTML canvas element. So with this feature, we will make an audio visualizer that responds to the audio from the microphone of the system.

Audio Visualizer can be defined as a tool that visually displays the rhythm, loudness, and frequency of music via animated images.



Classes Used: AudioContext, AnalyserNode, Uint8Array

Methods Used: navigator.mediaDevices.getUserMedia(), createAnalyser(), createMediaStreamSource(), getByteFrequencyData(), requestAnimationFrame(), and the canvas 2D context drawing methods (clearRect, beginPath, moveTo, lineTo, stroke)



Approach: The following steps are used to create the audio visualizer:

Syntax:

// Grab the trigger button and request microphone access on click.
var btn = document.getElementById("mybtn");
btn.onclick = async () => {
    // Only audio is needed for an audio visualizer; requesting video
    // as well would trigger an unnecessary camera-permission prompt.
    let stream = await navigator.mediaDevices.getUserMedia({
        audio: true
    });
};

In the above code, we have a button with a callback function for the onclick event that requests access to the audio device.

Create an instance of the AudioContext class and pass the stream object to its createMediaStreamSource() method, as shown below. Also, create an analyser node, which is used to extract the frequency data needed for the visualization.

Syntax:

 // Create an audio-processing graph rooted at a new AudioContext.
 const audioContext = new AudioContext();
 // The AnalyserNode exposes frequency data for visualization.
 const analyser = audioContext.createAnalyser();
 // Wrap the getUserMedia stream as a source node in the graph.
 const mediaStreamSource = 
     audioContext.createMediaStreamSource(stream);
 // Feed the microphone source into the analyser.
 mediaStreamSource.connect(analyser);
 // fftSize of 256 gives frequencyBinCount = 128 data points per frame.
 analyser.fftSize = 256;

Define a function drawVisualizer() which calls itself continuously via the requestAnimationFrame function (typically at the display's refresh rate) so that the drawing reflects changes in the audio signal.

Add the canvas API methods into the drawVisualizer function to create the final visualization, as shown below:

// Draws one frame of the frequency-bar visualization and schedules the
// next frame. Relies on the outer-scope `analyser` (AnalyserNode) and
// `visualizer` (canvas element).
function drawVisualizer() {
  requestAnimationFrame(drawVisualizer);

  // One byte (0-255) per frequency bin; frequencyBinCount === fftSize / 2.
  const bufferLength = analyser.frequencyBinCount;
  const dataArray = new Uint8Array(bufferLength);
  analyser.getByteFrequencyData(dataArray);

  const width = visualizer.width;
  const height = visualizer.height;
  const barWidth = 10;

  const canvasContext = visualizer.getContext('2d');
  canvasContext.clearRect(0, 0, width, height);

  // These settings are identical for every bar, so set them once per
  // frame instead of once per data point inside the loop.
  canvasContext.strokeStyle = "blue";
  canvasContext.lineCap = "round";
  canvasContext.lineWidth = 2;

  let x = 0;
  dataArray.forEach((item) => {
    // Bar height scaled to the canvas; the 1.1 factor slightly
    // exaggerates loud bins.
    const y = item / 255 * height * 1.1;
    // Advance 10px per bar to space the vertical lines apart.
    x = x + barWidth;
    canvasContext.beginPath();
    canvasContext.moveTo(x, height);
    canvasContext.lineTo(x, height - y);
    canvasContext.stroke();
  });
}

In the above code snippet, we first declare a typed array (dataArray) to store the audio data as integer values. We then access the canvas element created above through a JS selector (visualizer) and obtain its 2D drawing context (canvasContext). We loop through the elements of dataArray, and for every element we draw a line from the bottom of the canvas up to the point (x, height - y). We assign the line color as blue and the line width as 2, which lets us draw multiple vertical lines.

Example: In this example, we are creating an audio visualizer with canvas API, AudioContext, and mediaDevices.




<!DOCTYPE html>
<html>

<head>
    <title>
        Creating an audio visualizer
        using HTML CANVAS API
    </title>
</head>

<body>
    <h1 style="color:green">
        GeeksforGeeks
    </h1>
    <h3>
        How to make an audio visualizer
        with HTML CANVAS API?
    </h3>

    <button id="mybtn">Click Me</button>

    <!-- The width/height attributes take plain numbers (CSS pixels),
         not CSS lengths such as "100px" -->
    <canvas id="visualizer" width="100"
        height="100" style="border:5px solid blue;
                   border-radius:100px">
    </canvas>

    <script type="text/javascript">
        var btn = document.getElementById("mybtn");
        var visualizer = document.getElementById("visualizer");
        btn.onclick = async () => {
            // Only audio is required for an audio visualizer; asking
            // for video would add an unnecessary camera prompt.
            let stream = await navigator.mediaDevices.getUserMedia({
                audio: true
            });

            const audioContext = new AudioContext();
            const analyser = audioContext.createAnalyser();
            const mediaStreamSource =
                audioContext.createMediaStreamSource(stream);

            // Connecting the analyzer to the media source
            mediaStreamSource.connect(analyser);
            analyser.fftSize = 256;
            drawVisualizer();

            // Draws one frame of the bar visualization, then schedules
            // itself again via requestAnimationFrame.
            function drawVisualizer() {
                requestAnimationFrame(drawVisualizer);

                // One byte (0-255) per frequency bin
                const bufferLength = analyser.frequencyBinCount;
                const dataArray = new Uint8Array(bufferLength);

                // Updating the analyzer with the new
                // generated data visualization
                analyser.getByteFrequencyData(dataArray);
                const width = visualizer.width;
                const height = visualizer.height;
                const barWidth = 10;
                const canvasContext = visualizer.getContext('2d');
                canvasContext.clearRect(0, 0, width, height);

                // These settings are identical for every bar, so set
                // them once per frame rather than once per data point
                canvasContext.strokeStyle = "blue";
                canvasContext.lineCap = "round";
                canvasContext.lineWidth = 2;

                let x = 0;
                dataArray.forEach((item) => {

                    // This formula decides the height of the vertical
                    // lines for every item in dataArray
                    const y = item / 255 * height * 1.1;

                    // This decides the distances between the
                    // vertical lines
                    x = x + barWidth;
                    canvasContext.beginPath();
                    canvasContext.moveTo(x, height);
                    canvasContext.lineTo(x, height - y);
                    canvasContext.stroke();
                });
            }
        };
    </script>
</body>

</html>

Output: This output consists of a circular visualizer which is blue in color and a button beside it to access the system media resources.

 


Article Tags :