Web Audio API

The Web Audio API lets developers create, process, and control audio directly in the browser. It provides features for generating, analyzing, and transforming audio data, making it straightforward to add audio functionality to web projects. In this article, we will look at the API's core concepts, its main interfaces, and two practical examples.

Web Audio API Concepts and Usage

All audio work happens inside an AudioContext. Audio nodes are created from the context and connected into a graph: source nodes (such as an oscillator or a media element) feed into processing nodes (such as a gain or analyser node), which ultimately connect to the context's destination, i.e. the speakers.

Web Audio API Interfaces

The interfaces used in this article include:

AudioContext: the audio-processing graph that hosts all nodes.
OscillatorNode: generates a periodic waveform such as a sine wave.
GainNode: controls the volume of the signal passing through it.
AnalyserNode: exposes real-time frequency and time-domain data.
AudioParam: an automatable node parameter (frequency, detune, gain) with scheduling methods such as setValueAtTime().
MediaElementAudioSourceNode: feeds the output of an audio or video element into the graph.
AudioDestinationNode: the final output of the graph, reached via audioContext.destination.

Syntax:

const audioContext = new (window.AudioContext || window.webkitAudioContext)();

Parameters:

The constructor optionally accepts an options object with latencyHint (e.g. 'interactive', 'balanced', or 'playback') and sampleRate properties; when called with no arguments, as above, the browser's defaults are used. The window.webkitAudioContext fallback supports older WebKit-based browsers that only ship the prefixed constructor.
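For instance, a context can request a specific sample rate and low output latency (a minimal sketch; both property names come from the standard AudioContextOptions dictionary):

// Request low-latency output at 48 kHz.
const audioContext = new AudioContext({
    latencyHint: 'interactive',
    sampleRate: 48000
});
console.log(audioContext.sampleRate); // 48000 where the hardware allows it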

Example 1: In this example, we create an audio synthesizer with the Web Audio API, where the user can set the frequency, detune, and gain values and start or stop the tone. We use the setValueAtTime() method of the AudioParam interface to apply the parameter values, as shown in the markup sketch and script below.
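The page's HTML is not reproduced here; a minimal markup sketch (assumed, matching only the element IDs the script queries) might look like this:

<input id="frequency" type="number" value="440"> Frequency (Hz)
<input id="detune" type="number" value="0"> Detune (cents)
<input id="gain" type="number" value="0.5" min="0" max="1" step="0.1"> Gain
<button id="playButton">Play</button>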

document.addEventListener('DOMContentLoaded', () => {
    const Acontext = new (window.AudioContext ||
        window.webkitAudioContext)();

    // The gain node is created once and stays connected to the speakers.
    const gain = Acontext.createGain();
    gain.connect(Acontext.destination);
    gain.gain.value = 0.5;

    // An OscillatorNode can only be started once, so a fresh
    // oscillator is created each time playback begins.
    let osci = null;

    const playBtn = document.getElementById('playButton');
    const freqIp = document.getElementById('frequency');
    const detuneIp = document.getElementById('detune');
    const gainIp = document.getElementById('gain');

    playBtn.addEventListener('click', () => {
        // Autoplay policies keep the context suspended until a
        // user gesture resumes it.
        if (Acontext.state === 'suspended') {
            Acontext.resume();
        }
        if (playBtn.textContent === 'Play') {
            osci = Acontext.createOscillator();
            osci.type = 'sine';
            osci.frequency.setValueAtTime(
                Number(freqIp.value), Acontext.currentTime);
            osci.detune.setValueAtTime(
                Number(detuneIp.value), Acontext.currentTime);
            gain.gain.setValueAtTime(
                Number(gainIp.value), Acontext.currentTime);
            osci.connect(gain);
            osci.start();
            playBtn.textContent = 'Stop';
        } else {
            osci.stop();
            osci.disconnect();
            osci = null;
            playBtn.textContent = 'Play';
        }
    });

    // Apply input changes immediately while the tone is playing.
    freqIp.addEventListener('input', () => {
        if (osci) {
            osci.frequency.setValueAtTime(
                Number(freqIp.value), Acontext.currentTime);
        }
    });
    detuneIp.addEventListener('input', () => {
        if (osci) {
            osci.detune.setValueAtTime(
                Number(detuneIp.value), Acontext.currentTime);
        }
    });
    gainIp.addEventListener('input', () => {
        gain.gain.setValueAtTime(
            Number(gainIp.value), Acontext.currentTime);
    });
});
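Note that an OscillatorNode is single-use: once stop() has been called it cannot be restarted, which is why the click handler above builds a fresh oscillator each time Play is pressed rather than reusing one node.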

Example 2: In this example, we build an audio-analysis feature that shows the peak frequency and average volume of an audio file uploaded from the device, along with a bar visualization while the audio plays. It uses the AudioContext, MediaElementAudioSourceNode, and AnalyserNode interfaces.
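The markup is again omitted; a minimal sketch (assumed, using only the IDs the script references) might be:

<input type="file" id="audioFileInput" accept="audio/*">
<audio id="audioPlayer" controls></audio>
<canvas id="visualizer" width="600" height="200"></canvas>
<p id="currentTime">Current Time: 0.00s</p>
<p id="peakFrequency">Peak Frequency: 0.00 Hz</p>
<p id="averageVolume">Average Volume: 0.00</p>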

const aObj =
    new (window.AudioContext ||
        window.webkitAudioContext)();
const analyseObj = aObj.createAnalyser();
const canvas =
    document.getElementById('visualizer');
const ctx = canvas.getContext('2d');

// A 256-point FFT gives frequencyBinCount (128) frequency bins.
analyseObj.fftSize = 256;
const leng = analyseObj.frequencyBinCount;
const dArr = new Uint8Array(leng);

// Route the audio element through the analyser to the speakers.
const audioPlayer = document.getElementById('audioPlayer');
const src = aObj.createMediaElementSource(audioPlayer);
src.connect(analyseObj);
analyseObj.connect(aObj.destination);

// Autoplay policies keep the context suspended until a user
// gesture; resume it when playback starts.
audioPlayer.addEventListener('play', () => {
    if (aObj.state === 'suspended') {
        aObj.resume();
    }
});

// Load the chosen file into the audio element.
document.getElementById('audioFileInput')
    .addEventListener('change', function (e) {
        const file = e.target.files[0];
        if (file) {
            audioPlayer.src = URL.createObjectURL(file);
        }
    });

// Draw one green bar per frequency bin on every animation frame.
function draw() {
    analyseObj.getByteFrequencyData(dArr);
    ctx.fillStyle = '#2c3e50';
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    const barWidth = (canvas.width / leng) * 2.5;
    let x = 0;
    dArr.forEach(function (data) {
        const barHeight = data;
        ctx.fillStyle = `rgb(0, ${barHeight + 100}, 0)`;
        ctx.fillRect(x, canvas.height - barHeight / 2,
            barWidth, barHeight / 2);
        x += barWidth + 1;
    });
    requestAnimationFrame(draw);
}

function currTimeFunction() {
    const curr = document.getElementById('currentTime');
    curr.textContent =
        `Current Time: ${audioPlayer.currentTime.toFixed(2)}s`;
}

// The loudest bin maps to a frequency of
// (index / binCount) * (sampleRate / 2), i.e. its share of the
// Nyquist range.
function peakFreqFunction() {
    analyseObj.getByteFrequencyData(dArr);
    const idx = dArr.indexOf(Math.max(...dArr));
    const freq = (idx / leng) * aObj.sampleRate / 2;
    const freqEle = document.getElementById('peakFrequency');
    freqEle.textContent = `Peak Frequency: ${freq.toFixed(2)} Hz`;
}

// Mean of all bin magnitudes (0-255) as a rough loudness measure.
function avgVolumeFunction() {
    analyseObj.getByteFrequencyData(dArr);
    const avgVol = dArr.reduce((acc, val) => acc + val, 0) / dArr.length;
    const avgVolEle = document.getElementById('averageVolume');
    avgVolEle.textContent = `Average Volume: ${avgVol.toFixed(2)}`;
}

audioPlayer.addEventListener('timeupdate', function () {
    currTimeFunction();
    peakFreqFunction();
    avgVolumeFunction();
});

draw();
currTimeFunction();
peakFreqFunction();
avgVolumeFunction();
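Because frequencyBinCount is always half of fftSize, the 256-point FFT above yields 128 bins, each covering sampleRate / 256 Hz; the peak-frequency calculation maps the loudest bin's index onto the 0 to sampleRate / 2 (Nyquist) range.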

Browser Compatibility:

The Web Audio API is supported in all modern browsers, including Chrome, Edge, Firefox, Safari, and Opera. Older WebKit-based browsers expose the API only through the prefixed webkitAudioContext constructor, which is why the examples above fall back to it.
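A simple feature-detection pattern (a minimal sketch of the fallback already used in the examples):

// Prefer the standard constructor, fall back to the prefixed one.
const AudioCtx = window.AudioContext || window.webkitAudioContext;
if (AudioCtx) {
    const context = new AudioCtx();
    // ... build the audio graph here
} else {
    console.warn('Web Audio API is not supported in this browser.');
}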

