// VoiceAnalyzer / AudioVisualizer.js
// Origin: therickglenn — commit "Create AudioVisualizer.js" (005a6a5, verified)
import React, { useEffect, useRef, useState } from "react";
const AudioVisualizer = () => {
const audioContextRef = useRef(null);
const analyserRef = useRef(null);
const dataArrayRef = useRef(null);
const sourceRef = useRef(null);
const canvasRef = useRef(null);
const [permissionGranted, setPermissionGranted] = useState(false);
const [errorMessage, setErrorMessage] = useState(null);
const initializeAudio = async () => {
try {
if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
throw new Error("getUserMedia is not supported in this browser.");
}
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
setPermissionGranted(true);
setErrorMessage(null);
audioContextRef.current = new (window.AudioContext || window.webkitAudioContext)();
analyserRef.current = audioContextRef.current.createAnalyser();
sourceRef.current = audioContextRef.current.createMediaStreamSource(stream);
sourceRef.current.connect(analyserRef.current);
analyserRef.current.fftSize = 512;
const bufferLength = analyserRef.current.frequencyBinCount;
dataArrayRef.current = new Uint8Array(bufferLength);
drawWaveform();
} catch (err) {
console.error('Error accessing microphone:', err);
setErrorMessage(err.message);
}
};
useEffect(() => {
return () => {
if (audioContextRef.current) {
audioContextRef.current.close();
}
};
}, []);
const drawWaveform = () => {
if (!canvasRef.current) return;
const canvas = canvasRef.current;
const ctx = canvas.getContext('2d');
const renderFrame = () => {
requestAnimationFrame(renderFrame);
analyserRef.current.getByteTimeDomainData(dataArrayRef.current);
ctx.fillStyle = '#000';
ctx.fillRect(0, 0, canvas.width, canvas.height);
ctx.lineWidth = 2;
ctx.strokeStyle = '#00ffcc';
ctx.beginPath();
let sliceWidth = canvas.width / dataArrayRef.current.length;
let x = 0;
for (let i = 0; i < dataArrayRef.current.length; i++) {
let v = dataArrayRef.current[i] / 128.0;
let y = v * canvas.height / 2;
if (i === 0) {
ctx.moveTo(x, y);
} else {
ctx.lineTo(x, y);
}
x += sliceWidth;
}
ctx.lineTo(canvas.width, canvas.height / 2);
ctx.stroke();
};
renderFrame();
};
return (
<div className="container">
<h1>Real-Time Audio Visualizer</h1>
{!permissionGranted ? (
<button className="start-button" onClick={initializeAudio}>Start Audio Analysis</button>
) : (
<p>Analyzing audio...</p>
)}
{errorMessage && <p className="error-message">{errorMessage}</p>}
<canvas ref={canvasRef} width={600} height={300} className="visualizer"></canvas>
</div>
);
};
export default AudioVisualizer;