// pages/microphone/index.js
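// Microphone waveform visualizer: captures audio from the user's microphone
// with the Web Audio API and draws the live time-domain signal onto a canvas.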

const btn = document.getElementById('btn');
const canvas = document.getElementById('visualizer');
const ctx = canvas.getContext('2d');
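// Shared audio and animation state, created in startMic() and released in stopMic().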
let audioContext;
let analyser;
let dataArray;
let micStream;
let animationId;
let isListening = false;
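// Request microphone access, connect the stream to an AnalyserNode, and start drawing.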
async function startMic() {
  audioContext = new (window.AudioContext || window.webkitAudioContext)();
  micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const source = audioContext.createMediaStreamSource(micStream);
  analyser = audioContext.createAnalyser();
  analyser.fftSize = 512;
  // getByteTimeDomainData writes one byte per sample, so the buffer spans fftSize entries.
  const bufferLength = analyser.fftSize;
  dataArray = new Uint8Array(bufferLength);
  source.connect(analyser);
  draw();
}
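// Stop the microphone tracks, close the audio context, and halt the animation loop.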
function stopMic() {
  micStream.getTracks().forEach(track => track.stop());
  audioContext.close();
  cancelAnimationFrame(animationId);
}
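// Draw one frame of the waveform and schedule the next via requestAnimationFrame.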
function draw() {
  animationId = requestAnimationFrame(draw);
  analyser.getByteTimeDomainData(dataArray);
  // Clear the previous frame before stroking the new waveform.
  ctx.fillStyle = '#313244';
  ctx.fillRect(0, 0, canvas.width, canvas.height);
  ctx.lineWidth = 2;
  ctx.strokeStyle = '#fff';
  ctx.beginPath();
  const sliceWidth = canvas.width / dataArray.length;
  let x = 0;
  for (let i = 0; i < dataArray.length; i++) {
    // Samples are unsigned bytes centred on 128; scale them to the canvas height.
    const v = dataArray[i] / 128.0;
    const y = v * canvas.height / 2;
    if (i === 0) {
      ctx.moveTo(x, y);
    } else {
      ctx.lineTo(x, y);
    }
    x += sliceWidth;
  }
  ctx.lineTo(canvas.width, canvas.height / 2);
  ctx.stroke();
}
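// Toggle between listening and stopped each time the button is clicked.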
btn.addEventListener('click', async () => {
  if (!isListening) {
    try {
      await startMic();
    } catch (err) {
      // Permission denied or no input device: log the error and stay stopped.
      console.error('Could not start the microphone:', err);
      return;
    }
  } else {
    stopMic();
  }
  isListening = !isListening;
});