mirror of https://github.com/zyqunix/tools.git
synced 2025-07-06 06:20:30 +02:00
add mic test thing
This commit is contained in:
parent d9114e245d
commit c3c2773532
3 changed files with 111 additions and 0 deletions
19 microphone/index.html Normal file
@@ -0,0 +1,19 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Mic Test</title>
    <link rel="stylesheet" href="style.css">
    <link rel="shortcut icon" href="https://rimgo.pussthecat.org/RFbdMMB.png" type="image/x-icon">
</head>
<body>
    <h1>Microphone Test</h1>
    <div class="shadow card">
        <canvas id="visualizer"></canvas>
        <button id="btn">Start Listening</button>
    </div>

    <script src="index.js"></script>
</body>
</html>
67 microphone/index.js Normal file
@@ -0,0 +1,67 @@
const btn = document.getElementById('btn');
const canvas = document.getElementById('visualizer');
const ctx = canvas.getContext('2d');

let audioContext;
let analyser;
let dataArray;
let micStream;
let animationId;
let isListening = false;

async function startMic() {
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    micStream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const source = audioContext.createMediaStreamSource(micStream);

    analyser = audioContext.createAnalyser();
    analyser.fftSize = 512;

    const bufferLength = analyser.fftSize;
    dataArray = new Uint8Array(bufferLength);

    source.connect(analyser);
    draw();
}

function stopMic() {
    micStream.getTracks().forEach(track => track.stop());
    audioContext.close();
    cancelAnimationFrame(animationId);
}

function draw() {
    animationId = requestAnimationFrame(draw);

    analyser.getByteTimeDomainData(dataArray);

    ctx.fillStyle = '#fff';
    ctx.fillRect(0, 0, canvas.width, canvas.height);

    ctx.lineWidth = 2;
    ctx.strokeStyle = '#000';
    ctx.beginPath();

    const sliceWidth = canvas.width / dataArray.length;
    let x = 0;

    for (let i = 0; i < dataArray.length; i++) {
        const v = dataArray[i] / 128.0;
        const y = v * canvas.height / 2;

        i === 0 ? ctx.moveTo(x, y) : ctx.lineTo(x, y);
        x += sliceWidth;
    }

    ctx.lineTo(canvas.width, canvas.height / 2);
    ctx.stroke();
}

btn.addEventListener('click', async () => {
    if (!isListening) {
        await startMic();
    } else {
        stopMic();
    }
    isListening = !isListening;
});
25 microphone/style.css Normal file
@@ -0,0 +1,25 @@
@import url(/global.css);

canvas {
    width: 100%;
    background-color: #fff;
    border-radius: 10px;
    pointer-events: none;
}

button {
    cursor: pointer;
    background-color: #2a2a2a;
    padding: 7px;
    color: #c0c0c0;
    border: #2c2c2c solid 2px;
    border-radius: 4px;
    margin-top: 10px;
    font-size: 16px;
}

button:hover {
    background-color: #2c2c2c;
    color: #d0d0d0;
    border: #2e2e2e solid 2px;
}