const canvas = document.querySelector("canvas"); const ctx = canvas.getContext("2d"); const audio = document.querySelector("audio");
// Size the backing store by devicePixelRatio so the drawing stays crisp on HiDPI screens.
const initCanvas = () => {
  const size = 600;
  canvas.width = size * devicePixelRatio;
  canvas.height = size * devicePixelRatio;
  canvas.style.width = canvas.style.height = `${size}px`;
};
initCanvas();
// Draw one frame: each value in `data` becomes a radial bar around a ring,
// scaled against `maxVal`, with the hue rotated through 360° across the bars.
function draw(data, maxVal) {
  const r = canvas.width / 4 + 20 * devicePixelRatio; // inner radius of the ring
  const center = canvas.width / 2;
  ctx.clearRect(0, 0, canvas.width, canvas.height);
  const hslStep = 360 / (data.length - 1); // degrees per bar (both hue and rotation)
  const maxLen = canvas.width / 2 - r; // longest possible bar
  const minLen = 2 * devicePixelRatio; // shortest bar, also used as stroke width
  for (let i = 0; i < data.length; i++) {
    ctx.beginPath();
    const len = Math.max((data[i] / maxVal) * maxLen, minLen);
    const rotate = hslStep * i;
    ctx.strokeStyle = `hsl(${rotate}deg, 65%, 65%)`;
    ctx.lineWidth = minLen;
    const rad = (rotate * Math.PI) / 180;
    const startX = center + Math.cos(rad) * r;
    const startY = center + Math.sin(rad) * r;
    const endX = center + Math.cos(rad) * (r + len);
    const endY = center + Math.sin(rad) * (r + len);
    ctx.moveTo(startX, startY);
    ctx.lineTo(endX, endY);
    ctx.stroke();
  }
}
// Render an idle ring of minimum-length bars before any audio plays.
draw(new Array(256).fill(0), 255);
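// Web Audio state, created lazily by createAudio on the first play event.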
let isInit = false;
let analyser;
let buffer;
// Browsers only allow an AudioContext to start after a user gesture,
// so the audio graph is built lazily on the first play event.
audio.onplay = () => {
  if (isInit) return;
  createAudio();
};
const createAudio = () => {
  const audioCtx = new AudioContext();
  const source = audioCtx.createMediaElementSource(audio);
  analyser = audioCtx.createAnalyser();
  analyser.fftSize = 256; // yields frequencyBinCount = 128 bins
  buffer = new Uint8Array(analyser.frequencyBinCount);
  // Route the element's audio into the analyser (for visualization)
  // and straight to the destination (so it remains audible).
  source.connect(analyser);
  source.connect(audioCtx.destination);
  isInit = true;
};
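// Note: if the audio file is served cross-origin without CORS headers,
// createMediaElementSource outputs silence, so the analyser will only read zeros.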
const update = () => {
  requestAnimationFrame(update);
  // Nothing to read before the first play: `analyser` is still undefined,
  // so bail out and keep showing the idle ring.
  if (!isInit) return;
  analyser.getByteFrequencyData(buffer);
  // Mirror the lower two thirds of the spectrum so the ring looks symmetric.
  const offset = Math.floor((buffer.length * 2) / 3);
  const data = new Array(offset * 2);
  for (let i = 0; i < offset; i++) {
    data[i] = data[data.length - 1 - i] = buffer[i];
  }
  draw(data, 255);
};
update();
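// A minimal host page for this script might look like the following
// (assumed markup and file names; only the <canvas> and <audio> elements
// are required by the selectors at the top of this file):
//
//   <canvas></canvas>
//   <audio controls src="track.mp3"></audio>
//   <script src="visualizer.js"></script>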