Add real-time audio spectrum test for audio sources and templates
- Add WebSocket endpoints for live audio spectrum streaming at ~20Hz
- Audio source test: resolves device/channel, shares stream via ref-counting
- Audio template test: includes device picker dropdown for selecting input
- Canvas-based 64-band spectrum visualizer with falling peaks and beat flash
- Channel-aware: mono sources show left/right/mixed spectrum correctly

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -10,10 +10,10 @@
|
||||
* This module manages the editor modal and API operations.
|
||||
*/
|
||||
|
||||
import { _cachedAudioSources, set_cachedAudioSources, _cachedAudioTemplates } from '../core/state.js';
|
||||
import { fetchWithAuth, escapeHtml } from '../core/api.js';
|
||||
import { _cachedAudioSources, set_cachedAudioSources, _cachedAudioTemplates, apiKey } from '../core/state.js';
|
||||
import { API_BASE, fetchWithAuth, escapeHtml } from '../core/api.js';
|
||||
import { t } from '../core/i18n.js';
|
||||
import { showToast, showConfirm } from '../core/ui.js';
|
||||
import { showToast, showConfirm, lockBody, unlockBody } from '../core/ui.js';
|
||||
import { Modal } from '../core/modal.js';
|
||||
import { loadPictureSources } from './streams.js';
|
||||
|
||||
@@ -236,3 +236,170 @@ function _loadAudioTemplates(selectedId) {
|
||||
`<option value="${t.id}"${t.id === selectedId ? ' selected' : ''}>${escapeHtml(t.name)} (${t.engine_type.toUpperCase()})</option>`
|
||||
).join('');
|
||||
}
|
||||
|
||||
// ── Audio Source Test (real-time spectrum) ────────────────────

// Number of frequency bands drawn by the spectrum visualizer.
const NUM_BANDS = 64;
const PEAK_DECAY = 0.02; // peak drop per frame
const BEAT_FLASH_DECAY = 0.06; // beat flash fade per frame

// Live WebSocket delivering spectrum frames; null when no test is running.
let _testAudioWs = null;
// requestAnimationFrame handle for the render loop; null when stopped.
let _testAudioAnimFrame = null;
// Most recent JSON payload received over the WebSocket (spectrum/rms/peak/beat).
let _testAudioLatest = null;
// Per-band falling-peak positions, normalized 0..1.
let _testAudioPeaks = new Float32Array(NUM_BANDS);
// Current beat-flash intensity (0..1); decays by BEAT_FLASH_DECAY each frame.
let _testBeatFlash = 0;

// Modal hosting the spectrum canvas and the RMS/peak/beat stat readouts.
const testAudioModal = new Modal('test-audio-source-modal', { backdrop: true, lock: true });
|
||||
|
||||
/**
 * Open the audio-source test modal and stream a live spectrum for the
 * given source over a WebSocket, rendering at animation-frame rate.
 *
 * @param {string|number} sourceId - ID of the audio source to test.
 */
export function testAudioSource(sourceId) {
  // Tear down any previous test session first so re-opening the modal
  // quickly never leaks the old WebSocket or animation frame.
  _cleanupTest();

  const statusEl = document.getElementById('audio-test-status');
  if (statusEl) {
    statusEl.textContent = t('audio_source.test.connecting');
    statusEl.style.display = '';
  }

  // Reset state
  _testAudioLatest = null;
  _testAudioPeaks.fill(0);
  _testBeatFlash = 0;

  // Stat readouts may be absent if the modal markup changed — guard each.
  const rmsEl = document.getElementById('audio-test-rms');
  if (rmsEl) rmsEl.textContent = '---';
  const peakEl = document.getElementById('audio-test-peak');
  if (peakEl) peakEl.textContent = '---';
  const beatDot = document.getElementById('audio-test-beat-dot');
  if (beatDot) beatDot.classList.remove('active');

  testAudioModal.open();

  // Size canvas to container
  const canvas = document.getElementById('audio-test-canvas');
  if (canvas) _sizeCanvas(canvas);

  // Connect WebSocket
  const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
  const wsUrl = `${protocol}//${window.location.host}${API_BASE}/audio-sources/${sourceId}/test/ws?token=${encodeURIComponent(apiKey)}`;

  try {
    _testAudioWs = new WebSocket(wsUrl);

    _testAudioWs.onopen = () => {
      if (statusEl) statusEl.style.display = 'none';
    };

    _testAudioWs.onmessage = (event) => {
      try {
        _testAudioLatest = JSON.parse(event.data);
      } catch {
        // Ignore a malformed frame; the next one will overwrite it.
      }
    };

    _testAudioWs.onclose = () => {
      _testAudioWs = null;
    };

    _testAudioWs.onerror = () => {
      showToast(t('audio_source.test.error'), 'error');
      _cleanupTest();
    };
  } catch {
    showToast(t('audio_source.test.error'), 'error');
    _cleanupTest();
    return;
  }

  // Start render loop
  _testAudioAnimFrame = requestAnimationFrame(_renderLoop);
}
|
||||
|
||||
/**
 * Close the audio-source test modal. The streaming session is torn down
 * first so neither the WebSocket nor the render loop outlives the modal.
 */
export function closeTestAudioSourceModal() {
  // Stop streaming/rendering before dismissing the UI.
  _cleanupTest();
  testAudioModal.forceClose();
}
|
||||
|
||||
/**
 * Stop the spectrum test: cancel the pending animation frame, close the
 * WebSocket (detaching its onclose handler first so closing does not
 * re-enter this path), and drop the last received frame.
 */
function _cleanupTest() {
  if (_testAudioAnimFrame) {
    cancelAnimationFrame(_testAudioAnimFrame);
    _testAudioAnimFrame = null;
  }

  const ws = _testAudioWs;
  if (ws) {
    _testAudioWs = null;
    ws.onclose = null;
    ws.close();
  }

  _testAudioLatest = null;
}
|
||||
|
||||
/**
 * Resize the spectrum canvas backing store for the current
 * devicePixelRatio so rendering stays crisp on hi-DPI screens.
 * The CSS size is pinned explicitly; without it the element's on-screen
 * width would scale with the dpr-multiplied backing-store width.
 *
 * @param {HTMLCanvasElement} canvas - The spectrum canvas element.
 */
function _sizeCanvas(canvas) {
  const rect = canvas.parentElement.getBoundingClientRect();
  const dpr = window.devicePixelRatio || 1;
  canvas.width = rect.width * dpr;
  canvas.height = 200 * dpr;
  canvas.style.width = `${rect.width}px`;
  canvas.style.height = '200px';
  // Scale the (freshly reset) context so draw code works in CSS pixels.
  canvas.getContext('2d').scale(dpr, dpr);
}
|
||||
|
||||
/**
 * Per-frame driver: draw the current spectrum, then schedule the next
 * frame for as long as the test modal remains open.
 */
function _renderLoop() {
  _renderAudioSpectrum();
  if (!testAudioModal.isOpen) return;
  _testAudioAnimFrame = requestAnimationFrame(_renderLoop);
}
|
||||
|
||||
/**
 * Draw one frame of the 64-band spectrum: beat-flash background,
 * value-colored bars (green → yellow → red), falling peak caps, and the
 * RMS / peak / beat stat readouts.
 *
 * Reads the latest WebSocket payload from `_testAudioLatest`; does
 * nothing if no frame has arrived yet.
 */
function _renderAudioSpectrum() {
  const canvas = document.getElementById('audio-test-canvas');
  if (!canvas) return;

  const ctx = canvas.getContext('2d');
  const dpr = window.devicePixelRatio || 1;
  const w = canvas.width / dpr;
  const h = canvas.height / dpr;

  // Reset transform for clearing
  ctx.setTransform(dpr, 0, 0, dpr, 0, 0);
  ctx.clearRect(0, 0, w, h);

  const data = _testAudioLatest;
  if (!data || !data.spectrum) return;

  const spectrum = data.spectrum;
  const gap = 1;
  const barWidth = (w - gap * (NUM_BANDS - 1)) / NUM_BANDS;

  // Beat flash background; beat_intensity may be absent → treat as 0
  // so NaN never reaches the flash alpha.
  if (data.beat) _testBeatFlash = Math.min(1.0, (data.beat_intensity ?? 0) + 0.3);
  if (_testBeatFlash > 0) {
    ctx.fillStyle = `rgba(255, 255, 255, ${_testBeatFlash * 0.08})`;
    ctx.fillRect(0, 0, w, h);
    _testBeatFlash = Math.max(0, _testBeatFlash - BEAT_FLASH_DECAY);
  }

  for (let i = 0; i < NUM_BANDS; i++) {
    // Guard short spectra: a missing band renders as 0, not NaN geometry.
    const val = Math.min(1, spectrum[i] ?? 0);
    const barHeight = val * h;
    const x = i * (barWidth + gap);
    const y = h - barHeight;

    // Bar color: green → yellow → red based on value
    const hue = (1 - val) * 120;
    ctx.fillStyle = `hsl(${hue}, 85%, 50%)`;
    ctx.fillRect(x, y, barWidth, barHeight);

    // Falling peak indicator
    if (val > _testAudioPeaks[i]) {
      _testAudioPeaks[i] = val;
    } else {
      _testAudioPeaks[i] = Math.max(0, _testAudioPeaks[i] - PEAK_DECAY);
    }
    const peakY = h - _testAudioPeaks[i] * h;
    const peakHue = (1 - _testAudioPeaks[i]) * 120;
    ctx.fillStyle = `hsl(${peakHue}, 90%, 70%)`;
    ctx.fillRect(x, peakY, barWidth, 2);
  }

  // Update stats — elements may be absent if the modal markup changed,
  // and rms/peak may be missing from a frame (default to 0, not "NaN%").
  const rmsEl = document.getElementById('audio-test-rms');
  if (rmsEl) rmsEl.textContent = ((data.rms ?? 0) * 100).toFixed(1) + '%';
  const peakEl = document.getElementById('audio-test-peak');
  if (peakEl) peakEl.textContent = ((data.peak ?? 0) * 100).toFixed(1) + '%';
  const beatDot = document.getElementById('audio-test-beat-dot');
  if (beatDot) beatDot.classList.toggle('active', Boolean(data.beat));
}
|
||||
|
||||
Reference in New Issue
Block a user