Add server-side metrics ring buffer, seed dashboard charts from server history

Background task samples system (CPU/RAM/GPU) and per-target (FPS/timing) metrics
every 1s into a 120-sample ring buffer (~2 min). New API endpoint
GET /system/metrics-history returns the buffer. Dashboard charts now seed from
server history on load instead of sessionStorage, surviving page refreshes.

Also removes emoji from brightness source labels.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-24 13:21:37 +03:00
parent 8f79b77fe4
commit 425deb9570
9 changed files with 210 additions and 59 deletions

View File

@@ -6,11 +6,12 @@ from datetime import datetime
from typing import Optional
import psutil
from fastapi import APIRouter, HTTPException, Query
from fastapi import APIRouter, Depends, HTTPException, Query
from pydantic import BaseModel
from wled_controller import __version__
from wled_controller.api.auth import AuthRequired
from wled_controller.api.dependencies import get_processor_manager
from wled_controller.api.schemas.system import (
DisplayInfo,
DisplayListResponse,
@@ -192,6 +193,19 @@ def get_system_performance(_: AuthRequired):
)
@router.get("/api/v1/system/metrics-history", tags=["Config"])
async def get_metrics_history(
    _: AuthRequired,
    manager=Depends(get_processor_manager),
):
    """Return the buffered system and per-target metrics history.

    The response covers roughly the last two minutes of one-second
    samples and is used by the dashboard to pre-populate its charts on
    page load, so chart history survives a browser refresh.
    """
    history = manager.metrics_history
    return history.get_history()
# ---------------------------------------------------------------------------
# ADB helpers (for Android / scrcpy engine)
# ---------------------------------------------------------------------------

View File

@@ -0,0 +1,123 @@
"""Server-side ring buffer for system and per-target metrics."""
import asyncio
from collections import deque
from datetime import datetime
from typing import Dict, Optional
from wled_controller.utils import get_logger
logger = get_logger(__name__)
MAX_SAMPLES = 120 # ~2 minutes at 1-second interval
SAMPLE_INTERVAL = 1.0 # seconds
def _collect_system_snapshot() -> dict:
    """Build one JSON-ready snapshot of host metrics (CPU/RAM, optional GPU).

    Blocking (psutil and NVML calls) — callers should run this in a
    thread pool rather than on the event loop.
    """
    import psutil

    vm = psutil.virtual_memory()
    snapshot = {
        "t": datetime.utcnow().isoformat(),
        "cpu": psutil.cpu_percent(interval=None),
        "ram_pct": vm.percent,
        "ram_used": round(vm.used / 1024 / 1024, 1),
        "ram_total": round(vm.total / 1024 / 1024, 1),
        "gpu_util": None,
        "gpu_temp": None,
    }
    # GPU stats are best-effort: reuse the NVML state initialized by the
    # system routes module; any failure leaves the GPU fields as None.
    try:
        from wled_controller.api.routes.system import _nvml, _nvml_available, _nvml_handle

        if _nvml_available:
            rates = _nvml.nvmlDeviceGetUtilizationRates(_nvml_handle)
            gpu_temp = _nvml.nvmlDeviceGetTemperature(
                _nvml_handle, _nvml.NVML_TEMPERATURE_GPU
            )
            snapshot["gpu_util"] = float(rates.gpu)
            snapshot["gpu_temp"] = float(gpu_temp)
    except Exception:
        pass
    return snapshot
class MetricsHistory:
    """In-memory ring buffer of system and per-target metric samples."""

    def __init__(self, processor_manager):
        # The manager is queried on every tick for live per-target state.
        self._manager = processor_manager
        self._system: deque = deque(maxlen=MAX_SAMPLES)
        self._targets: Dict[str, deque] = {}
        self._task: Optional[asyncio.Task] = None

    async def start(self):
        """Launch the background sampling task (no-op if already running)."""
        if self._task is not None and not self._task.done():
            return
        self._task = asyncio.create_task(self._sample_loop())
        logger.info("Metrics history sampling started")

    async def stop(self):
        """Cancel the sampling task and wait for it to wind down."""
        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass
            self._task = None
        logger.info("Metrics history sampling stopped")

    async def _sample_loop(self):
        """Take one sample every SAMPLE_INTERVAL seconds until cancelled."""
        while True:
            try:
                await self._sample()
            except asyncio.CancelledError:
                raise
            except Exception as e:
                logger.warning(f"Metrics sampling error: {e}")
            await asyncio.sleep(SAMPLE_INTERVAL)

    async def _sample(self):
        """Append one system snapshot and one snapshot per active target."""
        # psutil/NVML calls block, so collect them off the event loop.
        self._system.append(await asyncio.to_thread(_collect_system_snapshot))

        try:
            states = self._manager.get_all_target_states()
        except Exception:
            states = {}

        stamp = datetime.utcnow().isoformat()
        seen = set()
        for tid, state in states.items():
            seen.add(tid)
            buf = self._targets.setdefault(tid, deque(maxlen=MAX_SAMPLES))
            # Only record while the target is actively processing frames.
            if state.get("processing"):
                buf.append({
                    "t": stamp,
                    "fps": state.get("fps_actual"),
                    "fps_target": state.get("fps_target"),
                    "timing": state.get("timing_total_ms"),
                    "errors": state.get("errors_count", 0),
                })

        # Drop buffers for targets that are no longer registered.
        for stale in [tid for tid in self._targets if tid not in seen]:
            del self._targets[stale]

    def get_history(self) -> dict:
        """Serialize the full buffer contents for the API response."""
        return {
            "system": list(self._system),
            "targets": {tid: list(dq) for tid, dq in self._targets.items()},
        }

View File

@@ -16,6 +16,7 @@ from wled_controller.core.devices.led_client import (
from wled_controller.core.audio.audio_capture import AudioCaptureManager
from wled_controller.core.processing.live_stream_manager import LiveStreamManager
from wled_controller.core.processing.color_strip_stream_manager import ColorStripStreamManager
from wled_controller.core.processing.metrics_history import MetricsHistory
from wled_controller.core.processing.value_stream import ValueStreamManager
from wled_controller.core.capture.screen_overlay import OverlayManager
from wled_controller.core.processing.target_processor import (
@@ -97,8 +98,13 @@ class ProcessorManager:
) if value_source_store else None
self._overlay_manager = OverlayManager()
self._event_queues: List[asyncio.Queue] = []
self._metrics_history = MetricsHistory(self)
logger.info("Processor manager initialized")
@property
def metrics_history(self) -> MetricsHistory:
return self._metrics_history
# ===== SHARED CONTEXT (passed to target processors) =====
def _build_context(self) -> TargetContext:
@@ -718,6 +724,7 @@ class ProcessorManager:
async def stop_all(self):
"""Stop processing and health monitoring for all targets and devices."""
await self._metrics_history.stop()
await self.stop_health_monitoring()
# Stop all processors
@@ -761,6 +768,7 @@ class ProcessorManager:
self._health_monitoring_active = True
for device_id in self._devices:
self._start_device_health_check(device_id)
await self._metrics_history.start()
logger.info("Started health monitoring for all devices")
async def stop_health_monitoring(self):

View File

@@ -10,10 +10,9 @@ import { renderPerfSection, initPerfCharts, startPerfPolling, stopPerfPolling }
import { startAutoRefresh } from './tabs.js';
const DASHBOARD_COLLAPSED_KEY = 'dashboard_collapsed';
const FPS_HISTORY_KEY = 'dashboard_fps_history';
const MAX_FPS_SAMPLES = 30;
const MAX_FPS_SAMPLES = 120;
let _fpsHistory = _loadFpsHistory(); // { targetId: number[] }
let _fpsHistory = {}; // { targetId: number[] }
let _fpsCharts = {}; // { targetId: Chart }
let _lastRunningIds = []; // sorted target IDs from previous render
let _uptimeBase = {}; // { targetId: { seconds, timestamp } }
@@ -21,19 +20,6 @@ let _uptimeTimer = null;
let _uptimeElements = {}; // { targetId: HTMLElement } — cached DOM refs
let _metricsElements = new Map();
/** Restore the per-target FPS history from sessionStorage, or {} on any failure. */
function _loadFpsHistory() {
    try {
        const stored = sessionStorage.getItem(FPS_HISTORY_KEY);
        return stored ? JSON.parse(stored) : {};
    } catch {
        return {};
    }
}
/** Best-effort persist of the FPS history into sessionStorage. */
function _saveFpsHistory() {
    try {
        sessionStorage.setItem(FPS_HISTORY_KEY, JSON.stringify(_fpsHistory));
    } catch {
        // Quota or private-mode failures are non-fatal.
    }
}
function _pushFps(targetId, value) {
if (!_fpsHistory[targetId]) _fpsHistory[targetId] = [];
_fpsHistory[targetId].push(value);
@@ -120,8 +106,26 @@ function _createFpsChart(canvasId, history, fpsTarget) {
});
}
function _initFpsCharts(runningTargetIds) {
async function _initFpsCharts(runningTargetIds) {
_destroyFpsCharts();
// Seed FPS history from server ring buffer on first load
if (Object.keys(_fpsHistory).length === 0 && runningTargetIds.length > 0) {
try {
const resp = await fetch(`${API_BASE}/system/metrics-history`, { headers: getHeaders() });
if (resp.ok) {
const data = await resp.json();
const serverTargets = data.targets || {};
for (const id of runningTargetIds) {
const samples = serverTargets[id] || [];
_fpsHistory[id] = samples.map(s => s.fps).filter(v => v != null);
}
}
} catch {
// Silently ignore — charts will fill from polling
}
}
// Clean up history for targets that are no longer running
for (const id of Object.keys(_fpsHistory)) {
if (!runningTargetIds.includes(id)) delete _fpsHistory[id];
@@ -133,7 +137,7 @@ function _initFpsCharts(runningTargetIds) {
const fpsTarget = parseFloat(canvas.dataset.fpsTarget) || 30;
_fpsCharts[id] = _createFpsChart(`dashboard-fps-${id}`, history, fpsTarget);
}
_saveFpsHistory();
_cacheMetricsElements(runningTargetIds);
}
@@ -194,7 +198,7 @@ function _updateRunningMetrics(enrichedRunning) {
}
}
}
_saveFpsHistory();
}
function _updateProfilesInPlace(profiles) {
@@ -412,7 +416,7 @@ export async function loadDashboard(forceFullRender = false) {
${_sectionContent('perf', renderPerfSection())}
</div>
<div class="dashboard-dynamic">${dynamicHtml}</div>`;
initPerfCharts();
await initPerfCharts();
} else {
const dynamic = container.querySelector('.dashboard-dynamic');
if (dynamic.innerHTML !== dynamicHtml) {
@@ -421,7 +425,7 @@ export async function loadDashboard(forceFullRender = false) {
}
_lastRunningIds = runningIds;
_cacheUptimeElements();
_initFpsCharts(runningIds);
await _initFpsCharts(runningIds);
_startUptimeTimer();
startPerfPolling();

View File

@@ -1,35 +1,19 @@
/**
* Performance charts — real-time CPU, RAM, GPU usage with Chart.js.
* History is seeded from the server-side ring buffer on init.
*/
import { API_BASE, getHeaders } from '../core/api.js';
import { t } from '../core/i18n.js';
import { dashboardPollInterval } from '../core/state.js';
const MAX_SAMPLES = 60;
const STORAGE_KEY = 'perf_history';
const MAX_SAMPLES = 120;
let _pollTimer = null;
let _charts = {}; // { cpu: Chart, ram: Chart, gpu: Chart }
let _history = _loadHistory();
let _history = { cpu: [], ram: [], gpu: [] };
let _hasGpu = null; // null = unknown, true/false after first fetch
/** Restore perf history from sessionStorage; fall back to empty series on any failure. */
function _loadHistory() {
    const empty = { cpu: [], ram: [], gpu: [] };
    try {
        const raw = sessionStorage.getItem(STORAGE_KEY);
        if (!raw) return empty;
        const parsed = JSON.parse(raw);
        return parsed.cpu && parsed.ram && parsed.gpu ? parsed : empty;
    } catch {
        return empty;
    }
}
/** Best-effort persist of the perf history into sessionStorage. */
function _saveHistory() {
    try {
        sessionStorage.setItem(STORAGE_KEY, JSON.stringify(_history));
    } catch {
        // Storage may be unavailable (quota, private mode) — ignore.
    }
}
/** Returns the static HTML for the perf section (canvas placeholders). */
export function renderPerfSection() {
return `<div class="perf-charts-grid">
@@ -88,13 +72,22 @@ function _createChart(canvasId, color, fillColor) {
});
}
/** Initialize Chart.js instances on the already-mounted canvases. */
export function initPerfCharts() {
_destroyCharts();
_charts.cpu = _createChart('perf-chart-cpu', '#2196F3', 'rgba(33,150,243,0.15)');
_charts.ram = _createChart('perf-chart-ram', '#4CAF50', 'rgba(76,175,80,0.15)');
_charts.gpu = _createChart('perf-chart-gpu', '#FF9800', 'rgba(255,152,0,0.15)');
// Restore any existing history data into the freshly created charts
/** Seed charts from server-side metrics history. */
async function _seedFromServer() {
try {
const resp = await fetch(`${API_BASE}/system/metrics-history`, { headers: getHeaders() });
if (!resp.ok) return;
const data = await resp.json();
const samples = data.system || [];
_history.cpu = samples.map(s => s.cpu).filter(v => v != null);
_history.ram = samples.map(s => s.ram_pct).filter(v => v != null);
_history.gpu = samples.map(s => s.gpu_util).filter(v => v != null);
// Detect GPU availability from history
if (_history.gpu.length > 0) {
_hasGpu = true;
}
for (const key of ['cpu', 'ram', 'gpu']) {
if (_charts[key] && _history[key].length > 0) {
_charts[key].data.datasets[0].data = [..._history[key]];
@@ -102,6 +95,18 @@ export function initPerfCharts() {
_charts[key].update();
}
}
} catch {
// Silently ignore — charts will fill from polling
}
}
/** (Re)create the CPU/RAM/GPU Chart.js instances, then seed them from server history. */
export async function initPerfCharts() {
    _destroyCharts();
    const specs = [
        ['cpu', 'perf-chart-cpu', '#2196F3', 'rgba(33,150,243,0.15)'],
        ['ram', 'perf-chart-ram', '#4CAF50', 'rgba(76,175,80,0.15)'],
        ['gpu', 'perf-chart-gpu', '#FF9800', 'rgba(255,152,0,0.15)'],
    ];
    for (const [key, canvasId, line, fill] of specs) {
        _charts[key] = _createChart(canvasId, line, fill);
    }
    await _seedFromServer();
}
function _destroyCharts() {
@@ -158,8 +163,6 @@ async function _fetchPerformance() {
card.appendChild(noGpu);
}
}
_saveHistory();
} catch {
// Silently ignore fetch errors (e.g., network issues, tab hidden)
}
@@ -176,5 +179,4 @@ export function stopPerfPolling() {
clearInterval(_pollTimer);
_pollTimer = null;
}
_saveHistory();
}

View File

@@ -424,7 +424,7 @@
"kc.pattern_template": "Pattern Template:",
"kc.pattern_template.hint": "Select the rectangle pattern to use for color extraction",
"kc.pattern_template.none": "-- Select a pattern template --",
"kc.brightness_vs": "🔢 Brightness Source:",
"kc.brightness_vs": "Brightness Source:",
"kc.brightness_vs.hint": "Optional value source that dynamically controls brightness each frame (multiplied with the manual brightness slider)",
"kc.brightness_vs.none": "None (manual brightness only)",
"kc.created": "Key colors target created successfully",
@@ -810,7 +810,7 @@
"value_source.deleted": "Value source deleted",
"value_source.delete.confirm": "Are you sure you want to delete this value source?",
"value_source.error.name_required": "Please enter a name",
"targets.brightness_vs": "🔢 Brightness Source:",
"targets.brightness_vs": "Brightness Source:",
"targets.brightness_vs.hint": "Optional value source that dynamically controls brightness each frame (overrides device brightness)",
"targets.brightness_vs.none": "None (device brightness)"
}

View File

@@ -424,7 +424,7 @@
"kc.pattern_template": "Шаблон Паттерна:",
"kc.pattern_template.hint": "Выберите шаблон прямоугольников для извлечения цветов",
"kc.pattern_template.none": "-- Выберите шаблон паттерна --",
"kc.brightness_vs": "🔢 Источник Яркости:",
"kc.brightness_vs": "Источник Яркости:",
"kc.brightness_vs.hint": "Опциональный источник значений, динамически управляющий яркостью каждый кадр (умножается на ручной слайдер яркости)",
"kc.brightness_vs.none": "Нет (только ручная яркость)",
"kc.created": "Цель ключевых цветов успешно создана",
@@ -810,7 +810,7 @@
"value_source.deleted": "Источник значений удалён",
"value_source.delete.confirm": "Удалить этот источник значений?",
"value_source.error.name_required": "Введите название",
"targets.brightness_vs": "🔢 Источник яркости:",
"targets.brightness_vs": "Источник яркости:",
"targets.brightness_vs.hint": "Необязательный источник значений для динамического управления яркостью каждый кадр (переопределяет яркость устройства)",
"targets.brightness_vs.none": "Нет (яркость устройства)"
}

View File

@@ -34,7 +34,7 @@
<div class="form-group">
<div class="label-row">
<label for="kc-editor-brightness-vs" data-i18n="kc.brightness_vs">🔢 Brightness Source:</label>
<label for="kc-editor-brightness-vs" data-i18n="kc.brightness_vs">Brightness Source:</label>
<button type="button" class="hint-toggle" onclick="toggleHint(this)" title="?" data-i18n-aria-label="aria.hint">?</button>
</div>
<small class="input-hint" style="display:none" data-i18n="kc.brightness_vs.hint">Optional value source that dynamically controls brightness each frame (multiplied with the manual brightness slider)</small>

View File

@@ -35,7 +35,7 @@
<div class="form-group">
<div class="label-row">
<label for="target-editor-brightness-vs" data-i18n="targets.brightness_vs">🔢 Brightness Source:</label>
<label for="target-editor-brightness-vs" data-i18n="targets.brightness_vs">Brightness Source:</label>
<button type="button" class="hint-toggle" onclick="toggleHint(this)" title="?" data-i18n-aria-label="aria.hint">?</button>
</div>
<small class="input-hint" style="display:none" data-i18n="targets.brightness_vs.hint">Optional value source that dynamically controls brightness each frame (overrides device brightness)</small>