Rewrite HAOS integration: target-centric architecture with KC color sensors

- Rewrite integration to target-centric model: each picture target becomes
  a HA device under a server hub with switch, FPS, and status sensors
- Replace KC light entities with color sensors (hex state + RGB attributes)
  for better automation support via WebSocket real-time updates
- Add WebSocket manager for Key Colors color streaming
- Add KC per-stage timing metrics (calc_colors, broadcast) with rolling avg
- Fix KC timing fields missing from API responses by adding them to the Pydantic schema
- Make start/stop processing idempotent to prevent intermittent 404 errors
- Add HAOS localization support (en, ru) using translation_key system
- Rename integration from "WLED Screen Controller" to "LED Screen Controller"
- Remove obsolete select.py (display select) and README.md

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-17 13:01:40 +03:00
parent e92fe4eb0a
commit 67da014684
19 changed files with 772 additions and 746 deletions

View File

@@ -74,13 +74,10 @@ def _process_frame(capture, border_width, pixel_mapper, previous_colors, smoothi
def _process_kc_frame(capture, rectangles, calc_fn, previous_colors, smoothing):
"""All CPU-bound work for one KC frame (runs in thread pool).
Args:
capture: ScreenCapture from live_stream.get_latest_frame()
rectangles: List of pattern rectangles to extract colors from
calc_fn: Color calculation function (average/median/dominant)
previous_colors: Previous frame colors for smoothing
smoothing: Smoothing factor (0-1)
Returns (colors, timing_ms) where colors is a dict {name: (r, g, b)}
and timing_ms is a dict with per-stage timing in milliseconds.
"""
t0 = time.perf_counter()
img = capture.image
h, w = img.shape[:2]
colors = {}
@@ -95,6 +92,7 @@ def _process_kc_frame(capture, rectangles, calc_fn, previous_colors, smoothing):
px_h = min(px_h, h - px_y)
sub_img = img[px_y:px_y + px_h, px_x:px_x + px_w]
colors[rect.name] = calc_fn(sub_img)
t1 = time.perf_counter()
if previous_colors and smoothing > 0:
for name, color in colors.items():
if name in previous_colors:
@@ -105,7 +103,13 @@ def _process_kc_frame(capture, rectangles, calc_fn, previous_colors, smoothing):
int(color[1] * (1 - alpha) + prev[1] * alpha),
int(color[2] * (1 - alpha) + prev[2] * alpha),
)
return colors
t2 = time.perf_counter()
timing_ms = {
"calc_colors": (t1 - t0) * 1000,
"smooth": (t2 - t1) * 1000,
"total": (t2 - t0) * 1000,
}
return colors, timing_ms
@dataclass
class ProcessingSettings:
@@ -137,11 +141,15 @@ class ProcessingMetrics:
fps_potential: float = 0.0
fps_current: int = 0
# Per-stage timing (ms), averaged over last 10 frames
# LED targets
timing_extract_ms: float = 0.0
timing_map_leds_ms: float = 0.0
timing_smooth_ms: float = 0.0
timing_send_ms: float = 0.0
timing_total_ms: float = 0.0
# KC targets
timing_calc_colors_ms: float = 0.0
timing_broadcast_ms: float = 0.0
@dataclass
@@ -541,7 +549,8 @@ class ProcessorManager:
state = self._targets[target_id]
if state.is_running:
raise RuntimeError(f"Processing already running for target {target_id}")
logger.debug(f"Processing already running for target {target_id}")
return
# Enforce one-target-per-device constraint
for other_id, other in self._targets.items():
@@ -1230,7 +1239,8 @@ class ProcessorManager:
state = self._kc_targets[target_id]
if state.is_running:
raise ValueError(f"KC target {target_id} is already running")
logger.debug(f"KC target {target_id} is already running")
return
if not state.picture_source_id:
raise ValueError(f"KC target {target_id} has no picture source assigned")
@@ -1324,6 +1334,7 @@ class ProcessorManager:
frame_time = 1.0 / target_fps
fps_samples: List[float] = []
timing_samples: collections.deque = collections.deque(maxlen=10)
prev_frame_time_stamp = time.time()
prev_capture = None # Track previous ScreenCapture for change detection
last_broadcast_time = 0.0 # Timestamp of last WS broadcast (for keepalive)
@@ -1367,7 +1378,7 @@ class ProcessorManager:
prev_capture = capture
# CPU-bound work in thread pool
colors = await asyncio.to_thread(
colors, frame_timing = await asyncio.to_thread(
_process_kc_frame,
capture, rectangles, calc_fn,
state.previous_colors, smoothing,
@@ -1377,10 +1388,21 @@ class ProcessorManager:
state.latest_colors = dict(colors)
# Broadcast to WebSocket clients
t_broadcast_start = time.perf_counter()
await self._broadcast_kc_colors(target_id, colors)
broadcast_ms = (time.perf_counter() - t_broadcast_start) * 1000
last_broadcast_time = time.time()
send_timestamps.append(last_broadcast_time)
# Per-stage timing (rolling average over last 10 frames)
frame_timing["broadcast"] = broadcast_ms
timing_samples.append(frame_timing)
n = len(timing_samples)
state.metrics.timing_calc_colors_ms = sum(s["calc_colors"] for s in timing_samples) / n
state.metrics.timing_smooth_ms = sum(s["smooth"] for s in timing_samples) / n
state.metrics.timing_broadcast_ms = sum(s["broadcast"] for s in timing_samples) / n
state.metrics.timing_total_ms = sum(s["total"] for s in timing_samples) / n + broadcast_ms
# Update metrics
state.metrics.frames_processed += 1
state.metrics.last_update = datetime.utcnow()
@@ -1475,6 +1497,10 @@ class ProcessorManager:
"frames_skipped": metrics.frames_skipped if state.is_running else None,
"frames_keepalive": metrics.frames_keepalive if state.is_running else None,
"fps_current": metrics.fps_current if state.is_running else None,
"timing_calc_colors_ms": round(metrics.timing_calc_colors_ms, 1) if state.is_running else None,
"timing_smooth_ms": round(metrics.timing_smooth_ms, 1) if state.is_running else None,
"timing_broadcast_ms": round(metrics.timing_broadcast_ms, 1) if state.is_running else None,
"timing_total_ms": round(metrics.timing_total_ms, 1) if state.is_running else None,
"last_update": metrics.last_update,
"errors": [metrics.last_error] if metrics.last_error else [],
}