WGC capture fixes + high-resolution timer pacing for all loops

- Fix WGC capture_frame() returning stale frames (80k "frames" in 2s)
  by tracking new-frame events; return None when no new frame arrived
- Add draw_border config passthrough with Win11 22H2+ platform check
- Add high_resolution_timer() utility (timeBeginPeriod/EndPeriod)
- Switch all processing loops from time.time() to time.perf_counter()
- Wrap all loops with high_resolution_timer() for ~1ms sleep precision
- Add animation speed badges to static/gradient color strip cards

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-21 01:23:56 +03:00
parent 5004992f26
commit 84f063eee9
8 changed files with 514 additions and 444 deletions

View File

@@ -43,14 +43,22 @@ class WGCCaptureStream(CaptureStream):
try:
capture_cursor = self.config.get("capture_cursor", False)
draw_border = self.config.get("draw_border", None)
# WGC uses 1-based monitor indexing
wgc_monitor_index = self.display_index + 1
self._capture_instance = self._wgc.WindowsCapture(
# draw_border toggling requires Windows 11 22H2+ (build 22621+).
# On older builds, passing any value crashes the capture session,
# so we only pass it when the platform supports it.
wgc_kwargs = dict(
cursor_capture=capture_cursor,
monitor_index=wgc_monitor_index,
)
if draw_border is not None and self._supports_border_toggle():
wgc_kwargs["draw_border"] = draw_border
self._capture_instance = self._wgc.WindowsCapture(**wgc_kwargs)
def on_frame_arrived(frame, capture_control):
try:
@@ -101,6 +109,16 @@ class WGCCaptureStream(CaptureStream):
logger.error(f"Failed to initialize WGC for display {self.display_index}: {e}", exc_info=True)
raise RuntimeError(f"Failed to initialize WGC for display {self.display_index}: {e}")
@staticmethod
def _supports_border_toggle() -> bool:
"""Check if the platform supports WGC border toggle (Windows 11 22H2+, build 22621+)."""
try:
import platform
build = int(platform.version().split(".")[2])
return build >= 22621
except Exception:
return False
def _cleanup_internal(self) -> None:
"""Internal cleanup helper."""
if self._capture_control:
@@ -137,12 +155,15 @@ class WGCCaptureStream(CaptureStream):
self.initialize()
try:
# Only return a frame when the callback has delivered a new one
if not self._frame_event.is_set():
return None
with self._frame_lock:
if self._latest_frame is None:
raise RuntimeError(
f"No frame available yet for display {self.display_index}."
)
return None
frame = self._latest_frame
self._frame_event.clear()
logger.debug(
f"WGC captured display {self.display_index}: "
@@ -208,7 +229,7 @@ class WGCEngine(CaptureEngine):
def get_default_config(cls) -> Dict[str, Any]:
    """Return the default configuration for the WGC capture engine.

    ``draw_border`` defaults to None, meaning "use the OS default"; the
    kwarg is only forwarded to WindowsCapture when explicitly set AND the
    platform supports toggling it (Windows 11 22H2+, build 22621+).
    """
    return {
        "capture_cursor": False,
        # None = OS default; False = hide border (Win 11 22H2+ only).
        # (The stale duplicate "draw_border": False entry from the old
        # revision is removed — duplicate dict keys silently shadow.)
        "draw_border": None,
    }
@classmethod

View File

@@ -22,6 +22,7 @@ from wled_controller.core.capture.calibration import CalibrationConfig, PixelMap
from wled_controller.core.capture.screen_capture import extract_border_pixels
from wled_controller.core.processing.live_stream import LiveStream
from wled_controller.utils import get_logger
from wled_controller.utils.timer import high_resolution_timer
logger = get_logger(__name__)
@@ -259,131 +260,132 @@ class PictureColorStripStream(ColorStripStream):
"""Background thread: poll source, process, cache colors."""
cached_frame = None
while self._running:
loop_start = time.perf_counter()
fps = self._fps
frame_time = 1.0 / fps if fps > 0 else 1.0
with high_resolution_timer():
while self._running:
loop_start = time.perf_counter()
fps = self._fps
frame_time = 1.0 / fps if fps > 0 else 1.0
try:
frame = self._live_stream.get_latest_frame()
try:
frame = self._live_stream.get_latest_frame()
if frame is None or frame is cached_frame:
if frame is None or frame is cached_frame:
if (
frame is not None
and self._frame_interpolation
and self._interp_from is not None
and self._interp_to is not None
):
t = min(1.0, (loop_start - self._interp_start) / self._interp_duration)
alpha = int(t * 256)
led_colors = (
(256 - alpha) * self._interp_from.astype(np.uint16)
+ alpha * self._interp_to.astype(np.uint16)
) >> 8
led_colors = led_colors.astype(np.uint8)
if self._saturation != 1.0:
led_colors = _apply_saturation(led_colors, self._saturation)
if self._gamma != 1.0:
led_colors = self._gamma_lut[led_colors]
if self._brightness != 1.0:
led_colors = np.clip(
led_colors.astype(np.float32) * self._brightness, 0, 255
).astype(np.uint8)
with self._colors_lock:
self._latest_colors = led_colors
elapsed = time.perf_counter() - loop_start
time.sleep(max(frame_time - elapsed, 0.001))
continue
interval = (
loop_start - self._last_capture_time
if self._last_capture_time > 0
else frame_time
)
self._last_capture_time = loop_start
cached_frame = frame
t0 = time.perf_counter()
calibration = self._calibration
border_pixels = extract_border_pixels(frame, calibration.border_width)
t1 = time.perf_counter()
led_colors = self._pixel_mapper.map_border_to_leds(border_pixels)
t2 = time.perf_counter()
# Pad or truncate to match the declared led_count
target_count = self._led_count
if target_count > 0 and len(led_colors) != target_count:
if len(led_colors) < target_count:
pad = np.zeros((target_count - len(led_colors), 3), dtype=np.uint8)
led_colors = np.concatenate([led_colors, pad])
else:
led_colors = led_colors[:target_count]
# Update interpolation buffers (raw colors, before corrections)
if self._frame_interpolation:
self._interp_from = self._interp_to
self._interp_to = led_colors.copy()
self._interp_start = loop_start
self._interp_duration = max(interval, 0.001)
# Temporal smoothing
smoothing = self._smoothing
if (
frame is not None
and self._frame_interpolation
and self._interp_from is not None
and self._interp_to is not None
self._previous_colors is not None
and smoothing > 0
and len(self._previous_colors) == len(led_colors)
):
t = min(1.0, (loop_start - self._interp_start) / self._interp_duration)
alpha = int(t * 256)
alpha = int(smoothing * 256)
led_colors = (
(256 - alpha) * self._interp_from.astype(np.uint16)
+ alpha * self._interp_to.astype(np.uint16)
(256 - alpha) * led_colors.astype(np.uint16)
+ alpha * self._previous_colors.astype(np.uint16)
) >> 8
led_colors = led_colors.astype(np.uint8)
if self._saturation != 1.0:
led_colors = _apply_saturation(led_colors, self._saturation)
if self._gamma != 1.0:
led_colors = self._gamma_lut[led_colors]
if self._brightness != 1.0:
led_colors = np.clip(
led_colors.astype(np.float32) * self._brightness, 0, 255
).astype(np.uint8)
with self._colors_lock:
self._latest_colors = led_colors
elapsed = time.perf_counter() - loop_start
time.sleep(max(frame_time - elapsed, 0.001))
continue
t3 = time.perf_counter()
interval = (
loop_start - self._last_capture_time
if self._last_capture_time > 0
else frame_time
)
self._last_capture_time = loop_start
cached_frame = frame
# Saturation
saturation = self._saturation
if saturation != 1.0:
led_colors = _apply_saturation(led_colors, saturation)
t4 = time.perf_counter()
t0 = time.perf_counter()
# Gamma (LUT lookup — O(1) per pixel)
if self._gamma != 1.0:
led_colors = self._gamma_lut[led_colors]
t5 = time.perf_counter()
calibration = self._calibration
border_pixels = extract_border_pixels(frame, calibration.border_width)
t1 = time.perf_counter()
# Brightness
brightness = self._brightness
if brightness != 1.0:
led_colors = np.clip(
led_colors.astype(np.float32) * brightness, 0, 255
).astype(np.uint8)
t6 = time.perf_counter()
led_colors = self._pixel_mapper.map_border_to_leds(border_pixels)
t2 = time.perf_counter()
self._previous_colors = led_colors
# Pad or truncate to match the declared led_count
target_count = self._led_count
if target_count > 0 and len(led_colors) != target_count:
if len(led_colors) < target_count:
pad = np.zeros((target_count - len(led_colors), 3), dtype=np.uint8)
led_colors = np.concatenate([led_colors, pad])
else:
led_colors = led_colors[:target_count]
with self._colors_lock:
self._latest_colors = led_colors
# Update interpolation buffers (raw colors, before corrections)
if self._frame_interpolation:
self._interp_from = self._interp_to
self._interp_to = led_colors.copy()
self._interp_start = loop_start
self._interp_duration = max(interval, 0.001)
self._last_timing = {
"extract_ms": (t1 - t0) * 1000,
"map_leds_ms": (t2 - t1) * 1000,
"smooth_ms": (t3 - t2) * 1000,
"saturation_ms": (t4 - t3) * 1000,
"gamma_ms": (t5 - t4) * 1000,
"brightness_ms": (t6 - t5) * 1000,
"total_ms": (t6 - t0) * 1000,
}
# Temporal smoothing
smoothing = self._smoothing
if (
self._previous_colors is not None
and smoothing > 0
and len(self._previous_colors) == len(led_colors)
):
alpha = int(smoothing * 256)
led_colors = (
(256 - alpha) * led_colors.astype(np.uint16)
+ alpha * self._previous_colors.astype(np.uint16)
) >> 8
led_colors = led_colors.astype(np.uint8)
t3 = time.perf_counter()
except Exception as e:
logger.error(f"PictureColorStripStream processing error: {e}", exc_info=True)
# Saturation
saturation = self._saturation
if saturation != 1.0:
led_colors = _apply_saturation(led_colors, saturation)
t4 = time.perf_counter()
# Gamma (LUT lookup — O(1) per pixel)
if self._gamma != 1.0:
led_colors = self._gamma_lut[led_colors]
t5 = time.perf_counter()
# Brightness
brightness = self._brightness
if brightness != 1.0:
led_colors = np.clip(
led_colors.astype(np.float32) * brightness, 0, 255
).astype(np.uint8)
t6 = time.perf_counter()
self._previous_colors = led_colors
with self._colors_lock:
self._latest_colors = led_colors
self._last_timing = {
"extract_ms": (t1 - t0) * 1000,
"map_leds_ms": (t2 - t1) * 1000,
"smooth_ms": (t3 - t2) * 1000,
"saturation_ms": (t4 - t3) * 1000,
"gamma_ms": (t5 - t4) * 1000,
"brightness_ms": (t6 - t5) * 1000,
"total_ms": (t6 - t0) * 1000,
}
except Exception as e:
logger.error(f"PictureColorStripStream processing error: {e}", exc_info=True)
elapsed = time.perf_counter() - loop_start
remaining = frame_time - elapsed
if remaining > 0:
time.sleep(remaining)
elapsed = time.perf_counter() - loop_start
remaining = frame_time - elapsed
if remaining > 0:
time.sleep(remaining)
def _compute_gradient_colors(stops: list, led_count: int) -> np.ndarray:
@@ -537,28 +539,29 @@ class StaticColorStripStream(ColorStripStream):
def _animate_loop(self) -> None:
    """Background thread: compute animated colors at ~30 fps when animation is active.

    Timing uses the monotonic ``time.perf_counter()`` (immune to wall-clock
    jumps) and the loop is wrapped in ``high_resolution_timer()`` so short
    sleeps are honored with ~1ms precision on Windows
    (timeBeginPeriod/timeEndPeriod).
    """
    frame_time = 1.0 / 30
    with high_resolution_timer():
        while self._running:
            loop_start = time.perf_counter()
            anim = self._animation
            if anim and anim.get("enabled"):
                speed = float(anim.get("speed", 1.0))
                atype = anim.get("type", "breathing")
                t = loop_start
                n = self._led_count
                colors = None
                if atype == "breathing":
                    # Sinusoidal brightness modulation of the source color.
                    factor = 0.5 * (1 + math.sin(2 * math.pi * speed * t * 0.5))
                    base = np.array(self._source_color, dtype=np.float32)
                    pixel = np.clip(base * factor, 0, 255).astype(np.uint8)
                    colors = np.tile(pixel, (n, 1))
                if colors is not None:
                    with self._colors_lock:
                        self._colors = colors
            # Throttle to ~30 fps; never sleep less than 1ms so the thread
            # always yields even when a tick overruns its budget.
            elapsed = time.perf_counter() - loop_start
            time.sleep(max(frame_time - elapsed, 0.001))
class ColorCycleColorStripStream(ColorStripStream):
@@ -651,26 +654,27 @@ class ColorCycleColorStripStream(ColorStripStream):
def _animate_loop(self) -> None:
    """Background thread: interpolate between colors at ~30 fps.

    Timing uses the monotonic ``time.perf_counter()`` and the loop is
    wrapped in ``high_resolution_timer()`` for ~1ms sleep precision on
    Windows.
    """
    frame_time = 1.0 / 30
    with high_resolution_timer():
        while self._running:
            loop_start = time.perf_counter()
            color_list = self._color_list
            speed = self._cycle_speed
            n = self._led_count
            num = len(color_list)
            if num >= 2:
                # 0.05 factor → one full cycle every 20s at speed=1.0
                cycle_pos = (speed * loop_start * 0.05) % 1.0
                seg = cycle_pos * num
                idx = int(seg) % num
                t_interp = seg - int(seg)
                # Linear blend between the current stop and the next one
                # (wrapping around at the end of the list).
                c1 = np.array(color_list[idx], dtype=np.float32)
                c2 = np.array(color_list[(idx + 1) % num], dtype=np.float32)
                pixel = np.clip(c1 * (1 - t_interp) + c2 * t_interp, 0, 255).astype(np.uint8)
                led_colors = np.tile(pixel, (n, 1))
                with self._colors_lock:
                    self._colors = led_colors
            # Throttle to ~30 fps; minimum 1ms sleep so the thread yields.
            elapsed = time.perf_counter() - loop_start
            time.sleep(max(frame_time - elapsed, 0.001))
class GradientColorStripStream(ColorStripStream):
@@ -765,47 +769,48 @@ class GradientColorStripStream(ColorStripStream):
_cached_base: Optional[np.ndarray] = None
_cached_n: int = 0
_cached_stops: Optional[list] = None
while self._running:
loop_start = time.time()
anim = self._animation
if anim and anim.get("enabled"):
speed = float(anim.get("speed", 1.0))
atype = anim.get("type", "breathing")
t = loop_start
n = self._led_count
stops = self._stops
colors = None
with high_resolution_timer():
while self._running:
loop_start = time.perf_counter()
anim = self._animation
if anim and anim.get("enabled"):
speed = float(anim.get("speed", 1.0))
atype = anim.get("type", "breathing")
t = loop_start
n = self._led_count
stops = self._stops
colors = None
# Recompute base gradient only when stops or led_count change
if _cached_base is None or _cached_n != n or _cached_stops is not stops:
_cached_base = _compute_gradient_colors(stops, n)
_cached_n = n
_cached_stops = stops
base = _cached_base
# Recompute base gradient only when stops or led_count change
if _cached_base is None or _cached_n != n or _cached_stops is not stops:
_cached_base = _compute_gradient_colors(stops, n)
_cached_n = n
_cached_stops = stops
base = _cached_base
if atype == "breathing":
factor = 0.5 * (1 + math.sin(2 * math.pi * speed * t * 0.5))
colors = np.clip(base.astype(np.float32) * factor, 0, 255).astype(np.uint8)
if atype == "breathing":
factor = 0.5 * (1 + math.sin(2 * math.pi * speed * t * 0.5))
colors = np.clip(base.astype(np.float32) * factor, 0, 255).astype(np.uint8)
elif atype == "gradient_shift":
shift = int(speed * t * 10) % max(n, 1)
colors = np.roll(base, shift, axis=0)
elif atype == "gradient_shift":
shift = int(speed * t * 10) % max(n, 1)
colors = np.roll(base, shift, axis=0)
elif atype == "wave":
if n > 1:
i_arr = np.arange(n, dtype=np.float32)
factor = 0.5 * (1 + np.sin(
2 * math.pi * i_arr / n - 2 * math.pi * speed * t
))
colors = np.clip(
base.astype(np.float32) * factor[:, None], 0, 255
).astype(np.uint8)
else:
colors = base
elif atype == "wave":
if n > 1:
i_arr = np.arange(n, dtype=np.float32)
factor = 0.5 * (1 + np.sin(
2 * math.pi * i_arr / n - 2 * math.pi * speed * t
))
colors = np.clip(
base.astype(np.float32) * factor[:, None], 0, 255
).astype(np.uint8)
else:
colors = base
if colors is not None:
with self._colors_lock:
self._colors = colors
if colors is not None:
with self._colors_lock:
self._colors = colors
elapsed = time.time() - loop_start
time.sleep(max(frame_time - elapsed, 0.001))
elapsed = time.perf_counter() - loop_start
time.sleep(max(frame_time - elapsed, 0.001))

View File

@@ -24,6 +24,7 @@ from wled_controller.core.processing.target_processor import (
TargetProcessor,
)
from wled_controller.utils import get_logger
from wled_controller.utils.timer import high_resolution_timer
logger = get_logger(__name__)
@@ -270,7 +271,7 @@ class KCTargetProcessor(TargetProcessor):
frame_time = 1.0 / target_fps
fps_samples: collections.deque = collections.deque(maxlen=10)
timing_samples: collections.deque = collections.deque(maxlen=10)
prev_frame_time_stamp = time.time()
prev_frame_time_stamp = time.perf_counter()
prev_capture = None
last_broadcast_time = 0.0
send_timestamps: collections.deque = collections.deque()
@@ -299,94 +300,95 @@ class KCTargetProcessor(TargetProcessor):
)
try:
while self._is_running:
loop_start = time.time()
with high_resolution_timer():
while self._is_running:
loop_start = time.perf_counter()
try:
capture = self._live_stream.get_latest_frame()
try:
capture = self._live_stream.get_latest_frame()
if capture is None:
await asyncio.sleep(frame_time)
continue
if capture is None:
await asyncio.sleep(frame_time)
continue
# Skip processing if the frame hasn't changed
if capture is prev_capture:
# Keepalive: re-broadcast last colors
if self._latest_colors and (loop_start - last_broadcast_time) >= 1.0:
await self._broadcast_colors(self._latest_colors)
last_broadcast_time = time.time()
send_timestamps.append(last_broadcast_time)
self._metrics.frames_keepalive += 1
self._metrics.frames_skipped += 1
now_ts = time.time()
while send_timestamps and send_timestamps[0] < now_ts - 1.0:
# Skip processing if the frame hasn't changed
if capture is prev_capture:
# Keepalive: re-broadcast last colors
if self._latest_colors and (loop_start - last_broadcast_time) >= 1.0:
await self._broadcast_colors(self._latest_colors)
last_broadcast_time = time.perf_counter()
send_timestamps.append(last_broadcast_time)
self._metrics.frames_keepalive += 1
self._metrics.frames_skipped += 1
now_ts = time.perf_counter()
while send_timestamps and send_timestamps[0] < now_ts - 1.0:
send_timestamps.popleft()
self._metrics.fps_current = len(send_timestamps)
await asyncio.sleep(frame_time)
continue
prev_capture = capture
# Read settings fresh each frame so hot updates (brightness,
# smoothing, interpolation_mode) take effect without restart.
s = self._settings
calc_fn = calc_fns.get(s.interpolation_mode, calculate_average_color)
# CPU-bound work in thread pool
colors, colors_arr, frame_timing = await asyncio.to_thread(
_process_kc_frame,
capture, rect_names, rect_bounds, calc_fn,
prev_colors_arr, s.smoothing, s.brightness,
)
prev_colors_arr = colors_arr
self._latest_colors = dict(colors)
# Broadcast to WebSocket clients
t_broadcast_start = time.perf_counter()
await self._broadcast_colors(colors)
broadcast_ms = (time.perf_counter() - t_broadcast_start) * 1000
last_broadcast_time = time.perf_counter()
send_timestamps.append(last_broadcast_time)
# Per-stage timing (rolling average over last 10 frames)
frame_timing["broadcast"] = broadcast_ms
timing_samples.append(frame_timing)
n = len(timing_samples)
self._metrics.timing_calc_colors_ms = sum(s["calc_colors"] for s in timing_samples) / n
self._metrics.timing_smooth_ms = sum(s["smooth"] for s in timing_samples) / n
self._metrics.timing_broadcast_ms = sum(s["broadcast"] for s in timing_samples) / n
self._metrics.timing_total_ms = sum(s["total"] for s in timing_samples) / n + broadcast_ms
# Update metrics
self._metrics.frames_processed += 1
self._metrics.last_update = datetime.utcnow()
# Calculate actual FPS
now = time.perf_counter()
interval = now - prev_frame_time_stamp
prev_frame_time_stamp = now
fps_samples.append(1.0 / interval if interval > 0 else 0)
self._metrics.fps_actual = sum(fps_samples) / len(fps_samples)
# Potential FPS
processing_time = now - loop_start
self._metrics.fps_potential = 1.0 / processing_time if processing_time > 0 else 0
# fps_current
while send_timestamps and send_timestamps[0] < now - 1.0:
send_timestamps.popleft()
self._metrics.fps_current = len(send_timestamps)
await asyncio.sleep(frame_time)
continue
prev_capture = capture
# Read settings fresh each frame so hot updates (brightness,
# smoothing, interpolation_mode) take effect without restart.
s = self._settings
calc_fn = calc_fns.get(s.interpolation_mode, calculate_average_color)
except Exception as e:
self._metrics.errors_count += 1
self._metrics.last_error = str(e)
logger.error(f"KC processing error for {self._target_id}: {e}", exc_info=True)
# CPU-bound work in thread pool
colors, colors_arr, frame_timing = await asyncio.to_thread(
_process_kc_frame,
capture, rect_names, rect_bounds, calc_fn,
prev_colors_arr, s.smoothing, s.brightness,
)
prev_colors_arr = colors_arr
self._latest_colors = dict(colors)
# Broadcast to WebSocket clients
t_broadcast_start = time.perf_counter()
await self._broadcast_colors(colors)
broadcast_ms = (time.perf_counter() - t_broadcast_start) * 1000
last_broadcast_time = time.time()
send_timestamps.append(last_broadcast_time)
# Per-stage timing (rolling average over last 10 frames)
frame_timing["broadcast"] = broadcast_ms
timing_samples.append(frame_timing)
n = len(timing_samples)
self._metrics.timing_calc_colors_ms = sum(s["calc_colors"] for s in timing_samples) / n
self._metrics.timing_smooth_ms = sum(s["smooth"] for s in timing_samples) / n
self._metrics.timing_broadcast_ms = sum(s["broadcast"] for s in timing_samples) / n
self._metrics.timing_total_ms = sum(s["total"] for s in timing_samples) / n + broadcast_ms
# Update metrics
self._metrics.frames_processed += 1
self._metrics.last_update = datetime.utcnow()
# Calculate actual FPS
now = time.time()
interval = now - prev_frame_time_stamp
prev_frame_time_stamp = now
fps_samples.append(1.0 / interval if interval > 0 else 0)
self._metrics.fps_actual = sum(fps_samples) / len(fps_samples)
# Potential FPS
processing_time = now - loop_start
self._metrics.fps_potential = 1.0 / processing_time if processing_time > 0 else 0
# fps_current
while send_timestamps and send_timestamps[0] < now - 1.0:
send_timestamps.popleft()
self._metrics.fps_current = len(send_timestamps)
except Exception as e:
self._metrics.errors_count += 1
self._metrics.last_error = str(e)
logger.error(f"KC processing error for {self._target_id}: {e}", exc_info=True)
# Throttle to target FPS
elapsed = time.time() - loop_start
remaining = frame_time - elapsed
if remaining > 0:
await asyncio.sleep(remaining)
# Throttle to target FPS
elapsed = time.perf_counter() - loop_start
remaining = frame_time - elapsed
if remaining > 0:
await asyncio.sleep(remaining)
except asyncio.CancelledError:
logger.info(f"KC processing loop cancelled for target {self._target_id}")

View File

@@ -22,6 +22,7 @@ import numpy as np
from wled_controller.core.capture_engines.base import CaptureStream, ScreenCapture
from wled_controller.core.filters import ImagePool, PostprocessingFilter
from wled_controller.utils import get_logger
from wled_controller.utils.timer import high_resolution_timer
logger = get_logger(__name__)
@@ -128,24 +129,25 @@ class ScreenCaptureLiveStream(LiveStream):
def _capture_loop(self) -> None:
    """Background thread: poll the capture stream and publish the latest frame.

    Uses monotonic ``time.perf_counter()`` for FPS throttling and wraps the
    loop in ``high_resolution_timer()`` so the short sleeps are honored with
    ~1ms precision on Windows. ``capture_frame()`` returning None means "no
    new frame yet" (WGC event-driven capture); we back off briefly instead
    of spinning.
    """
    frame_time = 1.0 / self._fps if self._fps > 0 else 1.0
    with high_resolution_timer():
        while self._running:
            loop_start = time.perf_counter()
            try:
                frame = self._capture_stream.capture_frame()
                if frame is not None:
                    with self._frame_lock:
                        self._latest_frame = frame
                else:
                    # Small sleep when no frame available to avoid CPU spinning
                    time.sleep(0.001)
            except Exception as e:
                logger.error(f"Capture error (display={self._capture_stream.display_index}): {e}")
            # Throttle to target FPS
            elapsed = time.perf_counter() - loop_start
            remaining = frame_time - elapsed
            if remaining > 0:
                time.sleep(remaining)
class ProcessedLiveStream(LiveStream):
@@ -224,78 +226,79 @@ class ProcessedLiveStream(LiveStream):
fps = self.target_fps
frame_time = 1.0 / fps if fps > 0 else 1.0
while self._running:
loop_start = time.time()
with high_resolution_timer():
while self._running:
loop_start = time.perf_counter()
source_frame = self._source.get_latest_frame()
if source_frame is None or source_frame is cached_source_frame:
# Idle tick — run filter chain when any filter requests idle processing
if self._has_idle_filters and cached_source_frame is not None:
src = cached_source_frame.image
h, w, c = src.shape
if _idle_src_buf is None or _idle_src_buf.shape != (h, w, c):
_idle_src_buf = np.empty((h, w, c), dtype=np.uint8)
np.copyto(_idle_src_buf, src)
idle_image = _idle_src_buf
source_frame = self._source.get_latest_frame()
if source_frame is None or source_frame is cached_source_frame:
# Idle tick — run filter chain when any filter requests idle processing
if self._has_idle_filters and cached_source_frame is not None:
src = cached_source_frame.image
h, w, c = src.shape
if _idle_src_buf is None or _idle_src_buf.shape != (h, w, c):
_idle_src_buf = np.empty((h, w, c), dtype=np.uint8)
np.copyto(_idle_src_buf, src)
idle_image = _idle_src_buf
for f in self._filters:
result = f.process_image(idle_image, self._image_pool)
if result is not None:
if idle_image is not _idle_src_buf:
self._image_pool.release(idle_image)
idle_image = result
for f in self._filters:
result = f.process_image(idle_image, self._image_pool)
if result is not None:
if idle_image is not _idle_src_buf:
self._image_pool.release(idle_image)
idle_image = result
# Only publish a new frame when the filter chain produced actual
# interpolated output (idle_image advanced past the input buffer).
# If every filter passed through, idle_image is still _idle_src_buf —
# leave _latest_frame unchanged so consumers that rely on object
# identity for deduplication correctly detect no new content.
if idle_image is not _idle_src_buf:
processed = ScreenCapture(
image=idle_image,
width=cached_source_frame.width,
height=cached_source_frame.height,
display_index=cached_source_frame.display_index,
)
with self._frame_lock:
self._latest_frame = processed
# Only publish a new frame when the filter chain produced actual
# interpolated output (idle_image advanced past the input buffer).
# If every filter passed through, idle_image is still _idle_src_buf —
# leave _latest_frame unchanged so consumers that rely on object
# identity for deduplication correctly detect no new content.
if idle_image is not _idle_src_buf:
processed = ScreenCapture(
image=idle_image,
width=cached_source_frame.width,
height=cached_source_frame.height,
display_index=cached_source_frame.display_index,
)
with self._frame_lock:
self._latest_frame = processed
elapsed = time.time() - loop_start
remaining = frame_time - elapsed
time.sleep(max(remaining, 0.001))
continue
elapsed = time.perf_counter() - loop_start
remaining = frame_time - elapsed
time.sleep(max(remaining, 0.001))
continue
cached_source_frame = source_frame
cached_source_frame = source_frame
# Reuse ring buffer slot instead of allocating a new copy each frame
src = source_frame.image
h, w, c = src.shape
buf = _ring[_ring_idx]
if buf is None or buf.shape != (h, w, c):
buf = np.empty((h, w, c), dtype=np.uint8)
_ring[_ring_idx] = buf
_ring_idx = (_ring_idx + 1) % 3
# Reuse ring buffer slot instead of allocating a new copy each frame
src = source_frame.image
h, w, c = src.shape
buf = _ring[_ring_idx]
if buf is None or buf.shape != (h, w, c):
buf = np.empty((h, w, c), dtype=np.uint8)
_ring[_ring_idx] = buf
_ring_idx = (_ring_idx + 1) % 3
np.copyto(buf, src)
image = buf
np.copyto(buf, src)
image = buf
for f in self._filters:
result = f.process_image(image, self._image_pool)
if result is not None:
# Release intermediate filter output back to pool
# (don't release the ring buffer itself)
if image is not buf:
self._image_pool.release(image)
image = result
for f in self._filters:
result = f.process_image(image, self._image_pool)
if result is not None:
# Release intermediate filter output back to pool
# (don't release the ring buffer itself)
if image is not buf:
self._image_pool.release(image)
image = result
processed = ScreenCapture(
image=image,
width=source_frame.width,
height=source_frame.height,
display_index=source_frame.display_index,
)
with self._frame_lock:
self._latest_frame = processed
processed = ScreenCapture(
image=image,
width=source_frame.width,
height=source_frame.height,
display_index=source_frame.display_index,
)
with self._frame_lock:
self._latest_frame = processed
class StaticImageLiveStream(LiveStream):

View File

@@ -19,6 +19,7 @@ from wled_controller.core.processing.target_processor import (
TargetProcessor,
)
from wled_controller.utils import get_logger
from wled_controller.utils.timer import high_resolution_timer
logger = get_logger(__name__)
@@ -367,7 +368,7 @@ class WledTargetProcessor(TargetProcessor):
send_timestamps: collections.deque = collections.deque()
prev_colors = None
last_send_time = 0.0
prev_frame_time_stamp = time.time()
prev_frame_time_stamp = time.perf_counter()
loop = asyncio.get_running_loop()
logger.info(
@@ -376,102 +377,103 @@ class WledTargetProcessor(TargetProcessor):
)
try:
while self._is_running:
loop_start = now = time.time()
# Re-read target_fps each tick so hot-updates to the CSS source take effect
target_fps = stream.target_fps if stream.target_fps > 0 else 30
frame_time = 1.0 / target_fps
with high_resolution_timer():
while self._is_running:
loop_start = now = time.perf_counter()
# Re-read target_fps each tick so hot-updates to the CSS source take effect
target_fps = stream.target_fps if stream.target_fps > 0 else 30
frame_time = 1.0 / target_fps
# Re-fetch device info for runtime changes (test mode, brightness)
device_info = self._ctx.get_device_info(self._device_id)
# Re-fetch device info for runtime changes (test mode, brightness)
device_info = self._ctx.get_device_info(self._device_id)
# Skip send while in calibration test mode
if device_info and device_info.test_mode_active:
await asyncio.sleep(frame_time)
continue
try:
colors = stream.get_latest_colors()
if colors is None:
if self._metrics.frames_processed == 0:
logger.info(f"Stream returned None for target {self._target_id} (no data yet)")
# Skip send while in calibration test mode
if device_info and device_info.test_mode_active:
await asyncio.sleep(frame_time)
continue
if colors is prev_colors:
# Same frame — send keepalive if interval elapsed
if prev_colors is not None and (loop_start - last_send_time) >= standby_interval:
if not self._is_running or self._led_client is None:
break
send_colors = self._apply_brightness(prev_colors, device_info)
if self._led_client.supports_fast_send:
self._led_client.send_pixels_fast(send_colors)
else:
await self._led_client.send_pixels(send_colors)
now = time.time()
last_send_time = now
send_timestamps.append(now)
self._metrics.frames_keepalive += 1
self._metrics.frames_skipped += 1
try:
colors = stream.get_latest_colors()
if colors is None:
if self._metrics.frames_processed == 0:
logger.info(f"Stream returned None for target {self._target_id} (no data yet)")
await asyncio.sleep(frame_time)
continue
if colors is prev_colors:
# Same frame — send keepalive if interval elapsed
if prev_colors is not None and (loop_start - last_send_time) >= standby_interval:
if not self._is_running or self._led_client is None:
break
send_colors = self._apply_brightness(prev_colors, device_info)
if self._led_client.supports_fast_send:
self._led_client.send_pixels_fast(send_colors)
else:
await self._led_client.send_pixels(send_colors)
now = time.perf_counter()
last_send_time = now
send_timestamps.append(now)
self._metrics.frames_keepalive += 1
self._metrics.frames_skipped += 1
while send_timestamps and send_timestamps[0] < now - 1.0:
send_timestamps.popleft()
self._metrics.fps_current = len(send_timestamps)
await asyncio.sleep(frame_time)
continue
prev_colors = colors
# Apply device software brightness
send_colors = self._apply_brightness(colors, device_info)
# Send to LED device
if not self._is_running or self._led_client is None:
break
t_send_start = time.perf_counter()
if self._led_client.supports_fast_send:
self._led_client.send_pixels_fast(send_colors)
else:
await self._led_client.send_pixels(send_colors)
send_ms = (time.perf_counter() - t_send_start) * 1000
now = time.perf_counter()
last_send_time = now
send_timestamps.append(now)
self._metrics.timing_send_ms = send_ms
self._metrics.frames_processed += 1
self._metrics.last_update = datetime.utcnow()
if self._metrics.frames_processed <= 3 or self._metrics.frames_processed % 100 == 0:
logger.info(
f"Frame {self._metrics.frames_processed} for {self._target_id} "
f"({len(send_colors)} LEDs) — send={send_ms:.1f}ms"
)
# FPS tracking
interval = now - prev_frame_time_stamp
prev_frame_time_stamp = now
fps_samples.append(1.0 / interval if interval > 0 else 0)
self._metrics.fps_actual = sum(fps_samples) / len(fps_samples)
processing_time = now - loop_start
self._metrics.fps_potential = 1.0 / processing_time if processing_time > 0 else 0
while send_timestamps and send_timestamps[0] < now - 1.0:
send_timestamps.popleft()
self._metrics.fps_current = len(send_timestamps)
await asyncio.sleep(frame_time)
continue
prev_colors = colors
except Exception as e:
self._metrics.errors_count += 1
self._metrics.last_error = str(e)
logger.error(f"Processing error for target {self._target_id}: {e}", exc_info=True)
# Apply device software brightness
send_colors = self._apply_brightness(colors, device_info)
# Send to LED device
if not self._is_running or self._led_client is None:
break
t_send_start = time.perf_counter()
if self._led_client.supports_fast_send:
self._led_client.send_pixels_fast(send_colors)
else:
await self._led_client.send_pixels(send_colors)
send_ms = (time.perf_counter() - t_send_start) * 1000
now = time.time()
last_send_time = now
send_timestamps.append(now)
self._metrics.timing_send_ms = send_ms
self._metrics.frames_processed += 1
self._metrics.last_update = datetime.utcnow()
if self._metrics.frames_processed <= 3 or self._metrics.frames_processed % 100 == 0:
logger.info(
f"Frame {self._metrics.frames_processed} for {self._target_id} "
f"({len(send_colors)} LEDs) — send={send_ms:.1f}ms"
)
# FPS tracking
interval = now - prev_frame_time_stamp
prev_frame_time_stamp = now
fps_samples.append(1.0 / interval if interval > 0 else 0)
self._metrics.fps_actual = sum(fps_samples) / len(fps_samples)
processing_time = now - loop_start
self._metrics.fps_potential = 1.0 / processing_time if processing_time > 0 else 0
while send_timestamps and send_timestamps[0] < now - 1.0:
send_timestamps.popleft()
self._metrics.fps_current = len(send_timestamps)
except Exception as e:
self._metrics.errors_count += 1
self._metrics.last_error = str(e)
logger.error(f"Processing error for target {self._target_id}: {e}", exc_info=True)
# Throttle to target FPS
elapsed = now - loop_start
remaining = frame_time - elapsed
if remaining > 0:
await asyncio.sleep(remaining)
# Throttle to target FPS
elapsed = now - loop_start
remaining = frame_time - elapsed
if remaining > 0:
await asyncio.sleep(remaining)
except asyncio.CancelledError:
logger.info(f"Processing loop cancelled for target {self._target_id}")