Add frame interpolation postprocessing filter + KC hot-settings

Frame interpolation filter (frame_interpolation):
- New PostprocessingFilter with supports_idle_frames = True
- Backward-blend algorithm: blends frame N-1 → N over one capture
  interval, producing smooth output on idle ticks at ≤1 frame of lag
- Detects new vs idle frames via cheap 64-byte signature comparison
- No options; registered alongside other built-in filters

ProcessedLiveStream idle-tick support:
- Detects supports_idle_frames filters at construction (_has_idle_filters)
- target_fps returns 2× source rate when idle filters are present
- _process_loop runs at the 2× rate; idle ticks copy the cached source frame
  and run the full filter chain, publishing the result only when a filter
  returned actual interpolated output (not a pass-through)
- Pass-through idle ticks leave _latest_frame unchanged so consumers
  correctly deduplicate via object identity

KC target hot-settings:
- brightness, smoothing, interpolation_mode are now read from self._settings
  on each frame instead of being read once into stale locals at loop startup
- Changes take effect within one frame without stop/restart

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-20 21:01:45 +03:00
parent 55e25b8860
commit 872949a7e1
5 changed files with 142 additions and 11 deletions

View File

@@ -258,19 +258,14 @@ class KCTargetProcessor(TargetProcessor):
async def _processing_loop(self) -> None:
"""Main processing loop for key-colors extraction."""
settings = self._settings
target_fps = self._settings.fps
target_fps = settings.fps
smoothing = settings.smoothing
brightness = settings.brightness
# Select color calculation function
# Lookup table for interpolation mode → function (used per-frame from live settings)
calc_fns = {
"average": calculate_average_color,
"median": calculate_median_color,
"dominant": calculate_dominant_color,
}
calc_fn = calc_fns.get(settings.interpolation_mode, calculate_average_color)
frame_time = 1.0 / target_fps
fps_samples: collections.deque = collections.deque(maxlen=10)
@@ -331,11 +326,16 @@ class KCTargetProcessor(TargetProcessor):
continue
prev_capture = capture
# Read settings fresh each frame so hot updates (brightness,
# smoothing, interpolation_mode) take effect without restart.
s = self._settings
calc_fn = calc_fns.get(s.interpolation_mode, calculate_average_color)
# CPU-bound work in thread pool
colors, colors_arr, frame_timing = await asyncio.to_thread(
_process_kc_frame,
capture, rect_names, rect_bounds, calc_fn,
prev_colors_arr, smoothing, brightness,
prev_colors_arr, s.smoothing, s.brightness,
)
prev_colors_arr = colors_arr

View File

@@ -168,10 +168,17 @@ class ProcessedLiveStream(LiveStream):
self._frame_lock = threading.Lock()
self._running = False
self._thread: Optional[threading.Thread] = None
# True when at least one filter requests idle-tick processing (e.g. frame interpolation).
# The processing loop then runs at 2× source rate and runs the full filter chain on idle
# ticks so those filters can produce interpolated output.
self._has_idle_filters: bool = any(
getattr(f, "supports_idle_frames", False) for f in filters
)
@property
def target_fps(self) -> int:
return self._source.target_fps
base = self._source.target_fps
return base * 2 if self._has_idle_filters else base
@property
def display_index(self) -> Optional[int]:
@@ -212,14 +219,47 @@ class ProcessedLiveStream(LiveStream):
# processed by a consumer), so the 3rd slot is always safe to reuse.
_ring: List[Optional[np.ndarray]] = [None, None, None]
_ring_idx = 0
frame_time = 1.0 / self._source.target_fps if self._source.target_fps > 0 else 1.0
# Separate buffer for idle-tick source copies (not part of the ring buffer)
_idle_src_buf: Optional[np.ndarray] = None
fps = self.target_fps
frame_time = 1.0 / fps if fps > 0 else 1.0
while self._running:
loop_start = time.time()
source_frame = self._source.get_latest_frame()
if source_frame is None or source_frame is cached_source_frame:
# Sleep until next frame is expected
# Idle tick — run filter chain when any filter requests idle processing
if self._has_idle_filters and cached_source_frame is not None:
src = cached_source_frame.image
h, w, c = src.shape
if _idle_src_buf is None or _idle_src_buf.shape != (h, w, c):
_idle_src_buf = np.empty((h, w, c), dtype=np.uint8)
np.copyto(_idle_src_buf, src)
idle_image = _idle_src_buf
for f in self._filters:
result = f.process_image(idle_image, self._image_pool)
if result is not None:
if idle_image is not _idle_src_buf:
self._image_pool.release(idle_image)
idle_image = result
# Only publish a new frame when the filter chain produced actual
# interpolated output (idle_image advanced past the input buffer).
# If every filter passed through, idle_image is still _idle_src_buf —
# leave _latest_frame unchanged so consumers that rely on object
# identity for deduplication correctly detect no new content.
if idle_image is not _idle_src_buf:
processed = ScreenCapture(
image=idle_image,
width=cached_source_frame.width,
height=cached_source_frame.height,
display_index=cached_source_frame.display_index,
)
with self._frame_lock:
self._latest_frame = processed
elapsed = time.time() - loop_start
remaining = frame_time - elapsed
time.sleep(max(remaining, 0.001))