Initial commit: WLED Screen Controller with FastAPI server and Home Assistant integration
Some checks failed: Validate / validate (push) — failing after 1m6s
This is a complete WLED ambient lighting controller that captures screen border pixels and sends them to WLED devices for immersive ambient lighting effects.

## Server Features:
- FastAPI-based REST API with 17+ endpoints
- Real-time screen capture with multi-monitor support
- Advanced LED calibration system with visual GUI
- API key authentication with labeled tokens
- Per-device brightness control (0-100%)
- Configurable FPS (1-60), border width, and color correction
- Persistent device storage (JSON-based)
- Comprehensive Web UI with dark/light themes
- Docker support with docker-compose
- Windows monitor name detection via WMI (shows "LG ULTRAWIDE" etc.)

## Web UI Features:
- Device management (add, configure, remove WLED devices)
- Real-time status monitoring with FPS metrics
- Settings modal for device configuration
- Visual calibration GUI with edge testing
- Brightness slider per device
- Display selection with friendly monitor names
- Token-based authentication with login/logout
- Responsive button layout

## Calibration System:
- Support for any LED strip layout (clockwise/counterclockwise)
- 4 starting position options (corners)
- Per-edge LED count configuration
- Visual preview with starting position indicator
- Test buttons to light up individual edges
- Smart LED ordering based on start position and direction

## Home Assistant Integration:
- Custom HACS integration
- Switch entities for processing control
- Sensor entities for status and FPS
- Select entities for display selection
- Config flow for easy setup
- Auto-discovery of devices from server

## Technical Stack:
- Python 3.11+
- FastAPI + uvicorn
- mss (screen capture)
- httpx (async WLED client)
- Pydantic (validation)
- WMI (Windows monitor detection)
- Structlog (logging)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
1
server/tests/__init__.py
Normal file
1
server/tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Tests for WLED Screen Controller."""
|
||||
49
server/tests/conftest.py
Normal file
49
server/tests/conftest.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""Pytest configuration and fixtures."""
|
||||
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_data_dir(tmp_path):
|
||||
"""Provide a temporary directory for test data."""
|
||||
return tmp_path / "data"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_config_dir(tmp_path):
|
||||
"""Provide a temporary directory for test configuration."""
|
||||
return tmp_path / "config"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_calibration():
|
||||
"""Provide a sample calibration configuration."""
|
||||
return {
|
||||
"layout": "clockwise",
|
||||
"start_position": "bottom_left",
|
||||
"segments": [
|
||||
{"edge": "bottom", "led_start": 0, "led_count": 40, "reverse": False},
|
||||
{"edge": "right", "led_start": 40, "led_count": 30, "reverse": False},
|
||||
{"edge": "top", "led_start": 70, "led_count": 40, "reverse": True},
|
||||
{"edge": "left", "led_start": 110, "led_count": 40, "reverse": True},
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_device():
|
||||
"""Provide a sample device configuration."""
|
||||
return {
|
||||
"id": "test_device_001",
|
||||
"name": "Test WLED Device",
|
||||
"url": "http://192.168.1.100",
|
||||
"led_count": 150,
|
||||
"enabled": True,
|
||||
"settings": {
|
||||
"display_index": 0,
|
||||
"fps": 30,
|
||||
"border_width": 10,
|
||||
"brightness": 0.8,
|
||||
},
|
||||
}
|
||||
75
server/tests/test_api.py
Normal file
75
server/tests/test_api.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Tests for API endpoints."""
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from wled_controller.main import app
|
||||
from wled_controller import __version__
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
def test_root_endpoint():
|
||||
"""Test root endpoint."""
|
||||
response = client.get("/")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["name"] == "WLED Screen Controller"
|
||||
assert data["version"] == __version__
|
||||
assert "/docs" in data["docs"]
|
||||
|
||||
|
||||
def test_health_check():
|
||||
"""Test health check endpoint."""
|
||||
response = client.get("/health")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["status"] == "healthy"
|
||||
assert data["version"] == __version__
|
||||
assert "timestamp" in data
|
||||
|
||||
|
||||
def test_version_endpoint():
|
||||
"""Test version endpoint."""
|
||||
response = client.get("/api/v1/version")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["version"] == __version__
|
||||
assert "python_version" in data
|
||||
assert data["api_version"] == "v1"
|
||||
|
||||
|
||||
def test_get_displays():
|
||||
"""Test get displays endpoint."""
|
||||
response = client.get("/api/v1/config/displays")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "displays" in data
|
||||
assert "count" in data
|
||||
assert isinstance(data["displays"], list)
|
||||
assert data["count"] >= 0
|
||||
|
||||
# If displays are found, validate structure
|
||||
if data["count"] > 0:
|
||||
display = data["displays"][0]
|
||||
assert "index" in display
|
||||
assert "name" in display
|
||||
assert "width" in display
|
||||
assert "height" in display
|
||||
assert "is_primary" in display
|
||||
|
||||
|
||||
def test_openapi_docs():
|
||||
"""Test OpenAPI documentation is available."""
|
||||
response = client.get("/openapi.json")
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["info"]["title"] == "WLED Screen Controller"
|
||||
assert data["info"]["version"] == __version__
|
||||
|
||||
|
||||
def test_swagger_ui():
|
||||
"""Test Swagger UI is available."""
|
||||
response = client.get("/docs")
|
||||
assert response.status_code == 200
|
||||
assert "text/html" in response.headers["content-type"]
|
||||
281
server/tests/test_calibration.py
Normal file
281
server/tests/test_calibration.py
Normal file
@@ -0,0 +1,281 @@
|
||||
"""Tests for calibration system."""
|
||||
|
||||
import numpy as np
|
||||
import pytest
|
||||
|
||||
from wled_controller.core.calibration import (
|
||||
CalibrationSegment,
|
||||
CalibrationConfig,
|
||||
PixelMapper,
|
||||
create_default_calibration,
|
||||
calibration_from_dict,
|
||||
calibration_to_dict,
|
||||
)
|
||||
from wled_controller.core.screen_capture import BorderPixels
|
||||
|
||||
|
||||
def test_calibration_segment():
|
||||
"""Test calibration segment creation."""
|
||||
segment = CalibrationSegment(
|
||||
edge="top",
|
||||
led_start=0,
|
||||
led_count=40,
|
||||
reverse=False,
|
||||
)
|
||||
|
||||
assert segment.edge == "top"
|
||||
assert segment.led_start == 0
|
||||
assert segment.led_count == 40
|
||||
assert segment.reverse is False
|
||||
|
||||
|
||||
def test_calibration_config_validation():
|
||||
"""Test calibration configuration validation."""
|
||||
segments = [
|
||||
CalibrationSegment(edge="bottom", led_start=0, led_count=40),
|
||||
CalibrationSegment(edge="right", led_start=40, led_count=30),
|
||||
CalibrationSegment(edge="top", led_start=70, led_count=40),
|
||||
CalibrationSegment(edge="left", led_start=110, led_count=40),
|
||||
]
|
||||
|
||||
config = CalibrationConfig(
|
||||
layout="clockwise",
|
||||
start_position="bottom_left",
|
||||
segments=segments,
|
||||
)
|
||||
|
||||
assert config.validate() is True
|
||||
assert config.get_total_leds() == 150
|
||||
|
||||
|
||||
def test_calibration_config_duplicate_edges():
|
||||
"""Test validation fails with duplicate edges."""
|
||||
segments = [
|
||||
CalibrationSegment(edge="top", led_start=0, led_count=40),
|
||||
CalibrationSegment(edge="top", led_start=40, led_count=40), # Duplicate
|
||||
]
|
||||
|
||||
config = CalibrationConfig(
|
||||
layout="clockwise",
|
||||
start_position="bottom_left",
|
||||
segments=segments,
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="Duplicate edges"):
|
||||
config.validate()
|
||||
|
||||
|
||||
def test_calibration_config_overlapping_indices():
|
||||
"""Test validation fails with overlapping LED indices."""
|
||||
segments = [
|
||||
CalibrationSegment(edge="bottom", led_start=0, led_count=50),
|
||||
CalibrationSegment(edge="right", led_start=40, led_count=30), # Overlaps
|
||||
]
|
||||
|
||||
config = CalibrationConfig(
|
||||
layout="clockwise",
|
||||
start_position="bottom_left",
|
||||
segments=segments,
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="overlap"):
|
||||
config.validate()
|
||||
|
||||
|
||||
def test_calibration_config_invalid_led_count():
|
||||
"""Test validation fails with invalid LED counts."""
|
||||
segments = [
|
||||
CalibrationSegment(edge="top", led_start=0, led_count=0), # Invalid
|
||||
]
|
||||
|
||||
config = CalibrationConfig(
|
||||
layout="clockwise",
|
||||
start_position="bottom_left",
|
||||
segments=segments,
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
config.validate()
|
||||
|
||||
|
||||
def test_get_segment_for_edge():
|
||||
"""Test getting segment by edge name."""
|
||||
segments = [
|
||||
CalibrationSegment(edge="bottom", led_start=0, led_count=40),
|
||||
CalibrationSegment(edge="right", led_start=40, led_count=30),
|
||||
]
|
||||
|
||||
config = CalibrationConfig(
|
||||
layout="clockwise",
|
||||
start_position="bottom_left",
|
||||
segments=segments,
|
||||
)
|
||||
|
||||
bottom_seg = config.get_segment_for_edge("bottom")
|
||||
assert bottom_seg is not None
|
||||
assert bottom_seg.led_count == 40
|
||||
|
||||
missing_seg = config.get_segment_for_edge("top")
|
||||
assert missing_seg is None
|
||||
|
||||
|
||||
def test_pixel_mapper_initialization():
|
||||
"""Test pixel mapper initialization."""
|
||||
config = create_default_calibration(150)
|
||||
mapper = PixelMapper(config, interpolation_mode="average")
|
||||
|
||||
assert mapper.calibration == config
|
||||
assert mapper.interpolation_mode == "average"
|
||||
|
||||
|
||||
def test_pixel_mapper_invalid_mode():
|
||||
"""Test pixel mapper with invalid interpolation mode."""
|
||||
config = create_default_calibration(150)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
PixelMapper(config, interpolation_mode="invalid")
|
||||
|
||||
|
||||
def test_pixel_mapper_map_border_to_leds():
|
||||
"""Test mapping border pixels to LED colors."""
|
||||
config = create_default_calibration(40) # 10 per edge
|
||||
mapper = PixelMapper(config)
|
||||
|
||||
# Create test border pixels (all red)
|
||||
border_pixels = BorderPixels(
|
||||
top=np.full((10, 100, 3), [255, 0, 0], dtype=np.uint8),
|
||||
right=np.full((100, 10, 3), [0, 255, 0], dtype=np.uint8),
|
||||
bottom=np.full((10, 100, 3), [0, 0, 255], dtype=np.uint8),
|
||||
left=np.full((100, 10, 3), [255, 255, 0], dtype=np.uint8),
|
||||
)
|
||||
|
||||
led_colors = mapper.map_border_to_leds(border_pixels)
|
||||
|
||||
assert len(led_colors) == 40
|
||||
assert all(isinstance(c, tuple) and len(c) == 3 for c in led_colors)
|
||||
|
||||
# Verify colors are reasonable (allowing for some rounding)
|
||||
# Bottom LEDs should be mostly blue
|
||||
bottom_color = led_colors[0]
|
||||
assert bottom_color[2] > 200 # Blue channel high
|
||||
|
||||
# Top LEDs should be mostly red
|
||||
top_segment = config.get_segment_for_edge("top")
|
||||
top_color = led_colors[top_segment.led_start]
|
||||
assert top_color[0] > 200 # Red channel high
|
||||
|
||||
|
||||
def test_pixel_mapper_test_calibration():
|
||||
"""Test calibration testing pattern."""
|
||||
config = create_default_calibration(100)
|
||||
mapper = PixelMapper(config)
|
||||
|
||||
# Test top edge
|
||||
led_colors = mapper.test_calibration("top", (255, 0, 0))
|
||||
|
||||
assert len(led_colors) == 100
|
||||
|
||||
# Top edge should be lit (red)
|
||||
top_segment = config.get_segment_for_edge("top")
|
||||
top_leds = led_colors[top_segment.led_start:top_segment.led_start + top_segment.led_count]
|
||||
assert all(color == (255, 0, 0) for color in top_leds)
|
||||
|
||||
# Other LEDs should be off
|
||||
other_leds = led_colors[:top_segment.led_start]
|
||||
assert all(color == (0, 0, 0) for color in other_leds)
|
||||
|
||||
|
||||
def test_pixel_mapper_test_calibration_invalid_edge():
|
||||
"""Test calibration testing with invalid edge."""
|
||||
config = CalibrationConfig(
|
||||
layout="clockwise",
|
||||
start_position="bottom_left",
|
||||
segments=[
|
||||
CalibrationSegment(edge="bottom", led_start=0, led_count=40),
|
||||
],
|
||||
)
|
||||
mapper = PixelMapper(config)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
mapper.test_calibration("top", (255, 0, 0)) # Top not in config
|
||||
|
||||
|
||||
def test_create_default_calibration():
|
||||
"""Test creating default calibration."""
|
||||
config = create_default_calibration(150)
|
||||
|
||||
assert config.layout == "clockwise"
|
||||
assert config.start_position == "bottom_left"
|
||||
assert len(config.segments) == 4
|
||||
assert config.get_total_leds() == 150
|
||||
|
||||
# Check all edges are present
|
||||
edges = {seg.edge for seg in config.segments}
|
||||
assert edges == {"top", "right", "bottom", "left"}
|
||||
|
||||
|
||||
def test_create_default_calibration_small_count():
|
||||
"""Test default calibration with small LED count."""
|
||||
config = create_default_calibration(4)
|
||||
assert config.get_total_leds() == 4
|
||||
|
||||
|
||||
def test_create_default_calibration_invalid():
|
||||
"""Test default calibration with invalid LED count."""
|
||||
with pytest.raises(ValueError):
|
||||
create_default_calibration(3) # Too few LEDs
|
||||
|
||||
|
||||
def test_calibration_from_dict():
|
||||
"""Test creating calibration from dictionary."""
|
||||
data = {
|
||||
"layout": "clockwise",
|
||||
"start_position": "bottom_left",
|
||||
"segments": [
|
||||
{"edge": "bottom", "led_start": 0, "led_count": 40, "reverse": False},
|
||||
{"edge": "right", "led_start": 40, "led_count": 30, "reverse": False},
|
||||
],
|
||||
}
|
||||
|
||||
config = calibration_from_dict(data)
|
||||
|
||||
assert config.layout == "clockwise"
|
||||
assert config.start_position == "bottom_left"
|
||||
assert len(config.segments) == 2
|
||||
assert config.get_total_leds() == 70
|
||||
|
||||
|
||||
def test_calibration_from_dict_missing_field():
|
||||
"""Test calibration from dict with missing field."""
|
||||
data = {
|
||||
"layout": "clockwise",
|
||||
# Missing start_position
|
||||
"segments": [],
|
||||
}
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
calibration_from_dict(data)
|
||||
|
||||
|
||||
def test_calibration_to_dict():
|
||||
"""Test converting calibration to dictionary."""
|
||||
config = create_default_calibration(100)
|
||||
data = calibration_to_dict(config)
|
||||
|
||||
assert "layout" in data
|
||||
assert "start_position" in data
|
||||
assert "segments" in data
|
||||
assert isinstance(data["segments"], list)
|
||||
assert len(data["segments"]) == 4
|
||||
|
||||
|
||||
def test_calibration_round_trip():
|
||||
"""Test converting calibration to dict and back."""
|
||||
original = create_default_calibration(120)
|
||||
data = calibration_to_dict(original)
|
||||
restored = calibration_from_dict(data)
|
||||
|
||||
assert restored.layout == original.layout
|
||||
assert restored.start_position == original.start_position
|
||||
assert len(restored.segments) == len(original.segments)
|
||||
assert restored.get_total_leds() == original.get_total_leds()
|
||||
120
server/tests/test_config.py
Normal file
120
server/tests/test_config.py
Normal file
@@ -0,0 +1,120 @@
|
||||
"""Tests for configuration management."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import yaml
|
||||
|
||||
from wled_controller.config import (
|
||||
Config,
|
||||
ServerConfig,
|
||||
ProcessingConfig,
|
||||
WLEDConfig,
|
||||
get_config,
|
||||
reload_config,
|
||||
)
|
||||
|
||||
|
||||
def test_default_config():
|
||||
"""Test default configuration values."""
|
||||
config = Config()
|
||||
|
||||
assert config.server.host == "0.0.0.0"
|
||||
assert config.server.port == 8080
|
||||
assert config.processing.default_fps == 30
|
||||
assert config.processing.max_fps == 60
|
||||
assert config.wled.timeout == 5
|
||||
|
||||
|
||||
def test_load_from_yaml(tmp_path):
|
||||
"""Test loading configuration from YAML file."""
|
||||
config_data = {
|
||||
"server": {"host": "127.0.0.1", "port": 9000},
|
||||
"processing": {"default_fps": 60, "border_width": 20},
|
||||
"wled": {"timeout": 10},
|
||||
}
|
||||
|
||||
config_path = tmp_path / "test_config.yaml"
|
||||
with open(config_path, "w") as f:
|
||||
yaml.dump(config_data, f)
|
||||
|
||||
config = Config.from_yaml(config_path)
|
||||
|
||||
assert config.server.host == "127.0.0.1"
|
||||
assert config.server.port == 9000
|
||||
assert config.processing.default_fps == 60
|
||||
assert config.processing.border_width == 20
|
||||
assert config.wled.timeout == 10
|
||||
|
||||
|
||||
def test_load_from_yaml_file_not_found():
|
||||
"""Test loading from non-existent YAML file."""
|
||||
with pytest.raises(FileNotFoundError):
|
||||
Config.from_yaml("nonexistent.yaml")
|
||||
|
||||
|
||||
def test_environment_variables(monkeypatch):
|
||||
"""Test configuration from environment variables."""
|
||||
monkeypatch.setenv("WLED_SERVER__HOST", "192.168.1.1")
|
||||
monkeypatch.setenv("WLED_SERVER__PORT", "7000")
|
||||
monkeypatch.setenv("WLED_PROCESSING__DEFAULT_FPS", "45")
|
||||
|
||||
config = Config()
|
||||
|
||||
assert config.server.host == "192.168.1.1"
|
||||
assert config.server.port == 7000
|
||||
assert config.processing.default_fps == 45
|
||||
|
||||
|
||||
def test_server_config():
|
||||
"""Test server configuration."""
|
||||
server_config = ServerConfig(host="localhost", port=8000)
|
||||
|
||||
assert server_config.host == "localhost"
|
||||
assert server_config.port == 8000
|
||||
assert server_config.log_level == "INFO"
|
||||
|
||||
|
||||
def test_processing_config():
|
||||
"""Test processing configuration."""
|
||||
proc_config = ProcessingConfig(default_fps=25, max_fps=50)
|
||||
|
||||
assert proc_config.default_fps == 25
|
||||
assert proc_config.max_fps == 50
|
||||
assert proc_config.interpolation_mode == "average"
|
||||
|
||||
|
||||
def test_wled_config():
|
||||
"""Test WLED configuration."""
|
||||
wled_config = WLEDConfig(timeout=10, retry_attempts=5)
|
||||
|
||||
assert wled_config.timeout == 10
|
||||
assert wled_config.retry_attempts == 5
|
||||
assert wled_config.protocol == "http"
|
||||
|
||||
|
||||
def test_config_validation():
|
||||
"""Test configuration validation."""
|
||||
# Test valid interpolation mode
|
||||
config = Config(
|
||||
processing=ProcessingConfig(interpolation_mode="median")
|
||||
)
|
||||
assert config.processing.interpolation_mode == "median"
|
||||
|
||||
# Test invalid interpolation mode
|
||||
with pytest.raises(ValueError):
|
||||
ProcessingConfig(interpolation_mode="invalid")
|
||||
|
||||
|
||||
def test_get_config():
|
||||
"""Test global config getter."""
|
||||
config = get_config()
|
||||
assert isinstance(config, Config)
|
||||
|
||||
|
||||
def test_reload_config():
|
||||
"""Test config reload."""
|
||||
config1 = get_config()
|
||||
config2 = reload_config()
|
||||
assert isinstance(config2, Config)
|
||||
305
server/tests/test_device_store.py
Normal file
305
server/tests/test_device_store.py
Normal file
@@ -0,0 +1,305 @@
|
||||
"""Tests for device storage."""
|
||||
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
|
||||
from wled_controller.storage.device_store import Device, DeviceStore
|
||||
from wled_controller.core.processor_manager import ProcessingSettings
|
||||
from wled_controller.core.calibration import create_default_calibration
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def temp_storage(tmp_path):
|
||||
"""Provide temporary storage file."""
|
||||
return tmp_path / "devices.json"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def device_store(temp_storage):
|
||||
"""Provide device store instance."""
|
||||
return DeviceStore(temp_storage)
|
||||
|
||||
|
||||
def test_device_creation():
|
||||
"""Test creating a device."""
|
||||
device = Device(
|
||||
device_id="test_001",
|
||||
name="Test Device",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
assert device.id == "test_001"
|
||||
assert device.name == "Test Device"
|
||||
assert device.url == "http://192.168.1.100"
|
||||
assert device.led_count == 150
|
||||
assert device.enabled is True
|
||||
|
||||
|
||||
def test_device_to_dict():
|
||||
"""Test converting device to dictionary."""
|
||||
device = Device(
|
||||
device_id="test_001",
|
||||
name="Test Device",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
data = device.to_dict()
|
||||
|
||||
assert data["id"] == "test_001"
|
||||
assert data["name"] == "Test Device"
|
||||
assert data["url"] == "http://192.168.1.100"
|
||||
assert data["led_count"] == 150
|
||||
assert "settings" in data
|
||||
assert "calibration" in data
|
||||
|
||||
|
||||
def test_device_from_dict():
|
||||
"""Test creating device from dictionary."""
|
||||
data = {
|
||||
"id": "test_001",
|
||||
"name": "Test Device",
|
||||
"url": "http://192.168.1.100",
|
||||
"led_count": 150,
|
||||
"enabled": True,
|
||||
"settings": {
|
||||
"display_index": 0,
|
||||
"fps": 30,
|
||||
"border_width": 10,
|
||||
},
|
||||
}
|
||||
|
||||
device = Device.from_dict(data)
|
||||
|
||||
assert device.id == "test_001"
|
||||
assert device.name == "Test Device"
|
||||
assert device.led_count == 150
|
||||
|
||||
|
||||
def test_device_round_trip():
|
||||
"""Test converting device to dict and back."""
|
||||
original = Device(
|
||||
device_id="test_001",
|
||||
name="Test Device",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
data = original.to_dict()
|
||||
restored = Device.from_dict(data)
|
||||
|
||||
assert restored.id == original.id
|
||||
assert restored.name == original.name
|
||||
assert restored.url == original.url
|
||||
assert restored.led_count == original.led_count
|
||||
|
||||
|
||||
def test_device_store_init(device_store):
|
||||
"""Test device store initialization."""
|
||||
assert device_store is not None
|
||||
assert device_store.count() == 0
|
||||
|
||||
|
||||
def test_create_device(device_store):
|
||||
"""Test creating a device in store."""
|
||||
device = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
assert device.id is not None
|
||||
assert device.name == "Test WLED"
|
||||
assert device_store.count() == 1
|
||||
|
||||
|
||||
def test_get_device(device_store):
|
||||
"""Test retrieving a device."""
|
||||
created = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
retrieved = device_store.get_device(created.id)
|
||||
|
||||
assert retrieved is not None
|
||||
assert retrieved.id == created.id
|
||||
assert retrieved.name == "Test WLED"
|
||||
|
||||
|
||||
def test_get_device_not_found(device_store):
|
||||
"""Test retrieving non-existent device."""
|
||||
device = device_store.get_device("nonexistent")
|
||||
assert device is None
|
||||
|
||||
|
||||
def test_get_all_devices(device_store):
|
||||
"""Test getting all devices."""
|
||||
device_store.create_device("Device 1", "http://192.168.1.100", 150)
|
||||
device_store.create_device("Device 2", "http://192.168.1.101", 200)
|
||||
|
||||
devices = device_store.get_all_devices()
|
||||
|
||||
assert len(devices) == 2
|
||||
assert any(d.name == "Device 1" for d in devices)
|
||||
assert any(d.name == "Device 2" for d in devices)
|
||||
|
||||
|
||||
def test_update_device(device_store):
|
||||
"""Test updating a device."""
|
||||
device = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
updated = device_store.update_device(
|
||||
device.id,
|
||||
name="Updated WLED",
|
||||
enabled=False,
|
||||
)
|
||||
|
||||
assert updated.name == "Updated WLED"
|
||||
assert updated.enabled is False
|
||||
|
||||
|
||||
def test_update_device_settings(device_store):
|
||||
"""Test updating device settings."""
|
||||
device = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
new_settings = ProcessingSettings(fps=60, border_width=20)
|
||||
|
||||
updated = device_store.update_device(
|
||||
device.id,
|
||||
settings=new_settings,
|
||||
)
|
||||
|
||||
assert updated.settings.fps == 60
|
||||
assert updated.settings.border_width == 20
|
||||
|
||||
|
||||
def test_update_device_calibration(device_store):
|
||||
"""Test updating device calibration."""
|
||||
device = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
new_calibration = create_default_calibration(150)
|
||||
|
||||
updated = device_store.update_device(
|
||||
device.id,
|
||||
calibration=new_calibration,
|
||||
)
|
||||
|
||||
assert updated.calibration is not None
|
||||
|
||||
|
||||
def test_update_device_not_found(device_store):
|
||||
"""Test updating non-existent device."""
|
||||
with pytest.raises(ValueError, match="not found"):
|
||||
device_store.update_device("nonexistent", name="New Name")
|
||||
|
||||
|
||||
def test_delete_device(device_store):
|
||||
"""Test deleting a device."""
|
||||
device = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
device_store.delete_device(device.id)
|
||||
|
||||
assert device_store.count() == 0
|
||||
assert device_store.get_device(device.id) is None
|
||||
|
||||
|
||||
def test_delete_device_not_found(device_store):
|
||||
"""Test deleting non-existent device."""
|
||||
with pytest.raises(ValueError, match="not found"):
|
||||
device_store.delete_device("nonexistent")
|
||||
|
||||
|
||||
def test_device_exists(device_store):
|
||||
"""Test checking if device exists."""
|
||||
device = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
assert device_store.device_exists(device.id) is True
|
||||
assert device_store.device_exists("nonexistent") is False
|
||||
|
||||
|
||||
def test_persistence(temp_storage):
|
||||
"""Test device persistence across store instances."""
|
||||
# Create store and add device
|
||||
store1 = DeviceStore(temp_storage)
|
||||
device = store1.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
device_id = device.id
|
||||
|
||||
# Create new store instance (loads from file)
|
||||
store2 = DeviceStore(temp_storage)
|
||||
|
||||
# Verify device persisted
|
||||
loaded_device = store2.get_device(device_id)
|
||||
assert loaded_device is not None
|
||||
assert loaded_device.name == "Test WLED"
|
||||
assert loaded_device.led_count == 150
|
||||
|
||||
|
||||
def test_clear(device_store):
|
||||
"""Test clearing all devices."""
|
||||
device_store.create_device("Device 1", "http://192.168.1.100", 150)
|
||||
device_store.create_device("Device 2", "http://192.168.1.101", 200)
|
||||
|
||||
assert device_store.count() == 2
|
||||
|
||||
device_store.clear()
|
||||
|
||||
assert device_store.count() == 0
|
||||
|
||||
|
||||
def test_update_led_count_resets_calibration(device_store):
|
||||
"""Test that updating LED count resets calibration."""
|
||||
device = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
original_calibration = device.calibration
|
||||
|
||||
# Update LED count
|
||||
updated = device_store.update_device(device.id, led_count=200)
|
||||
|
||||
# Calibration should be reset for new LED count
|
||||
assert updated.calibration.get_total_leds() == 200
|
||||
assert updated.calibration != original_calibration
|
||||
|
||||
|
||||
def test_update_calibration_led_count_mismatch(device_store):
|
||||
"""Test updating calibration with mismatched LED count fails."""
|
||||
device = device_store.create_device(
|
||||
name="Test WLED",
|
||||
url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
wrong_calibration = create_default_calibration(100)
|
||||
|
||||
with pytest.raises(ValueError, match="does not match"):
|
||||
device_store.update_device(device.id, calibration=wrong_calibration)
|
||||
254
server/tests/test_processor_manager.py
Normal file
254
server/tests/test_processor_manager.py
Normal file
@@ -0,0 +1,254 @@
|
||||
"""Tests for processor manager."""
|
||||
|
||||
import asyncio
|
||||
import pytest
|
||||
import respx
|
||||
from httpx import Response
|
||||
|
||||
from wled_controller.core.processor_manager import (
|
||||
ProcessorManager,
|
||||
ProcessingSettings,
|
||||
)
|
||||
from wled_controller.core.calibration import create_default_calibration
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def wled_url():
|
||||
"""Provide test WLED device URL."""
|
||||
return "http://192.168.1.100"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_wled_responses():
|
||||
"""Provide mock WLED API responses."""
|
||||
return {
|
||||
"info": {
|
||||
"name": "Test WLED",
|
||||
"ver": "0.14.0",
|
||||
"leds": {"count": 150},
|
||||
"brand": "WLED",
|
||||
"product": "FOSS",
|
||||
"mac": "AA:BB:CC:DD:EE:FF",
|
||||
"ip": "192.168.1.100",
|
||||
},
|
||||
"state": {"on": True, "bri": 255},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def processor_manager():
|
||||
"""Provide processor manager instance."""
|
||||
return ProcessorManager()
|
||||
|
||||
|
||||
def test_processor_manager_init():
|
||||
"""Test processor manager initialization."""
|
||||
manager = ProcessorManager()
|
||||
assert manager is not None
|
||||
assert manager.get_all_devices() == []
|
||||
|
||||
|
||||
def test_add_device(processor_manager):
|
||||
"""Test adding a device."""
|
||||
processor_manager.add_device(
|
||||
device_id="test_device",
|
||||
device_url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
devices = processor_manager.get_all_devices()
|
||||
assert "test_device" in devices
|
||||
|
||||
|
||||
def test_add_device_duplicate(processor_manager):
|
||||
"""Test adding duplicate device fails."""
|
||||
processor_manager.add_device(
|
||||
device_id="test_device",
|
||||
device_url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError, match="already exists"):
|
||||
processor_manager.add_device(
|
||||
device_id="test_device",
|
||||
device_url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
|
||||
def test_remove_device(processor_manager):
|
||||
"""Test removing a device."""
|
||||
processor_manager.add_device(
|
||||
device_id="test_device",
|
||||
device_url="http://192.168.1.100",
|
||||
led_count=150,
|
||||
)
|
||||
|
||||
processor_manager.remove_device("test_device")
|
||||
|
||||
assert "test_device" not in processor_manager.get_all_devices()
|
||||
|
||||
|
||||
def test_remove_device_not_found(processor_manager):
|
||||
"""Test removing non-existent device fails."""
|
||||
with pytest.raises(ValueError, match="not found"):
|
||||
processor_manager.remove_device("nonexistent")
|
||||
|
||||
|
||||
def test_update_settings(processor_manager):
    """update_settings() replaces a device's processing settings."""
    processor_manager.add_device(
        device_id="test_device",
        device_url="http://192.168.1.100",
        led_count=150,
    )

    updated = ProcessingSettings(display_index=1, fps=60, border_width=20)
    processor_manager.update_settings("test_device", updated)

    # The new FPS target must be reflected in the reported state.
    state = processor_manager.get_state("test_device")
    assert state["fps_target"] == 60
|
||||
|
||||
|
||||
def test_update_calibration(processor_manager):
    """update_calibration() accepts a calibration matching the LED count."""
    processor_manager.add_device(
        device_id="test_device",
        device_url="http://192.168.1.100",
        led_count=150,
    )

    calibration = create_default_calibration(150)

    # Matching LED counts: the call must complete without raising.
    processor_manager.update_calibration("test_device", calibration)
|
||||
|
||||
|
||||
def test_update_calibration_led_count_mismatch(processor_manager):
    """A calibration whose LED count differs from the device's is rejected."""
    processor_manager.add_device(
        device_id="test_device",
        device_url="http://192.168.1.100",
        led_count=150,
    )

    mismatched = create_default_calibration(100)  # device expects 150 LEDs

    with pytest.raises(ValueError, match="does not match"):
        processor_manager.update_calibration("test_device", mismatched)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_start_processing(processor_manager, wled_url, mock_wled_responses):
    """Processing can be started, runs briefly, and stops cleanly."""
    # Stub out the WLED HTTP API so no real device is needed.
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_responses["info"])
    )
    respx.post(f"{wled_url}/json/state").mock(
        return_value=Response(200, json={"success": True})
    )

    processor_manager.add_device(
        device_id="test_device",
        device_url=wled_url,
        led_count=150,
        settings=ProcessingSettings(fps=5),  # keep the loop slow for the test
    )

    await processor_manager.start_processing("test_device")
    assert processor_manager.is_processing("test_device") is True

    # Give the loop time to process a few frames before shutting down.
    await asyncio.sleep(0.5)

    await processor_manager.stop_processing("test_device")
    assert processor_manager.is_processing("test_device") is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_start_processing_already_running(processor_manager):
    """Starting processing twice should fail.

    Not yet implemented: exercising this path needs mocked WLED responses
    like test_start_processing uses. Skip explicitly instead of silently
    passing (`pass` made this placeholder count as a green test), so the
    coverage gap stays visible in test reports.
    """
    pytest.skip("needs mocked WLED responses; see test_start_processing")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_stop_processing_not_running(processor_manager):
    """stop_processing() on an idle device is a harmless no-op."""
    processor_manager.add_device(
        device_id="test_device",
        device_url="http://192.168.1.100",
        led_count=150,
    )

    # Must complete without raising even though processing never started.
    await processor_manager.stop_processing("test_device")
|
||||
|
||||
|
||||
def test_get_state(processor_manager):
    """get_state() reports id, processing flag, and configured settings."""
    processor_manager.add_device(
        device_id="test_device",
        device_url="http://192.168.1.100",
        led_count=150,
        settings=ProcessingSettings(fps=30, display_index=0),
    )

    reported = processor_manager.get_state("test_device")

    assert reported["device_id"] == "test_device"
    assert reported["processing"] is False  # nothing started yet
    assert reported["fps_target"] == 30
    assert reported["display_index"] == 0
|
||||
|
||||
|
||||
def test_get_state_not_found(processor_manager):
    """Querying state of an unknown device raises ValueError."""
    with pytest.raises(ValueError, match="not found"):
        processor_manager.get_state("nonexistent")
|
||||
|
||||
|
||||
def test_get_metrics(processor_manager):
    """A freshly added, idle device reports zeroed metrics."""
    processor_manager.add_device(
        device_id="test_device",
        device_url="http://192.168.1.100",
        led_count=150,
    )

    reported = processor_manager.get_metrics("test_device")

    assert reported["device_id"] == "test_device"
    assert reported["processing"] is False
    # No frames and no errors before processing ever started.
    assert reported["frames_processed"] == 0
    assert reported["errors_count"] == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_stop_all(processor_manager):
    """stop_all() leaves every registered device in a non-processing state."""
    for device_id, url in (
        ("test_device1", "http://192.168.1.100"),
        ("test_device2", "http://192.168.1.101"),
    ):
        processor_manager.add_device(
            device_id=device_id,
            device_url=url,
            led_count=150,
        )

    await processor_manager.stop_all()

    for device_id in ("test_device1", "test_device2"):
        assert processor_manager.is_processing(device_id) is False
|
||||
# ===== New file: server/tests/test_screen_capture.py (223 lines) =====
|
||||
"""Tests for screen capture functionality."""
|
||||
|
||||
import numpy as np
|
||||
import pytest
|
||||
|
||||
from wled_controller.core.screen_capture import (
|
||||
get_available_displays,
|
||||
capture_display,
|
||||
extract_border_pixels,
|
||||
get_edge_segments,
|
||||
calculate_average_color,
|
||||
calculate_median_color,
|
||||
calculate_dominant_color,
|
||||
ScreenCapture,
|
||||
)
|
||||
|
||||
|
||||
def test_get_available_displays():
    """At least one display is reported, with sane geometry fields."""
    displays = get_available_displays()

    assert isinstance(displays, list)
    assert displays, "expected at least one display"

    # Spot-check the structure of the first entry.
    first = displays[0]
    for attr in ("index", "name", "width", "height"):
        assert hasattr(first, attr)
    assert first.width > 0
    assert first.height > 0
|
||||
|
||||
|
||||
def test_capture_display():
    """Capturing display 0 yields an RGB ndarray matching the reported size."""
    capture = capture_display(0)

    assert isinstance(capture, ScreenCapture)
    assert capture.display_index == 0
    assert capture.width > 0
    assert capture.height > 0

    frame = capture.image
    assert frame is not None
    assert isinstance(frame, np.ndarray)
    # (rows, cols, channels) must agree with the declared geometry.
    assert frame.shape == (capture.height, capture.width, 3)
|
||||
|
||||
|
||||
def test_capture_display_invalid_index():
    """An out-of-range display index raises ValueError."""
    bogus_index = 999
    with pytest.raises(ValueError):
        capture_display(bogus_index)
|
||||
|
||||
|
||||
def test_extract_border_pixels():
    """Each border strip has the expected shape for a 200x100 frame."""
    frame_w, frame_h, border = 200, 100, 10
    frame = np.random.randint(0, 256, (frame_h, frame_w, 3), dtype=np.uint8)
    capture = ScreenCapture(
        image=frame,
        width=frame_w,
        height=frame_h,
        display_index=0,
    )

    borders = extract_border_pixels(capture, border)

    # Horizontal strips span the full width; vertical ones the full height.
    assert borders.top.shape == (border, frame_w, 3)
    assert borders.bottom.shape == (border, frame_w, 3)
    assert borders.left.shape == (frame_h, border, 3)
    assert borders.right.shape == (frame_h, border, 3)
|
||||
|
||||
|
||||
def test_extract_border_pixels_invalid_width():
    """Zero-width and oversized border widths are both rejected."""
    frame = np.random.randint(0, 256, (100, 200, 3), dtype=np.uint8)
    capture = ScreenCapture(
        image=frame,
        width=200,
        height=100,
        display_index=0,
    )

    # 0 is too small; 50 exceeds half the 100px frame height.
    for bad_width in (0, 50):
        with pytest.raises(ValueError):
            extract_border_pixels(capture, bad_width)
|
||||
|
||||
|
||||
def test_get_edge_segments():
    """A 100px-wide horizontal edge splits into 10 roughly-equal segments."""
    edge = np.random.randint(0, 256, (10, 100, 3), dtype=np.uint8)

    segments = get_edge_segments(edge, 10, "top")

    assert len(segments) == 10
    for seg in segments:
        rows, cols, channels = seg.shape
        assert rows == 10         # full border height preserved
        assert 8 <= cols <= 12    # width may vary by a pixel or two
        assert channels == 3      # RGB
|
||||
|
||||
|
||||
def test_get_edge_segments_vertical():
    """A 100px-tall vertical edge splits into 10 roughly-equal segments."""
    edge = np.random.randint(0, 256, (100, 10, 3), dtype=np.uint8)

    segments = get_edge_segments(edge, 10, "left")

    assert len(segments) == 10
    for seg in segments:
        rows, cols, channels = seg.shape
        assert 8 <= rows <= 12    # height may vary by a pixel or two
        assert cols == 10         # full border width preserved
        assert channels == 3      # RGB
|
||||
|
||||
|
||||
def test_get_edge_segments_invalid():
    """Zero segments or more segments than pixels raise ValueError."""
    edge = np.random.randint(0, 256, (10, 100, 3), dtype=np.uint8)

    # 0 segments is meaningless; 200 segments exceeds the 100px edge.
    for bad_count in (0, 200):
        with pytest.raises(ValueError):
            get_edge_segments(edge, bad_count, "top")
|
||||
|
||||
|
||||
def test_calculate_average_color():
    """The average of a uniform region equals the region's color."""
    uniform = np.full((10, 10, 3), [100, 150, 200], dtype=np.uint8)

    assert calculate_average_color(uniform) == (100, 150, 200)
|
||||
|
||||
|
||||
def test_calculate_average_color_mixed():
    """Half red, half blue averages to roughly purple."""
    region = np.zeros((10, 10, 3), dtype=np.uint8)
    region[:5, :, :] = [255, 0, 0]  # top half: pure red
    region[5:, :, :] = [0, 0, 255]  # bottom half: pure blue

    r, g, b = calculate_average_color(region)

    # ~127 in red and blue, near zero in green.
    assert 120 <= r <= 135
    assert 0 <= g <= 10
    assert 120 <= b <= 135
|
||||
|
||||
|
||||
def test_calculate_median_color():
    """The median color is not skewed by a single bright outlier pixel."""
    region = np.full((10, 10, 3), [100, 100, 100], dtype=np.uint8)
    region[0, 0, :] = [255, 255, 255]  # lone white outlier

    median = calculate_median_color(region)

    # Every channel should stay near the bulk value of 100.
    assert all(95 <= channel <= 105 for channel in median)
|
||||
|
||||
|
||||
def test_calculate_dominant_color():
    """The dominant color survives a small patch of noise pixels."""
    region = np.full((20, 20, 3), [100, 150, 200], dtype=np.uint8)
    region[:2, :2, :] = [50, 75, 100]  # 4 of 400 pixels are noise

    r, g, b = calculate_dominant_color(region)

    assert 90 <= r <= 110
    assert 140 <= g <= 160
    assert 190 <= b <= 210
|
||||
|
||||
|
||||
def test_calculate_color_empty_pixels():
    """Every color reducer returns black for an empty pixel array."""
    empty = np.array([]).reshape(0, 0, 3)

    for reducer in (
        calculate_average_color,
        calculate_median_color,
        calculate_dominant_color,
    ):
        assert reducer(empty) == (0, 0, 0)
|
||||
|
||||
|
||||
def test_end_to_end_screen_capture():
    """Full pipeline: enumerate, capture, crop borders, segment, average."""
    # Step 1: there must be something to capture.
    assert len(get_available_displays()) > 0

    # Step 2: grab a frame from the primary display.
    capture = capture_display(0)
    assert capture is not None

    # Step 3: crop the four border strips.
    borders = extract_border_pixels(capture, 10)
    for edge in (borders.top, borders.bottom, borders.left, borders.right):
        assert edge is not None

    # Step 4: segment the top edge for 10 LEDs.
    top_segments = get_edge_segments(borders.top, 10, "top")
    assert len(top_segments) == 10

    # Step 5: reduce the first segment to a single valid RGB tuple.
    color = calculate_average_color(top_segments[0])
    assert len(color) == 3
    assert all(0 <= channel <= 255 for channel in color)
|
||||
# ===== New file: server/tests/test_wled_client.py (253 lines) =====
|
||||
"""Tests for WLED client."""
|
||||
|
||||
import pytest
|
||||
import respx
|
||||
from httpx import Response
|
||||
|
||||
from wled_controller.core.wled_client import WLEDClient, WLEDInfo
|
||||
|
||||
|
||||
@pytest.fixture
def wled_url():
    """Base URL of the fake WLED device used throughout these tests."""
    return "http://192.168.1.100"
|
||||
|
||||
|
||||
@pytest.fixture
def mock_wled_info():
    """Canned /json/info payload mimicking a real WLED device."""
    return {
        "name": "Test WLED",
        "ver": "0.14.0",
        "leds": {"count": 150},
        "brand": "WLED",
        "product": "FOSS",
        "mac": "AA:BB:CC:DD:EE:FF",
        "ip": "192.168.1.100",
    }
|
||||
|
||||
|
||||
@pytest.fixture
def mock_wled_state():
    """Canned /json/state payload: powered on, full brightness, one segment."""
    return {
        "on": True,
        "bri": 255,
        "seg": [{"id": 0, "on": True}],
    }
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_wled_client_connect(wled_url, mock_wled_info):
    """connect() succeeds against a healthy /json/info endpoint."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )

    client = WLEDClient(wled_url)
    try:
        assert await client.connect() is True
        assert client.is_connected is True
    finally:
        await client.close()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_wled_client_connect_failure(wled_url):
    """A 500 from /json/info makes connect() raise and stay disconnected."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(500, text="Internal Server Error")
    )

    # Single attempt so the failure surfaces immediately.
    client = WLEDClient(wled_url, retry_attempts=1)

    with pytest.raises(RuntimeError):
        await client.connect()

    assert client.is_connected is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_get_info(wled_url, mock_wled_info):
    """get_info() parses the /json/info payload into a WLEDInfo object."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )

    async with WLEDClient(wled_url) as client:
        info = await client.get_info()

        assert isinstance(info, WLEDInfo)
        assert info.name == "Test WLED"
        assert info.version == "0.14.0"
        assert info.led_count == 150
        assert info.mac == "AA:BB:CC:DD:EE:FF"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_get_state(wled_url, mock_wled_info, mock_wled_state):
    """get_state() returns the device's current JSON state."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )
    respx.get(f"{wled_url}/json/state").mock(
        return_value=Response(200, json=mock_wled_state)
    )

    async with WLEDClient(wled_url) as client:
        state = await client.get_state()

        assert state["on"] is True
        assert state["bri"] == 255
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_send_pixels(wled_url, mock_wled_info):
    """send_pixels() posts RGB tuples and reports success."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )
    respx.post(f"{wled_url}/json/state").mock(
        return_value=Response(200, json={"success": True})
    )

    rgb_primaries = [
        (255, 0, 0),  # Red
        (0, 255, 0),  # Green
        (0, 0, 255),  # Blue
    ]

    async with WLEDClient(wled_url) as client:
        assert await client.send_pixels(rgb_primaries, brightness=200) is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_send_pixels_invalid_values(wled_url, mock_wled_info):
    """Out-of-range colors, bad brightness, and empty lists are rejected."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )

    async with WLEDClient(wled_url) as client:
        # Channel value above 255.
        with pytest.raises(ValueError):
            await client.send_pixels([(300, 0, 0)])

        # Brightness above 255.
        with pytest.raises(ValueError):
            await client.send_pixels([(255, 0, 0)], brightness=300)

        # Nothing to send.
        with pytest.raises(ValueError):
            await client.send_pixels([])
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_set_power(wled_url, mock_wled_info):
    """set_power() succeeds for both on and off."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )
    respx.post(f"{wled_url}/json/state").mock(
        return_value=Response(200, json={"success": True})
    )

    async with WLEDClient(wled_url) as client:
        for powered in (True, False):
            assert await client.set_power(powered) is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_set_brightness(wled_url, mock_wled_info):
    """set_brightness() accepts 0-255 and rejects values beyond that."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )
    respx.post(f"{wled_url}/json/state").mock(
        return_value=Response(200, json={"success": True})
    )

    async with WLEDClient(wled_url) as client:
        assert await client.set_brightness(128) is True

        # 300 is outside the valid 0-255 range.
        with pytest.raises(ValueError):
            await client.set_brightness(300)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_test_connection(wled_url, mock_wled_info):
    """test_connection() reports True when /json/info answers."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )

    async with WLEDClient(wled_url) as client:
        assert await client.test_connection() is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_retry_logic(wled_url, mock_wled_info):
    """connect() retries failed requests and succeeds on the third attempt."""
    attempts = 0

    def flaky_info(request):
        # Fail the first two calls, then answer with a valid payload.
        nonlocal attempts
        attempts += 1
        if attempts < 3:
            return Response(500, text="Error")
        return Response(200, json=mock_wled_info)

    respx.get(f"{wled_url}/json/info").mock(side_effect=flaky_info)

    client = WLEDClient(wled_url, retry_attempts=3, retry_delay=0.1)
    try:
        assert await client.connect() is True
        assert attempts == 3  # two failures + one success
    finally:
        await client.close()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@respx.mock
async def test_context_manager(wled_url, mock_wled_info):
    """The async context manager connects on entry and closes on exit."""
    respx.get(f"{wled_url}/json/info").mock(
        return_value=Response(200, json=mock_wled_info)
    )

    async with WLEDClient(wled_url) as client:
        assert client.is_connected is True

    # Leaving the context must have closed the connection.
    assert client.is_connected is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_request_without_connection(wled_url):
    """Calling the API before connect() raises RuntimeError."""
    unconnected = WLEDClient(wled_url)

    with pytest.raises(RuntimeError):
        await unconnected.get_state()
|
||||
Reference in New Issue
Block a user