Add Picture Streams architecture with postprocessing templates and stream test UI

Introduce Picture Stream abstraction that separates the capture pipeline into
composable layers: raw streams (display + capture engine + FPS) and processed
streams (source stream + postprocessing template). Devices reference a picture
stream instead of managing individual capture settings.

- Add PictureStream and PostprocessingTemplate data models and stores
- Add CRUD API endpoints for picture streams and postprocessing templates
- Add stream chain resolution in ProcessorManager for start_processing
- Add picture stream test endpoint with postprocessing preview support
- Add Stream Settings modal with border_width and interpolation_mode controls
- Add stream test modal with capture preview and performance metrics
- Add full frontend: Picture Streams tab, Processing Templates tab, stream
  selector on device cards, test buttons on stream cards
- Add localization keys for all new features (en, ru)
- Migrate existing devices to picture streams on startup

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-11 00:00:30 +03:00
parent 3db7ba4b0e
commit 493f14fba9
23 changed files with 2773 additions and 200 deletions

View File

@@ -1,5 +1,7 @@
"""Storage layer for device and configuration persistence."""
from .device_store import DeviceStore
from .picture_stream_store import PictureStreamStore
from .postprocessing_template_store import PostprocessingTemplateStore
__all__ = ["DeviceStore"]
__all__ = ["DeviceStore", "PictureStreamStore", "PostprocessingTemplateStore"]

View File

@@ -30,7 +30,8 @@ class Device:
enabled: bool = True,
settings: Optional[ProcessingSettings] = None,
calibration: Optional[CalibrationConfig] = None,
capture_template_id: str = "tpl_mss_default",
capture_template_id: str = "",
picture_stream_id: str = "",
created_at: Optional[datetime] = None,
updated_at: Optional[datetime] = None,
):
@@ -44,7 +45,8 @@ class Device:
enabled: Whether device is enabled
settings: Processing settings
calibration: Calibration configuration
capture_template_id: ID of assigned capture template
capture_template_id: ID of assigned capture template (legacy, use picture_stream_id)
picture_stream_id: ID of assigned picture stream
created_at: Creation timestamp
updated_at: Last update timestamp
"""
@@ -56,6 +58,7 @@ class Device:
self.settings = settings or ProcessingSettings()
self.calibration = calibration or create_default_calibration(led_count)
self.capture_template_id = capture_template_id
self.picture_stream_id = picture_stream_id
self.created_at = created_at or datetime.utcnow()
self.updated_at = updated_at or datetime.utcnow()
@@ -84,6 +87,7 @@ class Device:
},
"calibration": calibration_to_dict(self.calibration),
"capture_template_id": self.capture_template_id,
"picture_stream_id": self.picture_stream_id,
"created_at": self.created_at.isoformat(),
"updated_at": self.updated_at.isoformat(),
}
@@ -121,11 +125,8 @@ class Device:
else create_default_calibration(data["led_count"])
)
# Migration: assign default MSS template if no template set
capture_template_id = data.get("capture_template_id")
if not capture_template_id:
capture_template_id = "tpl_mss_default"
logger.info(f"Migrating device {data['id']} to default MSS template")
capture_template_id = data.get("capture_template_id", "")
picture_stream_id = data.get("picture_stream_id", "")
return cls(
device_id=data["id"],
@@ -136,6 +137,7 @@ class Device:
settings=settings,
calibration=calibration,
capture_template_id=capture_template_id,
picture_stream_id=picture_stream_id,
created_at=datetime.fromisoformat(data.get("created_at", datetime.utcnow().isoformat())),
updated_at=datetime.fromisoformat(data.get("updated_at", datetime.utcnow().isoformat())),
)
@@ -217,7 +219,8 @@ class DeviceStore:
led_count: int,
settings: Optional[ProcessingSettings] = None,
calibration: Optional[CalibrationConfig] = None,
capture_template_id: str = "tpl_mss_default",
capture_template_id: str = "",
picture_stream_id: str = "",
) -> Device:
"""Create a new device.
@@ -247,6 +250,7 @@ class DeviceStore:
settings=settings,
calibration=calibration,
capture_template_id=capture_template_id,
picture_stream_id=picture_stream_id,
)
# Store
@@ -285,6 +289,7 @@ class DeviceStore:
settings: Optional[ProcessingSettings] = None,
calibration: Optional[CalibrationConfig] = None,
capture_template_id: Optional[str] = None,
picture_stream_id: Optional[str] = None,
) -> Device:
"""Update device.
@@ -331,6 +336,8 @@ class DeviceStore:
device.calibration = calibration
if capture_template_id is not None:
device.capture_template_id = capture_template_id
if picture_stream_id is not None:
device.picture_stream_id = picture_stream_id
device.updated_at = datetime.utcnow()

View File

@@ -0,0 +1,69 @@
"""Picture stream data model."""
from dataclasses import dataclass
from datetime import datetime
from typing import Optional


def _read_timestamp(data: dict, key: str) -> datetime:
    """Parse a timestamp field from a serialized dict.

    An ISO-format string is parsed, an existing datetime passes through
    unchanged, and a missing key falls back to the current UTC time.
    """
    raw = data.get(key)
    if isinstance(raw, str):
        return datetime.fromisoformat(raw)
    return data.get(key, datetime.utcnow())


@dataclass
class PictureStream:
    """A single picture stream configuration.

    Two flavours exist:
    - "raw": grabs frames from a display using a capture engine template
      at a target FPS.
    - "processed": takes another stream's output and runs it through a
      postprocessing template.
    """

    id: str
    name: str
    stream_type: str  # "raw" or "processed"
    created_at: datetime
    updated_at: datetime
    # Raw-only fields (meaningful when stream_type == "raw")
    display_index: Optional[int] = None
    capture_template_id: Optional[str] = None
    target_fps: Optional[int] = None
    # Processed-only fields (meaningful when stream_type == "processed")
    source_stream_id: Optional[str] = None
    postprocessing_template_id: Optional[str] = None
    description: Optional[str] = None

    def to_dict(self) -> dict:
        """Serialize the stream to a JSON-ready dictionary."""
        payload = {
            "id": self.id,
            "name": self.name,
            "stream_type": self.stream_type,
            "display_index": self.display_index,
            "capture_template_id": self.capture_template_id,
            "target_fps": self.target_fps,
            "source_stream_id": self.source_stream_id,
            "postprocessing_template_id": self.postprocessing_template_id,
            "created_at": self.created_at.isoformat(),
            "updated_at": self.updated_at.isoformat(),
        }
        payload["description"] = self.description
        return payload

    @classmethod
    def from_dict(cls, data: dict) -> "PictureStream":
        """Deserialize a stream from a dictionary produced by to_dict()."""
        return cls(
            id=data["id"],
            name=data["name"],
            stream_type=data["stream_type"],
            display_index=data.get("display_index"),
            capture_template_id=data.get("capture_template_id"),
            target_fps=data.get("target_fps"),
            source_stream_id=data.get("source_stream_id"),
            postprocessing_template_id=data.get("postprocessing_template_id"),
            created_at=_read_timestamp(data, "created_at"),
            updated_at=_read_timestamp(data, "updated_at"),
            description=data.get("description"),
        )

View File

@@ -0,0 +1,332 @@
"""Picture stream storage using JSON files."""
import json
import uuid
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional, Set
from wled_controller.storage.picture_stream import PictureStream
from wled_controller.utils import get_logger
logger = get_logger(__name__)
class PictureStreamStore:
    """JSON-file-backed storage for picture streams.

    Supports "raw" and "processed" stream types, with cycle detection for
    processed streams that chain to other streams via source_stream_id.
    """

    def __init__(self, file_path: str):
        """Initialize picture stream store.

        Args:
            file_path: Path to streams JSON file
        """
        self.file_path = Path(file_path)
        self._streams: Dict[str, PictureStream] = {}
        self._load()

    def _load(self) -> None:
        """Load streams from file; individual bad entries are logged and skipped."""
        if not self.file_path.exists():
            return
        try:
            with open(self.file_path, "r", encoding="utf-8") as f:
                data = json.load(f)
            streams_data = data.get("picture_streams", {})
            loaded = 0
            for stream_id, stream_dict in streams_data.items():
                try:
                    self._streams[stream_id] = PictureStream.from_dict(stream_dict)
                    loaded += 1
                except Exception as e:
                    logger.error(
                        f"Failed to load picture stream {stream_id}: {e}",
                        exc_info=True,
                    )
            if loaded > 0:
                logger.info(f"Loaded {loaded} picture streams from storage")
        except Exception as e:
            logger.error(f"Failed to load picture streams from {self.file_path}: {e}")
            raise
        logger.info(f"Picture stream store initialized with {len(self._streams)} streams")

    def _save(self) -> None:
        """Persist all streams to the JSON file, creating parent dirs as needed."""
        try:
            self.file_path.parent.mkdir(parents=True, exist_ok=True)
            streams_dict = {
                stream_id: stream.to_dict()
                for stream_id, stream in self._streams.items()
            }
            data = {
                "version": "1.0.0",
                "picture_streams": streams_dict,
            }
            with open(self.file_path, "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
        except Exception as e:
            logger.error(f"Failed to save picture streams to {self.file_path}: {e}")
            raise

    def _detect_cycle(self, source_stream_id: str, exclude_stream_id: Optional[str] = None) -> bool:
        """Detect whether walking the source chain from source_stream_id would loop.

        Args:
            source_stream_id: The source stream ID to start walking from
            exclude_stream_id: Stream ID to exclude (the stream being created/updated)

        Returns:
            True if a cycle would be created
        """
        visited: Set[str] = set()
        if exclude_stream_id:
            visited.add(exclude_stream_id)
        current_id = source_stream_id
        while current_id:
            if current_id in visited:
                return True
            visited.add(current_id)
            current_stream = self._streams.get(current_id)
            if not current_stream:
                # Broken chain — not a cycle; other validation handles it.
                break
            if current_stream.stream_type == "raw":
                # Raw streams terminate every chain.
                break
            current_id = current_stream.source_stream_id
        return False

    def get_all_streams(self) -> List[PictureStream]:
        """Get all picture streams."""
        return list(self._streams.values())

    def get_stream(self, stream_id: str) -> PictureStream:
        """Get stream by ID.

        Raises:
            ValueError: If stream not found
        """
        if stream_id not in self._streams:
            raise ValueError(f"Picture stream not found: {stream_id}")
        return self._streams[stream_id]

    def create_stream(
        self,
        name: str,
        stream_type: str,
        display_index: Optional[int] = None,
        capture_template_id: Optional[str] = None,
        target_fps: Optional[int] = None,
        source_stream_id: Optional[str] = None,
        postprocessing_template_id: Optional[str] = None,
        description: Optional[str] = None,
    ) -> PictureStream:
        """Create a new picture stream.

        Fields that do not apply to the given stream_type are discarded so a
        stream never carries stale cross-type references.

        Args:
            name: Stream name (must be unique)
            stream_type: "raw" or "processed"
            display_index: Display index (raw streams)
            capture_template_id: Capture template ID (raw streams)
            target_fps: Target FPS (raw streams)
            source_stream_id: Source stream ID (processed streams)
            postprocessing_template_id: Postprocessing template ID (processed streams)
            description: Optional description

        Returns:
            The newly created stream

        Raises:
            ValueError: If validation fails or a cycle is detected
        """
        if stream_type not in ("raw", "processed"):
            raise ValueError(f"Invalid stream type: {stream_type}")
        if stream_type == "raw":
            if display_index is None:
                raise ValueError("Raw streams require display_index")
            if not capture_template_id:
                raise ValueError("Raw streams require capture_template_id")
            if target_fps is None:
                raise ValueError("Raw streams require target_fps")
            # Drop processed-only fields: a stray source reference on a raw
            # stream would wrongly block deletion of the referenced stream.
            source_stream_id = None
            postprocessing_template_id = None
        elif stream_type == "processed":
            if not source_stream_id:
                raise ValueError("Processed streams require source_stream_id")
            if not postprocessing_template_id:
                raise ValueError("Processed streams require postprocessing_template_id")
            # Validate source stream exists
            if source_stream_id not in self._streams:
                raise ValueError(f"Source stream not found: {source_stream_id}")
            # Check for cycles
            if self._detect_cycle(source_stream_id):
                raise ValueError("Cycle detected in stream chain")
            # Drop raw-only fields, mirroring the raw branch above.
            display_index = None
            capture_template_id = None
            target_fps = None
        # Check for duplicate name
        for stream in self._streams.values():
            if stream.name == name:
                raise ValueError(f"Picture stream with name '{name}' already exists")
        stream_id = f"ps_{uuid.uuid4().hex[:8]}"
        now = datetime.utcnow()
        stream = PictureStream(
            id=stream_id,
            name=name,
            stream_type=stream_type,
            display_index=display_index,
            capture_template_id=capture_template_id,
            target_fps=target_fps,
            source_stream_id=source_stream_id,
            postprocessing_template_id=postprocessing_template_id,
            created_at=now,
            updated_at=now,
            description=description,
        )
        self._streams[stream_id] = stream
        self._save()
        logger.info(f"Created picture stream: {name} ({stream_id}, type={stream_type})")
        return stream

    def update_stream(
        self,
        stream_id: str,
        name: Optional[str] = None,
        display_index: Optional[int] = None,
        capture_template_id: Optional[str] = None,
        target_fps: Optional[int] = None,
        source_stream_id: Optional[str] = None,
        postprocessing_template_id: Optional[str] = None,
        description: Optional[str] = None,
    ) -> PictureStream:
        """Update an existing picture stream.

        Fields that do not apply to the stream's type are ignored
        (e.g. display_index on a processed stream, source_stream_id on a
        raw stream — previously a raw stream's source could be set without
        any validation).

        Raises:
            ValueError: If stream not found, name already taken, source
                stream missing, or a cycle would be created
        """
        if stream_id not in self._streams:
            raise ValueError(f"Picture stream not found: {stream_id}")
        stream = self._streams[stream_id]
        # If changing source_stream_id on a processed stream, validate it first
        if source_stream_id is not None and stream.stream_type == "processed":
            if source_stream_id not in self._streams:
                raise ValueError(f"Source stream not found: {source_stream_id}")
            if self._detect_cycle(source_stream_id, exclude_stream_id=stream_id):
                raise ValueError("Cycle detected in stream chain")
        if name is not None:
            # Keep names unique, consistent with create_stream.
            for other in self._streams.values():
                if other.id != stream_id and other.name == name:
                    raise ValueError(f"Picture stream with name '{name}' already exists")
            stream.name = name
        if stream.stream_type == "raw":
            if display_index is not None:
                stream.display_index = display_index
            if capture_template_id is not None:
                stream.capture_template_id = capture_template_id
            if target_fps is not None:
                stream.target_fps = target_fps
        else:
            if source_stream_id is not None:
                stream.source_stream_id = source_stream_id
            if postprocessing_template_id is not None:
                stream.postprocessing_template_id = postprocessing_template_id
        if description is not None:
            stream.description = description
        stream.updated_at = datetime.utcnow()
        self._save()
        logger.info(f"Updated picture stream: {stream_id}")
        return stream

    def delete_stream(self, stream_id: str) -> None:
        """Delete a picture stream.

        Raises:
            ValueError: If stream not found or is referenced by another stream
        """
        if stream_id not in self._streams:
            raise ValueError(f"Picture stream not found: {stream_id}")
        # Check if any processed stream references this one as its source.
        for other_stream in self._streams.values():
            if (
                other_stream.stream_type == "processed"
                and other_stream.source_stream_id == stream_id
            ):
                raise ValueError(
                    f"Cannot delete stream '{self._streams[stream_id].name}': "
                    f"it is referenced by stream '{other_stream.name}'"
                )
        del self._streams[stream_id]
        self._save()
        logger.info(f"Deleted picture stream: {stream_id}")

    def is_referenced_by_device(self, stream_id: str, device_store) -> bool:
        """Check if this stream is referenced by any device.

        Args:
            stream_id: Stream ID to check
            device_store: DeviceStore instance

        Returns:
            True if any device references this stream
        """
        for device in device_store.get_all_devices():
            if getattr(device, "picture_stream_id", None) == stream_id:
                return True
        return False

    def resolve_stream_chain(self, stream_id: str) -> dict:
        """Resolve a stream chain to its root raw stream and postprocessing steps.

        Walks from the given stream toward the root raw stream, collecting
        postprocessing template IDs along the way.

        Args:
            stream_id: Starting stream ID

        Returns:
            Dict with:
            - raw_stream: The root raw PictureStream
            - postprocessing_template_ids: PP template IDs ordered from the
              requested stream toward the raw root (nearest-to-request first)

        Raises:
            ValueError: If stream not found or chain is broken
        """
        postprocessing_template_ids = []
        visited = set()
        current_id = stream_id
        while True:
            if current_id in visited:
                raise ValueError(f"Cycle detected in stream chain at {current_id}")
            visited.add(current_id)
            stream = self.get_stream(current_id)
            if stream.stream_type == "raw":
                return {
                    "raw_stream": stream,
                    "postprocessing_template_ids": postprocessing_template_ids,
                }
            # Processed stream — collect PP template and follow source
            if stream.postprocessing_template_id:
                postprocessing_template_ids.append(stream.postprocessing_template_id)
            if not stream.source_stream_id:
                raise ValueError(f"Processed stream {current_id} has no source_stream_id")
            current_id = stream.source_stream_id

View File

@@ -0,0 +1,53 @@
"""Postprocessing template data model."""
from dataclasses import dataclass
from datetime import datetime
from typing import Optional


def _read_timestamp(data: dict, key: str) -> datetime:
    """Parse a timestamp field from a serialized dict.

    An ISO-format string is parsed, an existing datetime passes through
    unchanged, and a missing key falls back to the current UTC time.
    """
    raw = data.get(key)
    if isinstance(raw, str):
        return datetime.fromisoformat(raw)
    return data.get(key, datetime.utcnow())


@dataclass
class PostprocessingTemplate:
    """Postprocessing settings template for color correction and smoothing."""

    id: str
    name: str
    gamma: float
    saturation: float
    brightness: float
    smoothing: float
    created_at: datetime
    updated_at: datetime
    description: Optional[str] = None

    def to_dict(self) -> dict:
        """Serialize the template to a JSON-ready dictionary."""
        payload = {
            "id": self.id,
            "name": self.name,
            "gamma": self.gamma,
            "saturation": self.saturation,
            "brightness": self.brightness,
            "smoothing": self.smoothing,
            "created_at": self.created_at.isoformat(),
            "updated_at": self.updated_at.isoformat(),
        }
        payload["description"] = self.description
        return payload

    @classmethod
    def from_dict(cls, data: dict) -> "PostprocessingTemplate":
        """Deserialize a template, applying defaults for missing settings."""
        return cls(
            id=data["id"],
            name=data["name"],
            gamma=data.get("gamma", 2.2),
            saturation=data.get("saturation", 1.0),
            brightness=data.get("brightness", 1.0),
            smoothing=data.get("smoothing", 0.3),
            created_at=_read_timestamp(data, "created_at"),
            updated_at=_read_timestamp(data, "updated_at"),
            description=data.get("description"),
        )

View File

@@ -0,0 +1,230 @@
"""Postprocessing template storage using JSON files."""
import json
import uuid
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional
from wled_controller.storage.postprocessing_template import PostprocessingTemplate
from wled_controller.utils import get_logger
logger = get_logger(__name__)
class PostprocessingTemplateStore:
    """JSON-file-backed storage for postprocessing templates.

    All templates are persisted to the JSON file. On startup, if no
    templates exist, a default one is auto-created.
    """

    def __init__(self, file_path: str):
        """Initialize postprocessing template store.

        Args:
            file_path: Path to templates JSON file
        """
        self.file_path = Path(file_path)
        self._templates: Dict[str, PostprocessingTemplate] = {}
        self._load()
        self._ensure_initial_template()

    def _ensure_initial_template(self) -> None:
        """Auto-create a default postprocessing template if none exist."""
        if self._templates:
            return
        now = datetime.utcnow()
        template_id = f"pp_{uuid.uuid4().hex[:8]}"
        template = PostprocessingTemplate(
            id=template_id,
            name="Default",
            gamma=2.2,
            saturation=1.0,
            brightness=1.0,
            smoothing=0.3,
            created_at=now,
            updated_at=now,
            description="Default postprocessing template",
        )
        self._templates[template_id] = template
        self._save()
        logger.info(f"Auto-created initial postprocessing template: {template.name} ({template_id})")

    def _load(self) -> None:
        """Load templates from file; individual bad entries are logged and skipped."""
        if not self.file_path.exists():
            return
        try:
            with open(self.file_path, "r", encoding="utf-8") as f:
                data = json.load(f)
            templates_data = data.get("postprocessing_templates", {})
            loaded = 0
            for template_id, template_dict in templates_data.items():
                try:
                    self._templates[template_id] = PostprocessingTemplate.from_dict(template_dict)
                    loaded += 1
                except Exception as e:
                    logger.error(
                        f"Failed to load postprocessing template {template_id}: {e}",
                        exc_info=True,
                    )
            if loaded > 0:
                logger.info(f"Loaded {loaded} postprocessing templates from storage")
        except Exception as e:
            logger.error(f"Failed to load postprocessing templates from {self.file_path}: {e}")
            raise
        logger.info(f"Postprocessing template store initialized with {len(self._templates)} templates")

    def _save(self) -> None:
        """Persist all templates to the JSON file, creating parent dirs as needed."""
        try:
            self.file_path.parent.mkdir(parents=True, exist_ok=True)
            templates_dict = {
                template_id: template.to_dict()
                for template_id, template in self._templates.items()
            }
            data = {
                "version": "1.0.0",
                "postprocessing_templates": templates_dict,
            }
            with open(self.file_path, "w", encoding="utf-8") as f:
                json.dump(data, f, indent=2, ensure_ascii=False)
        except Exception as e:
            logger.error(f"Failed to save postprocessing templates to {self.file_path}: {e}")
            raise

    def get_all_templates(self) -> List[PostprocessingTemplate]:
        """Get all postprocessing templates."""
        return list(self._templates.values())

    def get_template(self, template_id: str) -> PostprocessingTemplate:
        """Get template by ID.

        Raises:
            ValueError: If template not found
        """
        if template_id not in self._templates:
            raise ValueError(f"Postprocessing template not found: {template_id}")
        return self._templates[template_id]

    def create_template(
        self,
        name: str,
        gamma: float = 2.2,
        saturation: float = 1.0,
        brightness: float = 1.0,
        smoothing: float = 0.3,
        description: Optional[str] = None,
    ) -> PostprocessingTemplate:
        """Create a new postprocessing template.

        Args:
            name: Template name (must be unique)
            gamma: Gamma correction value
            saturation: Saturation multiplier
            brightness: Brightness multiplier
            smoothing: Temporal smoothing factor
            description: Optional description

        Raises:
            ValueError: If a template with the same name exists
        """
        for template in self._templates.values():
            if template.name == name:
                raise ValueError(f"Postprocessing template with name '{name}' already exists")
        template_id = f"pp_{uuid.uuid4().hex[:8]}"
        now = datetime.utcnow()
        template = PostprocessingTemplate(
            id=template_id,
            name=name,
            gamma=gamma,
            saturation=saturation,
            brightness=brightness,
            smoothing=smoothing,
            created_at=now,
            updated_at=now,
            description=description,
        )
        self._templates[template_id] = template
        self._save()
        logger.info(f"Created postprocessing template: {name} ({template_id})")
        return template

    def update_template(
        self,
        template_id: str,
        name: Optional[str] = None,
        gamma: Optional[float] = None,
        saturation: Optional[float] = None,
        brightness: Optional[float] = None,
        smoothing: Optional[float] = None,
        description: Optional[str] = None,
    ) -> PostprocessingTemplate:
        """Update an existing postprocessing template.

        Only the fields passed as non-None are changed.

        Raises:
            ValueError: If template not found or the new name is taken
        """
        if template_id not in self._templates:
            raise ValueError(f"Postprocessing template not found: {template_id}")
        template = self._templates[template_id]
        if name is not None:
            # Keep names unique, consistent with create_template.
            for other in self._templates.values():
                if other.id != template_id and other.name == name:
                    raise ValueError(f"Postprocessing template with name '{name}' already exists")
            template.name = name
        if gamma is not None:
            template.gamma = gamma
        if saturation is not None:
            template.saturation = saturation
        if brightness is not None:
            template.brightness = brightness
        if smoothing is not None:
            template.smoothing = smoothing
        if description is not None:
            template.description = description
        template.updated_at = datetime.utcnow()
        self._save()
        logger.info(f"Updated postprocessing template: {template_id}")
        return template

    def delete_template(self, template_id: str, picture_stream_store=None) -> None:
        """Delete a postprocessing template.

        Args:
            template_id: Template ID
            picture_stream_store: Optional PictureStreamStore; when provided,
                deletion is refused if any picture stream still references
                this template.

        Raises:
            ValueError: If template not found, or referenced by a picture
                stream (only checked when picture_stream_store is given)
        """
        if template_id not in self._templates:
            raise ValueError(f"Postprocessing template not found: {template_id}")
        if picture_stream_store is not None and self.is_referenced_by(
            template_id, picture_stream_store
        ):
            raise ValueError(
                f"Cannot delete template '{self._templates[template_id].name}': "
                "it is referenced by a picture stream"
            )
        del self._templates[template_id]
        self._save()
        logger.info(f"Deleted postprocessing template: {template_id}")

    def is_referenced_by(self, template_id: str, picture_stream_store) -> bool:
        """Check if this template is referenced by any picture stream.

        Args:
            template_id: Template ID to check
            picture_stream_store: PictureStreamStore instance

        Returns:
            True if any picture stream references this template
        """
        for stream in picture_stream_store.get_all_streams():
            if stream.postprocessing_template_id == template_id:
                return True
        return False

View File

@@ -13,7 +13,6 @@ class CaptureTemplate:
name: str
engine_type: str
engine_config: Dict[str, Any]
is_default: bool
created_at: datetime
updated_at: datetime
description: Optional[str] = None
@@ -29,7 +28,6 @@ class CaptureTemplate:
"name": self.name,
"engine_type": self.engine_type,
"engine_config": self.engine_config,
"is_default": self.is_default,
"created_at": self.created_at.isoformat(),
"updated_at": self.updated_at.isoformat(),
"description": self.description,
@@ -50,7 +48,6 @@ class CaptureTemplate:
name=data["name"],
engine_type=data["engine_type"],
engine_config=data.get("engine_config", {}),
is_default=data.get("is_default", False),
created_at=datetime.fromisoformat(data["created_at"])
if isinstance(data.get("created_at"), str)
else data.get("created_at", datetime.utcnow()),

View File

@@ -16,8 +16,9 @@ logger = get_logger(__name__)
class TemplateStore:
"""Storage for capture templates.
Default templates for each available engine are created in memory at startup.
Only user-created templates are persisted to the JSON file.
All templates are persisted to the JSON file.
On startup, if no templates exist, one is auto-created using the
highest-priority available engine.
"""
def __init__(self, file_path: str):
@@ -28,34 +29,40 @@ class TemplateStore:
"""
self.file_path = Path(file_path)
self._templates: Dict[str, CaptureTemplate] = {}
self._ensure_defaults()
self._load()
self._ensure_initial_template()
def _ensure_defaults(self) -> None:
"""Create default templates in memory for all available engines."""
available = EngineRegistry.get_available_engines()
def _ensure_initial_template(self) -> None:
"""Auto-create a template if none exist, using the best available engine."""
if self._templates:
return
best_engine = EngineRegistry.get_best_available_engine()
if not best_engine:
logger.warning("No capture engines available, cannot create initial template")
return
engine_class = EngineRegistry.get_engine(best_engine)
default_config = engine_class.get_default_config()
now = datetime.utcnow()
template_id = f"tpl_{uuid.uuid4().hex[:8]}"
for engine_type in available:
template_id = f"tpl_{engine_type}_default"
engine_class = EngineRegistry.get_engine(engine_type)
default_config = engine_class.get_default_config()
template = CaptureTemplate(
id=template_id,
name=best_engine.upper(),
engine_type=best_engine,
engine_config=default_config,
created_at=now,
updated_at=now,
description=f"Auto-created {best_engine.upper()} template",
)
self._templates[template_id] = CaptureTemplate(
id=template_id,
name=engine_type.upper(),
engine_type=engine_type,
engine_config=default_config,
is_default=True,
created_at=now,
updated_at=now,
description=f"Default {engine_type} capture template",
)
logger.info(f"Created {len(available)} default templates in memory")
self._templates[template_id] = template
self._save()
logger.info(f"Auto-created initial template: {template.name} ({template_id}, engine={best_engine})")
def _load(self) -> None:
"""Load user-created templates from file."""
"""Load templates from file."""
if not self.file_path.exists():
return
@@ -66,9 +73,6 @@ class TemplateStore:
templates_data = data.get("templates", {})
loaded = 0
for template_id, template_dict in templates_data.items():
# Skip any default templates that may exist in old files
if template_dict.get("is_default", False):
continue
try:
template = CaptureTemplate.from_dict(template_dict)
self._templates[template_id] = template
@@ -80,26 +84,23 @@ class TemplateStore:
)
if loaded > 0:
logger.info(f"Loaded {loaded} user templates from storage")
logger.info(f"Loaded {loaded} templates from storage")
except Exception as e:
logger.error(f"Failed to load templates from {self.file_path}: {e}")
raise
total = len(self._templates)
logger.info(f"Template store initialized with {total} templates")
logger.info(f"Template store initialized with {len(self._templates)} templates")
def _save(self) -> None:
"""Save only user-created templates to file."""
"""Save all templates to file."""
try:
# Ensure directory exists
self.file_path.parent.mkdir(parents=True, exist_ok=True)
# Only persist non-default templates
templates_dict = {
template_id: template.to_dict()
for template_id, template in self._templates.items()
if not template.is_default
}
data = {
@@ -162,7 +163,7 @@ class TemplateStore:
"""
# Check for duplicate name
for template in self._templates.values():
if template.name == name and not template.is_default:
if template.name == name:
raise ValueError(f"Template with name '{name}' already exists")
# Generate new ID
@@ -175,7 +176,6 @@ class TemplateStore:
name=name,
engine_type=engine_type,
engine_config=engine_config,
is_default=False,
created_at=now,
updated_at=now,
description=description,
@@ -209,16 +209,13 @@ class TemplateStore:
Updated template
Raises:
ValueError: If template not found or is a default template
ValueError: If template not found
"""
if template_id not in self._templates:
raise ValueError(f"Template not found: {template_id}")
template = self._templates[template_id]
if template.is_default:
raise ValueError("Cannot modify default templates")
# Update fields
if name is not None:
template.name = name
@@ -244,16 +241,11 @@ class TemplateStore:
template_id: Template ID
Raises:
ValueError: If template not found or is a default template
ValueError: If template not found
"""
if template_id not in self._templates:
raise ValueError(f"Template not found: {template_id}")
template = self._templates[template_id]
if template.is_default:
raise ValueError("Cannot delete default templates")
# Remove and save
del self._templates[template_id]
self._save()