fix: remove destructive DELETE+INSERT shutdown save that caused progressive data loss

_save_all() in BaseSqliteStore did DELETE FROM table + INSERT all in-memory items
on every shutdown. Since SQLite stores use write-through caching (every CRUD writes
immediately), this was redundant. Worse, if in-memory state had fewer items than
the DB, the DELETE wiped rows and only partial data was reinserted.

- Make _save_all() a no-op (DB is always up to date via write-through)
- Replace self._save() with self._save_item() in 6 seed/default creation methods
- Keep _save_all_stores() for backward compatibility (server_ref.py calls it), but gut its body to a log-only message
This commit is contained in:
2026-03-25 13:16:35 +03:00
parent 382a42755d
commit 9a3433a733
8 changed files with 17 additions and 52 deletions

View File

@@ -103,26 +103,12 @@ processor_manager = ProcessorManager(
def _save_all_stores() -> None:
"""Persist every store to disk.
"""Shutdown hook — SQLite stores use write-through caching, so this is a no-op.
Called during graceful shutdown to ensure in-memory data survives
restarts even if no CRUD happened during the session.
Every create/update/delete already goes to the database immediately.
Kept for backward compatibility with server_ref.py which calls this.
"""
all_stores = [
device_store, template_store, pp_template_store,
picture_source_store, output_target_store, pattern_template_store,
color_strip_store, audio_source_store, audio_template_store,
value_source_store, automation_store, scene_preset_store,
sync_clock_store, cspt_store, gradient_store, weather_source_store,
]
saved = 0
for store in all_stores:
try:
store._save(force=True)
saved += 1
except Exception as e:
logger.error(f"Failed to save {store._json_key} on shutdown: {e}")
logger.info(f"Shutdown save: persisted {saved}/{len(all_stores)} stores to disk")
logger.info("Shutdown: all stores already persisted (write-through cache)")
@asynccontextmanager

View File

@@ -58,7 +58,7 @@ class AudioTemplateStore(BaseSqliteStore[AudioCaptureTemplate]):
)
self._items[template_id] = template
self._save()
self._save_item(template_id, template)
logger.info(
f"Auto-created initial audio template: {template.name} "
f"({template_id}, engine={best_engine})"

View File

@@ -71,35 +71,13 @@ class BaseSqliteStore(Generic[T]):
self._db.delete_row(self._table_name, item_id)
def _save_all(self, *, force: bool = False) -> None:
"""Persist all items to SQLite.
"""No-op — SQLite stores use write-through caching.
Used during shutdown to ensure in-memory state is flushed.
When ``force`` is True, bypasses the frozen-writes check.
Every create/update calls ``_save_item()`` and every delete calls
``_delete_item()``, so the database is always up to date.
A bulk DELETE + re-INSERT here would be destructive if in-memory
state diverged from the DB (e.g., partial load on startup).
"""
from wled_controller.storage.database import _writes_frozen
if _writes_frozen and not force:
logger.warning(f"Save blocked (frozen after restore): {self._table_name}")
return
items_to_write = []
with self._lock:
for item_id, item in self._items.items():
data = item.to_dict()
import json
items_to_write.append((
item_id,
data.get("name", ""),
json.dumps(data, ensure_ascii=False),
))
if items_to_write:
# Use transaction for atomicity: clear + re-insert
with self._db.transaction() as conn:
conn.execute(f"DELETE FROM [{self._table_name}]")
conn.executemany(
f"INSERT INTO [{self._table_name}] (id, name, data) VALUES (?, ?, ?)",
items_to_write,
)
# -- Backward compat: _save() used by subclass create/update methods -----

View File

@@ -55,7 +55,7 @@ class ColorStripProcessingTemplateStore(BaseSqliteStore[ColorStripProcessingTemp
)
self._items[template_id] = template
self._save()
self._save_item(template_id, template)
logger.info(f"Auto-created initial color strip processing template: {template.name} ({template_id})")
def _validate_strip_filters(self, filters: List[FilterInstance]) -> None:

View File

@@ -58,7 +58,7 @@ class GradientStore(BaseSqliteStore[Gradient]):
now = datetime.now(timezone.utc)
for name, tuples in _BUILTIN_DEFS.items():
gid = f"gr_builtin_{name}"
self._items[gid] = Gradient(
gradient = Gradient(
id=gid,
name=name.capitalize(),
stops=_tuples_to_stops(tuples),
@@ -67,7 +67,8 @@ class GradientStore(BaseSqliteStore[Gradient]):
updated_at=now,
description=f"Built-in {name} gradient",
)
self._save()
self._items[gid] = gradient
self._save_item(gid, gradient)
logger.info(f"Seeded {len(_BUILTIN_DEFS)} built-in gradients")
# Aliases

View File

@@ -52,7 +52,7 @@ class PatternTemplateStore(BaseSqliteStore[PatternTemplate]):
)
self._items[template_id] = template
self._save()
self._save_item(template_id, template)
logger.info(f"Auto-created initial pattern template: {template.name} ({template_id})")
def create_template(

View File

@@ -57,7 +57,7 @@ class PostprocessingTemplateStore(BaseSqliteStore[PostprocessingTemplate]):
)
self._items[template_id] = template
self._save()
self._save_item(template_id, template)
logger.info(f"Auto-created initial postprocessing template: {template.name} ({template_id})")
def create_template(

View File

@@ -59,7 +59,7 @@ class TemplateStore(BaseSqliteStore[CaptureTemplate]):
)
self._items[template_id] = template
self._save()
self._save_item(template_id, template)
logger.info(f"Auto-created initial template: {template.name} ({template_id}, engine={best_engine})")
def create_template(