Add quiet hours, fix Telegram bugs, and improve cache performance
All checks were successful
Validate / Hassfest (push) Successful in 5s

- Add quiet hours support to queue notifications during configured time windows
- Fix UnboundLocalError when single-item document chunk exceeds max_asset_data_size
- Fix document-only multi-item chunks being silently dropped (missing skip guard)
- Fix notification queue entity lookup by storing entity_id in queued params
- Fix quiet hours using OS timezone instead of HA-configured timezone (dt_util.now)
- Fix chat_action schema rejecting empty string from "Disabled" selector
- Fix stale thumbhash cache entries not being removed on mismatch
- Fix translation descriptions for send_large_photos_as_documents
- Add batch async_set_many() to TelegramFileCache to reduce disk writes
- Add max-entries eviction (2000) for thumbhash cache to prevent unbounded growth
- Eliminate redundant _is_asset_id/get_asset_thumbhash lookups in media group loop

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-09 09:45:34 +03:00
parent dd7032b411
commit 678e8a6e62
8 changed files with 437 additions and 52 deletions

View File

@@ -115,9 +115,26 @@ class TelegramFileCache:
mode,
)
# Maximum number of entries to keep in thumbhash mode to prevent unbounded growth
THUMBHASH_MAX_ENTRIES = 2000
async def _cleanup_expired(self) -> None:
"""Remove expired cache entries (TTL mode only)."""
"""Remove expired cache entries (TTL mode) or trim old entries (thumbhash mode)."""
if self._use_thumbhash:
files = self._data.get("files", {}) if self._data else {}
if len(files) > self.THUMBHASH_MAX_ENTRIES:
sorted_keys = sorted(
files, key=lambda k: files[k].get("cached_at", "")
)
keys_to_remove = sorted_keys[: len(files) - self.THUMBHASH_MAX_ENTRIES]
for key in keys_to_remove:
del files[key]
await self._store.async_save(self._data)
_LOGGER.debug(
"Trimmed thumbhash cache from %d to %d entries",
len(keys_to_remove) + self.THUMBHASH_MAX_ENTRIES,
self.THUMBHASH_MAX_ENTRIES,
)
return
if not self._data or "files" not in self._data:
@@ -164,9 +181,10 @@ class TelegramFileCache:
stored_thumbhash = entry.get("thumbhash")
if stored_thumbhash and stored_thumbhash != thumbhash:
_LOGGER.debug(
"Cache miss for %s: thumbhash changed",
"Cache miss for %s: thumbhash changed, removing stale entry",
key[:36],
)
del self._data["files"][key]
return None
# If no thumbhash provided (asset not in monitored album),
# return cached entry anyway — self-heals on Telegram rejection
@@ -210,7 +228,91 @@ class TelegramFileCache:
await self._store.async_save(self._data)
_LOGGER.debug("Cached Telegram file_id for key (type: %s)", media_type)
async def async_set_many(
    self, entries: list[tuple[str, str, str, str | None]]
) -> None:
    """Store multiple Telegram file_ids with a single disk write.

    Batching avoids one ``Store.async_save`` per item when caching a
    media group; all entries in the batch share one ``cached_at``
    timestamp.

    Args:
        entries: List of ``(key, file_id, media_type, thumbhash)``
            tuples. ``thumbhash`` may be ``None``, in which case no
            thumbhash is stored for that entry.
    """
    if not entries:
        # Nothing to cache — skip the disk write entirely.
        return
    if self._data is None:
        self._data = {"files": {}}
    # Guard against previously-loaded data that lacks the "files" key;
    # other methods in this class check `"files" not in self._data`,
    # so indexing it unconditionally here could raise KeyError.
    files = self._data.setdefault("files", {})
    now_iso = datetime.now(timezone.utc).isoformat()
    for key, file_id, media_type, thumbhash in entries:
        entry_data: dict[str, Any] = {
            "file_id": file_id,
            "type": media_type,
            "cached_at": now_iso,
        }
        if thumbhash is not None:
            entry_data["thumbhash"] = thumbhash
        files[key] = entry_data
    # Single persist for the whole batch.
    await self._store.async_save(self._data)
    _LOGGER.debug("Batch cached %d Telegram file_ids", len(entries))
async def async_remove(self) -> None:
    """Delete the cache's backing storage file and drop in-memory state."""
    # Remove the persisted store first, then clear the in-memory copy so
    # any later load starts from a clean slate.
    await self._store.async_remove()
    self._data = None
class NotificationQueue:
    """Persistent FIFO of notifications deferred during quiet hours.

    Each queued item keeps the full service-call parameters so the
    notification can later be replayed exactly as it was originally
    requested.
    """

    def __init__(self, hass: HomeAssistant, entry_id: str) -> None:
        """Set up the backing store for this config entry's queue."""
        storage_key = f"{STORAGE_KEY_PREFIX}.notification_queue.{entry_id}"
        self._store: Store[dict[str, Any]] = Store(
            hass, STORAGE_VERSION, storage_key
        )
        # None until async_load() runs (or the first enqueue creates it).
        self._data: dict[str, Any] | None = None

    async def async_load(self) -> None:
        """Read persisted queue data, defaulting to an empty queue."""
        stored = await self._store.async_load()
        self._data = stored or {"queue": []}
        _LOGGER.debug(
            "Loaded notification queue with %d items",
            len(self._data.get("queue", [])),
        )

    async def async_enqueue(self, notification_params: dict[str, Any]) -> None:
        """Append one notification's parameters and persist the queue."""
        if self._data is None:
            self._data = {"queue": []}
        item = {
            "params": notification_params,
            "queued_at": datetime.now(timezone.utc).isoformat(),
        }
        self._data["queue"].append(item)
        await self._store.async_save(self._data)
        _LOGGER.debug("Queued notification during quiet hours (total: %d)", len(self._data["queue"]))

    def get_all(self) -> list[dict[str, Any]]:
        """Return a shallow copy of every queued notification."""
        if not self._data:
            return []
        return list(self._data.get("queue", []))

    def has_pending(self) -> bool:
        """Return True when at least one notification is waiting."""
        return bool(self._data and self._data.get("queue"))

    async def async_clear(self) -> None:
        """Empty the queue and persist the now-empty state."""
        if self._data:
            self._data["queue"] = []
            await self._store.async_save(self._data)

    async def async_remove(self) -> None:
        """Delete the backing storage file and reset in-memory state."""
        await self._store.async_remove()
        self._data = None