Add batch API endpoints, reduce frontend polling by ~75%, fix resource leaks

Backend: add batch endpoints for target states, metrics, and device
health to replace O(N) individual API calls per poll cycle.
Frontend: use batch endpoints in dashboard/targets/profiles tabs,
fix Chart.js instance leaks, debounce server event reloads, add
i18n active-tab guards, clean up ResizeObserver on pattern editor
close, cache uptime timer DOM refs, increase KC auto-refresh to 2s.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-22 18:55:09 +03:00
parent d4a0f3a7f5
commit 9392741f08
8 changed files with 125 additions and 73 deletions

View File

@@ -10,8 +10,10 @@ import { Modal } from '../core/modal.js';
const profileModal = new Modal('profile-editor-modal');
// Re-render profiles when language changes
document.addEventListener('languageChanged', () => { if (apiKey) loadProfiles(); });
// Re-render profiles when language changes (only if tab is active)
document.addEventListener('languageChanged', () => {
if (apiKey && (localStorage.getItem('activeTab') || 'dashboard') === 'profiles') loadProfiles();
});
// React to real-time profile state changes from global events WS
document.addEventListener('server:profile_state_changed', () => {
@@ -33,16 +35,11 @@ export async function loadProfiles() {
const data = await profilesResp.json();
const targetsData = targetsResp.ok ? await targetsResp.json() : { targets: [] };
const allTargets = targetsData.targets || [];
// State is not included in the list response — fetch per-target in parallel
const stateResults = await Promise.all(
allTargets.map(tgt =>
fetchWithAuth(`/picture-targets/${tgt.id}/state`)
.then(r => r.ok ? r.json() : null)
.catch(() => null)
)
);
// Batch fetch all target states in a single request
const batchStatesResp = await fetchWithAuth('/picture-targets/batch/states');
const allStates = batchStatesResp.ok ? (await batchStatesResp.json()).states : {};
const runningTargetIds = new Set(
allTargets.filter((_, i) => stateResults[i]?.processing).map(tgt => tgt.id)
allTargets.filter(tgt => allStates[tgt.id]?.processing).map(tgt => tgt.id)
);
set_profilesCache(data.profiles);
renderProfiles(data.profiles, runningTargetIds);
@@ -390,16 +387,11 @@ export async function toggleProfileTargets(profileId) {
const profileResp = await fetchWithAuth(`/profiles/${profileId}`);
if (!profileResp.ok) throw new Error('Failed to load profile');
const profile = await profileResp.json();
// Fetch actual processing state for each target in this profile
const stateResults = await Promise.all(
profile.target_ids.map(id =>
fetchWithAuth(`/picture-targets/${id}/state`)
.then(r => r.ok ? r.json() : null)
.catch(() => null)
)
);
// Batch fetch all target states to determine which are running
const batchResp = await fetchWithAuth('/picture-targets/batch/states');
const allStates = batchResp.ok ? (await batchResp.json()).states : {};
const runningSet = new Set(
profile.target_ids.filter((_, i) => stateResults[i]?.processing)
profile.target_ids.filter(id => allStates[id]?.processing)
);
const shouldStop = profile.target_ids.some(id => runningSet.has(id));
await Promise.all(profile.target_ids.map(id =>