// Zhen Ye — fix: track cards update during playback + periodic GPT re-analysis (commit 6a99834)
// API Client Module - Backend communication
// Namespace object; all backend HTTP helpers below attach onto it.
APP.api.client = {};
/**
 * Submit a video/detection job to the backend's async endpoint.
 * Stores the returned status/video URLs on state.hf (resolving
 * backend-relative paths against the configured base URL).
 * @param {FormData} formData - multipart payload for /detect/async
 * @returns {Promise<Object|undefined>} parsed job-submission response,
 *   or undefined when no backend base URL is configured
 * @throws {Error} when the backend rejects the submission
 */
APP.api.client.hfDetectAsync = async function (formData) {
  const { state } = APP.core;
  if (!state.hf.baseUrl) return;
  // The backend may return absolute URLs or paths relative to its base;
  // normalize both to absolute. (Was duplicated inline four times.)
  const toAbsolute = (url) => (url.startsWith("http") ? url : `${state.hf.baseUrl}${url}`);
  const resp = await fetch(`${state.hf.baseUrl}/detect/async`, {
    method: "POST",
    body: formData
  });
  if (!resp.ok) {
    const err = await resp.json().catch(() => ({ detail: resp.statusText }));
    throw new Error(err.detail || "Async detection submission failed");
  }
  const data = await resp.json();
  // Store resolved URLs from response (each is optional).
  if (data.status_url) state.hf.statusUrl = toAbsolute(data.status_url);
  if (data.video_url) state.hf.videoUrl = toAbsolute(data.video_url);
  if (data.depth_video_url) state.hf.depthVideoUrl = toAbsolute(data.depth_video_url);
  if (data.depth_first_frame_url) state.hf.depthFirstFrameUrl = toAbsolute(data.depth_first_frame_url);
  return data;
};
/**
 * Fetch the current status of a backend job.
 * Prefers the explicit status URL captured at submission time; otherwise
 * builds the per-job endpoint from jobId. A missing job is reported as
 * { status: "not_found" } rather than thrown.
 * @param {string} jobId - backend job identifier
 * @returns {Promise<Object>} status payload from the backend
 * @throws {Error} on non-404 HTTP failures
 */
APP.api.client.checkJobStatus = async function (jobId) {
  const { state } = APP.core;
  if (!state.hf.baseUrl) {
    return { status: "error" };
  }
  const statusUrl = state.hf.statusUrl
    ? state.hf.statusUrl
    : `${state.hf.baseUrl}/detect/job/${jobId}`;
  const response = await fetch(statusUrl, { cache: "no-store" });
  if (response.ok) {
    return await response.json();
  }
  if (response.status === 404) {
    return { status: "not_found" };
  }
  throw new Error(`Status check failed: ${response.status}`);
};
/**
 * Ask the backend to cancel a running job via DELETE /detect/job/{id}.
 * HF Spaces don't support cancellation, so those are short-circuited with
 * a "skipped" result. Network/HTTP errors are logged and returned as a
 * status object rather than thrown.
 * @param {string} jobId - backend job identifier
 * @param {string} [reason] - human-readable reason for the cancel (logged)
 * @returns {Promise<Object|undefined>} backend result, a synthetic status
 *   object, or undefined when no base URL / jobId is available
 */
APP.api.client.cancelBackendJob = async function (jobId, reason) {
  const { state } = APP.core;
  const { log } = APP.ui.logging;
  if (!state.hf.baseUrl || !jobId) return;
  // Don't attempt cancel on HF Space (it doesn't support it)
  if (state.hf.baseUrl.includes("hf.space")) {
    log(`Job cancel skipped for HF Space (${reason || "user request"})`, "w");
    return { status: "skipped", message: "Cancel disabled for HF Space" };
  }
  try {
    const response = await fetch(`${state.hf.baseUrl}/detect/job/${jobId}`, {
      method: "DELETE"
    });
    if (response.status === 404) {
      return { status: "not_found" };
    }
    if (!response.ok) {
      throw new Error("Cancel failed");
    }
    const result = await response.json();
    log(`Job ${jobId.substring(0, 8)} cancelled`, "w");
    return result;
  } catch (err) {
    log(`Cancel error: ${err.message}`, "e");
    return { status: "error", message: err.message };
  }
};
/**
 * Sync GPT enrichment data from polled first_frame_detections into state.detections.
 * Phase A copies assessment/relevance fields; Phase B (gated on any gpt_raw being
 * present) merges the full GPT feature set and threat fields.
 * @param {Array<Object>} rawDets - raw detections from the backend status poll
 * @param {string} [logLabel] - optional label appended to the Phase B log line
 * @returns {boolean} true if any card was updated and needs re-render
 */
APP.api.client._syncGptFromDetections = function (rawDets, logLabel) {
  const { state } = APP.core;
  const { log } = APP.ui.logging;
  // Fallback track id is positional: T01, T02, ...
  const trackIdOf = (rd, i) => rd.track_id || `T${String(i + 1).padStart(2, "0")}`;
  // Index existing detections once. (Previously find() + indexOf() ran
  // inside the loops — O(n^2) per poll cycle.)
  const byId = new Map((state.detections || []).map(d => [d.id, d]));
  let needsRender = false;
  // Phase A: Sync assessment status, relevance fields
  for (const [i, rd] of rawDets.entries()) {
    const existing = byId.get(trackIdOf(rd, i));
    if (!existing) continue;
    if (rd.assessment_status && existing.assessment_status !== rd.assessment_status) {
      existing.assessment_status = rd.assessment_status;
      needsRender = true;
    }
    if (rd.mission_relevant !== undefined && rd.mission_relevant !== null) {
      existing.mission_relevant = rd.mission_relevant;
    }
    if (rd.relevance_reason) {
      existing.relevance_reason = rd.relevance_reason;
    }
  }
  // Phase B: Full GPT feature merge (gated on gpt_raw)
  const hasGptData = rawDets.some(d => d.gpt_raw);
  if (hasGptData) {
    state.hf.firstFrameDetections = rawDets;
    for (const [i, rd] of rawDets.entries()) {
      const existing = byId.get(trackIdOf(rd, i));
      if (existing && rd.gpt_raw) {
        const g = rd.gpt_raw;
        existing.features = APP.core.gptMapping.buildFeatures(g);
        // Per-detection fields win over the raw GPT payload's defaults.
        existing.threat_level_score = rd.threat_level_score || g.threat_level_score || 0;
        existing.threat_classification = rd.threat_classification || g.threat_classification || "Unknown";
        existing.weapon_readiness = rd.weapon_readiness || g.weapon_readiness || "Unknown";
        existing.gpt_distance_m = rd.gpt_distance_m || null;
        existing.gpt_direction = rd.gpt_direction || null;
        needsRender = true;
      }
    }
    log(`Track cards updated with GPT assessment${logLabel ? " (" + logLabel + ")" : ""}`, "g");
  }
  if (needsRender && APP.ui && APP.ui.cards && APP.ui.cards.renderFrameTrackList) {
    APP.ui.cards.renderFrameTrackList();
  }
  return needsRender;
};
/**
 * Poll the async job's status URL every 3 s until completion, failure,
 * or a 10-minute timeout. Syncs GPT enrichment into the track cards on
 * each cycle, and fetches the processed/depth videos on completion.
 * @returns {Promise<void>} resolves on successful completion, rejects on
 *   failure, expiry, or timeout
 */
APP.api.client.pollAsyncJob = async function () {
  const { state } = APP.core;
  const { log, setHfStatus } = APP.ui.logging;
  const { fetchProcessedVideo, fetchDepthVideo, fetchDepthFirstFrame } = APP.core.video;
  const syncGpt = APP.api.client._syncGptFromDetections;
  const pollInterval = 3000; // 3 seconds
  const maxAttempts = 200; // 10 minutes max
  let attempts = 0;
  let fetchingVideo = false;
  // Guards against overlapping ticks: setInterval fires regardless of
  // whether the previous async callback has finished, so a slow status
  // fetch (>3 s) could otherwise run the handler concurrently.
  let inFlight = false;
  return new Promise((resolve, reject) => {
    state.hf.asyncPollInterval = setInterval(async () => {
      if (inFlight) return;
      attempts++;
      // Timeout check runs FIRST so that no early-return path (e.g. the
      // "video finalizing" retry loop) can skip it and poll forever.
      if (attempts > maxAttempts) {
        clearInterval(state.hf.asyncPollInterval);
        reject(new Error("Polling timeout (10 minutes)"));
        return;
      }
      inFlight = true;
      try {
        const resp = await fetch(state.hf.statusUrl, { cache: "no-store" });
        if (!resp.ok) {
          if (resp.status === 404) {
            clearInterval(state.hf.asyncPollInterval);
            reject(new Error("Job expired or not found"));
            return;
          }
          throw new Error(`Status check failed: ${resp.statusText}`);
        }
        const status = await resp.json();
        state.hf.asyncStatus = status.status;
        state.hf.asyncProgress = status;
        if (status.status === "completed") {
          if (fetchingVideo) return;
          fetchingVideo = true;
          const completedJobId = state.hf.asyncJobId;
          log(`✓ Backend job ${completedJobId.substring(0, 8)}: completed successfully`, "g");
          setHfStatus("job completed, fetching video...");
          // Final GPT sync — enrichment may have completed during
          // processing but the poll never landed on a "processing"
          // cycle that picked it up (common for segmentation mode
          // where _enrich_first_frame_gpt is skipped).
          if (status.first_frame_detections && status.first_frame_detections.length > 0) {
            syncGpt(status.first_frame_detections, "final sync");
          }
          try {
            await fetchProcessedVideo();
            await fetchDepthVideo();
            await fetchDepthFirstFrame();
            clearInterval(state.hf.asyncPollInterval);
            state.hf.completedJobId = state.hf.asyncJobId; // preserve for post-completion sync
            state.hf.asyncJobId = null;
            setHfStatus("ready");
            resolve();
          } catch (err) {
            if (err && err.code === "VIDEO_PENDING") {
              // Backend is still finalizing the file; retry next tick.
              setHfStatus("job completed, finalizing video...");
              fetchingVideo = false;
              return;
            }
            clearInterval(state.hf.asyncPollInterval);
            state.hf.asyncJobId = null;
            reject(err);
          }
        } else if (status.status === "failed") {
          clearInterval(state.hf.asyncPollInterval);
          const errMsg = status.error || "Processing failed";
          log(`✗ Backend job ${state.hf.asyncJobId.substring(0, 8)}: failed - ${errMsg}`, "e");
          state.hf.asyncJobId = null;
          setHfStatus(`error: ${errMsg}`);
          reject(new Error(errMsg));
        } else {
          // Still processing
          const progressInfo = status.progress ? ` (${Math.round(status.progress * 100)}%)` : "";
          setHfStatus(`job ${state.hf.asyncJobId.substring(0, 8)}: ${status.status}${progressInfo} (${attempts})`);
          // Check if GPT enrichment has updated first-frame detections
          if (status.first_frame_detections && status.first_frame_detections.length > 0) {
            syncGpt(status.first_frame_detections);
          }
        }
      } catch (err) {
        clearInterval(state.hf.asyncPollInterval);
        reject(err);
      } finally {
        inFlight = false;
      }
    }, pollInterval);
  });
};
/** External detection hook (can be replaced by user). Default stub: logs and yields no detections. */
APP.api.client.externalDetect = async (input) => {
  console.log("externalDetect called", input);
  return [];
};
/** External features hook (can be replaced by user). Default stub: logs and yields no features. */
APP.api.client.externalFeatures = async (detections, frameInfo) => {
  console.log("externalFeatures called for", detections.length, "objects");
  return {};
};
/** External tracker hook (can be replaced by user). Default stub: logs and yields no tracks. */
APP.api.client.externalTrack = async (videoEl) => {
  console.log("externalTrack called");
  return [];
};
/**
 * Call HF object detection directly on a single frame (for first frame).
 * Routes through CONFIG.PROXY_URL when set, otherwise hits the backend's
 * /detect/frame endpoint.
 * @param {HTMLCanvasElement} canvas - frame to analyze
 * @returns {Promise<Object>} parsed detection response
 * @throws {Error} on HTTP failure from either route
 */
APP.api.client.callHfObjectDetection = async function (canvas) {
  const { state } = APP.core;
  const { canvasToBlob } = APP.core.utils;
  const { CONFIG } = APP.core;
  // The multipart payload is identical for both routes — build it once.
  // (Was duplicated in each branch.)
  const blob = await canvasToBlob(canvas);
  const form = new FormData();
  form.append("image", blob, "frame.jpg");
  const proxyBase = (CONFIG.PROXY_URL || "").trim();
  if (proxyBase) {
    const resp = await fetch(`${proxyBase.replace(/\/$/, "")}/detect`, {
      method: "POST",
      body: form
    });
    if (!resp.ok) {
      let detail = `Proxy inference failed (${resp.status})`;
      try {
        const err = await resp.json();
        detail = err.detail || err.error || detail;
      } catch (_) { }
      throw new Error(detail);
    }
    return await resp.json();
  }
  // Default: use the backend base URL
  const resp = await fetch(`${state.hf.baseUrl}/detect/frame`, {
    method: "POST",
    body: form
  });
  if (!resp.ok) {
    throw new Error(`Frame detection failed: ${resp.statusText}`);
  }
  return await resp.json();
};
/**
 * Capture the current video frame and send it, with the tracks' boxes,
 * to the backend for GPT analysis.
 * @param {HTMLVideoElement} videoEl - source of the frame snapshot
 * @param {Array<Object>} tracks - tracks with normalized (0-1) bboxes
 * @returns {Promise<Object>} backend analysis payload
 * @throws {Error} on HTTP failure
 */
APP.api.client.analyzeFrame = async function (videoEl, tracks) {
  const { state } = APP.core;
  const { canvasToBlob } = APP.core.utils;
  // Snapshot the current frame at the video's native resolution.
  const frameCanvas = document.createElement("canvas");
  const width = videoEl.videoWidth;
  const height = videoEl.videoHeight;
  frameCanvas.width = width;
  frameCanvas.height = height;
  frameCanvas.getContext("2d").drawImage(videoEl, 0, 0);
  const imageBlob = await canvasToBlob(frameCanvas);
  // Backend expects pixel-space [x1, y1, x2, y2]; tracks store normalized boxes.
  const dets = tracks.map((t) => {
    const box = t.bbox;
    return {
      track_id: t.id,
      label: t.label,
      bbox: [
        Math.round(box.x * width),
        Math.round(box.y * height),
        Math.round((box.x + box.w) * width),
        Math.round((box.y + box.h) * height),
      ],
      score: t.score,
    };
  });
  const form = new FormData();
  form.append("image", imageBlob, "frame.jpg");
  form.append("detections", JSON.stringify(dets));
  // Associate with the active (or most recently completed) backend job, if any.
  const jobId = state.hf.asyncJobId || state.hf.completedJobId;
  if (jobId) form.append("job_id", jobId);
  const resp = await fetch(`${state.hf.baseUrl}/detect/analyze-frame`, {
    method: "POST",
    body: form,
  });
  if (!resp.ok) throw new Error(`Frame analysis failed: ${resp.statusText}`);
  return await resp.json();
};
/**
 * Ask the backend's GPT chat endpoint a question about the given detections.
 * Includes mission context when one has been loaded into state.
 * @param {string} question - user's free-form question
 * @param {Array<Object>} detections - detections to reason about
 * @returns {Promise<Object>} chat response payload
 * @throws {Error} with the backend's detail message on HTTP failure
 */
APP.api.client.chatAboutThreats = async function (question, detections) {
  const { state } = APP.core;
  const payload = new FormData();
  payload.append("question", question);
  payload.append("detections", JSON.stringify(detections));
  if (state.hf.missionSpec) {
    payload.append("mission_context", JSON.stringify(state.hf.missionSpec));
  }
  const resp = await fetch(`${state.hf.baseUrl}/chat/threat`, {
    method: "POST",
    body: payload
  });
  if (resp.ok) {
    return await resp.json();
  }
  const err = await resp.json().catch(() => ({ detail: resp.statusText }));
  throw new Error(err.detail || "Chat request failed");
};