// Commit 6a99834 — fix: track cards update during playback + periodic GPT re-analysis (Zhen Ye)
// Video management: loading, unloading, first frame capture, depth handling.
// All functions below attach to this shared APP.core.video namespace object.
APP.core.video = {};
APP.core.video.captureFirstFrame = async function () {
  // Load state.videoUrl into the hidden <video>, wait for its data, seek to
  // t=0, and record the intrinsic frame size into state.frame.{w,h}.
  // Rejects (propagates) on load error or a 10 s load timeout.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const videoHidden = $("#videoHidden");
  if (!videoHidden || !state.videoUrl) return;
  videoHidden.src = state.videoUrl;
  videoHidden.load();
  await new Promise((resolve, reject) => {
    // FIX: clear the guard timer and detach handlers once settled, so a
    // stale timeout/handler cannot fire against a later load.
    const timer = setTimeout(() => reject(new Error("Video load timeout")), 10000);
    videoHidden.onloadeddata = () => {
      clearTimeout(timer);
      videoHidden.onloadeddata = null;
      videoHidden.onerror = null;
      resolve();
    };
    videoHidden.onerror = () => {
      clearTimeout(timer);
      videoHidden.onloadeddata = null;
      videoHidden.onerror = null;
      reject(new Error("Video failed to load"));
    };
  });
  // Seek to first frame; fall back after 500 ms if `seeked` never fires.
  videoHidden.currentTime = 0;
  await new Promise((resolve) => {
    const timer = setTimeout(resolve, 500);
    videoHidden.onseeked = () => {
      clearTimeout(timer);
      videoHidden.onseeked = null;
      resolve();
    };
  });
  // Default to 1280x720 when the element reports no intrinsic size.
  state.frame.w = videoHidden.videoWidth || 1280;
  state.frame.h = videoHidden.videoHeight || 720;
};
APP.core.video.drawFirstFrame = function () {
  // Paint the captured first frame from the hidden <video> onto the preview
  // canvas, and size the overlay canvas identically so bbox coordinates
  // drawn later line up 1:1 with frame pixels.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const videoHidden = $("#videoHidden");
  const frameCanvas = $("#frameCanvas");
  if (!frameCanvas || !videoHidden) return;
  const { w, h } = state.frame;
  frameCanvas.width = w;
  frameCanvas.height = h;
  frameCanvas.getContext("2d").drawImage(videoHidden, 0, 0, w, h);
  const frameOverlay = $("#frameOverlay");
  if (frameOverlay) {
    frameOverlay.width = w;
    frameOverlay.height = h;
  }
  const frameEmpty = $("#frameEmpty");
  if (frameEmpty) frameEmpty.style.display = "none";
};
APP.core.video.frameToBitmap = async function (videoEl) {
  // Snapshot the current frame of `videoEl` onto an offscreen canvas,
  // defaulting to 1280x720 when the element reports no intrinsic size.
  // NOTE(review): despite the name, this returns a <canvas>, not an
  // ImageBitmap; callers presumably use it as a drawable source — confirm.
  const width = videoEl.videoWidth || 1280;
  const height = videoEl.videoHeight || 720;
  const snapshot = document.createElement("canvas");
  snapshot.width = width;
  snapshot.height = height;
  snapshot.getContext("2d").drawImage(videoEl, 0, 0, width, height);
  return snapshot;
};
APP.core.video.seekTo = function (videoEl, time) {
  // Seek `videoEl` to `time` (clamped to >= 0) and resolve when the seek
  // completes, or after a 600 ms fallback (a seek to the current position
  // may never fire `seeked`). Resolves immediately for a null element.
  return new Promise((resolve) => {
    if (!videoEl) { resolve(); return; }
    // FIX: clear the fallback timer on success so it cannot fire later;
    // attach the handler before triggering the seek.
    const timer = setTimeout(resolve, 600);
    videoEl.onseeked = () => {
      clearTimeout(timer);
      resolve();
    };
    videoEl.currentTime = Math.max(0, time);
  });
};
APP.core.video.unloadVideo = async function (options = {}) {
  // Fully reset the demo: stop polling, release blob URLs, zero out all
  // video/HF/tracker state, restore buttons and placeholder UI.
  // options.preserveInput — keep the file <input> value and its meta label.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const { log, setStatus, setHfStatus } = APP.ui.logging;
  const preserveInput = !!options.preserveInput;
  // Stop async job polling if running.
  if (state.hf.asyncPollInterval) {
    clearInterval(state.hf.asyncPollInterval);
    state.hf.asyncPollInterval = null;
  }
  // Best-effort revoke of a blob: URL. FIX: all four revokes now share the
  // same try/catch guard (the raw-video revoke was previously unguarded).
  const revokeIfBlob = (url) => {
    if (url && url.startsWith("blob:")) {
      try { URL.revokeObjectURL(url); } catch (_) { }
    }
  };
  revokeIfBlob(state.videoUrl);
  revokeIfBlob(state.hf.processedUrl);
  revokeIfBlob(state.hf.depthVideoUrl);
  revokeIfBlob(state.hf.depthFirstFrameUrl);
  // Reset core video state.
  state.videoUrl = null;
  state.videoFile = null;
  state.videoLoaded = false;
  state.useProcessedFeed = false;
  state.useDepthFeed = false;
  state.useFrameDepthView = false;
  // Reset backend (HF) state.
  state.hf.missionId = null;
  state.hf.plan = null;
  state.hf.processedUrl = null;
  state.hf.processedBlob = null;
  state.hf.depthVideoUrl = null;
  state.hf.depthBlob = null;
  state.hf.depthFirstFrameUrl = null;
  state.hf.depthFirstFrameBlob = null;
  state.hf.summary = null;
  state.hf.busy = false;
  state.hf.lastError = null;
  state.hf.asyncJobId = null;
  state.hf.completedJobId = null;
  state.hf.asyncStatus = "idle";
  state.hf.firstFrameUrl = null;
  state.hf.videoUrl = null;
  setHfStatus("idle");
  state.hasReasoned = false;
  state.firstFrameReady = false;
  state.isReasoning = false;
  // Reset button states.
  const btnReason = $("#btnReason");
  if (btnReason) {
    btnReason.disabled = false;
    btnReason.style.opacity = "1";
    btnReason.style.cursor = "pointer";
  }
  const btnCancelReason = $("#btnCancelReason");
  if (btnCancelReason) btnCancelReason.style.display = "none";
  const btnEngage = $("#btnEngage");
  if (btnEngage) btnEngage.disabled = true;
  // Reset detections and tracker state.
  state.detections = [];
  state.selectedId = null;
  state.tracker.tracks = [];
  state.tracker.nextId = 1;
  state.tracker.running = false;
  state.tracker.selectedTrackId = null;
  state.tracker.beamOn = false;
  // Clear the video elements (removeAttribute + load() drops the source).
  const videoHidden = $("#videoHidden");
  if (videoHidden) {
    videoHidden.removeAttribute("src");
    videoHidden.load();
  }
  const videoEngage = $("#videoEngage");
  if (videoEngage) {
    videoEngage.removeAttribute("src");
    videoEngage.load();
  }
  const videoFile = $("#videoFile");
  if (!preserveInput && videoFile) {
    videoFile.value = "";
  }
  // Restore placeholder UI.
  const videoMeta = $("#videoMeta");
  if (!preserveInput && videoMeta) videoMeta.textContent = "No file";
  const frameEmpty = $("#frameEmpty");
  if (frameEmpty) frameEmpty.style.display = "flex";
  const engageEmpty = $("#engageEmpty");
  if (engageEmpty) engageEmpty.style.display = "flex";
  const frameNote = $("#frameNote");
  if (frameNote) frameNote.textContent = "Awaiting video";
  const engageNote = $("#engageNote");
  if (engageNote) engageNote.textContent = "Awaiting video";
  // Clear canvases.
  APP.core.video.clearCanvas($("#frameCanvas"));
  APP.core.video.clearCanvas($("#frameOverlay"));
  APP.core.video.clearCanvas($("#engageOverlay"));
  // Re-render dependent UI components.
  if (APP.ui.radar.renderFrameRadar) APP.ui.radar.renderFrameRadar();
  if (APP.ui.cards.renderFrameTrackList) APP.ui.cards.renderFrameTrackList();
  if (APP.ui.trade.renderTrade) APP.ui.trade.renderTrade();
  setStatus("warn", "STANDBY · No video loaded");
  log("Video unloaded. Demo reset.", "w");
};
APP.core.video.clearCanvas = function (canvas) {
  // Wipe all pixels from `canvas`; no-op when the element is missing.
  if (!canvas) return;
  canvas.getContext("2d").clearRect(0, 0, canvas.width, canvas.height);
};
APP.core.video.resizeOverlays = function () {
  // Keep the two overlay canvases sized to their underlying video/frame so
  // overlay drawing coordinates map 1:1 onto pixels.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const videoEngage = $("#videoEngage");
  const engageOverlay = $("#engageOverlay");
  if (videoEngage && engageOverlay) {
    // Prefer the video's intrinsic size; fall back to the captured frame.
    engageOverlay.width = videoEngage.videoWidth || state.frame.w;
    engageOverlay.height = videoEngage.videoHeight || state.frame.h;
    engageOverlay.style.pointerEvents = "auto";
  }
  const frameOverlay = $("#frameOverlay");
  if (frameOverlay) {
    frameOverlay.width = state.frame.w;
    frameOverlay.height = state.frame.h;
  }
};
// Depth video handling
APP.core.video.fetchDepthVideo = async function () {
  // Download the optional depth-rendered video into state.hf.
  // Absence (no URL, 404) and in-progress processing (202) are normal and
  // handled quietly; any other failure just clears depth state.
  const { state } = APP.core;
  const { log } = APP.ui.logging;
  if (!state.hf.depthVideoUrl) return;
  try {
    const resp = await fetch(state.hf.depthVideoUrl, { cache: "no-store" });
    if (!resp.ok) {
      // 404 = depth not enabled/available - this is fine, not an error
      if (resp.status === 404) {
        state.hf.depthVideoUrl = null;
        return;
      }
      // 202 = still processing
      if (resp.status === 202) {
        return;
      }
      throw new Error(`Failed to fetch depth video: ${resp.statusText}`);
    }
    // Under a null origin (file://) blob URLs are unreliable — keep
    // streaming from the remote URL with a cache-buster instead.
    const nullOrigin = (window.location && window.location.origin) === "null" || (window.location && window.location.protocol === "file:");
    if (nullOrigin) {
      state.hf.depthBlob = null;
      state.hf.depthVideoUrl = `${state.hf.depthVideoUrl}?t=${Date.now()}`;
      log("Depth video ready (streaming URL)");
      // FIX: keep the depth chip in sync in the streaming path too
      // (previously only the blob path updated it).
      APP.core.video.updateDepthChip();
      return;
    }
    const blob = await resp.blob();
    state.hf.depthBlob = blob;
    state.hf.depthVideoUrl = URL.createObjectURL(blob);
    log(`Depth video ready (${(blob.size / 1024 / 1024).toFixed(1)} MB)`, "g");
    APP.core.video.updateDepthChip();
  } catch (err) {
    // Depth is optional: clear state rather than surface an error, and
    // grey out the chip to match the cleared state.
    state.hf.depthVideoUrl = null;
    state.hf.depthBlob = null;
    APP.core.video.updateDepthChip();
  }
};
APP.core.video.fetchDepthFirstFrame = async function () {
  // Fetch the optional depth render of the first frame and swap the URL for
  // a local blob URL. Missing or failed depth data is not an error — clear
  // the URL and move on.
  const { state } = APP.core;
  const { log } = APP.ui.logging;
  if (!state.hf.depthFirstFrameUrl) return;
  try {
    const resp = await fetch(state.hf.depthFirstFrameUrl, { cache: "no-store" });
    if (!resp.ok) {
      // 404 or other errors — depth simply isn't available.
      state.hf.depthFirstFrameUrl = null;
      return;
    }
    const blob = await resp.blob();
    state.hf.depthFirstFrameBlob = blob;
    state.hf.depthFirstFrameUrl = URL.createObjectURL(blob);
    log("First frame depth ready", "g");
  } catch (_err) {
    // Silently clear — depth is optional.
    state.hf.depthFirstFrameUrl = null;
    state.hf.depthFirstFrameBlob = null;
  }
};
APP.core.video.fetchProcessedVideo = async function () {
  // Download the backend-processed video into state.hf.processedUrl and
  // enable the Engage button on success. Throws an error with
  // err.code === "VIDEO_PENDING" while the backend is still rendering (202).
  const { state } = APP.core;
  const { log } = APP.ui.logging;
  const { $ } = APP.core.utils;
  const resp = await fetch(state.hf.videoUrl, { cache: "no-store" });
  if (!resp.ok) {
    if (resp.status === 202) {
      const pending = new Error("Video still processing");
      pending.code = "VIDEO_PENDING";
      throw pending;
    }
    throw new Error(`Failed to fetch video: ${resp.statusText}`);
  }
  const enableEngage = () => {
    const btnEngage = $("#btnEngage");
    if (btnEngage) btnEngage.disabled = false;
  };
  const loc = window.location;
  const nullOrigin = (loc && loc.origin) === "null" || (loc && loc.protocol === "file:");
  if (nullOrigin) {
    // file:// pages can't reliably use blob URLs — stream directly with a
    // cache-buster instead of downloading.
    state.hf.processedBlob = null;
    state.hf.processedUrl = `${state.hf.videoUrl}?t=${Date.now()}`;
    enableEngage();
    log("Processed video ready (streaming URL)");
    return;
  }
  const blob = await resp.blob();
  // Release the previous blob URL before replacing it.
  if (state.hf.processedUrl && state.hf.processedUrl.startsWith("blob:")) {
    URL.revokeObjectURL(state.hf.processedUrl);
  }
  state.hf.processedBlob = blob;
  state.hf.processedUrl = URL.createObjectURL(blob);
  enableEngage();
  log(`Processed video ready (${(blob.size / 1024 / 1024).toFixed(1)} MB)`);
};
APP.core.video.updateDepthChip = function () {
  // Reflect depth availability on the chip: clickable when a depth video
  // (URL or blob) exists, visually disabled otherwise.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const chipDepth = $("#chipDepth");
  if (!chipDepth) return;
  const available = Boolean(state.hf.depthVideoUrl || state.hf.depthBlob);
  chipDepth.style.cursor = available ? "pointer" : "not-allowed";
  chipDepth.style.opacity = available ? "1" : "0.5";
};
APP.core.video.toggleDepthView = function () {
  // Toggle the engage video between the depth render and the normal feed
  // (processed or raw, per state.useProcessedFeed), preserving playhead
  // position and play/pause state across the source swap.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  // FIX: the logger lives in APP.ui.logging; the old code also destructured
  // a nonexistent `log` from APP.core.utils.
  const { log } = APP.ui.logging;
  if (!state.hf.depthVideoUrl && !state.hf.depthBlob) {
    log("Depth video not available yet. Run Reason and wait for processing.", "w");
    return;
  }
  state.useDepthFeed = !state.useDepthFeed;
  const depthOn = state.useDepthFeed;
  const chipDepth = $("#chipDepth");
  if (chipDepth) chipDepth.textContent = depthOn ? "VIEW:DEPTH" : "VIEW:DEFAULT";
  const videoEngage = $("#videoEngage");
  if (videoEngage) {
    // Swap source while keeping the playhead and play state (dedupes the
    // previously copy-pasted on/off branches).
    const currentTime = videoEngage.currentTime;
    const wasPlaying = !videoEngage.paused;
    videoEngage.src = depthOn
      ? state.hf.depthVideoUrl
      : (state.useProcessedFeed ? state.hf.processedUrl : state.videoUrl);
    videoEngage.load();
    videoEngage.currentTime = currentTime;
    if (wasPlaying) videoEngage.play();
  }
};
APP.core.video.toggleFirstFrameDepthView = function () {
  // Flip the first-frame preview between the RGB frame and its depth
  // render, updating the chip label to match.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const { log } = APP.ui.logging;
  if (!state.hf.depthFirstFrameUrl) {
    log("First frame depth not available", "w");
    return;
  }
  state.useFrameDepthView = !state.useFrameDepthView;
  const chipFrameDepth = $("#chipFrameDepth");
  if (!state.useFrameDepthView) {
    // Back to default: redraw the original first frame plus overlay.
    if (chipFrameDepth) chipFrameDepth.textContent = "VIEW:DEFAULT";
    APP.core.video.drawFirstFrame();
    APP.ui.overlays.renderFrameOverlay();
    return;
  }
  if (chipFrameDepth) chipFrameDepth.textContent = "VIEW:DEPTH";
  // Draw the depth first frame once the image loads.
  const frameCanvas = $("#frameCanvas");
  const img = new Image();
  img.onload = () => {
    if (!frameCanvas) return;
    frameCanvas.width = state.frame.w;
    frameCanvas.height = state.frame.h;
    frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);
    APP.ui.overlays.renderFrameOverlay();
  };
  img.src = state.hf.depthFirstFrameUrl;
};
APP.core.video.toggleProcessedFeed = function () {
  // Toggle the engage video between the backend-processed feed and the raw
  // upload, preserving playhead and play state. Depth view is always reset
  // when the feed changes.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const { log } = APP.ui.logging;
  if (!state.hf.processedUrl) {
    log("Processed video not available yet", "w");
    return;
  }
  state.useProcessedFeed = !state.useProcessedFeed;
  state.useDepthFeed = false; // Reset depth view when switching feeds
  const useHf = state.useProcessedFeed;
  const chipFeed = $("#chipFeed");
  if (chipFeed) chipFeed.textContent = useHf ? "FEED:HF" : "FEED:RAW";
  const videoEngage = $("#videoEngage");
  if (videoEngage) {
    // Single source-swap path (dedupes the former copy-pasted branches).
    const currentTime = videoEngage.currentTime;
    const wasPlaying = !videoEngage.paused;
    videoEngage.src = useHf ? state.hf.processedUrl : state.videoUrl;
    videoEngage.load();
    videoEngage.currentTime = currentTime;
    if (wasPlaying) videoEngage.play();
  }
  const chipDepth = $("#chipDepth");
  if (chipDepth) chipDepth.textContent = "VIEW:DEFAULT";
};
// ========= Streaming Mode for Tab 2 (Live Backend Processing) =========
APP.core.video.setStreamingMode = function (url) {
  // Show a live stream <img> on top of the engage video, creating the
  // overlay element on first use. On stream error the overlay hides itself
  // and the normal video view is restored.
  const { $ } = APP.core.utils;
  const videoEngage = $("#videoEngage");
  const engageEmpty = $("#engageEmpty");
  // Ensure the stream image element exists.
  let streamView = $("#streamView");
  if (!streamView) {
    streamView = document.createElement("img");
    streamView.id = "streamView";
    streamView.style.width = "100%";
    streamView.style.height = "100%";
    streamView.style.objectFit = "contain";
    streamView.style.position = "absolute";
    streamView.style.top = "0";
    streamView.style.left = "0";
    streamView.style.zIndex = "10"; // Above video
    streamView.style.backgroundColor = "#000";
    // Insert into the wrapper (parent of videoEngage).
    if (videoEngage && videoEngage.parentNode) {
      videoEngage.parentNode.appendChild(streamView);
      // The parent must be positioned for the absolute overlay to fill it.
      if (getComputedStyle(videoEngage.parentNode).position === "static") {
        videoEngage.parentNode.style.position = "relative";
      }
    }
  }
  if (streamView) {
    streamView.style.display = "block";
    streamView.onerror = () => {
      // If the stream fails (404 etc.), silently revert to the video view.
      streamView.style.display = "none";
      if (videoEngage) videoEngage.style.display = "block";
      // FIX: guard videoEngage before reading .src — the old code
      // dereferenced it unchecked inside this handler.
      if (engageEmpty && (!videoEngage || !videoEngage.src)) engageEmpty.style.display = "flex";
    };
    streamView.src = url;
    if (videoEngage) videoEngage.style.display = "none";
    // Also hide the empty-state placeholder.
    if (engageEmpty) engageEmpty.style.display = "none";
  }
};
APP.core.video.stopStreamingMode = function () {
  // Tear down the live-stream overlay and bring the engage video back.
  const { $ } = APP.core.utils;
  const streamView = $("#streamView");
  if (streamView) {
    streamView.src = ""; // Stop connection
    streamView.style.display = "none";
  }
  const videoEngage = $("#videoEngage");
  if (videoEngage) videoEngage.style.display = "block";
};
// ========= Display Processed First Frame (from backend) =========
APP.core.video.displayProcessedFirstFrame = function () {
  // Render the backend's processed first frame onto the preview canvas,
  // adopting its native size as state.frame.{w,h} and resizing the overlay
  // to match so bbox coordinates stay aligned. Falls back to the locally
  // captured frame when the image fails to load.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const { log } = APP.ui.logging;
  if (!state.hf.firstFrameUrl) {
    log("Processed first frame URL not available", "w");
    return;
  }
  const frameCanvas = $("#frameCanvas");
  const frameOverlay = $("#frameOverlay");
  const frameEmpty = $("#frameEmpty");
  const frameNote = $("#frameNote");
  const img = new Image();
  img.crossOrigin = "anonymous";
  img.onload = () => {
    if (!frameCanvas) return;
    // Adopt the image's intrinsic size as the canonical frame size.
    state.frame.w = img.naturalWidth || 1280;
    state.frame.h = img.naturalHeight || 720;
    frameCanvas.width = state.frame.w;
    frameCanvas.height = state.frame.h;
    frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);
    // CRITICAL: overlay must match the canvas or bbox coordinates drift.
    if (frameOverlay) {
      frameOverlay.width = state.frame.w;
      frameOverlay.height = state.frame.h;
    }
    if (frameEmpty) frameEmpty.style.display = "none";
    if (frameNote) frameNote.textContent = "Processed (from backend)";
    // Re-render the overlay on top, now with matching dimensions.
    if (APP.ui.overlays.renderFrameOverlay) {
      APP.ui.overlays.renderFrameOverlay();
    }
    log("Processed first frame displayed", "g");
  };
  img.onerror = () => {
    log("Failed to load processed first frame", "e");
    APP.core.video.drawFirstFrame(); // fall back to the local first frame
  };
  img.src = state.hf.firstFrameUrl;
};
// ========= Display First Frame with Depth Overlay (if available) =========
APP.core.video.displayFirstFrameWithDepth = function () {
  // Pick the best available first-frame rendering: depth image (when depth
  // view is active and depth data exists) → processed backend frame →
  // locally captured frame.
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  if (state.useFrameDepthView && state.hf.depthFirstFrameUrl) {
    const frameCanvas = $("#frameCanvas");
    const img = new Image();
    img.crossOrigin = "anonymous";
    img.onload = () => {
      if (!frameCanvas) return;
      frameCanvas.width = state.frame.w;
      frameCanvas.height = state.frame.h;
      frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);
      if (APP.ui.overlays.renderFrameOverlay) {
        APP.ui.overlays.renderFrameOverlay();
      }
    };
    img.onerror = () => {
      // Depth image failed — fall through to the processed/raw chain.
      APP.core.video.displayProcessedFirstFrame();
    };
    img.src = state.hf.depthFirstFrameUrl;
  } else if (state.hf.firstFrameUrl) {
    APP.core.video.displayProcessedFirstFrame();
  } else {
    APP.core.video.drawFirstFrame();
  }
};