/* =========================
Weapon-Grade Demo Engine
- Tab 1: first-frame perception + reasoning
- Tab 2: closed-loop tracking + dynamic dwell update
- Tab 3: trade-space console
========================= */
(() => {
  // Backend configuration injected by the host page (optional global).
  const API_CONFIG = window.API_CONFIG || {};
  // Resolve the backend base URL: explicit config wins (trailing slash
  // stripped); else the page origin unless it is "null" (file://);
  // else "" so fetches become same-origin relative requests.
  const BACKEND_BASE = (() => {
    const raw = (API_CONFIG.BACKEND_BASE || API_CONFIG.BASE_URL || "").trim();
    if (raw) return raw.replace(/\/$/, "");
    const origin = (window.location && window.location.origin) || "";
    if (origin && origin !== "null") return origin;
    return "";
  })();
  // Tiny DOM + math helpers used throughout.
  const $ = (sel, root = document) => root.querySelector(sel);
  const $$ = (sel, root = document) => Array.from(root.querySelectorAll(sel));
  const clamp = (x, a, b) => Math.min(b, Math.max(a, x));
  const lerp = (a, b, t) => a + (b - a) * t;
  const now = () => performance.now();
  // NOTE(review): not referenced in this chunk — presumably gates
  // kill-assessment UI further down the file; confirm before removing.
  const ENABLE_KILL = false;
  // Central mutable application state shared by all three tabs.
  const state = {
    videoUrl: null,   // object/data URL for the uploaded video
    videoFile: null,  // the raw File object (re-uploaded to the backend)
    videoLoaded: false,
    useProcessedFeed: false,
    useDepthFeed: false, // Flag for depth view (Tab 2 video)
    useFrameDepthView: false, // Flag for first frame depth view (Tab 1)
    hasReasoned: false,
    isReasoning: false, // Flag to prevent concurrent Reason executions
    // HF backend job state (async detection pipeline).
    hf: {
      baseUrl: BACKEND_BASE,
      detector: "auto",
      asyncJobId: null, // Current job ID from /detect/async
      asyncPollInterval: null, // Polling timer handle
      firstFrameUrl: null, // First frame preview URL
      firstFrameDetections: null, // First-frame detections from backend
      statusUrl: null, // Status polling URL
      videoUrl: null, // Final video URL
      asyncStatus: "idle", // "idle"|"processing"|"completed"|"failed"
      asyncProgress: null, // Progress data from status endpoint
      queries: [], // Mission objective used as query
      processedUrl: null,
      processedBlob: null,
      depthVideoUrl: null, // Depth video URL
      depthFirstFrameUrl: null, // First frame depth URL
      depthBlob: null, // Depth video blob
      depthFirstFrameBlob: null, // Depth first frame blob
      summary: null,
      busy: false,
      lastError: null
    },
    // Local detector selection (COCO vs HF-hosted).
    detector: {
      mode: "coco",
      kind: "object",
      loaded: false,
      model: null,
      loading: false,
      cocoBlocked: false,
      hfTrackingWarned: false
    },
    // Tab 2 closed-loop tracker state.
    tracker: {
      mode: "iou",
      tracks: [],
      nextId: 1,
      lastDetTime: 0,
      running: false,
      selectedTrackId: null,
      beamOn: false,
      lastFrameTime: 0
    },
    // First-frame geometry + cached bitmap for Tab 1 reasoning.
    frame: {
      w: 1280,
      h: 720,
      bitmap: null
    },
    detections: [], // from Tab 1
    selectedId: null,
    intelBusy: false,
    ui: {
      cursorMode: "on",
      agentCursor: { x: 0.65, y: 0.28, vx: 0, vy: 0, visible: false, target: null, mode: "idle", t0: 0 }
    }
  };
  // Config: Update track reasoning every 30 frames
  const REASON_INTERVAL = 30;
  // ========= Elements =========
  const sysDot = $("#sys-dot");
  const sysStatus = $("#sys-status");
  const sysLog = $("#sysLog");
  const telemetry = $("#telemetry");
  const videoFile = $("#videoFile");
  const btnEject = $("#btnEject");
  const detectorSelect = $("#detectorSelect");
  const trackerSelect = $("#trackerSelect");
// Snapshot the detector dropdown: returns { value, kind, label },
// defaulting to a COCO object detector when the select is missing or
// has no usable selection.
function getDetectorSelection() {
  const selected = detectorSelect?.options?.[detectorSelect.selectedIndex];
  const value = detectorSelect?.value || "coco";
  const kind = selected?.dataset?.kind || "object";
  const label = (selected?.textContent || "").trim();
  return { value, kind, label };
}
// ----- Tab 1 controls: HEL knobs, atmosphere, policy -----
const helPower = $("#helPower");
const helAperture = $("#helAperture");
const helM2 = $("#helM2");
const helJitter = $("#helJitter");
const helDuty = $("#helDuty");
const helMode = $("#helMode");
const atmVis = $("#atmVis");
const atmCn2 = $("#atmCn2");
const seaSpray = $("#seaSpray");
const aoQ = $("#aoQ");
const rangeBase = $("#rangeBase");
const detHz = $("#detHz");
const policyMode = $("#policyMode");
const assessWindow = $("#assessWindow");
const cursorMode = $("#cursorMode");
const btnReason = $("#btnReason");
const btnCancelReason = $("#btnCancelReason");
const btnRecompute = $("#btnRecompute");
const btnClear = $("#btnClear");
// ----- Tab 1 first-frame canvases and panels -----
const frameCanvas = $("#frameCanvas");
const frameOverlay = $("#frameOverlay");
const frameRadar = $("#frameRadar");
const frameEmpty = $("#frameEmpty");
const frameNote = $("#frameNote");
// const objList = $("#objList"); // Removed
const objCount = $("#objCount");
const featureTable = $("#featureTable");
const selId = $("#selId");
const checkEnableGPT = $("#enableGPTToggle");
const trackCount = $("#trackCount");
const frameTrackList = $("#frameTrackList");
// Removed old summary references
// ----- Tab 2: engage view -----
const videoHidden = $("#videoHidden");
const videoEngage = $("#videoEngage");
const engageOverlay = $("#engageOverlay");
const engageEmpty = $("#engageEmpty");
const engageNote = $("#engageNote");
// Mission-driven (HF Space) backend controls
const missionText = $("#missionText");
const hfBackendStatus = $("#hfBackendStatus");
const intelSummaryBox = $("#intelSummaryBox");
const intelStamp = $("#intelStamp");
const intelDot = $("#intelDot");
const btnIntelRefresh = $("#btnIntelRefresh");
const intelThumbs = [$("#intelThumb0"), $("#intelThumb1"), $("#intelThumb2")];
const missionClassesEl = $("#missionClasses");
const missionIdEl = $("#missionId");
const chipFeed = $("#chipFeed");
const btnEngage = $("#btnEngage");
// Debug hook for console inspection
window.__LP_STATE__ = state;
const btnPause = $("#btnPause");
const btnReset = $("#btnReset");
const btnToggleSidebar = $("#btnToggleSidebar");
// ----- Status chips / Tab 2 readouts -----
const chipPolicy = $("#chipPolicy");
const chipTracks = $("#chipTracks");
const chipBeam = $("#chipBeam");
const chipHz = $("#chipHz");
const chipDepth = $("#chipDepth");
const chipFrameDepth = $("#chipFrameDepth");
const dwellText = $("#dwellText");
const dwellBar = $("#dwellBar");
const radarCanvas = $("#radarCanvas");
const trackList = $("#trackList");
const liveStamp = $("#liveStamp");
// ----- Tab 3: trade-space console -----
const tradeCanvas = $("#tradeCanvas");
const tradeTarget = $("#tradeTarget");
const rMin = $("#rMin");
const rMax = $("#rMax");
const showPk = $("#showPk");
const btnReplot = $("#btnReplot");
const btnSnap = $("#btnSnap");
// ========= UI: knobs display =========
// Mirror every knob/slider value into its readout label, refresh the
// policy/detection-rate chips, and rebuild the one-line telemetry
// string. Pure UI sync — reads the inputs, writes text only.
function syncKnobDisplays() {
  $("#helPowerVal").textContent = helPower.value;
  $("#helApertureVal").textContent = (+helAperture.value).toFixed(2);
  $("#helM2Val").textContent = (+helM2.value).toFixed(1);
  $("#helJitterVal").textContent = (+helJitter.value).toFixed(1);
  $("#helDutyVal").textContent = helDuty.value;
  $("#atmVisVal").textContent = atmVis.value;
  $("#atmCn2Val").textContent = atmCn2.value;
  $("#seaSprayVal").textContent = seaSpray.value;
  $("#aoQVal").textContent = aoQ.value;
  $("#rangeBaseVal").textContent = rangeBase.value;
  $("#detHzVal").textContent = detHz.value;
  $("#assessWindowVal").textContent = (+assessWindow.value).toFixed(1);
  chipPolicy.textContent = `POLICY:${policyMode.value.toUpperCase()}`;
  chipHz.textContent = `DET:${detHz.value}Hz`;
  telemetry.textContent = `HEL=${helPower.value}kW · VIS=${atmVis.value}km · Cn²=${atmCn2.value}/10 · AO=${aoQ.value}/10 · DET=${detHz.value}Hz`;
}
// Any input/select change refreshes the readouts; after a Reason run,
// also recompute HEL numbers and re-render overlays live.
$$("input,select").forEach(el => el.addEventListener("input", () => {
  syncKnobDisplays();
  if (state.hasReasoned) {
    // keep it responsive: recompute power/dwell numerics even without rerunning detection
    recomputeHEL(); // async but we don't await here for UI responsiveness
    renderFrameOverlay();
    renderTrade();
  }
}));
// Initial paint and detector-selection snapshot.
syncKnobDisplays();
renderMissionContext();
setHfStatus("idle");
const detInit = getDetectorSelection();
state.detector.mode = detInit.value;
state.detector.kind = detInit.kind;
state.hf.detector = detInit.value;
// Toggle RAW vs HF feed
chipFeed.addEventListener("click", async () => {
  if (!state.videoLoaded) return;
  if (!state.hf.processedUrl) {
    log("HF processed feed not ready yet. Run Reason (HF mode) and wait for backend.", "w");
    return;
  }
  await setEngageFeed(!state.useProcessedFeed);
  log(`Engage feed set to: ${state.useProcessedFeed ? "HF" : "RAW"}`, "t");
});
// Toggle depth view
chipDepth.addEventListener("click", async () => {
  if (!state.videoLoaded) return;
  if (!state.hf.depthVideoUrl) {
    log("Depth video not ready yet. Run Reason and wait for depth processing.", "w");
    return;
  }
  await toggleDepthView();
  log(`View set to: ${state.useDepthFeed ? "DEPTH" : "DEFAULT"}`, "t");
});
// Toggle first frame depth view (Tab 1)
if (chipFrameDepth) {
  chipFrameDepth.addEventListener("click", () => {
    if (!state.videoLoaded) return;
    if (!state.hf.depthFirstFrameUrl) {
      log("First frame depth not ready yet. Run Reason and wait for depth processing.", "w");
      return;
    }
    toggleFirstFrameDepthView();
    log(`First frame view set to: ${state.useFrameDepthView ? "DEPTH" : "DEFAULT"}`, "t");
  });
}
// Refresh intel summary (unbiased)
if (btnIntelRefresh) {
  btnIntelRefresh.addEventListener("click", async () => {
    if (!state.videoLoaded) return;
    log("Refreshing mission intel summary (unbiased)…", "t");
    await computeIntelSummary();
  });
}
// ========= Logging =========
// Append a timestamped line to the on-screen system log and keep it
// scrolled to the bottom. level: "t" info (default), "w" warning,
// "e" error, "g" success.
function log(msg, level = "t") {
  const stamp = new Date().toLocaleTimeString();
  const tags = { e: "[ERR]", w: "[WARN]", g: "[OK]" };
  const tag = tags[level] || "[SYS]";
  const entry = document.createElement("span");
  entry.className = level;
  entry.textContent = `${stamp} ${tag} ${msg}\n`;
  sysLog.appendChild(entry);
  sysLog.scrollTop = sysLog.scrollHeight;
}
// Update the system status line and its indicator dot.
// kind "warn"/"bad" adds the matching modifier class; anything else
// leaves the plain "dot" class (nominal).
function setStatus(kind, text) {
  sysStatus.textContent = text;
  let cls = "dot";
  if (kind === "warn") cls += " warn";
  else if (kind === "bad") cls += " bad";
  sysDot.className = cls;
}
// ========= Mission Intel Summary (unbiased, no location) =========
// Update the mission-intel stamp text and its status dot; no-op when
// the intel panel is absent from the DOM.
function setIntelStatus(kind, text) {
  if (!intelStamp || !intelDot) return;
  intelStamp.innerHTML = text;
  let cls = "dot";
  if (kind === "warn") cls += " warn";
  else if (kind === "bad") cls += " bad";
  intelDot.className = cls;
  // Shrink the dot and drop its glow inside the compact intel header.
  intelDot.style.width = "7px";
  intelDot.style.height = "7px";
  intelDot.style.boxShadow = "none";
}
// Set the i-th intel thumbnail image source (empty string clears it);
// ignores out-of-range indices.
function setIntelThumb(i, dataUrl) {
  const target = intelThumbs?.[i];
  if (target) {
    target.src = dataUrl || "";
  }
}
// Restore the intel panel to its idle state: placeholder prompt,
// "Idle" status, and all three thumbnails cleared.
function resetIntelUI() {
  if (!intelSummaryBox) return;
  intelSummaryBox.innerHTML = 'Upload a video, then click Reason to generate an unbiased scene summary.';
  setIntelStatus("warn", "Idle");
  for (let i = 0; i < 3; i++) {
    setIntelThumb(i, "");
  }
}
// Naive English pluralizer: append "s" unless the count is exactly 1
// or the label already ends in "s".
function pluralize(label, n) {
  const keepSingular = n === 1 || label.endsWith("s");
  return keepSingular ? label : `${label}s`;
}
// [Deleted] inferSceneDescriptor
// Build an unbiased scene summary for the intel panel: sample three
// frames (start / ~33% / ~66%), downscale each to 640×360 JPEG data
// URLs shown as thumbnails, then pass them to the externalIntel hook
// and render its text. Guarded against re-entry via state.intelBusy.
async function computeIntelSummary() {
  if (!intelSummaryBox) return;
  if (!state.videoLoaded) { resetIntelUI(); return; }
  if (state.intelBusy) return;
  state.intelBusy = true;
  setIntelStatus("warn", "Generating…");
  intelSummaryBox.textContent = "Sampling frames and running analysis…";
  try {
    // Fall back to fixed offsets (1s, 2s) when duration is unknown/0.
    const dur = (videoHidden?.duration || videoEngage?.duration || 0);
    const times = [0, dur ? dur * 0.33 : 1, dur ? dur * 0.66 : 2];
    const frames = [];
    // Sample frames sequentially (each seek must complete first).
    for (let i = 0; i < times.length; i++) {
      await seekTo(videoHidden, times[i]);
      const bmp = await frameToBitmap(videoHidden);
      // Draw to temp canvas to get dataURL
      const c = document.createElement("canvas");
      c.width = 640; c.height = 360; // downscale
      const ctx = c.getContext("2d");
      ctx.drawImage(bmp, 0, 0, c.width, c.height);
      const dataUrl = c.toDataURL("image/jpeg", 0.6);
      frames.push(dataUrl);
      // Update the thumbnail as soon as each frame is ready;
      // thumbnail failure must not abort the summary.
      try { setIntelThumb(i, dataUrl); } catch (_) { }
    }
    // Call external hook
    const summary = await externalIntel(frames);
    intelSummaryBox.textContent = summary;
    setIntelStatus("good", `Updated · ${new Date().toLocaleTimeString()}`);
  } catch (err) {
    setIntelStatus("bad", "Summary unavailable");
    intelSummaryBox.textContent = `Unable to generate summary: ${err.message}`;
    console.error(err);
  } finally {
    state.intelBusy = false;
  }
}
// ========= Tabs =========
// Tab switching: each .tabbtn activates its #tab-<name> panel and
// triggers the renders that panel needs on entry.
$$(".tabbtn").forEach(btn => {
  btn.addEventListener("click", () => {
    $$(".tabbtn").forEach(b => b.classList.remove("active"));
    btn.classList.add("active");
    const tab = btn.dataset.tab;
    $$(".tab").forEach(t => t.classList.remove("active"));
    $(`#tab-${tab}`).classList.add("active");
    if (tab === "trade") renderTrade();
    if (tab === "engage") {
      // Overlays size to the now-visible panel, then repaint.
      resizeOverlays();
      renderRadar();
      renderTrackCards();
    }
  });
});
// ========= Video load / unload =========
// Fully reset the demo to its pre-upload state: stop backend polling,
// release blob: object URLs, clear HF/tracker/detection state, blank
// both video elements and every overlay, and restore the STANDBY UI.
// options.preserveInput keeps the <input type=file> value and its
// metadata label (used when a new file is about to replace the old).
async function unloadVideo(options = {}) {
  const preserveInput = !!options.preserveInput;
  // Stop async status polling if running.
  if (state.hf.asyncPollInterval) {
    clearInterval(state.hf.asyncPollInterval);
    state.hf.asyncPollInterval = null;
  }
  // Release object URLs so the browser can free the underlying blobs.
  // revoke can throw on an already-revoked URL; best-effort only.
  const revokeBlobUrl = (url) => {
    if (url && url.startsWith("blob:")) {
      try { URL.revokeObjectURL(url); } catch (_) { }
    }
  };
  revokeBlobUrl(state.videoUrl);
  revokeBlobUrl(state.hf.processedUrl);
  revokeBlobUrl(state.hf.depthVideoUrl);
  revokeBlobUrl(state.hf.depthFirstFrameUrl);
  // Reset core + HF backend state.
  state.videoUrl = null;
  state.videoFile = null;
  state.videoLoaded = false;
  state.useProcessedFeed = false;
  state.useDepthFeed = false;
  state.useFrameDepthView = false;
  state.hf.missionId = null;
  state.hf.plan = null;
  state.hf.processedUrl = null;
  state.hf.processedBlob = null;
  state.hf.depthVideoUrl = null;
  state.hf.depthBlob = null;
  state.hf.depthFirstFrameUrl = null;
  state.hf.depthFirstFrameBlob = null;
  state.hf.summary = null;
  state.hf.busy = false;
  state.hf.lastError = null;
  state.hf.asyncJobId = null;
  state.hf.asyncStatus = "idle";
  setHfStatus("idle");
  renderMissionContext();
  resetIntelUI();
  state.hasReasoned = false;
  state.isReasoning = false; // Reset reasoning lock
  // Reset Reason button state.
  btnReason.disabled = false;
  btnReason.style.opacity = "1";
  btnReason.style.cursor = "pointer";
  btnCancelReason.style.display = "none";
  btnEngage.disabled = true;
  // Clear detections and tracker state.
  state.detections = [];
  state.selectedId = null;
  state.tracker.tracks = [];
  state.tracker.nextId = 1;
  state.tracker.running = false;
  state.tracker.selectedTrackId = null;
  state.tracker.beamOn = false;
  // Detach sources and force both media elements to reset.
  videoHidden.removeAttribute("src");
  videoEngage.removeAttribute("src");
  videoHidden.load();
  videoEngage.load();
  // (Merged the two identical preserveInput guards from the original.)
  if (!preserveInput) {
    videoFile.value = "";
    $("#videoMeta").textContent = "No file";
  }
  // Restore empty-state panels and wipe every canvas.
  frameEmpty.style.display = "flex";
  engageEmpty.style.display = "flex";
  frameNote.textContent = "Awaiting video";
  engageNote.textContent = "Awaiting video";
  clearCanvas(frameCanvas);
  clearCanvas(frameOverlay);
  clearCanvas(engageOverlay);
  renderRadar();
  renderFrameTrackList();
  renderFeatures(null);
  renderTrade();
  setStatus("warn", "STANDBY · No video loaded");
  log("Video unloaded. Demo reset.", "w");
}
// Eject button: tear down the loaded video and reset the whole demo.
btnEject.addEventListener("click", async () => {
  await unloadVideo();
});
// Handle a new video upload: reset prior state (keeping the file
// input), build an object URL (or data URL when running from file://,
// where blob URLs from a "null" origin are unreliable), clear all HF
// backend caches, wait for metadata, then capture and render the
// first frame for Tab 1 reasoning.
videoFile.addEventListener("change", async (e) => {
  const file = e.target.files && e.target.files[0];
  if (!file) return;
  const pendingFile = file;
  await unloadVideo({ preserveInput: true });
  state.videoFile = pendingFile;
  const nullOrigin = (window.location && window.location.origin) === "null";
  if (nullOrigin) {
    state.videoUrl = await readFileAsDataUrl(pendingFile);
  } else {
    state.videoUrl = URL.createObjectURL(pendingFile);
  }
  // STOP any existing async polling
  stopAsyncPolling();
  // Reset HF backend state for this new upload, releasing stale blobs.
  if (state.hf.processedUrl && state.hf.processedUrl.startsWith("blob:")) {
    try { URL.revokeObjectURL(state.hf.processedUrl); } catch (_) { }
  }
  if (state.hf.depthVideoUrl && state.hf.depthVideoUrl.startsWith("blob:")) {
    try { URL.revokeObjectURL(state.hf.depthVideoUrl); } catch (_) { }
  }
  if (state.hf.depthFirstFrameUrl && state.hf.depthFirstFrameUrl.startsWith("blob:")) {
    try { URL.revokeObjectURL(state.hf.depthFirstFrameUrl); } catch (_) { }
  }
  state.hf.processedUrl = null;
  state.hf.processedBlob = null;
  state.hf.depthVideoUrl = null;
  state.hf.depthBlob = null;
  state.hf.depthFirstFrameUrl = null;
  state.hf.depthFirstFrameBlob = null;
  state.hf.asyncJobId = null;
  state.hf.firstFrameUrl = null;
  state.hf.firstFrameDetections = null;
  state.hf.statusUrl = null;
  state.hf.videoUrl = null;
  state.hf.asyncStatus = "idle";
  state.hf.asyncProgress = null;
  state.hf.queries = [];
  state.hf.summary = null;
  state.hf.lastError = null;
  state.hf.busy = false;
  state.useProcessedFeed = false;
  state.useDepthFeed = false;
  state.useFrameDepthView = false;
  setHfStatus("idle");
  renderMissionContext();
  videoHidden.src = state.videoUrl;
  videoEngage.removeAttribute("src");
  videoEngage.load();
  // Initialize with no engage feed until processed video is ready
  videoEngage.setAttribute("data-processed", "false");
  btnEngage.disabled = true;
  setStatus("warn", "LOADING · Parsing video metadata");
  log(`Video selected: ${pendingFile.name} (${Math.round(pendingFile.size / 1024 / 1024)} MB)`, "t");
  // Wait for metadata (duration/dimensions). Only videoHidden has a
  // source at this point; the original awaited it twice inside a
  // Promise.all — a copy/paste duplicate, collapsed to one await.
  await waitVideoReady(videoHidden);
  const dur = videoHidden.duration || 0;
  const w = videoHidden.videoWidth || 1280;
  const h = videoHidden.videoHeight || 720;
  state.videoLoaded = true;
  state.frame.w = w;
  state.frame.h = h;
  $("#videoMeta").textContent = `${pendingFile.name} · ${dur.toFixed(1)}s · ${w}×${h}`;
  frameEmpty.style.display = "none";
  engageEmpty.style.display = "none";
  frameNote.textContent = `${w}×${h} · First frame only`;
  engageNote.textContent = `${dur.toFixed(1)}s · paused`;
  setStatus("warn", "READY · Video loaded (run Reason)");
  log("Video loaded. Ready for first-frame reasoning.", "g");
  resizeOverlays();
  await captureFirstFrame();
  drawFirstFrame();
  renderFrameOverlay();
  renderRadar();
  renderTrade();
});
// Fetch the backend's annotated first frame (detections already drawn
// server-side) and paint it onto the Tab 1 canvas, resizing both the
// frame canvas and its overlay to the image dimensions.
function displayAsyncFirstFrame() {
  if (!state.hf.firstFrameUrl) return;
  log(`Fetching HF first frame: ${state.hf.firstFrameUrl}`, "t");
  // Display first frame with detections overlaid (segmentation masks or bounding boxes)
  const img = new Image();
  img.crossOrigin = "anonymous"; // needed so canvas isn't tainted by the cross-origin image
  img.src = `${state.hf.firstFrameUrl}?t=${Date.now()}`; // Cache bust
  img.onload = () => {
    frameCanvas.width = img.width;
    frameCanvas.height = img.height;
    frameOverlay.width = img.width;
    frameOverlay.height = img.height;
    const ctx = frameCanvas.getContext("2d");
    ctx.clearRect(0, 0, img.width, img.height);
    ctx.drawImage(img, 0, 0);
    frameEmpty.style.display = "none";
    log(`✓ HF first frame displayed (${img.width}×${img.height})`, "g");
  };
  img.onerror = (err) => {
    console.error("Failed to load first frame:", err);
    log("✗ HF first frame load failed - check CORS or URL", "e");
  };
}
// Resolve once video element v has its metadata (dimensions/duration).
// Resolves immediately if metadata is already available; otherwise
// waits for "loadedmetadata" and kicks off load().
async function waitVideoReady(v) {
  return new Promise((resolve) => {
    const onReady = () => { v.removeEventListener("loadedmetadata", onReady); resolve(); };
    // NOTE(review): if readyState >= 1 but videoWidth is 0 (e.g. no
    // video track), "loadedmetadata" may never fire again and this
    // promise would hang — confirm inputs always carry a video track.
    if (v.readyState >= 1 && v.videoWidth) { resolve(); return; }
    v.addEventListener("loadedmetadata", onReady);
    v.load();
  });
}
// Read a File/Blob into a base64 data: URL. Resolves with "" if the
// reader produced no result; rejects on a read error.
function readFileAsDataUrl(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onerror = () => {
      reject(new Error("Failed to read file"));
    };
    reader.onload = () => {
      resolve(String(reader.result || ""));
    };
    reader.readAsDataURL(file);
  });
}
// Seek the hidden video to t=0 and cache the decoded first frame as a
// bitmap in state.frame.bitmap for Tab 1 reasoning/overlay drawing.
async function captureFirstFrame() {
  if (!state.videoLoaded) return;
  await seekTo(videoHidden, 0.0);
  const bmp = await frameToBitmap(videoHidden);
  state.frame.bitmap = bmp;
  log("Captured first frame for Tab 1 reasoning.", "t");
}
// Seek video element v to timeSec (clamped into [0, duration-0.05] so
// the seek lands on a decodable frame) and resolve once "seeked"
// fires. Rejects if a media error fires first or the assignment throws.
async function seekTo(v, timeSec) {
  return new Promise((resolve, reject) => {
    // Remove BOTH listeners on either outcome. The original removed
    // only the listener that fired, leaking the other handler on the
    // element with every call (seekTo runs repeatedly per summary).
    const cleanup = () => {
      v.removeEventListener("seeked", onSeeked);
      v.removeEventListener("error", onError);
    };
    const onSeeked = () => { cleanup(); resolve(); };
    const onError = () => { cleanup(); reject(new Error("Video seek error")); };
    v.addEventListener("seeked", onSeeked);
    v.addEventListener("error", onError);
    try {
      v.currentTime = clamp(timeSec, 0, Math.max(0, v.duration - 0.05));
    } catch (err) {
      cleanup();
      reject(err);
    }
  });
}
// Snapshot the current frame of a video element. Draws it onto an
// offscreen canvas at native resolution (1280×720 fallback), then
// returns an ImageBitmap when supported, otherwise the canvas itself
// (both are valid drawImage sources downstream).
async function frameToBitmap(videoEl) {
  const w = videoEl.videoWidth || 1280;
  const h = videoEl.videoHeight || 720;
  const c = document.createElement("canvas");
  c.width = w; c.height = h;
  const ctx = c.getContext("2d");
  ctx.drawImage(videoEl, 0, 0, w, h);
  if ("createImageBitmap" in window) {
    return await createImageBitmap(c);
  }
  return c; // fallback
}
// Wipe a canvas to fully transparent over its entire area.
function clearCanvas(canvas) {
  canvas.getContext("2d").clearRect(0, 0, canvas.width, canvas.height);
}
// ========= Detector loading =========
// ========= Mission-driven HF Space backend =========
// Prompt sent when the operator leaves the mission objective blank:
// asks the backend for unfiltered full-class detection.
const ALL_CLASSES_PROMPT = "No mission objective provided. Run full-class object detection across all supported classes. Detect and label every object you can with bounding boxes; do not filter by mission.";
// Default query classes used for object_detection when no mission
// objective is given (COCO-style labels).
const DEFAULT_QUERY_CLASSES = [
  "person",
  "car",
  "truck",
  "motorcycle",
  "bicycle",
  "bus",
  "train",
  "airplane"
];
// The operator's mission objective text, or the detect-everything
// prompt when the field is blank/whitespace.
function missionPromptOrAll() {
  const text = (missionText?.value || "").trim();
  if (text) return text;
  return ALL_CLASSES_PROMPT;
}
// Detector identifiers that are served by the HF Space backend
// (membership checked by isHfSpaceDetector).
const HF_SPACE_DETECTORS = new Set([
  "hf_yolov8",
  "detr_resnet50",
  "grounding_dino",
  "sam3",
  "drone_yolo",
]);
// Backend currently requires latitude/longitude form fields. We send neutral defaults (no UI, no location in outputs).
const DEFAULT_LAT = "0";
const DEFAULT_LON = "0";
// True for any detector mode that routes through the HF backend —
// i.e. everything except the local COCO model and the external hook.
function isHfMode(mode) {
  return mode !== "coco" && mode !== "external";
}
// Membership test against the HF Space detector registry.
function isHfSpaceDetector(det) {
  return HF_SPACE_DETECTORS.has(det);
}
// True when value is a Hugging Face Space URL, i.e. starts with
// http(s)://huggingface.co/spaces/. Null/undefined yields false.
function isSpaceUrl(value) {
  const s = String(value || "");
  return s.startsWith("http://huggingface.co/spaces/")
    || s.startsWith("https://huggingface.co/spaces/");
}
// Heuristic: a value like "org/model" (contains a slash but is not a
// Space URL) is treated as an HF Inference model identifier.
function isHfInferenceModel(value) {
  const s = String(value || "");
  if (!s.includes("/")) return false;
  return !isSpaceUrl(s);
}
// Render the HF backend status line: prefix with the async job status
// (unless idle), translate well-known message keywords into display
// phrases, and color-code the text (bad/good/warn CSS variables).
function setHfStatus(msg) {
  if (!hfBackendStatus) return;
  const statusPrefix = state.hf.asyncStatus !== "idle"
    ? `[${state.hf.asyncStatus.toUpperCase()}] `
    : "";
  const normalized = String(msg || "").toLowerCase();
  let display = msg;
  if (normalized.includes("ready")) {
    display = "MISSION PACKAGE READY";
  } else if (normalized.includes("idle")) {
    display = "STANDBY";
  } else if (normalized.includes("completed")) {
    display = "PROCESS COMPLETE";
  } else if (normalized.includes("processing")) {
    display = "ACTIVE SCAN";
  } else if (normalized.includes("cancelled")) {
    display = "STAND-DOWN";
  } else if (normalized.includes("error")) {
    display = "FAULT CONDITION";
  }
  hfBackendStatus.textContent = `HF Backend: ${statusPrefix}${display}`;
  // Color coding. Fixed: the original tested the raw msg here
  // (case-sensitive, and it threw on non-string input) while the
  // display logic above used the lowercased text — now consistent.
  if (normalized.includes("error")) {
    hfBackendStatus.style.color = "var(--bad)";
  } else if (normalized.includes("ready") || normalized.includes("completed")) {
    hfBackendStatus.style.color = "var(--good)";
  } else {
    hfBackendStatus.style.color = "var(--warn)";
  }
}
// Refresh the mission context readouts: active query classes, mission
// ID, the RAW/HF feed chip label, and the depth chips.
function renderMissionContext() {
  const queries = state.hf.queries || [];
  if (missionClassesEl) missionClassesEl.textContent = queries.length ? queries.join(", ") : "—";
  if (missionIdEl) missionIdEl.textContent = `Mission: ${state.hf.missionId || "—"}`;
  if (chipFeed) {
    chipFeed.textContent = state.useProcessedFeed ? "FEED:HF" : "FEED:RAW";
  }
  updateDepthChip();
}
// Normalize a mission keyword: lowercase, collapse runs of
// underscores/hyphens to a single space, strip anything outside
// [a-z0-9 whitespace], and trim the ends. Null/undefined yields "".
function normalizeToken(s) {
  const lowered = String(s || "").toLowerCase();
  const spaced = lowered.replace(/[_\-]+/g, " ");
  return spaced.replace(/[^a-z0-9\s]/g, "").trim();
}
// Expand mission keyword tokens into a Set of detector class names:
// every normalized token is kept, plus broad COCO-class fallbacks for
// common mission vocabulary (drones → airplane/bird/kite, etc.).
function missionSynonyms(tokens) {
  // [substring triggers] → [classes to add]
  const expansions = [
    [["drone", "uav", "quad", "small uav"], ["airplane", "bird", "kite"]],
    [["aircraft", "fixed wing", "jet"], ["airplane", "bird"]],
    [["boat", "ship", "vessel", "usv"], ["boat"]],
    [["person", "diver", "swimmer"], ["person"]],
    [["vehicle", "truck", "car"], ["car", "truck"]],
  ];
  const out = new Set();
  tokens.forEach((token) => {
    const v = normalizeToken(token);
    if (!v) return;
    out.add(v);
    for (const [needles, adds] of expansions) {
      if (needles.some((n) => v.includes(n))) {
        adds.forEach((cls) => out.add(cls));
      }
    }
  });
  return out;
}
// Pass-through: mission filtering moved server-side, so local (COCO)
// detections are shown unfiltered. Kept for call-site compatibility.
function filterPredsByMission(preds) {
  return preds;
}
// Always false: mission-focus highlighting moved to the backend, so
// no label gets special client-side treatment. Kept for call sites.
function isMissionFocusLabel(label) {
  return false;
}
// ========= HF Async Detection Pipeline =========
// Submit the loaded video to the HF backend's async detection
// pipeline (/detect/async): reset Tab-1 UI for the new run, build the
// multipart request from the selected detector + mission objective,
// record the returned job/asset URLs, display the annotated first
// frame (seeding state.detections), then poll the job to completion.
// Throws if no video is loaded or the submission request fails.
async function hfDetectAsync() {
  const sel = getDetectorSelection();
  const detector = sel.value;
  const kind = sel.kind;
  const videoFile = state.videoFile; // shadows the file-input element on purpose
  // ----- Reset state & UI for a fresh run -----
  state.detections = [];
  state.selectedId = null;
  state.tracker.tracks = []; // Clear tracking state too
  // Clear cached backend results so they don't reappear
  state.hf.firstFrameDetections = null;
  // Explicitly clear UI using standard renderers
  renderFrameTrackList();
  renderFrameOverlay();
  // Force a clear of the radar canvas (its render loop picks up the
  // empty state next frame).
  if (frameRadar) {
    const ctx = frameRadar.getContext("2d");
    ctx.clearRect(0, 0, frameRadar.width, frameRadar.height);
  }
  // Clear counts
  if (trackCount) trackCount.textContent = "0";
  if (objCount) objCount.textContent = "0";
  // Interim placeholder while the job is submitted. (Repaired: the
  // original had an unterminated string literal split across lines.)
  if (frameTrackList) frameTrackList.innerHTML = "Computing...";
  renderFeatures(null); // Clear feature panel
  if (!videoFile) {
    throw new Error("No video loaded");
  }
  // ----- Map the detector kind to a backend processing mode -----
  let mode;
  if (kind === "segmentation") {
    mode = "segmentation";
  } else if (kind === "drone") {
    mode = "drone_detection";
  } else {
    mode = "object_detection";
  }
  // Use mission objective directly as detector input.
  const missionObjective = (missionText?.value || "").trim();
  let queries = "";
  if (missionObjective) {
    // Use mission objective text directly - let backend interpret it
    queries = missionObjective;
    state.hf.queries = [missionObjective];
    log(`Using mission objective: "${queries}"`);
  } else {
    if (mode === "drone_detection") {
      // Drone mode defaults on backend; omit queries entirely.
      queries = "";
      state.hf.queries = [];
      log("No mission objective specified - using drone defaults");
    } else {
      // No mission objective - use predefined classes
      queries = DEFAULT_QUERY_CLASSES.join(", ");
      state.hf.queries = DEFAULT_QUERY_CLASSES.slice();
      log("No mission objective specified - using default classes");
    }
  }
  // ----- Build the multipart request -----
  const form = new FormData();
  form.append("video", videoFile);
  form.append("mode", mode);
  if (queries) {
    form.append("queries", queries);
  }
  // Add detector for object_detection mode
  if (mode === "object_detection" && detector) {
    form.append("detector", detector);
  }
  if (mode === "segmentation") {
    form.append("segmenter", "sam3");
  }
  // drone_detection uses drone_yolo automatically
  // Depth / GPT feature flags from the toggles.
  const enableDepthToggle = document.getElementById("enableDepthToggle");
  const useLegacyDepth = enableDepthToggle && enableDepthToggle.checked;
  const useGPT = checkEnableGPT && checkEnableGPT.checked;
  form.append("depth_estimator", useLegacyDepth ? "depth" : "");
  form.append("enable_depth", useLegacyDepth ? "true" : "false");
  form.append("enable_gpt", useGPT ? "true" : "false");
  // ----- Submit the async job -----
  setHfStatus(`submitting ${mode} job...`);
  log(`Submitting ${mode} to ${state.hf.baseUrl || "(same-origin)"} (detector=${detector || "n/a"})`, "t");
  const resp = await fetch(`${state.hf.baseUrl}/detect/async`, {
    method: "POST",
    body: form
  });
  if (!resp.ok) {
    const err = await resp.json().catch(() => ({ detail: resp.statusText }));
    throw new Error(err.detail || "Async detection submission failed");
  }
  const data = await resp.json();
  // ----- Record job handles & asset URLs -----
  state.hf.asyncJobId = data.job_id;
  state.hf.firstFrameUrl = `${state.hf.baseUrl}${data.first_frame_url}`;
  state.hf.firstFrameDetections = Array.isArray(data.first_frame_detections)
    ? data.first_frame_detections
    : null;
  state.hf.statusUrl = `${state.hf.baseUrl}${data.status_url}`;
  state.hf.videoUrl = `${state.hf.baseUrl}${data.video_url}`;
  state.hf.asyncStatus = data.status;
  // Store depth URLs if provided
  if (data.depth_video_url) {
    state.hf.depthVideoUrl = `${state.hf.baseUrl}${data.depth_video_url}`;
    log("Depth video URL received", "t");
  }
  if (data.first_frame_depth_url) {
    state.hf.depthFirstFrameUrl = `${state.hf.baseUrl}${data.first_frame_depth_url}`;
    log("First frame depth URL received (will fetch when ready)", "t");
  }
  // ----- Live stream (Engage tab), when offered by the backend -----
  if (data.stream_url) {
    log("Activating live stream...", "t");
    const streamUrl = `${state.hf.baseUrl}${data.stream_url}`;
    setStreamingMode(streamUrl);
    // NOTE: Auto-switch removed to allow viewing First Frame on Tab 1
    log("Live view available in 'Engage' tab.", "g");
    setStatus("warn", "Live processing... View in Engage tab");
    // Trigger resize/render (background setup)
    resizeOverlays();
    renderRadar();
    renderTrackCards();
  }
  // ----- First-frame preview & detection seeding -----
  if ((mode === "object_detection" || mode === "segmentation" || mode === "drone_detection") && state.hf.firstFrameUrl) {
    const count = Array.isArray(data.first_frame_detections) ? data.first_frame_detections.length : null;
    if (count != null) {
      log(`First frame: ${count} detections`);
    } else {
      log("First frame ready (no detections payload)", "t");
    }
    displayAsyncFirstFrame();
    // Populate state.detections with backend results so Radar and Cards work
    if (state.hf.firstFrameDetections) {
      state.detections = state.hf.firstFrameDetections.map((d, i) => {
        const id = `T${String(i + 1).padStart(2, '0')}`;
        const [x1, y1, x2, y2] = d.bbox || [0, 0, 0, 0];
        const w = x2 - x1;
        const h = y2 - y1;
        const ap = defaultAimpoint(d.label);
        return {
          id,
          label: d.label,
          score: d.score,
          bbox: { x: x1, y: y1, w: w, h: h },
          aim: { ...ap },
          features: null,
          baseRange_m: d.gpt_distance_m || null, // GPT is sole source of distance
          baseAreaFrac: null,
          baseDwell_s: null,
          reqP_kW: null,
          maxP_kW: null,
          pkill: null,
          // GPT properties - sole source of distance estimation
          gpt_distance_m: d.gpt_distance_m,
          gpt_direction: d.gpt_direction,
          gpt_description: d.gpt_description,
          // Depth visualization only (not for distance)
          depth_rel: d.depth_rel
        };
      });
      // Update UI components
      log(`Populating UI with ${state.detections.length} tracked objects`, "t");
      renderFrameTrackList();
      renderFrameRadar();
      renderFeatures(null);
      renderTrade();
      renderFrameOverlay();
    }
  }
  log(`Backend job ID: ${data.job_id} (polling every 3s)`, "t");
  setHfStatus(`job ${data.job_id.substring(0, 8)}: processing...`);
  // Poll until the backend reports a terminal status.
  await pollAsyncJob();
}
// Best-effort cancel of a backend detection job via
// DELETE /detect/job/{id}. Skips HF Space hosts (replica job stores
// 404) and jobs already in a terminal state. Never throws — always
// returns a { status, message } result (or undefined when no jobId).
async function cancelBackendJob(jobId, source = "user") {
  if (!jobId) return;
  if ((state.hf.baseUrl || "").includes("hf.space")) {
    log("Cancel request suppressed for HF Space (replica job store can return 404).", "w");
    return { status: "skipped", message: "Cancel disabled for HF Space" };
  }
  // Check if job is already in a terminal state
  if (state.hf.asyncStatus === "completed" || state.hf.asyncStatus === "failed") {
    log(`Backend job ${jobId.substring(0, 8)}: already ${state.hf.asyncStatus}, skipping cancel`, "t");
    return { status: state.hf.asyncStatus, message: `Job already ${state.hf.asyncStatus}` };
  }
  log(`Sending DELETE to /detect/job/${jobId.substring(0, 8)}... (${source})`, "t");
  try {
    const response = await fetch(`${state.hf.baseUrl}/detect/job/${jobId}`, {
      method: "DELETE"
    });
    if (response.ok) {
      const result = await response.json();
      log(`✓ Backend job ${jobId.substring(0, 8)}: ${result.message || "cancelled"} (status: ${result.status})`, "g");
      return result;
    } else if (response.status === 404) {
      // 404 is expected when the job already finished and was cleaned up.
      const detail = await response.json().catch(() => ({ detail: "Job not found" }));
      log(`⚠ Backend job ${jobId.substring(0, 8)}: ${detail.detail || "not found or already cleaned up"}`, "w");
      return { status: "not_found", message: detail.detail };
    } else {
      const errorText = await response.text().catch(() => "Unknown error");
      log(`✗ Backend job ${jobId.substring(0, 8)}: cancel failed (${response.status}) - ${errorText}`, "e");
      return { status: "error", message: errorText };
    }
  } catch (err) {
    log(`✗ Backend job ${jobId.substring(0, 8)}: cancel error - ${err.message}`, "e");
    return { status: "error", message: err.message };
  }
}
// Switch the Engage view to a live stream: lazily create an <img>
// (#streamView) layered over the video element, point it at the
// stream URL, and hide the regular video + empty-state panel.
function setStreamingMode(url) {
  // Ensure stream image element exists
  let streamView = $("#streamView");
  if (!streamView) {
    streamView = document.createElement("img");
    streamView.id = "streamView";
    streamView.style.width = "100%";
    streamView.style.height = "100%";
    streamView.style.objectFit = "contain";
    streamView.style.position = "absolute";
    streamView.style.top = "0";
    streamView.style.left = "0";
    streamView.style.zIndex = "10"; // Above video
    streamView.style.backgroundColor = "#000";
    // Insert into the wrapper
    // videoEngage is likely inside a container or just in the DOM
    // We'll insert it as a sibling or wrapper child
    if (videoEngage && videoEngage.parentNode) {
      videoEngage.parentNode.appendChild(streamView);
      // Ensure container is relative so the absolute overlay anchors to it
      if (getComputedStyle(videoEngage.parentNode).position === "static") {
        videoEngage.parentNode.style.position = "relative";
      }
    }
  }
  if (streamView) {
    streamView.src = url;
    streamView.style.display = "block";
    if (videoEngage) videoEngage.style.display = "none";
    // Also hide empty state
    if (engageEmpty) engageEmpty.style.display = "none";
  }
}
/**
 * Leave stream display mode: drop the stream connection, hide the overlay
 * image, and reveal the engage video element again.
 */
function stopStreamingMode() {
  const view = $("#streamView");
  if (view) {
    view.src = ""; // clearing src terminates the streaming connection
    view.style.display = "none";
  }
  if (videoEngage) videoEngage.style.display = "block";
  // The empty-state placeholder is deliberately left untouched: streaming
  // is normally stopped only after a video has been loaded.
}
/**
 * Abort an in-flight reasoning run: stop status polling, exit stream mode,
 * request backend cancellation (best effort), reset flags, and restore the
 * Reason/Cancel buttons and status line.
 */
function cancelReasoning() {
  // Tear down the status poller first so no further updates arrive.
  if (state.hf.asyncPollInterval) {
    clearInterval(state.hf.asyncPollInterval);
    state.hf.asyncPollInterval = null;
    log("HF polling stopped.", "w");
  }
  stopStreamingMode();
  // Fire-and-forget backend cancellation if a job is known.
  const jobId = state.hf.asyncJobId;
  if (jobId) cancelBackendJob(jobId, "cancel button");
  // Reset reasoning state.
  state.isReasoning = false;
  state.hf.busy = false;
  state.hf.asyncJobId = null;
  state.hf.asyncStatus = "cancelled";
  // Restore the Reason button and hide the Cancel control.
  btnReason.disabled = false;
  btnReason.style.opacity = "1";
  btnReason.style.cursor = "pointer";
  btnCancelReason.style.display = "none";
  setStatus("warn", "CANCELLED · Reasoning stopped");
  setHfStatus("cancelled (stopped by user)");
  log("Reasoning cancelled by user.", "w");
}
/**
 * Poll the backend status endpoint for the currently running async job.
 *
 * Runs a 3-second interval until one of the terminal conditions is hit:
 *  - status "completed": fetch the processed + depth artifacts, then resolve;
 *  - status "failed": reject with the backend-reported error;
 *  - HTTP 404 from the status URL: reject ("Job expired or not found");
 *  - 200 attempts (~10 minutes): reject with a timeout error.
 * In every terminal path the interval is cleared, and on completion/failure
 * state.hf.asyncJobId is nulled so later cancel attempts are not sent for a
 * job that is already finished.
 * Side effects: updates state.hf.asyncStatus/asyncProgress and the HF
 * status line on every tick.
 * @returns {Promise<void>} resolves once artifacts have been fetched.
 */
async function pollAsyncJob() {
  const pollInterval = 3000; // 3 seconds
  const maxAttempts = 200; // 10 minutes max (3s × 200)
  let attempts = 0;
  // Guards against overlapping artifact downloads when a slow fetch spans
  // more than one poll tick (the interval keeps firing during the await).
  let fetchingVideo = false;
  return new Promise((resolve, reject) => {
    state.hf.asyncPollInterval = setInterval(async () => {
      attempts++;
      try {
        const resp = await fetch(state.hf.statusUrl, { cache: "no-store" });
        if (!resp.ok) {
          if (resp.status === 404) {
            // Backend no longer knows this job (expired / cleaned up).
            clearInterval(state.hf.asyncPollInterval);
            reject(new Error("Job expired or not found"));
            return;
          }
          throw new Error(`Status check failed: ${resp.statusText}`);
        }
        const status = await resp.json();
        state.hf.asyncStatus = status.status;
        state.hf.asyncProgress = status;
        if (status.status === "completed") {
          // A previous tick is already downloading the artifacts.
          if (fetchingVideo) return;
          fetchingVideo = true;
          // Capture the ID now; it is nulled before resolve/reject below.
          const completedJobId = state.hf.asyncJobId;
          log(`✓ Backend job ${completedJobId.substring(0, 8)}: completed successfully`, "g");
          setHfStatus("job completed, fetching video...");
          try {
            await fetchProcessedVideo();
            await fetchDepthVideo();
            await fetchDepthFirstFrame();
            clearInterval(state.hf.asyncPollInterval);
            // Clear job ID to prevent cancel attempts after completion
            state.hf.asyncJobId = null;
            setHfStatus("ready");
            stopStreamingMode();
            resolve();
          } catch (err) {
            if (err && err.code === "VIDEO_PENDING") {
              // Backend reported done but the file is not flushed yet:
              // keep polling and retry the download on a later tick.
              setHfStatus("job completed, finalizing video...");
              fetchingVideo = false;
              return;
            }
            clearInterval(state.hf.asyncPollInterval);
            state.hf.asyncJobId = null; // Clear on error too
            stopStreamingMode();
            reject(err);
          }
        } else if (status.status === "failed") {
          clearInterval(state.hf.asyncPollInterval);
          const errMsg = status.error || "Processing failed";
          log(`✗ Backend job ${state.hf.asyncJobId.substring(0, 8)}: failed - ${errMsg}`, "e");
          // Clear job ID to prevent cancel attempts after failure
          state.hf.asyncJobId = null;
          setHfStatus(`error: ${errMsg}`);
          stopStreamingMode();
          reject(new Error(errMsg));
        } else {
          // Still processing
          setHfStatus(`job ${state.hf.asyncJobId.substring(0, 8)}: ${status.status}... (${attempts})`);
        }
        if (attempts >= maxAttempts) {
          clearInterval(state.hf.asyncPollInterval);
          reject(new Error("Polling timeout (10 minutes)"));
        }
      } catch (err) {
        // Network/parse failure: stop polling and surface the error.
        clearInterval(state.hf.asyncPollInterval);
        reject(err);
      }
    }, pollInterval);
  });
}
/**
 * Download the processed (annotated) video from the backend and expose it
 * via state.hf.processedUrl. Under a file:// (null) origin the server URL
 * is used directly with a cache-busting timestamp, since blob URLs would
 * be blob:null; otherwise the video is fetched as a blob.
 * @throws {Error} with code "VIDEO_PENDING" when the backend answers 202
 *   (video still being finalized), or a plain Error on other HTTP failures.
 */
async function fetchProcessedVideo() {
  const resp = await fetch(state.hf.videoUrl, { cache: "no-store" });
  if (!resp.ok) {
    if (resp.status === 202) {
      const pending = new Error("Video still processing");
      pending.code = "VIDEO_PENDING";
      throw pending;
    }
    throw new Error(`Failed to fetch video: ${resp.statusText}`);
  }
  const isNullOrigin = (window.location && window.location.origin) === "null";
  if (isNullOrigin) {
    // Avoid blob:null URLs when opened via file://
    state.hf.processedBlob = null;
    state.hf.processedUrl = `${state.hf.videoUrl}?t=${Date.now()}`;
    btnEngage.disabled = false;
    log("Processed video ready (streaming URL)");
    return;
  }
  const blob = await resp.blob();
  // Release the previous object URL before replacing it.
  if (state.hf.processedUrl && state.hf.processedUrl.startsWith("blob:")) {
    URL.revokeObjectURL(state.hf.processedUrl);
  }
  state.hf.processedBlob = blob;
  state.hf.processedUrl = URL.createObjectURL(blob);
  btnEngage.disabled = false;
  log(`Processed video ready (${(blob.size / 1024 / 1024).toFixed(1)} MB)`);
}
/**
 * Download the depth-rendered video from the backend and swap
 * state.hf.depthVideoUrl over to a local blob URL.
 * Best-effort: logs and returns (without throwing) when the URL is missing,
 * the backend answers 202 (still processing), or the fetch fails.
 * Under a file:// (null) origin the server URL is used directly, since
 * blob URLs would be blob:null.
 * Fixes vs. previous revision: removed the unused `originalUrl` local and
 * added revocation of the prior blob URL before replacing it (prevents an
 * object-URL leak across repeated runs, matching fetchProcessedVideo).
 */
async function fetchDepthVideo() {
  if (!state.hf.depthVideoUrl) {
    log("No depth video URL available", "w");
    return;
  }
  try {
    const resp = await fetch(state.hf.depthVideoUrl, { cache: "no-store" });
    if (!resp.ok) {
      if (resp.status === 202) {
        log("Depth video still processing", "w");
        return;
      }
      throw new Error(`Failed to fetch depth video: ${resp.statusText}`);
    }
    const nullOrigin = (window.location && window.location.origin) === "null";
    if (nullOrigin) {
      // Avoid blob:null URLs when opened via file://; stream from server.
      state.hf.depthBlob = null;
      state.hf.depthVideoUrl = `${state.hf.depthVideoUrl}?t=${Date.now()}`;
      log("Depth video ready (streaming URL)");
      return;
    }
    const blob = await resp.blob();
    // Release any previous blob URL before replacing it (mirrors
    // fetchProcessedVideo; avoids leaking object URLs across runs).
    if (state.hf.depthVideoUrl.startsWith("blob:")) {
      URL.revokeObjectURL(state.hf.depthVideoUrl);
    }
    state.hf.depthBlob = blob;
    state.hf.depthVideoUrl = URL.createObjectURL(blob);
    log(`Depth video ready (${(blob.size / 1024 / 1024).toFixed(1)} MB) - Click VIEW chip to toggle`, "g");
    updateDepthChip();
  } catch (err) {
    log(`Error fetching depth video: ${err.message}`, "e");
  }
}
/**
 * Download the depth-rendered first frame and swap
 * state.hf.depthFirstFrameUrl over to a local blob URL (the blob: prefix
 * is what displayFirstFrameWithDepth uses to detect readiness).
 * Best-effort: logs and returns (without throwing) when the URL is missing,
 * the backend answers 202, or the fetch fails.
 * Fixes vs. previous revision: removed the unused `originalUrl` local and
 * added revocation of the prior blob URL before replacing it (prevents an
 * object-URL leak across repeated runs, matching fetchProcessedVideo).
 */
async function fetchDepthFirstFrame() {
  if (!state.hf.depthFirstFrameUrl) {
    log("No depth first frame URL available", "w");
    return;
  }
  try {
    const resp = await fetch(state.hf.depthFirstFrameUrl, { cache: "no-store" });
    if (!resp.ok) {
      if (resp.status === 202) {
        log("Depth first frame still processing", "w");
        return;
      }
      throw new Error(`Failed to fetch depth first frame: ${resp.statusText}`);
    }
    // Fetch as blob and create a blob URL.
    const blob = await resp.blob();
    // Release any previous blob URL before replacing it (avoids leaking
    // object URLs across runs; mirrors fetchProcessedVideo).
    if (state.hf.depthFirstFrameUrl.startsWith("blob:")) {
      URL.revokeObjectURL(state.hf.depthFirstFrameUrl);
    }
    state.hf.depthFirstFrameBlob = blob;
    state.hf.depthFirstFrameUrl = URL.createObjectURL(blob);
    log(`✓ Depth first frame ready (${(blob.size / 1024).toFixed(1)} KB) - Click VIEW chip on Tab 1 to toggle`, "g");
    updateFirstFrameDepthChip();
  } catch (err) {
    log(`Error fetching depth first frame: ${err.message}`, "e");
  }
}
/** Halt the async job status poller, if one is active. Idempotent. */
function stopAsyncPolling() {
  const handle = state.hf.asyncPollInterval;
  if (handle) {
    clearInterval(handle);
    state.hf.asyncPollInterval = null;
  }
}
/**
 * Toggle the Tab-2 engage feed between the raw upload and the HF-processed
 * (overlay) video, preserving playback position and play/pause state.
 * No-op when no video is loaded or the processed URL is not yet available.
 * @param {boolean} useProcessed - true to show the processed feed.
 */
async function setEngageFeed(useProcessed) {
  state.useProcessedFeed = !!useProcessed;
  renderMissionContext();
  if (!state.videoLoaded) return;
  // Only switch when the processed feed is both requested and available.
  const target = (state.useProcessedFeed && state.hf.processedUrl) ? state.hf.processedUrl : null;
  if (!target) return;
  const resume = !videoEngage.paused;
  const position = videoEngage.currentTime || 0;
  try { videoEngage.pause(); } catch (_) { }
  if (videoEngage.src !== target) {
    videoEngage.src = target;
    // Mark video element to show/hide based on whether it's processed content
    videoEngage.setAttribute('data-processed', state.useProcessedFeed ? 'true' : 'false');
    log(`Video feed switched to: ${state.useProcessedFeed ? 'HF processed' : 'raw'} (data-processed=${state.useProcessedFeed})`, "t");
    videoEngage.load();
    await waitVideoReady(videoEngage);
    // Restore the playback position, clamped to the new duration.
    try { videoEngage.currentTime = Math.min(position, (videoEngage.duration || position)); } catch (_) { }
  }
  resizeOverlays();
  if (resume) {
    try { await videoEngage.play(); } catch (_) { }
  }
}
/**
 * Flip the Tab-2 view between the depth rendering and the default feed
 * (processed if active, otherwise the raw upload), preserving playback
 * position and play/pause state.
 */
async function toggleDepthView() {
  state.useDepthFeed = !state.useDepthFeed;
  updateDepthChip();
  if (!state.videoLoaded) return;
  const resume = !videoEngage.paused;
  const position = videoEngage.currentTime || 0;
  try { videoEngage.pause(); } catch (_) { }
  // Source priority: depth feed > processed feed > raw upload.
  const target =
    (state.useDepthFeed && state.hf.depthVideoUrl) ? state.hf.depthVideoUrl :
    (state.useProcessedFeed && state.hf.processedUrl) ? state.hf.processedUrl :
    state.videoUrl;
  if (videoEngage.src !== target) {
    videoEngage.src = target;
    videoEngage.setAttribute('data-depth', state.useDepthFeed ? 'true' : 'false');
    log(`Video view switched to: ${state.useDepthFeed ? 'depth' : 'default'}`, "t");
    videoEngage.load();
    await waitVideoReady(videoEngage);
    // Restore the playback position, clamped to the new duration.
    try { videoEngage.currentTime = Math.min(position, (videoEngage.duration || position)); } catch (_) { }
  }
  resizeOverlays();
  if (resume) {
    try { await videoEngage.play(); } catch (_) { }
  }
}
/** Refresh the Tab-2 VIEW chip label to match the depth-feed toggle. */
function updateDepthChip() {
  if (!chipDepth) return;
  chipDepth.textContent = state.useDepthFeed ? "VIEW:DEPTH" : "VIEW:DEFAULT";
}
/**
 * Toggle Tab 1 between the default first frame and its depth rendering,
 * then update the chip label and repaint the frame canvas.
 */
function toggleFirstFrameDepthView() {
  state.useFrameDepthView = !state.useFrameDepthView;
  updateFirstFrameDepthChip();
  displayFirstFrameWithDepth();
}
/** Refresh the Tab-1 VIEW chip label to match the first-frame depth toggle. */
function updateFirstFrameDepthChip() {
  if (!chipFrameDepth) return;
  chipFrameDepth.textContent = state.useFrameDepthView ? "VIEW:DEPTH" : "VIEW:DEFAULT";
}
/**
 * Paint the Tab-1 first frame onto frameCanvas, choosing between the
 * default frame and the depth frame per state.useFrameDepthView.
 * The depth frame is considered ready only once it is a blob: URL (set by
 * fetchDepthFirstFrame); otherwise the toggle is reverted and the default
 * frame is shown. A failed depth load also falls back to the default view.
 */
function displayFirstFrameWithDepth() {
  let frameUrl;
  if (state.useFrameDepthView && state.hf.depthFirstFrameUrl) {
    if (state.hf.depthFirstFrameUrl.startsWith('blob:')) {
      // Depth frame has been downloaded locally — safe to display.
      frameUrl = state.hf.depthFirstFrameUrl;
    } else {
      log("Depth first frame not ready yet. Please wait for processing to complete.", "w");
      state.useFrameDepthView = false; // Revert to default view
      updateFirstFrameDepthChip();
      frameUrl = state.hf.firstFrameUrl;
    }
  } else if (state.hf.firstFrameUrl) {
    frameUrl = state.hf.firstFrameUrl;
  } else {
    log("No first frame URL available", "w");
    return;
  }
  if (!frameUrl) {
    log("No valid frame URL to display", "w");
    return;
  }
  log(`Displaying ${state.useFrameDepthView ? 'depth' : 'default'} first frame`, "t");
  // Load the frame off-DOM, then blit it once decoded.
  const img = new Image();
  img.crossOrigin = "anonymous";
  img.onload = () => {
    // Size both the frame canvas and its overlay to the native image size.
    frameCanvas.width = img.width;
    frameCanvas.height = img.height;
    frameOverlay.width = img.width;
    frameOverlay.height = img.height;
    const ctx = frameCanvas.getContext("2d");
    ctx.clearRect(0, 0, img.width, img.height);
    ctx.drawImage(img, 0, 0);
    frameEmpty.style.display = "none";
    log(`✓ ${state.useFrameDepthView ? 'Depth' : 'Default'} first frame displayed (${img.width}×${img.height})`, "g");
  };
  img.onerror = (err) => {
    console.error(`Failed to load ${state.useFrameDepthView ? 'depth' : 'default'} first frame:`, err);
    log(`✗ ${state.useFrameDepthView ? 'Depth' : 'Default'} first frame load failed - reverting to default view`, "e");
    // A failed depth frame falls back to the default view (one retry only:
    // the flag is cleared first, so a second failure cannot recurse).
    if (state.useFrameDepthView) {
      state.useFrameDepthView = false;
      updateFirstFrameDepthChip();
      displayFirstFrameWithDepth(); // Retry with default view
    }
  };
  img.src = frameUrl;
}
/**
 * Kick off the HF Space processing pipeline in the background.
 * No-ops when a run is already in flight or when a local/external detector
 * is selected. The async work is deliberately fire-and-forget: errors are
 * captured into state.hf.lastError and surfaced via the status line, and
 * the busy flag is always released.
 */
async function startHfPipeline() {
  if (state.hf.busy) {
    log("HF pipeline already running");
    return;
  }
  const { kind } = getDetectorSelection();
  // Local/external detectors bypass the HF Space entirely.
  if (kind === "local" || kind === "external") {
    log("Skipping HF pipeline (not using HF detector)");
    return;
  }
  state.hf.busy = true;
  state.hf.lastError = null;
  // Background processing (non-blocking)
  const run = async () => {
    try {
      // Run async detection (mission text will be used directly as queries if no manual labels provided)
      await hfDetectAsync();
      // Auto-switch to processed feed when ready
      if (state.hf.processedUrl && !state.useProcessedFeed) {
        log("Auto-switching to HF processed video feed (segmentation/detection overlays)", "g");
        await setEngageFeed(true);
      }
    } catch (err) {
      console.error("HF pipeline error:", err);
      state.hf.lastError = err.message;
      setHfStatus(`error: ${err.message}`);
      log(`⚠ HF error: ${err.message}`);
    } finally {
      state.hf.busy = false;
    }
  };
  run(); // intentionally not awaited — caller returns immediately
}
// Keep detector state in sync with the dropdown and opportunistically
// (re)load the local model when COCO is selected.
detectorSelect.addEventListener("change", () => {
  const selection = getDetectorSelection();
  state.detector.mode = selection.value;
  state.detector.kind = selection.kind;
  state.hf.detector = selection.value;
  // local detector is still used for aimpoint math + tracking; HF Space provides mission-driven video detection.
  ensureCocoDetector();
  renderMissionContext();
  log(`Detector mode set to: ${state.detector.mode}`, "t");
});
// Mirror the tracker dropdown into state.
trackerSelect.addEventListener("change", () => {
  state.tracker.mode = trackerSelect.value;
  log(`Tracker mode set to: ${state.tracker.mode}`, "t");
});
/**
 * Lazily load the in-browser COCO-SSD detector (TF.js) when the "coco"
 * detector mode is selected. Safe to call repeatedly: returns immediately
 * if the model is already loaded or a load is in progress. On failure the
 * app stays usable with External/HF detectors (model left null).
 * Fix vs. previous revision: the status ternary passed "warn" for both
 * branches (`state.videoLoaded ? "warn" : "warn"`) — collapsed to a plain
 * "warn" literal; only the message text varies with videoLoaded.
 */
async function ensureCocoDetector() {
  if (state.detector.loaded || state.detector.loading) return;
  if (state.detector.mode !== "coco") return;
  state.detector.loading = true;
  setStatus("warn", "LOADING · Detector model");
  log("Loading COCO-SSD detector (browser model).", "t");
  try {
    await loadScriptOnce("tfjs", "https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@4.16.0/dist/tf.min.js");
    await loadScriptOnce("coco-ssd", "https://cdn.jsdelivr.net/npm/@tensorflow-models/coco-ssd@2.2.2/dist/coco-ssd.min.js");
    if (!window.cocoSsd || !window.tf) throw new Error("TF.js or cocoSsd not available.");
    state.detector.model = await window.cocoSsd.load();
    state.detector.loaded = true;
    log("COCO-SSD detector loaded.", "g");
    setStatus("warn", state.videoLoaded ? "READY · Video loaded (run Reason)" : "STANDBY · No video loaded");
  } catch (err) {
    log(`Detector load failed: ${err.message}. Switch to External detector or use HF models.`, "w");
    setStatus("warn", "READY · Detector not loaded (use External or HF)");
    state.detector.loaded = false;
    state.detector.model = null;
  } finally {
    state.detector.loading = false;
  }
}
// Cache of in-flight/settled script loads, keyed by caller-supplied key.
// Storing the Promise itself (rather than a "loading"/"loaded"/"failed"
// status string) lets concurrent callers share one load without the
// previous 50 ms interval-based polling, and guarantees waiters see the
// original failure reason instead of a generic "failed earlier" error.
const loadedScripts = new Map();
/**
 * Load an external script exactly once per key.
 * Subsequent calls (including concurrent ones) reuse the same Promise, so
 * a script is never injected twice; a prior failure rejects all waiters.
 * @param {string} key - Cache key identifying the script.
 * @param {string} src - Script URL to inject into <head>.
 * @returns {Promise<void>} resolves when the script has executed.
 */
function loadScriptOnce(key, src) {
  const cached = loadedScripts.get(key);
  if (cached) return cached;
  const pending = new Promise((resolve, reject) => {
    const s = document.createElement("script");
    s.src = src;
    s.async = true;
    s.onload = () => resolve();
    s.onerror = () => reject(new Error(`Failed to load ${src}`));
    document.head.appendChild(s);
  });
  loadedScripts.set(key, pending);
  return pending;
}
// Start loading detector opportunistically if selected (no-op for other
// modes; errors are handled inside the loader, so the returned promise is
// intentionally not awaited here).
ensureCocoDetector();
// ========= Core physics-lite model =========
/**
 * Snapshot the trade-space console controls into a plain object.
 * Numeric inputs are coerced with unary +; duty is converted from the
 * percentage slider to a 0–1 fraction; mode is passed through as a string.
 * @returns {{PkW:number, aperture:number, M2:number, jitter_urad:number,
 *            duty:number, mode:string, vis_km:number, cn2:number,
 *            spray:number, ao:number, baseRange:number}}
 */
function getKnobs() {
  return {
    PkW: +helPower.value,
    aperture: +helAperture.value,
    M2: +helM2.value,
    jitter_urad: +helJitter.value,
    duty: (+helDuty.value) / 100,
    mode: helMode.value,
    vis_km: +atmVis.value,
    cn2: +atmCn2.value,
    spray: +seaSpray.value,
    ao: +aoQ.value,
    baseRange: +rangeBase.value
  };
}
// ========= External Hooks (API Integration Points) =========
/**
 * Hook: Object Detection — integration point for an external API.
 * Currently a stub that logs its input and reports no detections.
 * @param {Object} input { canvas, width, height }
 * @returns {Promise} [{ bbox:[x,y,w,h], class:"label", score:0.95 }, ...]
 */
async function externalDetect(input) {
  // TODO: Call your object detection endpoint here
  console.log("externalDetect called", input);
  return [];
}
/**
* Hook: Feature Extraction
* @param {Array} detections Array of detection objects
* @param {Object} frameInfo { width, height }
* @returns {Promise