diff --git "a/LaserPerception/LaserPerception.js" "b/LaserPerception/LaserPerception.js" new file mode 100644--- /dev/null +++ "b/LaserPerception/LaserPerception.js" @@ -0,0 +1,3085 @@ +/* ========================= + Weapon-Grade Demo Engine + - Tab 1: first-frame perception + reasoning + - Tab 2: closed-loop tracking + dynamic dwell update + - Tab 3: trade-space console + ========================= */ + + (() => { + const API_CONFIG = window.API_CONFIG || {}; + const BACKEND_BASE = (() => { + const raw = (API_CONFIG.BACKEND_BASE || API_CONFIG.BASE_URL || "").trim(); + if (raw) return raw.replace(/\/$/, ""); + const origin = (window.location && window.location.origin) || ""; + if (origin && origin !== "null") return origin; + return ""; + })(); + const $ = (sel, root = document) => root.querySelector(sel); + const $$ = (sel, root = document) => Array.from(root.querySelectorAll(sel)); + const clamp = (x, a, b) => Math.min(b, Math.max(a, x)); + const lerp = (a, b, t) => a + (b - a) * t; + const now = () => performance.now(); + + const state = { + videoUrl: null, + videoFile: null, + videoLoaded: false, + useProcessedFeed: false, + hasReasoned: false, + isReasoning: false, // Flag to prevent concurrent Reason executions + + hf: { + baseUrl: BACKEND_BASE, + detector: "auto", + asyncJobId: null, // Current job ID from /detect/async + asyncPollInterval: null, // Polling timer handle + firstFrameUrl: null, // First frame preview URL + firstFrameDetections: null, // First-frame detections from backend + statusUrl: null, // Status polling URL + videoUrl: null, // Final video URL + asyncStatus: "idle", // "idle"|"processing"|"completed"|"failed" + asyncProgress: null, // Progress data from status endpoint + queries: [], // Mission objective used as query + processedUrl: null, + processedBlob: null, + summary: null, + busy: false, + lastError: null + }, + + detector: { + mode: "coco", + kind: "object", + loaded: false, + model: null, + loading: false, + cocoBlocked: false, + 
hfTrackingWarned: false + }, + + tracker: { + mode: "iou", + tracks: [], + nextId: 1, + lastDetTime: 0, + running: false, + selectedTrackId: null, + beamOn: false, + lastFrameTime: 0 + }, + + frame: { + w: 1280, + h: 720, + bitmap: null + }, + + detections: [], // from Tab 1 + selectedId: null, + + intelBusy: false, + + ui: { + cursorMode: "on", + agentCursor: { x: 0.65, y: 0.28, vx: 0, vy: 0, visible: false, target: null, mode: "idle", t0: 0 } + } + }; + + // ========= Elements ========= + const sysDot = $("#sys-dot"); + const sysStatus = $("#sys-status"); + const sysLog = $("#sysLog"); + const telemetry = $("#telemetry"); + + const videoFile = $("#videoFile"); + const btnEject = $("#btnEject"); + + const detectorSelect = $("#detectorSelect"); + const trackerSelect = $("#trackerSelect"); + + function getDetectorSelection() { + const opt = detectorSelect?.options?.[detectorSelect.selectedIndex]; + return { + value: detectorSelect?.value || "coco", + kind: opt?.dataset?.kind || "object", + label: (opt?.textContent || "").trim() + }; + } + + const helPower = $("#helPower"); + const helAperture = $("#helAperture"); + const helM2 = $("#helM2"); + const helJitter = $("#helJitter"); + const helDuty = $("#helDuty"); + const helMode = $("#helMode"); + + const atmVis = $("#atmVis"); + const atmCn2 = $("#atmCn2"); + const seaSpray = $("#seaSpray"); + const aoQ = $("#aoQ"); + const rangeBase = $("#rangeBase"); + const detHz = $("#detHz"); + + const policyMode = $("#policyMode"); + const assessWindow = $("#assessWindow"); + const cursorMode = $("#cursorMode"); + + const btnReason = $("#btnReason"); + const btnCancelReason = $("#btnCancelReason"); + const btnRecompute = $("#btnRecompute"); + const btnClear = $("#btnClear"); + + const frameCanvas = $("#frameCanvas"); + const frameOverlay = $("#frameOverlay"); + const frameEmpty = $("#frameEmpty"); + const frameNote = $("#frameNote"); + + const objList = $("#objList"); + const objCount = $("#objCount"); + const featureTable = 
$("#featureTable"); + const selId = $("#selId"); + + const summaryStamp = $("#summaryStamp"); + const summaryTable = $("#summaryTable"); + const mMaxP = $("#mMaxP"); + const mReqP = $("#mReqP"); + const mMargin = $("#mMargin"); + const mPlan = $("#mPlan"); + const mMaxPSub = $("#mMaxPSub"); + const mReqPSub = $("#mReqPSub"); + const mMarginSub = $("#mMarginSub"); + const mPlanSub = $("#mPlanSub"); + + const videoHidden = $("#videoHidden"); + + const videoEngage = $("#videoEngage"); + const engageOverlay = $("#engageOverlay"); + const engageEmpty = $("#engageEmpty"); + const engageNote = $("#engageNote"); + + // Mission-driven (HF Space) backend controls + const missionText = $("#missionText"); + const hfBackendStatus = $("#hfBackendStatus"); + + const intelSummaryBox = $("#intelSummaryBox"); + const intelStamp = $("#intelStamp"); + const intelDot = $("#intelDot"); + const btnIntelRefresh = $("#btnIntelRefresh"); + const intelThumbs = [$("#intelThumb0"), $("#intelThumb1"), $("#intelThumb2")]; + const missionClassesEl = $("#missionClasses"); + const missionIdEl = $("#missionId"); + const chipFeed = $("#chipFeed"); + + const btnEngage = $("#btnEngage"); + const btnPause = $("#btnPause"); + const btnReset = $("#btnReset"); + const btnToggleSidebar = $("#btnToggleSidebar"); + + const chipPolicy = $("#chipPolicy"); + const chipTracks = $("#chipTracks"); + const chipBeam = $("#chipBeam"); + const chipHz = $("#chipHz"); + + const dwellText = $("#dwellText"); + const dwellBar = $("#dwellBar"); + + const radarCanvas = $("#radarCanvas"); + const trackList = $("#trackList"); + const liveStamp = $("#liveStamp"); + + const tradeCanvas = $("#tradeCanvas"); + const tradeTarget = $("#tradeTarget"); + const rMin = $("#rMin"); + const rMax = $("#rMax"); + const showPk = $("#showPk"); + const btnReplot = $("#btnReplot"); + const btnSnap = $("#btnSnap"); + + // ========= UI: knobs display ========= + function syncKnobDisplays() { + $("#helPowerVal").textContent = helPower.value; + 
$("#helApertureVal").textContent = (+helAperture.value).toFixed(2); + $("#helM2Val").textContent = (+helM2.value).toFixed(1); + $("#helJitterVal").textContent = (+helJitter.value).toFixed(1); + $("#helDutyVal").textContent = helDuty.value; + + $("#atmVisVal").textContent = atmVis.value; + $("#atmCn2Val").textContent = atmCn2.value; + $("#seaSprayVal").textContent = seaSpray.value; + $("#aoQVal").textContent = aoQ.value; + + $("#rangeBaseVal").textContent = rangeBase.value; + $("#detHzVal").textContent = detHz.value; + $("#assessWindowVal").textContent = (+assessWindow.value).toFixed(1); + + chipPolicy.textContent = `POLICY:${policyMode.value.toUpperCase()}`; + chipHz.textContent = `DET:${detHz.value}Hz`; + + telemetry.textContent = `HEL=${helPower.value}kW · VIS=${atmVis.value}km · Cn²=${atmCn2.value}/10 · AO=${aoQ.value}/10 · DET=${detHz.value}Hz`; + } + $$("input,select").forEach(el => el.addEventListener("input", () => { + syncKnobDisplays(); + if (state.hasReasoned) { + // keep it responsive: recompute power/dwell numerics even without rerunning detection + recomputeHEL(); // async but we don't await here for UI responsiveness + renderFrameOverlay(); + renderTrade(); + } + })); + syncKnobDisplays(); + renderMissionContext(); + setHfStatus("idle"); + + const detInit = getDetectorSelection(); + state.detector.mode = detInit.value; + state.detector.kind = detInit.kind; + state.hf.detector = detInit.value; + + // Toggle RAW vs HF feed + chipFeed.addEventListener("click", async () => { + if (!state.videoLoaded) return; + if (!state.hf.processedUrl) { + log("HF processed feed not ready yet. Run Reason (HF mode) and wait for backend.", "w"); + return; + } + await setEngageFeed(!state.useProcessedFeed); + log(`Engage feed set to: ${state.useProcessedFeed ? 
"HF" : "RAW"}`, "t"); + }); + + + // Refresh intel summary (unbiased) + if (btnIntelRefresh) { + btnIntelRefresh.addEventListener("click", async () => { + if (!state.videoLoaded) return; + log("Refreshing mission intel summary (unbiased)…", "t"); + await computeIntelSummary(); + }); + } + + // ========= Logging ========= + function log(msg, level = "t") { + const ts = new Date().toLocaleTimeString(); + const prefix = level === "e" ? "[ERR]" : (level === "w" ? "[WARN]" : (level === "g" ? "[OK]" : "[SYS]")); + const line = `${ts} ${prefix} ${msg}\n`; + const span = document.createElement("span"); + span.className = level; + span.textContent = line; + sysLog.appendChild(span); + sysLog.scrollTop = sysLog.scrollHeight; + } + + function setStatus(kind, text) { + sysStatus.textContent = text; + sysDot.className = "dot" + (kind === "warn" ? " warn" : (kind === "bad" ? " bad" : "")); + } + + // ========= Mission Intel Summary (unbiased, no location) ========= + function setIntelStatus(kind, text) { + if (!intelStamp || !intelDot) return; + intelStamp.innerHTML = text; + intelDot.className = "dot" + (kind === "warn" ? " warn" : (kind === "bad" ? 
" bad" : "")); + intelDot.style.width = "7px"; + intelDot.style.height = "7px"; + intelDot.style.boxShadow = "none"; + } + + function setIntelThumb(i, dataUrl) { + const img = intelThumbs?.[i]; + if (!img) return; + img.src = dataUrl || ""; + } + + function resetIntelUI() { + if (!intelSummaryBox) return; + intelSummaryBox.innerHTML = 'Upload a video, then click Reason to generate an unbiased scene summary.'; + setIntelStatus("warn", "Idle"); + setIntelThumb(0, ""); + setIntelThumb(1, ""); + setIntelThumb(2, ""); + } + + function pluralize(label, n) { + if (n === 1) return label; + if (label.endsWith("s")) return label; + return label + "s"; + } + // [Deleted] inferSceneDescriptor + + async function computeIntelSummary() { + if (!intelSummaryBox) return; + if (!state.videoLoaded) { resetIntelUI(); return; } + if (state.intelBusy) return; + + state.intelBusy = true; + setIntelStatus("warn", "Generating…"); + intelSummaryBox.textContent = "Sampling frames and running analysis…"; + + try { + const dur = (videoHidden?.duration || videoEngage?.duration || 0); + const times = [0, dur ? dur * 0.33 : 1, dur ? 
dur * 0.66 : 2]; + const frames = []; + + // Sample frames + for (let i = 0; i < times.length; i++) { + await seekTo(videoHidden, times[i]); + const bmp = await frameToBitmap(videoHidden); + // Draw to temp canvas to get dataURL + const c = document.createElement("canvas"); + c.width = 640; c.height = 360; // downscale + const ctx = c.getContext("2d"); + ctx.drawImage(bmp, 0, 0, c.width, c.height); + const dataUrl = c.toDataURL("image/jpeg", 0.6); + frames.push(dataUrl); + + // update thumb + try { setIntelThumb(i, dataUrl); } catch (_) { } + } + + // Call external hook + const summary = await externalIntel(frames); + + intelSummaryBox.textContent = summary; + setIntelStatus("good", `Updated · ${new Date().toLocaleTimeString()}`); + } catch (err) { + setIntelStatus("bad", "Summary unavailable"); + intelSummaryBox.textContent = `Unable to generate summary: ${err.message}`; + console.error(err); + } finally { + state.intelBusy = false; + } + } + + + // ========= Tabs ========= + $$(".tabbtn").forEach(btn => { + btn.addEventListener("click", () => { + $$(".tabbtn").forEach(b => b.classList.remove("active")); + btn.classList.add("active"); + const tab = btn.dataset.tab; + $$(".tab").forEach(t => t.classList.remove("active")); + $(`#tab-${tab}`).classList.add("active"); + if (tab === "trade") renderTrade(); + if (tab === "engage") resizeOverlays(); + }); + }); + + // ========= Video load / unload ========= + async function unloadVideo(options = {}) { + const preserveInput = !!options.preserveInput; + // Stop polling if running + if (state.hf.asyncPollInterval) { + clearInterval(state.hf.asyncPollInterval); + state.hf.asyncPollInterval = null; + } + + if (state.videoUrl && state.videoUrl.startsWith("blob:")) { + URL.revokeObjectURL(state.videoUrl); + } + if (state.hf.processedUrl && state.hf.processedUrl.startsWith("blob:")) { + try { URL.revokeObjectURL(state.hf.processedUrl); } catch (_) { } + } + state.videoUrl = null; + state.videoFile = null; + state.videoLoaded = 
false; + state.useProcessedFeed = false; + + state.hf.missionId = null; + state.hf.plan = null; + state.hf.processedUrl = null; + state.hf.processedBlob = null; + state.hf.summary = null; + state.hf.busy = false; + state.hf.lastError = null; + state.hf.asyncJobId = null; + state.hf.asyncStatus = "idle"; + setHfStatus("idle"); + renderMissionContext(); + resetIntelUI(); + state.hasReasoned = false; + state.isReasoning = false; // Reset reasoning lock + + // Reset Reason button state + btnReason.disabled = false; + btnReason.style.opacity = "1"; + btnReason.style.cursor = "pointer"; + btnCancelReason.style.display = "none"; + btnEngage.disabled = true; + + state.detections = []; + state.selectedId = null; + + state.tracker.tracks = []; + state.tracker.nextId = 1; + state.tracker.running = false; + state.tracker.selectedTrackId = null; + state.tracker.beamOn = false; + + videoHidden.removeAttribute("src"); + videoEngage.removeAttribute("src"); + videoHidden.load(); + videoEngage.load(); + if (!preserveInput) { + videoFile.value = ""; + } + + if (!preserveInput) { + $("#videoMeta").textContent = "No file"; + } + frameEmpty.style.display = "flex"; + engageEmpty.style.display = "flex"; + frameNote.textContent = "Awaiting video"; + engageNote.textContent = "Awaiting video"; + + clearCanvas(frameCanvas); + clearCanvas(frameOverlay); + clearCanvas(engageOverlay); + renderRadar(); + + renderObjectList(); + renderSummary(); + renderFeatures(null); + renderTrade(); + + setStatus("warn", "STANDBY · No video loaded"); + log("Video unloaded. 
Demo reset.", "w"); + } + + btnEject.addEventListener("click", async () => { + await unloadVideo(); + }); + + videoFile.addEventListener("change", async (e) => { + const file = e.target.files && e.target.files[0]; + if (!file) return; + const pendingFile = file; + + await unloadVideo({ preserveInput: true }); + state.videoFile = pendingFile; + + const nullOrigin = (window.location && window.location.origin) === "null"; + if (nullOrigin) { + state.videoUrl = await readFileAsDataUrl(pendingFile); + } else { + state.videoUrl = URL.createObjectURL(pendingFile); + } + // STOP any existing async polling + stopAsyncPolling(); + + // reset HF backend state for this new upload + if (state.hf.processedUrl && state.hf.processedUrl.startsWith("blob:")) { + try { URL.revokeObjectURL(state.hf.processedUrl); } catch (_) { } + } + state.hf.processedUrl = null; + state.hf.processedBlob = null; + state.hf.asyncJobId = null; + state.hf.firstFrameUrl = null; + state.hf.firstFrameDetections = null; + state.hf.statusUrl = null; + state.hf.videoUrl = null; + state.hf.asyncStatus = "idle"; + state.hf.asyncProgress = null; + state.hf.queries = []; + state.hf.summary = null; + state.hf.lastError = null; + state.hf.busy = false; + state.useProcessedFeed = false; + setHfStatus("idle"); + renderMissionContext(); + videoHidden.src = state.videoUrl; + videoEngage.removeAttribute("src"); + videoEngage.load(); + // Initialize with no engage feed until processed video is ready + videoEngage.setAttribute("data-processed", "false"); + btnEngage.disabled = true; + + setStatus("warn", "LOADING · Parsing video metadata"); + log(`Video selected: ${pendingFile.name} (${Math.round(pendingFile.size / 1024 / 1024)} MB)`, "t"); + + await Promise.all([ + waitVideoReady(videoHidden), + waitVideoReady(videoHidden) + ]); + + const dur = videoHidden.duration || 0; + const w = videoHidden.videoWidth || 1280; + const h = videoHidden.videoHeight || 720; + + state.videoLoaded = true; + state.frame.w = w; + 
state.frame.h = h; + + $("#videoMeta").textContent = `${pendingFile.name} · ${dur.toFixed(1)}s · ${w}×${h}`; + frameEmpty.style.display = "none"; + engageEmpty.style.display = "none"; + frameNote.textContent = `${w}×${h} · First frame only`; + engageNote.textContent = `${dur.toFixed(1)}s · paused`; + + setStatus("warn", "READY · Video loaded (run Reason)"); + log("Video loaded. Ready for first-frame reasoning.", "g"); + + resizeOverlays(); + await captureFirstFrame(); + drawFirstFrame(); + renderFrameOverlay(); + renderRadar(); + renderTrade(); + }); + + function displayAsyncFirstFrame() { + if (!state.hf.firstFrameUrl) return; + + log(`Fetching HF first frame: ${state.hf.firstFrameUrl}`, "t"); + + // Display first frame with detections overlaid (segmentation masks or bounding boxes) + const img = new Image(); + img.crossOrigin = "anonymous"; + img.src = `${state.hf.firstFrameUrl}?t=${Date.now()}`; // Cache bust + + img.onload = () => { + frameCanvas.width = img.width; + frameCanvas.height = img.height; + frameOverlay.width = img.width; + frameOverlay.height = img.height; + + const ctx = frameCanvas.getContext("2d"); + ctx.clearRect(0, 0, img.width, img.height); + ctx.drawImage(img, 0, 0); + + frameEmpty.style.display = "none"; + log(`✓ HF first frame displayed (${img.width}×${img.height})`, "g"); + }; + + img.onerror = (err) => { + console.error("Failed to load first frame:", err); + log("✗ HF first frame load failed - check CORS or URL", "e"); + }; + } + + async function waitVideoReady(v) { + return new Promise((resolve) => { + const onReady = () => { v.removeEventListener("loadedmetadata", onReady); resolve(); }; + if (v.readyState >= 1 && v.videoWidth) { resolve(); return; } + v.addEventListener("loadedmetadata", onReady); + v.load(); + }); + } + + function readFileAsDataUrl(file) { + return new Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onload = () => resolve(String(reader.result || "")); + reader.onerror = () => reject(new 
Error("Failed to read file")); + reader.readAsDataURL(file); + }); + } + + async function captureFirstFrame() { + if (!state.videoLoaded) return; + await seekTo(videoHidden, 0.0); + const bmp = await frameToBitmap(videoHidden); + state.frame.bitmap = bmp; + log("Captured first frame for Tab 1 reasoning.", "t"); + } + + async function seekTo(v, timeSec) { + return new Promise((resolve, reject) => { + const onSeeked = () => { v.removeEventListener("seeked", onSeeked); resolve(); }; + const onError = () => { v.removeEventListener("error", onError); reject(new Error("Video seek error")); }; + v.addEventListener("seeked", onSeeked); + v.addEventListener("error", onError); + try { + v.currentTime = clamp(timeSec, 0, Math.max(0, v.duration - 0.05)); + } catch (err) { + v.removeEventListener("seeked", onSeeked); + v.removeEventListener("error", onError); + reject(err); + } + }); + } + + async function frameToBitmap(videoEl) { + const w = videoEl.videoWidth || 1280; + const h = videoEl.videoHeight || 720; + const c = document.createElement("canvas"); + c.width = w; c.height = h; + const ctx = c.getContext("2d"); + ctx.drawImage(videoEl, 0, 0, w, h); + if ("createImageBitmap" in window) { + return await createImageBitmap(c); + } + return c; // fallback + } + + function clearCanvas(canvas) { + const ctx = canvas.getContext("2d"); + ctx.clearRect(0, 0, canvas.width, canvas.height); + } + + // ========= Detector loading ========= + + // ========= Mission-driven HF Space backend ========= + const ALL_CLASSES_PROMPT = "No mission objective provided. Run full-class object detection across all supported classes. 
Detect and label every object you can with bounding boxes; do not filter by mission."; + const DEFAULT_QUERY_CLASSES = [ + "person", + "car", + "truck", + "motorcycle", + "bicycle", + "bus", + "train", + "airplane" + ]; + function missionPromptOrAll() { + const t = (missionText?.value || "").trim(); + return t || ALL_CLASSES_PROMPT; + } + + const HF_SPACE_DETECTORS = new Set([ + "hf_yolov8", + "detr_resnet50", + "grounding_dino", + "sam3", + "drone_yolo" + ]); + + // Backend currently requires latitude/longitude form fields. We send neutral defaults (no UI, no location in outputs). + const DEFAULT_LAT = "0"; + const DEFAULT_LON = "0"; + + function isHfMode(mode) { + return !["coco", "external"].includes(mode); + } + + function isHfSpaceDetector(det) { + return HF_SPACE_DETECTORS.has(det); + } + + function isSpaceUrl(value) { + return /^https?:\/\/huggingface\.co\/spaces\//.test(String(value || "")); + } + + function isHfInferenceModel(value) { + const v = String(value || ""); + return v.includes("/") && !isSpaceUrl(v); + } + + function setHfStatus(msg) { + if (!hfBackendStatus) return; + const statusPrefix = state.hf.asyncStatus !== "idle" + ? 
`[${state.hf.asyncStatus.toUpperCase()}] ` + : ""; + const normalized = String(msg || "").toLowerCase(); + let display = msg; + if (normalized.includes("ready")) { + display = "MISSION PACKAGE READY"; + } else if (normalized.includes("idle")) { + display = "STANDBY"; + } else if (normalized.includes("completed")) { + display = "PROCESS COMPLETE"; + } else if (normalized.includes("processing")) { + display = "ACTIVE SCAN"; + } else if (normalized.includes("cancelled")) { + display = "STAND-DOWN"; + } else if (normalized.includes("error")) { + display = "FAULT CONDITION"; + } + hfBackendStatus.textContent = `HF Backend: ${statusPrefix}${display}`; + + // Color coding + if (msg.includes("error")) { + hfBackendStatus.style.color = "var(--bad)"; + } else if (msg.includes("ready") || msg.includes("completed")) { + hfBackendStatus.style.color = "var(--good)"; + } else { + hfBackendStatus.style.color = "var(--warn)"; + } + } + + function renderMissionContext() { + const queries = state.hf.queries || []; + if (missionClassesEl) missionClassesEl.textContent = queries.length ? queries.join(", ") : "—"; + if (missionIdEl) missionIdEl.textContent = `Mission: ${state.hf.missionId || "—"}`; + if (chipFeed) { + chipFeed.textContent = state.useProcessedFeed ? 
"FEED:HF" : "FEED:RAW"; + } + } + + function normalizeToken(s) { + return String(s || "") + .toLowerCase() + .replace(/[_\-]+/g, " ") + .replace(/[^a-z0-9\s]/g, "") + .trim(); + } + + function missionSynonyms(tokens) { + const out = new Set(); + tokens.forEach(t => { + const v = normalizeToken(t); + if (!v) return; + out.add(v); + // broad synonyms / fallbacks for common mission terms + if (v.includes("drone") || v.includes("uav") || v.includes("quad") || v.includes("small uav")) { + ["airplane", "bird", "kite"].forEach(x => out.add(x)); + } + if (v.includes("aircraft") || v.includes("fixed wing") || v.includes("jet")) { + ["airplane", "bird"].forEach(x => out.add(x)); + } + if (v.includes("boat") || v.includes("ship") || v.includes("vessel") || v.includes("usv")) { + ["boat"].forEach(x => out.add(x)); + } + if (v.includes("person") || v.includes("diver") || v.includes("swimmer")) { + ["person"].forEach(x => out.add(x)); + } + if (v.includes("vehicle") || v.includes("truck") || v.includes("car")) { + ["car", "truck"].forEach(x => out.add(x)); + } + }); + return out; + } + + function filterPredsByMission(preds) { + // Mission filtering is now handled by the backend + // Local detectors (COCO) will show all results + return preds; + } + + function isMissionFocusLabel(label) { + // Mission focus detection is now handled by the backend + // All detections from HF backend are considered mission-relevant + return false; + } + + // ========= HF Async Detection Pipeline ========= + + async function hfDetectAsync() { + const sel = getDetectorSelection(); + const detector = sel.value; + const kind = sel.kind; + const videoFile = state.videoFile; + + if (!videoFile) { + throw new Error("No video loaded"); + } + + // Determine mode based on kind + let mode; + if (kind === "segmentation") { + mode = "segmentation"; + } else if (kind === "drone") { + mode = "drone_detection"; + } else { + mode = "object_detection"; + } + + // Use mission objective directly as detector input + 
const missionObjective = (missionText?.value || "").trim(); + let queries = ""; + + if (missionObjective) { + // Use mission objective text directly - let backend interpret it + queries = missionObjective; + state.hf.queries = [missionObjective]; + log(`Using mission objective: "${queries}"`); + } else { + if (mode === "drone_detection") { + // Drone mode defaults on backend; omit queries entirely. + queries = ""; + state.hf.queries = []; + log("No mission objective specified - using drone defaults"); + } else { + // No mission objective - use predefined classes + queries = DEFAULT_QUERY_CLASSES.join(", "); + state.hf.queries = DEFAULT_QUERY_CLASSES.slice(); + log("No mission objective specified - using default classes"); + } + } + + // Build FormData + const form = new FormData(); + form.append("video", videoFile); + form.append("mode", mode); + if (queries) { + form.append("queries", queries); + } + + // Add detector for object_detection mode + if (mode === "object_detection" && detector) { + form.append("detector", detector); + } + if (mode === "segmentation") { + form.append("segmenter", "sam3"); + } + // drone_detection uses drone_yolo automatically + + // Submit async job + setHfStatus(`submitting ${mode} job...`); + log(`Submitting ${mode} to ${state.hf.baseUrl || "(same-origin)"} (detector=${detector || "n/a"})`, "t"); + const resp = await fetch(`${state.hf.baseUrl}/detect/async`, { + method: "POST", + body: form + }); + + if (!resp.ok) { + const err = await resp.json().catch(() => ({ detail: resp.statusText })); + throw new Error(err.detail || "Async detection submission failed"); + } + + const data = await resp.json(); + + // Store job info + state.hf.asyncJobId = data.job_id; + state.hf.firstFrameUrl = `${state.hf.baseUrl}${data.first_frame_url}`; + state.hf.firstFrameDetections = Array.isArray(data.first_frame_detections) + ? 
data.first_frame_detections + : null; + state.hf.statusUrl = `${state.hf.baseUrl}${data.status_url}`; + state.hf.videoUrl = `${state.hf.baseUrl}${data.video_url}`; + state.hf.asyncStatus = data.status; + + // Display first frame immediately (if object detection, segmentation, or drone) + if ((mode === "object_detection" || mode === "segmentation" || mode === "drone_detection") && state.hf.firstFrameUrl) { + const count = Array.isArray(data.first_frame_detections) ? data.first_frame_detections.length : null; + if (count != null) { + log(`First frame: ${count} detections`); + } else { + log("First frame ready (no detections payload)", "t"); + } + displayAsyncFirstFrame(); + } + + log(`Backend job ID: ${data.job_id} (polling every 3s)`, "t"); + setHfStatus(`job ${data.job_id.substring(0, 8)}: processing...`); + + // Start polling + await pollAsyncJob(); + } + + async function cancelBackendJob(jobId, source = "user") { + if (!jobId) return; + if ((state.hf.baseUrl || "").includes("hf.space")) { + log("Cancel request suppressed for HF Space (replica job store can return 404).", "w"); + return { status: "skipped", message: "Cancel disabled for HF Space" }; + } + + // Check if job is already in a terminal state + if (state.hf.asyncStatus === "completed" || state.hf.asyncStatus === "failed") { + log(`Backend job ${jobId.substring(0, 8)}: already ${state.hf.asyncStatus}, skipping cancel`, "t"); + return { status: state.hf.asyncStatus, message: `Job already ${state.hf.asyncStatus}` }; + } + + log(`Sending DELETE to /detect/job/${jobId.substring(0, 8)}... 
(${source})`, "t"); + + try { + const response = await fetch(`${state.hf.baseUrl}/detect/job/${jobId}`, { + method: "DELETE" + }); + + if (response.ok) { + const result = await response.json(); + log(`✓ Backend job ${jobId.substring(0, 8)}: ${result.message || "cancelled"} (status: ${result.status})`, "g"); + return result; + } else if (response.status === 404) { + const detail = await response.json().catch(() => ({ detail: "Job not found" })); + log(`⚠ Backend job ${jobId.substring(0, 8)}: ${detail.detail || "not found or already cleaned up"}`, "w"); + return { status: "not_found", message: detail.detail }; + } else { + const errorText = await response.text().catch(() => "Unknown error"); + log(`✗ Backend job ${jobId.substring(0, 8)}: cancel failed (${response.status}) - ${errorText}`, "e"); + return { status: "error", message: errorText }; + } + } catch (err) { + log(`✗ Backend job ${jobId.substring(0, 8)}: cancel error - ${err.message}`, "e"); + return { status: "error", message: err.message }; + } + } + + function cancelReasoning() { + // Stop HF polling if running + if (state.hf.asyncPollInterval) { + clearInterval(state.hf.asyncPollInterval); + state.hf.asyncPollInterval = null; + log("HF polling stopped.", "w"); + } + + // Cancel backend job if it exists + const jobId = state.hf.asyncJobId; + if (jobId) { + cancelBackendJob(jobId, "cancel button"); + } + + // Reset state + state.isReasoning = false; + state.hf.busy = false; + state.hf.asyncJobId = null; + state.hf.asyncStatus = "cancelled"; + + // Re-enable Reason button + btnReason.disabled = false; + btnReason.style.opacity = "1"; + btnReason.style.cursor = "pointer"; + + // Hide Cancel button + btnCancelReason.style.display = "none"; + + setStatus("warn", "CANCELLED · Reasoning stopped"); + setHfStatus("cancelled (stopped by user)"); + log("Reasoning cancelled by user.", "w"); + } + + async function pollAsyncJob() { + const pollInterval = 3000; // 3 seconds + const maxAttempts = 200; // 10 minutes max (3s 
× 200) + let attempts = 0; + let fetchingVideo = false; + + return new Promise((resolve, reject) => { + state.hf.asyncPollInterval = setInterval(async () => { + attempts++; + + try { + const resp = await fetch(state.hf.statusUrl, { cache: "no-store" }); + + if (!resp.ok) { + if (resp.status === 404) { + clearInterval(state.hf.asyncPollInterval); + reject(new Error("Job expired or not found")); + return; + } + throw new Error(`Status check failed: ${resp.statusText}`); + } + + const status = await resp.json(); + state.hf.asyncStatus = status.status; + state.hf.asyncProgress = status; + + if (status.status === "completed") { + if (fetchingVideo) return; + fetchingVideo = true; + const completedJobId = state.hf.asyncJobId; + log(`✓ Backend job ${completedJobId.substring(0, 8)}: completed successfully`, "g"); + setHfStatus("job completed, fetching video..."); + try { + await fetchProcessedVideo(); + clearInterval(state.hf.asyncPollInterval); + // Clear job ID to prevent cancel attempts after completion + state.hf.asyncJobId = null; + setHfStatus("ready"); + resolve(); + } catch (err) { + if (err && err.code === "VIDEO_PENDING") { + setHfStatus("job completed, finalizing video..."); + fetchingVideo = false; + return; + } + clearInterval(state.hf.asyncPollInterval); + state.hf.asyncJobId = null; // Clear on error too + reject(err); + } + } else if (status.status === "failed") { + clearInterval(state.hf.asyncPollInterval); + const errMsg = status.error || "Processing failed"; + log(`✗ Backend job ${state.hf.asyncJobId.substring(0, 8)}: failed - ${errMsg}`, "e"); + // Clear job ID to prevent cancel attempts after failure + state.hf.asyncJobId = null; + setHfStatus(`error: ${errMsg}`); + reject(new Error(errMsg)); + } else { + // Still processing + setHfStatus(`job ${state.hf.asyncJobId.substring(0, 8)}: ${status.status}... 
(${attempts})`); + } + + if (attempts >= maxAttempts) { + clearInterval(state.hf.asyncPollInterval); + reject(new Error("Polling timeout (10 minutes)")); + } + } catch (err) { + clearInterval(state.hf.asyncPollInterval); + reject(err); + } + }, pollInterval); + }); + } + + async function fetchProcessedVideo() { + const resp = await fetch(state.hf.videoUrl, { cache: "no-store" }); + + if (!resp.ok) { + if (resp.status === 202) { + const err = new Error("Video still processing"); + err.code = "VIDEO_PENDING"; + throw err; + } + throw new Error(`Failed to fetch video: ${resp.statusText}`); + } + + const nullOrigin = (window.location && window.location.origin) === "null"; + if (nullOrigin) { + // Avoid blob:null URLs when opened via file:// + state.hf.processedBlob = null; + state.hf.processedUrl = `${state.hf.videoUrl}?t=${Date.now()}`; + btnEngage.disabled = false; + log("Processed video ready (streaming URL)"); + return; + } + + const blob = await resp.blob(); + + // Revoke old URL if exists + if (state.hf.processedUrl && state.hf.processedUrl.startsWith("blob:")) { + URL.revokeObjectURL(state.hf.processedUrl); + } + + state.hf.processedBlob = blob; + state.hf.processedUrl = URL.createObjectURL(blob); + + btnEngage.disabled = false; + log(`Processed video ready (${(blob.size / 1024 / 1024).toFixed(1)} MB)`); + } + + function stopAsyncPolling() { + if (state.hf.asyncPollInterval) { + clearInterval(state.hf.asyncPollInterval); + state.hf.asyncPollInterval = null; + } + } + + + async function setEngageFeed(useProcessed) { + state.useProcessedFeed = !!useProcessed; + renderMissionContext(); + + if (!state.videoLoaded) return; + + const desired = (state.useProcessedFeed && state.hf.processedUrl) ? 
state.hf.processedUrl : null; + if (!desired) return; + + const wasPlaying = !videoEngage.paused; + const t = videoEngage.currentTime || 0; + + try { videoEngage.pause(); } catch (_) { } + + if (videoEngage.src !== desired) { + videoEngage.src = desired; + // Mark video element to show/hide based on whether it's processed content + videoEngage.setAttribute('data-processed', state.useProcessedFeed ? 'true' : 'false'); + log(`Video feed switched to: ${state.useProcessedFeed ? 'HF processed' : 'raw'} (data-processed=${state.useProcessedFeed})`, "t"); + videoEngage.load(); + await waitVideoReady(videoEngage); + try { videoEngage.currentTime = Math.min(t, (videoEngage.duration || t)); } catch (_) { } + } + + resizeOverlays(); + if (wasPlaying) { + try { await videoEngage.play(); } catch (_) { } + } + } + + async function startHfPipeline() { + if (state.hf.busy) { + log("HF pipeline already running"); + return; + } + + const { kind } = getDetectorSelection(); + + // Only process if using HF detectors (not local/external) + if (["local", "external"].includes(kind)) { + log("Skipping HF pipeline (not using HF detector)"); + return; + } + + state.hf.busy = true; + state.hf.lastError = null; + + // Background processing (non-blocking) + (async () => { + try { + // Run async detection (mission text will be used directly as queries if no manual labels provided) + await hfDetectAsync(); + + // Auto-switch to processed feed when ready + if (state.hf.processedUrl && !state.useProcessedFeed) { + log("Auto-switching to HF processed video feed (segmentation/detection overlays)", "g"); + await setEngageFeed(true); + } + + } catch (err) { + console.error("HF pipeline error:", err); + state.hf.lastError = err.message; + setHfStatus(`error: ${err.message}`); + log(`⚠ HF error: ${err.message}`); + } finally { + state.hf.busy = false; + } + })(); + } + + + detectorSelect.addEventListener("change", () => { + const sel = getDetectorSelection(); + state.detector.mode = sel.value; + 
state.detector.kind = sel.kind; + state.hf.detector = sel.value; + // local detector is still used for aimpoint math + tracking; HF Space provides mission-driven video detection. + ensureCocoDetector(); + renderMissionContext(); + log(`Detector mode set to: ${state.detector.mode}`, "t"); + }); + + trackerSelect.addEventListener("change", () => { + state.tracker.mode = trackerSelect.value; + log(`Tracker mode set to: ${state.tracker.mode}`, "t"); + }); + + async function ensureCocoDetector() { + if (state.detector.loaded || state.detector.loading) return; + if (state.detector.mode !== "coco") return; + + state.detector.loading = true; + setStatus("warn", "LOADING · Detector model"); + log("Loading COCO-SSD detector (browser model).", "t"); + + try { + await loadScriptOnce("tfjs", "https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@4.16.0/dist/tf.min.js"); + await loadScriptOnce("coco-ssd", "https://cdn.jsdelivr.net/npm/@tensorflow-models/coco-ssd@2.2.2/dist/coco-ssd.min.js"); + if (!window.cocoSsd || !window.tf) throw new Error("TF.js or cocoSsd not available."); + state.detector.model = await window.cocoSsd.load(); + state.detector.loaded = true; + log("COCO-SSD detector loaded.", "g"); + setStatus(state.videoLoaded ? "warn" : "warn", state.videoLoaded ? "READY · Video loaded (run Reason)" : "STANDBY · No video loaded"); + } catch (err) { + log(`Detector load failed: ${err.message}. 
Switch to External detector or use HF models.`, "w"); + setStatus("warn", "READY · Detector not loaded (use External or HF)"); + state.detector.loaded = false; + state.detector.model = null; + } finally { + state.detector.loading = false; + } + } + + const loadedScripts = new Map(); + function loadScriptOnce(key, src) { + return new Promise((resolve, reject) => { + if (loadedScripts.get(key) === "loaded") { resolve(); return; } + if (loadedScripts.get(key) === "loading") { + const iv = setInterval(() => { + if (loadedScripts.get(key) === "loaded") { clearInterval(iv); resolve(); } + if (loadedScripts.get(key) === "failed") { clearInterval(iv); reject(new Error("Script failed earlier")); } + }, 50); + return; + } + + loadedScripts.set(key, "loading"); + const s = document.createElement("script"); + s.src = src; + s.async = true; + s.onload = () => { loadedScripts.set(key, "loaded"); resolve(); }; + s.onerror = () => { loadedScripts.set(key, "failed"); reject(new Error(`Failed to load ${src}`)); }; + document.head.appendChild(s); + }); + } + + // Start loading detector opportunistically if selected. + ensureCocoDetector(); + + // ========= Core physics-lite model ========= + function getKnobs() { + const PkW = +helPower.value; + const aperture = +helAperture.value; + const M2 = +helM2.value; + const jitter_urad = +helJitter.value; + const duty = (+helDuty.value) / 100; + const mode = helMode.value; + + const vis_km = +atmVis.value; + const cn2 = +atmCn2.value; + const spray = +seaSpray.value; + const ao = +aoQ.value; + + const baseRange = +rangeBase.value; + + return { PkW, aperture, M2, jitter_urad, duty, mode, vis_km, cn2, spray, ao, baseRange }; + } + + // ========= External Hooks (API Integration Points) ========= + + /** + * Hook: Object Detection + * @param {Object} input { canvas, width, height } + * @returns {Promise} [{ bbox:[x,y,w,h], class:"label", score:0.95 }, ...] 
+ */ + async function externalDetect(input) { + // TODO: Call your object detection endpoint here + console.log("externalDetect called", input); + return []; + } + + /** + * Hook: Feature Extraction + * @param {Array} detections Array of detection objects + * @param {Object} frameInfo { width, height } + * @returns {Promise} Map of { "id": { reflectivity:0.5, ... } } + */ + async function externalFeatures(detections, frameInfo) { + // TODO: Call your feature extraction endpoint here + console.log("externalFeatures called for", detections.length, "objects"); + return {}; + } + + /** + * Hook: HEL synthesis + * @param {Array} detections Array of detection objects + * @param {Object} knobs HEL/atmosphere knobs + * @returns {Promise} { targets: {id: {...}}, system: {...} } + */ + async function externalHEL(detections, knobs) { + // TODO: Call your HEL model/service here + console.log("externalHEL called for", detections.length, "objects", knobs); + // Return minimal structure to keep UI running + return { + targets: {}, + system: { maxP_kW: 0, reqP_kW: 0, margin_kW: 0, medianRange_m: 0 } + }; + } + + /** + * Hook: External Tracker + * @param {HTMLVideoElement} videoEl + * @returns {Promise} [{ bbox:[x,y,w,h], class:"label", score:0.95 }, ...] + */ + async function externalTrack(videoEl) { + // TODO: Call your external tracker / vision system here + console.log("externalTrack called"); + return []; + } + + /** + * Hook: Mission Intel Summary + * @param {Array} frames Array of dataURLs or canvases + * @returns {Promise} Summary text + */ + async function externalIntel(frames) { + // TODO: Call your VLM / Intel summary endpoint here + console.log("externalIntel called with", frames.length, "frames"); + return "Video processed. 
No external intel provider connected."; + } + + // ========= Core Physics & Logic Adapters ========= + function getKnobs() { + const PkW = +helPower.value; + const aperture = +helAperture.value; + const M2 = +helM2.value; + const jitter_urad = +helJitter.value; + const duty = (+helDuty.value) / 100; + const mode = helMode.value; + const vis_km = +atmVis.value; + const cn2 = +atmCn2.value; + const spray = +seaSpray.value; + const ao = +aoQ.value; + const baseRange = +rangeBase.value; + return { PkW, aperture, M2, jitter_urad, duty, mode, vis_km, cn2, spray, ao, baseRange }; + } + + // ========= Safe Stubs for Client-Side Visualization (Tab 2 / Tab 3) ========= + // These functions were removed to allow backend control, but are mocked here + // to prevent UI crashes in the Engagement/Trade tabs until you wire them up. + + function maxPowerAtTarget(range_m) { + // Placeholder: return 0 or simple fallback + return { Ptar: 0, Pout: 0, trans: 0, turb: 0, beam: 0 }; + } + + function requiredPowerFromFeatures(feat) { return 10; } // Safe default + + function requiredDwell(range_m, reqP, maxP, baseDwell) { return 1.0; } // Safe default + + function pkillFromMargin(margin_kW, dwell_s, reqDwell_s) { return 0; } + + + + // ========= Aimpoint rules (default) ========= + function defaultAimpoint(label) { + const l = (label || "object").toLowerCase(); + if (l.includes("airplane") || l.includes("drone") || l.includes("uav") || l.includes("kite") || l.includes("bird")) { + return { relx: 0.62, rely: 0.55, label: "engine" }; + } + if (l.includes("helicopter")) { + return { relx: 0.50, rely: 0.45, label: "rotor_hub" }; + } + if (l.includes("boat") || l.includes("ship")) { + return { relx: 0.60, rely: 0.55, label: "bridge/engine" }; + } + if (l.includes("truck") || l.includes("car")) { + return { relx: 0.55, rely: 0.62, label: "engine_block" }; + } + return { relx: 0.50, rely: 0.55, label: "center_mass" }; + } + + // ========= Feature generation (hookable) ========= + // (Merged into 
externalFeatures above) + + + // [Deleted] synthFeatures, hashString, mulberry32, pick + + + // ========= Detector hook ========= + // ========= Detector hook ========= + // (This block is merged into externalDetect above, removing old declaration) + + + function canvasToBlob(canvas, quality = 0.88) { + return new Promise((resolve, reject) => { + if (!canvas.toBlob) { reject(new Error("Canvas.toBlob not supported")); return; } + canvas.toBlob(blob => { + if (!blob) { reject(new Error("Canvas toBlob failed")); return; } + resolve(blob); + }, "image/jpeg", quality); + }); + } + + async function callHfObjectDetection(modelId, canvas) { + const proxyBase = (API_CONFIG.PROXY_URL || "").trim(); + if (proxyBase) { + const blob = await canvasToBlob(canvas); + const form = new FormData(); + form.append("model", modelId); + form.append("image", blob, "frame.jpg"); + const resp = await fetch(`${proxyBase.replace(/\/$/, "")}/detect`, { + method: "POST", + body: form + }); + if (!resp.ok) { + let detail = `Proxy inference failed (${resp.status})`; + try { + const err = await resp.json(); + detail = err.detail || err.error || detail; + } catch (_) { } + throw new Error(detail); + } + const payload = await resp.json(); + if (!Array.isArray(payload)) throw new Error("Unexpected proxy response format."); + return payload; + } + + const token = API_CONFIG.HF_TOKEN; + if (!token) throw new Error("HF token missing (config.js)."); + + const blob = await canvasToBlob(canvas); + const base = (API_CONFIG.HF_INFERENCE_BASE || "https://router.huggingface.co/hf-inference/models").replace(/\/$/, ""); + const resp = await fetch(`${base}/${modelId}`, { + method: "POST", + headers: { Authorization: `Bearer ${token}` }, + body: blob + }); + if (!resp.ok) { + let detail = `HF inference failed (${resp.status})`; + try { + const err = await resp.json(); + detail = err.error || err.detail || detail; + } catch (_) { } + throw new Error(detail); + } + const payload = await resp.json(); + if 
(!Array.isArray(payload)) throw new Error("Unexpected HF response format."); + return payload.map(p => { + const b = p.box || p.bbox || p.bounding_box || {}; + const xmin = b.xmin ?? b.x ?? 0; + const ymin = b.ymin ?? b.y ?? 0; + const xmax = b.xmax ?? (b.x + (b.w || 0)) ?? 0; + const ymax = b.ymax ?? (b.y + (b.h || 0)) ?? 0; + return { + bbox: [xmin, ymin, Math.max(1, xmax - xmin), Math.max(1, ymax - ymin)], + class: p.label || p.class || "object", + score: p.score ?? p.confidence ?? 0 + }; + }); + } + + async function detectWithCoco(inputForModel, applyMissionFilter) { + await ensureCocoDetector(); + if (!state.detector.model) { + log("Detector model not available in this browser. Switch to External detector or use HF models.", "w"); + return []; + } + let preds = await state.detector.model.detect(inputForModel); + if (applyMissionFilter) preds = filterPredsByMission(preds); + + const filtered = preds + .filter(p => p.score >= 0.45) + .slice(0, 14) + .map(p => ({ bbox: p.bbox, class: p.class, score: p.score })); + + if (!filtered.length) { + log("Detector returned no confident objects for this frame.", "w"); + } + return filtered; + } + + async function waitForBackendDetections(timeoutMs = 2000) { + const start = Date.now(); + while ((Date.now() - start) < timeoutMs) { + if (Array.isArray(state.hf.firstFrameDetections)) { + return state.hf.firstFrameDetections; + } + await new Promise(resolve => setTimeout(resolve, 100)); + } + return null; + } + + async function runDetectOnFrame() { + const w = state.frame.w, h = state.frame.h; + const inputForModel = frameCanvas; // canvas contains the first frame + const sel = getDetectorSelection(); + const mode = sel.value; + const kind = sel.kind; + + if (mode === "coco") { + return await detectWithCoco(inputForModel, false); + } + + if (mode === "external") { + try { + const res = await externalDetect({ canvas: frameCanvas, width: w, height: h }); + if (Array.isArray(res)) return res; + log("External detector returned 
invalid response.", "w"); + return []; + } catch (err) { + log(`External detector failed: ${err.message}`, "w"); + return []; + } + } + + if (kind === "segmentation") { + // For segmentation, we don't have instant local inference + // User needs to process full video via HF async endpoint + log("Segmentation requires full video processing via HF backend"); + return []; + } + + if (kind === "drone") { + const backendDets = await waitForBackendDetections(); + if (Array.isArray(backendDets) && backendDets.length) { + return backendDets.map(d => { + const bbox = Array.isArray(d.bbox) ? d.bbox : [0, 0, 1, 1]; + const x1 = bbox[0] || 0; + const y1 = bbox[1] || 0; + const x2 = bbox[2] || 0; + const y2 = bbox[3] || 0; + return { + bbox: [x1, y1, Math.max(1, x2 - x1), Math.max(1, y2 - y1)], + class: d.label || "drone", + score: d.score ?? 0 + }; + }); + } + // Same for drone detection + log("Drone detection requires full video processing via HF backend"); + return []; + } + + if (kind === "object") { + // HF object detection models + if (["hf_yolov8", "detr_resnet50", "grounding_dino"].includes(mode)) { + const backendDets = await waitForBackendDetections(); + if (Array.isArray(backendDets) && backendDets.length) { + return backendDets.map(d => { + const bbox = Array.isArray(d.bbox) ? d.bbox : [0, 0, 1, 1]; + const x1 = bbox[0] || 0; + const y1 = bbox[1] || 0; + const x2 = bbox[2] || 0; + const y2 = bbox[3] || 0; + return { + bbox: [x1, y1, Math.max(1, x2 - x1), Math.max(1, y2 - y1)], + class: d.label || "object", + score: d.score ?? 
0 + }; + }); + } + // For first-frame detection, we can show a placeholder or skip + // The actual detections come from the async endpoint + log(`${mode} requires backend async processing`); + return []; + } else { + // Fallback to COCO if unknown + return await detectWithCoco(inputForModel, false); + } + } + + return []; + } + + + // ========= Render first frame ======== + function drawFirstFrame() { + const ctx = frameCanvas.getContext("2d"); + const w = state.frame.w, h = state.frame.h; + frameCanvas.width = w; frameCanvas.height = h; + frameOverlay.width = w; frameOverlay.height = h; + + ctx.clearRect(0, 0, w, h); + + // Check if we have HF processed first frame (segmentation or object detection with overlays) + if (state.hf.firstFrameUrl) { + // HF backend will draw the processed frame via displayAsyncFirstFrame() + // Don't draw dark background - let the processed image show through + log("Waiting for HF processed first frame to display...", "t"); + return; + } + + // For local detection: show dark background, no original frame + ctx.fillStyle = "#0b1026"; + ctx.fillRect(0, 0, w, h); + + if (!state.frame.bitmap) { + ctx.fillStyle = "rgba(255,255,255,.65)"; + ctx.font = "16px " + getComputedStyle(document.body).fontFamily; + ctx.fillText("No frame available", 18, 28); + return; + } + + // Original frame bitmap is NOT drawn for local detection - only processed results will be displayed + // ctx.drawImage(state.frame.bitmap, 0, 0, w, h); + } + + // ========= Agent cursor (optional, purely visual) ========= + function ensureAgentCursorOverlay() { + if ($("#agentCursor")) return; + const el = document.createElement("div"); + el.id = "agentCursor"; + el.style.position = "fixed"; + el.style.zIndex = "9999"; + el.style.width = "14px"; + el.style.height = "14px"; + el.style.borderRadius = "999px"; + el.style.pointerEvents = "none"; + el.style.background = "radial-gradient(circle at 30% 30%, rgba(34,211,238,.95), rgba(124,58,237,.65))"; + el.style.boxShadow = "0 0 18px 
rgba(34,211,238,.55), 0 0 46px rgba(124,58,237,.25)"; + el.style.border = "1px solid rgba(255,255,255,.25)"; + el.style.opacity = "0"; + document.body.appendChild(el); + } + + function setCursorVisible(v) { + ensureAgentCursorOverlay(); + const el = $("#agentCursor"); + el.style.opacity = v ? "1" : "0"; + state.ui.agentCursor.visible = v; + } + + function moveCursorToRect(rect, mode = "glide") { + state.ui.agentCursor.target = rect; + state.ui.agentCursor.mode = mode; + state.ui.agentCursor.t0 = now(); + setCursorVisible(state.ui.cursorMode === "on"); + } + + function tickAgentCursor() { + const el = $("#agentCursor"); + if (!el || state.ui.cursorMode !== "on" || !state.ui.agentCursor.visible) return; + const c = state.ui.agentCursor; + if (!c.target) return; + + const tx = c.target.left + c.target.width * 0.72; + const ty = c.target.top + c.target.height * 0.50; + + // smooth spring + const ease = 0.12; + const dx = tx - (c.x * window.innerWidth); + const dy = ty - (c.y * window.innerHeight); + c.vx = (c.vx + dx * 0.0018) * 0.85; + c.vy = (c.vy + dy * 0.0018) * 0.85; + + const px = (c.x * window.innerWidth) + c.vx * 18; + const py = (c.y * window.innerHeight) + c.vy * 18; + c.x = clamp(px / window.innerWidth, 0.02, 0.98); + c.y = clamp(py / window.innerHeight, 0.02, 0.98); + + el.style.transform = `translate(${c.x * window.innerWidth}px, ${c.y * window.innerHeight}px)`; + + // hide after settle + const settle = Math.hypot(dx, dy); + if (settle < 6 && (now() - c.t0) > 650) { + // keep visible but soften + el.style.opacity = "0.75"; + } + } + + cursorMode.addEventListener("change", () => { + state.ui.cursorMode = cursorMode.value; + if (state.ui.cursorMode === "off") setCursorVisible(false); + }); + + // ========= Reason pipeline (Tab 1) ========= + btnReason.addEventListener("click", async () => { + if (!state.videoLoaded) { + log("No video loaded. 
Upload a video first.", "w"); + setStatus("warn", "READY · Upload a video"); + return; + } + + // Prevent concurrent executions + if (state.isReasoning) { + log("Reason already in progress. Please wait for it to complete.", "w"); + return; + } + + // Lock the Reason process + state.isReasoning = true; + btnReason.disabled = true; + btnReason.style.opacity = "0.5"; + btnReason.style.cursor = "not-allowed"; + + // Show Cancel button + btnCancelReason.style.display = "inline-block"; + + // Reset previous processed video output before new run + if (state.hf.processedUrl && state.hf.processedUrl.startsWith("blob:")) { + try { URL.revokeObjectURL(state.hf.processedUrl); } catch (_) { } + } + state.hf.processedUrl = null; + state.hf.processedBlob = null; + state.useProcessedFeed = false; + btnEngage.disabled = true; + videoEngage.removeAttribute("src"); + videoEngage.load(); + + // Clear previous detections before running new detection + state.detections = []; + state.selectedId = null; + renderObjectList(); + renderFrameOverlay(); + renderSummary(); + renderFeatures(null); + renderTrade(); + + setStatus("warn", "REASONING · Running perception pipeline"); + // Start mission-driven HF backend (planning → video detection) in parallel. + startHfPipeline(); + log("Reason started: detection → features → HEL synthesis.", "t"); + + // a little agent cursor flair + if (state.ui.cursorMode === "on") { + moveCursorToRect(btnReason.getBoundingClientRect()); + setTimeout(() => moveCursorToRect(frameCanvas.getBoundingClientRect()), 260); + setTimeout(() => moveCursorToRect(objList.getBoundingClientRect()), 560); + setTimeout(() => moveCursorToRect(summaryTable.getBoundingClientRect()), 880); + } + + try { + // Mission objective is optional: + // - If blank: run unbiased detection across all classes immediately (no server wait). + // - If provided: still show immediate first-frame results, while HF computes mission focus in the background. 
+ const missionPromptRaw = (missionText?.value || "").trim(); + if (!missionPromptRaw) { + state.hf.plan = null; + state.hf.missionId = null; + renderMissionContext(); + setHfStatus("processing (all objects, background)…"); + } else { + // Mission objective will be used directly by the detector + setHfStatus("processing (mission-focused, background)…"); + } + + + await captureFirstFrame(); + drawFirstFrame(); + + const dets = await runDetectOnFrame(); + + state.detections = dets.map((d, i) => { + const id = `T${String(i + 1).padStart(2, "0")}`; + const ap = defaultAimpoint(d.class); + return { + id, + label: d.class, + score: d.score, + bbox: normBBox(d.bbox, state.frame.w, state.frame.h), + aim: { ...ap }, // rel inside bbox + features: null, + baseRange_m: null, + baseAreaFrac: null, + baseDwell_s: null, + reqP_kW: null, + maxP_kW: null, + pkill: null + }; + }); + + // range estimate calibration + // [Deleted] calibrateRanges(); + + // feature generation + const featureMap = await externalFeatures(state.detections, { width: state.frame.w, height: state.frame.h }); + if (featureMap) { + state.detections.forEach(d => { + const f = featureMap[d.id] || featureMap[d.label] || null; + if (f) d.features = f; + }); + log("Features populated from external hook.", "g"); + } else { + // Fallback if no external features: empty object + state.detections.forEach(d => d.features = {}); + log("No external features provided.", "t"); + } + + // If external features provide aimpoint label, align aimpoint marker + state.detections.forEach(d => { + if (d.features && d.features.aimpoint_label) { + const apLabel = String(d.features.aimpoint_label); + d.aim.label = apLabel; + // keep rel location but slightly adjust by label type + const ap = aimpointByLabel(apLabel); + d.aim.relx = ap.relx; + d.aim.rely = ap.rely; + } + }); + + // compute HEL synthesis (now async) + await recomputeHEL(); + + // pick default selection + state.selectedId = state.detections[0]?.id || null; + 
        // (continuation of the btnReason click handler's try block — the handler opens earlier in the file)
        renderObjectList();
        renderFrameOverlay();
        renderSummary();
        renderFeatures(getSelected());
        renderTrade();

        state.hasReasoned = true;
        setStatus("good", "READY · Reason complete (you can Engage)");
        log("Reason complete.", "g");

        // Generate intel summary (async)
        computeIntelSummary();
      } catch (err) {
        setStatus("bad", "ERROR · Reason failed");
        log(`Reason failed: ${err.message}`, "e");
        console.error(err);
      } finally {
        // Always unlock the Reason process
        state.isReasoning = false;
        btnReason.disabled = false;
        btnReason.style.opacity = "1";
        btnReason.style.cursor = "pointer";

        // Hide Cancel button
        btnCancelReason.style.display = "none";
      }
    });

    // Cancel button handler: aborts an in-flight Reason run.
    btnCancelReason.addEventListener("click", () => {
      cancelReasoning();
    });

    // Re-run only the HEL synthesis with the current knob values; no new detection pass.
    btnRecompute.addEventListener("click", () => {
      if (!state.hasReasoned) return;
      recomputeHEL();
      renderSummary();
      renderFrameOverlay();
      renderTrade();
      log("Recomputed HEL metrics using current knobs (no new detection).", "t");
    });

    // Wipe all Tab 1 outputs and fully reset the Reason button/lock state.
    btnClear.addEventListener("click", () => {
      state.detections = [];
      state.selectedId = null;
      state.hasReasoned = false;
      state.isReasoning = false; // Reset reasoning lock
      btnReason.disabled = false; // Re-enable button if it was locked
      btnReason.style.opacity = "1";
      btnReason.style.cursor = "pointer";
      btnCancelReason.style.display = "none"; // Hide Cancel button
      renderObjectList();
      renderFrameOverlay();
      renderSummary();
      renderFeatures(null);
      renderTrade();
      log("Cleared Tab 1 outputs.", "w");
      setStatus("warn", state.videoLoaded ? "READY · Video loaded (run Reason)" : "STANDBY · No video loaded");
    });

    // Map an aimpoint label (e.g. from external features) to a relative (relx, rely)
    // position inside a target's bbox; falls back to center-mass.
    function aimpointByLabel(label) {
      const l = String(label || "").toLowerCase();
      if (l.includes("engine") || l.includes("fuel")) return { relx: 0.64, rely: 0.58, label: label };
      if (l.includes("wing")) return { relx: 0.42, rely: 0.52, label: label };
      if (l.includes("nose") || l.includes("sensor")) return { relx: 0.28, rely: 0.48, label: label };
      if (l.includes("rotor")) return { relx: 0.52, rely: 0.42, label: label };
      return { relx: 0.50, rely: 0.55, label: label || "center_mass" };
    }

    // Clamp a [x, y, w, h] pixel bbox into frame bounds and return it as an object.
    function normBBox(bbox, w, h) {
      const [x, y, bw, bh] = bbox;
      return {
        x: clamp(x, 0, w - 1),
        y: clamp(y, 0, h - 1),
        w: clamp(bw, 1, w),
        h: clamp(bh, 1, h)
      };
    }

    // [Deleted] calibrateRanges

    // Recompute per-target HEL metrics via the external HEL engine and refresh the
    // headline system stats (max/required power, margin, plan).
    async function recomputeHEL() {
      if (!state.detections.length) return;
      const knobs = getKnobs();
      summaryStamp.textContent = "Computing...";

      try {
        const result = await externalHEL(state.detections, knobs);
        const metrics = result.targets || {};
        const sys = result.system || {};

        // Fold per-target results back onto the detection records.
        state.detections.forEach(d => {
          const r = metrics[d.id] || {};
          d.maxP_kW = r.maxP || 0;
          d.reqP_kW = r.reqP || 0;
          d.baseDwell_s = r.dwell || 0;
          d.pkill = r.pkill || 0;
        });

        // Update system headline stats
        mMaxP.textContent = sys.maxP ? `${sys.maxP} kW` : "—";
        mReqP.textContent = sys.reqP ? `${sys.reqP} kW` : "—";
        const margin = sys.margin || 0;
        mMargin.textContent = `${margin > 0 ? "+" : ""}${margin} kW`;
        mMargin.style.color = margin >= 0 ?
"rgba(34,197,94,.95)" : "rgba(239,68,68,.95)"; + + mMaxPSub.textContent = "Calculated by external HEL engine"; + + // Simple ranking for plan + const ranked = state.detections.slice().sort((a, b) => (b.pkill || 0) - (a.pkill || 0)); + if (ranked.length && ranked[0].pkill > 0) { + mPlan.textContent = `${ranked[0].id} → Engage`; + mPlanSub.textContent = "Highest P(kill) target"; + } else { + mPlan.textContent = "—"; + mPlanSub.textContent = "No viable targets"; + } + + } catch (err) { + console.error("HEL recompute failed", err); + } + + summaryStamp.textContent = new Date().toLocaleTimeString(); + renderSummary(); + refreshTradeTargets(); + } + + function getSelected() { + return state.detections.find(d => d.id === state.selectedId) || null; + } + + // ========= Rendering: Object list, features, summary table ========= + function renderObjectList() { + objList.innerHTML = ""; + objCount.textContent = `${state.detections.length}`; + if (!state.detections.length) { + const empty = document.createElement("div"); + empty.className = "mini"; + empty.style.padding = "8px"; + empty.textContent = "No detections yet. Click Reason."; + objList.appendChild(empty); + return; + } + + const dets = state.detections.slice(); + // Sort by confidence (mission filtering is handled by backend) + dets.sort((a, b) => ((b.score || 0) - (a.score || 0))); + + dets.forEach(d => { + const div = document.createElement("div"); + div.className = "obj" + (d.id === state.selectedId ? " active" : ""); + div.dataset.id = d.id; + + const rangeTxt = d.baseRange_m ? `${Math.round(d.baseRange_m)} m` : "—"; + const dwellTxt = d.baseDwell_s ? `${d.baseDwell_s.toFixed(1)} s` : "—"; + const pkTxt = (d.pkill != null) ? `${Math.round(d.pkill * 100)}%` : "—"; + + div.innerHTML = ` +
+
+
${d.id}
+
${escapeHtml(d.label)}
+
+
${isMissionFocusLabel(d.label) ? `FOCUS` : ""}
${Math.round(d.score * 100)}%
+
+
+ RANGE:${rangeTxt} + DWELL:${dwellTxt} + P(k):${pkTxt} + AIM:${escapeHtml(d.aim?.label || "center")} +
+ `; + + div.addEventListener("click", () => { + state.selectedId = d.id; + renderObjectList(); + renderFeatures(d); + renderFrameOverlay(); + renderTrade(); + }); + + objList.appendChild(div); + }); + } + + function renderFeatures(det) { + selId.textContent = det ? det.id : "—"; + const tbody = featureTable.querySelector("tbody"); + tbody.innerHTML = ""; + if (!det) { + tbody.innerHTML = `—No target selected`; + return; + } + const feats = det.features || {}; + const keys = Object.keys(feats); + const show = keys.slice(0, 12); + + show.forEach(k => { + const tr = document.createElement("tr"); + tr.innerHTML = `${escapeHtml(k)}${escapeHtml(String(feats[k]))}`; + tbody.appendChild(tr); + }); + + if (show.length < 10) { + for (let i = show.length; i < 10; i++) { + const tr = document.createElement("tr"); + tr.innerHTML = `—awaiting additional expert outputs`; + tbody.appendChild(tr); + } + } + } + + function renderSummary() { + const tbody = summaryTable.querySelector("tbody"); + tbody.innerHTML = ""; + if (!state.detections.length) { + tbody.innerHTML = `—No outputs yet. Click Reason.`; + mMaxP.textContent = "—"; + mReqP.textContent = "—"; + mMargin.textContent = "—"; + mPlan.textContent = "—"; + return; + } + + state.detections.forEach(d => { + const tr = document.createElement("tr"); + const range = d.baseRange_m ?? 0; + const reqP = d.reqP_kW ?? 0; + const maxP = d.maxP_kW ?? 0; + const dwell = d.baseDwell_s ?? 0; + const p = d.pkill ?? 
0; + + tr.innerHTML = ` + ${d.id} + ${escapeHtml(d.label)} + ${Math.round(range)} + ${escapeHtml(d.aim?.label || "center")} + ${reqP.toFixed(1)} kW + ${maxP.toFixed(1)} kW + ${dwell.toFixed(1)} s + ${Math.round(p * 100)}% + `; + tbody.appendChild(tr); + }); + } + + function escapeHtml(s) { + return s.replace(/[&<>"']/g, m => ({ "&": "&", "<": "<", ">": ">", '"': """, "'": "'" }[m])); + } + + // ========= Frame overlay rendering ========= + function renderFrameOverlay() { + const ctx = frameOverlay.getContext("2d"); + const w = frameOverlay.width, h = frameOverlay.height; + ctx.clearRect(0, 0, w, h); + + if (!state.detections.length) return; + + // subtle scanning effect + const t = now() / 1000; + const scanX = (Math.sin(t * 0.65) * 0.5 + 0.5) * w; + ctx.fillStyle = "rgba(34,211,238,.06)"; + ctx.fillRect(scanX - 8, 0, 16, h); + + state.detections.forEach((d, idx) => { + const isSel = d.id === state.selectedId; + + const b = d.bbox; + const pad = 2; + + // box + ctx.lineWidth = isSel ? 3 : 2; + const isFocus = isMissionFocusLabel(d.label); + ctx.strokeStyle = isSel ? "rgba(34,211,238,.95)" : (isFocus ? "rgba(34,211,238,.70)" : "rgba(124,58,237,.55)"); + ctx.shadowColor = isSel ? "rgba(34,211,238,.40)" : "rgba(124,58,237,.25)"; + ctx.shadowBlur = isSel ? 18 : 10; + roundRect(ctx, b.x, b.y, b.w, b.h, 10, false, true); + + // pseudo mask glow (for segmentation-like effect) + ctx.shadowBlur = 0; + const g = ctx.createRadialGradient(b.x + b.w * 0.5, b.y + b.h * 0.5, 10, b.x + b.w * 0.5, b.y + b.h * 0.5, Math.max(b.w, b.h) * 0.75); + g.addColorStop(0, isSel ? "rgba(34,211,238,.16)" : "rgba(124,58,237,.10)"); + g.addColorStop(1, "rgba(0,0,0,0)"); + ctx.fillStyle = g; + ctx.fillRect(b.x, b.y, b.w, b.h); + + // aimpoint marker (red circle + crosshair) + const ax = b.x + b.w * d.aim.relx; + const ay = b.y + b.h * d.aim.rely; + drawAimpoint(ctx, ax, ay, isSel); + + // label + const range = Math.round(d.baseRange_m || 0); + const dwell = (d.baseDwell_s != null) ? 
        d.baseDwell_s.toFixed(1) : "—";
        const pk = (d.pkill != null) ? Math.round(d.pkill * 100) : "—";

        ctx.font = "bold 14px " + getComputedStyle(document.body).fontFamily;
        const tag = `${d.id} · ${d.label} · R=${range}m · DWELL=${dwell}s · Pk=${pk}%`;
        const tw = ctx.measureText(tag).width;
        const tx = clamp(b.x, 6, w - tw - 12);
        const ty = clamp(b.y - 12, 18, h - 12);

        ctx.fillStyle = "rgba(0,0,0,.50)";
        ctx.strokeStyle = "rgba(255,255,255,.14)";
        ctx.lineWidth = 1;
        roundRect(ctx, tx - 6, ty - 14, tw + 12, 18, 8, true, true);

        ctx.fillStyle = "rgba(255,255,255,.86)";
        ctx.fillText(tag, tx, ty);
      });

      // click-to-select on canvas (manual aimpoint override can be added later)
      frameOverlay.style.pointerEvents = "auto";
      frameOverlay.onclick = (ev) => {
        const rect = frameOverlay.getBoundingClientRect();
        const sx = frameOverlay.width / rect.width;
        const sy = frameOverlay.height / rect.height;
        const x = (ev.clientX - rect.left) * sx;
        const y = (ev.clientY - rect.top) * sy;

        // Smallest containing bbox wins so overlapping/nested targets stay selectable.
        const hit = state.detections
          .map(d => ({ d, inside: x >= d.bbox.x && x <= d.bbox.x + d.bbox.w && y >= d.bbox.y && y <= d.bbox.y + d.bbox.h }))
          .filter(o => o.inside)
          .sort((a, b) => (a.d.bbox.w * a.d.bbox.h) - (b.d.bbox.w * b.d.bbox.h))[0];

        if (hit) {
          state.selectedId = hit.d.id;
          renderObjectList();
          renderFeatures(hit.d);
          renderFrameOverlay();
          renderTrade();
        }
      };
    }

    // Path a rounded rectangle on ctx; optionally fill and/or stroke it.
    // Radius is clamped so it never exceeds half of either side.
    function roundRect(ctx, x, y, w, h, r, fill, stroke) {
      if (w < 2 * r) r = w / 2;
      if (h < 2 * r) r = h / 2;
      ctx.beginPath();
      ctx.moveTo(x + r, y);
      ctx.arcTo(x + w, y, x + w, y + h, r);
      ctx.arcTo(x + w, y + h, x, y + h, r);
      ctx.arcTo(x, y + h, x, y, r);
      ctx.arcTo(x, y, x + w, y, r);
      ctx.closePath();
      if (fill) ctx.fill();
      if (stroke) ctx.stroke();
    }

    // Draw the red aimpoint marker: glowing circle, white crosshair ticks, center dot.
    // Selected targets get a slightly larger, brighter marker.
    function drawAimpoint(ctx, x, y, isSel) {
      ctx.save();
      ctx.shadowBlur = isSel ? 18 : 12;
      ctx.shadowColor = "rgba(239,68,68,.45)";
      ctx.strokeStyle = "rgba(239,68,68,.95)";
      ctx.lineWidth = isSel ? 3 : 2;
      ctx.beginPath();
      ctx.arc(x, y, isSel ? 10 : 9, 0, Math.PI * 2);
      ctx.stroke();

      ctx.shadowBlur = 0;
      ctx.strokeStyle = "rgba(255,255,255,.70)";
      ctx.lineWidth = 1.5;
      ctx.beginPath();
      ctx.moveTo(x - 14, y); ctx.lineTo(x - 4, y);
      ctx.moveTo(x + 4, y); ctx.lineTo(x + 14, y);
      ctx.moveTo(x, y - 14); ctx.lineTo(x, y - 4);
      ctx.moveTo(x, y + 4); ctx.lineTo(x, y + 14);
      ctx.stroke();

      ctx.fillStyle = "rgba(239,68,68,.95)";
      ctx.beginPath();
      ctx.arc(x, y, 2.5, 0, Math.PI * 2);
      ctx.fill();
      ctx.restore();
    }

    // ========= Engage tab: tracking + dynamic dwell =========
    // Start the closed-loop engagement: play the video, enable tracker + beam, and
    // seed tracks (Tab 1 detections preferred; otherwise a live detector pass).
    btnEngage.addEventListener("click", async () => {
      if (!state.videoLoaded) { log("No video loaded for Engage.", "w"); return; }
      if (!state.hf.processedUrl) { log("Processed video not ready yet. Wait for completion.", "w"); return; }
      if (!state.hasReasoned) { log("Run Reason first to initialize baseline dwell and aimpoints.", "w"); return; }

      if (videoEngage.paused) {
        try {
          await videoEngage.play();
        } catch (err) {
          // Autoplay policies require a user gesture first.
          log("Video play failed (browser policy). Click inside the page then try Engage again.", "w");
          return;
        }
      }

      state.tracker.running = true;
      state.tracker.beamOn = true;
      state.tracker.lastDetTime = 0;
      state.tracker.lastFrameTime = now();
      engageNote.textContent = "Running";
      chipBeam.textContent = "BEAM:ON";
      log("Engage started: tracking enabled, dwell accumulation active.", "g");

      // Initialize tracks:
      // - Prefer Tab 1 detections if available (same first-frame context)
      // - Otherwise, seed from the current video frame (actual detector output)
      if (!state.tracker.tracks.length) {
        if (state.detections && state.detections.length) {
          seedTracksFromTab1();
        } else {
          const dets = await detectOnVideoFrame();
          if (dets && dets.length) {
            seedTracksFromDetections(dets);
            log(`Seeded ${state.tracker.tracks.length} tracks from video-frame detections.`, "t");
          } else {
            log("No detections available to seed tracks yet. Tracks will appear as detections arrive.", "w");
          }
        }
      }

      resizeOverlays();
      startLoop();
    });

    // Pause playback and drop the beam; tracker state is preserved for resume.
    btnPause.addEventListener("click", () => {
      if (!state.videoLoaded) return;
      if (!videoEngage.paused) {
        videoEngage.pause();
        log("Video paused.", "t");
      }
      state.tracker.beamOn = false;
      chipBeam.textContent = "BEAM:OFF";
    });

    // Rewind to t=0 and clear dwell/kill state on all tracks; stops the tracker.
    btnReset.addEventListener("click", async () => {
      if (!state.videoLoaded) return;
      videoEngage.pause();
      await seekTo(videoEngage, 0);
      state.tracker.tracks.forEach(t => { t.dwellAccum = 0; t.killed = false; t.state = "TRACK"; });
      state.tracker.selectedTrackId = null;
      state.tracker.beamOn = false;
      state.tracker.running = false;
      dwellBar.style.width = "0%";
      dwellText.textContent = "—";
      engageNote.textContent = "paused";
      chipBeam.textContent = "BEAM:OFF";
      log("Engage reset: video rewound, dwell cleared.", "w");
      renderRadar();
      renderTrackCards();
      renderEngageOverlay();
    });

    // Toggle sidebar (radar + live tracks) for fullscreen video
    btnToggleSidebar.addEventListener("click", () => {
      const
      engageGrid = $(".engage-grid");
      const isCollapsed = engageGrid.classList.contains("sidebar-collapsed");

      if (isCollapsed) {
        engageGrid.classList.remove("sidebar-collapsed");
        btnToggleSidebar.textContent = "◀ Hide Sidebar";
        log("Sidebar expanded.", "t");
      } else {
        engageGrid.classList.add("sidebar-collapsed");
        btnToggleSidebar.textContent = "▶ Show Sidebar";
        log("Sidebar collapsed - video fullscreen.", "t");
      }
    });

    // Seed tracker state from the Tab 1 first-frame detections (IDs are preserved
    // so Engage-tab tracks line up with the Reason-tab target list).
    function seedTracksFromTab1() {
      state.tracker.tracks = state.detections.map(d => {
        const t = {
          id: d.id,
          label: d.label,
          bbox: { ...d.bbox },
          score: d.score,
          aimRel: { relx: d.aim.relx, rely: d.aim.rely, label: d.aim.label },
          baseAreaFrac: d.baseAreaFrac || ((d.bbox.w * d.bbox.h) / (state.frame.w * state.frame.h)),
          baseRange_m: d.baseRange_m || +rangeBase.value,
          baseDwell_s: d.baseDwell_s || 4.0,
          reqP_kW: d.reqP_kW || 35,
          lastSeen: now(),
          vx: 0, vy: 0,
          dwellAccum: 0,
          killed: false,
          state: "TRACK", // TRACK -> SETTLE -> FIRE -> ASSESS -> KILL
          assessT: 0
        };
        return t;
      });
      state.tracker.nextId = state.detections.length + 1;
      log(`Seeded ${state.tracker.tracks.length} tracks from Tab 1 detections.`, "t");
    }

    // Seed tracker state directly from live video-frame detections (capped at 12),
    // using default aimpoints and baseline dwell/power values.
    function seedTracksFromDetections(dets) {
      const w = videoEngage.videoWidth || state.frame.w;
      const h = videoEngage.videoHeight || state.frame.h;

      state.tracker.tracks = dets.slice(0, 12).map((d, i) => {
        const id = `T${String(i + 1).padStart(2, "0")}`;
        const ap = defaultAimpoint(d.class);
        const bb = normBBox(d.bbox, w, h);
        return {
          id,
          label: d.class,
          bbox: { ...bb },
          score: d.score,
          aimRel: { relx: ap.relx, rely: ap.rely, label: ap.label },
          baseAreaFrac: (bb.w * bb.h) / (w * h),
          baseRange_m: +rangeBase.value,
          baseDwell_s: 5.0,
          reqP_kW: 40,
          lastSeen: now(),
          vx: 0, vy: 0,
          dwellAccum: 0,
          killed: false,
          state: "TRACK",
          assessT: 0
        };
      });
      state.tracker.nextId = state.tracker.tracks.length + 1;
    }

    // Intersection-over-union of two {x, y, w, h} boxes; returns 0 for an empty union.
    function iou(a, b) {
      const ax2 = a.x + a.w, ay2 = a.y + a.h;
      const bx2 = b.x + b.w, by2 = b.y + b.h;
      const ix1 = Math.max(a.x, b.x), iy1 = Math.max(a.y, b.y);
      const ix2 = Math.min(ax2, bx2), iy2 = Math.min(ay2, by2);
      const iw = Math.max(0, ix2 - ix1), ih = Math.max(0, iy2 - iy1);
      const inter = iw * ih;
      const ua = a.w * a.h + b.w * b.h - inter;
      return ua <= 0 ? 0 : inter / ua;
    }

    async function externalTrack(videoEl) {
      // Hook for user tracking: should return predictions similar to detector output
      if (typeof window.__HEL_TRACK__ === "function") {
        return await window.__HEL_TRACK__(videoEl);
      }
      throw new Error("External tracker hook is not installed.");
    }

    // Run the configured detector against the live video element.
    // Returns [] whenever detection is unavailable (HF backend mode, tainted canvas,
    // model not loaded) so the tracker degrades gracefully.
    async function detectOnVideoFrame() {
      const mode = state.detector.mode;
      if (mode === "external") {
        try { return await externalTrack(videoEngage); }
        catch (e) { log(`External tracker failed: ${e.message}`, "w"); return []; }
      }
      if (state.detector.cocoBlocked) {
        return [];
      }
      if (isHfMode(mode)) {
        // Warn only once per session.
        if (!state.detector.hfTrackingWarned) {
          state.detector.hfTrackingWarned = true;
          log("HF mode uses backend processing; local COCO tracking is disabled to avoid GPU leaks. Use External tracker or RAW feed.", "w");
        }
        return [];
      }
      if (mode === "coco") {
        await ensureCocoDetector();
        if (state.detector.model) {
          try {
            let preds = await state.detector.model.detect(videoEngage);
            return preds
              .filter(p => p.score >= 0.45)
              .slice(0, 18)
              .map(p => ({ bbox: p.bbox, class: p.class, score: p.score }));
          } catch (err) {
            // Cross-origin video taints the canvas; remember and stop retrying.
            if (err && err.name === "SecurityError") {
              state.detector.cocoBlocked = true;
              log("Local COCO tracking blocked by tainted video. Use External tracker or RAW feed.", "w");
              return [];
            }
            throw err;
          }
        }
        return [];
      }
      return [];
    }


    // Greedy IoU association of fresh detections to existing tracks; creates new
    // tracks for large unmatched detections and prunes tracks not seen recently.
    function matchAndUpdateTracks(dets, dtSec) {
      // Convert detections to bbox in video coordinates
      const w = videoEngage.videoWidth || state.frame.w;
      const h = videoEngage.videoHeight || state.frame.h;

      const detObjs = dets.map(d => ({
        bbox: normBBox(d.bbox, w, h),
        label: d.class,
        score: d.score
      }));

      // mark all tracks as unmatched
      const tracks = state.tracker.tracks;

      const used = new Set();
      for (const tr of tracks) {
        let best = null;
        let bestI = 0.0;
        let bestIdx = -1;
        for (let i = 0; i < detObjs.length; i++) {
          if (used.has(i)) continue;
          const IoU = iou(tr.bbox, detObjs[i].bbox);
          if (IoU > bestI) {
            bestI = IoU;
            best = detObjs[i];
            bestIdx = i;
          }
        }
        // Accept the association only above a minimum overlap.
        if (best && bestI >= 0.18) {
          used.add(bestIdx);

          // velocity estimate
          const cx0 = tr.bbox.x + tr.bbox.w * 0.5;
          const cy0 = tr.bbox.y + tr.bbox.h * 0.5;
          const cx1 = best.bbox.x + best.bbox.w * 0.5;
          const cy1 = best.bbox.y + best.bbox.h * 0.5;
          tr.vx = (cx1 - cx0) / Math.max(1e-6, dtSec);
          tr.vy = (cy1 - cy0) / Math.max(1e-6, dtSec);

          // smooth bbox update
          tr.bbox.x = lerp(tr.bbox.x, best.bbox.x, 0.65);
          tr.bbox.y = lerp(tr.bbox.y, best.bbox.y, 0.65);
          tr.bbox.w = lerp(tr.bbox.w, best.bbox.w, 0.55);
          tr.bbox.h = lerp(tr.bbox.h, best.bbox.h, 0.55);

          tr.label = best.label || tr.label;
          tr.score = best.score || tr.score;
          tr.lastSeen = now();
        }
      }

      // add unmatched detections as new tracks (optional)
      for (let i = 0; i < detObjs.length; i++) {
        if (used.has(i)) continue;
        // create new track only if big enough (avoid clutter)
        const a = detObjs[i].bbox.w * detObjs[i].bbox.h;
        if (a < (w * h) * 0.0025) continue;

        const newId = `T${String(state.tracker.nextId++).padStart(2, "0")}`;
        const ap = defaultAimpoint(detObjs[i].label);
        tracks.push({
          id: newId,
          label: detObjs[i].label,
          bbox: { ...detObjs[i].bbox },
          score: detObjs[i].score,
          aimRel: { relx: ap.relx, rely: ap.rely, label: ap.label },
          baseAreaFrac: (detObjs[i].bbox.w * detObjs[i].bbox.h) / (w * h),
          baseRange_m: +rangeBase.value,
          baseDwell_s: 5.5,
          reqP_kW: 42,
          lastSeen: now(),
          vx: 0, vy: 0,
          dwellAccum: 0,
          killed: false,
          state: "TRACK",
          assessT: 0
        });
        log(`New track created: ${newId} (${detObjs[i].label})`, "t");
      }

      // prune old tracks if they disappear (kills are kept for display)
      const tNow = now();
      state.tracker.tracks = tracks.filter(tr => (tNow - tr.lastSeen) < 2200 || tr.killed);
    }

    // Dead-reckon track boxes between detector frames using the velocity estimate.
    function predictTracks(dtSec) {
      const w = videoEngage.videoWidth || state.frame.w;
      const h = videoEngage.videoHeight || state.frame.h;
      state.tracker.tracks.forEach(tr => {
        if (tr.killed) return;
        tr.bbox.x = clamp(tr.bbox.x + tr.vx * dtSec * 0.12, 0, w - 1);
        tr.bbox.y = clamp(tr.bbox.y + tr.vy * dtSec * 0.12, 0, h - 1);
      });
    }

    // Estimate range from apparent bbox area relative to the track's baseline area
    // (apparent size scales as 1/range, so range scales with sqrt(baseArea/area)).
    function rangeFromArea(track) {
      const w = videoEngage.videoWidth || state.frame.w;
      const h = videoEngage.videoHeight || state.frame.h;
      const a = (track.bbox.w * track.bbox.h) / (w * h);
      const baseA = Math.max(1e-6, track.baseAreaFrac || a);
      const rel = Math.sqrt(baseA / Math.max(1e-6, a));
      return clamp(track.baseRange_m * rel, 80, 16000);
    }

    // Required dwell time for a track at the given range, derived from the Tab 1
    // baseline requirement and the deliverable power at that range.
    function dwellFromRange(track, range_m) {
      const mp = maxPowerAtTarget(range_m);
      const baseReq = track.reqP_kW || 40;
      const baseD = track.baseDwell_s || 5;

      // Use Tab1 baseline as reference; scale by range and power ratio.
      const dwell = requiredDwell(range_m, baseReq, mp.Ptar, baseD);
      return dwell;
    }

    function chooseTargetAuto() {
      // choose highest (maxP-reqP)/dwell among visible tracks
      let best = null;
      state.tracker.tracks.forEach(tr => {
        if (tr.killed) return;
        const range = rangeFromArea(tr);
        const mp = maxPowerAtTarget(range);
        const margin = mp.Ptar - (tr.reqP_kW || 0);
        const dwell = dwellFromRange(tr, range);
        const score = margin / Math.max(0.8, dwell);
        if (!best || score > best.score) best = { id: tr.id, score, margin, dwell };
      });
      return best ? best.id : null;
    }

    // Advance the per-target engagement state machine and accumulate beam dwell;
    // also drives the dwell progress bar in the UI.
    function updateEngagementState(dtSec) {
      const assessS = +assessWindow.value;
      let targetId = state.tracker.selectedTrackId;

      // In auto policy, re-select the best target every tick.
      if (policyMode.value === "auto") {
        targetId = chooseTargetAuto();
        state.tracker.selectedTrackId = targetId;
      }

      if (!state.tracker.beamOn || !targetId) return;

      const tr = state.tracker.tracks.find(t => t.id === targetId);
      if (!tr || tr.killed) return;

      const range = rangeFromArea(tr);
      const reqD = dwellFromRange(tr, range);

      // state machine: TRACK -> SETTLE -> FIRE -> ASSESS -> KILL
      if (tr.state === "TRACK") {
        tr.state = "SETTLE";
        tr.assessT = 0;
      }

      if (tr.state === "SETTLE") {
        tr.assessT += dtSec;
        if (tr.assessT >= 0.25) { tr.state = "FIRE"; tr.assessT = 0; }
      } else if (tr.state === "FIRE") {
        tr.dwellAccum += dtSec;
        if (tr.dwellAccum >= reqD) {
          tr.state = "ASSESS";
          tr.assessT = 0;
        }
      } else if (tr.state === "ASSESS") {
        tr.assessT += dtSec;
        if (tr.assessT >= assessS) {
          tr.killed = true;
          tr.state = "KILL";
          state.tracker.beamOn = false; // stop beam after kill to make it dramatic
          chipBeam.textContent = "BEAM:OFF";
          log(`Target ${tr.id} assessed neutralized.`, "g");
        }
      }

      // update dwell bar UI
      const pct = clamp(tr.dwellAccum / Math.max(0.001, reqD), 0, 1) * 100;
      dwellBar.style.width = `${pct.toFixed(0)}%`;
      dwellText.textContent = `${tr.id} · ${tr.state} · ${(tr.dwellAccum).toFixed(1)}s / ${reqD.toFixed(1)}s · R=${Math.round(range)}m`;
    }

    // Hit-test live tracks at (x, y); the smallest containing bbox wins so
    // overlapping targets remain individually pickable.
    function pickTrackAt(x, y) {
      const hits = state.tracker.tracks
        .filter(t => !t.killed)
        .filter(t => x >= t.bbox.x && x <= t.bbox.x + t.bbox.w && y >= t.bbox.y && y <= t.bbox.y + t.bbox.h)
        .sort((a, b) => (a.bbox.w * a.bbox.h) - (b.bbox.w * b.bbox.h));
      return hits[0] || null;
    }

    // Main loop
    let rafId = null;
    // requestAnimationFrame loop: runs detection at the configured detHz, dead-reckons
    // between detector frames, advances the engagement state machine and refreshes all
    // live views. NOTE(review): tick is async inside rAF, so a slow detector call can
    // overlap the next tick — TODO confirm whether that is acceptable here.
    async function startLoop() {
      if (rafId) cancelAnimationFrame(rafId);
      async function tick() {
        rafId = requestAnimationFrame(tick);
        tickAgentCursor();

        if (!state.tracker.running) return;

        const tNow = now();
        const dtSec = (tNow - state.tracker.lastFrameTime) / 1000;
        state.tracker.lastFrameTime = tNow;

        // detection schedule
        const hz = +detHz.value;
        const period = 1000 / Math.max(1, hz);
        if ((tNow - state.tracker.lastDetTime) >= period) {
          state.tracker.lastDetTime = tNow;
          const dets = await detectOnVideoFrame();
          matchAndUpdateTracks(dets, Math.max(0.016, dtSec));
        } else {
          predictTracks(Math.max(0.016, dtSec));
        }

        updateEngagementState(Math.max(0.016, dtSec));
        renderEngageOverlay();
        renderRadar();
        renderTrackCards();

        chipTracks.textContent = `TRACKS:${state.tracker.tracks.filter(t => !t.killed).length}`;
        liveStamp.textContent = new Date().toLocaleTimeString();
      }
      tick();
    }

    // Draw track boxes, aimpoints, dwell rings, state strips and the beam line on the
    // engage overlay (coordinates are remapped to intrinsic video space by the
    // transform wrapper installed further down in this file).
    function renderEngageOverlay() {
      if (engageOverlay.style.display === "none") {
        return;
      }
      const ctx = engageOverlay.getContext("2d");
      const w = engageOverlay.width, h = engageOverlay.height;
      ctx.clearRect(0, 0, w, h);

      if (!state.videoLoaded) return;

      // Draw dark background instead of video frame (only processed overlays shown)
      ctx.fillStyle = "#0b1026";
      ctx.fillRect(0, 0, w, h);

      // draw track boxes and labels
      const tNow = now();
      state.tracker.tracks.forEach(tr => {
        const isSel = tr.id === state.tracker.selectedTrackId;
        const killed = tr.killed;

        const b = tr.bbox;
        const ax = b.x + b.w *
        tr.aimRel.relx;
        const ay = b.y + b.h * tr.aimRel.rely;

        const range = rangeFromArea(tr);
        const reqD = dwellFromRange(tr, range);

        const mp = maxPowerAtTarget(range);
        const margin = mp.Ptar - (tr.reqP_kW || 0);

        // Green = killed, cyan = selected, purple = other live tracks.
        const color = killed ? "rgba(34,197,94,.55)" : (isSel ? "rgba(34,211,238,.95)" : "rgba(124,58,237,.65)");

        // box
        ctx.lineWidth = isSel ? 3 : 2;
        ctx.strokeStyle = color;
        ctx.shadowBlur = isSel ? 16 : 10;
        ctx.shadowColor = color;
        roundRect(ctx, b.x, b.y, b.w, b.h, 10, false, true);
        ctx.shadowBlur = 0;

        // aimpoint
        if (!killed) {
          drawAimpoint(ctx, ax, ay, isSel);
        } else {
          // killed marker
          ctx.fillStyle = "rgba(34,197,94,.95)";
          ctx.font = "14px " + getComputedStyle(document.body).fontFamily;
          ctx.fillText("NEUTRALIZED", b.x + 10, b.y + 22);
        }

        // dwell ring (arc grows clockwise from 12 o'clock with dwell progress)
        if (!killed) {
          const pct = clamp(tr.dwellAccum / Math.max(0.001, reqD), 0, 1);
          ctx.beginPath();
          ctx.strokeStyle = "rgba(34,197,94,.85)";
          ctx.lineWidth = 3;
          ctx.arc(ax, ay, 16, -Math.PI / 2, -Math.PI / 2 + Math.PI * 2 * pct);
          ctx.stroke();
        }

        // label with distance + dwell + margin
        const tag = `${tr.id} · R=${Math.round(range)}m · DWELL=${reqD.toFixed(1)}s · ΔP=${margin >= 0 ? "+" : ""}${margin.toFixed(1)}kW`;
        ctx.font = "bold 14px " + getComputedStyle(document.body).fontFamily;
        const tw = ctx.measureText(tag).width;
        const tx = clamp(b.x, 6, w - tw - 12);
        const ty = clamp(b.y - 12, 18, h - 12);

        ctx.fillStyle = "rgba(0,0,0,.55)";
        ctx.strokeStyle = "rgba(255,255,255,.14)";
        ctx.lineWidth = 1;
        roundRect(ctx, tx - 6, ty - 14, tw + 12, 18, 8, true, true);

        ctx.fillStyle = "rgba(255,255,255,.86)";
        ctx.fillText(tag, tx, ty);

        // engagement strip indicator near bbox bottom
        const st = tr.state || "TRACK";
        const stColor = st === "FIRE" ? "rgba(239,68,68,.92)" : (st === "ASSESS" ? "rgba(245,158,11,.92)" : (st === "KILL" ? "rgba(34,197,94,.92)" : "rgba(34,211,238,.92)"));
        ctx.fillStyle = stColor;
        ctx.globalAlpha = 0.85;
        ctx.fillRect(b.x, b.y + b.h + 4, clamp(b.w * 0.55, 70, b.w), 5);
        ctx.globalAlpha = 1;

        ctx.fillStyle = "rgba(255,255,255,.82)";
        ctx.font = "11px " + getComputedStyle(document.body).fontFamily;
        ctx.fillText(st, b.x, b.y + b.h + 18);

        // beam line to selected aimpoint
        if (state.tracker.beamOn && isSel && !killed) {
          ctx.strokeStyle = "rgba(239,68,68,.45)";
          ctx.lineWidth = 2;
          ctx.setLineDash([6, 6]);
          ctx.beginPath();
          ctx.moveTo(w * 0.5, h * 0.98);
          ctx.lineTo(ax, ay);
          ctx.stroke();
          ctx.setLineDash([]);
        }
      });
    }

    // Rebuild the live-track card list; clicking a card selects that track and turns
    // the beam on (manual policy only).
    function renderTrackCards() {
      trackList.innerHTML = "";
      const alive = state.tracker.tracks.filter(t => !t.killed);
      if (!alive.length) {
        const div = document.createElement("div");
        div.className = "mini";
        div.style.padding = "8px";
        div.textContent = "No live tracks. Run Engage or adjust detector.";
        trackList.appendChild(div);
        return;
      }

      alive.forEach(tr => {
        const range = rangeFromArea(tr);
        const reqD = dwellFromRange(tr, range);
        const mp = maxPowerAtTarget(range);
        const margin = mp.Ptar - (tr.reqP_kW || 0);
        const pk = pkillFromMargin(margin, tr.dwellAccum, reqD);

        const div = document.createElement("div");
        div.className = "obj" + (tr.id === state.tracker.selectedTrackId ? " active" : "");
        // NOTE(review): the card's HTML markup appears stripped in this extracted
        // view (only the interpolated fields remain) — TODO confirm against the
        // original template.
        div.innerHTML = `
          ${tr.id}
          ${escapeHtml(tr.label)}
          ${margin >= 0 ? "+" : ""}${margin.toFixed(1)}kW
          R:${Math.round(range)}m
          DW:${reqD.toFixed(1)}s
          Pk:${Math.round(pk * 100)}%
          AP:${escapeHtml(tr.aimRel.label)}
          STATE:${tr.state}
        `;
        div.addEventListener("click", () => {
          if (policyMode.value !== "manual") return;
          state.tracker.selectedTrackId = tr.id;
          state.tracker.beamOn = true;
          chipBeam.textContent = "BEAM:ON";
          renderTrackCards();
        });
        trackList.appendChild(div);
      });
    }

    // ========= Radar rendering =========
    // Draw the synthetic PPI radar: range rings, rotating sweep, own-ship marker and
    // track blips placed by image-derived bearing and relative range.
    function renderRadar() {
      const ctx = radarCanvas.getContext("2d");
      const w = radarCanvas.width, h = radarCanvas.height;
      ctx.clearRect(0, 0, w, h);

      // background
      ctx.fillStyle = "rgba(0,0,0,.35)";
      ctx.fillRect(0, 0, w, h);

      const cx = w * 0.5, cy = h * 0.55;
      const R = Math.min(w, h) * 0.42;

      // rings
      ctx.strokeStyle = "rgba(255,255,255,.10)";
      ctx.lineWidth = 1;
      for (let i = 1; i <= 4; i++) {
        ctx.beginPath();
        ctx.arc(cx, cy, R * i / 4, 0, Math.PI * 2);
        ctx.stroke();
      }
      // cross
      ctx.beginPath(); ctx.moveTo(cx - R, cy); ctx.lineTo(cx + R, cy); ctx.stroke();
      ctx.beginPath(); ctx.moveTo(cx, cy - R); ctx.lineTo(cx, cy + R); ctx.stroke();

      // sweep
      const t = now() / 1000;
      const ang = (t * 0.65) % (Math.PI * 2);
      ctx.strokeStyle = "rgba(34,211,238,.22)";
      ctx.lineWidth = 2;
      ctx.beginPath();
      ctx.moveTo(cx, cy);
      ctx.lineTo(cx + Math.cos(ang) * R, cy + Math.sin(ang) * R);
      ctx.stroke();

      // ownship
      ctx.fillStyle = "rgba(34,211,238,.85)";
      ctx.beginPath();
      ctx.arc(cx, cy, 5, 0, Math.PI * 2);
      ctx.fill();

      // tracks as blips
      const tracks = state.tracker.tracks.filter(t => !t.killed);
      tracks.forEach(tr => {
        const range = rangeFromArea(tr);
        const rr = clamp(range / Math.max(250, +rangeBase.value), 0.1, 3.5); // relative
        const b = tr.bbox;

        // bearing from image position
        const vw = videoEngage.videoWidth || state.frame.w;
        const vh = videoEngage.videoHeight || state.frame.h;
        const tx = (b.x + b.w * 0.5) / vw - 0.5;
        const ty = (b.y + b.h * 0.5) / vh - 0.5;
        const bearing = Math.atan2(ty, tx);

        const rad = clamp(rr, 0, 3.2) * (R / 3.2);
        const px = cx + Math.cos(bearing) * rad;
        const
        py = cy + Math.sin(bearing) * rad;

        // color by engagement state
        const col = tr.state === "FIRE" ? "rgba(239,68,68,.9)" :
          (tr.state === "ASSESS" ? "rgba(245,158,11,.9)" :
            "rgba(124,58,237,.9)");

        ctx.fillStyle = col;
        ctx.beginPath();
        ctx.arc(px, py, 5, 0, Math.PI * 2);
        ctx.fill();

        ctx.fillStyle = "rgba(255,255,255,.75)";
        ctx.font = "11px " + getComputedStyle(document.body).fontFamily;
        ctx.fillText(tr.id, px + 8, py + 4);
      });

      // label
      ctx.fillStyle = "rgba(255,255,255,.55)";
      ctx.font = "11px " + getComputedStyle(document.body).fontFamily;
      ctx.fillText("CENTER: OWN-SHIP", 10, 18);
      ctx.fillText("BLIPS: RELATIVE RANGE + BEARING (from video kinematics)", 10, 36);
    }

    // ========= Resizing overlays to match video viewports =========
    // Size the engage overlay's backing store to the displayed video rect (DPR-aware);
    // the frame overlay keeps intrinsic coordinates and needs no action.
    function resizeOverlays() {
      // Engage overlay matches displayed video size
      const rect = videoEngage.getBoundingClientRect();
      if (rect.width > 0 && rect.height > 0) {
        const w = Math.round(rect.width * devicePixelRatio);
        const h = Math.round(rect.height * devicePixelRatio);
        engageOverlay.width = w;
        engageOverlay.height = h;
        engageOverlay.style.width = rect.width + "px";
        engageOverlay.style.height = rect.height + "px";

        // scale track bboxes if video intrinsic differs from overlay (we keep bboxes in intrinsic coords)
        // rendering code assumes overlay coords = intrinsic coords. Therefore we remap by setting ctx transform each render.
        // Instead, we store bboxes in intrinsic and draw by transform.
        // We will implement by setting ctx.setTransform in renderEngageOverlay.
      }

      // Frame overlay uses intrinsic, but we still keep canvas scaled by CSS. No action needed.
    }
    window.addEventListener("resize", resizeOverlays);

    // Adjust engage overlay transform for drawing in intrinsic coordinates:
    // wrap renderEngageOverlay so all drawing happens under an intrinsic->pixel scale.
    const _renderEngageOverlay = renderEngageOverlay;
    renderEngageOverlay = function () {
      const ctx = engageOverlay.getContext("2d");
      const rect = videoEngage.getBoundingClientRect();
      const vw = videoEngage.videoWidth || state.frame.w;
      const vh = videoEngage.videoHeight || state.frame.h;
      const pxW = engageOverlay.width;
      const pxH = engageOverlay.height;

      // Clear in device pixels with the identity transform first.
      ctx.setTransform(1, 0, 0, 1, 0, 0);
      ctx.clearRect(0, 0, pxW, pxH);

      if (!rect.width || !rect.height) return;
      const sx = pxW / vw;
      const sy = pxH / vh;
      ctx.setTransform(sx, 0, 0, sy, 0, 0);

      _renderEngageOverlay();
      ctx.setTransform(1, 0, 0, 1, 0, 0);
    };

    // Also adjust click picking because overlay is scaled
    engageOverlay.addEventListener("click", (ev) => {
      if (!state.videoLoaded) return;
      if (policyMode.value !== "manual") return;

      const rect = engageOverlay.getBoundingClientRect();
      const vw = videoEngage.videoWidth || state.frame.w;
      const vh = videoEngage.videoHeight || state.frame.h;
      const pxW = engageOverlay.width;
      const pxH = engageOverlay.height;

      const xPx = (ev.clientX - rect.left) * devicePixelRatio;
      const yPx = (ev.clientY - rect.top) * devicePixelRatio;

      // inverse transform (device pixels -> intrinsic video coordinates)
      const x = xPx * (vw / pxW);
      const y = yPx * (vh / pxH);

      const tr = pickTrackAt(x, y);
      if (tr) {
        state.tracker.selectedTrackId = tr.id;
        state.tracker.beamOn = true;
        chipBeam.textContent = "BEAM:ON";
        log(`Manual target selected: ${tr.id}`, "t");
        renderTrackCards();
      }
    }, { passive: true });

    // ========= Trade-space rendering =========
    // Repopulate the trade-space target dropdown, preserving the prior selection
    // when it is still valid.
    function refreshTradeTargets() {
      const sel = tradeTarget.value;
      tradeTarget.innerHTML = "";
      const ids = state.detections.map(d => d.id);
      if (!ids.length) {
        const opt = document.createElement("option");
        opt.value = "";
        opt.textContent = "No targets";
        tradeTarget.appendChild(opt);
        return;
      }
      ids.forEach(id => {
        const opt = document.createElement("option");
        opt.value = id;
        opt.textContent = id;
        tradeTarget.appendChild(opt);
      });
      if (sel && ids.includes(sel)) tradeTarget.value = sel;
      else tradeTarget.value = state.selectedId || ids[0];
    }

    btnReplot.addEventListener("click", renderTrade);
    tradeTarget.addEventListener("change", renderTrade);

    // Log a snapshot of the current knob values plus the selected target's baseline numbers.
    btnSnap.addEventListener("click", () => {
      if (!state.detections.length) return;
      const id = tradeTarget.value;
      const d = state.detections.find(x => x.id === id) || state.detections[0];
      const snap = {
        target: id,
        helPower_kW: +helPower.value,
        vis_km: +atmVis.value,
        cn2: +atmCn2.value,
        ao: +aoQ.value,
        baseRange_m: d.baseRange_m,
        reqP_kW: d.reqP_kW,
        baseDwell_s: d.baseDwell_s
      };
      log("SNAPSHOT: " + JSON.stringify(snap), "t");
    });

    // Plot max-power-at-target vs required-power curves over a range sweep for the
    // selected target (function continues past this section of the file).
    function renderTrade() {
      const ctx = tradeCanvas.getContext("2d");
      const W = tradeCanvas.width, H = tradeCanvas.height;
      ctx.clearRect(0, 0, W, H);

      // background
      ctx.fillStyle = "rgba(0,0,0,.32)";
      ctx.fillRect(0, 0, W, H);

      if (!state.detections.length) {
        ctx.fillStyle = "rgba(255,255,255,.75)";
        ctx.font = "14px " + getComputedStyle(document.body).fontFamily;
        ctx.fillText("Run Reason to populate trade-space curves.", 18, 34);
        return;
      }

      const id = tradeTarget.value || state.selectedId || state.detections[0].id;
      const d = state.detections.find(x => x.id === id) || state.detections[0];

      const r0 = Math.max(50, +rMin.value || 200);
      const r1 = Math.max(r0 + 50, +rMax.value || 6000);

      // margins
      const padL = 64, padR = 18, padT = 18, padB = 52;
      const plotW = W - padL - padR;
      const plotH = H - padT - padB;

      // compute sweep
      const N = 120;
      const xs = [];
      let maxY = 0;
      let minY = Infinity;

      for (let i = 0; i <= N; i++) {
        const r = r0 + (r1 - r0) * (i / N);
        const mp = maxPowerAtTarget(r).Ptar;
        const reqP = d.reqP_kW || 40;
        const reqD =
requiredDwell(r, reqP, mp, d.baseDwell_s || 5); + + xs.push({ r, mp, reqP, reqD }); + maxY = Math.max(maxY, mp, reqP); + minY = Math.min(minY, mp, reqP); + } + + maxY = Math.max(maxY, 20); + minY = Math.max(0, minY - 10); + + // axes + ctx.strokeStyle = "rgba(255,255,255,.14)"; + ctx.lineWidth = 1; + ctx.beginPath(); + ctx.moveTo(padL, padT); + ctx.lineTo(padL, padT + plotH); + ctx.lineTo(padL + plotW, padT + plotH); + ctx.stroke(); + + // grid lines + ctx.strokeStyle = "rgba(255,255,255,.07)"; + for (let i = 1; i <= 5; i++) { + const y = padT + plotH * (i / 5); + ctx.beginPath(); ctx.moveTo(padL, y); ctx.lineTo(padL + plotW, y); ctx.stroke(); + } + for (let i = 1; i <= 6; i++) { + const x = padL + plotW * (i / 6); + ctx.beginPath(); ctx.moveTo(x, padT); ctx.lineTo(x, padT + plotH); ctx.stroke(); + } + + // helpers + const xMap = (r) => padL + (r - r0) / (r1 - r0) * plotW; + const yMap = (p) => padT + (1 - (p - minY) / (maxY - minY)) * plotH; + + // curve: max power at target + ctx.strokeStyle = "rgba(34,211,238,.95)"; + ctx.lineWidth = 2.5; + ctx.beginPath(); + xs.forEach((pt, i) => { + const x = xMap(pt.r); + const y = yMap(pt.mp); + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + }); + ctx.stroke(); + + // curve: required power + ctx.strokeStyle = "rgba(239,68,68,.90)"; + ctx.lineWidth = 2.5; + ctx.beginPath(); + xs.forEach((pt, i) => { + const x = xMap(pt.r); + const y = yMap(pt.reqP); + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + }); + ctx.stroke(); + + // annotate margin zones + ctx.fillStyle = "rgba(34,197,94,.08)"; + ctx.beginPath(); + xs.forEach((pt, i) => { + const x = xMap(pt.r); + const y = yMap(Math.max(pt.reqP, pt.mp)); + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + }); + for (let i = xs.length - 1; i >= 0; i--) { + const x = xMap(xs[i].r); + const y = yMap(Math.min(xs[i].reqP, xs[i].mp)); + ctx.lineTo(x, y); + } + ctx.closePath(); + ctx.fill(); + + // second axis for dwell (scaled) + const dwellMax = 
Math.max(...xs.map(p => p.reqD)); + const yMapD = (dwell) => padT + (1 - (dwell / Math.max(1e-6, dwellMax))) * plotH; + + ctx.strokeStyle = "rgba(124,58,237,.85)"; + ctx.lineWidth = 2.2; + ctx.beginPath(); + xs.forEach((pt, i) => { + const x = xMap(pt.r); + const y = yMapD(pt.reqD); + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + }); + ctx.stroke(); + + // optional pkill band + if (showPk.value === "on") { + ctx.fillStyle = "rgba(245,158,11,.08)"; + ctx.beginPath(); + xs.forEach((pt, i) => { + const x = xMap(pt.r); + const mp = pt.mp; + const margin = mp - pt.reqP; + const pk = pkillFromMargin(margin, d.baseDwell_s || 5, pt.reqD); + const y = padT + plotH * (1 - pk); + if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y); + }); + ctx.lineTo(padL + plotW, padT + plotH); + ctx.lineTo(padL, padT + plotH); + ctx.closePath(); + ctx.fill(); + } + + // labels + ctx.fillStyle = "rgba(255,255,255,.84)"; + ctx.font = "bold 14px " + getComputedStyle(document.body).fontFamily; + ctx.fillText(`Target: ${id} (${d.label})`, padL, 16); + + ctx.fillStyle = "rgba(34,211,238,.95)"; + ctx.fillText("Max P@Target (kW)", padL + 10, padT + plotH + 30); + + ctx.fillStyle = "rgba(239,68,68,.92)"; + ctx.fillText("Required P@Target (kW)", padL + 190, padT + plotH + 30); + + ctx.fillStyle = "rgba(124,58,237,.90)"; + ctx.fillText(`Required Dwell (s, scaled)`, padL + 420, padT + plotH + 30); + + ctx.fillStyle = "rgba(255,255,255,.55)"; + ctx.font = "11px " + getComputedStyle(document.body).fontFamily; + ctx.fillText(`Range (m)`, padL + plotW - 64, padT + plotH + 46); + + // axis ticks + ctx.fillStyle = "rgba(255,255,255,.55)"; + ctx.font = "11px " + getComputedStyle(document.body).fontFamily; + + for (let i = 0; i <= 5; i++) { + const p = minY + (maxY - minY) * (1 - i / 5); + const y = padT + plotH * (i / 5); + ctx.fillText(p.toFixed(0), 12, y + 4); + } + + for (let i = 0; i <= 6; i++) { + const r = r0 + (r1 - r0) * (i / 6); + const x = padL + plotW * (i / 6); + 
ctx.fillText(r.toFixed(0), x - 14, padT + plotH + 18); + } + + // marker at baseline range + const baseR = d.baseRange_m || +rangeBase.value; + const xb = xMap(clamp(baseR, r0, r1)); + ctx.strokeStyle = "rgba(255,255,255,.28)"; + ctx.setLineDash([6, 6]); + ctx.beginPath(); + ctx.moveTo(xb, padT); + ctx.lineTo(xb, padT + plotH); + ctx.stroke(); + ctx.setLineDash([]); + } + + // ========= Helpers: keep drawing when idle ========= + function idleLoop() { + requestAnimationFrame(idleLoop); + tickAgentCursor(); + } + idleLoop(); + + // ========= Init ========= + unloadVideo(); + log("Console initialized. Upload a video to begin.", "t"); + +})();