Zhen Ye
feat(threat-assessment): implement naval threat analysis with GPT-4o\n\n- Rename utils/gpt_distance.py to utils/gpt_reasoning.py and update logic for 15 naval threat features\n- Add Pydantic schemas for NavalThreatAssessment in utils/schemas.py\n- Update backend (app.py, inference.py) to use new threat estimation and pass full metadata\n- refactor(frontend): render threat level badges and detailed feature table in UI
8094b21
// Main Entry Point - Wire up all event handlers and run the application
document.addEventListener("DOMContentLoaded", () => {
// Shortcuts
const { state } = APP.core;
const { $, $$ } = APP.core.utils;
const { log, setStatus, setHfStatus } = APP.ui.logging;
const { hfDetectAsync, checkJobStatus, cancelBackendJob, pollAsyncJob } = APP.api.client;
// Core modules
const { captureFirstFrame, drawFirstFrame, unloadVideo, toggleDepthView, toggleFirstFrameDepthView, toggleProcessedFeed, resizeOverlays, setStreamingMode, stopStreamingMode, displayProcessedFirstFrame } = APP.core.video;
const { syncKnobDisplays, recomputeHEL } = APP.core.hel;
const { load: loadDemo, getFrameData: getDemoFrameData, enable: enableDemo } = APP.core.demo;
// UI Renderers
const { renderFrameRadar, renderLiveRadar } = APP.ui.radar;
const { renderFrameOverlay, renderEngageOverlay } = APP.ui.overlays;
const { renderFrameTrackList } = APP.ui.cards;
const { renderFeatures } = APP.ui.features;
const { renderTrade, populateTradeTarget, snapshotTrade } = APP.ui.trade;
const { computeIntelSummary, resetIntelUI, renderMissionContext } = APP.ui.intel;
const { tickAgentCursor, moveCursorToRect } = APP.ui.cursor;
const { matchAndUpdateTracks, predictTracks } = APP.core.tracker;
const { defaultAimpoint } = APP.core.physics;
const { normBBox } = APP.core.utils;
// DOM Elements
const videoEngage = $("#videoEngage");
const videoHidden = $("#videoHidden");
const videoFile = $("#videoFile");
const btnReason = $("#btnReason");
const btnCancelReason = $("#btnCancelReason");
const btnRecompute = $("#btnRecompute");
const btnClear = $("#btnClear");
const btnEject = $("#btnEject");
const btnEngage = $("#btnEngage");
const btnReset = $("#btnReset");
const btnPause = $("#btnPause");
const btnToggleSidebar = $("#btnToggleSidebar");
const btnIntelRefresh = $("#btnIntelRefresh");
const btnReplot = $("#btnReplot");
const btnSnap = $("#btnSnap");
const detectorSelect = $("#detectorSelect");
const missionText = $("#missionText");
const cursorMode = $("#cursorMode");
const frameCanvas = $("#frameCanvas");
const frameTrackList = $("#frameTrackList");
const frameEmpty = $("#frameEmpty");
const frameNote = $("#frameNote");
const engageEmpty = $("#engageEmpty");
const engageNote = $("#engageNote");
const chipFeed = $("#chipFeed");
const chipDepth = $("#chipDepth");
const chipFrameDepth = $("#chipFrameDepth");
// Initialization
function init() {
log("System initializing...", "t");
setupFileUpload();
setupControls();
setupKnobListeners();
setupChipToggles();
setupTabSwitching();
// Initial UI sync
syncKnobDisplays();
renderMissionContext();
setHfStatus("idle");
// Start main loop
requestAnimationFrame(loop);
// Load demo data (if available)
loadDemo().then(() => {
// hidden usage: enable if video filename matches "demo" or manually
// APP.core.demo.enable(true);
});
log("System READY.", "g");
}
function setupFileUpload() {
if (!videoFile) return;
videoFile.addEventListener("change", async (e) => {
const file = e.target.files[0];
if (!file) return;
state.videoFile = file;
state.videoUrl = URL.createObjectURL(file);
state.videoLoaded = true;
// Show meta
const videoMeta = $("#videoMeta");
if (videoMeta) videoMeta.textContent = file.name;
// Load video into engage player
if (videoEngage) {
videoEngage.src = state.videoUrl;
videoEngage.load();
}
// Hide empty states
if (engageEmpty) engageEmpty.style.display = "none";
// Capture first frame dimensions (but don't draw - wait for processed frame from backend)
try {
await captureFirstFrame();
// Show placeholder message - actual frame will come from backend
if (frameNote) frameNote.textContent = "Video loaded (run Reason for processed frame)";
if (engageNote) engageNote.textContent = "Ready for Engage";
} catch (err) {
log(`First frame capture failed: ${err.message}`, "e");
}
setStatus("warn", "READY · Video loaded (run Reason)");
log(`Video loaded: ${file.name}`, "g");
// Load video-specific demo tracks (e.g., helicopter demo)
if (APP.core.demo.loadForVideo) {
await APP.core.demo.loadForVideo(file.name);
}
// Auto-enable demo mode if filename contains "demo" or helicopter video
const shouldEnableDemo = file.name.toLowerCase().includes("demo") ||
file.name.toLowerCase().includes("enhance_video_movement");
if (shouldEnableDemo && APP.core.demo.data) {
enableDemo(true);
log("Auto-enabled DEMO mode for this video.", "g");
}
});
}
function setupControls() {
// Reason button
if (btnReason) {
btnReason.addEventListener("click", runReason);
}
// Cancel Reason button
if (btnCancelReason) {
btnCancelReason.addEventListener("click", cancelReasoning);
}
// Recompute HEL button
if (btnRecompute) {
btnRecompute.addEventListener("click", async () => {
if (!state.hasReasoned) return;
await recomputeHEL();
renderFrameOverlay();
renderTrade();
log("HEL parameters recomputed.", "g");
});
}
// Clear button
if (btnClear) {
btnClear.addEventListener("click", () => {
state.detections = [];
state.selectedId = null;
renderFrameTrackList();
renderFrameOverlay();
renderFeatures(null);
renderTrade();
log("Detections cleared.", "t");
});
}
// Eject button
if (btnEject) {
btnEject.addEventListener("click", async () => {
await unloadVideo();
});
}
// Engage button
if (btnEngage) {
btnEngage.addEventListener("click", runEngage);
}
// Pause button
if (btnPause) {
btnPause.addEventListener("click", () => {
if (videoEngage) videoEngage.pause();
state.tracker.running = false;
log("Engage paused.", "t");
});
}
// Reset button
if (btnReset) {
btnReset.addEventListener("click", () => {
if (videoEngage) {
videoEngage.pause();
videoEngage.currentTime = 0;
}
state.tracker.tracks = [];
state.tracker.running = false;
state.tracker.nextId = 1;
renderFrameTrackList();
renderFrameRadar();
renderLiveRadar();
log("Engage reset.", "t");
});
}
// Sidebar toggle (Tab 2)
if (btnToggleSidebar) {
btnToggleSidebar.addEventListener("click", () => {
const engageGrid = $(".engage-grid");
if (engageGrid) {
engageGrid.classList.toggle("sidebar-collapsed");
btnToggleSidebar.textContent = engageGrid.classList.contains("sidebar-collapsed")
? "▶ Show Sidebar"
: "◀ Hide Sidebar";
}
});
}
// Intel refresh
if (btnIntelRefresh) {
btnIntelRefresh.addEventListener("click", async () => {
if (!state.videoLoaded) return;
log("Refreshing mission intel summary...", "t");
await computeIntelSummary();
});
}
// Trade space controls
if (btnReplot) {
btnReplot.addEventListener("click", renderTrade);
}
if (btnSnap) {
btnSnap.addEventListener("click", snapshotTrade);
}
const tradeTarget = $("#tradeTarget");
if (tradeTarget) {
tradeTarget.addEventListener("change", renderTrade);
}
// Track selection event
document.addEventListener("track-selected", (e) => {
state.selectedId = e.detail.id;
state.tracker.selectedTrackId = e.detail.id;
renderFrameTrackList();
renderFrameOverlay();
const det = state.detections.find(d => d.id === state.selectedId);
renderFeatures(det);
});
// Cursor mode toggle
if (cursorMode) {
cursorMode.addEventListener("change", () => {
state.ui.cursorMode = cursorMode.value;
if (state.ui.cursorMode === "off" && APP.ui.cursor.setCursorVisible) {
APP.ui.cursor.setCursorVisible(false);
}
});
}
}
function setupKnobListeners() {
// Listen to all inputs and selects for knob updates
const inputs = Array.from(document.querySelectorAll("input, select"));
inputs.forEach(el => {
el.addEventListener("input", () => {
syncKnobDisplays();
if (state.hasReasoned) {
recomputeHEL();
renderFrameOverlay();
renderTrade();
}
});
});
// Initial sync
syncKnobDisplays();
}
function setupChipToggles() {
// Toggle processed/raw feed
if (chipFeed) {
chipFeed.style.cursor = "pointer";
chipFeed.addEventListener("click", () => {
if (!state.videoLoaded) return;
toggleProcessedFeed();
log(`Feed set to: ${state.useProcessedFeed ? "HF" : "RAW"}`, "t");
});
}
// Toggle depth view (Tab 2)
if (chipDepth) {
chipDepth.style.cursor = "pointer";
chipDepth.addEventListener("click", () => {
if (!state.videoLoaded) return;
toggleDepthView();
log(`Engage view set to: ${state.useDepthFeed ? "DEPTH" : "DEFAULT"}`, "t");
});
}
// Toggle first frame depth view (Tab 1)
if (chipFrameDepth) {
chipFrameDepth.style.cursor = "pointer";
chipFrameDepth.addEventListener("click", () => {
if (!state.videoLoaded) return;
if (!state.hf.depthFirstFrameUrl) {
log("First frame depth not ready yet. Run Reason and wait for depth processing.", "w");
return;
}
toggleFirstFrameDepthView();
log(`First frame view set to: ${state.useFrameDepthView ? "DEPTH" : "DEFAULT"}`, "t");
});
}
}
function setupTabSwitching() {
const tabs = Array.from(document.querySelectorAll(".tabbtn"));
tabs.forEach(btn => {
btn.addEventListener("click", () => {
tabs.forEach(b => b.classList.remove("active"));
document.querySelectorAll(".tab").forEach(t => t.classList.remove("active"));
btn.classList.add("active");
const tabId = `#tab-${btn.dataset.tab}`;
const tab = $(tabId);
if (tab) tab.classList.add("active");
// Tab-specific actions
if (btn.dataset.tab === "trade") {
populateTradeTarget();
renderTrade();
}
if (btn.dataset.tab === "engage") {
resizeOverlays();
renderLiveRadar();
}
});
});
}
async function runReason() {
if (!state.videoLoaded) {
log("No video loaded. Upload a video first.", "w");
setStatus("warn", "READY · Upload a video");
return;
}
if (state.isReasoning) {
log("Reason already in progress. Please wait.", "w");
return;
}
// Lock the Reason process
state.isReasoning = true;
if (btnReason) {
btnReason.disabled = true;
btnReason.style.opacity = "0.5";
btnReason.style.cursor = "not-allowed";
}
if (btnCancelReason) btnCancelReason.style.display = "inline-block";
if (btnEngage) btnEngage.disabled = true;
// Clear previous detections
state.detections = [];
state.selectedId = null;
renderFrameTrackList();
renderFrameOverlay();
renderFeatures(null);
renderTrade();
setStatus("warn", "REASONING · Running perception pipeline");
// Agent cursor flair
if (state.ui.cursorMode === "on" && moveCursorToRect) {
if (btnReason) moveCursorToRect(btnReason.getBoundingClientRect());
if (frameCanvas) setTimeout(() => moveCursorToRect(frameCanvas.getBoundingClientRect()), 260);
if (frameTrackList) setTimeout(() => moveCursorToRect(frameTrackList.getBoundingClientRect()), 560);
}
try {
const mode = detectorSelect ? detectorSelect.value : "hf_yolov8";
const queries = missionText ? missionText.value.trim() : "";
const enableGPT = $("#enableGPTToggle")?.checked || false;
const enableDepth = $("#enableDepthToggle")?.checked || false;
const form = new FormData();
form.append("video", state.videoFile);
form.append("mode", "object_detection");
if (queries) form.append("queries", queries);
form.append("detector", mode);
form.append("enable_gpt", enableGPT ? "true" : "false");
form.append("enable_depth", enableDepth ? "true" : "false");
log(`Submitting job to ${state.hf.baseUrl}...`, "t");
setHfStatus("submitting job...");
const data = await hfDetectAsync(form);
state.hf.asyncJobId = data.job_id;
// Store raw detections (will process after image loads to get correct dimensions)
const rawDetections = data.first_frame_detections || [];
// Display processed first frame from backend (only processed frame, not raw)
// This is async - image loading will update state.frame.w/h
if (data.first_frame_url) {
state.hf.firstFrameUrl = data.first_frame_url.startsWith("http")
? data.first_frame_url
: `${state.hf.baseUrl}${data.first_frame_url}`;
// Wait for image to load so we have correct dimensions before processing detections
await new Promise((resolve, reject) => {
const img = new Image();
img.crossOrigin = "anonymous";
img.onload = () => {
// Update frame dimensions from loaded image
state.frame.w = img.naturalWidth || 1280;
state.frame.h = img.naturalHeight || 720;
// Resize canvases to match
const frameCanvas = $("#frameCanvas");
const frameOverlay = $("#frameOverlay");
if (frameCanvas) {
frameCanvas.width = state.frame.w;
frameCanvas.height = state.frame.h;
frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);
}
if (frameOverlay) {
frameOverlay.width = state.frame.w;
frameOverlay.height = state.frame.h;
}
// Hide empty state
const frameEmpty = $("#frameEmpty");
const frameNote = $("#frameNote");
if (frameEmpty) frameEmpty.style.display = "none";
if (frameNote) frameNote.textContent = "Processed (from backend)";
log(`Processed first frame displayed (${state.frame.w}×${state.frame.h})`, "g");
resolve();
};
img.onerror = () => {
log("Failed to load processed first frame, using local frame", "w");
drawFirstFrame();
resolve();
};
img.src = state.hf.firstFrameUrl;
});
}
// NOW process detections (after frame dimensions are correct)
if (rawDetections.length > 0) {
processFirstFrameDetections(rawDetections);
}
// Mark first frame as ready (for radar display)
state.firstFrameReady = true;
// Store depth URLs if provided
if (data.depth_video_url) {
state.hf.depthVideoUrl = data.depth_video_url.startsWith("http")
? data.depth_video_url
: `${state.hf.baseUrl}${data.depth_video_url}`;
log("Depth video URL received", "t");
}
if (data.first_frame_depth_url) {
state.hf.depthFirstFrameUrl = data.first_frame_depth_url.startsWith("http")
? data.first_frame_depth_url
: `${state.hf.baseUrl}${data.first_frame_depth_url}`;
log("First frame depth URL received", "t");
}
// Enable streaming mode if stream_url is provided (Tab 2 live view)
const enableStream = $("#enableStreamToggle")?.checked;
if (data.stream_url && enableStream) {
const streamUrl = data.stream_url.startsWith("http")
? data.stream_url
: `${state.hf.baseUrl}${data.stream_url}`;
log("Activating live stream...", "t");
setStreamingMode(streamUrl);
log("Live view available in 'Engage' tab.", "g");
setStatus("warn", "Live processing... View in Engage tab");
// Trigger resize/render for Tab 2
resizeOverlays();
renderLiveRadar();
}
// Start polling for completion
pollAsyncJob().then(() => {
log("Video processing complete.", "g");
// Stop streaming mode once video is ready
stopStreamingMode();
state.hasReasoned = true;
setStatus("good", "READY · Reason complete (you can Engage)");
log("Reason complete. Ready to Engage.", "g");
// Seed tracks for Tab 2
seedTracksFromTab1();
renderFrameRadar();
// Generate intel summary (async)
computeIntelSummary();
}).catch(err => {
log(`Polling error: ${err.message}`, "e");
stopStreamingMode();
});
// Initial status (processing in background)
setStatus("warn", "PROCESSING · Analysing video...");
log("Reasoning started (processing in background)...", "t");
} catch (err) {
setStatus("bad", "ERROR · Reason failed");
log(`Reason failed: ${err.message}`, "e");
console.error(err);
} finally {
state.isReasoning = false;
if (btnReason) {
btnReason.disabled = false;
btnReason.style.opacity = "1";
btnReason.style.cursor = "pointer";
}
if (btnCancelReason) btnCancelReason.style.display = "none";
}
}
function processFirstFrameDetections(dets) {
state.detections = dets.map((d, i) => {
const id = `T${String(i + 1).padStart(2, "0")}`;
const ap = defaultAimpoint(d.label || d.class);
const bbox = d.bbox
? { x: d.bbox[0], y: d.bbox[1], w: d.bbox[2] - d.bbox[0], h: d.bbox[3] - d.bbox[1] }
: { x: 0, y: 0, w: 10, h: 10 };
return {
id,
label: d.label || d.class,
score: d.score || 0.5,
bbox,
aim: { ...ap },
aim: { ...ap },
features: d.gpt_raw ? {
"Vessel Class": d.gpt_raw.specific_class || d.gpt_raw.vessel_category || "Unknown",
"Threat Lvl": d.gpt_raw.threat_level_score + "/10",
"Status": d.gpt_raw.threat_classification || "?",
"Weapons": (d.gpt_raw.visible_weapons || []).join(", ") || "None Visible",
"Readiness": d.gpt_raw.weapon_readiness || "Unknown",
"Motion": d.gpt_raw.motion_status || "Unknown",
"Sensors": (d.gpt_raw.sensor_profile || []).join(", ") || "None",
"Flags/ID": (d.gpt_raw.identity_markers || []).join(", ") || (d.gpt_raw.flag_state || "Unknown"),
"Activity": d.gpt_raw.deck_activity || "None",
"Range": (d.gpt_raw.range_estimation_nm ? d.gpt_raw.range_estimation_nm + " NM" : "Unknown"),
"Wake": d.gpt_raw.wake_description || "None"
} : {},
baseRange_m: null,
baseAreaFrac: (bbox.w * bbox.h) / (state.frame.w * state.frame.h),
baseDwell_s: 5.0,
reqP_kW: 40,
maxP_kW: 0,
pkill: 0,
// New depth fields
depth_est_m: (d.depth_est_m !== undefined && d.depth_est_m !== null) ? d.depth_est_m : null,
depth_rel: (d.depth_rel !== undefined && d.depth_rel !== null) ? d.depth_rel : null,
depth_valid: d.depth_valid ?? false,
gpt_distance_m: d.gpt_distance_m || null,
gpt_direction: d.gpt_direction || null,
gpt_description: d.gpt_description || null,
// New Threat Intelligence
threat_level_score: d.threat_level_score || 0,
threat_classification: d.threat_classification || "Unknown",
weapon_readiness: d.weapon_readiness || "Unknown"
};
});
state.selectedId = state.detections[0]?.id || null;
renderFrameTrackList();
renderFeatures(state.detections[0] || null);
renderFrameOverlay();
log(`Detected ${state.detections.length} objects in first frame.`, "g");
}
function seedTracksFromTab1() {
const rangeBase = $("#rangeBase");
state.tracker.tracks = state.detections.map(d => ({
id: d.id,
label: d.label,
bbox: { ...d.bbox },
score: d.score,
aimRel: d.aim ? { relx: d.aim.relx, rely: d.aim.rely, label: d.aim.label } : { relx: 0.5, rely: 0.5, label: "center_mass" },
baseAreaFrac: d.baseAreaFrac || ((d.bbox.w * d.bbox.h) / (state.frame.w * state.frame.h)),
baseRange_m: d.baseRange_m || (rangeBase ? +rangeBase.value : 1500),
baseDwell_s: d.baseDwell_s || 4.0,
reqP_kW: d.reqP_kW || 35,
depth_rel: d.depth_rel,
depth_est_m: d.depth_est_m,
depth_valid: d.depth_valid,
lastDepthBbox: d.depth_valid ? { ...d.bbox } : null,
gpt_distance_m: d.gpt_distance_m,
gpt_direction: d.gpt_direction,
gpt_description: d.gpt_description,
lastSeen: APP.core.utils.now(),
vx: 0,
vy: 0,
dwellAccum: 0,
killed: false,
state: "TRACK",
assessT: 0
}));
state.tracker.nextId = state.detections.length + 1;
log(`Seeded ${state.tracker.tracks.length} tracks from Tab 1 detections.`, "t");
}
function cancelReasoning() {
// Stop HF polling if running
if (state.hf.asyncPollInterval) {
clearInterval(state.hf.asyncPollInterval);
state.hf.asyncPollInterval = null;
log("HF polling stopped.", "w");
}
// Stop streaming mode
stopStreamingMode();
// Cancel backend job if it exists
const jobId = state.hf.asyncJobId;
if (jobId) {
cancelBackendJob(jobId, "cancel button");
}
// Reset state
state.isReasoning = false;
state.hf.busy = false;
state.hf.asyncJobId = null;
state.hf.asyncStatus = "cancelled";
// Re-enable Reason button
if (btnReason) {
btnReason.disabled = false;
btnReason.style.opacity = "1";
btnReason.style.cursor = "pointer";
}
if (btnCancelReason) btnCancelReason.style.display = "none";
setStatus("warn", "CANCELLED · Reasoning stopped");
setHfStatus("cancelled (stopped by user)");
log("Reasoning cancelled by user.", "w");
}
function runEngage() {
if (!state.hasReasoned) {
log("Please run Reason first.", "w");
return;
}
if (state.hf.asyncJobId) {
log("Processing still in progress. Please wait.", "w");
// If we are streaming, make sure we are on the engage tab to see it
const engageTab = $(`.tabbtn[data-tab="engage"]`);
if (engageTab) engageTab.click();
return;
}
// Switch to engage tab
const engageTab = $(`.tabbtn[data-tab="engage"]`);
if (engageTab) engageTab.click();
// Set video source
if (videoEngage) {
videoEngage.src = state.hf.processedUrl || state.videoUrl;
videoEngage.play().catch(err => {
log(`Video playback failed: ${err.message}`, "e");
});
}
state.tracker.running = true;
state.tracker.lastFrameTime = APP.core.utils.now();
// Ensure tracks are seeded
if (state.tracker.tracks.length === 0) {
seedTracksFromTab1();
}
log("Engage sequence started.", "g");
}
function loop() {
const { now } = APP.core.utils;
const t = now();
// Guard against huge dt on first frame
if (state.tracker.lastFrameTime === 0) state.tracker.lastFrameTime = t;
const dt = Math.min((t - state.tracker.lastFrameTime) / 1000, 0.1);
state.tracker.lastFrameTime = t;
// Update tracker when engaged
if (state.tracker.running && videoEngage && !videoEngage.paused) {
// DEMO MODE BYPASS
if (APP.core.demo.active && APP.core.demo.data) {
const demoTracks = getDemoFrameData(videoEngage.currentTime);
if (demoTracks) {
// Deep clone to avoid mutating source data
const tracksClone = JSON.parse(JSON.stringify(demoTracks));
state.tracker.tracks = tracksClone.map(d => ({
...d,
// Ensure defaults
lastSeen: t,
state: "TRACK",
depth_valid: true,
depth_est_m: d.gpt_distance_m || 1000,
}));
// Normalize if needed (frontend usually expects 0..1)
const w = videoEngage.videoWidth || state.frame.w || 1280;
const h = videoEngage.videoHeight || state.frame.h || 720;
state.tracker.tracks.forEach(tr => {
// Check if inputs are absolute pixels (if x > 1 or w > 1)
// We assume demo data is in pixels (as per spec)
if (tr.bbox.x > 1 || tr.bbox.w > 1) {
tr.bbox.x /= w;
tr.bbox.y /= h;
tr.bbox.w /= w;
tr.bbox.h /= h;
}
// Note: history in 'tr' is also in pixels in the source JSON.
// But we don't normalize history here because radar.js currently handles raw pixels for history?
// Actually, we should probably standardize everything to normalized if possible,
// but let's check radar.js first.
});
} else {
// NORMAL MODE
predictTracks(dt);
// Sync with backend every few frames (approx 5Hz)
if (t - state.tracker.lastHFSync > 200) {
// Estimate frame index
const fps = 30; // hardcoded for now, ideal: state.fps
const frameIdx = Math.floor(videoEngage.currentTime * fps);
// Only sync if we have a job ID
if (state.hf.asyncJobId) {
APP.core.tracker.syncWithBackend(frameIdx);
}
state.tracker.lastHFSync = t;
}
}
}
} // End if(running)
// Render UI
if (renderFrameRadar) renderFrameRadar();
if (renderLiveRadar) renderLiveRadar();
if (renderFrameOverlay) renderFrameOverlay();
if (renderEngageOverlay) renderEngageOverlay();
if (tickAgentCursor) tickAgentCursor();
requestAnimationFrame(loop);
}
// Expose state for debugging
window.__LP_STATE__ = state;
// Start
init();
});