// Author: Zhen Ye
// refactor: rename hf_yolov8 β†’ yolo11 across codebase
// commit: f89fa0b
// Main Entry Point - Wire up all event handlers and run the application
document.addEventListener("DOMContentLoaded", () => {
// Shortcuts
const { state } = APP.core;
const { $, $$ } = APP.core.utils;
const { log, setStatus, setHfStatus } = APP.ui.logging;
const { hfDetectAsync, checkJobStatus, cancelBackendJob, pollAsyncJob } = APP.api.client;
// Core modules
const { captureFirstFrame, drawFirstFrame, unloadVideo, toggleDepthView, toggleFirstFrameDepthView, toggleProcessedFeed, resizeOverlays, setStreamingMode, stopStreamingMode, displayProcessedFirstFrame } = APP.core.video;
const { syncKnobDisplays, recomputeHEL } = APP.core.hel;
const { load: loadDemo, getFrameData: getDemoFrameData, enable: enableDemo } = APP.core.demo;
// UI Renderers
const { renderFrameOverlay, renderEngageOverlay, initClickHandler } = APP.ui.overlays;
const { renderFrameTrackList } = APP.ui.cards;
const { tickAgentCursor, moveCursorToRect } = APP.ui.cursor;
const { matchAndUpdateTracks, predictTracks } = APP.core.tracker;
const { defaultAimpoint } = APP.core.physics;
// DOM Elements
const videoEngage = $("#videoEngage");
const videoHidden = $("#videoHidden");
const videoFile = $("#videoFile");
const btnReason = $("#btnReason");
const btnCancelReason = $("#btnCancelReason");
const btnRecompute = $("#btnRecompute");
const btnClear = $("#btnClear");
const btnEject = $("#btnEject");
const btnEngage = $("#btnEngage");
const btnReset = $("#btnReset");
const btnPause = $("#btnPause");
const btnToggleSidebar = $("#btnToggleSidebar");
const detectorSelect = $("#detectorSelect");
const missionText = $("#missionText");
const cursorMode = $("#cursorMode");
const frameCanvas = $("#frameCanvas");
const frameTrackList = $("#frameTrackList");
const frameEmpty = $("#frameEmpty");
const frameNote = $("#frameNote");
const engageEmpty = $("#engageEmpty");
const engageNote = $("#engageNote");
const chipFeed = $("#chipFeed");
const chipDepth = $("#chipDepth");
const chipFrameDepth = $("#chipFrameDepth");
// Initialization
// Bootstraps the application: wires all UI subsystems, brings displays
// to a known state, starts the render loop, and preloads optional demo data.
function init() {
  log("System initializing...", "t");

  // Each subsystem wires its own listeners; order mirrors the UI layout.
  const subsystems = [
    setupFileUpload,
    setupControls,
    setupKnobListeners,
    setupChipToggles,
    setupTabSwitching,
  ];
  subsystems.forEach((setup) => setup());

  // Initial UI sync: knob readouts and backend status chip.
  syncKnobDisplays();
  setHfStatus("idle");

  // Enable click-to-select on the engage overlay.
  initClickHandler();

  // Kick off the per-frame main loop.
  requestAnimationFrame(loop);

  // Demo data is optional; activation stays manual/heuristic.
  loadDemo().then(() => {
    // hidden usage: enable if video filename matches "demo" or manually
    // APP.core.demo.enable(true);
  });

  log("System READY.", "g");
}
// Wires the video file <input>: on selection, loads the file into the
// engage player, captures first-frame dimensions, and auto-enables demo
// mode for known demo videos.
function setupFileUpload() {
  if (!videoFile) return;
  videoFile.addEventListener("change", async (e) => {
    const file = e.target.files[0];
    if (!file) return;
    // FIX: revoke the previous blob URL before replacing it — otherwise
    // every upload leaks the prior video's object URL until page unload.
    if (state.videoUrl) URL.revokeObjectURL(state.videoUrl);
    state.videoFile = file;
    state.videoUrl = URL.createObjectURL(file);
    state.videoLoaded = true;
    // Show meta
    const videoMeta = $("#videoMeta");
    if (videoMeta) videoMeta.textContent = file.name;
    // Load video into engage player
    if (videoEngage) {
      videoEngage.src = state.videoUrl;
      videoEngage.load();
    }
    // Hide empty states
    if (engageEmpty) engageEmpty.style.display = "none";
    // Capture first frame dimensions (but don't draw - wait for processed frame from backend)
    try {
      await captureFirstFrame();
      // Show placeholder message - actual frame will come from backend
      if (frameNote) frameNote.textContent = "Video loaded (run Detect for processed frame)";
      if (engageNote) engageNote.textContent = "Ready for Track";
    } catch (err) {
      // Non-fatal: the processed frame from the backend can still be shown.
      log(`First frame capture failed: ${err.message}`, "e");
    }
    setStatus("warn", "READY Β· Video loaded (run Detect)");
    log(`Video loaded: ${file.name}`, "g");
    // Load video-specific demo tracks (e.g., helicopter demo)
    if (APP.core.demo.loadForVideo) {
      await APP.core.demo.loadForVideo(file.name);
    }
    // Auto-enable demo mode if filename contains "demo" or helicopter video
    const lowerName = file.name.toLowerCase();
    const shouldEnableDemo = lowerName.includes("demo") ||
      lowerName.includes("enhance_video_movement");
    if (shouldEnableDemo && APP.core.demo.data) {
      enableDemo(true);
      log("Auto-enabled DEMO mode for this video.", "g");
    }
  });
}
// Attaches click handlers to every control button that exists in the DOM.
function setupControls() {
  // Helper: attach only when the element is present.
  const wire = (el, handler) => {
    if (el) el.addEventListener("click", handler);
  };

  // Reason / Cancel Reason
  wire(btnReason, runReason);
  wire(btnCancelReason, cancelReasoning);

  // Recompute HEL parameters (only meaningful after a Detect pass).
  wire(btnRecompute, async () => {
    if (!state.hasReasoned) return;
    await recomputeHEL();
    renderFrameOverlay();
    log("Parameters recomputed.", "g");
  });

  // Clear all current detections and redraw.
  wire(btnClear, () => {
    state.detections = [];
    state.selectedId = null;
    renderFrameTrackList();
    renderFrameOverlay();
    log("Detections cleared.", "t");
  });

  // Eject the loaded video.
  wire(btnEject, async () => {
    await unloadVideo();
  });

  // Start tracking.
  wire(btnEngage, runEngage);

  // Pause playback and halt the tracker.
  wire(btnPause, () => {
    if (videoEngage) videoEngage.pause();
    state.tracker.running = false;
    log("Tracking paused.", "t");
  });

  // Rewind and reset all tracker state.
  wire(btnReset, () => {
    if (videoEngage) {
      videoEngage.pause();
      videoEngage.currentTime = 0;
    }
    state.tracker.tracks = [];
    state.tracker.running = false;
    state.tracker.nextId = 1;
    renderFrameTrackList();
    log("Tracking reset.", "t");
  });

  // Sidebar toggle (Tab 2): flip collapsed class and update the label.
  wire(btnToggleSidebar, () => {
    const engageGrid = $(".engage-grid");
    if (!engageGrid) return;
    engageGrid.classList.toggle("sidebar-collapsed");
    const collapsed = engageGrid.classList.contains("sidebar-collapsed");
    btnToggleSidebar.textContent = collapsed ? "β–Ά Show Sidebar" : "β—€ Hide Sidebar";
  });
}
// Track selection event β€” keeps both tabs in sync and scrolls the
// matching card (Tab 2 sidebar) into view.
document.addEventListener("track-selected", (e) => {
  const { id } = e.detail;
  state.selectedId = id;
  state.tracker.selectedTrackId = id;
  renderFrameTrackList();
  renderFrameOverlay();
  renderEngageOverlay();
  const card = document.getElementById(`card-${id}`);
  card?.scrollIntoView({ behavior: "smooth", block: "nearest" });
});
// Cursor mode toggle: hide the agent cursor immediately when switched off.
if (cursorMode) {
  cursorMode.addEventListener("change", () => {
    state.ui.cursorMode = cursorMode.value;
    if (state.ui.cursorMode === "off" && APP.ui.cursor.setCursorVisible) {
      APP.ui.cursor.setCursorVisible(false);
    }
  });
}
// Re-syncs knob value displays on every input/select change; once a
// Detect pass has run, also recomputes HEL parameters and redraws the
// frame overlay.
function setupKnobListeners() {
  // Listen to all inputs and selects for knob updates
  const inputs = Array.from(document.querySelectorAll("input, select"));
  inputs.forEach(el => {
    el.addEventListener("input", async () => {
      syncKnobDisplays();
      if (state.hasReasoned) {
        // FIX: await the recompute so the overlay redraw reflects the new
        // values — matches the Recompute button handler, which awaits it.
        await recomputeHEL();
        renderFrameOverlay();
      }
    });
  });
  // Initial sync so displays match default knob values on load.
  syncKnobDisplays();
}
// Makes the three view-toggle chips clickable. Each chip needs a loaded
// video; the first-frame depth chip additionally needs a depth URL.
function setupChipToggles() {
  // One descriptor per chip: element, pre-toggle guard (may veto and log),
  // toggle action, and log-message builder.
  const chips = [
    {
      el: chipFeed,
      guard: () => true,
      toggle: toggleProcessedFeed,
      message: () => `Feed set to: ${state.useProcessedFeed ? "HF" : "RAW"}`,
    },
    {
      el: chipDepth,
      guard: () => true,
      toggle: toggleDepthView,
      message: () => `Engage view set to: ${state.useDepthFeed ? "DEPTH" : "DEFAULT"}`,
    },
    {
      el: chipFrameDepth,
      guard: () => {
        if (!state.hf.depthFirstFrameUrl) {
          log("First frame depth not ready yet. Run Detect and wait for depth processing.", "w");
          return false;
        }
        return true;
      },
      toggle: toggleFirstFrameDepthView,
      message: () => `First frame view set to: ${state.useFrameDepthView ? "DEPTH" : "DEFAULT"}`,
    },
  ];

  chips.forEach(({ el, guard, toggle, message }) => {
    if (!el) return;
    el.style.cursor = "pointer";
    el.addEventListener("click", () => {
      if (!state.videoLoaded) return;
      if (!guard()) return;
      toggle();
      log(message(), "t");
    });
  });
}
// Wires the tab bar: clicking a .tabbtn activates it and its matching
// #tab-<name> panel, deactivating all others.
function setupTabSwitching() {
  const tabButtons = Array.from(document.querySelectorAll(".tabbtn"));
  for (const btn of tabButtons) {
    btn.addEventListener("click", () => {
      // Deactivate every button and panel, then activate the clicked pair.
      tabButtons.forEach(b => b.classList.remove("active"));
      document.querySelectorAll(".tab").forEach(panel => panel.classList.remove("active"));
      btn.classList.add("active");
      const panel = $(`#tab-${btn.dataset.tab}`);
      if (panel) panel.classList.add("active");
      // The engage tab must re-measure its overlays once visible.
      if (btn.dataset.tab === "engage") {
        resizeOverlays();
      }
    });
  }
}
// Runs the full "Detect/Reason" pipeline for the loaded video.
// Flow:
//   1. Guards: requires a loaded video and no detection already in flight.
//   2. Locks the UI (Reason/Engage disabled, Cancel shown) and clears
//      previous detections.
//   3. Builds a FormData job from the detector/segmenter/mode/mission inputs.
//   4. Submits via hfDetectAsync; displays the backend-processed first frame,
//      maps its detections, and records depth/stream URLs.
//   5. Starts background polling (pollAsyncJob); on completion seeds Tab 2
//      tracks and re-enables Engage.
// NOTE: the `finally` block unlocks the UI as soon as the job is *submitted*
// (processing continues in the background); runEngage() separately guards on
// state.hf.asyncJobId until polling completes.
async function runReason() {
  if (!state.videoLoaded) {
    log("No video loaded. Upload a video first.", "w");
    setStatus("warn", "READY Β· Upload a video");
    return;
  }
  if (state.isReasoning) {
    log("Detection already in progress. Please wait.", "w");
    return;
  }
  // Lock the Reason process
  state.isReasoning = true;
  if (btnReason) {
    btnReason.disabled = true;
    btnReason.style.opacity = "0.5";
    btnReason.style.cursor = "not-allowed";
  }
  if (btnCancelReason) btnCancelReason.style.display = "inline-block";
  if (btnEngage) btnEngage.disabled = true;
  // Clear previous detections
  state.detections = [];
  state.selectedId = null;
  renderFrameTrackList();
  renderFrameOverlay();
  setStatus("warn", "DETECTING Β· Running perception pipeline");
  // Agent cursor flair: staggered cursor hops across the relevant panels.
  if (state.ui.cursorMode === "on" && moveCursorToRect) {
    if (btnReason) moveCursorToRect(btnReason.getBoundingClientRect());
    if (frameCanvas) setTimeout(() => moveCursorToRect(frameCanvas.getBoundingClientRect()), 260);
    if (frameTrackList) setTimeout(() => moveCursorToRect(frameTrackList.getBoundingClientRect()), 560);
  }
  try {
    const selectedOption = detectorSelect ? detectorSelect.options[detectorSelect.selectedIndex] : null;
    const selectedValue = detectorSelect ? detectorSelect.value : "yolo11";
    const kind = selectedOption ? selectedOption.getAttribute("data-kind") : "object";
    const queries = missionText ? missionText.value.trim() : "";
    const enableGPT = $("#enableGPTToggle")?.checked || false;
    const enableDepth = false; // depth mode disabled
    // Determine mode and model parameter from data-kind attribute
    let mode, detectorParam, segmenterParam;
    if (kind === "segmentation") {
      mode = "segmentation";
      segmenterParam = selectedValue;
      detectorParam = "yolo11"; // default, unused for segmentation
    } else if (kind === "drone") {
      mode = "drone_detection";
      detectorParam = selectedValue;
      segmenterParam = "GSAM2-L";
    } else {
      mode = "object_detection";
      detectorParam = selectedValue;
      segmenterParam = "GSAM2-L";
    }
    const form = new FormData();
    form.append("video", state.videoFile);
    form.append("mode", mode);
    if (queries) form.append("queries", queries);
    form.append("detector", detectorParam);
    form.append("segmenter", segmenterParam);
    form.append("enable_gpt", enableGPT ? "true" : "false");
    form.append("enable_depth", enableDepth ? "true" : "false");
    log(`Submitting job to ${state.hf.baseUrl}...`, "t");
    setHfStatus("submitting job...");
    const data = await hfDetectAsync(form);
    state.hf.asyncJobId = data.job_id;
    // Store mission specification for chatbot context
    if (data.mission_spec) {
      state.hf.missionSpec = data.mission_spec;
    }
    // Store raw detections (will process after image loads to get correct dimensions)
    const rawDetections = data.first_frame_detections || [];
    // Display processed first frame from backend (only processed frame, not raw)
    // This is async - image loading will update state.frame.w/h
    if (data.first_frame_url) {
      state.hf.firstFrameUrl = data.first_frame_url.startsWith("http")
        ? data.first_frame_url
        : `${state.hf.baseUrl}${data.first_frame_url}`;
      // Wait for image to load so we have correct dimensions before processing detections
      await new Promise((resolve, reject) => {
        const img = new Image();
        img.crossOrigin = "anonymous";
        img.onload = () => {
          // Update frame dimensions from loaded image
          state.frame.w = img.naturalWidth || 1280;
          state.frame.h = img.naturalHeight || 720;
          // Resize canvases to match
          // NOTE(review): these re-queries shadow the outer frameCanvas
          // binding; same "#frameCanvas" element either way.
          const frameCanvas = $("#frameCanvas");
          const frameOverlay = $("#frameOverlay");
          if (frameCanvas) {
            frameCanvas.width = state.frame.w;
            frameCanvas.height = state.frame.h;
            frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);
          }
          if (frameOverlay) {
            frameOverlay.width = state.frame.w;
            frameOverlay.height = state.frame.h;
          }
          // Hide empty state
          const frameEmpty = $("#frameEmpty");
          const frameNote = $("#frameNote");
          if (frameEmpty) frameEmpty.style.display = "none";
          if (frameNote) frameNote.textContent = "Processed (from backend)";
          log(`Processed first frame displayed (${state.frame.w}Γ—${state.frame.h})`, "g");
          resolve();
        };
        // Image failure is non-fatal: fall back to the locally captured frame.
        img.onerror = () => {
          log("Failed to load processed first frame, using local frame", "w");
          drawFirstFrame();
          resolve();
        };
        img.src = state.hf.firstFrameUrl;
      });
    }
    // NOW process detections (after frame dimensions are correct)
    if (rawDetections.length > 0) {
      processFirstFrameDetections(rawDetections);
    }
    // Mark first frame as ready (for radar display)
    state.firstFrameReady = true;
    // Store depth URLs if provided
    if (data.depth_video_url) {
      state.hf.depthVideoUrl = data.depth_video_url.startsWith("http")
        ? data.depth_video_url
        : `${state.hf.baseUrl}${data.depth_video_url}`;
      log("Depth video URL received", "t");
    }
    if (data.first_frame_depth_url) {
      state.hf.depthFirstFrameUrl = data.first_frame_depth_url.startsWith("http")
        ? data.first_frame_depth_url
        : `${state.hf.baseUrl}${data.first_frame_depth_url}`;
      log("First frame depth URL received", "t");
    }
    // Enable streaming mode if stream_url is provided (Tab 2 live view)
    const enableStream = $("#enableStreamToggle")?.checked;
    if (data.stream_url && enableStream) {
      const streamUrl = data.stream_url.startsWith("http")
        ? data.stream_url
        : `${state.hf.baseUrl}${data.stream_url}`;
      log("Activating live stream...", "t");
      setStreamingMode(streamUrl);
      log("Live view available in 'Track' tab.", "g");
      setStatus("warn", "Live processing... View in Track tab");
      // Trigger resize/render for Tab 2
      resizeOverlays();
    }
    // Start polling for completion.
    // (Deliberately not awaited: submission returns while the backend keeps
    // processing; the .then/.catch below handle completion asynchronously.)
    pollAsyncJob().then(() => {
      log("Video processing complete.", "g");
      // Stop streaming mode once video is ready
      stopStreamingMode();
      state.hasReasoned = true;
      setStatus("good", "READY Β· Detection complete (you can Track)");
      log("Detection complete. Ready to Track.", "g");
      // Seed tracks for Tab 2
      seedTracksFromTab1();
      // Re-enable engage button
      if (btnEngage) btnEngage.disabled = false;
    }).catch(err => {
      log(`Polling error: ${err.message}`, "e");
      stopStreamingMode();
    });
    // Initial status (processing in background)
    setStatus("warn", "PROCESSING Β· Analysing video...");
    log("Reasoning started (processing in background)...", "t");
  } catch (err) {
    setStatus("bad", "ERROR Β· Detection failed");
    log(`Detection failed: ${err.message}`, "e");
    console.error(err);
  } finally {
    // Runs after job SUBMISSION (not completion): unlock the Reason button.
    state.isReasoning = false;
    if (btnReason) {
      btnReason.disabled = false;
      btnReason.style.opacity = "1";
      btnReason.style.cursor = "pointer";
    }
    if (btnCancelReason) btnCancelReason.style.display = "none";
    // Re-enable engage button in case of failure
    if (btnEngage) btnEngage.disabled = false;
  }
}
// Converts raw backend first-frame detections into the app's detection
// records (pixel bbox, default aimpoint, GPT feature mapping, threat and
// assessment metadata), selects the first one, and re-renders Tab 1.
function processFirstFrameDetections(dets) {
  state.detections = dets.map((d, i) => {
    // Backend may omit track_id; synthesize a stable "T01"-style id.
    const id = d.track_id || `T${String(i + 1).padStart(2, "0")}`;
    const ap = defaultAimpoint(d.label || d.class);
    // Backend bbox is [x1, y1, x2, y2]; convert to {x, y, w, h}.
    const bbox = d.bbox
      ? { x: d.bbox[0], y: d.bbox[1], w: d.bbox[2] - d.bbox[0], h: d.bbox[3] - d.bbox[1] }
      : { x: 0, y: 0, w: 10, h: 10 };
    // Build features from universal schema via canonical mapping
    const features = APP.core.gptMapping.buildFeatures(d.gpt_raw);
    return {
      id,
      label: d.label || d.class,
      // FIX: use ?? (not ||) so a legitimate confidence of 0 is preserved.
      score: d.score ?? 0.5,
      bbox,
      aim: { ...ap },
      features,
      baseRange_m: null,
      baseAreaFrac: (bbox.w * bbox.h) / (state.frame.w * state.frame.h),
      baseDwell_s: 5.0,
      reqP_kW: 40,
      maxP_kW: 0,
      pkill: 0,
      // Depth fields — ?? keeps 0 as a valid depth/relative-depth value.
      depth_est_m: d.depth_est_m ?? null,
      depth_rel: d.depth_rel ?? null,
      depth_valid: d.depth_valid ?? false,
      gpt_distance_m: d.gpt_distance_m ?? null,
      gpt_direction: d.gpt_direction || null,
      gpt_description: d.gpt_description || null,
      // Threat Intelligence
      threat_level_score: d.threat_level_score ?? 0,
      threat_classification: d.threat_classification || "Unknown",
      weapon_readiness: d.weapon_readiness || "Unknown",
      // Mission relevance and assessment status
      mission_relevant: d.mission_relevant ?? null,
      relevance_reason: d.relevance_reason || null,
      assessment_status: d.assessment_status || APP.core.gptMapping.STATUS.UNASSESSED,
      assessment_frame_index: d.assessment_frame_index ?? null,
    };
  });
  state.selectedId = state.detections[0]?.id || null;
  renderFrameTrackList();
  renderFrameOverlay();
  log(`Detected ${state.detections.length} objects in first frame.`, "g");
}
// Seeds Tab 2 tracker state from the Tab 1 detections so the Engage view
// starts with the already-detected objects.
function seedTracksFromTab1() {
  const rangeBase = $("#rangeBase");
  const fallbackRange = rangeBase ? +rangeBase.value : 1500;
  const frameArea = state.frame.w * state.frame.h;

  state.tracker.tracks = state.detections.map(d => {
    // Relative aimpoint inside the bbox; center-mass when none was set.
    const aimRel = d.aim
      ? { relx: d.aim.relx, rely: d.aim.rely, label: d.aim.label }
      : { relx: 0.5, rely: 0.5, label: "center_mass" };
    return {
      id: d.id,
      label: d.label,
      bbox: { ...d.bbox },
      score: d.score,
      aimRel,
      baseAreaFrac: d.baseAreaFrac || ((d.bbox.w * d.bbox.h) / frameArea),
      baseRange_m: d.baseRange_m || fallbackRange,
      baseDwell_s: d.baseDwell_s || 4.0,
      reqP_kW: d.reqP_kW || 35,
      depth_rel: d.depth_rel,
      depth_est_m: d.depth_est_m,
      depth_valid: d.depth_valid,
      lastDepthBbox: d.depth_valid ? { ...d.bbox } : null,
      gpt_distance_m: d.gpt_distance_m,
      gpt_direction: d.gpt_direction,
      gpt_description: d.gpt_description,
      lastSeen: APP.core.utils.now(),
      vx: 0,
      vy: 0,
      dwellAccum: 0,
      killed: false,
      state: "TRACK",
      assessT: 0,
    };
  });

  state.tracker.nextId = state.detections.length + 1;
  log(`Seeded ${state.tracker.tracks.length} tracks from Tab 1 detections.`, "t");
}
// Aborts an in-flight Detect run: stops polling and streaming, tells the
// backend to cancel the job, and restores the UI to an idle state.
function cancelReasoning() {
  // Stop HF polling if it is running.
  if (state.hf.asyncPollInterval) {
    clearInterval(state.hf.asyncPollInterval);
    state.hf.asyncPollInterval = null;
    log("HF polling stopped.", "w");
  }

  // Stop any live stream.
  stopStreamingMode();

  // Ask the backend to abandon the job, if one was submitted.
  const jobId = state.hf.asyncJobId;
  if (jobId) {
    cancelBackendJob(jobId, "cancel button");
  }

  // Reset reasoning-related flags in one pass.
  state.isReasoning = false;
  Object.assign(state.hf, {
    busy: false,
    asyncJobId: null,
    completedJobId: null,
    asyncStatus: "cancelled",
  });

  // Restore the Reason button and hide Cancel.
  if (btnReason) {
    btnReason.disabled = false;
    btnReason.style.opacity = "1";
    btnReason.style.cursor = "pointer";
  }
  if (btnCancelReason) btnCancelReason.style.display = "none";

  setStatus("warn", "CANCELLED Β· Detection stopped");
  setHfStatus("cancelled (stopped by user)");
  log("Detection cancelled by user.", "w");
}
// Starts tracking playback in the Engage tab. Requires a completed Detect
// pass; while processing is still running it only jumps to the tab so the
// live stream (if any) is visible.
function runEngage() {
  if (!state.hasReasoned) {
    log("Please run Detect first.", "w");
    return;
  }

  const engageTab = $(`.tabbtn[data-tab="engage"]`);

  if (state.hf.asyncJobId) {
    log("Processing still in progress. Please wait.", "w");
    // If we are streaming, make sure we are on the engage tab to see it
    engageTab?.click();
    return;
  }

  // Switch to the engage tab.
  engageTab?.click();

  // Prefer the backend-processed video; fall back to the raw upload.
  if (videoEngage) {
    videoEngage.src = state.hf.processedUrl || state.videoUrl;
    videoEngage.play().catch(err => {
      log(`Video playback failed: ${err.message}`, "e");
    });
  }

  state.tracker.running = true;
  state.tracker.lastFrameTime = APP.core.utils.now();

  // Ensure tracks are seeded before the loop starts moving them.
  if (state.tracker.tracks.length === 0) {
    seedTracksFromTab1();
  }
  log("Tracking started.", "g");
}
// Per-frame main loop (requestAnimationFrame driven):
//  - keeps track positions fresh whether the video plays or is paused,
//    so bboxes stay clickable;
//  - during active playback, throttles card re-rendering and serializes
//    background GPT frame analysis;
//  - always re-renders both overlays and the agent cursor.
function loop() {
  const { now } = APP.core.utils;
  const t = now();
  // Guard against huge dt on first frame
  if (state.tracker.lastFrameTime === 0) state.tracker.lastFrameTime = t;
  // Clamp dt to 100ms so a backgrounded tab doesn't produce a giant step.
  const dt = Math.min((t - state.tracker.lastFrameTime) / 1000, 0.1);
  state.tracker.lastFrameTime = t;
  // ── Always keep track positions fresh (playing OR paused) ──
  // This ensures bboxes remain clickable regardless of playback state.
  if (state.tracker.running && videoEngage && state.tracker.tracks.length > 0) {
    if (APP.core.demo.active && APP.core.demo.data) {
      // DEMO MODE: sync tracks to current video time (even when paused)
      const demoTracks = getDemoFrameData(videoEngage.currentTime);
      if (demoTracks) {
        // Deep-copy so per-frame mutation below can't corrupt the demo data.
        const tracksClone = JSON.parse(JSON.stringify(demoTracks));
        state.tracker.tracks = tracksClone.map(d => ({
          ...d,
          lastSeen: t,
          state: "TRACK",
          depth_valid: true,
          depth_est_m: d.gpt_distance_m || 1000,
        }));
        const w = videoEngage.videoWidth || state.frame.w || 1280;
        const h = videoEngage.videoHeight || state.frame.h || 720;
        // Normalize pixel-space bboxes to 0..1 (coords > 1 imply pixels).
        state.tracker.tracks.forEach(tr => {
          if (tr.bbox.x > 1 || tr.bbox.w > 1) {
            tr.bbox.x /= w;
            tr.bbox.y /= h;
            tr.bbox.w /= w;
            tr.bbox.h /= h;
          }
        });
      }
    } else {
      // NORMAL MODE: predict positions every frame
      predictTracks(dt);
      // Backend sync every 333ms (works while paused too)
      const jobId = state.hf.asyncJobId || state.hf.completedJobId;
      if (jobId && (t - state.tracker.lastHFSync > 333)) {
        // NOTE(review): frame index assumes a 30 fps source — confirm
        // against the backend's actual frame rate.
        const frameIdx = Math.floor(videoEngage.currentTime * 30);
        APP.core.tracker.syncWithBackend(frameIdx);
        state.tracker.lastHFSync = t;
      }
    }
  }
  // ── Card rendering & GPT analysis: only during active playback ──
  if (state.tracker.running && videoEngage && !videoEngage.paused) {
    state.tracker.frameCount++;
    const framesSinceRender = state.tracker.frameCount - state.tracker._lastCardRenderFrame;
    // Re-render cards when a new object appears, or at most every 40 frames.
    if (state.tracker._newObjectDetected || framesSinceRender >= 40) {
      renderFrameTrackList();
      state.tracker._lastCardRenderFrame = state.tracker.frameCount;
      state.tracker._newObjectDetected = false;
      // _gptBusy serializes requests: at most one analyzeFrame in flight.
      if (!state.tracker._gptBusy && state.tracker.tracks.length > 0) {
        state.tracker._gptBusy = true;
        APP.api.client.analyzeFrame(videoEngage, state.tracker.tracks)
          .then(enriched => {
            // Merge enriched GPT fields back into the matching detections.
            for (const rd of enriched) {
              const tid = rd.track_id || rd.id;
              const existing = (state.detections || []).find(d => d.id === tid);
              if (existing && rd.gpt_raw) {
                existing.gpt_raw = rd.gpt_raw;
                existing.features = APP.core.gptMapping.buildFeatures(rd.gpt_raw);
                existing.assessment_status = rd.assessment_status || "ASSESSED";
                existing.threat_level_score = rd.threat_level_score || 0;
                existing.gpt_description = rd.gpt_description || existing.gpt_description;
                existing.gpt_distance_m = rd.gpt_distance_m || existing.gpt_distance_m;
                existing.gpt_direction = rd.gpt_direction || existing.gpt_direction;
              }
            }
            renderFrameTrackList();
            state.tracker._gptBusy = false;
          })
          .catch(err => {
            // Best-effort enrichment: log and release the busy flag.
            console.warn("Frame GPT analysis failed:", err);
            state.tracker._gptBusy = false;
          });
      }
    }
  }
  // Render UI
  if (renderFrameOverlay) renderFrameOverlay();
  if (renderEngageOverlay) renderEngageOverlay();
  if (tickAgentCursor) tickAgentCursor();
  requestAnimationFrame(loop);
}
// Expose app state on window for console debugging (window.__LP_STATE__)
window.__LP_STATE__ = state;
// Start the application once the DOM is ready
init();
});