File size: 13,786 Bytes
ff50694
 
c17ec01
ff50694
 
 
c17ec01
 
 
ff50694
c17ec01
 
 
 
 
 
 
 
ff50694
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c17ec01
ff50694
c17ec01
ff50694
 
c17ec01
 
 
 
 
 
 
 
 
 
 
ff50694
 
 
 
 
c17ec01
 
ff50694
 
c17ec01
ff50694
c17ec01
 
 
ff50694
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56e4f43
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d257dcc
56e4f43
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ff50694
 
 
 
56e4f43
ff50694
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56e4f43
 
 
 
 
 
 
 
ff50694
 
 
 
 
 
6a99834
ff50694
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1a9b396
 
 
56e4f43
1a9b396
ff50694
 
 
 
 
 
 
 
 
 
 
c17ec01
ff50694
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c17ec01
ff50694
c17ec01
ff50694
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c17ec01
ff50694
c17ec01
 
ff50694
 
 
 
c17ec01
ff50694
0effcd5
6a99834
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0effcd5
 
 
 
 
 
 
0c01887
 
 
0effcd5
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
// API Client Module - Backend communication
// Namespace container for all HTTP calls to the detection backend.
// NOTE(review): assumes the global APP object (and APP.api) is created by an
// earlier script in the bundle — confirm load order.
APP.api.client = {};

/**
 * Submit a multipart payload to the backend's async detection endpoint.
 * On success, absolutizes and stores the returned status/video/depth URLs
 * into state.hf and returns the parsed response payload.
 * Returns undefined when no backend base URL is configured.
 * Throws Error with the backend's `detail` message on a non-OK response.
 */
APP.api.client.hfDetectAsync = async function (formData) {
    const { state } = APP.core;
    if (!state.hf.baseUrl) return;

    // The backend may return relative paths; absolutize against the base URL.
    // (This rule was previously copy-pasted four times below.)
    const toAbsolute = (u) => (u.startsWith("http") ? u : `${state.hf.baseUrl}${u}`);

    const resp = await fetch(`${state.hf.baseUrl}/detect/async`, {
        method: "POST",
        body: formData
    });

    if (!resp.ok) {
        const err = await resp.json().catch(() => ({ detail: resp.statusText }));
        throw new Error(err.detail || "Async detection submission failed");
    }

    const data = await resp.json();

    // Store URLs from response — only overwrite state for URLs that are present.
    if (data.status_url) state.hf.statusUrl = toAbsolute(data.status_url);
    if (data.video_url) state.hf.videoUrl = toAbsolute(data.video_url);
    if (data.depth_video_url) state.hf.depthVideoUrl = toAbsolute(data.depth_video_url);
    if (data.depth_first_frame_url) state.hf.depthFirstFrameUrl = toAbsolute(data.depth_first_frame_url);

    return data;
};

/**
 * Fetch the current status of a backend job.
 * Prefers the absolute status URL captured at submission time and falls
 * back to building one from the base URL and job id. Returns the parsed
 * status payload, `{status:"error"}` when no base URL is configured, or
 * `{status:"not_found"}` on a 404. Throws on any other HTTP failure.
 */
APP.api.client.checkJobStatus = async function (jobId) {
    const { state } = APP.core;
    if (!state.hf.baseUrl) return { status: "error" };

    const statusUrl = state.hf.statusUrl || `${state.hf.baseUrl}/detect/job/${jobId}`;
    const response = await fetch(statusUrl, { cache: "no-store" });

    if (response.ok) return await response.json();
    if (response.status === 404) return { status: "not_found" };
    throw new Error(`Status check failed: ${response.status}`);
};

/**
 * Ask the backend to cancel a running job via DELETE.
 * Skipped entirely on HF Spaces, whose hosted backend does not support it.
 * Never throws: failures are logged and surfaced as result objects —
 * `{status:"not_found"}` for a missing job, `{status:"error", message}`
 * for network/HTTP errors, `{status:"skipped", ...}` on HF Space.
 */
APP.api.client.cancelBackendJob = async function (jobId, reason) {
    const { state } = APP.core;
    const { log } = APP.ui.logging;

    if (!state.hf.baseUrl || !jobId) return;

    // Don't attempt cancel on HF Space (it doesn't support it)
    if (state.hf.baseUrl.includes("hf.space")) {
        log(`Job cancel skipped for HF Space (${reason || "user request"})`, "w");
        return { status: "skipped", message: "Cancel disabled for HF Space" };
    }

    try {
        const resp = await fetch(`${state.hf.baseUrl}/detect/job/${jobId}`, {
            method: "DELETE"
        });

        if (!resp.ok) {
            if (resp.status === 404) return { status: "not_found" };
            throw new Error("Cancel failed");
        }

        const payload = await resp.json();
        log(`Job ${jobId.substring(0, 8)} cancelled`, "w");
        return payload;
    } catch (err) {
        log(`Cancel error: ${err.message}`, "e");
        return { status: "error", message: err.message };
    }
};

/**
 * Sync GPT enrichment data from polled first_frame_detections into state.detections.
 * Phase A copies assessment/relevance fields; Phase B (gated on any record
 * carrying `gpt_raw`) merges the full GPT feature set and threat fields.
 * Triggers a card re-render when anything changed.
 * @param {Array<Object>} rawDets - detection records from the status poll
 * @param {string} [logLabel] - optional suffix for the success log line
 * @returns {boolean} true if any card was updated and needs re-render
 */
APP.api.client._syncGptFromDetections = function (rawDets, logLabel) {
    const { state } = APP.core;
    const { log } = APP.ui.logging;
    let needsRender = false;

    // Index existing cards once and derive fallback track ids from position.
    // (The previous rawDets.indexOf(rd) + state.detections.find(...) pattern
    // made each phase O(n^2); this is O(n) with identical results.)
    const byId = new Map((state.detections || []).map(d => [d.id, d]));
    const trackIdAt = (rd, i) => rd.track_id || `T${String(i + 1).padStart(2, "0")}`;

    // Phase A: Sync assessment status, relevance fields
    rawDets.forEach((rd, i) => {
        const existing = byId.get(trackIdAt(rd, i));
        if (!existing) return;
        if (rd.assessment_status && existing.assessment_status !== rd.assessment_status) {
            existing.assessment_status = rd.assessment_status;
            needsRender = true;
        }
        if (rd.mission_relevant !== undefined && rd.mission_relevant !== null) {
            existing.mission_relevant = rd.mission_relevant;
        }
        if (rd.relevance_reason) {
            existing.relevance_reason = rd.relevance_reason;
        }
    });

    // Phase B: Full GPT feature merge (gated on gpt_raw)
    const hasGptData = rawDets.some(d => d.gpt_raw);
    if (hasGptData) {
        state.hf.firstFrameDetections = rawDets;
        rawDets.forEach((rd, i) => {
            const existing = byId.get(trackIdAt(rd, i));
            if (existing && rd.gpt_raw) {
                const g = rd.gpt_raw;
                existing.features = APP.core.gptMapping.buildFeatures(g);
                // Top-level fields win over gpt_raw; fall back to neutral defaults.
                existing.threat_level_score = rd.threat_level_score || g.threat_level_score || 0;
                existing.threat_classification = rd.threat_classification || g.threat_classification || "Unknown";
                existing.weapon_readiness = rd.weapon_readiness || g.weapon_readiness || "Unknown";
                existing.gpt_distance_m = rd.gpt_distance_m || null;
                existing.gpt_direction = rd.gpt_direction || null;
                needsRender = true;
            }
        });
        log(`Track cards updated with GPT assessment${logLabel ? " (" + logLabel + ")" : ""}`, "g");
    }

    if (needsRender && APP.ui && APP.ui.cards && APP.ui.cards.renderFrameTrackList) {
        APP.ui.cards.renderFrameTrackList();
    }

    return needsRender;
};

/**
 * Poll the async job's status URL on a fixed interval until it completes,
 * fails, expires (404), errors, or times out (200 attempts x 3 s = 10 min).
 * Resolves after the processed video, depth video, and depth first frame
 * have all been fetched; rejects on any terminal failure.
 * Side effects: updates state.hf.asyncStatus/asyncProgress every cycle,
 * stores the interval handle in state.hf.asyncPollInterval so other code
 * can cancel it, clears state.hf.asyncJobId on terminal states, and saves
 * state.hf.completedJobId for post-completion syncing.
 */
APP.api.client.pollAsyncJob = async function () {
    const { state } = APP.core;
    const { log, setHfStatus } = APP.ui.logging;
    const { fetchProcessedVideo, fetchDepthVideo, fetchDepthFirstFrame } = APP.core.video;
    const syncGpt = APP.api.client._syncGptFromDetections;

    const pollInterval = 3000; // 3 seconds
    const maxAttempts = 200;   // 10 minutes max
    let attempts = 0;
    // Guards against overlapping downloads: a later tick can fire while an
    // earlier "completed" tick is still fetching the result videos.
    let fetchingVideo = false;

    return new Promise((resolve, reject) => {
        state.hf.asyncPollInterval = setInterval(async () => {
            attempts++;

            try {
                // no-store: always hit the server, never a cached status.
                const resp = await fetch(state.hf.statusUrl, { cache: "no-store" });

                if (!resp.ok) {
                    // 404 means the job was evicted server-side; stop polling.
                    if (resp.status === 404) {
                        clearInterval(state.hf.asyncPollInterval);
                        reject(new Error("Job expired or not found"));
                        return;
                    }
                    throw new Error(`Status check failed: ${resp.statusText}`);
                }

                const status = await resp.json();
                state.hf.asyncStatus = status.status;
                state.hf.asyncProgress = status;

                if (status.status === "completed") {
                    // Another tick already owns the download; let it finish.
                    if (fetchingVideo) return;
                    fetchingVideo = true;

                    const completedJobId = state.hf.asyncJobId;
                    log(`✓ Backend job ${completedJobId.substring(0, 8)}: completed successfully`, "g");
                    setHfStatus("job completed, fetching video...");

                    // Final GPT sync — enrichment may have completed during
                    // processing but the poll never landed on a "processing"
                    // cycle that picked it up (common for segmentation mode
                    // where _enrich_first_frame_gpt is skipped).
                    if (status.first_frame_detections && status.first_frame_detections.length > 0) {
                        syncGpt(status.first_frame_detections, "final sync");
                    }

                    try {
                        await fetchProcessedVideo();
                        await fetchDepthVideo();
                        await fetchDepthFirstFrame();

                        clearInterval(state.hf.asyncPollInterval);
                        state.hf.completedJobId = state.hf.asyncJobId;  // preserve for post-completion sync
                        state.hf.asyncJobId = null;
                        setHfStatus("ready");
                        resolve();
                    } catch (err) {
                        // VIDEO_PENDING: the backend is still finalizing the
                        // file — release the guard and retry on the next tick.
                        if (err && err.code === "VIDEO_PENDING") {
                            setHfStatus("job completed, finalizing video...");
                            fetchingVideo = false;
                            return;
                        }
                        clearInterval(state.hf.asyncPollInterval);
                        state.hf.asyncJobId = null;
                        reject(err);
                    }
                } else if (status.status === "failed") {
                    clearInterval(state.hf.asyncPollInterval);
                    const errMsg = status.error || "Processing failed";
                    log(`✗ Backend job ${state.hf.asyncJobId.substring(0, 8)}: failed - ${errMsg}`, "e");
                    state.hf.asyncJobId = null;
                    setHfStatus(`error: ${errMsg}`);
                    reject(new Error(errMsg));
                } else {
                    // Still processing
                    const progressInfo = status.progress ? ` (${Math.round(status.progress * 100)}%)` : "";
                    setHfStatus(`job ${state.hf.asyncJobId.substring(0, 8)}: ${status.status}${progressInfo} (${attempts})`);

                    // Check if GPT enrichment has updated first-frame detections
                    if (status.first_frame_detections && status.first_frame_detections.length > 0) {
                        syncGpt(status.first_frame_detections);
                    }
                }

                if (attempts >= maxAttempts) {
                    clearInterval(state.hf.asyncPollInterval);
                    reject(new Error("Polling timeout (10 minutes)"));
                }
            } catch (err) {
                // Any unexpected error (network, JSON parse) aborts polling.
                clearInterval(state.hf.asyncPollInterval);
                reject(err);
            }
        }, pollInterval);
    });
};

// Default external hooks. Each is a stub meant to be overwritten by the
// user with a real implementation; until then it logs the call and yields
// an empty result so callers can proceed safely.

// External detection hook (can be replaced by user)
APP.api.client.externalDetect = async (input) => {
    console.log("externalDetect called", input);
    return [];
};

// External features hook (can be replaced by user)
APP.api.client.externalFeatures = async (detections, frameInfo) => {
    console.log("externalFeatures called for", detections.length, "objects");
    return {};
};

// External tracker hook (can be replaced by user)
APP.api.client.externalTrack = async (videoEl) => {
    console.log("externalTrack called");
    return [];
};

/**
 * Call HF object detection directly (for first frame).
 * Encodes the canvas as a JPEG multipart upload and POSTs it either to the
 * configured proxy (`CONFIG.PROXY_URL`, trailing slash stripped) or, by
 * default, to the backend's /detect/frame endpoint.
 * @param {HTMLCanvasElement} canvas - frame to analyze
 * @returns {Promise<Object>} parsed detection payload
 * @throws {Error} server-supplied detail (proxy path) or statusText (backend)
 */
APP.api.client.callHfObjectDetection = async function (canvas) {
    const { state } = APP.core;
    const { canvasToBlob } = APP.core.utils;
    const { CONFIG } = APP.core;

    // Both code paths upload the same single-field form; build it once
    // (previously duplicated in each branch).
    const blob = await canvasToBlob(canvas);
    const form = new FormData();
    form.append("image", blob, "frame.jpg");

    const proxyBase = (CONFIG.PROXY_URL || "").trim();

    if (proxyBase) {
        const resp = await fetch(`${proxyBase.replace(/\/$/, "")}/detect`, {
            method: "POST",
            body: form
        });

        if (!resp.ok) {
            let detail = `Proxy inference failed (${resp.status})`;
            try {
                const err = await resp.json();
                detail = err.detail || err.error || detail;
            } catch (_) { }
            throw new Error(detail);
        }

        return await resp.json();
    }

    // Default: use the backend base URL
    const resp = await fetch(`${state.hf.baseUrl}/detect/frame`, {
        method: "POST",
        body: form
    });

    if (!resp.ok) {
        throw new Error(`Frame detection failed: ${resp.statusText}`);
    }

    return await resp.json();
};

/**
 * Capture the current video frame and send it, with the active tracks, to
 * the backend for GPT analysis. Track bboxes arrive normalized (0-1) and
 * are converted back to pixel coordinates. The active or last-completed
 * job id is attached when available.
 * @param {HTMLVideoElement} videoEl - source of the frame snapshot
 * @param {Array<Object>} tracks - tracked detections with normalized bboxes
 * @returns {Promise<Object>} parsed analysis payload
 * @throws {Error} on a non-OK backend response
 */
APP.api.client.analyzeFrame = async function (videoEl, tracks) {
    const { state } = APP.core;
    const { canvasToBlob } = APP.core.utils;

    // Snapshot the current frame into an offscreen canvas.
    const frame = document.createElement("canvas");
    frame.width = videoEl.videoWidth;
    frame.height = videoEl.videoHeight;
    frame.getContext("2d").drawImage(videoEl, 0, 0);
    const blob = await canvasToBlob(frame);

    // Convert normalized bbox (0-1) back to pixel coords for backend
    const width = frame.width;
    const height = frame.height;
    const dets = tracks.map((t) => {
        const { x, y, w: bw, h: bh } = t.bbox;
        return {
            track_id: t.id,
            label: t.label,
            bbox: [
                Math.round(x * width),
                Math.round(y * height),
                Math.round((x + bw) * width),
                Math.round((y + bh) * height),
            ],
            score: t.score,
        };
    });

    const form = new FormData();
    form.append("image", blob, "frame.jpg");
    form.append("detections", JSON.stringify(dets));
    const jobId = state.hf.asyncJobId || state.hf.completedJobId;
    if (jobId) form.append("job_id", jobId);

    const resp = await fetch(`${state.hf.baseUrl}/detect/analyze-frame`, {
        method: "POST",
        body: form,
    });
    if (!resp.ok) throw new Error(`Frame analysis failed: ${resp.statusText}`);
    return await resp.json();
};

/**
 * Chat about threats using GPT via the backend /chat/threat endpoint.
 * Sends the question and detection list as multipart form fields, plus the
 * mission spec as context when one is loaded.
 * @param {string} question - user's free-text question
 * @param {Array<Object>} detections - current detections to reason over
 * @returns {Promise<Object>} parsed chat response
 * @throws {Error} backend `detail` message (or statusText) on failure
 */
APP.api.client.chatAboutThreats = async function (question, detections) {
    const { state } = APP.core;

    // Assemble the multipart payload; mission context is optional.
    const payload = new FormData();
    payload.append("question", question);
    payload.append("detections", JSON.stringify(detections));
    if (state.hf.missionSpec) {
        payload.append("mission_context", JSON.stringify(state.hf.missionSpec));
    }

    const resp = await fetch(`${state.hf.baseUrl}/chat/threat`, {
        method: "POST",
        body: payload
    });

    if (resp.ok) return await resp.json();

    const err = await resp.json().catch(() => ({ detail: resp.statusText }));
    throw new Error(err.detail || "Chat request failed");
};