Zhen Ye commited on
Commit
ff50694
·
1 Parent(s): c17ec01

feat: Continuous object tracking, speed estimation, and overlay syncing

Browse files
LaserPerception/LaserPerception.css DELETED
@@ -1,927 +0,0 @@
1
- /* =========================================
2
- LaserPerception Design System
3
- ========================================= */
4
-
5
- :root {
6
- /* --- Colors --- */
7
- --bg: #060914;
8
- --panel: #0b1026;
9
- --panel2: #0a0f22;
10
-
11
- --stroke: rgba(255, 255, 255, .08);
12
- --stroke2: rgba(255, 255, 255, .12);
13
-
14
- --text: rgba(255, 255, 255, .92);
15
- --muted: rgba(255, 255, 255, .62);
16
- --faint: rgba(255, 255, 255, .42);
17
-
18
- --good: #22c55e;
19
- --warn: #f59e0b;
20
- --bad: #ef4444;
21
-
22
- --accent: #7c3aed;
23
- --cyan: #22d3ee;
24
- --mag: #fb7185;
25
-
26
- /* --- Effects --- */
27
- --shadow: 0 18px 60px rgba(0, 0, 0, .55);
28
-
29
- /* --- Typography --- */
30
- --mono: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
31
- --sans: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Arial, sans-serif;
32
- }
33
-
34
- * {
35
- box-sizing: border-box;
36
- }
37
-
38
- html,
39
- body {
40
- height: 100%;
41
- margin: 0;
42
- }
43
-
44
- body {
45
- background:
46
- radial-gradient(1200px 700px at 20% 8%, rgba(124, 58, 237, .22), transparent 60%),
47
- radial-gradient(900px 500px at 82% 18%, rgba(34, 211, 238, .18), transparent 60%),
48
- radial-gradient(800px 520px at 52% 82%, rgba(251, 113, 133, .10), transparent 65%),
49
- linear-gradient(180deg, #040614, #060914);
50
- color: var(--text);
51
- font-family: var(--sans);
52
- overflow: hidden;
53
- }
54
-
55
- /* =========================================
56
- Layout & Structure
57
- ========================================= */
58
-
59
- #app {
60
- height: 100%;
61
- display: flex;
62
- flex-direction: column;
63
- }
64
-
65
- header {
66
- display: flex;
67
- align-items: center;
68
- justify-content: space-between;
69
- padding: 14px 16px 12px;
70
- border-bottom: 1px solid var(--stroke);
71
- background: linear-gradient(180deg, rgba(255, 255, 255, .035), transparent);
72
- }
73
-
74
- .workspace {
75
- flex: 1;
76
- display: grid;
77
- grid-template-columns: 540px 1fr;
78
- /* Fixed sidebar width */
79
- gap: 12px;
80
- padding: 12px;
81
- min-height: 0;
82
- }
83
-
84
- aside,
85
- main {
86
- background: rgba(255, 255, 255, .02);
87
- border: 1px solid var(--stroke);
88
- border-radius: 16px;
89
- box-shadow: var(--shadow);
90
- overflow: hidden;
91
- display: flex;
92
- flex-direction: column;
93
- min-height: 0;
94
- }
95
-
96
- footer {
97
- padding: 10px 14px;
98
- border-top: 1px solid var(--stroke);
99
- color: var(--muted);
100
- font-size: 11px;
101
- display: flex;
102
- justify-content: space-between;
103
- align-items: center;
104
- gap: 10px;
105
- background: linear-gradient(0deg, rgba(255, 255, 255, .03), transparent);
106
- }
107
-
108
- footer .mono {
109
- font-family: var(--mono);
110
- color: rgba(255, 255, 255, .76);
111
- }
112
-
113
- /* =========================================
114
- Brand & Status
115
- ========================================= */
116
-
117
- .brand {
118
- display: flex;
119
- gap: 12px;
120
- align-items: center;
121
- min-width: 420px;
122
- }
123
-
124
- .logo {
125
- width: 40px;
126
- height: 40px;
127
- border-radius: 14px;
128
- background:
129
- radial-gradient(circle at 30% 30%, rgba(34, 211, 238, .9), rgba(124, 58, 237, .9) 55%, rgba(0, 0, 0, .1) 70%),
130
- linear-gradient(135deg, rgba(255, 255, 255, .10), transparent 60%);
131
- box-shadow: 0 16px 46px rgba(124, 58, 237, .25);
132
- border: 1px solid rgba(255, 255, 255, .16);
133
- position: relative;
134
- overflow: hidden;
135
- }
136
-
137
- .logo:after {
138
- content: "";
139
- position: absolute;
140
- inset: -40px;
141
- background: conic-gradient(from 180deg, transparent, rgba(255, 255, 255, .10), transparent);
142
- animation: spin 10s linear infinite;
143
- }
144
-
145
- @keyframes spin {
146
- to {
147
- transform: rotate(360deg);
148
- }
149
- }
150
-
151
- .brand h1 {
152
- font-size: 14px;
153
- margin: 0;
154
- letter-spacing: .16em;
155
- text-transform: uppercase;
156
- }
157
-
158
- .brand .sub {
159
- font-size: 12px;
160
- color: var(--muted);
161
- margin-top: 2px;
162
- line-height: 1.2;
163
- }
164
-
165
- .status-row {
166
- display: flex;
167
- gap: 10px;
168
- align-items: center;
169
- flex-wrap: wrap;
170
- justify-content: flex-end;
171
- }
172
-
173
- /* =========================================
174
- Components: Cards & Panels
175
- ========================================= */
176
-
177
- .card {
178
- padding: 12px 12px 10px;
179
- border-bottom: 1px solid var(--stroke);
180
- position: relative;
181
- }
182
-
183
- .card:last-child {
184
- border-bottom: none;
185
- }
186
-
187
- .card h2 {
188
- margin: 0;
189
- font-size: 12px;
190
- letter-spacing: .14em;
191
- text-transform: uppercase;
192
- color: rgba(255, 255, 255, .78);
193
- }
194
-
195
- .card small {
196
- color: var(--muted);
197
- }
198
-
199
- .card .hint {
200
- color: var(--faint);
201
- font-size: 11px;
202
- line-height: 1.35;
203
- margin-top: 6px;
204
- }
205
-
206
- .panel {
207
- background: linear-gradient(180deg, rgba(255, 255, 255, .03), rgba(255, 255, 255, .015));
208
- border: 1px solid var(--stroke);
209
- border-radius: 16px;
210
- padding: 10px;
211
- box-shadow: 0 10px 30px rgba(0, 0, 0, .35);
212
- overflow: hidden;
213
- position: relative;
214
- }
215
-
216
- .panel h3 {
217
- margin: 0 0 8px;
218
- font-size: 12px;
219
- letter-spacing: .14em;
220
- text-transform: uppercase;
221
- color: rgba(255, 255, 255, .78);
222
- display: flex;
223
- align-items: center;
224
- justify-content: space-between;
225
- gap: 8px;
226
- }
227
-
228
- .panel h3 .rightnote {
229
- font-size: 11px;
230
- color: var(--muted);
231
- font-family: var(--mono);
232
- letter-spacing: 0;
233
- text-transform: none;
234
- }
235
-
236
- .collapse-btn {
237
- background: rgba(255, 255, 255, .05);
238
- border: 1px solid rgba(255, 255, 255, .12);
239
- border-radius: 8px;
240
- padding: 4px 8px;
241
- color: var(--muted);
242
- cursor: pointer;
243
- font-size: 11px;
244
- font-family: var(--mono);
245
- transition: all 0.2s ease;
246
- text-transform: none;
247
- letter-spacing: 0;
248
- }
249
-
250
- .collapse-btn:hover {
251
- background: rgba(255, 255, 255, .08);
252
- color: var(--text);
253
- border-color: rgba(255, 255, 255, .18);
254
- }
255
-
256
- /* =========================================
257
- Components: Inputs & Controls
258
- ========================================= */
259
-
260
- .grid2 {
261
- display: grid;
262
- grid-template-columns: 1fr 1fr;
263
- gap: 8px;
264
- margin-top: 10px;
265
- }
266
-
267
- .row {
268
- display: flex;
269
- gap: 8px;
270
- align-items: center;
271
- justify-content: space-between;
272
- margin-top: 8px;
273
- }
274
-
275
- label {
276
- font-size: 11px;
277
- color: var(--muted);
278
- }
279
-
280
- input[type="range"] {
281
- width: 100%;
282
- }
283
-
284
- select,
285
- textarea,
286
- input[type="text"],
287
- input[type="number"] {
288
- width: 100%;
289
- background: rgba(255, 255, 255, .04);
290
- border: 1px solid var(--stroke2);
291
- border-radius: 10px;
292
- padding: 8px 10px;
293
- color: var(--text);
294
- outline: none;
295
- font-size: 12px;
296
- }
297
-
298
- select:focus,
299
- textarea:focus,
300
- input[type="text"]:focus,
301
- input[type="number"]:focus {
302
- border-color: rgba(124, 58, 237, .55);
303
- box-shadow: 0 0 0 3px rgba(124, 58, 237, .16);
304
- }
305
-
306
- .btn {
307
- user-select: none;
308
- cursor: pointer;
309
- border: none;
310
- border-radius: 12px;
311
- padding: 10px 12px;
312
- font-weight: 700;
313
- font-size: 12px;
314
- letter-spacing: .04em;
315
- color: rgba(255, 255, 255, .92);
316
- background: linear-gradient(135deg, rgba(124, 58, 237, .95), rgba(34, 211, 238, .45));
317
- box-shadow: 0 18px 40px rgba(124, 58, 237, .24);
318
- }
319
-
320
- .btn:hover {
321
- filter: brightness(1.06);
322
- }
323
-
324
- .btn:active {
325
- transform: translateY(1px);
326
- }
327
-
328
- .btn.secondary {
329
- background: rgba(255, 255, 255, .06);
330
- border: 1px solid var(--stroke2);
331
- box-shadow: none;
332
- font-weight: 600;
333
- }
334
-
335
- .btn.secondary:hover {
336
- background: rgba(255, 255, 255, .08);
337
- }
338
-
339
- .btn.danger {
340
- background: linear-gradient(135deg, rgba(239, 68, 68, .95), rgba(251, 113, 133, .55));
341
- box-shadow: 0 18px 40px rgba(239, 68, 68, .18);
342
- }
343
-
344
- .btnrow {
345
- display: flex;
346
- gap: 8px;
347
- margin-top: 10px;
348
- }
349
-
350
- .btnrow .btn {
351
- flex: 1;
352
- }
353
-
354
- .pill {
355
- display: flex;
356
- align-items: center;
357
- gap: 10px;
358
- padding: 8px 12px;
359
- border-radius: 999px;
360
- border: 1px solid var(--stroke2);
361
- background: rgba(255, 255, 255, .04);
362
- box-shadow: 0 10px 26px rgba(0, 0, 0, .35);
363
- font-size: 12px;
364
- color: var(--muted);
365
- white-space: nowrap;
366
- }
367
-
368
- .dot {
369
- width: 8px;
370
- height: 8px;
371
- border-radius: 50%;
372
- background: var(--good);
373
- box-shadow: 0 0 16px rgba(34, 197, 94, .6);
374
- }
375
-
376
- .dot.warn {
377
- background: var(--warn);
378
- box-shadow: 0 0 16px rgba(245, 158, 11, .55);
379
- }
380
-
381
- .dot.bad {
382
- background: var(--bad);
383
- box-shadow: 0 0 16px rgba(239, 68, 68, .55);
384
- }
385
-
386
- .kbd {
387
- font-family: var(--mono);
388
- font-size: 11px;
389
- padding: 2px 6px;
390
- border: 1px solid var(--stroke2);
391
- border-bottom-color: rgba(255, 255, 255, .24);
392
- background: rgba(0, 0, 0, .35);
393
- border-radius: 7px;
394
- color: rgba(255, 255, 255, .78);
395
- }
396
-
397
- .badge {
398
- display: inline-flex;
399
- align-items: center;
400
- gap: 6px;
401
- padding: 4px 8px;
402
- border-radius: 999px;
403
- border: 1px solid var(--stroke2);
404
- background: rgba(0, 0, 0, .25);
405
- font-family: var(--mono);
406
- }
407
-
408
- /* =========================================
409
- Navigation: Tabs
410
- ========================================= */
411
-
412
- .tabs {
413
- display: flex;
414
- gap: 8px;
415
- padding: 10px 12px;
416
- border-bottom: 1px solid var(--stroke);
417
- background: linear-gradient(180deg, rgba(255, 255, 255, .035), transparent);
418
- flex-wrap: wrap;
419
- }
420
-
421
- .tabbtn {
422
- cursor: pointer;
423
- border: none;
424
- border-radius: 999px;
425
- padding: 8px 12px;
426
- font-size: 12px;
427
- color: rgba(255, 255, 255, .75);
428
- background: rgba(255, 255, 255, .04);
429
- border: 1px solid var(--stroke2);
430
- }
431
-
432
- .tabbtn.active {
433
- color: rgba(255, 255, 255, .92);
434
- background: linear-gradient(135deg, rgba(124, 58, 237, .35), rgba(34, 211, 238, .10));
435
- border-color: rgba(124, 58, 237, .45);
436
- box-shadow: 0 0 0 3px rgba(124, 58, 237, .14);
437
- }
438
-
439
- .tab {
440
- display: none;
441
- flex: 1;
442
- min-height: 0;
443
- overflow: auto;
444
- padding: 12px;
445
- }
446
-
447
- .tab.active {
448
- display: block;
449
- }
450
-
451
- /* =========================================
452
- Visualization: Views & Canvas
453
- ========================================= */
454
-
455
- .viewbox {
456
- position: relative;
457
- border-radius: 14px;
458
- overflow: hidden;
459
- background: radial-gradient(700px 380px at 30% 30%, rgba(124, 58, 237, .12), rgba(0, 0, 0, .0) 60%),
460
- linear-gradient(180deg, rgba(0, 0, 0, .25), rgba(0, 0, 0, .15));
461
- border: 1px solid rgba(255, 255, 255, .08);
462
- min-height: 360px;
463
- }
464
-
465
- .viewbox canvas,
466
- .viewbox video {
467
- width: 100%;
468
- height: 100%;
469
- display: block;
470
- }
471
-
472
- /* Always show the engage video feed */
473
- #videoEngage {
474
- display: block;
475
- opacity: 1;
476
- }
477
-
478
- .viewbox .overlay {
479
- position: absolute;
480
- inset: 0;
481
- pointer-events: none;
482
- }
483
-
484
- /* Make engage overlay visible as main display (not just overlay) */
485
- #engageOverlay {
486
- display: none;
487
- pointer-events: none;
488
- }
489
-
490
- .viewbox .watermark {
491
- position: absolute;
492
- left: 10px;
493
- bottom: 10px;
494
- font-family: var(--mono);
495
- font-size: 11px;
496
- color: rgba(255, 255, 255, .55);
497
- background: rgba(0, 0, 0, .35);
498
- border: 1px solid rgba(255, 255, 255, .14);
499
- padding: 6px 8px;
500
- border-radius: 10px;
501
- }
502
-
503
- .viewbox .empty {
504
- position: absolute;
505
- inset: 0;
506
- display: flex;
507
- flex-direction: column;
508
- align-items: center;
509
- justify-content: center;
510
- gap: 10px;
511
- color: rgba(255, 255, 255, .72);
512
- text-align: center;
513
- padding: 22px;
514
- }
515
-
516
- .viewbox .empty .big {
517
- font-size: 14px;
518
- letter-spacing: .12em;
519
- text-transform: uppercase;
520
- }
521
-
522
- .viewbox .empty .small {
523
- color: var(--muted);
524
- font-size: 12px;
525
- max-width: 520px;
526
- line-height: 1.4;
527
- }
528
-
529
- /* =========================================
530
- Lists & Tables
531
- ========================================= */
532
-
533
- .list {
534
- display: flex;
535
- flex-direction: column;
536
- gap: 8px;
537
- min-height: 160px;
538
- max-height: 320px;
539
- overflow: auto;
540
- padding-right: 4px;
541
- }
542
-
543
- .obj {
544
- padding: 10px;
545
- border-radius: 14px;
546
- border: 1px solid var(--stroke2);
547
- background: rgba(255, 255, 255, .03);
548
- cursor: pointer;
549
- }
550
-
551
- .obj:hover {
552
- background: rgba(255, 255, 255, .05);
553
- }
554
-
555
- .obj.active {
556
- border-color: rgba(34, 211, 238, .45);
557
- box-shadow: 0 0 0 3px rgba(34, 211, 238, .14);
558
- background: linear-gradient(135deg, rgba(34, 211, 238, .10), rgba(124, 58, 237, .08));
559
- }
560
-
561
- .obj .top {
562
- display: flex;
563
- align-items: center;
564
- justify-content: space-between;
565
- gap: 10px;
566
- }
567
-
568
- .obj .id {
569
- font-family: var(--mono);
570
- font-size: 12px;
571
- color: rgba(255, 255, 255, .90);
572
- }
573
-
574
- .obj .cls {
575
- font-size: 12px;
576
- color: rgba(255, 255, 255, .80);
577
- }
578
-
579
- .obj .meta {
580
- margin-top: 6px;
581
- display: flex;
582
- gap: 10px;
583
- flex-wrap: wrap;
584
- font-size: 11px;
585
- color: var(--muted);
586
- }
587
-
588
- .table {
589
- width: 100%;
590
- border-collapse: separate;
591
- border-spacing: 0;
592
- overflow: hidden;
593
- border-radius: 14px;
594
- border: 1px solid rgba(255, 255, 255, .10);
595
- }
596
-
597
- .table th,
598
- .table td {
599
- padding: 8px 10px;
600
- font-size: 12px;
601
- border-bottom: 1px solid rgba(255, 255, 255, .08);
602
- vertical-align: top;
603
- }
604
-
605
- .table th {
606
- background: rgba(255, 255, 255, .04);
607
- color: rgba(255, 255, 255, .78);
608
- letter-spacing: .12em;
609
- text-transform: uppercase;
610
- font-size: 11px;
611
- }
612
-
613
- .table tr:last-child td {
614
- border-bottom: none;
615
- }
616
-
617
- .k {
618
- font-family: var(--mono);
619
- color: rgba(255, 255, 255, .84);
620
- }
621
-
622
- .mini {
623
- font-size: 11px;
624
- color: var(--muted);
625
- line-height: 1.35;
626
- }
627
-
628
- /* =========================================
629
- Metrics & Logs
630
- ========================================= */
631
-
632
- .metricgrid {
633
- display: grid;
634
- grid-template-columns: 1fr 1fr;
635
- gap: 8px;
636
- }
637
-
638
- .metric {
639
- border: 1px solid rgba(255, 255, 255, .10);
640
- background: rgba(255, 255, 255, .03);
641
- border-radius: 14px;
642
- padding: 10px;
643
- }
644
-
645
- .metric .label {
646
- font-size: 11px;
647
- color: var(--muted);
648
- letter-spacing: .12em;
649
- text-transform: uppercase;
650
- }
651
-
652
- .metric .value {
653
- margin-top: 6px;
654
- font-family: var(--mono);
655
- font-size: 16px;
656
- color: rgba(255, 255, 255, .92);
657
- }
658
-
659
- .metric .sub {
660
- margin-top: 4px;
661
- font-size: 11px;
662
- color: var(--faint);
663
- line-height: 1.35;
664
- }
665
-
666
- .log {
667
- font-family: var(--mono);
668
- font-size: 11px;
669
- color: rgba(255, 255, 255, .78);
670
- line-height: 1.45;
671
- background: rgba(0, 0, 0, .35);
672
- border: 1px solid rgba(255, 255, 255, .12);
673
- border-radius: 14px;
674
- padding: 10px;
675
- height: 210px;
676
- overflow: auto;
677
- white-space: pre-wrap;
678
- }
679
-
680
- .log .t {
681
- color: rgba(34, 211, 238, .95);
682
- }
683
-
684
- .log .w {
685
- color: rgba(245, 158, 11, .95);
686
- }
687
-
688
- .log .e {
689
- color: rgba(239, 68, 68, .95);
690
- }
691
-
692
- .log .g {
693
- color: rgba(34, 197, 94, .95);
694
- }
695
-
696
- /* =========================================
697
- Tab Specific: Intel + Frame
698
- ========================================= */
699
-
700
- .frame-grid {
701
- display: grid;
702
- grid-template-columns: 1.6fr .9fr;
703
- grid-template-rows: auto auto 1fr;
704
- gap: 12px;
705
- min-height: 0;
706
- }
707
-
708
- .intel {
709
- margin-top: 10px;
710
- display: flex;
711
- flex-direction: column;
712
- gap: 8px;
713
- }
714
-
715
- .intel-top {
716
- display: flex;
717
- align-items: center;
718
- justify-content: space-between;
719
- gap: 8px;
720
- }
721
-
722
- .thumbrow {
723
- display: flex;
724
- gap: 8px;
725
- }
726
-
727
- .thumbrow img {
728
- flex: 1;
729
- height: 86px;
730
- object-fit: cover;
731
- border-radius: 12px;
732
- border: 1px solid rgba(255, 255, 255, .12);
733
- background: rgba(0, 0, 0, .25);
734
- }
735
-
736
- .intelbox {
737
- font-size: 12px;
738
- line-height: 1.45;
739
- color: rgba(255, 255, 255, .84);
740
- background: rgba(0, 0, 0, .35);
741
- border: 1px solid rgba(255, 255, 255, .12);
742
- border-radius: 14px;
743
- padding: 10px;
744
- min-height: 72px;
745
- }
746
-
747
- /* =========================================
748
- Tab Specific: Engage
749
- ========================================= */
750
-
751
- .engage-grid {
752
- display: grid;
753
- grid-template-columns: 1.6fr .9fr;
754
- gap: 12px;
755
- min-height: 0;
756
- transition: grid-template-columns 0.3s ease;
757
- }
758
-
759
- .engage-grid.sidebar-collapsed {
760
- grid-template-columns: 1fr 0fr;
761
- }
762
-
763
- .engage-grid.sidebar-collapsed .engage-right {
764
- display: none;
765
- }
766
-
767
- .engage-right {
768
- display: flex;
769
- flex-direction: column;
770
- gap: 12px;
771
- min-height: 0;
772
- }
773
-
774
- .radar {
775
- height: 540px;
776
- display: flex;
777
- flex-direction: column;
778
- }
779
-
780
- .radar canvas {
781
- flex: 1;
782
- width: 100%;
783
- height: 100%;
784
- display: block;
785
- }
786
-
787
- .strip {
788
- display: flex;
789
- gap: 8px;
790
- flex-wrap: wrap;
791
- align-items: center;
792
- font-size: 12px;
793
- color: var(--muted);
794
- }
795
-
796
- .strip .chip {
797
- padding: 6px 10px;
798
- border-radius: 999px;
799
- border: 1px solid rgba(255, 255, 255, .12);
800
- background: rgba(255, 255, 255, .03);
801
- font-family: var(--mono);
802
- color: rgba(255, 255, 255, .78);
803
- }
804
-
805
- /* Sidebar Checkbox Row */
806
- .checkbox-row {
807
- grid-column: span 2;
808
- margin-top: 8px;
809
- border-top: 1px solid var(--stroke2);
810
- padding-top: 8px;
811
- display: flex;
812
- align-items: center;
813
- gap: 8px;
814
- cursor: pointer;
815
- }
816
-
817
- .checkbox-row input[type="checkbox"] {
818
- width: auto;
819
- margin: 0;
820
- }
821
-
822
- .bar {
823
- height: 10px;
824
- border-radius: 999px;
825
- background: rgba(255, 255, 255, .08);
826
- border: 1px solid rgba(255, 255, 255, .12);
827
- overflow: hidden;
828
- }
829
-
830
- .bar>div {
831
- height: 100%;
832
- width: 0%;
833
- background: linear-gradient(90deg, rgba(34, 211, 238, .95), rgba(124, 58, 237, .95));
834
- transition: width .18s ease;
835
- }
836
-
837
- /* =========================================
838
- Tab Specific: Trade Space
839
- ========================================= */
840
-
841
- .trade-grid {
842
- display: grid;
843
- grid-template-columns: 1.35fr .65fr;
844
- gap: 12px;
845
- min-height: 0;
846
- }
847
-
848
- .plot {
849
- height: 420px;
850
- }
851
-
852
- /* =========================================
853
- Utilities
854
- ========================================= */
855
-
856
- ::-webkit-scrollbar {
857
- width: 10px;
858
- height: 10px;
859
- }
860
-
861
- ::-webkit-scrollbar-thumb {
862
- background: rgba(255, 255, 255, .10);
863
- border-radius: 999px;
864
- border: 2px solid rgba(0, 0, 0, .25);
865
- }
866
-
867
- ::-webkit-scrollbar-thumb:hover {
868
- background: rgba(255, 255, 255, .16);
869
- }
870
-
871
- /* Track Cards */
872
- .track-card {
873
- background: rgba(255, 255, 255, 0.03);
874
- border: 1px solid var(--border-color);
875
- border-radius: 4px;
876
- padding: 8px;
877
- margin-bottom: 8px;
878
- cursor: pointer;
879
- transition: all 0.2s;
880
- }
881
-
882
- .track-card:hover {
883
- background: rgba(255, 255, 255, 0.08);
884
- }
885
-
886
- .track-card.active {
887
- border-color: var(--accent);
888
- background: rgba(34, 211, 238, 0.1);
889
- }
890
-
891
- .track-card-header {
892
- display: flex;
893
- justify-content: space-between;
894
- align-items: center;
895
- font-weight: 600;
896
- margin-bottom: 4px;
897
- font-size: 13px;
898
- color: var(--text-color);
899
- }
900
-
901
- .track-card-meta {
902
- font-size: 11px;
903
- color: var(--text-dim);
904
- margin-bottom: 4px;
905
- }
906
-
907
- .track-card-body {
908
- font-size: 11px;
909
- line-height: 1.4;
910
- color: #ccc;
911
- background: rgba(0, 0, 0, 0.2);
912
- padding: 6px;
913
- border-radius: 4px;
914
- }
915
-
916
- .gpt-badge {
917
- color: gold;
918
- font-size: 10px;
919
- border: 1px solid gold;
920
- border-radius: 3px;
921
- padding: 1px 4px;
922
- margin-left: 6px;
923
- }
924
-
925
- .gpt-text {
926
- color: #e0e0e0;
927
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
LaserPerception/LaserPerception.html DELETED
@@ -1,505 +0,0 @@
1
- <!DOCTYPE html>
2
- <html lang="en">
3
-
4
- <head>
5
- <meta charset="UTF-8" />
6
- <meta name="viewport" content="width=device-width, initial-scale=1.0" />
7
- <link rel="stylesheet" href="LaserPerception.css">
8
- <title>HEL Perception & Engagement Reasoner (Weapon-Grade Demo)</title>
9
- </head>
10
-
11
- <body>
12
- <div id="app">
13
- <header>
14
- <div class="brand">
15
- <div class="logo" aria-hidden="true"></div>
16
- <div>
17
- <h1>HEL Perception & Engagement Reasoner</h1>
18
- <div class="sub">Video → detection → expert features → aimpoint + Intensity@Target → HEL feasibility →
19
- closed-loop tracking & dwell control</div>
20
- </div>
21
- </div>
22
- <div class="status-row">
23
- <div class="pill">
24
- <span class="dot" id="sys-dot"></span>
25
- <span id="sys-status">STANDBY · No video loaded</span>
26
- </div>
27
- <div class="pill">
28
- <span class="kbd">Reason</span>
29
- <span>Frame-1 inference</span>
30
- </div>
31
- <div class="pill">
32
- <span class="kbd">Engage</span>
33
- <span>Closed-loop track + dwell</span>
34
- </div>
35
- </div>
36
- </header>
37
-
38
- <div class="workspace">
39
- <aside>
40
- <div class="card">
41
- <h2>Video Input</h2>
42
- <div class="hint">Upload one video. Tab 1 uses only the first frame. Tab 2 reuses the same video for tracking
43
- and engagement.</div>
44
-
45
- <div class="row mt-md">
46
- <label for="videoFile">Video file</label>
47
- <span class="badge"><span id="videoMeta">No file</span></span>
48
- </div>
49
- <input id="videoFile" type="file" accept="video/*" />
50
-
51
- <div class="mt-md">
52
- <label>Mission Objective (optional · enables class filtering)</label>
53
- <textarea id="missionText" rows="3"
54
- placeholder="Optional: e.g., Detect people and vehicles; highlight hazards and key objects."></textarea>
55
-
56
- <div class="hint mt-sm">
57
- Mission objective is <b>optional</b>. If provided, it will be used directly as input to the detector.
58
- If left blank, the detector will detect <b>all</b> objects without filtering.
59
- <div class="mini mt-xs" id="hfBackendStatus">HF Backend: STANDBY</div>
60
- </div>
61
- </div>
62
-
63
- <div class="btnrow">
64
- <button id="btnLoadSample" class="btn secondary" title="Optional: wire up sample videos later" disabled>Load
65
- Sample</button>
66
- <button id="btnEject" class="btn danger" title="Unload video">Eject</button>
67
- </div>
68
-
69
- <div class="grid2">
70
- <div>
71
- <label>Detector</label>
72
- <select id="detectorSelect">
73
- <optgroup label="Object Detection Models">
74
- <option value="hf_yolov8" data-kind="object" selected>Lite</option>
75
- <option value="detr_resnet50" data-kind="object">Big</option>
76
- <option value="grounding_dino" data-kind="object">Large</option>
77
- </optgroup>
78
- <optgroup label="Segmentation Models">
79
- <option value="sam3" data-kind="segmentation">Segmentor</option>
80
- </optgroup>
81
- <optgroup label="Drone Detection Models">
82
- <option value="drone_yolo" data-kind="drone">Drone</option>
83
- </optgroup>
84
-
85
- </select>
86
- </div>
87
- <div>
88
- <label>Tracking</label>
89
- <select id="trackerSelect">
90
- <option value="iou">IOU + velocity (built-in)</option>
91
- <option value="external">External hook (user API)</option>
92
- </select>
93
- </div>
94
-
95
- <label class="checkbox-row" for="enableDepthToggle">
96
- <input type="checkbox" id="enableDepthToggle">
97
- <span>Enable Legacy Depth Map (Slow)</span>
98
- </label>
99
- <label class="checkbox-row" for="enableGPTToggle" style="margin-top: 4px;">
100
- <input type="checkbox" id="enableGPTToggle">
101
- <span style="color: var(--accent-light);">Enable GPT Reasoning</span>
102
- </label>
103
- </div>
104
-
105
- <div class="hint mt-sm" id="detectorHint">
106
- If the browser model cannot load (offline), plug in your own detector in <span
107
- class="kbd">externalDetect()</span>.
108
- </div>
109
- </div>
110
-
111
- <div class="card">
112
- <h2>Mission Intel Summary</h2>
113
- <div class="hint">Unbiased 2–3 sentence scene description computed from a few sampled frames + detected
114
- objects (no location inference).</div>
115
-
116
- <div class="intel">
117
- <div class="intel-top">
118
- <span class="badge"><span class="dot warn" id="intelDot"
119
- style="width:7px;height:7px;box-shadow:none"></span><span id="intelStamp">Idle</span></span>
120
- <button id="btnIntelRefresh" class="btn secondary"
121
- style="padding:8px 10px; border-radius:10px; font-weight:700">Refresh</button>
122
- </div>
123
-
124
- <div class="thumbrow" aria-label="sampled frames">
125
- <img id="intelThumb0" alt="sample frame 1" />
126
- <img id="intelThumb1" alt="sample frame 2" />
127
- <img id="intelThumb2" alt="sample frame 3" />
128
- </div>
129
-
130
- <div id="intelSummaryBox" class="intelbox">Upload a video, then click <b>Reason</b> to generate an unbiased
131
- scene summary.</div>
132
- </div>
133
- </div>
134
-
135
- <div class="card">
136
- <h2>HEL & Director Knobs</h2>
137
- <div class="grid2">
138
- <div>
139
- <label>Max output power (kW)</label>
140
- <input id="helPower" type="range" min="20" max="250" step="1" value="60" />
141
- <div class="row"><small class="mini"><span id="helPowerVal">60</span> kW</small><small class="mini">turret
142
- output</small></div>
143
- </div>
144
- <div>
145
- <label>Aperture (m)</label>
146
- <input id="helAperture" type="range" min="0.05" max="0.6" step="0.01" value="0.25" />
147
- <div class="row"><small class="mini"><span id="helApertureVal">0.25</span> m</small><small
148
- class="mini">beam director</small></div>
149
- </div>
150
- </div>
151
-
152
- <div class="grid2 mt-sm">
153
- <div>
154
- <label>Beam quality (M²)</label>
155
- <input id="helM2" type="range" min="1.1" max="4.0" step="0.1" value="1.6" />
156
- <div class="row"><small class="mini"><span id="helM2Val">1.6</span></small><small class="mini">lower is
157
- better</small></div>
158
- </div>
159
- <div>
160
- <label>Jitter (μrad RMS)</label>
161
- <input id="helJitter" type="range" min="0.5" max="15" step="0.1" value="3.2" />
162
- <div class="row"><small class="mini"><span id="helJitterVal">3.2</span></small><small
163
- class="mini">director stability</small></div>
164
- </div>
165
- </div>
166
-
167
- <div class="grid2 mt-sm">
168
- <div>
169
- <label>Mode</label>
170
- <select id="helMode">
171
- <option value="cw">CW (continuous)</option>
172
- <option value="burst">Burst (duty-limited)</option>
173
- <option value="pulse">Pulsed (peak shaping)</option>
174
- </select>
175
- </div>
176
- <div>
177
- <label>Duty cycle (%)</label>
178
- <input id="helDuty" type="range" min="10" max="100" step="1" value="85" />
179
- <div class="row"><small class="mini"><span id="helDutyVal">85</span>%</small><small class="mini">thermal /
180
- power</small></div>
181
- </div>
182
- </div>
183
- </div>
184
-
185
- <div class="card">
186
- <h2>Atmosphere & Maritime</h2>
187
- <div class="grid2">
188
- <div>
189
- <label>Visibility (km)</label>
190
- <input id="atmVis" type="range" min="1" max="30" step="1" value="16" />
191
- <div class="row"><small class="mini"><span id="atmVisVal">16</span> km</small><small
192
- class="mini">aerosol/haze</small></div>
193
- </div>
194
- <div>
195
- <label>Turbulence (Cn²)</label>
196
- <input id="atmCn2" type="range" min="1" max="10" step="1" value="5" />
197
- <div class="row"><small class="mini"><span id="atmCn2Val">5</span>/10</small><small
198
- class="mini">wavefront</small></div>
199
- </div>
200
- </div>
201
-
202
- <div class="grid2 mt-sm">
203
- <div>
204
- <label>Sea spray</label>
205
- <input id="seaSpray" type="range" min="0" max="10" step="1" value="2" />
206
- <div class="row"><small class="mini"><span id="seaSprayVal">2</span>/10</small><small class="mini">salt
207
- attenuation</small></div>
208
- </div>
209
- <div>
210
- <label>Adaptive optics</label>
211
- <input id="aoQ" type="range" min="0" max="10" step="1" value="7" />
212
- <div class="row"><small class="mini"><span id="aoQVal">7</span>/10</small><small class="mini">turbulence
213
- mitigation</small></div>
214
- </div>
215
- </div>
216
-
217
- <div class="grid2 mt-sm">
218
- <div>
219
- <label>Baseline range (m)</label>
220
- <input id="rangeBase" type="range" min="200" max="6000" step="25" value="1500" />
221
- <div class="row"><small class="mini"><span id="rangeBaseVal">1500</span> m</small><small
222
- class="mini">median target</small></div>
223
- </div>
224
- <div>
225
- <label>Update rate (Hz)</label>
226
- <input id="detHz" type="range" min="1" max="12" step="1" value="6" />
227
- <div class="row"><small class="mini"><span id="detHzVal">6</span> Hz</small><small class="mini">tab 2
228
- detection</small></div>
229
- </div>
230
- </div>
231
- </div>
232
-
233
- <div class="card">
234
- <h2>Engagement Policy</h2>
235
- <div class="grid2">
236
- <div>
237
- <label>Targeting</label>
238
- <select id="policyMode">
239
- <option value="auto">Auto: highest lethality margin</option>
240
- <option value="manual">Manual: click target</option>
241
- </select>
242
- </div>
243
- <div>
244
- <label>Assess window (s)</label>
245
- <input id="assessWindow" type="range" min="0.3" max="3.0" step="0.1" value="1.0" />
246
- <div class="row"><small class="mini"><span id="assessWindowVal">1.0</span> s</small><small
247
- class="mini">post-dwell</small></div>
248
- </div>
249
- </div>
250
-
251
- <div class="row">
252
- <label>Show agent cursor</label>
253
- <select id="cursorMode">
254
- <option value="on">On</option>
255
- <option value="off">Off</option>
256
- </select>
257
- </div>
258
-
259
- <div class="hint">The UI is wired for your APIs. Replace <span class="kbd">externalDetect()</span>, <span
260
- class="kbd">externalFeatures()</span>, and <span class="kbd">externalTrack()</span> when ready.</div>
261
- </div>
262
-
263
- <div class="card" style="flex:1; min-height:0">
264
- <h2>System Log</h2>
265
- <div class="log" id="sysLog"></div>
266
- </div>
267
- </aside>
268
-
269
- <main>
270
- <div class="tabs">
271
- <button class="tabbtn active" data-tab="frame">Tab 1 · Frame-1 Reason</button>
272
- <button class="tabbtn" data-tab="engage">Tab 2 · Video Engage</button>
273
- <button class="tabbtn" data-tab="trade">Trade Space</button>
274
- </div>
275
-
276
- <!-- ===== Tab 1 ===== -->
277
- <section class="tab active" id="tab-frame">
278
- <div class="frame-grid">
279
- <div class="panel panel-monitor">
280
- <h3>
281
- <span>First Frame · Detection + Aimpoints</span>
282
- <span class="rightnote" id="frameNote">Awaiting video</span>
283
- </h3>
284
- <div class="viewbox" id="frameViewBox">
285
- <canvas id="frameCanvas" width="1280" height="720"></canvas>
286
- <canvas id="frameOverlay" class="overlay" width="1280" height="720"></canvas>
287
- <div class="watermark">EO/IR · Track-ID · Aimpoint · Required Dwell</div>
288
- <div class="empty" id="frameEmpty">
289
- <div class="big">Upload a video to begin</div>
290
- <div class="small">This demo performs first-frame perception and engagement reasoning. Then it replays
291
- the same video with closed-loop tracking and dynamic dwell updates.</div>
292
- <div style="display:flex; gap:10px; margin-top:6px; flex-wrap:wrap; justify-content:center;">
293
- <span class="badge"><span class="dot"></span> If you are online, COCO-SSD loads automatically</span>
294
- </div>
295
- </div>
296
- </div>
297
-
298
- <div class="btnrow" style="margin-top:10px">
299
- <button id="btnReason" class="btn">Reason</button>
300
- <button id="btnCancelReason" class="btn danger" style="display: none;">Cancel</button>
301
- <button id="btnRecompute" class="btn secondary">Recompute HEL</button>
302
- <button id="btnClear" class="btn secondary">Clear</button>
303
- </div>
304
-
305
- <div class="strip mt-md">
306
- <span class="chip" id="chipFrameDepth"
307
- title="Toggle depth view of first frame (if available)">VIEW:DEFAULT</span>
308
- </div>
309
- </div>
310
-
311
- <div class="panel panel-objects radar">
312
- <h3>
313
- <span>Radar / Relative Geometry</span>
314
- <span class="rightnote" id="objCount">0</span>
315
- </h3>
316
- <canvas id="frameRadar" width="600" height="260" class="full-size"></canvas>
317
- </div>
318
-
319
- <div class="panel panel-features">
320
- <h3>
321
- <span>Selected Target · Features</span>
322
- <span class="rightnote" id="selId">—</span>
323
- </h3>
324
- <table class="table" id="featureTable">
325
- <thead>
326
- <tr>
327
- <th style="width:42%">Feature</th>
328
- <th>Value</th>
329
- </tr>
330
- </thead>
331
- <tbody>
332
- <tr>
333
- <td class="k">—</td>
334
- <td class="mini">No target selected</td>
335
- </tr>
336
- </tbody>
337
- </table>
338
- <div class="hint mt-sm">You can replace feature generation via <span
339
- class="kbd">externalFeatures()</span>. The UI will render whatever 10–12 key-value pairs you return.
340
- </div>
341
- </div>
342
-
343
- <div class="panel panel-summary" style="display:flex; flex-direction:column; min-height: 0;">
344
- <h3>
345
- <span>Object Track Cards</span>
346
- <span class="rightnote" id="trackCount">0</span>
347
- </h3>
348
- <div class="list" id="frameTrackList" style="flex:1; overflow-y:auto; padding:8px;">
349
- <!-- Cards injected here -->
350
- <div style="font-style:italic; color:var(--text-dim); text-align:center; margin-top:20px;">
351
- No objects tracked.
352
- </div>
353
- </div>
354
- </div>
355
-
356
- </div>
357
-
358
- </section>
359
-
360
- <!-- ===== Tab 2 ===== -->
361
- <section class="tab" id="tab-engage">
362
- <div class="engage-grid">
363
- <div class="panel">
364
- <h3>
365
- <span>Video Engage · Tracking + Dynamic Dwell</span>
366
- <div style="display: flex; gap: 8px; align-items: center;">
367
- <button class="collapse-btn" id="btnToggleSidebar">◀ Hide Sidebar</button>
368
- <span class="rightnote" id="engageNote">Awaiting video</span>
369
- </div>
370
- </h3>
371
-
372
- <div class="viewbox" style="min-height: 420px;">
373
- <video id="videoEngage" playsinline muted></video>
374
- <canvas id="engageOverlay" class="overlay"></canvas>
375
- <div class="watermark">LOCK · DIST · DWELL · AIMPOINT · FIRE/ASSESS</div>
376
- <div class="empty" id="engageEmpty">
377
- <div class="big">No video loaded</div>
378
- <div class="small">Upload a video. Run <b>Reason</b> first to initialize aimpoints and baseline dwell.
379
- Then click <b>Engage</b>.</div>
380
- </div>
381
- </div>
382
-
383
- <div class="btnrow mt-md">
384
- <button id="btnEngage" class="btn">Engage</button>
385
- <button id="btnPause" class="btn secondary">Pause</button>
386
- <button id="btnReset" class="btn secondary">Reset</button>
387
- </div>
388
-
389
- <div class="strip mt-md">
390
- <span class="chip" id="chipPolicy">POLICY:AUTO</span>
391
- <span class="chip" id="chipTracks">TRACKS:0</span>
392
- <span class="chip" id="chipBeam">BEAM:OFF</span>
393
- <span class="chip" id="chipHz">DET:6Hz</span>
394
- <span class="chip" id="chipFeed" title="Toggle raw vs HF-processed feed (if available)">FEED:RAW</span>
395
- <span class="chip" id="chipDepth" title="Toggle depth view (if available)">VIEW:DEFAULT</span>
396
- </div>
397
-
398
- <div class="mt-md">
399
- <div class="row"><label>Active dwell progress (selected)</label><small class="mini"
400
- id="dwellText">—</small>
401
- </div>
402
- <div class="bar">
403
- <div id="dwellBar"></div>
404
- </div>
405
- </div>
406
-
407
- <div class="hint mt-md">Manual targeting: choose “Manual” in Engagement Policy, then
408
- click a target in the video. The “beam” will track its aimpoint and accumulate dwell.</div>
409
- </div>
410
-
411
- <div class="engage-right">
412
- <div class="panel radar">
413
- <h3>
414
- <span>Radar / Relative Geometry</span>
415
- <span class="rightnote">Dynamic</span>
416
- </h3>
417
- <canvas id="radarCanvas" width="600" height="260" class="full-size"></canvas>
418
- </div>
419
-
420
- <div class="panel" style="flex:1; min-height:0">
421
- <h3>
422
- <span>Live Track Cards</span>
423
- <span class="rightnote" id="liveStamp">—</span>
424
- </h3>
425
- <div class="list" id="trackList" style="max-height:none"></div>
426
- </div>
427
- </div>
428
- </div>
429
- </section>
430
-
431
- <!-- ===== Tab 3 ===== -->
432
- <section class="tab" id="tab-trade">
433
- <div class="trade-grid">
434
- <div class="panel plot">
435
- <h3>
436
- <span>Range Sensitivity · Max vs Required Power · Dwell</span>
437
- <span class="rightnote">Interactive</span>
438
- </h3>
439
- <canvas id="tradeCanvas" width="1100" height="420" class="full-size"></canvas>
440
- </div>
441
-
442
- <div class="panel">
443
- <h3>
444
- <span>Trade Controls</span>
445
- <span class="rightnote">What-if</span>
446
- </h3>
447
- <div class="hint">This plot is computed from your current HEL and atmosphere knobs. It uses the selected
448
- target’s baseline requirements (from Tab 1) as a reference curve.</div>
449
-
450
- <div class="mt-md">
451
- <label>Selected target for curve</label>
452
- <select id="tradeTarget"></select>
453
- </div>
454
-
455
- <div class="grid2 mt-sm">
456
- <div>
457
- <label>Range sweep min (m)</label>
458
- <input id="rMin" type="number" value="200" min="50" max="10000" step="50" />
459
- </div>
460
- <div>
461
- <label>Range sweep max (m)</label>
462
- <input id="rMax" type="number" value="6000" min="100" max="20000" step="50" />
463
- </div>
464
- </div>
465
-
466
- <div class="row mt-md">
467
- <label>Show P(kill)</label>
468
- <select id="showPk">
469
- <option value="on">On</option>
470
- <option value="off">Off</option>
471
- </select>
472
- </div>
473
-
474
- <div class="btnrow">
475
- <button class="btn secondary" id="btnReplot">Replot</button>
476
- <button class="btn secondary" id="btnSnap">Snapshot (log)</button>
477
- </div>
478
-
479
- <div class="hint">This tab is designed to look like a weapon trade-space console: propagation, lethality
480
- margin, and dwell inflation with range and atmosphere.</div>
481
- </div>
482
- </div>
483
- </section>
484
- </main>
485
- </div>
486
-
487
- <footer>
488
- <div>Demo mode · Unclassified visuals · Integrate your APIs where marked</div>
489
- <div class="mono" id="telemetry">HEL=60kW · VIS=16km · Cn²=5/10 · AO=7/10 · DET=6Hz</div>
490
- </footer>
491
-
492
- <!-- Hidden video used only for first-frame capture -->
493
- <video id="videoHidden" playsinline muted style="display:none"></video>
494
- </div>
495
-
496
- <script>
497
- window.API_CONFIG = {
498
- BACKEND_BASE: "https://biaslab2025-perception.hf.space"
499
- };
500
- </script>
501
- <script src="LaserPerception.js"></script>
502
-
503
- </body>
504
-
505
- </html>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
LaserPerception/LaserPerception.js DELETED
The diff for this file is too large to render. See raw diff
 
app.py CHANGED
@@ -280,6 +280,7 @@ async def detect_endpoint(
280
  detector_name=detector_name,
281
  depth_estimator_name=active_depth,
282
  depth_scale=25.0,
 
283
  )
284
  except ValueError as exc:
285
  logging.exception("Video processing failed.")
@@ -396,6 +397,7 @@ async def detect_async_endpoint(
396
  depth_scale=float(depth_scale),
397
  depth_output_path=str(depth_output_path),
398
  first_frame_depth_path=str(first_frame_depth_path),
 
399
  )
400
  get_job_storage().create(job)
401
  asyncio.create_task(process_video_async(job_id))
 
280
  detector_name=detector_name,
281
  depth_estimator_name=active_depth,
282
  depth_scale=25.0,
283
+ enable_gpt=enable_gpt,
284
  )
285
  except ValueError as exc:
286
  logging.exception("Video processing failed.")
 
397
  depth_scale=float(depth_scale),
398
  depth_output_path=str(depth_output_path),
399
  first_frame_depth_path=str(first_frame_depth_path),
400
+ enable_gpt=enable_gpt,
401
  )
402
  get_job_storage().create(job)
403
  asyncio.create_task(process_video_async(job_id))
frontend/index.html CHANGED
@@ -10,7 +10,491 @@
10
 
11
  <body>
12
  <div id="app">
13
- <!-- ... body content ... -->
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  </div>
15
 
16
  <script>
@@ -18,7 +502,24 @@
18
  BACKEND_BASE: "https://biaslab2025-perception.hf.space"
19
  };
20
  </script>
21
- <script type="module" src="./js/main.js"></script>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
 
23
  </body>
24
 
 
10
 
11
  <body>
12
  <div id="app">
13
+ <header>
14
+ <div class="brand">
15
+ <div class="logo" aria-hidden="true"></div>
16
+ <div>
17
+ <h1>HEL Perception & Engagement Reasoner</h1>
18
+ <div class="sub">Video → detection → expert features → aimpoint + Intensity@Target → HEL feasibility →
19
+ closed-loop tracking & dwell control</div>
20
+ </div>
21
+ </div>
22
+ <div class="status-row">
23
+ <div class="pill">
24
+ <span class="dot" id="sys-dot"></span>
25
+ <span id="sys-status">STANDBY · No video loaded</span>
26
+ </div>
27
+ <div class="pill">
28
+ <span class="kbd">Reason</span>
29
+ <span>Frame-1 inference</span>
30
+ </div>
31
+ <div class="pill">
32
+ <span class="kbd">Engage</span>
33
+ <span>Closed-loop track + dwell</span>
34
+ </div>
35
+ </div>
36
+ </header>
37
+
38
+ <div class="workspace">
39
+ <aside>
40
+ <div class="card">
41
+ <h2>Video Input</h2>
42
+ <div class="hint">Upload one video. Tab 1 uses only the first frame. Tab 2 reuses the same video for tracking
43
+ and engagement.</div>
44
+
45
+ <div class="row mt-md">
46
+ <label for="videoFile">Video file</label>
47
+ <span class="badge"><span id="videoMeta">No file</span></span>
48
+ </div>
49
+ <input id="videoFile" type="file" accept="video/*" />
50
+
51
+ <div class="mt-md">
52
+ <label>Mission Objective (optional · enables class filtering)</label>
53
+ <textarea id="missionText" rows="3"
54
+ placeholder="Optional: e.g., Detect people and vehicles; highlight hazards and key objects."></textarea>
55
+
56
+ <div class="hint mt-sm">
57
+ Mission objective is <b>optional</b>. If provided, it will be used directly as input to the detector.
58
+ If left blank, the detector will detect <b>all</b> objects without filtering.
59
+ <div class="mini mt-xs" id="hfBackendStatus">HF Backend: STANDBY</div>
60
+ </div>
61
+ </div>
62
+
63
+ <div class="btnrow">
64
+ <button id="btnLoadSample" class="btn secondary" title="Optional: wire up sample videos later" disabled>Load
65
+ Sample</button>
66
+ <button id="btnEject" class="btn danger" title="Unload video">Eject</button>
67
+ </div>
68
+
69
+ <div class="grid2">
70
+ <div>
71
+ <label>Detector</label>
72
+ <select id="detectorSelect">
73
+ <optgroup label="Object Detection Models">
74
+ <option value="hf_yolov8" data-kind="object" selected>Lite</option>
75
+ <option value="detr_resnet50" data-kind="object">Big</option>
76
+ <option value="grounding_dino" data-kind="object">Large</option>
77
+ </optgroup>
78
+ <optgroup label="Segmentation Models">
79
+ <option value="sam3" data-kind="segmentation">Segmentor</option>
80
+ </optgroup>
81
+ <optgroup label="Drone Detection Models">
82
+ <option value="drone_yolo" data-kind="drone">Drone</option>
83
+ </optgroup>
84
+
85
+ </select>
86
+ </div>
87
+ <div>
88
+ <label>Tracking</label>
89
+ <select id="trackerSelect">
90
+ <option value="iou">IOU + velocity (built-in)</option>
91
+ <option value="external">External hook (user API)</option>
92
+ </select>
93
+ </div>
94
+
95
+ <label class="checkbox-row" for="enableDepthToggle">
96
+ <input type="checkbox" id="enableDepthToggle">
97
+ <span>Enable Legacy Depth Map (Slow)</span>
98
+ </label>
99
+ <label class="checkbox-row" for="enableGPTToggle" style="margin-top: 4px;">
100
+ <input type="checkbox" id="enableGPTToggle">
101
+ <span style="color: var(--accent-light);">Enable GPT Reasoning</span>
102
+ </label>
103
+ <label class="checkbox-row" for="enableStreamToggle" style="margin-top: 4px;">
104
+ <input type="checkbox" id="enableStreamToggle" checked>
105
+ <span>Enable Stream Processing</span>
106
+ </label>
107
+ </div>
108
+
109
+ <div class="hint mt-sm" id="detectorHint">
110
+ If the browser model cannot load (offline), plug in your own detector in <span
111
+ class="kbd">externalDetect()</span>.
112
+ </div>
113
+ </div>
114
+
115
+ <div class="card">
116
+ <h2>Mission Intel Summary</h2>
117
+ <div class="hint">Unbiased 2–3 sentence scene description computed from a few sampled frames + detected
118
+ objects (no location inference).</div>
119
+
120
+ <div class="intel">
121
+ <div class="intel-top">
122
+ <span class="badge"><span class="dot warn" id="intelDot"
123
+ style="width:7px;height:7px;box-shadow:none"></span><span id="intelStamp">Idle</span></span>
124
+ <button id="btnIntelRefresh" class="btn secondary"
125
+ style="padding:8px 10px; border-radius:10px; font-weight:700">Refresh</button>
126
+ </div>
127
+
128
+ <div class="thumbrow" aria-label="sampled frames">
129
+ <img id="intelThumb0" alt="sample frame 1" />
130
+ <img id="intelThumb1" alt="sample frame 2" />
131
+ <img id="intelThumb2" alt="sample frame 3" />
132
+ </div>
133
+
134
+ <div id="intelSummaryBox" class="intelbox">Upload a video, then click <b>Reason</b> to generate an unbiased
135
+ scene summary.</div>
136
+ </div>
137
+ </div>
138
+
139
+ <div class="card">
140
+ <h2>HEL & Director Knobs</h2>
141
+ <div class="grid2">
142
+ <div>
143
+ <label>Max output power (kW)</label>
144
+ <input id="helPower" type="range" min="20" max="250" step="1" value="60" />
145
+ <div class="row"><small class="mini"><span id="helPowerVal">60</span> kW</small><small class="mini">turret
146
+ output</small></div>
147
+ </div>
148
+ <div>
149
+ <label>Aperture (m)</label>
150
+ <input id="helAperture" type="range" min="0.05" max="0.6" step="0.01" value="0.25" />
151
+ <div class="row"><small class="mini"><span id="helApertureVal">0.25</span> m</small><small
152
+ class="mini">beam director</small></div>
153
+ </div>
154
+ </div>
155
+
156
+ <div class="grid2 mt-sm">
157
+ <div>
158
+ <label>Beam quality (M²)</label>
159
+ <input id="helM2" type="range" min="1.1" max="4.0" step="0.1" value="1.6" />
160
+ <div class="row"><small class="mini"><span id="helM2Val">1.6</span></small><small class="mini">lower is
161
+ better</small></div>
162
+ </div>
163
+ <div>
164
+ <label>Jitter (μrad RMS)</label>
165
+ <input id="helJitter" type="range" min="0.5" max="15" step="0.1" value="3.2" />
166
+ <div class="row"><small class="mini"><span id="helJitterVal">3.2</span></small><small
167
+ class="mini">director stability</small></div>
168
+ </div>
169
+ </div>
170
+
171
+ <div class="grid2 mt-sm">
172
+ <div>
173
+ <label>Mode</label>
174
+ <select id="helMode">
175
+ <option value="cw">CW (continuous)</option>
176
+ <option value="burst">Burst (duty-limited)</option>
177
+ <option value="pulse">Pulsed (peak shaping)</option>
178
+ </select>
179
+ </div>
180
+ <div>
181
+ <label>Duty cycle (%)</label>
182
+ <input id="helDuty" type="range" min="10" max="100" step="1" value="85" />
183
+ <div class="row"><small class="mini"><span id="helDutyVal">85</span>%</small><small class="mini">thermal /
184
+ power</small></div>
185
+ </div>
186
+ </div>
187
+ </div>
188
+
189
+ <div class="card">
190
+ <h2>Atmosphere & Maritime</h2>
191
+ <div class="grid2">
192
+ <div>
193
+ <label>Visibility (km)</label>
194
+ <input id="atmVis" type="range" min="1" max="30" step="1" value="16" />
195
+ <div class="row"><small class="mini"><span id="atmVisVal">16</span> km</small><small
196
+ class="mini">aerosol/haze</small></div>
197
+ </div>
198
+ <div>
199
+ <label>Turbulence (Cn²)</label>
200
+ <input id="atmCn2" type="range" min="1" max="10" step="1" value="5" />
201
+ <div class="row"><small class="mini"><span id="atmCn2Val">5</span>/10</small><small
202
+ class="mini">wavefront</small></div>
203
+ </div>
204
+ </div>
205
+
206
+ <div class="grid2 mt-sm">
207
+ <div>
208
+ <label>Sea spray</label>
209
+ <input id="seaSpray" type="range" min="0" max="10" step="1" value="2" />
210
+ <div class="row"><small class="mini"><span id="seaSprayVal">2</span>/10</small><small class="mini">salt
211
+ attenuation</small></div>
212
+ </div>
213
+ <div>
214
+ <label>Adaptive optics</label>
215
+ <input id="aoQ" type="range" min="0" max="10" step="1" value="7" />
216
+ <div class="row"><small class="mini"><span id="aoQVal">7</span>/10</small><small class="mini">turbulence
217
+ mitigation</small></div>
218
+ </div>
219
+ </div>
220
+
221
+ <div class="grid2 mt-sm">
222
+ <div>
223
+ <label>Baseline range (m)</label>
224
+ <input id="rangeBase" type="range" min="200" max="6000" step="25" value="1500" />
225
+ <div class="row"><small class="mini"><span id="rangeBaseVal">1500</span> m</small><small
226
+ class="mini">median target</small></div>
227
+ </div>
228
+ <div>
229
+ <label>Update rate (Hz)</label>
230
+ <input id="detHz" type="range" min="1" max="12" step="1" value="6" />
231
+ <div class="row"><small class="mini"><span id="detHzVal">6</span> Hz</small><small class="mini">tab 2
232
+ detection</small></div>
233
+ </div>
234
+ </div>
235
+ </div>
236
+
237
+ <div class="card">
238
+ <h2>Engagement Policy</h2>
239
+ <div class="grid2">
240
+ <div>
241
+ <label>Targeting</label>
242
+ <select id="policyMode">
243
+ <option value="auto">Auto: highest lethality margin</option>
244
+ <option value="manual">Manual: click target</option>
245
+ </select>
246
+ </div>
247
+ <div>
248
+ <label>Assess window (s)</label>
249
+ <input id="assessWindow" type="range" min="0.3" max="3.0" step="0.1" value="1.0" />
250
+ <div class="row"><small class="mini"><span id="assessWindowVal">1.0</span> s</small><small
251
+ class="mini">post-dwell</small></div>
252
+ </div>
253
+ </div>
254
+
255
+ <div class="row">
256
+ <label>Show agent cursor</label>
257
+ <select id="cursorMode">
258
+ <option value="on">On</option>
259
+ <option value="off">Off</option>
260
+ </select>
261
+ </div>
262
+
263
+ <div class="hint">The UI is wired for your APIs. Replace <span class="kbd">externalDetect()</span>, <span
264
+ class="kbd">externalFeatures()</span>, and <span class="kbd">externalTrack()</span> when ready.</div>
265
+ </div>
266
+
267
+ <div class="card" style="flex:1; min-height:0">
268
+ <h2>System Log</h2>
269
+ <div class="log" id="sysLog"></div>
270
+ </div>
271
+ </aside>
272
+
273
+ <main>
274
+ <div class="tabs">
275
+ <button class="tabbtn active" data-tab="frame">Tab 1 · Frame-1 Reason</button>
276
+ <button class="tabbtn" data-tab="engage">Tab 2 · Video Engage</button>
277
+ <button class="tabbtn" data-tab="trade">Trade Space</button>
278
+ </div>
279
+
280
+ <!-- ===== Tab 1 ===== -->
281
+ <section class="tab active" id="tab-frame">
282
+ <div class="frame-grid">
283
+ <div class="panel panel-monitor">
284
+ <h3>
285
+ <span>First Frame · Detection + Aimpoints</span>
286
+ <span class="rightnote" id="frameNote">Awaiting video</span>
287
+ </h3>
288
+ <div class="viewbox" id="frameViewBox">
289
+ <canvas id="frameCanvas" width="1280" height="720"></canvas>
290
+ <canvas id="frameOverlay" class="overlay" width="1280" height="720"></canvas>
291
+ <div class="watermark">EO/IR · Track-ID · Aimpoint · Required Dwell</div>
292
+ <div class="empty" id="frameEmpty">
293
+ <div class="big">Upload a video to begin</div>
294
+ <div class="small">This demo performs first-frame perception and engagement reasoning. Then it replays
295
+ the same video with closed-loop tracking and dynamic dwell updates.</div>
296
+ <div style="display:flex; gap:10px; margin-top:6px; flex-wrap:wrap; justify-content:center;">
297
+ <span class="badge"><span class="dot"></span> If you are online, COCO-SSD loads automatically</span>
298
+ </div>
299
+ </div>
300
+ </div>
301
+
302
+ <div class="btnrow" style="margin-top:10px">
303
+ <button id="btnReason" class="btn">Reason</button>
304
+ <button id="btnCancelReason" class="btn danger" style="display: none;">Cancel</button>
305
+ <button id="btnRecompute" class="btn secondary">Recompute HEL</button>
306
+ <button id="btnClear" class="btn secondary">Clear</button>
307
+ </div>
308
+
309
+ <div class="strip mt-md">
310
+ <span class="chip" id="chipFrameDepth"
311
+ title="Toggle depth view of first frame (if available)">VIEW:DEFAULT</span>
312
+ </div>
313
+ </div>
314
+
315
+ <div class="panel panel-objects radar">
316
+ <h3>
317
+ <span>Radar / Relative Geometry</span>
318
+ <span class="rightnote" id="objCount">0</span>
319
+ </h3>
320
+ <canvas id="frameRadar" width="600" height="260" class="full-size"></canvas>
321
+ </div>
322
+
323
+ <div class="panel panel-features">
324
+ <h3>
325
+ <span>Selected Target · Features</span>
326
+ <span class="rightnote" id="selId">—</span>
327
+ </h3>
328
+ <table class="table" id="featureTable">
329
+ <thead>
330
+ <tr>
331
+ <th style="width:42%">Feature</th>
332
+ <th>Value</th>
333
+ </tr>
334
+ </thead>
335
+ <tbody>
336
+ <tr>
337
+ <td class="k">—</td>
338
+ <td class="mini">No target selected</td>
339
+ </tr>
340
+ </tbody>
341
+ </table>
342
+ <div class="hint mt-sm">You can replace feature generation via <span
343
+ class="kbd">externalFeatures()</span>. The UI will render whatever 10–12 key-value pairs you return.
344
+ </div>
345
+ </div>
346
+
347
+ <div class="panel panel-summary" style="display:flex; flex-direction:column; min-height: 0;">
348
+ <h3>
349
+ <span>Object Track Cards</span>
350
+ <span class="rightnote" id="trackCount">0</span>
351
+ </h3>
352
+ <div class="list" id="frameTrackList" style="flex:1; overflow-y:auto; padding:8px;">
353
+ <!-- Cards injected here -->
354
+ <div style="font-style:italic; color:var(--text-dim); text-align:center; margin-top:20px;">
355
+ No objects tracked.
356
+ </div>
357
+ </div>
358
+ </div>
359
+
360
+ </div>
361
+
362
+ </section>
363
+
364
+ <!-- ===== Tab 2 ===== -->
365
+ <section class="tab" id="tab-engage">
366
+ <div class="engage-grid">
367
+ <div class="panel">
368
+ <h3>
369
+ <span>Video Engage · Tracking + Dynamic Dwell</span>
370
+ <div style="display: flex; gap: 8px; align-items: center;">
371
+ <button class="collapse-btn" id="btnToggleSidebar">◀ Hide Sidebar</button>
372
+ <span class="rightnote" id="engageNote">Awaiting video</span>
373
+ </div>
374
+ </h3>
375
+
376
+ <div class="viewbox" style="min-height: 420px;">
377
+ <video id="videoEngage" playsinline muted></video>
378
+ <canvas id="engageOverlay" class="overlay"></canvas>
379
+ <div class="watermark">LOCK · DIST · DWELL · AIMPOINT · FIRE/ASSESS</div>
380
+ <div class="empty" id="engageEmpty">
381
+ <div class="big">No video loaded</div>
382
+ <div class="small">Upload a video. Run <b>Reason</b> first to initialize aimpoints and baseline dwell.
383
+ Then click <b>Engage</b>.</div>
384
+ </div>
385
+ </div>
386
+
387
+ <div class="btnrow mt-md">
388
+ <button id="btnEngage" class="btn">Engage</button>
389
+ <button id="btnPause" class="btn secondary">Pause</button>
390
+ <button id="btnReset" class="btn secondary">Reset</button>
391
+ </div>
392
+
393
+ <div class="strip mt-md">
394
+ <span class="chip" id="chipPolicy">POLICY:AUTO</span>
395
+ <span class="chip" id="chipTracks">TRACKS:0</span>
396
+ <span class="chip" id="chipBeam">BEAM:OFF</span>
397
+ <span class="chip" id="chipHz">DET:6Hz</span>
398
+ <span class="chip" id="chipFeed" title="Toggle raw vs HF-processed feed (if available)">FEED:RAW</span>
399
+ <span class="chip" id="chipDepth" title="Toggle depth view (if available)">VIEW:DEFAULT</span>
400
+ </div>
401
+
402
+ <div class="mt-md">
403
+ <div class="row"><label>Active dwell progress (selected)</label><small class="mini"
404
+ id="dwellText">—</small>
405
+ </div>
406
+ <div class="bar">
407
+ <div id="dwellBar"></div>
408
+ </div>
409
+ </div>
410
+
411
+ <div class="hint mt-md">Manual targeting: choose “Manual” in Engagement Policy, then
412
+ click a target in the video. The “beam” will track its aimpoint and accumulate dwell.</div>
413
+ </div>
414
+
415
+ <div class="engage-right">
416
+ <div class="panel radar">
417
+ <h3>
418
+ <span>Radar / Relative Geometry</span>
419
+ <span class="rightnote">Dynamic</span>
420
+ </h3>
421
+ <canvas id="radarCanvas" width="600" height="260" class="full-size"></canvas>
422
+ </div>
423
+
424
+ <div class="panel" style="flex:1; min-height:0">
425
+ <h3>
426
+ <span>Live Track Cards</span>
427
+ <span class="rightnote" id="liveStamp">—</span>
428
+ </h3>
429
+ <div class="list" id="trackList" style="max-height:none"></div>
430
+ </div>
431
+ </div>
432
+ </div>
433
+ </section>
434
+
435
+ <!-- ===== Tab 3 ===== -->
436
+ <section class="tab" id="tab-trade">
437
+ <div class="trade-grid">
438
+ <div class="panel plot">
439
+ <h3>
440
+ <span>Range Sensitivity · Max vs Required Power · Dwell</span>
441
+ <span class="rightnote">Interactive</span>
442
+ </h3>
443
+ <canvas id="tradeCanvas" width="1100" height="420" class="full-size"></canvas>
444
+ </div>
445
+
446
+ <div class="panel">
447
+ <h3>
448
+ <span>Trade Controls</span>
449
+ <span class="rightnote">What-if</span>
450
+ </h3>
451
+ <div class="hint">This plot is computed from your current HEL and atmosphere knobs. It uses the selected
452
+ target’s baseline requirements (from Tab 1) as a reference curve.</div>
453
+
454
+ <div class="mt-md">
455
+ <label>Selected target for curve</label>
456
+ <select id="tradeTarget"></select>
457
+ </div>
458
+
459
+ <div class="grid2 mt-sm">
460
+ <div>
461
+ <label>Range sweep min (m)</label>
462
+ <input id="rMin" type="number" value="200" min="50" max="10000" step="50" />
463
+ </div>
464
+ <div>
465
+ <label>Range sweep max (m)</label>
466
+ <input id="rMax" type="number" value="6000" min="100" max="20000" step="50" />
467
+ </div>
468
+ </div>
469
+
470
+ <div class="row mt-md">
471
+ <label>Show P(kill)</label>
472
+ <select id="showPk">
473
+ <option value="on">On</option>
474
+ <option value="off">Off</option>
475
+ </select>
476
+ </div>
477
+
478
+ <div class="btnrow">
479
+ <button class="btn secondary" id="btnReplot">Replot</button>
480
+ <button class="btn secondary" id="btnSnap">Snapshot (log)</button>
481
+ </div>
482
+
483
+ <div class="hint">This tab is designed to look like a weapon trade-space console: propagation, lethality
484
+ margin, and dwell inflation with range and atmosphere.</div>
485
+ </div>
486
+ </div>
487
+ </section>
488
+ </main>
489
+ </div>
490
+
491
+ <footer>
492
+ <div>Demo mode · Unclassified visuals · Integrate your APIs where marked</div>
493
+ <div class="mono" id="telemetry">HEL=60kW · VIS=16km · Cn²=5/10 · AO=7/10 · DET=6Hz</div>
494
+ </footer>
495
+
496
+ <!-- Hidden video used only for first-frame capture -->
497
+ <video id="videoHidden" playsinline muted style="display:none"></video>
498
  </div>
499
 
500
  <script>
 
502
  BACKEND_BASE: "https://biaslab2025-perception.hf.space"
503
  };
504
  </script>
505
+ <script src="./js/init.js"></script>
506
+ <script src="./js/core/config.js"></script>
507
+ <script src="./js/core/utils.js"></script>
508
+ <script src="./js/core/state.js"></script>
509
+ <script src="./js/core/physics.js"></script>
510
+ <script src="./js/core/video.js"></script>
511
+ <script src="./js/core/hel.js"></script>
512
+ <script src="./js/ui/logging.js"></script>
513
+ <script src="./js/core/tracker.js"></script>
514
+ <script src="./js/api/client.js"></script>
515
+ <script src="./js/ui/overlays.js"></script>
516
+ <script src="./js/ui/radar.js"></script>
517
+ <script src="./js/ui/cards.js"></script>
518
+ <script src="./js/ui/features.js"></script>
519
+ <script src="./js/ui/intel.js"></script>
520
+ <script src="./js/ui/cursor.js"></script>
521
+ <script src="./js/ui/trade.js"></script>
522
+ <script src="./js/main.js"></script>
523
 
524
  </body>
525
 
frontend/js/api/client.js CHANGED
@@ -1,17 +1,13 @@
1
- import { state } from '../core/state.js';
2
- import { CONFIG } from '../core/config.js';
3
- import { log, setHfStatus } from '../ui/logging.js';
4
 
5
- export async function hfDetectAsync(formData) {
6
- if (!state.hf.baseUrl) return; // Should handle error or fallback
 
7
 
8
- // Default to state values if not provided in formData
9
- // Assuming formData is constructed in UI layer and passed here
10
-
11
- // Wrapper for the POST /detect/async call
12
  const resp = await fetch(`${state.hf.baseUrl}/detect/async`, {
13
  method: "POST",
14
- body: formData // already contains video, mode, queries, etc.
15
  });
16
 
17
  if (!resp.ok) {
@@ -20,13 +16,39 @@ export async function hfDetectAsync(formData) {
20
  }
21
 
22
  const data = await resp.json();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  return data;
24
- }
25
 
26
- export async function checkJobStatus(jobId) {
 
27
  if (!state.hf.baseUrl) return { status: "error" };
28
 
29
- // Note: Using statusUrl from state if available, or constructing it
30
  const url = state.hf.statusUrl || `${state.hf.baseUrl}/detect/job/${jobId}`;
31
  const resp = await fetch(url, { cache: "no-store" });
32
 
@@ -36,32 +58,181 @@ export async function checkJobStatus(jobId) {
36
  }
37
 
38
  return await resp.json();
39
- }
 
 
 
 
40
 
41
- export async function cancelBackendJob(jobId) {
42
  if (!state.hf.baseUrl || !jobId) return;
 
 
43
  if (state.hf.baseUrl.includes("hf.space")) {
 
44
  return { status: "skipped", message: "Cancel disabled for HF Space" };
45
  }
46
 
47
- const resp = await fetch(`${state.hf.baseUrl}/detect/job/${jobId}`, {
48
- method: "DELETE"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
49
  });
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50
 
51
- if (resp.ok) return await resp.json();
52
- if (resp.status === 404) return { status: "not_found" };
53
- throw new Error("Cancel failed");
54
- }
55
 
56
- export async function reasonTrack(trackPayload) {
57
- // trackPayload: { frame: "base64...", boxes: [[x,y,x,y],...] }
58
- const resp = await fetch(`${state.hf.baseUrl}/reason/track`, {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
59
  method: "POST",
60
- headers: { "Content-Type": "application/json" },
61
- body: JSON.stringify(trackPayload),
62
- cache: "no-store"
63
  });
64
 
65
- if (!resp.ok) throw new Error("Reasoning failed");
 
 
 
66
  return await resp.json();
67
- }
 
1
+ // API Client Module - Backend communication
2
+ APP.api.client = {};
 
3
 
4
+ APP.api.client.hfDetectAsync = async function (formData) {
5
+ const { state } = APP.core;
6
+ if (!state.hf.baseUrl) return;
7
 
 
 
 
 
8
  const resp = await fetch(`${state.hf.baseUrl}/detect/async`, {
9
  method: "POST",
10
+ body: formData
11
  });
12
 
13
  if (!resp.ok) {
 
16
  }
17
 
18
  const data = await resp.json();
19
+
20
+ // Store URLs from response
21
+ if (data.status_url) {
22
+ state.hf.statusUrl = data.status_url.startsWith("http")
23
+ ? data.status_url
24
+ : `${state.hf.baseUrl}${data.status_url}`;
25
+ }
26
+
27
+ if (data.video_url) {
28
+ state.hf.videoUrl = data.video_url.startsWith("http")
29
+ ? data.video_url
30
+ : `${state.hf.baseUrl}${data.video_url}`;
31
+ }
32
+
33
+ if (data.depth_video_url) {
34
+ state.hf.depthVideoUrl = data.depth_video_url.startsWith("http")
35
+ ? data.depth_video_url
36
+ : `${state.hf.baseUrl}${data.depth_video_url}`;
37
+ }
38
+
39
+ if (data.depth_first_frame_url) {
40
+ state.hf.depthFirstFrameUrl = data.depth_first_frame_url.startsWith("http")
41
+ ? data.depth_first_frame_url
42
+ : `${state.hf.baseUrl}${data.depth_first_frame_url}`;
43
+ }
44
+
45
  return data;
46
+ };
47
 
48
+ APP.api.client.checkJobStatus = async function (jobId) {
49
+ const { state } = APP.core;
50
  if (!state.hf.baseUrl) return { status: "error" };
51
 
 
52
  const url = state.hf.statusUrl || `${state.hf.baseUrl}/detect/job/${jobId}`;
53
  const resp = await fetch(url, { cache: "no-store" });
54
 
 
58
  }
59
 
60
  return await resp.json();
61
+ };
62
+
63
// Ask the backend to cancel an in-flight detection job.
//
// Resolves to the backend's JSON on success, or to a synthetic status
// object ({ status: "skipped" | "not_found" | "error", ... }) otherwise.
// Network/HTTP failures are logged and reported via the return value —
// this function never rejects toward the caller.
APP.api.client.cancelBackendJob = async function (jobId, reason) {
    const { state } = APP.core;
    const { log } = APP.ui.logging;

    // Nothing to do without a configured backend or a job handle.
    if (!state.hf.baseUrl || !jobId) return;

    // HF Spaces do not support job cancellation — log and bail out early.
    if (state.hf.baseUrl.includes("hf.space")) {
        log(`Job cancel skipped for HF Space (${reason || "user request"})`, "w");
        return { status: "skipped", message: "Cancel disabled for HF Space" };
    }

    try {
        const response = await fetch(`${state.hf.baseUrl}/detect/job/${jobId}`, {
            method: "DELETE"
        });

        // A missing job is a benign outcome, not an error.
        if (response.status === 404) return { status: "not_found" };
        if (!response.ok) throw new Error("Cancel failed");

        const result = await response.json();
        log(`Job ${jobId.substring(0, 8)} cancelled`, "w");
        return result;
    } catch (err) {
        log(`Cancel error: ${err.message}`, "e");
        return { status: "error", message: err.message };
    }
};
92
+
93
// Poll the backend job whose URL is in state.hf.statusUrl until it
// completes, fails, or times out.
//
// Returns a Promise that resolves once the processed/depth artifacts have
// been fetched, and rejects on job failure, 404, fetch error, or timeout.
// Side effects: mutates state.hf (asyncPollInterval, asyncStatus,
// asyncProgress, asyncJobId) and drives the status line via setHfStatus.
APP.api.client.pollAsyncJob = async function () {
    const { state } = APP.core;
    const { log, setHfStatus } = APP.ui.logging;
    const { fetchProcessedVideo, fetchDepthVideo, fetchDepthFirstFrame } = APP.core.video;

    const pollInterval = 3000; // 3 seconds
    const maxAttempts = 200; // 10 minutes max
    let attempts = 0;
    // Guards the "completed" branch: the interval keeps firing while the
    // (slow) video fetches are awaited, so reentrant ticks must bail out.
    let fetchingVideo = false;

    return new Promise((resolve, reject) => {
        // Interval handle lives on state so other code can clear it
        // (e.g. on reset/cancel) — TODO confirm against callers.
        state.hf.asyncPollInterval = setInterval(async () => {
            attempts++;

            try {
                const resp = await fetch(state.hf.statusUrl, { cache: "no-store" });

                if (!resp.ok) {
                    // 404 means the job record is gone — stop polling for good.
                    if (resp.status === 404) {
                        clearInterval(state.hf.asyncPollInterval);
                        reject(new Error("Job expired or not found"));
                        return;
                    }
                    // Other HTTP errors are fatal via the outer catch below.
                    throw new Error(`Status check failed: ${resp.statusText}`);
                }

                const status = await resp.json();
                state.hf.asyncStatus = status.status;
                state.hf.asyncProgress = status;

                if (status.status === "completed") {
                    if (fetchingVideo) return; // a previous tick is already fetching
                    fetchingVideo = true;

                    const completedJobId = state.hf.asyncJobId;
                    log(`✓ Backend job ${completedJobId.substring(0, 8)}: completed successfully`, "g");
                    setHfStatus("job completed, fetching video...");

                    try {
                        await fetchProcessedVideo();
                        await fetchDepthVideo();
                        await fetchDepthFirstFrame();

                        clearInterval(state.hf.asyncPollInterval);
                        state.hf.asyncJobId = null;
                        setHfStatus("ready");
                        resolve();
                    } catch (err) {
                        // The backend may report "completed" before the video
                        // file is actually ready; retry on the next tick.
                        if (err && err.code === "VIDEO_PENDING") {
                            setHfStatus("job completed, finalizing video...");
                            fetchingVideo = false;
                            return;
                        }
                        clearInterval(state.hf.asyncPollInterval);
                        state.hf.asyncJobId = null;
                        reject(err);
                    }
                } else if (status.status === "failed") {
                    clearInterval(state.hf.asyncPollInterval);
                    const errMsg = status.error || "Processing failed";
                    log(`✗ Backend job ${state.hf.asyncJobId.substring(0, 8)}: failed - ${errMsg}`, "e");
                    state.hf.asyncJobId = null;
                    setHfStatus(`error: ${errMsg}`);
                    reject(new Error(errMsg));
                } else {
                    // Still processing
                    const progressInfo = status.progress ? ` (${Math.round(status.progress * 100)}%)` : "";
                    setHfStatus(`job ${state.hf.asyncJobId.substring(0, 8)}: ${status.status}${progressInfo} (${attempts})`);
                }

                // Hard stop after maxAttempts ticks. (If the promise already
                // settled above, this extra reject is a no-op by Promise rules.)
                if (attempts >= maxAttempts) {
                    clearInterval(state.hf.asyncPollInterval);
                    reject(new Error("Polling timeout (10 minutes)"));
                }
            } catch (err) {
                // Any unexpected fetch/parse error ends the poll loop.
                clearInterval(state.hf.asyncPollInterval);
                reject(err);
            }
        }, pollInterval);
    });
};
174
+
175
// External detection hook (can be replaced by user).
// Default stub: logs the call and resolves to an empty detection list.
APP.api.client.externalDetect = async function (input) {
    console.log("externalDetect called", input);
    return [];
};

// External features hook (can be replaced by user).
// Default stub: resolves to an empty feature map. Note it reads
// detections.length, so callers must pass an array-like value.
APP.api.client.externalFeatures = async function (detections, frameInfo) {
    console.log("externalFeatures called for", detections.length, "objects");
    return {};
};

// External tracker hook (can be replaced by user).
// Default stub: resolves to an empty track list.
APP.api.client.externalTrack = async function (videoEl) {
    console.log("externalTrack called");
    return [];
};
192
+
193
// Call HF object detection directly (for first frame).
//
// Snapshots the given canvas as a JPEG and POSTs it either to the
// configured proxy (`CONFIG.PROXY_URL` + /detect) or, by default, to the
// backend's /detect/frame endpoint. Resolves to the parsed JSON response;
// throws an Error with the best available detail on HTTP failure.
//
// Fix: the blob/FormData construction was duplicated verbatim in both
// branches — it is now built once before the routing decision.
APP.api.client.callHfObjectDetection = async function (canvas) {
    const { state } = APP.core;
    const { canvasToBlob } = APP.core.utils;
    const { CONFIG } = APP.core;

    // One snapshot of the canvas serves both routes.
    const blob = await canvasToBlob(canvas);
    const form = new FormData();
    form.append("image", blob, "frame.jpg");

    const proxyBase = (CONFIG.PROXY_URL || "").trim();

    if (proxyBase) {
        const resp = await fetch(`${proxyBase.replace(/\/$/, "")}/detect`, {
            method: "POST",
            body: form
        });

        if (!resp.ok) {
            // Prefer the proxy's own error detail when the body is JSON.
            let detail = `Proxy inference failed (${resp.status})`;
            try {
                const err = await resp.json();
                detail = err.detail || err.error || detail;
            } catch (_) { }
            throw new Error(detail);
        }

        return await resp.json();
    }

    // Default: use the backend base URL
    const resp = await fetch(`${state.hf.baseUrl}/detect/frame`, {
        method: "POST",
        body: form
    });

    if (!resp.ok) {
        throw new Error(`Frame detection failed: ${resp.statusText}`);
    }

    return await resp.json();
};
frontend/js/core/config.js CHANGED
@@ -1,4 +1,4 @@
1
- export const CONFIG = {
2
  // API Endpoints will be loaded from window.API_CONFIG or defaults
3
  BACKEND_BASE: (window.API_CONFIG?.BACKEND_BASE || window.API_CONFIG?.BASE_URL || "").replace(/\/$/, "") || window.location.origin,
4
  HF_TOKEN: window.API_CONFIG?.HF_TOKEN || "",
 
1
+ APP.core.CONFIG = {
2
  // API Endpoints will be loaded from window.API_CONFIG or defaults
3
  BACKEND_BASE: (window.API_CONFIG?.BACKEND_BASE || window.API_CONFIG?.BASE_URL || "").replace(/\/$/, "") || window.location.origin,
4
  HF_TOKEN: window.API_CONFIG?.HF_TOKEN || "",
frontend/js/core/hel.js ADDED
@@ -0,0 +1,245 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
// HEL (High-Energy Laser) Physics and Computation Module
APP.core.hel = {};

// Read the current HEL / atmosphere control values out of the DOM.
// Returns an empty object when the control panel is not mounted yet
// (the #helPower slider is used as the presence sentinel).
APP.core.hel.getKnobs = function () {
    const { $ } = APP.core.utils;

    const helPower = $("#helPower");
    if (!helPower) return {};

    const helMode = $("#helMode");

    return {
        PkW: +helPower.value,                     // laser output power, kW
        aperture: +$("#helAperture").value,       // aperture diameter, m
        M2: +$("#helM2").value,                   // beam quality factor
        jitter_urad: +$("#helJitter").value,      // pointing jitter, µrad
        duty: (+$("#helDuty").value) / 100,       // duty cycle, 0..1
        mode: helMode ? helMode.value : "cw",
        vis_km: +$("#atmVis").value,              // visibility, km
        cn2: +$("#atmCn2").value,                 // turbulence index, 0..10
        spray: +$("#seaSpray").value,             // sea-spray index, 0..10
        ao: +$("#aoQ").value,                     // adaptive-optics quality, 0..10
        baseRange: +$("#rangeBase").value         // baseline range, m
    };
};
36
+
37
// Compute deliverable power on target at a given range, folding in the
// current knob settings for atmosphere, turbulence, spray and beam quality.
// Returns { Ptar, Pout, trans, turb, beam } (all dimensionless factors
// except Ptar/Pout in kW). With no usable power setting, everything is 0.
APP.core.hel.maxPowerAtTarget = function (range_m) {
    const knobs = APP.core.hel.getKnobs();
    if (!knobs.PkW) {
        return { Ptar: 0, Pout: knobs.PkW || 0, trans: 0, turb: 0, beam: 0 };
    }

    const { clamp } = APP.core.utils;
    const rangeKm = range_m / 1000;

    // Atmospheric transmission (Beer-Lambert approximation)
    const extinctionPerKm = 3.912 / Math.max(1, knobs.vis_km);
    const transmission = Math.exp(-extinctionPerKm * rangeKm);

    // Turbulence factor (simplified Cn² model) with AO compensation
    const turbulenceLoss = clamp(1 - (knobs.cn2 / 10) * 0.3, 0.5, 1);
    const aoGain = 1 + (knobs.ao / 10) * 0.25;
    const turbulence = turbulenceLoss * aoGain;

    // Sea spray attenuation
    const sprayFactor = 1 - (knobs.spray / 10) * 0.15;

    // Beam quality degradation with range (spread + jitter)
    const spread = 1 + (knobs.M2 - 1) * (rangeKm / 5);
    const jitterLoss = 1 / (1 + (knobs.jitter_urad / 10) * (rangeKm / 3));
    const beamFactor = jitterLoss / spread;

    // Emitted power (duty-limited) and the net power arriving on target
    const emitted = knobs.PkW * knobs.duty;
    const delivered = emitted * transmission * turbulence * sprayFactor * beamFactor;

    return {
        Ptar: Math.max(0, delivered),
        Pout: emitted,
        trans: transmission,
        turb: turbulence * sprayFactor,
        beam: beamFactor
    };
};
74
+
75
// Estimate the required power (kW, rounded) to defeat a target, scaled
// from a 30 kW baseline by material, reflectivity and physical size.
// With no feature record at all, a conservative 35 kW default is returned.
APP.core.hel.requiredPowerFromFeatures = function (feat) {
    if (!feat) return 35; // Default

    let required = 30; // kW base requirement

    // Material multipliers; each group applies at most once, and several
    // groups can stack (mirrors the original independent if-chains).
    const material = (feat.material || "").toLowerCase();
    const materialFactors = [
        [["metal", "aluminum"], 1.2],
        [["composite", "carbon"], 0.8],
        [["plastic", "polymer"], 0.6]
    ];
    for (const [keywords, factor] of materialFactors) {
        if (keywords.some((kw) => material.includes(kw))) required *= factor;
    }

    // Higher reflectivity = more power needed
    const reflectivity = feat.reflectivity;
    if (typeof reflectivity === "number") {
        required *= (1 + reflectivity * 0.5);
    }

    // Size scaling, clamped to [0.5, 2] (size assumed in meters)
    const size = feat.physical_size;
    if (typeof size === "number") {
        required *= Math.max(0.5, Math.min(2, size / 2));
    }

    return Math.round(required);
};
101
+
102
// Required dwell time (s) given required vs deliverable power.
// At or above the required power the baseline dwell holds; below it,
// dwell inflates quadratically with the power deficit. Zero deliverable
// power means the target can never be defeated (Infinity).
// (range_m is part of the published signature but unused here.)
APP.core.hel.requiredDwell = function (range_m, reqP_kW, maxP_kW, baseDwell_s) {
    if (maxP_kW <= 0) return Infinity;
    if (maxP_kW >= reqP_kW) return baseDwell_s;

    const deficit = reqP_kW / maxP_kW; // > 1 when under-powered
    return baseDwell_s * deficit * deficit;
};
111
+
112
// Probability of kill from the power margin and dwell ratio.
// Zero when there is no positive margin or no valid required dwell;
// otherwise capped at 0.99.
APP.core.hel.pkillFromMargin = function (margin_kW, baseDwell_s, reqDwell_s) {
    // No margin or degenerate dwell: no kill probability.
    if (margin_kW <= 0 || reqDwell_s <= 0) return 0;

    const dwellRatio = baseDwell_s / reqDwell_s;
    const marginFactor = Math.min(1, margin_kW / 50); // normalize margin to [0,1]

    return Math.min(0.99, dwellRatio * marginFactor * 0.95);
};
122
+
123
// Recompute the HEL engagement numbers for every current detection.
// Mutates each detection in place (maxP_kW, reqP_kW, baseDwell_s,
// reqDwell_s, pkill, baseRange_m) and logs a summary line.
APP.core.hel.recomputeHEL = async function () {
    const { state } = APP.core;
    const { log } = APP.ui.logging;
    const hel = APP.core.hel;
    const knobs = hel.getKnobs();

    if (!state.detections?.length) return;

    state.detections.forEach((det) => {
        // Range priority: GPT estimate, then knob baseline, then 1500 m.
        const range = det.gpt_distance_m || knobs.baseRange || 1500;

        det.maxP_kW = hel.maxPowerAtTarget(range).Ptar;
        det.reqP_kW = hel.requiredPowerFromFeatures(det.features);

        // Keep any existing baseline dwell; default to 5 s otherwise.
        det.baseDwell_s = det.baseDwell_s || 5.0;
        det.reqDwell_s = hel.requiredDwell(range, det.reqP_kW, det.maxP_kW, det.baseDwell_s);

        det.pkill = hel.pkillFromMargin(det.maxP_kW - det.reqP_kW, det.baseDwell_s, det.reqDwell_s);
        det.baseRange_m = range;
    });

    log("HEL synthesis updated for all targets.", "t");
};
156
+
157
// External hook for HEL synthesis (can be replaced by user).
// Default stub: logs the call and resolves to an empty result in the
// shape callers expect — { targets, system: { maxP_kW, reqP_kW,
// margin_kW, medianRange_m } }.
APP.core.hel.externalHEL = async function (detections, knobs) {
    // Default implementation - can be replaced by user
    console.log("externalHEL called for", detections.length, "objects", knobs);
    return {
        targets: {},
        system: { maxP_kW: 0, reqP_kW: 0, margin_kW: 0, medianRange_m: 0 }
    };
};
166
+
167
// Sync knob display values in the UI.
//
// Mirrors each slider's current value into its read-out <span>, then
// refreshes the policy/rate chips and the telemetry footer. Every lookup
// is null-guarded so this is safe to call before the panel is mounted.
//
// Fix: the original repeated the same guard-and-assign block twelve
// times; it is now one data-driven table (identical output text).
APP.core.hel.syncKnobDisplays = function () {
    const { $ } = APP.core.utils;

    // [input selector, read-out selector, formatter for the displayed text]
    const readouts = [
        ["#helPower", "#helPowerVal", (v) => v],
        ["#helAperture", "#helApertureVal", (v) => (+v).toFixed(2)],
        ["#helM2", "#helM2Val", (v) => (+v).toFixed(1)],
        ["#helJitter", "#helJitterVal", (v) => (+v).toFixed(1)],
        ["#helDuty", "#helDutyVal", (v) => v],
        ["#atmVis", "#atmVisVal", (v) => v],
        ["#atmCn2", "#atmCn2Val", (v) => v],
        ["#seaSpray", "#seaSprayVal", (v) => v],
        ["#aoQ", "#aoQVal", (v) => v],
        ["#rangeBase", "#rangeBaseVal", (v) => v],
        ["#detHz", "#detHzVal", (v) => v],
        ["#assessWindow", "#assessWindowVal", (v) => (+v).toFixed(1)]
    ];

    for (const [inputSel, outSel, format] of readouts) {
        const input = $(inputSel);
        if (!input) continue;
        const out = $(outSel);
        if (out) out.textContent = format(input.value);
    }

    // Update chips
    const policyMode = $("#policyMode");
    const detHz = $("#detHz");
    const chipPolicy = $("#chipPolicy");
    const chipHz = $("#chipHz");
    if (chipPolicy && policyMode) chipPolicy.textContent = `POLICY:${policyMode.value.toUpperCase()}`;
    if (chipHz && detHz) chipHz.textContent = `DET:${detHz.value}Hz`;

    // Update telemetry footer
    const helPower = $("#helPower");
    const atmVis = $("#atmVis");
    const atmCn2 = $("#atmCn2");
    const aoQ = $("#aoQ");
    const telemetry = $("#telemetry");
    if (telemetry && helPower && atmVis && atmCn2 && aoQ && detHz) {
        telemetry.textContent = `HEL=${helPower.value}kW · VIS=${atmVis.value}km · Cn²=${atmCn2.value}/10 · AO=${aoQ.value}/10 · DET=${detHz.value}Hz`;
    }
};
frontend/js/core/physics.js CHANGED
@@ -1,7 +1,6 @@
1
- import { $ } from './utils.js';
2
 
3
- // ========= Aimpoint rules =========
4
- export function defaultAimpoint(label) {
5
  const l = (label || "object").toLowerCase();
6
  if (l.includes("airplane") || l.includes("drone") || l.includes("uav") || l.includes("kite") || l.includes("bird")) {
7
  return { relx: 0.62, rely: 0.55, label: "engine" };
@@ -16,32 +15,31 @@ export function defaultAimpoint(label) {
16
  return { relx: 0.55, rely: 0.62, label: "engine_block" };
17
  }
18
  return { relx: 0.50, rely: 0.55, label: "center_mass" };
19
- }
20
 
21
- export function aimpointByLabel(label) {
22
  const l = String(label || "").toLowerCase();
23
  if (l.includes("engine") || l.includes("fuel")) return { relx: 0.64, rely: 0.58, label: label };
24
  if (l.includes("wing")) return { relx: 0.42, rely: 0.52, label: label };
25
  if (l.includes("nose") || l.includes("sensor")) return { relx: 0.28, rely: 0.48, label: label };
26
  if (l.includes("rotor")) return { relx: 0.52, rely: 0.42, label: label };
27
  return { relx: 0.50, rely: 0.55, label: label || "center_mass" };
28
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
 
30
- // ========= Core Physics & Logic Adapters =========
31
- export function getKnobs() {
32
- const helPower = $("#hel-power");
33
- const helAperture = $("#hel-aperture");
34
- const helM2 = $("#hel-m2");
35
- const helJitter = $("#hel-jitter");
36
- const helDuty = $("#hel-duty");
37
- const helMode = $("#hel-mode");
38
- const atmVis = $("#atm-vis");
39
- const atmCn2 = $("#atm-cn2");
40
- const seaSpray = $("#sea-spray");
41
- const aoQ = $("#ao-q");
42
- const rangeBase = $("#range-base");
43
-
44
- // Guard against missing UI elements if loaded before DOM
45
  if (!helPower) return {};
46
 
47
  const PkW = +helPower.value;
@@ -49,23 +47,11 @@ export function getKnobs() {
49
  const M2 = +helM2.value;
50
  const jitter_urad = +helJitter.value;
51
  const duty = (+helDuty.value) / 100;
52
- const mode = helMode.value;
53
  const vis_km = +atmVis.value;
54
  const cn2 = +atmCn2.value;
55
  const spray = +seaSpray.value;
56
  const ao = +aoQ.value;
57
  const baseRange = +rangeBase.value;
58
  return { PkW, aperture, M2, jitter_urad, duty, mode, vis_km, cn2, spray, ao, baseRange };
59
- }
60
-
61
- // ========= Safe Stubs for Client-Side Visualization =========
62
- export function maxPowerAtTarget(range_m) {
63
- // Placeholder: return 0 or simple fallback
64
- return { Ptar: 0, Pout: 0, trans: 0, turb: 0, beam: 0 };
65
- }
66
-
67
- export function requiredPowerFromFeatures(feat) { return 10; } // Safe default
68
-
69
- export function requiredDwell(range_m, reqP, maxP, baseDwell) { return 1.0; } // Safe default
70
-
71
- export function pkillFromMargin(margin_kW, dwell_s, reqDwell_s) { return 0; }
 
1
+ APP.core.physics = {};
2
 
3
+ APP.core.physics.defaultAimpoint = function (label) {
 
4
  const l = (label || "object").toLowerCase();
5
  if (l.includes("airplane") || l.includes("drone") || l.includes("uav") || l.includes("kite") || l.includes("bird")) {
6
  return { relx: 0.62, rely: 0.55, label: "engine" };
 
15
  return { relx: 0.55, rely: 0.62, label: "engine_block" };
16
  }
17
  return { relx: 0.50, rely: 0.55, label: "center_mass" };
18
+ };
19
 
20
+ APP.core.physics.aimpointByLabel = function (label) {
21
  const l = String(label || "").toLowerCase();
22
  if (l.includes("engine") || l.includes("fuel")) return { relx: 0.64, rely: 0.58, label: label };
23
  if (l.includes("wing")) return { relx: 0.42, rely: 0.52, label: label };
24
  if (l.includes("nose") || l.includes("sensor")) return { relx: 0.28, rely: 0.48, label: label };
25
  if (l.includes("rotor")) return { relx: 0.52, rely: 0.42, label: label };
26
  return { relx: 0.50, rely: 0.55, label: label || "center_mass" };
27
+ };
28
+
29
+ APP.core.physics.getKnobs = function () {
30
+ const { $ } = APP.core.utils;
31
+ const helPower = $("#helPower");
32
+ const helAperture = $("#helAperture");
33
+ const helM2 = $("#helM2");
34
+ const helJitter = $("#helJitter");
35
+ const helDuty = $("#helDuty");
36
+ const helMode = $("#helMode");
37
+ const atmVis = $("#atmVis");
38
+ const atmCn2 = $("#atmCn2");
39
+ const seaSpray = $("#seaSpray");
40
+ const aoQ = $("#aoQ");
41
+ const rangeBase = $("#rangeBase");
42
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
  if (!helPower) return {};
44
 
45
  const PkW = +helPower.value;
 
47
  const M2 = +helM2.value;
48
  const jitter_urad = +helJitter.value;
49
  const duty = (+helDuty.value) / 100;
50
+ const mode = helMode ? helMode.value : "cw";
51
  const vis_km = +atmVis.value;
52
  const cn2 = +atmCn2.value;
53
  const spray = +seaSpray.value;
54
  const ao = +aoQ.value;
55
  const baseRange = +rangeBase.value;
56
  return { PkW, aperture, M2, jitter_urad, duty, mode, vis_km, cn2, spray, ao, baseRange };
57
+ };
 
 
 
 
 
 
 
 
 
 
 
 
frontend/js/core/state.js CHANGED
@@ -1,6 +1,4 @@
1
- import { CONFIG } from './config.js';
2
-
3
- export const state = {
4
  videoUrl: null,
5
  videoFile: null,
6
  videoLoaded: false,
@@ -11,7 +9,8 @@ export const state = {
11
  isReasoning: false, // Flag to prevent concurrent Reason executions
12
 
13
  hf: {
14
- baseUrl: CONFIG.BACKEND_BASE,
 
15
  detector: "auto",
16
  asyncJobId: null, // Current job ID from /detect/async
17
  asyncPollInterval: null, // Polling timer handle
@@ -30,7 +29,9 @@ export const state = {
30
  depthFirstFrameBlob: null, // Depth first frame blob
31
  summary: null,
32
  busy: false,
33
- lastError: null
 
 
34
  },
35
 
36
  detector: {
@@ -71,6 +72,3 @@ export const state = {
71
  agentCursor: { x: 0.65, y: 0.28, vx: 0, vy: 0, visible: false, target: null, mode: "idle", t0: 0 }
72
  }
73
  };
74
-
75
- // Simple event bus for state changes if needed
76
- export const events = new EventTarget();
 
1
+ APP.core.state = {
 
 
2
  videoUrl: null,
3
  videoFile: null,
4
  videoLoaded: false,
 
9
  isReasoning: false, // Flag to prevent concurrent Reason executions
10
 
11
  hf: {
12
+ // Will be properly initialized after CONFIG loads
13
+ baseUrl: (window.API_CONFIG?.BACKEND_BASE || window.API_CONFIG?.BASE_URL || "").replace(/\/$/, "") || window.location.origin,
14
  detector: "auto",
15
  asyncJobId: null, // Current job ID from /detect/async
16
  asyncPollInterval: null, // Polling timer handle
 
29
  depthFirstFrameBlob: null, // Depth first frame blob
30
  summary: null,
31
  busy: false,
32
+ lastError: null,
33
+ missionId: null,
34
+ plan: null
35
  },
36
 
37
  detector: {
 
72
  agentCursor: { x: 0.65, y: 0.28, vx: 0, vy: 0, visible: false, target: null, mode: "idle", t0: 0 }
73
  }
74
  };
 
 
 
frontend/js/core/tracker.js CHANGED
@@ -1,26 +1,29 @@
1
- import { state } from './state.js';
2
- import { CONFIG } from './config.js';
3
- import { normBBox, lerp, now, $ } from './utils.js';
4
- import { defaultAimpoint } from './physics.js';
5
- import { log } from '../ui/logging.js';
6
-
7
- const videoEngage = $("#videoEngage");
8
- const rangeBase = $("#range-base");
9
-
10
- function iou(a, b) {
11
- const ax2 = a.x + a.w, ay2 = a.y + a.h;
12
- const bx2 = b.x + b.w, by2 = b.y + b.h;
13
- const ix1 = Math.max(a.x, b.x), iy1 = Math.max(a.y, b.y);
14
- const ix2 = Math.min(ax2, bx2), iy2 = Math.min(ay2, by2);
15
- const iw = Math.max(0, ix2 - ix1), ih = Math.max(0, iy2 - iy1);
16
- const inter = iw * ih;
17
- const ua = a.w * a.h + b.w * b.h - inter;
18
- return ua <= 0 ? 0 : inter / ua;
19
- }
20
-
21
- export function matchAndUpdateTracks(dets, dtSec) {
22
  if (!videoEngage) return;
23
 
 
 
 
 
 
 
 
 
 
 
 
 
24
  // Convert detections to bbox in video coordinates
25
  const w = videoEngage.videoWidth || state.frame.w;
26
  const h = videoEngage.videoHeight || state.frame.h;
@@ -136,9 +139,12 @@ export function matchAndUpdateTracks(dets, dtSec) {
136
  // prune old tracks
137
  const tNow = now();
138
  state.tracker.tracks = tracks.filter(tr => (tNow - tr.lastSeen) < CONFIG.TRACK_PRUNE_MS || tr.killed);
139
- }
140
 
141
- export function predictTracks(dtSec) {
 
 
 
142
  if (!videoEngage) return;
143
  const w = videoEngage.videoWidth || state.frame.w;
144
  const h = videoEngage.videoHeight || state.frame.h;
@@ -151,4 +157,4 @@ export function predictTracks(dtSec) {
151
  tr.bbox.x = clamp(tr.bbox.x + tr.vx * dtSec * 0.12, 0, w - 1);
152
  tr.bbox.y = clamp(tr.bbox.y + tr.vy * dtSec * 0.12, 0, h - 1);
153
  });
154
- }
 
1
+ APP.core.tracker = {};
2
+
3
+ APP.core.tracker.matchAndUpdateTracks = function (dets, dtSec) {
4
+ const { state } = APP.core;
5
+ const { CONFIG } = APP.core;
6
+ const { normBBox, lerp, now, $ } = APP.core.utils;
7
+ const { defaultAimpoint } = APP.core.physics;
8
+ const { log } = APP.ui.logging;
9
+
10
+ const videoEngage = $("#videoEngage");
11
+ const rangeBase = $("#rangeBase"); // Fixed Selector
12
+
 
 
 
 
 
 
 
 
 
13
  if (!videoEngage) return;
14
 
15
+ // IOU helper
16
+ function iou(a, b) {
17
+ const ax2 = a.x + a.w, ay2 = a.y + a.h;
18
+ const bx2 = b.x + b.w, by2 = b.y + b.h;
19
+ const ix1 = Math.max(a.x, b.x), iy1 = Math.max(a.y, b.y);
20
+ const ix2 = Math.min(ax2, bx2), iy2 = Math.min(ay2, by2);
21
+ const iw = Math.max(0, ix2 - ix1), ih = Math.max(0, iy2 - iy1);
22
+ const inter = iw * ih;
23
+ const ua = a.w * a.h + b.w * b.h - inter;
24
+ return ua <= 0 ? 0 : inter / ua;
25
+ }
26
+
27
  // Convert detections to bbox in video coordinates
28
  const w = videoEngage.videoWidth || state.frame.w;
29
  const h = videoEngage.videoHeight || state.frame.h;
 
139
  // prune old tracks
140
  const tNow = now();
141
  state.tracker.tracks = tracks.filter(tr => (tNow - tr.lastSeen) < CONFIG.TRACK_PRUNE_MS || tr.killed);
142
+ };
143
 
144
+ APP.core.tracker.predictTracks = function (dtSec) {
145
+ const { state } = APP.core;
146
+ const { $ } = APP.core.utils;
147
+ const videoEngage = $("#videoEngage");
148
  if (!videoEngage) return;
149
  const w = videoEngage.videoWidth || state.frame.w;
150
  const h = videoEngage.videoHeight || state.frame.h;
 
157
  tr.bbox.x = clamp(tr.bbox.x + tr.vx * dtSec * 0.12, 0, w - 1);
158
  tr.bbox.y = clamp(tr.bbox.y + tr.vy * dtSec * 0.12, 0, h - 1);
159
  });
160
+ };
frontend/js/core/utils.js CHANGED
@@ -1,15 +1,17 @@
1
- export const $ = (sel, root = document) => root.querySelector(sel);
2
- export const $$ = (sel, root = document) => Array.from(root.querySelectorAll(sel));
3
 
4
- export const clamp = (x, a, b) => Math.min(b, Math.max(a, x));
5
- export const lerp = (a, b, t) => a + (b - a) * t;
6
- export const now = () => performance.now();
7
 
8
- export function escapeHtml(s) {
 
 
 
 
9
  return String(s).replace(/[&<>"']/g, m => ({ "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;", "'": "&#39;" }[m]));
10
- }
11
 
12
- export function canvasToBlob(canvas, quality = 0.88) {
13
  return new Promise((resolve, reject) => {
14
  if (!canvas.toBlob) { reject(new Error("Canvas.toBlob not supported")); return; }
15
  canvas.toBlob(blob => {
@@ -17,37 +19,37 @@ export function canvasToBlob(canvas, quality = 0.88) {
17
  resolve(blob);
18
  }, "image/jpeg", quality);
19
  });
20
- }
21
 
22
- export function normBBox(bbox, w, h) {
23
  const [x, y, bw, bh] = bbox;
24
  return {
25
- x: clamp(x, 0, w - 1),
26
- y: clamp(y, 0, h - 1),
27
- w: clamp(bw, 1, w),
28
- h: clamp(bh, 1, h)
29
  };
30
- }
31
 
32
- export const loadedScripts = new Map();
33
 
34
- export function loadScriptOnce(key, src) {
35
  return new Promise((resolve, reject) => {
36
- if (loadedScripts.get(key) === "loaded") { resolve(); return; }
37
- if (loadedScripts.get(key) === "loading") {
38
  const iv = setInterval(() => {
39
- if (loadedScripts.get(key) === "loaded") { clearInterval(iv); resolve(); }
40
- if (loadedScripts.get(key) === "failed") { clearInterval(iv); reject(new Error("Script failed earlier")); }
41
  }, 50);
42
  return;
43
  }
44
 
45
- loadedScripts.set(key, "loading");
46
  const s = document.createElement("script");
47
  s.src = src;
48
  s.async = true;
49
- s.onload = () => { loadedScripts.set(key, "loaded"); resolve(); };
50
- s.onerror = () => { loadedScripts.set(key, "failed"); reject(new Error(`Failed to load ${src}`)); };
51
  document.head.appendChild(s);
52
  });
53
- }
 
1
// Shared utility namespace: DOM query shorthands and small math helpers.
APP.core.utils = {};

(function (U) {
  // querySelector / querySelectorAll shorthands with an optional root node.
  U.$ = (sel, root = document) => root.querySelector(sel);
  U.$$ = (sel, root = document) => Array.from(root.querySelectorAll(sel));

  // Numeric helpers: clamp x into [a, b], linear interpolation, monotonic clock.
  U.clamp = (x, a, b) => Math.min(b, Math.max(a, x));
  U.lerp = (a, b, t) => a + (b - a) * t;
  U.now = () => performance.now();
})(APP.core.utils);
9
+
10
// Escape &, <, >, " and ' so arbitrary text can be embedded in HTML markup.
APP.core.utils.escapeHtml = function (s) {
  const repl = { "&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;", "'": "&#39;" };
  return String(s).replace(/[&<>"']/g, (ch) => repl[ch]);
};
13
 
14
+ APP.core.utils.canvasToBlob = function (canvas, quality = 0.88) {
15
  return new Promise((resolve, reject) => {
16
  if (!canvas.toBlob) { reject(new Error("Canvas.toBlob not supported")); return; }
17
  canvas.toBlob(blob => {
 
19
  resolve(blob);
20
  }, "image/jpeg", quality);
21
  });
22
+ };
23
 
24
// Normalize a [x, y, w, h] tuple into a bbox object clamped to a w×h frame:
// the position stays inside the frame and the size is at least 1 pixel.
APP.core.utils.normBBox = function (bbox, w, h) {
  const clamp = APP.core.utils.clamp;
  const [left, top, width, height] = bbox;
  return {
    x: clamp(left, 0, w - 1),
    y: clamp(top, 0, h - 1),
    w: clamp(width, 1, w),
    h: clamp(height, 1, h)
  };
};
33
 
34
// Per-key load state for external scripts: "loading" | "loaded" | "failed".
APP.core.utils.loadedScripts = new Map();

// Inject a <script src=...> exactly once per key. Concurrent callers for a
// key that is already loading poll the shared state map until it settles.
APP.core.utils.loadScriptOnce = function (key, src) {
  const states = APP.core.utils.loadedScripts;
  return new Promise((resolve, reject) => {
    if (states.get(key) === "loaded") { resolve(); return; }
    if (states.get(key) === "loading") {
      // Another caller started the load; wait for its outcome.
      const iv = setInterval(() => {
        if (states.get(key) === "loaded") { clearInterval(iv); resolve(); }
        if (states.get(key) === "failed") { clearInterval(iv); reject(new Error("Script failed earlier")); }
      }, 50);
      return;
    }

    states.set(key, "loading");
    const tag = document.createElement("script");
    tag.src = src;
    tag.async = true;
    tag.onload = () => { states.set(key, "loaded"); resolve(); };
    tag.onerror = () => { states.set(key, "failed"); reject(new Error(`Failed to load ${src}`)); };
    document.head.appendChild(tag);
  });
};
frontend/js/core/video.js ADDED
@@ -0,0 +1,622 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
// Video management: loading, unloading, first frame capture, depth handling
APP.core.video = {};

// Load the raw clip into the hidden <video>, seek to t=0 and record the
// native frame dimensions into state.frame. Resolves once the first frame
// is ready (or the load times out).
APP.core.video.captureFirstFrame = async function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const hidden = $("#videoHidden");

  if (!hidden || !state.videoUrl) return;

  hidden.src = state.videoUrl;
  hidden.load();

  // Wait for decodable data; fail fast after 10 s.
  await new Promise((resolve, reject) => {
    hidden.onloadeddata = resolve;
    hidden.onerror = () => reject(new Error("Video failed to load"));
    setTimeout(() => reject(new Error("Video load timeout")), 10000);
  });

  // Seek to the very first frame; fall through after 500 ms regardless.
  hidden.currentTime = 0;
  await new Promise((done) => {
    hidden.onseeked = done;
    setTimeout(done, 500);
  });

  state.frame.w = hidden.videoWidth || 1280;
  state.frame.h = hidden.videoHeight || 720;
};

// Paint the hidden video's current frame onto #frameCanvas and size the
// overlay canvas identically so bbox coordinates line up 1:1.
APP.core.video.drawFirstFrame = function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const canvas = $("#frameCanvas");
  const overlay = $("#frameOverlay");
  const hidden = $("#videoHidden");
  const empty = $("#frameEmpty");

  if (!canvas || !hidden) return;

  canvas.width = state.frame.w;
  canvas.height = state.frame.h;
  canvas.getContext("2d").drawImage(hidden, 0, 0, state.frame.w, state.frame.h);

  // Keep the overlay in the same coordinate space as the frame canvas.
  if (overlay) {
    overlay.width = state.frame.w;
    overlay.height = state.frame.h;
  }

  if (empty) empty.style.display = "none";
};

// Snapshot the current frame of a <video> element onto a fresh canvas.
APP.core.video.frameToBitmap = async function (videoEl) {
  const width = videoEl.videoWidth || 1280;
  const height = videoEl.videoHeight || 720;
  const snap = document.createElement("canvas");
  snap.width = width;
  snap.height = height;
  snap.getContext("2d").drawImage(videoEl, 0, 0, width, height);
  return snap;
};

// Seek a <video> to `time` (clamped at 0); resolves on the seeked event or
// after a 600 ms safety timeout, whichever fires first.
APP.core.video.seekTo = function (videoEl, time) {
  return new Promise((resolve) => {
    if (!videoEl) { resolve(); return; }
    videoEl.currentTime = Math.max(0, time);
    videoEl.onseeked = () => resolve();
    setTimeout(resolve, 600);
  });
};
74
+
75
// Tear down the loaded video and reset the demo to its initial state.
// `options.preserveInput` keeps the file <input> value and its meta label
// so the same file can be re-processed without re-picking it.
APP.core.video.unloadVideo = async function (options = {}) {
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const { log, setStatus, setHfStatus } = APP.ui.logging;
  const preserveInput = !!options.preserveInput;

  // Stop async-job polling if running.
  if (state.hf.asyncPollInterval) {
    clearInterval(state.hf.asyncPollInterval);
    state.hf.asyncPollInterval = null;
  }

  // Revoke every blob: URL we may own. Guarded uniformly (the original
  // guarded three of four revokes): revocation must never abort the reset.
  const revokeBlob = (url) => {
    if (url && url.startsWith("blob:")) {
      try { URL.revokeObjectURL(url); } catch (_) { }
    }
  };
  revokeBlob(state.videoUrl);
  revokeBlob(state.hf.processedUrl);
  revokeBlob(state.hf.depthVideoUrl);
  revokeBlob(state.hf.depthFirstFrameUrl);

  // Reset video flags.
  state.videoUrl = null;
  state.videoFile = null;
  state.videoLoaded = false;
  state.useProcessedFeed = false;
  state.useDepthFeed = false;
  state.useFrameDepthView = false;

  // Reset backend (hf) results.
  state.hf.missionId = null;
  state.hf.plan = null;
  state.hf.processedUrl = null;
  state.hf.processedBlob = null;
  state.hf.depthVideoUrl = null;
  state.hf.depthBlob = null;
  state.hf.depthFirstFrameUrl = null;
  state.hf.depthFirstFrameBlob = null;
  state.hf.summary = null;
  state.hf.busy = false;
  state.hf.lastError = null;
  state.hf.asyncJobId = null;
  state.hf.asyncStatus = "idle";
  state.hf.firstFrameUrl = null;
  state.hf.videoUrl = null;

  setHfStatus("idle");
  state.hasReasoned = false;
  state.isReasoning = false;

  // Reset button states.
  const btnReason = $("#btnReason");
  const btnCancelReason = $("#btnCancelReason");
  const btnEngage = $("#btnEngage");

  if (btnReason) {
    btnReason.disabled = false;
    btnReason.style.opacity = "1";
    btnReason.style.cursor = "pointer";
  }
  if (btnCancelReason) btnCancelReason.style.display = "none";
  if (btnEngage) btnEngage.disabled = true;

  state.detections = [];
  state.selectedId = null;

  state.tracker.tracks = [];
  state.tracker.nextId = 1;
  state.tracker.running = false;
  state.tracker.selectedTrackId = null;
  state.tracker.beamOn = false;

  // Clear video elements.
  const videoHidden = $("#videoHidden");
  const videoEngage = $("#videoEngage");
  const videoFile = $("#videoFile");

  if (videoHidden) {
    videoHidden.removeAttribute("src");
    videoHidden.load();
  }
  if (videoEngage) {
    videoEngage.removeAttribute("src");
    videoEngage.load();
  }
  if (!preserveInput && videoFile) {
    videoFile.value = "";
  }

  // Update UI placeholders.
  const videoMeta = $("#videoMeta");
  const frameEmpty = $("#frameEmpty");
  const engageEmpty = $("#engageEmpty");
  const frameNote = $("#frameNote");
  const engageNote = $("#engageNote");

  if (!preserveInput && videoMeta) videoMeta.textContent = "No file";
  if (frameEmpty) frameEmpty.style.display = "flex";
  if (engageEmpty) engageEmpty.style.display = "flex";
  if (frameNote) frameNote.textContent = "Awaiting video";
  if (engageNote) engageNote.textContent = "Awaiting video";

  // Clear canvases.
  APP.core.video.clearCanvas($("#frameCanvas"));
  APP.core.video.clearCanvas($("#frameOverlay"));
  APP.core.video.clearCanvas($("#engageOverlay"));

  // Re-render UI components.
  if (APP.ui.radar.renderFrameRadar) APP.ui.radar.renderFrameRadar();
  if (APP.ui.cards.renderFrameTrackList) APP.ui.cards.renderFrameTrackList();
  if (APP.ui.features.renderFeatures) APP.ui.features.renderFeatures(null);
  if (APP.ui.trade.renderTrade) APP.ui.trade.renderTrade();

  setStatus("warn", "STANDBY · No video loaded");
  log("Video unloaded. Demo reset.", "w");
};

// Blank out a canvas (no-op when the element is missing).
APP.core.video.clearCanvas = function (canvas) {
  if (!canvas) return;
  canvas.getContext("2d").clearRect(0, 0, canvas.width, canvas.height);
};
201
+
202
// Match both overlay canvases to the current video/frame dimensions so
// drawn coordinates stay aligned with the pixels underneath.
APP.core.video.resizeOverlays = function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils;

  const engageVideo = $("#videoEngage");
  const engageOverlay = $("#engageOverlay");
  const frameOverlay = $("#frameOverlay");

  if (engageVideo && engageOverlay) {
    engageOverlay.width = engageVideo.videoWidth || state.frame.w;
    engageOverlay.height = engageVideo.videoHeight || state.frame.h;
  }

  if (frameOverlay) {
    frameOverlay.width = state.frame.w;
    frameOverlay.height = state.frame.h;
  }
};

// Fetch the optional backend depth video. Any failure is treated as
// "depth unavailable" and cleared silently — depth never blocks the demo.
APP.core.video.fetchDepthVideo = async function () {
  const { state } = APP.core;
  const { log } = APP.ui.logging;

  // Depth is optional — skip silently when the backend advertised no URL.
  if (!state.hf.depthVideoUrl) return;

  try {
    const resp = await fetch(state.hf.depthVideoUrl, { cache: "no-store" });

    if (!resp.ok) {
      if (resp.status === 404) {
        // Depth not enabled/available — expected, not an error.
        state.hf.depthVideoUrl = null;
        return;
      }
      if (resp.status === 202) return; // still processing; retry later
      throw new Error(`Failed to fetch depth video: ${resp.statusText}`);
    }

    // Pages served from file:// report origin "null"; stream straight from
    // the backend URL instead of holding a blob.
    const nullOrigin = (window.location && window.location.origin) === "null";
    if (nullOrigin) {
      state.hf.depthBlob = null;
      state.hf.depthVideoUrl = `${state.hf.depthVideoUrl}?t=${Date.now()}`;
      log("Depth video ready (streaming URL)");
      return;
    }

    const blob = await resp.blob();
    state.hf.depthBlob = blob;
    state.hf.depthVideoUrl = URL.createObjectURL(blob);

    log(`Depth video ready (${(blob.size / 1024 / 1024).toFixed(1)} MB)`, "g");
    APP.core.video.updateDepthChip();
  } catch (err) {
    // Depth is optional: clear quietly instead of surfacing an error.
    state.hf.depthVideoUrl = null;
    state.hf.depthBlob = null;
  }
};
270
+
271
// Fetch the optional first-frame depth image. Failures clear the URL
// silently — depth is a nice-to-have, never an error.
APP.core.video.fetchDepthFirstFrame = async function () {
  const { state } = APP.core;
  const { log } = APP.ui.logging;

  if (!state.hf.depthFirstFrameUrl) return;

  try {
    const resp = await fetch(state.hf.depthFirstFrameUrl, { cache: "no-store" });

    // 404 or other errors — depth not available, that's fine.
    if (!resp.ok) {
      state.hf.depthFirstFrameUrl = null;
      return;
    }

    const blob = await resp.blob();

    // Release a previously created object URL before replacing it (mirrors
    // fetchProcessedVideo; the original leaked blob URLs on refetch).
    if (state.hf.depthFirstFrameUrl && state.hf.depthFirstFrameUrl.startsWith("blob:")) {
      try { URL.revokeObjectURL(state.hf.depthFirstFrameUrl); } catch (_) { }
    }

    state.hf.depthFirstFrameBlob = blob;
    state.hf.depthFirstFrameUrl = URL.createObjectURL(blob);
    log("First frame depth ready", "g");
  } catch (err) {
    // Silently clear - depth is optional.
    state.hf.depthFirstFrameUrl = null;
    state.hf.depthFirstFrameBlob = null;
  }
};

// Download the backend-processed video. Throws an Error with
// code === "VIDEO_PENDING" while the backend is still rendering (HTTP 202)
// so callers can retry.
APP.core.video.fetchProcessedVideo = async function () {
  const { state } = APP.core;
  const { log } = APP.ui.logging;
  const { $ } = APP.core.utils;

  const resp = await fetch(state.hf.videoUrl, { cache: "no-store" });

  if (!resp.ok) {
    if (resp.status === 202) {
      const err = new Error("Video still processing");
      err.code = "VIDEO_PENDING";
      throw err;
    }
    throw new Error(`Failed to fetch video: ${resp.statusText}`);
  }

  // file:// pages (origin "null") stream directly instead of via a blob URL.
  const nullOrigin = (window.location && window.location.origin) === "null";
  if (nullOrigin) {
    state.hf.processedBlob = null;
    state.hf.processedUrl = `${state.hf.videoUrl}?t=${Date.now()}`;
    const btnEngage = $("#btnEngage");
    if (btnEngage) btnEngage.disabled = false;
    log("Processed video ready (streaming URL)");
    return;
  }

  const blob = await resp.blob();

  // Release the previous processed-video object URL before replacing it.
  if (state.hf.processedUrl && state.hf.processedUrl.startsWith("blob:")) {
    URL.revokeObjectURL(state.hf.processedUrl);
  }

  state.hf.processedBlob = blob;
  state.hf.processedUrl = URL.createObjectURL(blob);

  const btnEngage = $("#btnEngage");
  if (btnEngage) btnEngage.disabled = false;
  log(`Processed video ready (${(blob.size / 1024 / 1024).toFixed(1)} MB)`);
};

// Reflect depth availability in the #chipDepth toggle (clickable vs dimmed).
APP.core.video.updateDepthChip = function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils;

  const chipDepth = $("#chipDepth");
  if (!chipDepth) return;

  const depthReady = !!(state.hf.depthVideoUrl || state.hf.depthBlob);
  chipDepth.style.cursor = depthReady ? "pointer" : "not-allowed";
  chipDepth.style.opacity = depthReady ? "1" : "0.5";
};
353
+
354
// Swap the engage <video> to a new source while preserving the playhead
// position and play/pause state. Shared by the feed/depth toggles below.
APP.core.video._swapEngageSource = function (videoEl, url) {
  if (!videoEl) return;
  const t = videoEl.currentTime;
  const wasPlaying = !videoEl.paused;
  videoEl.src = url;
  videoEl.load();
  videoEl.currentTime = t;
  if (wasPlaying) videoEl.play();
};

// Toggle the engage view between the depth video and the regular feed.
APP.core.video.toggleDepthView = function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils; // fixed: `log` is not a utils export
  const { log } = APP.ui.logging;

  if (!state.hf.depthVideoUrl && !state.hf.depthBlob) {
    log("Depth video not available yet. Run Reason and wait for processing.", "w");
    return;
  }

  state.useDepthFeed = !state.useDepthFeed;

  const videoEngage = $("#videoEngage");
  const chipDepth = $("#chipDepth");

  if (state.useDepthFeed) {
    if (chipDepth) chipDepth.textContent = "VIEW:DEPTH";
    APP.core.video._swapEngageSource(videoEngage, state.hf.depthVideoUrl);
  } else {
    if (chipDepth) chipDepth.textContent = "VIEW:DEFAULT";
    const feedUrl = state.useProcessedFeed ? state.hf.processedUrl : state.videoUrl;
    APP.core.video._swapEngageSource(videoEngage, feedUrl);
  }
};

// Toggle the first-frame panel between the raw frame and its depth map.
APP.core.video.toggleFirstFrameDepthView = function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const { log } = APP.ui.logging;

  if (!state.hf.depthFirstFrameUrl) {
    log("First frame depth not available", "w");
    return;
  }

  state.useFrameDepthView = !state.useFrameDepthView;

  const frameCanvas = $("#frameCanvas");
  const chipFrameDepth = $("#chipFrameDepth");

  if (state.useFrameDepthView) {
    if (chipFrameDepth) chipFrameDepth.textContent = "VIEW:DEPTH";
    // Draw the depth first frame, then re-render the overlay on top.
    const img = new Image();
    img.onload = () => {
      if (frameCanvas) {
        frameCanvas.width = state.frame.w;
        frameCanvas.height = state.frame.h;
        frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);
        APP.ui.overlays.renderFrameOverlay();
      }
    };
    img.src = state.hf.depthFirstFrameUrl;
  } else {
    if (chipFrameDepth) chipFrameDepth.textContent = "VIEW:DEFAULT";
    // Restore the original first frame.
    APP.core.video.drawFirstFrame();
    APP.ui.overlays.renderFrameOverlay();
  }
};

// Toggle the engage feed between the raw upload and the backend-processed
// video. Always drops back out of depth view when switching feeds.
APP.core.video.toggleProcessedFeed = function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const { log } = APP.ui.logging;

  if (!state.hf.processedUrl) {
    log("Processed video not available yet", "w");
    return;
  }

  state.useProcessedFeed = !state.useProcessedFeed;
  state.useDepthFeed = false; // Reset depth view when switching feeds

  const videoEngage = $("#videoEngage");
  const chipFeed = $("#chipFeed");
  const chipDepth = $("#chipDepth");

  if (state.useProcessedFeed) {
    if (chipFeed) chipFeed.textContent = "FEED:HF";
    APP.core.video._swapEngageSource(videoEngage, state.hf.processedUrl);
  } else {
    if (chipFeed) chipFeed.textContent = "FEED:RAW";
    APP.core.video._swapEngageSource(videoEngage, state.videoUrl);
  }

  if (chipDepth) chipDepth.textContent = "VIEW:DEFAULT";
};
470
+
471
// ========= Streaming Mode for Tab 2 (Live Backend Processing) =========

// Show a live stream URL in place of the engage <video>, creating a
// dedicated <img id="streamView"> overlay on first use.
APP.core.video.setStreamingMode = function (url) {
  const { $ } = APP.core.utils;
  const videoEngage = $("#videoEngage");
  const engageEmpty = $("#engageEmpty");

  // Ensure stream image element exists.
  let streamView = $("#streamView");
  if (!streamView) {
    streamView = document.createElement("img");
    streamView.id = "streamView";
    streamView.style.width = "100%";
    streamView.style.height = "100%";
    streamView.style.objectFit = "contain";
    streamView.style.position = "absolute";
    streamView.style.top = "0";
    streamView.style.left = "0";
    streamView.style.zIndex = "10"; // Above video
    streamView.style.backgroundColor = "#000";

    // Insert into the wrapper (parent of videoEngage).
    if (videoEngage && videoEngage.parentNode) {
      videoEngage.parentNode.appendChild(streamView);
      // Ensure container is relative for absolute positioning.
      if (getComputedStyle(videoEngage.parentNode).position === "static") {
        videoEngage.parentNode.style.position = "relative";
      }
    }
  }

  if (streamView) {
    streamView.style.display = "block";
    streamView.onerror = () => {
      // If the stream fails (404 etc), silently revert to the video feed.
      streamView.style.display = "none";
      if (videoEngage) videoEngage.style.display = "block";
      // Fixed: guard videoEngage before reading .src (was an unguarded deref).
      if (engageEmpty && videoEngage && !videoEngage.src) engageEmpty.style.display = "flex";
    };
    streamView.src = url;

    if (videoEngage) videoEngage.style.display = "none";

    // Also hide empty state.
    if (engageEmpty) engageEmpty.style.display = "none";
  }
};

// Stop the live stream overlay and reveal the <video> element again.
APP.core.video.stopStreamingMode = function () {
  const { $ } = APP.core.utils;
  const videoEngage = $("#videoEngage");

  const streamView = $("#streamView");
  if (streamView) {
    streamView.src = ""; // Stop connection
    streamView.style.display = "none";
  }
  if (videoEngage) videoEngage.style.display = "block";
};
531
+
532
// ========= Display Processed First Frame (from backend) =========

// Draw the backend-processed first frame onto #frameCanvas, resize the
// overlay to the same dimensions so bbox coordinates align, then re-render
// overlays on top. Falls back to the locally captured frame on load error.
APP.core.video.displayProcessedFirstFrame = function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils;
  const { log } = APP.ui.logging;

  const frameCanvas = $("#frameCanvas");
  const frameOverlay = $("#frameOverlay");
  const frameEmpty = $("#frameEmpty");
  const frameNote = $("#frameNote");

  if (!state.hf.firstFrameUrl) {
    log("Processed first frame URL not available", "w");
    return;
  }

  const img = new Image();
  img.crossOrigin = "anonymous";
  img.onload = () => {
    if (!frameCanvas) return;

    // Adopt the image's native dimensions as the frame size.
    state.frame.w = img.naturalWidth || 1280;
    state.frame.h = img.naturalHeight || 720;

    frameCanvas.width = state.frame.w;
    frameCanvas.height = state.frame.h;
    frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);

    // The overlay must share the canvas coordinate space for bbox alignment.
    if (frameOverlay) {
      frameOverlay.width = state.frame.w;
      frameOverlay.height = state.frame.h;
    }

    if (frameEmpty) frameEmpty.style.display = "none";
    if (frameNote) frameNote.textContent = "Processed (from backend)";

    if (APP.ui.overlays.renderFrameOverlay) {
      APP.ui.overlays.renderFrameOverlay();
    }

    log("Processed first frame displayed", "g");
  };
  img.onerror = () => {
    log("Failed to load processed first frame", "e");
    // Fall back to the locally captured first frame.
    APP.core.video.drawFirstFrame();
  };
  img.src = state.hf.firstFrameUrl;
};

// ========= Display First Frame with Depth Overlay (if available) =========

// Pick the best available first-frame source: the depth map (when depth
// view is active), else the backend-processed frame, else the local capture.
APP.core.video.displayFirstFrameWithDepth = function () {
  const { state } = APP.core;
  const { $ } = APP.core.utils;

  if (state.useFrameDepthView && state.hf.depthFirstFrameUrl) {
    const frameCanvas = $("#frameCanvas");
    const img = new Image();
    img.crossOrigin = "anonymous";
    img.onload = () => {
      if (!frameCanvas) return;
      frameCanvas.width = state.frame.w;
      frameCanvas.height = state.frame.h;
      frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);
      if (APP.ui.overlays.renderFrameOverlay) {
        APP.ui.overlays.renderFrameOverlay();
      }
    };
    img.onerror = () => {
      // Depth image failed: fall back to the processed (or raw) frame.
      APP.core.video.displayProcessedFirstFrame();
    };
    img.src = state.hf.depthFirstFrameUrl;
  } else if (state.hf.firstFrameUrl) {
    // Show the processed first frame.
    APP.core.video.displayProcessedFirstFrame();
  } else {
    // Fall back to the local video first frame.
    APP.core.video.drawFirstFrame();
  }
};
622
+
frontend/js/init.js ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
// Initialize Global Namespace.
// Idempotent: if a module script (or a duplicate include of this file)
// already created window.APP or one of its sub-namespaces, keep what is
// there instead of clobbering previously registered members. The original
// unconditional assignment would silently wipe any earlier registrations
// if script order ever changed.
window.APP = window.APP || {};
APP.core = APP.core || {};
APP.ui = APP.ui || {};
APP.api = APP.api || {};
frontend/js/main.js CHANGED
@@ -1,248 +1,662 @@
1
- import { state } from './core/state.js';
2
- import { CONFIG } from './core/config.js';
3
- import { $, now } from './core/utils.js';
4
- import { getKnobs } from './core/physics.js';
5
- import { matchAndUpdateTracks, predictTracks } from './core/tracker.js';
6
- import { hfDetectAsync, checkJobStatus, cancelBackendJob } from './api/client.js';
7
-
8
- // UI Renderers
9
- import { renderFrameRadar } from './ui/radar.js';
10
- import { renderFrameOverlay } from './ui/overlays.js';
11
- import { renderFrameTrackList } from './ui/cards.js';
12
- import { renderFeatures } from './ui/features.js';
13
- import { updateHeadlines } from './ui/trade.js';
14
- import { log, setStatus, setHfStatus } from './ui/logging.js';
15
-
16
- // DOM Elements
17
- const videoEngage = $("#videoEngage");
18
- const btnUpload = $("#btn-upload");
19
- const fileInput = $("#file-input");
20
- const btnReason = $("#btn-reason");
21
- const btnCancelReason = $("#btnCancelReason");
22
- const btnEngage = $("#btn-engage");
23
- const btnReset = $("#btn-reset");
24
- const btnPause = $("#btn-pause");
25
- // const btnClear = $("#btn-clear");
26
- const detectorSelect = $("#detector-select");
27
- const missionText = $("#mission-text");
28
-
29
- // Initialization
30
- function init() {
31
- log("System initializing...", "t");
32
-
33
- // Bind Events
34
- setupFileUpload();
35
- setupControls();
36
-
37
- // Start Loop
38
- requestAnimationFrame(loop);
39
-
40
- log("System READY.", "g");
41
- }
42
-
43
- function setupFileUpload() {
44
- btnUpload.addEventListener("click", () => fileInput.click());
45
- fileInput.addEventListener("change", (e) => {
46
- const file = e.target.files[0];
47
- if (!file) return;
48
-
49
- state.videoFile = file;
50
- state.videoUrl = URL.createObjectURL(file);
51
- state.videoLoaded = true;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
 
53
- // Reset state
54
- videoEngage.src = state.videoUrl;
55
- videoEngage.load();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
 
57
- setStatus("warn", "READY · Video loaded (run Reason)");
58
- log(`Video loaded: ${file.name}`, "g");
59
- });
60
- }
 
 
 
 
61
 
62
- function setupControls() {
63
- btnReason.addEventListener("click", async () => {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
  if (!state.videoLoaded) {
65
- log("No video loaded.", "w");
 
66
  return;
67
  }
 
68
  if (state.isReasoning) {
69
- log("Reasoning in progress...", "w");
70
  return;
71
  }
72
 
 
73
  state.isReasoning = true;
74
- btnReason.disabled = true;
75
- btnCancelReason.style.display = "inline-block";
 
 
 
 
 
 
 
 
 
 
 
 
 
 
76
  setStatus("warn", "REASONING · Running perception pipeline");
77
 
 
 
 
 
 
 
 
78
  try {
79
- // 1. Prepare Request
80
- const mode = detectorSelect.value; // e.g., "coco", "hf_yolov8"
81
- const queries = missionText.value.trim();
 
82
 
83
  const form = new FormData();
84
  form.append("video", state.videoFile);
85
- form.append("mode", "object_detection"); // Simplified for now
86
  if (queries) form.append("queries", queries);
 
 
 
87
 
88
- // Map detector selection to backend param
89
- if (["hf_yolov8", "detr_resnet50", "grounding_dino", "drone_yolo"].includes(mode)) {
90
- form.append("detector", mode);
91
- }
92
-
93
- form.append("enable_gpt", "true"); // Always on
94
-
95
- // 2. Submit Async
96
  log(`Submitting job to ${state.hf.baseUrl}...`, "t");
 
 
97
  const data = await hfDetectAsync(form);
98
 
99
  state.hf.asyncJobId = data.job_id;
100
- state.hf.videoUrl = `${state.hf.baseUrl}${data.video_url}`;
101
- state.hf.firstFrameUrl = `${state.hf.baseUrl}${data.first_frame_url}`;
102
-
103
- if (data.first_frame_detections) {
104
- // Populate Tab 1 Detections
105
- state.detections = data.first_frame_detections.map((d, i) => ({
106
- id: `T${String(i + 1).padStart(2, '0')}`,
107
- label: d.label,
108
- score: d.score,
109
- bbox: d.bbox ? { x: d.bbox[0], y: d.bbox[1], w: d.bbox[2] - d.bbox[0], h: d.bbox[3] - d.bbox[1] } : { x: 0, y: 0, w: 100, h: 100 },
110
- // Mapping backend fields
111
- gpt_distance_m: d.gpt_distance_m,
112
- gpt_description: d.gpt_description,
113
- // Mock physics fields for now
114
- pkill: Math.random(),
115
- features: {}
116
- }));
117
-
118
- state.hasReasoned = true;
119
- log(`Reason complete. ${state.detections.length} objects found.`, "g");
120
- setStatus("good", "READY · Reason complete");
121
-
122
- // Force UI Update
123
- renderFrameTrackList();
124
- renderFeatures(state.detections[0]);
125
- renderFrameRadar();
126
- renderFrameOverlay(); // Draws first frame boxes
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
127
  }
128
 
129
- // Start Polling for Video
130
- pollJob(data.job_id);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
131
 
132
  } catch (err) {
133
- log(`Reason failed: ${err.message}`, "e");
134
  setStatus("bad", "ERROR · Reason failed");
 
 
135
  } finally {
136
  state.isReasoning = false;
137
- btnReason.disabled = false;
138
- btnCancelReason.style.display = "none";
139
- }
140
- });
141
-
142
- btnCancelReason.addEventListener("click", () => {
143
- if (state.hf.asyncJobId) cancelBackendJob(state.hf.asyncJobId);
144
- });
145
-
146
- // ENGAGE Controls
147
- btnEngage.addEventListener("click", () => {
148
- if (!state.hasReasoned || !state.hf.processedUrl) {
149
- // If processed video isn't ready, we play original?
150
- // Or wait.
151
- if (state.hf.asyncStatus !== "completed") {
152
- log("Processing not complete yet.", "w");
153
- return;
154
  }
 
155
  }
 
156
 
157
- videoEngage.src = state.hf.processedUrl || state.videoUrl;
158
- videoEngage.play();
159
- state.tracker.running = true;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
160
 
161
- // Seed Tracker
162
- // We use the simpler method: direct map from detections
163
  state.tracker.tracks = state.detections.map(d => ({
164
- ...d,
165
- vx: 0, vy: 0,
166
- lastSeen: now(),
167
- state: "TRACK"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
168
  }));
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
169
 
170
- log("Engage sequences started.", "g");
171
- });
172
-
173
- btnPause.addEventListener("click", () => {
174
- videoEngage.pause();
175
- state.tracker.running = false;
176
- });
177
-
178
- btnReset.addEventListener("click", () => {
179
- videoEngage.pause();
180
- videoEngage.currentTime = 0;
181
- state.tracker.tracks = []; // clear
182
- state.tracker.running = false;
183
- });
184
-
185
- // Card Selection Event
186
- document.addEventListener("track-selected", (e) => {
187
- state.selectedId = e.detail.id;
188
- renderFrameTrackList(); // update active class
189
- renderFrameOverlay(); // update highlight
190
- const det = state.detections.find(d => d.id === state.selectedId);
191
- renderFeatures(det);
192
- });
193
- }
194
-
195
- function pollJob(jobId) {
196
- const iv = setInterval(async () => {
197
- if (state.hf.asyncStatus === "completed" || state.hf.asyncStatus === "failed") {
198
- clearInterval(iv);
199
  return;
200
  }
201
 
202
- try {
203
- const status = await checkJobStatus(jobId);
204
- state.hf.asyncStatus = status.status;
205
- setHfStatus(`Job ${jobId.slice(0, 8)}: ${status.status}`);
206
-
207
- if (status.status === "completed") {
208
- state.hf.processedUrl = `${state.hf.baseUrl}${status.video_url}`;
209
- log("Processing complete. Video ready.", "g");
210
- clearInterval(iv);
211
- }
212
- } catch (e) {
213
- clearInterval(iv);
214
- }
215
- }, 2000);
216
- }
217
-
218
- // Main Animation Loop
219
- function loop() {
220
- const t = now();
221
- const dt = (t - state.tracker.lastFrameTime) / 1000;
222
- state.tracker.lastFrameTime = t;
223
-
224
- if (state.tracker.running && !videoEngage.paused) {
225
- // 1. Predict (Coast)
226
- predictTracks(dt);
227
-
228
- // 2. Measure (Update from Detection)
229
- // Since we removed local COCO fallback, what drives updates?
230
- // OPTION A: We rely on server stream (not implemented fully in this refactor).
231
- // OPTION B: We rely purely on "coasting" (dead reckoning) if no new data comes in.
232
- // For this demo refactor, we just predict.
233
-
234
- // 3. Prune
235
- // matchAndUpdateTracks([], dt); // Empty detections just triggers pruning/decay
236
  }
237
 
238
- // Render
239
- renderFrameRadar();
240
- renderFrameOverlay();
241
- // Cards update less frequently? Or every frame for position?
242
- // We update overlays every frame. Cards usually static unless sorted.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
243
 
244
- requestAnimationFrame(loop);
245
- }
246
 
247
- // Start
248
- init();
 
 
1
+ // Main Entry Point - Wire up all event handlers and run the application
2
+ document.addEventListener("DOMContentLoaded", () => {
3
+ // Shortcuts
4
+ const { state } = APP.core;
5
+ const { $, $$ } = APP.core.utils;
6
+ const { log, setStatus, setHfStatus } = APP.ui.logging;
7
+ const { hfDetectAsync, checkJobStatus, cancelBackendJob, pollAsyncJob } = APP.api.client;
8
+
9
+ // Core modules
10
+ const { captureFirstFrame, drawFirstFrame, unloadVideo, toggleDepthView, toggleFirstFrameDepthView, toggleProcessedFeed, resizeOverlays, setStreamingMode, stopStreamingMode, displayProcessedFirstFrame } = APP.core.video;
11
+ const { syncKnobDisplays, recomputeHEL } = APP.core.hel;
12
+
13
+ // UI Renderers
14
+ const { renderFrameRadar, renderLiveRadar } = APP.ui.radar;
15
+ const { renderFrameOverlay, renderEngageOverlay } = APP.ui.overlays;
16
+ const { renderFrameTrackList } = APP.ui.cards;
17
+ const { renderFeatures } = APP.ui.features;
18
+ const { renderTrade, populateTradeTarget, snapshotTrade } = APP.ui.trade;
19
+ const { computeIntelSummary, resetIntelUI, renderMissionContext } = APP.ui.intel;
20
+ const { tickAgentCursor, moveCursorToRect } = APP.ui.cursor;
21
+ const { matchAndUpdateTracks, predictTracks } = APP.core.tracker;
22
+ const { defaultAimpoint } = APP.core.physics;
23
+ const { normBBox } = APP.core.utils;
24
+
25
+ // DOM Elements
26
+ const videoEngage = $("#videoEngage");
27
+ const videoHidden = $("#videoHidden");
28
+ const videoFile = $("#videoFile");
29
+ const btnReason = $("#btnReason");
30
+ const btnCancelReason = $("#btnCancelReason");
31
+ const btnRecompute = $("#btnRecompute");
32
+ const btnClear = $("#btnClear");
33
+ const btnEject = $("#btnEject");
34
+ const btnEngage = $("#btnEngage");
35
+ const btnReset = $("#btnReset");
36
+ const btnPause = $("#btnPause");
37
+ const btnToggleSidebar = $("#btnToggleSidebar");
38
+ const btnIntelRefresh = $("#btnIntelRefresh");
39
+ const btnReplot = $("#btnReplot");
40
+ const btnSnap = $("#btnSnap");
41
+
42
+ const detectorSelect = $("#detectorSelect");
43
+ const missionText = $("#missionText");
44
+ const cursorMode = $("#cursorMode");
45
+ const frameCanvas = $("#frameCanvas");
46
+ const frameTrackList = $("#frameTrackList");
47
+ const frameEmpty = $("#frameEmpty");
48
+ const frameNote = $("#frameNote");
49
+ const engageEmpty = $("#engageEmpty");
50
+ const engageNote = $("#engageNote");
51
+
52
+ const chipFeed = $("#chipFeed");
53
+ const chipDepth = $("#chipDepth");
54
+ const chipFrameDepth = $("#chipFrameDepth");
55
+
56
+ // Initialization
57
/**
 * One-time application bootstrap, run on DOMContentLoaded.
 * Wires all event handlers, syncs the initial UI state, and kicks off the
 * requestAnimationFrame render loop. Log level letters ("t"/"g") appear to
 * be trace/good severities — confirm against the logging module.
 */
function init() {
  log("System initializing...", "t");

  setupFileUpload();
  setupControls();
  setupKnobListeners();
  setupChipToggles();
  setupTabSwitching();

  // Initial UI sync
  syncKnobDisplays();
  renderMissionContext();
  setHfStatus("idle");

  // Start main loop
  requestAnimationFrame(loop);

  log("System READY.", "g");
}
76
 
77
/**
 * Wire the video file <input> so choosing a file loads it into the app:
 * stores the File and an object URL in state, primes the engage player,
 * captures the first frame dimensions, and updates status notes.
 * Fix: the previous object URL is now revoked before a new one is created —
 * the original leaked one blob per re-upload for the page lifetime.
 */
function setupFileUpload() {
  if (!videoFile) return;

  videoFile.addEventListener("change", async (e) => {
    const file = e.target.files[0];
    if (!file) return;

    // Release the previous blob URL before replacing it; createObjectURL
    // keeps the old video's data alive until explicitly revoked.
    if (state.videoUrl) URL.revokeObjectURL(state.videoUrl);

    state.videoFile = file;
    state.videoUrl = URL.createObjectURL(file);
    state.videoLoaded = true;

    // Show meta
    const videoMeta = $("#videoMeta");
    if (videoMeta) videoMeta.textContent = file.name;

    // Load video into engage player
    if (videoEngage) {
      videoEngage.src = state.videoUrl;
      videoEngage.load();
    }

    // Hide empty states
    if (engageEmpty) engageEmpty.style.display = "none";

    // Capture first frame dimensions (but don't draw - wait for processed frame from backend)
    try {
      await captureFirstFrame();
      // Show placeholder message - actual frame will come from backend
      if (frameNote) frameNote.textContent = "Video loaded (run Reason for processed frame)";
      if (engageNote) engageNote.textContent = "Ready for Engage";
    } catch (err) {
      log(`First frame capture failed: ${err.message}`, "e");
    }

    setStatus("warn", "READY · Video loaded (run Reason)");
    log(`Video loaded: ${file.name}`, "g");
  });
}
115
+
116
/**
 * Attach click/change handlers for all primary control buttons and the
 * custom "track-selected" document event dispatched by the card list.
 * Every element is null-guarded so missing markup degrades gracefully.
 */
function setupControls() {
  // Reason button
  if (btnReason) {
    btnReason.addEventListener("click", runReason);
  }

  // Cancel Reason button
  if (btnCancelReason) {
    btnCancelReason.addEventListener("click", cancelReasoning);
  }

  // Recompute HEL button — re-derives HEL outputs, then refreshes the
  // overlay and trade plot that depend on them.
  if (btnRecompute) {
    btnRecompute.addEventListener("click", async () => {
      if (!state.hasReasoned) return;
      await recomputeHEL();
      renderFrameOverlay();
      renderTrade();
      log("HEL parameters recomputed.", "g");
    });
  }

  // Clear button — drops all Tab-1 detections and blanks dependent views.
  if (btnClear) {
    btnClear.addEventListener("click", () => {
      state.detections = [];
      state.selectedId = null;
      renderFrameTrackList();
      renderFrameOverlay();
      renderFeatures(null);
      renderTrade();
      log("Detections cleared.", "t");
    });
  }

  // Eject button
  if (btnEject) {
    btnEject.addEventListener("click", async () => {
      await unloadVideo();
    });
  }

  // Engage button
  if (btnEngage) {
    btnEngage.addEventListener("click", runEngage);
  }

  // Pause button — halts playback and the tracker prediction step.
  if (btnPause) {
    btnPause.addEventListener("click", () => {
      if (videoEngage) videoEngage.pause();
      state.tracker.running = false;
      log("Engage paused.", "t");
    });
  }

  // Reset button — rewinds the video and wipes tracker state.
  if (btnReset) {
    btnReset.addEventListener("click", () => {
      if (videoEngage) {
        videoEngage.pause();
        videoEngage.currentTime = 0;
      }
      state.tracker.tracks = [];
      state.tracker.running = false;
      state.tracker.nextId = 1;
      renderFrameTrackList();
      renderFrameRadar();
      renderLiveRadar();
      log("Engage reset.", "t");
    });
  }

  // Sidebar toggle (Tab 2)
  if (btnToggleSidebar) {
    btnToggleSidebar.addEventListener("click", () => {
      const engageGrid = $(".engage-grid");
      if (engageGrid) {
        engageGrid.classList.toggle("sidebar-collapsed");
        btnToggleSidebar.textContent = engageGrid.classList.contains("sidebar-collapsed")
          ? "▶ Show Sidebar"
          : "◀ Hide Sidebar";
      }
    });
  }

  // Intel refresh
  if (btnIntelRefresh) {
    btnIntelRefresh.addEventListener("click", async () => {
      if (!state.videoLoaded) return;
      log("Refreshing mission intel summary...", "t");
      await computeIntelSummary();
    });
  }

  // Trade space controls
  if (btnReplot) {
    btnReplot.addEventListener("click", renderTrade);
  }
  if (btnSnap) {
    btnSnap.addEventListener("click", snapshotTrade);
  }
  const tradeTarget = $("#tradeTarget");
  if (tradeTarget) {
    tradeTarget.addEventListener("change", renderTrade);
  }

  // Track selection event (dispatched by cards.js to avoid a circular import).
  document.addEventListener("track-selected", (e) => {
    state.selectedId = e.detail.id;
    state.tracker.selectedTrackId = e.detail.id;
    renderFrameTrackList();
    renderFrameOverlay();
    const det = state.detections.find(d => d.id === state.selectedId);
    renderFeatures(det);
  });

  // Cursor mode toggle
  if (cursorMode) {
    cursorMode.addEventListener("change", () => {
      state.ui.cursorMode = cursorMode.value;
      if (state.ui.cursorMode === "off" && APP.ui.cursor.setCursorVisible) {
        APP.ui.cursor.setCursorVisible(false);
      }
    });
  }
}
243
+
244
/**
 * React to every <input>/<select> change on the page: keep the knob value
 * displays in sync, and — once a Reason pass has run — re-derive HEL
 * outputs and refresh the dependent overlay and trade views.
 */
function setupKnobListeners() {
  for (const control of document.querySelectorAll("input, select")) {
    control.addEventListener("input", () => {
      syncKnobDisplays();
      if (!state.hasReasoned) return; // nothing to recompute before Reason
      recomputeHEL();
      renderFrameOverlay();
      renderTrade();
    });
  }

  // Initial sync
  syncKnobDisplays();
}
261
+
262
/**
 * Make the status "chips" clickable toggles:
 *  - chipFeed: processed (HF) vs raw video feed,
 *  - chipDepth: depth vs default view in the Engage tab,
 *  - chipFrameDepth: depth vs default first-frame view in Tab 1 (only once
 *    the backend has delivered a first-frame depth URL).
 * All toggles are no-ops until a video is loaded.
 */
function setupChipToggles() {
  // Toggle processed/raw feed
  if (chipFeed) {
    chipFeed.style.cursor = "pointer";
    chipFeed.addEventListener("click", () => {
      if (!state.videoLoaded) return;
      toggleProcessedFeed();
      log(`Feed set to: ${state.useProcessedFeed ? "HF" : "RAW"}`, "t");
    });
  }

  // Toggle depth view (Tab 2)
  if (chipDepth) {
    chipDepth.style.cursor = "pointer";
    chipDepth.addEventListener("click", () => {
      if (!state.videoLoaded) return;
      toggleDepthView();
      log(`Engage view set to: ${state.useDepthFeed ? "DEPTH" : "DEFAULT"}`, "t");
    });
  }

  // Toggle first frame depth view (Tab 1)
  if (chipFrameDepth) {
    chipFrameDepth.style.cursor = "pointer";
    chipFrameDepth.addEventListener("click", () => {
      if (!state.videoLoaded) return;
      if (!state.hf.depthFirstFrameUrl) {
        log("First frame depth not ready yet. Run Reason and wait for depth processing.", "w");
        return;
      }
      toggleFirstFrameDepthView();
      log(`First frame view set to: ${state.useFrameDepthView ? "DEPTH" : "DEFAULT"}`, "t");
    });
  }
}
297
+
298
/**
 * Wire every ".tabbtn" so clicking it activates the matching ".tab" panel
 * (matched via the button's data-tab attribute) and runs that tab's
 * refresh hooks.
 */
function setupTabSwitching() {
  const tabs = Array.from(document.querySelectorAll(".tabbtn"));
  for (const btn of tabs) {
    btn.addEventListener("click", () => {
      // Deactivate every button and panel, then activate the clicked pair.
      for (const other of tabs) other.classList.remove("active");
      for (const panel of document.querySelectorAll(".tab")) panel.classList.remove("active");
      btn.classList.add("active");

      const tabName = btn.dataset.tab;
      const targetPanel = $(`#tab-${tabName}`);
      if (targetPanel) targetPanel.classList.add("active");

      // Tab-specific actions
      if (tabName === "trade") {
        populateTradeTarget();
        renderTrade();
      }
      if (tabName === "engage") {
        resizeOverlays();
        renderLiveRadar();
      }
    });
  }
}
321
+
322
/**
 * Run the full "Reason" pipeline for the loaded video:
 *  1. guard against missing video / re-entrancy, lock the UI,
 *  2. clear previous detections and dependent views,
 *  3. submit the video + options to the backend (hfDetectAsync),
 *  4. load the processed first frame, adopting ITS dimensions as the
 *     canonical frame size BEFORE mapping detections onto it,
 *  5. stash depth/stream URLs, optionally start live streaming,
 *  6. poll for job completion in the background, seed Tab-2 tracks,
 *     and kick off the intel summary.
 * The finally-block always unlocks the Reason button.
 */
async function runReason() {
  if (!state.videoLoaded) {
    log("No video loaded. Upload a video first.", "w");
    setStatus("warn", "READY · Upload a video");
    return;
  }

  if (state.isReasoning) {
    log("Reason already in progress. Please wait.", "w");
    return;
  }

  // Lock the Reason process
  state.isReasoning = true;
  if (btnReason) {
    btnReason.disabled = true;
    btnReason.style.opacity = "0.5";
    btnReason.style.cursor = "not-allowed";
  }
  if (btnCancelReason) btnCancelReason.style.display = "inline-block";
  if (btnEngage) btnEngage.disabled = true;

  // Clear previous detections
  state.detections = [];
  state.selectedId = null;
  renderFrameTrackList();
  renderFrameOverlay();
  renderFeatures(null);
  renderTrade();

  setStatus("warn", "REASONING · Running perception pipeline");

  // Agent cursor flair
  if (state.ui.cursorMode === "on" && moveCursorToRect) {
    if (btnReason) moveCursorToRect(btnReason.getBoundingClientRect());
    if (frameCanvas) setTimeout(() => moveCursorToRect(frameCanvas.getBoundingClientRect()), 260);
    if (frameTrackList) setTimeout(() => moveCursorToRect(frameTrackList.getBoundingClientRect()), 560);
  }

  try {
    const mode = detectorSelect ? detectorSelect.value : "hf_yolov8";
    const queries = missionText ? missionText.value.trim() : "";
    const enableGPT = $("#enableGPTToggle")?.checked || false;
    const enableDepth = $("#enableDepthToggle")?.checked || false;

    const form = new FormData();
    form.append("video", state.videoFile);
    form.append("mode", "object_detection");
    if (queries) form.append("queries", queries);
    form.append("detector", mode);
    form.append("enable_gpt", enableGPT ? "true" : "false");
    form.append("enable_depth", enableDepth ? "true" : "false");

    log(`Submitting job to ${state.hf.baseUrl}...`, "t");
    setHfStatus("submitting job...");

    const data = await hfDetectAsync(form);

    state.hf.asyncJobId = data.job_id;

    // Store raw detections (will process after image loads to get correct dimensions)
    const rawDetections = data.first_frame_detections || [];

    // Display processed first frame from backend (only processed frame, not raw)
    // This is async - image loading will update state.frame.w/h
    if (data.first_frame_url) {
      // Backend may return absolute or relative URLs; normalize to absolute.
      state.hf.firstFrameUrl = data.first_frame_url.startsWith("http")
        ? data.first_frame_url
        : `${state.hf.baseUrl}${data.first_frame_url}`;

      // Wait for image to load so we have correct dimensions before processing detections
      await new Promise((resolve, reject) => {
        const img = new Image();
        img.crossOrigin = "anonymous";
        img.onload = () => {
          // Update frame dimensions from loaded image
          state.frame.w = img.naturalWidth || 1280;
          state.frame.h = img.naturalHeight || 720;

          // Resize canvases to match
          const frameCanvas = $("#frameCanvas");
          const frameOverlay = $("#frameOverlay");
          if (frameCanvas) {
            frameCanvas.width = state.frame.w;
            frameCanvas.height = state.frame.h;
            frameCanvas.getContext("2d").drawImage(img, 0, 0, state.frame.w, state.frame.h);
          }
          if (frameOverlay) {
            frameOverlay.width = state.frame.w;
            frameOverlay.height = state.frame.h;
          }

          // Hide empty state
          const frameEmpty = $("#frameEmpty");
          const frameNote = $("#frameNote");
          if (frameEmpty) frameEmpty.style.display = "none";
          if (frameNote) frameNote.textContent = "Processed (from backend)";

          log(`Processed first frame displayed (${state.frame.w}×${state.frame.h})`, "g");
          resolve();
        };
        img.onerror = () => {
          // Load failure resolves (not rejects) so the pipeline continues
          // with the local frame instead of aborting the whole Reason pass.
          log("Failed to load processed first frame, using local frame", "w");
          drawFirstFrame();
          resolve();
        };
        img.src = state.hf.firstFrameUrl;
      });
    }

    // NOW process detections (after frame dimensions are correct)
    if (rawDetections.length > 0) {
      processFirstFrameDetections(rawDetections);
    }

    // Store depth URLs if provided
    if (data.depth_video_url) {
      state.hf.depthVideoUrl = data.depth_video_url.startsWith("http")
        ? data.depth_video_url
        : `${state.hf.baseUrl}${data.depth_video_url}`;
      log("Depth video URL received", "t");
    }
    if (data.first_frame_depth_url) {
      state.hf.depthFirstFrameUrl = data.first_frame_depth_url.startsWith("http")
        ? data.first_frame_depth_url
        : `${state.hf.baseUrl}${data.first_frame_depth_url}`;
      log("First frame depth URL received", "t");
    }

    // Enable streaming mode if stream_url is provided (Tab 2 live view)
    const enableStream = $("#enableStreamToggle")?.checked;

    if (data.stream_url && enableStream) {
      const streamUrl = data.stream_url.startsWith("http")
        ? data.stream_url
        : `${state.hf.baseUrl}${data.stream_url}`;
      log("Activating live stream...", "t");
      setStreamingMode(streamUrl);
      log("Live view available in 'Engage' tab.", "g");
      setStatus("warn", "Live processing... View in Engage tab");

      // Trigger resize/render for Tab 2
      resizeOverlays();
      renderLiveRadar();
    }

    // Start polling for completion (fire-and-forget; UI stays responsive)
    pollAsyncJob().then(() => {
      log("Video processing complete.", "g");
      // Stop streaming mode once video is ready
      stopStreamingMode();
    }).catch(err => {
      log(`Polling error: ${err.message}`, "e");
      stopStreamingMode();
    });

    state.hasReasoned = true;
    setStatus("good", "READY · Reason complete (you can Engage)");
    log("Reason complete.", "g");

    // Seed tracks for Tab 2
    seedTracksFromTab1();
    renderFrameRadar();

    // Generate intel summary (async)
    computeIntelSummary();

  } catch (err) {
    setStatus("bad", "ERROR · Reason failed");
    log(`Reason failed: ${err.message}`, "e");
    console.error(err);
  } finally {
    state.isReasoning = false;
    if (btnReason) {
      btnReason.disabled = false;
      btnReason.style.opacity = "1";
      btnReason.style.cursor = "pointer";
    }
    if (btnCancelReason) btnCancelReason.style.display = "none";
  }
}
503
 
504
/**
 * Map raw backend detections into the Tab-1 detection records held in
 * state.detections, then refresh the card list, feature panel and overlay.
 * bbox arrives as [x1, y1, x2, y2] and is converted to {x, y, w, h}.
 * Fix: use nullish coalescing instead of `||` for numeric backend fields —
 * the original turned a legitimate score of 0 into 0.5 and a 0-metre GPT
 * distance into null.
 *
 * @param {Array<Object>} dets raw first-frame detections from the backend
 */
function processFirstFrameDetections(dets) {
  state.detections = dets.map((d, i) => {
    const id = `T${String(i + 1).padStart(2, "0")}`;
    const ap = defaultAimpoint(d.label || d.class);
    const bbox = d.bbox
      ? { x: d.bbox[0], y: d.bbox[1], w: d.bbox[2] - d.bbox[0], h: d.bbox[3] - d.bbox[1] }
      : { x: 0, y: 0, w: 10, h: 10 };

    return {
      id,
      label: d.label || d.class,
      // `??` keeps a genuine 0 score; only null/undefined fall back.
      score: d.score ?? 0.5,
      bbox,
      aim: { ...ap },
      features: {},
      baseRange_m: null,
      baseAreaFrac: (bbox.w * bbox.h) / (state.frame.w * state.frame.h),
      baseDwell_s: 5.0,
      reqP_kW: 40,
      maxP_kW: 0,
      pkill: 0,
      depth_rel: Number.isFinite(d.depth_rel) ? d.depth_rel : null,
      gpt_distance_m: d.gpt_distance_m ?? null,
      gpt_direction: d.gpt_direction ?? null,
      gpt_description: d.gpt_description ?? null
    };
  });

  state.selectedId = state.detections[0]?.id || null;

  renderFrameTrackList();
  renderFeatures(state.detections[0] || null);
  renderFrameOverlay();

  log(`Detected ${state.detections.length} objects in first frame.`, "g");
}
540
 
541
/**
 * Seed the Tab-2 tracker with one track per Tab-1 detection, copying over
 * geometry, HEL parameters, depth and GPT metadata, and initializing the
 * kinematic fields (vx/vy, dwellAccum, TRACK state).
 * Fix: use nullish coalescing instead of `||` for numeric fields so a
 * legitimate 0 (e.g. reqP_kW or baseDwell_s of 0) is preserved rather than
 * silently replaced by the fallback; null/undefined still fall through.
 */
function seedTracksFromTab1() {
  const rangeBase = $("#rangeBase");
  state.tracker.tracks = state.detections.map(d => ({
    id: d.id,
    label: d.label,
    bbox: { ...d.bbox },
    score: d.score,
    aimRel: d.aim ? { relx: d.aim.relx, rely: d.aim.rely, label: d.aim.label } : { relx: 0.5, rely: 0.5, label: "center_mass" },
    baseAreaFrac: d.baseAreaFrac ?? ((d.bbox.w * d.bbox.h) / (state.frame.w * state.frame.h)),
    baseRange_m: d.baseRange_m ?? (rangeBase ? +rangeBase.value : 1500),
    baseDwell_s: d.baseDwell_s ?? 4.0,
    reqP_kW: d.reqP_kW ?? 35,
    depth_rel: d.depth_rel,
    gpt_distance_m: d.gpt_distance_m,
    gpt_direction: d.gpt_direction,
    gpt_description: d.gpt_description,
    lastSeen: APP.core.utils.now(),
    vx: 0,
    vy: 0,
    dwellAccum: 0,
    killed: false,
    state: "TRACK",
    assessT: 0
  }));
  state.tracker.nextId = state.detections.length + 1;
  log(`Seeded ${state.tracker.tracks.length} tracks from Tab 1 detections.`, "t");
}
568
+
569
/**
 * Abort an in-flight Reason pass from the Cancel button: stop client-side
 * polling and streaming, ask the backend to cancel the job, reset the
 * reasoning flags, and restore the Reason/Cancel button states.
 * Safe to call even if no job is active (all steps are guarded).
 */
function cancelReasoning() {
  // Stop HF polling if running
  if (state.hf.asyncPollInterval) {
    clearInterval(state.hf.asyncPollInterval);
    state.hf.asyncPollInterval = null;
    log("HF polling stopped.", "w");
  }

  // Stop streaming mode
  stopStreamingMode();

  // Cancel backend job if it exists (fire-and-forget; no await)
  const jobId = state.hf.asyncJobId;
  if (jobId) {
    cancelBackendJob(jobId, "cancel button");
  }

  // Reset state
  state.isReasoning = false;
  state.hf.busy = false;
  state.hf.asyncJobId = null;
  state.hf.asyncStatus = "cancelled";

  // Re-enable Reason button
  if (btnReason) {
    btnReason.disabled = false;
    btnReason.style.opacity = "1";
    btnReason.style.cursor = "pointer";
  }
  if (btnCancelReason) btnCancelReason.style.display = "none";

  setStatus("warn", "CANCELLED · Reasoning stopped");
  setHfStatus("cancelled (stopped by user)");
  log("Reasoning cancelled by user.", "w");
}
604
 
605
/**
 * Start the Engage sequence: switch to the Engage tab, play the processed
 * video (falling back to the raw upload if processing hasn't finished),
 * and start the tracker. Requires a prior successful Reason pass; tracks
 * are (re)seeded from Tab-1 detections if none exist yet.
 */
function runEngage() {
  if (!state.hasReasoned) {
    log("Please run Reason first.", "w");
    return;
  }

  // Switch to engage tab (reuses the tab-button click handler)
  const engageTab = $(`.tabbtn[data-tab="engage"]`);
  if (engageTab) engageTab.click();

  // Set video source
  if (videoEngage) {
    videoEngage.src = state.hf.processedUrl || state.videoUrl;
    videoEngage.play();
  }

  state.tracker.running = true;
  state.tracker.lastFrameTime = APP.core.utils.now();

  // Ensure tracks are seeded
  if (state.tracker.tracks.length === 0) {
    seedTracksFromTab1();
  }

  log("Engage sequence started.", "g");
}
631
 
632
/**
 * Main requestAnimationFrame loop: advances the tracker (dead-reckoning
 * prediction) while engaged and the video is playing, then re-renders the
 * radars, overlays, and agent cursor every frame. dt is clamped to 100 ms
 * so a backgrounded tab doesn't produce one huge prediction step.
 */
function loop() {
  const { now } = APP.core.utils;
  const t = now();

  // Guard against huge dt on first frame
  if (state.tracker.lastFrameTime === 0) state.tracker.lastFrameTime = t;

  const dt = Math.min((t - state.tracker.lastFrameTime) / 1000, 0.1);
  state.tracker.lastFrameTime = t;

  // Update tracker when engaged
  if (state.tracker.running && videoEngage && !videoEngage.paused) {
    predictTracks(dt);
  }

  // Render UI (each renderer is optional; guard for partial module loads)
  if (renderFrameRadar) renderFrameRadar();
  if (renderLiveRadar) renderLiveRadar();
  if (renderFrameOverlay) renderFrameOverlay();
  if (renderEngageOverlay) renderEngageOverlay();
  if (tickAgentCursor) tickAgentCursor();

  requestAnimationFrame(loop);
}
656
 
657
+ // Expose state for debugging
658
+ window.__LP_STATE__ = state;
659
 
660
+ // Start
661
+ init();
662
+ });
frontend/js/ui/cards.js CHANGED
@@ -1,15 +1,16 @@
1
- import { state } from '../core/state.js';
2
- import { $ } from '../core/utils.js';
3
 
4
- const frameTrackList = $("#frameTrackList");
5
- const trackCount = $("#track-count");
 
 
 
6
 
7
- export function renderFrameTrackList() {
8
- if (!frameTrackList || !trackCount) return;
9
  frameTrackList.innerHTML = "";
10
 
11
  const dets = state.detections || [];
12
- trackCount.textContent = dets.length;
13
 
14
  if (dets.length === 0) {
15
  frameTrackList.innerHTML = '<div style="font-style:italic; color:var(--text-dim); text-align:center; margin-top:20px;">No objects tracked.</div>';
@@ -35,8 +36,6 @@ export function renderFrameTrackList() {
35
  if (state.selectedId === id) card.classList.add("active");
36
  card.id = `card-${id}`;
37
 
38
- // Circular dependency risk if we import selectObject from main?
39
- // We'll dispatch a custom event instead.
40
  card.onclick = () => {
41
  const ev = new CustomEvent("track-selected", { detail: { id } });
42
  document.dispatchEvent(ev);
@@ -58,4 +57,4 @@ export function renderFrameTrackList() {
58
  `;
59
  frameTrackList.appendChild(card);
60
  });
61
- }
 
1
+ APP.ui.cards = {};
 
2
 
3
+ APP.ui.cards.renderFrameTrackList = function () {
4
+ const { state } = APP.core;
5
+ const { $ } = APP.core.utils;
6
+ const frameTrackList = $("#frameTrackList");
7
+ const trackCount = $("#trackCount"); // Correct ID
8
 
9
+ if (!frameTrackList) return;
 
10
  frameTrackList.innerHTML = "";
11
 
12
  const dets = state.detections || [];
13
+ if (trackCount) trackCount.textContent = dets.length;
14
 
15
  if (dets.length === 0) {
16
  frameTrackList.innerHTML = '<div style="font-style:italic; color:var(--text-dim); text-align:center; margin-top:20px;">No objects tracked.</div>';
 
36
  if (state.selectedId === id) card.classList.add("active");
37
  card.id = `card-${id}`;
38
 
 
 
39
  card.onclick = () => {
40
  const ev = new CustomEvent("track-selected", { detail: { id } });
41
  document.dispatchEvent(ev);
 
57
  `;
58
  frameTrackList.appendChild(card);
59
  });
60
+ };
frontend/js/ui/cursor.js ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Agent Cursor Animation Module
2
+ APP.ui.cursor = {};
3
+
4
+ APP.ui.cursor.ensureAgentCursorOverlay = function () {
5
+ const { $ } = APP.core.utils;
6
+ if ($("#agentCursor")) return;
7
+
8
+ const el = document.createElement("div");
9
+ el.id = "agentCursor";
10
+ el.style.cssText = `
11
+ position: fixed;
12
+ width: 12px;
13
+ height: 12px;
14
+ border-radius: 50%;
15
+ background: linear-gradient(135deg, rgba(34, 211, 238, 0.9), rgba(124, 58, 237, 0.9));
16
+ box-shadow: 0 0 20px rgba(34, 211, 238, 0.6), 0 0 40px rgba(124, 58, 237, 0.4);
17
+ pointer-events: none;
18
+ z-index: 10000;
19
+ opacity: 0;
20
+ display: none;
21
+ transition: opacity 0.3s ease;
22
+ `;
23
+ document.body.appendChild(el);
24
+ };
25
+
26
+ APP.ui.cursor.setCursorVisible = function (visible) {
27
+ const { $ } = APP.core.utils;
28
+ const { state } = APP.core;
29
+
30
+ APP.ui.cursor.ensureAgentCursorOverlay();
31
+ const el = $("#agentCursor");
32
+
33
+ if (!el) return;
34
+
35
+ state.ui.agentCursor.visible = visible;
36
+ el.style.opacity = visible ? "1" : "0";
37
+ el.style.display = visible ? "block" : "none";
38
+ };
39
+
40
+ APP.ui.cursor.moveCursorToRect = function (rect) {
41
+ const { state } = APP.core;
42
+ const { $, now } = APP.core.utils;
43
+
44
+ if (state.ui.cursorMode === "off") return;
45
+
46
+ APP.ui.cursor.ensureAgentCursorOverlay();
47
+ const el = $("#agentCursor");
48
+
49
+ if (!el) return;
50
+
51
+ const c = state.ui.agentCursor;
52
+ c.visible = true;
53
+ c.target = rect;
54
+ c.t0 = now();
55
+ el.style.opacity = "1";
56
+ el.style.display = "block";
57
+ };
58
+
59
+ APP.ui.cursor.tickAgentCursor = function () {
60
+ const { state } = APP.core;
61
+ const { $, clamp, now } = APP.core.utils;
62
+ const el = $("#agentCursor");
63
+
64
+ if (!el || state.ui.cursorMode !== "on" || !state.ui.agentCursor.visible) return;
65
+
66
+ const c = state.ui.agentCursor;
67
+ if (!c.target) return;
68
+
69
+ const tx = c.target.left + c.target.width * 0.72;
70
+ const ty = c.target.top + c.target.height * 0.50;
71
+
72
+ // Smooth spring physics
73
+ const dx = tx - (c.x * window.innerWidth);
74
+ const dy = ty - (c.y * window.innerHeight);
75
+ c.vx = (c.vx + dx * 0.0018) * 0.85;
76
+ c.vy = (c.vy + dy * 0.0018) * 0.85;
77
+
78
+ const px = (c.x * window.innerWidth) + c.vx * 18;
79
+ const py = (c.y * window.innerHeight) + c.vy * 18;
80
+ c.x = clamp(px / window.innerWidth, 0.02, 0.98);
81
+ c.y = clamp(py / window.innerHeight, 0.02, 0.98);
82
+
83
+ el.style.transform = `translate(${c.x * window.innerWidth}px, ${c.y * window.innerHeight}px)`;
84
+
85
+ // Hide after settling
86
+ const settle = Math.hypot(dx, dy);
87
+ if (settle < 6 && (now() - c.t0) > 650) {
88
+ el.style.opacity = "0.75";
89
+ }
90
+ };
frontend/js/ui/features.js CHANGED
@@ -1,9 +1,10 @@
1
- import { $ } from '../core/utils.js';
2
 
3
- const featureTable = $("#featureTable");
4
- const selId = $("#sel-id");
 
 
5
 
6
- export function renderFeatures(det) {
7
  if (!featureTable || !selId) return;
8
 
9
  selId.textContent = det ? det.id : "—";
@@ -32,4 +33,4 @@ export function renderFeatures(det) {
32
  tbody.appendChild(tr);
33
  }
34
  }
35
- }
 
1
+ APP.ui.features = {};
2
 
3
+ APP.ui.features.renderFeatures = function (det) {
4
+ const { $ } = APP.core.utils;
5
+ const featureTable = $("#featureTable");
6
+ const selId = $("#selId"); // Correct ID
7
 
 
8
  if (!featureTable || !selId) return;
9
 
10
  selId.textContent = det ? det.id : "—";
 
33
  tbody.appendChild(tr);
34
  }
35
  }
36
+ };
frontend/js/ui/intel.js ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Intel Summary Module - Mission intel generation and display
2
+ APP.ui.intel = {};
3
+
4
+ APP.ui.intel.setIntelStatus = function (kind, text) {
5
+ const { $ } = APP.core.utils;
6
+ const intelStamp = $("#intelStamp");
7
+ const intelDot = $("#intelDot");
8
+
9
+ if (!intelStamp || !intelDot) return;
10
+
11
+ intelStamp.innerHTML = text;
12
+ intelDot.className = "dot" + (kind === "warn" ? " warn" : (kind === "bad" ? " bad" : ""));
13
+ intelDot.style.width = "7px";
14
+ intelDot.style.height = "7px";
15
+ intelDot.style.boxShadow = "none";
16
+ };
17
+
18
+ APP.ui.intel.setIntelThumb = function (i, dataUrl) {
19
+ const { $ } = APP.core.utils;
20
+ const thumbs = [$("#intelThumb0"), $("#intelThumb1"), $("#intelThumb2")];
21
+ const img = thumbs[i];
22
+ if (!img) return;
23
+ img.src = dataUrl || "";
24
+ };
25
+
26
+ APP.ui.intel.resetIntelUI = function () {
27
+ const { $ } = APP.core.utils;
28
+ const intelSummaryBox = $("#intelSummaryBox");
29
+
30
+ if (!intelSummaryBox) return;
31
+ intelSummaryBox.innerHTML = 'Upload a video, then click <b>Reason</b> to generate an unbiased scene summary.';
32
+ APP.ui.intel.setIntelStatus("warn", "Idle");
33
+ APP.ui.intel.setIntelThumb(0, "");
34
+ APP.ui.intel.setIntelThumb(1, "");
35
+ APP.ui.intel.setIntelThumb(2, "");
36
+ };
37
+
38
+ // External hook for intel summary (can be replaced by user)
39
+ APP.ui.intel.externalIntel = async function (frames) {
40
+ console.log("externalIntel called with", frames.length, "frames");
41
+ return "Video processed. No external intel provider connected.";
42
+ };
43
+
44
+ APP.ui.intel.computeIntelSummary = async function () {
45
+ const { state } = APP.core;
46
+ const { $ } = APP.core.utils;
47
+ const { log } = APP.ui.logging;
48
+
49
+ const intelSummaryBox = $("#intelSummaryBox");
50
+ const videoHidden = $("#videoHidden");
51
+ const videoEngage = $("#videoEngage");
52
+
53
+ if (!intelSummaryBox) return;
54
+ if (!state.videoLoaded) {
55
+ APP.ui.intel.resetIntelUI();
56
+ return;
57
+ }
58
+ if (state.intelBusy) return;
59
+
60
+ state.intelBusy = true;
61
+ APP.ui.intel.setIntelStatus("warn", "Generating…");
62
+ intelSummaryBox.textContent = "Sampling frames and running analysis…";
63
+
64
+ try {
65
+ const videoEl = videoHidden || videoEngage;
66
+ const dur = videoEl ? (videoEl.duration || 0) : 0;
67
+ const times = [0, dur ? dur * 0.33 : 1, dur ? dur * 0.66 : 2];
68
+ const frames = [];
69
+
70
+ for (let i = 0; i < times.length; i++) {
71
+ await APP.core.video.seekTo(videoEl, times[i]);
72
+
73
+ const canvas = document.createElement("canvas");
74
+ canvas.width = 640;
75
+ canvas.height = 360;
76
+ const ctx = canvas.getContext("2d");
77
+ ctx.drawImage(videoEl, 0, 0, canvas.width, canvas.height);
78
+ const dataUrl = canvas.toDataURL("image/jpeg", 0.6);
79
+ frames.push(dataUrl);
80
+
81
+ try {
82
+ APP.ui.intel.setIntelThumb(i, dataUrl);
83
+ } catch (_) { }
84
+ }
85
+
86
+ const summary = await APP.ui.intel.externalIntel(frames);
87
+
88
+ intelSummaryBox.textContent = summary;
89
+ APP.ui.intel.setIntelStatus("good", `Updated · ${new Date().toLocaleTimeString()}`);
90
+ } catch (err) {
91
+ APP.ui.intel.setIntelStatus("bad", "Summary unavailable");
92
+ intelSummaryBox.textContent = `Unable to generate summary: ${err.message}`;
93
+ console.error(err);
94
+ } finally {
95
+ state.intelBusy = false;
96
+ }
97
+ };
98
+
99
+ // Render mission context (if applicable)
100
+ APP.ui.intel.renderMissionContext = function () {
101
+ const { state } = APP.core;
102
+ const { $ } = APP.core.utils;
103
+
104
+ const missionClassesEl = $("#missionClasses");
105
+ const missionIdEl = $("#missionId");
106
+
107
+ if (missionClassesEl) {
108
+ if (state.hf.queries && state.hf.queries.length > 0) {
109
+ missionClassesEl.textContent = state.hf.queries.join(", ");
110
+ } else {
111
+ missionClassesEl.textContent = "All objects (no filter)";
112
+ }
113
+ }
114
+
115
+ if (missionIdEl) {
116
+ missionIdEl.textContent = state.hf.missionId || "—";
117
+ }
118
+ };
frontend/js/ui/logging.js CHANGED
@@ -1,10 +1,8 @@
1
- import { $ } from '../core/utils.js';
2
 
3
- const sysDot = $("#sys-dot");
4
- const sysStatus = $("#sys-status");
5
- const consoleHook = $("#console-hook");
6
-
7
- export function log(msg, type = "i") {
8
  if (!consoleHook) return;
9
  const div = document.createElement("div");
10
  div.className = "log-line";
@@ -18,9 +16,12 @@ export function log(msg, type = "i") {
18
  div.innerHTML = `<span class="ts">[${ts}]</span> <span style="color:${color}">${msg}</span>`;
19
  consoleHook.appendChild(div);
20
  consoleHook.scrollTop = consoleHook.scrollHeight;
21
- }
22
 
23
- export function setStatus(level, text) {
 
 
 
24
  if (!sysDot || !sysStatus) return;
25
  let color = "var(--text-dim)";
26
  if (level === "good") color = "var(--good)";
@@ -31,13 +32,14 @@ export function setStatus(level, text) {
31
  sysDot.style.boxShadow = `0 0 10px ${color}`;
32
  sysStatus.textContent = text;
33
  sysStatus.style.color = color === "var(--text-dim)" ? "var(--text-main)" : color;
34
- }
 
 
 
 
 
35
 
36
- export function setHfStatus(msg) {
37
- // Optional status line updates for HF backend events
38
- if (msg.startsWith("error")) {
39
- // console.error(msg); // Reduced noise
40
- } else {
41
- // console.log(`[HF] ${msg}`);
42
  }
43
- }
 
1
+ APP.ui.logging = {};
2
 
3
+ APP.ui.logging.log = function (msg, type = "i") {
4
+ const { $ } = APP.core.utils;
5
+ const consoleHook = $("#sysLog"); // Fixed ID: sysLog from html
 
 
6
  if (!consoleHook) return;
7
  const div = document.createElement("div");
8
  div.className = "log-line";
 
16
  div.innerHTML = `<span class="ts">[${ts}]</span> <span style="color:${color}">${msg}</span>`;
17
  consoleHook.appendChild(div);
18
  consoleHook.scrollTop = consoleHook.scrollHeight;
19
+ };
20
 
21
+ APP.ui.logging.setStatus = function (level, text) {
22
+ const { $ } = APP.core.utils;
23
+ const sysDot = $("#sys-dot");
24
+ const sysStatus = $("#sys-status");
25
  if (!sysDot || !sysStatus) return;
26
  let color = "var(--text-dim)";
27
  if (level === "good") color = "var(--good)";
 
32
  sysDot.style.boxShadow = `0 0 10px ${color}`;
33
  sysStatus.textContent = text;
34
  sysStatus.style.color = color === "var(--text-dim)" ? "var(--text-main)" : color;
35
+ };
36
+
37
+ APP.ui.logging.setHfStatus = function (msg) {
38
+ const { $ } = APP.core.utils;
39
+ const el = $("#hfBackendStatus");
40
+ if (el) el.textContent = `HF Backend: ${msg}`;
41
 
42
+ if (msg && (msg.toLowerCase().includes("error") || msg.toLowerCase().includes("failed"))) {
43
+ APP.ui.logging.log(msg, "e");
 
 
 
 
44
  }
45
+ };
frontend/js/ui/overlays.js CHANGED
@@ -1,59 +1,29 @@
1
- import { state } from '../core/state.js';
2
- import { now, $ } from '../core/utils.js';
3
- import { defaultAimpoint, aimpointByLabel } from '../core/physics.js';
4
- import { log } from './logging.js';
5
-
6
- const frameOverlay = $("#frameOverlay");
7
-
8
- // Helper to draw rounded rect
9
- function roundRect(ctx, x, y, w, h, r, fill, stroke) {
10
- if (w < 2 * r) r = w / 2;
11
- if (h < 2 * r) r = h / 2;
12
- ctx.beginPath();
13
- ctx.moveTo(x + r, y);
14
- ctx.arcTo(x + w, y, x + w, y + h, r);
15
- ctx.arcTo(x + w, y + h, x, y + h, r);
16
- ctx.arcTo(x, y + h, x, y, r);
17
- ctx.arcTo(x, y, x + w, y, r);
18
- ctx.closePath();
19
- if (fill) ctx.fill();
20
- if (stroke) ctx.stroke();
21
- }
22
-
23
- function drawAimpoint(ctx, x, y, isSel) {
24
- ctx.save();
25
- ctx.shadowBlur = isSel ? 18 : 12;
26
- ctx.shadowColor = "rgba(239,68,68,.45)";
27
- ctx.strokeStyle = "rgba(239,68,68,.95)";
28
- ctx.lineWidth = isSel ? 3 : 2;
29
- ctx.beginPath();
30
- ctx.arc(x, y, isSel ? 10 : 9, 0, Math.PI * 2);
31
- ctx.stroke();
32
-
33
- ctx.shadowBlur = 0;
34
- ctx.strokeStyle = "rgba(255,255,255,.70)";
35
- ctx.lineWidth = 1.5;
36
- ctx.beginPath();
37
- ctx.moveTo(x - 14, y); ctx.lineTo(x - 4, y);
38
- ctx.moveTo(x + 4, y); ctx.lineTo(x + 14, y);
39
- ctx.moveTo(x, y - 14); ctx.lineTo(x, y - 4);
40
- ctx.moveTo(x, y + 4); ctx.lineTo(x, y + 14);
41
- ctx.stroke();
42
-
43
- ctx.fillStyle = "rgba(239,68,68,.95)";
44
- ctx.beginPath();
45
- ctx.arc(x, y, 2.5, 0, Math.PI * 2);
46
- ctx.fill();
47
- ctx.restore();
48
- }
49
-
50
- export function renderFrameOverlay() {
51
- if (!frameOverlay) return;
52
- const ctx = frameOverlay.getContext("2d");
53
- const w = frameOverlay.width, h = frameOverlay.height;
54
  ctx.clearRect(0, 0, w, h);
55
 
56
- if (!state.detections.length) return;
 
57
 
58
  // subtle scanning effect
59
  const t = now() / 1000;
@@ -61,13 +31,55 @@ export function renderFrameOverlay() {
61
  ctx.fillStyle = "rgba(34,211,238,.06)";
62
  ctx.fillRect(scanX - 8, 0, 16, h);
63
 
64
- state.detections.forEach((d) => {
65
- const isSel = d.id === state.selectedId;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
  const b = d.bbox;
67
 
68
  // box
69
  ctx.lineWidth = isSel ? 3 : 2;
70
- // Simple heuristic for focus color
71
  const label = (d.label || "").toLowerCase();
72
  const isFocus = label.includes("drone") || label.includes("uav");
73
 
@@ -84,16 +96,45 @@ export function renderFrameOverlay() {
84
  ctx.fillStyle = g;
85
  ctx.fillRect(b.x, b.y, b.w, b.h);
86
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
  // aimpoint marker
88
- // If aim not set, calculate default
89
- const aim = d.aim || defaultAimpoint(d.label);
 
 
 
90
  const ax = b.x + b.w * aim.relx;
91
  const ay = b.y + b.h * aim.rely;
92
  drawAimpoint(ctx, ax, ay, isSel);
93
  });
 
94
 
95
- // Pointer events moved to controls or handled here?
96
- // Usually simple to keep it here or init logic in main.
97
- }
 
98
 
99
- // Ensure interactions are bound in main setup, not here to keep this pure renderer
 
 
 
 
1
+ APP.ui.overlays = {};
2
+
3
+ APP.ui.overlays.render = function (canvasId, trackSource) {
4
+ const { state } = APP.core;
5
+ const { now, $ } = APP.core.utils;
6
+ const { defaultAimpoint } = APP.core.physics;
7
+
8
+ const canvas = $(`#${canvasId}`);
9
+ if (!canvas) return;
10
+
11
+ // Avoid double-drawing: If we are in Engage view (engageOverlay) and viewing the processed feed
12
+ // (which has baked-in boxes from backend), we should NOT draw frontend predicted boxes.
13
+ // Except maybe selected highlight? For now, hide all to avoid clutter/mismatch.
14
+ if (canvasId === "engageOverlay" && state.useProcessedFeed) {
15
+ const ctx = canvas.getContext("2d");
16
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
17
+ return;
18
+ }
19
+
20
+ // Resize to match DOM if needed (handling HiDPI optionally, or just verify size)
21
+ const w = canvas.width, h = canvas.height;
22
+ const ctx = canvas.getContext("2d");
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  ctx.clearRect(0, 0, w, h);
24
 
25
+ const source = trackSource || state.detections || [];
26
+ if (!source || !source.length) return;
27
 
28
  // subtle scanning effect
29
  const t = now() / 1000;
 
31
  ctx.fillStyle = "rgba(34,211,238,.06)";
32
  ctx.fillRect(scanX - 8, 0, 16, h);
33
 
34
+ // Helpers
35
+ function roundRect(ctx, x, y, w, h, r, fill, stroke) {
36
+ if (w < 2 * r) r = w / 2;
37
+ if (h < 2 * r) r = h / 2;
38
+ ctx.beginPath();
39
+ ctx.moveTo(x + r, y);
40
+ ctx.arcTo(x + w, y, x + w, y + h, r);
41
+ ctx.arcTo(x + w, y + h, x, y + h, r);
42
+ ctx.arcTo(x, y + h, x, y, r);
43
+ ctx.arcTo(x, y, x + w, y, r);
44
+ ctx.closePath();
45
+ if (fill) ctx.fill();
46
+ if (stroke) ctx.stroke();
47
+ }
48
+
49
+ function drawAimpoint(ctx, x, y, isSel) {
50
+ ctx.save();
51
+ ctx.shadowBlur = isSel ? 18 : 12;
52
+ ctx.shadowColor = "rgba(239,68,68,.45)";
53
+ ctx.strokeStyle = "rgba(239,68,68,.95)";
54
+ ctx.lineWidth = isSel ? 3 : 2;
55
+ ctx.beginPath();
56
+ ctx.arc(x, y, isSel ? 10 : 9, 0, Math.PI * 2);
57
+ ctx.stroke();
58
+
59
+ ctx.shadowBlur = 0;
60
+ ctx.strokeStyle = "rgba(255,255,255,.70)";
61
+ ctx.lineWidth = 1.5;
62
+ ctx.beginPath();
63
+ ctx.moveTo(x - 14, y); ctx.lineTo(x - 4, y);
64
+ ctx.moveTo(x + 4, y); ctx.lineTo(x + 14, y);
65
+ ctx.moveTo(x, y - 14); ctx.lineTo(x, y - 4);
66
+ ctx.moveTo(x, y + 4); ctx.lineTo(x, y + 14);
67
+ ctx.stroke();
68
+
69
+ ctx.fillStyle = "rgba(239,68,68,.95)";
70
+ ctx.beginPath();
71
+ ctx.arc(x, y, 2.5, 0, Math.PI * 2);
72
+ ctx.fill();
73
+ ctx.restore();
74
+ }
75
+
76
+ source.forEach((d) => {
77
+ // Handle both 'selectedId' (detection) and 'selectedTrackId' (track)
78
+ const isSel = (d.id === state.selectedId) || (d.id === state.tracker.selectedTrackId);
79
  const b = d.bbox;
80
 
81
  // box
82
  ctx.lineWidth = isSel ? 3 : 2;
 
83
  const label = (d.label || "").toLowerCase();
84
  const isFocus = label.includes("drone") || label.includes("uav");
85
 
 
96
  ctx.fillStyle = g;
97
  ctx.fillRect(b.x, b.y, b.w, b.h);
98
 
99
+ // Label / Info Text
100
+ ctx.font = "bold 13px 'Inter', sans-serif";
101
+ let text = (d.label || "OBJ").toUpperCase();
102
+ if (d.track_id) text = `[${d.track_id}] ${text}`;
103
+ if (d.gpt_distance_m) text += ` ${Math.round(d.gpt_distance_m)}m`;
104
+ if (d.speed_kph && d.speed_kph > 1) text += ` ${Math.round(d.speed_kph)}km/h`;
105
+
106
+ const tm = ctx.measureText(text);
107
+ const textW = tm.width + 16;
108
+ const textH = 22;
109
+ const tx = b.x;
110
+ const ty = b.y - textH - 4 > 0 ? b.y - textH - 4 : b.y + b.h + 4; // draw above if space, else below
111
+
112
+ ctx.fillStyle = isSel ? "rgba(34,211,238,0.9)" : "rgba(17,24,39,0.85)";
113
+ ctx.beginPath();
114
+ ctx.roundRect(tx, ty, textW, textH, 4);
115
+ ctx.fill();
116
+
117
+ ctx.fillStyle = isSel ? "#000" : "#fff";
118
+ ctx.fillText(text, tx + 8, ty + 16);
119
+
120
  // aimpoint marker
121
+ // Tracks define 'aimRel', Detections define 'aim' or just rely on default.
122
+ let aim = d.aim; // if detection
123
+ if (!aim && d.aimRel) aim = d.aimRel; // if track
124
+ if (!aim) aim = defaultAimpoint(d.label);
125
+
126
  const ax = b.x + b.w * aim.relx;
127
  const ay = b.y + b.h * aim.rely;
128
  drawAimpoint(ctx, ax, ay, isSel);
129
  });
130
+ };
131
 
132
+ APP.ui.overlays.renderFrameOverlay = function () {
133
+ const { state } = APP.core;
134
+ APP.ui.overlays.render("frameOverlay", state.detections);
135
+ };
136
 
137
+ APP.ui.overlays.renderEngageOverlay = function () {
138
+ const { state } = APP.core;
139
+ APP.ui.overlays.render("engageOverlay", state.tracker.tracks);
140
+ };
frontend/js/ui/radar.js CHANGED
@@ -1,23 +1,24 @@
1
- import { state } from '../core/state.js';
2
- import { clamp, now, $ } from '../core/utils.js';
3
 
4
- const frameRadar = $("#frameRadar");
 
 
 
5
 
6
- export function renderFrameRadar() {
7
- if (!frameRadar) return;
8
- const ctx = frameRadar.getContext("2d");
9
- const rect = frameRadar.getBoundingClientRect();
10
  const dpr = devicePixelRatio || 1;
11
 
12
  // Resize if needed
13
  const targetW = Math.max(1, Math.floor(rect.width * dpr));
14
  const targetH = Math.max(1, Math.floor(rect.height * dpr));
15
- if (frameRadar.width !== targetW || frameRadar.height !== targetH) {
16
- frameRadar.width = targetW;
17
- frameRadar.height = targetH;
18
  }
19
 
20
- const w = frameRadar.width, h = frameRadar.height;
21
  const cx = w * 0.5, cy = h * 0.5;
22
  const R = Math.min(w, h) * 0.45; // Max radius
23
 
@@ -47,7 +48,7 @@ export function renderFrameRadar() {
47
  const ang = (t * (Math.PI * 2)) % (Math.PI * 2);
48
 
49
  const grad = ctx.createConicGradient(ang + Math.PI / 2, cx, cy); // Offset to start at 0
50
- grad.addColorStop(0, "transparent");
51
  grad.addColorStop(0.1, "transparent");
52
  grad.addColorStop(0.8, "rgba(34, 211, 238, 0.0)");
53
  grad.addColorStop(1, "rgba(34, 211, 238, 0.15)"); // Trailing edge
@@ -78,7 +79,7 @@ export function renderFrameRadar() {
78
  ctx.stroke();
79
 
80
  // --- 4. Render Detections ---
81
- const source = state.tracker.running ? state.tracker.tracks : state.detections;
82
 
83
  if (source) {
84
  source.forEach(det => {
@@ -93,21 +94,10 @@ export function renderFrameRadar() {
93
  const maxRangeM = 1500;
94
  const rPx = (clamp(dist, 0, maxRangeM) / maxRangeM) * R;
95
 
96
- // Determine Bearing
97
- // box center relative to frame center
98
  const bx = det.bbox.x + det.bbox.w * 0.5;
99
- const fw = state.frame.w || 1280; // normalized coords usually, but here bbox seems to be absolute pixel or normalized depending on source?
100
- // NOTE: In state.detections, bbox is normalized (0..1) if coming from normBBox?
101
- // Wait, state.detections has bbox in pixel coords?
102
- // In original script: state.detections = dets.map ... bbox: normBBox(d.bbox, w, h) which clamps but returns pixel coords?
103
- // Let's check normBBox implementation in utils.
104
- // It clamps x to 0..w-1. So it IS pixel coords.
105
-
106
- // Normalized x (-0.5 to 0.5)
107
  const tx = (bx / fw) - 0.5;
108
 
109
- // Map x-axis (-0.5 to 0.5) to angle.
110
- // FOV assumption: ~60 degrees?
111
  const fovRad = (60 * Math.PI) / 180;
112
  const angle = (-Math.PI / 2) + (tx * fovRad);
113
 
@@ -115,7 +105,6 @@ export function renderFrameRadar() {
115
  const px = cx + Math.cos(angle) * rPx;
116
  const py = cy + Math.sin(angle) * rPx;
117
 
118
- // Check selection (handle both track ID and detection ID)
119
  const isSelected = (state.selectedId === det.id) || (state.tracker.selectedTrackId === det.id);
120
 
121
  // Glow for selected
@@ -144,7 +133,6 @@ export function renderFrameRadar() {
144
  ctx.font = "bold 11px monospace";
145
  ctx.fillText(det.id, px + 8, py + 3);
146
 
147
- // Connected Line to center
148
  ctx.strokeStyle = "rgba(255, 255, 255, 0.4)";
149
  ctx.lineWidth = 1;
150
  ctx.setLineDash([2, 2]);
@@ -154,7 +142,6 @@ export function renderFrameRadar() {
154
  ctx.stroke();
155
  ctx.setLineDash([]);
156
 
157
- // Distance Label on Line
158
  const mx = (cx + px) * 0.5;
159
  const my = (cy + py) * 0.5;
160
  const distStr = `${Math.round(dist)}m`;
@@ -164,20 +151,28 @@ export function renderFrameRadar() {
164
  const tw = tm.width;
165
  const th = 10;
166
 
167
- // Label Background
168
  ctx.fillStyle = "rgba(10, 15, 34, 0.85)";
169
  ctx.fillRect(mx - tw / 2 - 3, my - th / 2 - 2, tw + 6, th + 4);
170
 
171
- // Label Text
172
  ctx.fillStyle = "#22d3ee"; // Cyan
173
  ctx.textAlign = "center";
174
  ctx.textBaseline = "middle";
175
  ctx.fillText(distStr, mx, my);
176
 
177
- // Reset text alignment
178
  ctx.textAlign = "start";
179
  ctx.textBaseline = "alphabetic";
180
  }
181
  });
182
  }
183
- }
 
 
 
 
 
 
 
 
 
 
 
 
1
+ APP.ui.radar = {};
 
2
 
3
+ APP.ui.radar.render = function (canvasId, trackSource) {
4
+ const { state } = APP.core;
5
+ const { clamp, now, $ } = APP.core.utils;
6
+ const canvas = $(`#${canvasId}`);
7
 
8
+ if (!canvas) return;
9
+ const ctx = canvas.getContext("2d");
10
+ const rect = canvas.getBoundingClientRect();
 
11
  const dpr = devicePixelRatio || 1;
12
 
13
  // Resize if needed
14
  const targetW = Math.max(1, Math.floor(rect.width * dpr));
15
  const targetH = Math.max(1, Math.floor(rect.height * dpr));
16
+ if (canvas.width !== targetW || canvas.height !== targetH) {
17
+ canvas.width = targetW;
18
+ canvas.height = targetH;
19
  }
20
 
21
+ const w = canvas.width, h = canvas.height;
22
  const cx = w * 0.5, cy = h * 0.5;
23
  const R = Math.min(w, h) * 0.45; // Max radius
24
 
 
48
  const ang = (t * (Math.PI * 2)) % (Math.PI * 2);
49
 
50
  const grad = ctx.createConicGradient(ang + Math.PI / 2, cx, cy); // Offset to start at 0
51
+ grad.addColorStop(0, "transparent"); // transparent
52
  grad.addColorStop(0.1, "transparent");
53
  grad.addColorStop(0.8, "rgba(34, 211, 238, 0.0)");
54
  grad.addColorStop(1, "rgba(34, 211, 238, 0.15)"); // Trailing edge
 
79
  ctx.stroke();
80
 
81
  // --- 4. Render Detections ---
82
+ const source = trackSource || state.detections;
83
 
84
  if (source) {
85
  source.forEach(det => {
 
94
  const maxRangeM = 1500;
95
  const rPx = (clamp(dist, 0, maxRangeM) / maxRangeM) * R;
96
 
 
 
97
  const bx = det.bbox.x + det.bbox.w * 0.5;
98
+ const fw = state.frame.w || 1280;
 
 
 
 
 
 
 
99
  const tx = (bx / fw) - 0.5;
100
 
 
 
101
  const fovRad = (60 * Math.PI) / 180;
102
  const angle = (-Math.PI / 2) + (tx * fovRad);
103
 
 
105
  const px = cx + Math.cos(angle) * rPx;
106
  const py = cy + Math.sin(angle) * rPx;
107
 
 
108
  const isSelected = (state.selectedId === det.id) || (state.tracker.selectedTrackId === det.id);
109
 
110
  // Glow for selected
 
133
  ctx.font = "bold 11px monospace";
134
  ctx.fillText(det.id, px + 8, py + 3);
135
 
 
136
  ctx.strokeStyle = "rgba(255, 255, 255, 0.4)";
137
  ctx.lineWidth = 1;
138
  ctx.setLineDash([2, 2]);
 
142
  ctx.stroke();
143
  ctx.setLineDash([]);
144
 
 
145
  const mx = (cx + px) * 0.5;
146
  const my = (cy + py) * 0.5;
147
  const distStr = `${Math.round(dist)}m`;
 
151
  const tw = tm.width;
152
  const th = 10;
153
 
 
154
  ctx.fillStyle = "rgba(10, 15, 34, 0.85)";
155
  ctx.fillRect(mx - tw / 2 - 3, my - th / 2 - 2, tw + 6, th + 4);
156
 
 
157
  ctx.fillStyle = "#22d3ee"; // Cyan
158
  ctx.textAlign = "center";
159
  ctx.textBaseline = "middle";
160
  ctx.fillText(distStr, mx, my);
161
 
 
162
  ctx.textAlign = "start";
163
  ctx.textBaseline = "alphabetic";
164
  }
165
  });
166
  }
167
+ };
168
+
169
+ // Aliases for compatibility if needed, but ideally updated in main.js
170
+ APP.ui.radar.renderFrameRadar = function () {
171
+ const { state } = APP.core;
172
+ APP.ui.radar.render("frameRadar", state.detections);
173
+ };
174
+
175
+ APP.ui.radar.renderLiveRadar = function () {
176
+ const { state } = APP.core;
177
+ APP.ui.radar.render("radarCanvas", state.tracker.tracks);
178
+ };
frontend/js/ui/trade.js CHANGED
@@ -1,39 +1,291 @@
1
- import { state } from '../core/state.js';
2
- import { $ } from '../core/utils.js';
3
-
4
- const mPlan = $("#m-plan");
5
- const mPlanSub = $("#m-plan-sub");
6
- const mMaxP = $("#m-maxp");
7
- const mMaxPSub = $("#m-maxp-sub");
8
- const mReqP = $("#m-reqp");
9
- const mMargin = $("#m-margin");
10
-
11
- export function renderTrade() {
12
- // This function updates the top summary metrics based on the current state
13
- if (!mPlan) return; // UI not ready
14
-
15
- // Logic mostly handled in recomputeHEL, this is just refresh
16
- // We can move the specific DOM updates here if we want strict separation
17
- // For now, let's assume recomputeHEL drives state and this just reflects it?
18
- // Actually, recomputeHEL in the original code did the DOM updates directly.
19
- // Let's keep it simple: Trade rendering is minimal.
20
- }
21
-
22
- export function updateHeadlines(sys, bestTarget) {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  if (!mMaxP) return;
24
 
25
- mMaxP.textContent = sys.maxP ? `${sys.maxP} kW` : "—";
26
- mReqP.textContent = sys.reqP ? `${sys.reqP} kW` : "—";
27
- const margin = sys.margin || 0;
28
- mMargin.textContent = `${margin > 0 ? "+" : ""}${margin} kW`;
29
- mMargin.style.color = margin >= 0 ? "rgba(34,197,94,.95)" : "rgba(239,68,68,.95)";
30
- mMaxPSub.textContent = "Calculated by external HEL engine";
 
 
 
 
31
 
32
  if (bestTarget && bestTarget.pkill > 0) {
33
- mPlan.textContent = `${bestTarget.id} → Engage`;
34
- mPlanSub.textContent = "Highest P(kill) target";
35
- } else {
36
- mPlan.textContent = "—";
37
- mPlanSub.textContent = "No viable targets";
38
  }
39
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Trade Space Visualization Module
2
+ APP.ui.trade = {};
3
+
4
+ APP.ui.trade.populateTradeTarget = function () {
5
+ const { state } = APP.core;
6
+ const { $ } = APP.core.utils;
7
+ const tradeTarget = $("#tradeTarget");
8
+
9
+ if (!tradeTarget) return;
10
+
11
+ const sel = tradeTarget.value;
12
+ tradeTarget.innerHTML = "";
13
+
14
+ const ids = state.detections.map(d => d.id);
15
+ if (!ids.length) {
16
+ const opt = document.createElement("option");
17
+ opt.value = "";
18
+ opt.textContent = "No targets";
19
+ tradeTarget.appendChild(opt);
20
+ return;
21
+ }
22
+
23
+ ids.forEach(id => {
24
+ const opt = document.createElement("option");
25
+ opt.value = id;
26
+ opt.textContent = id;
27
+ tradeTarget.appendChild(opt);
28
+ });
29
+
30
+ if (sel && ids.includes(sel)) tradeTarget.value = sel;
31
+ else tradeTarget.value = state.selectedId || ids[0];
32
+ };
33
+
34
+ APP.ui.trade.renderTrade = function () {
35
+ const { state } = APP.core;
36
+ const { $, clamp } = APP.core.utils;
37
+ const { maxPowerAtTarget, requiredDwell, pkillFromMargin } = APP.core.hel;
38
+
39
+ const tradeCanvas = $("#tradeCanvas");
40
+ const tradeTarget = $("#tradeTarget");
41
+ const rMin = $("#rMin");
42
+ const rMax = $("#rMax");
43
+ const showPk = $("#showPk");
44
+
45
+ if (!tradeCanvas) return;
46
+
47
+ const ctx = tradeCanvas.getContext("2d");
48
+ const W = tradeCanvas.width, H = tradeCanvas.height;
49
+ ctx.clearRect(0, 0, W, H);
50
+
51
+ // Background
52
+ ctx.fillStyle = "rgba(0,0,0,.32)";
53
+ ctx.fillRect(0, 0, W, H);
54
+
55
+ if (!state.detections.length) {
56
+ ctx.fillStyle = "rgba(255,255,255,.75)";
57
+ ctx.font = "14px " + getComputedStyle(document.body).fontFamily;
58
+ ctx.fillText("Run Reason to populate trade-space curves.", 18, 34);
59
+ return;
60
+ }
61
+
62
+ const id = (tradeTarget ? tradeTarget.value : null) || state.selectedId || state.detections[0].id;
63
+ const d = state.detections.find(x => x.id === id) || state.detections[0];
64
+
65
+ const r0 = Math.max(50, rMin ? +rMin.value : 200);
66
+ const r1 = Math.max(r0 + 50, rMax ? +rMax.value : 6000);
67
+
68
+ // Margins
69
+ const padL = 64, padR = 18, padT = 18, padB = 52;
70
+ const plotW = W - padL - padR;
71
+ const plotH = H - padT - padB;
72
+
73
+ // Compute sweep
74
+ const N = 120;
75
+ const xs = [];
76
+ let maxY = 0;
77
+ let minY = Infinity;
78
+
79
+ for (let i = 0; i <= N; i++) {
80
+ const r = r0 + (r1 - r0) * (i / N);
81
+ const mp = maxPowerAtTarget(r).Ptar;
82
+ const reqP = d.reqP_kW || 40;
83
+ const reqD = requiredDwell(r, reqP, mp, d.baseDwell_s || 5);
84
+
85
+ xs.push({ r, mp, reqP, reqD });
86
+ maxY = Math.max(maxY, mp, reqP);
87
+ minY = Math.min(minY, mp, reqP);
88
+ }
89
+
90
+ maxY = Math.max(maxY, 20);
91
+ minY = Math.max(0, minY - 10);
92
+
93
+ // Axes
94
+ ctx.strokeStyle = "rgba(255,255,255,.14)";
95
+ ctx.lineWidth = 1;
96
+ ctx.beginPath();
97
+ ctx.moveTo(padL, padT);
98
+ ctx.lineTo(padL, padT + plotH);
99
+ ctx.lineTo(padL + plotW, padT + plotH);
100
+ ctx.stroke();
101
+
102
+ // Grid lines
103
+ ctx.strokeStyle = "rgba(255,255,255,.07)";
104
+ for (let i = 1; i <= 5; i++) {
105
+ const y = padT + plotH * (i / 5);
106
+ ctx.beginPath(); ctx.moveTo(padL, y); ctx.lineTo(padL + plotW, y); ctx.stroke();
107
+ }
108
+ for (let i = 1; i <= 6; i++) {
109
+ const x = padL + plotW * (i / 6);
110
+ ctx.beginPath(); ctx.moveTo(x, padT); ctx.lineTo(x, padT + plotH); ctx.stroke();
111
+ }
112
+
113
+ // Helpers
114
+ const xMap = (r) => padL + (r - r0) / (r1 - r0) * plotW;
115
+ const yMap = (p) => padT + (1 - (p - minY) / (maxY - minY)) * plotH;
116
+
117
+ // Curve: max power at target
118
+ ctx.strokeStyle = "rgba(34,211,238,.95)";
119
+ ctx.lineWidth = 2.5;
120
+ ctx.beginPath();
121
+ xs.forEach((pt, i) => {
122
+ const x = xMap(pt.r);
123
+ const y = yMap(pt.mp);
124
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
125
+ });
126
+ ctx.stroke();
127
+
128
+ // Curve: required power
129
+ ctx.strokeStyle = "rgba(239,68,68,.90)";
130
+ ctx.lineWidth = 2.5;
131
+ ctx.beginPath();
132
+ xs.forEach((pt, i) => {
133
+ const x = xMap(pt.r);
134
+ const y = yMap(pt.reqP);
135
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
136
+ });
137
+ ctx.stroke();
138
+
139
+ // Annotate margin zones
140
+ ctx.fillStyle = "rgba(34,197,94,.08)";
141
+ ctx.beginPath();
142
+ xs.forEach((pt, i) => {
143
+ const x = xMap(pt.r);
144
+ const y = yMap(Math.max(pt.reqP, pt.mp));
145
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
146
+ });
147
+ for (let i = xs.length - 1; i >= 0; i--) {
148
+ const x = xMap(xs[i].r);
149
+ const y = yMap(Math.min(xs[i].reqP, xs[i].mp));
150
+ ctx.lineTo(x, y);
151
+ }
152
+ ctx.closePath();
153
+ ctx.fill();
154
+
155
+ // Second axis for dwell (scaled)
156
+ const dwellMax = Math.max(...xs.map(p => p.reqD));
157
+ const yMapD = (dwell) => padT + (1 - (dwell / Math.max(1e-6, dwellMax))) * plotH;
158
+
159
+ ctx.strokeStyle = "rgba(124,58,237,.85)";
160
+ ctx.lineWidth = 2.2;
161
+ ctx.beginPath();
162
+ xs.forEach((pt, i) => {
163
+ const x = xMap(pt.r);
164
+ const y = yMapD(pt.reqD);
165
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
166
+ });
167
+ ctx.stroke();
168
+
169
+ // Optional pkill band
170
+ if (showPk && showPk.value === "on") {
171
+ ctx.fillStyle = "rgba(245,158,11,.08)";
172
+ ctx.beginPath();
173
+ xs.forEach((pt, i) => {
174
+ const x = xMap(pt.r);
175
+ const mp = pt.mp;
176
+ const margin = mp - pt.reqP;
177
+ const pk = pkillFromMargin(margin, d.baseDwell_s || 5, pt.reqD);
178
+ const y = padT + plotH * (1 - pk);
179
+ if (i === 0) ctx.moveTo(x, y); else ctx.lineTo(x, y);
180
+ });
181
+ ctx.lineTo(padL + plotW, padT + plotH);
182
+ ctx.lineTo(padL, padT + plotH);
183
+ ctx.closePath();
184
+ ctx.fill();
185
+ }
186
+
187
+ // Labels
188
+ ctx.fillStyle = "rgba(255,255,255,.84)";
189
+ ctx.font = "bold 14px " + getComputedStyle(document.body).fontFamily;
190
+ ctx.fillText(`Target: ${id} (${d.label})`, padL, 16);
191
+
192
+ ctx.fillStyle = "rgba(34,211,238,.95)";
193
+ ctx.fillText("Max P@Target (kW)", padL + 10, padT + plotH + 30);
194
+
195
+ ctx.fillStyle = "rgba(239,68,68,.92)";
196
+ ctx.fillText("Required P@Target (kW)", padL + 190, padT + plotH + 30);
197
+
198
+ ctx.fillStyle = "rgba(124,58,237,.90)";
199
+ ctx.fillText(`Required Dwell (s, scaled)`, padL + 420, padT + plotH + 30);
200
+
201
+ ctx.fillStyle = "rgba(255,255,255,.55)";
202
+ ctx.font = "11px " + getComputedStyle(document.body).fontFamily;
203
+ ctx.fillText(`Range (m)`, padL + plotW - 64, padT + plotH + 46);
204
+
205
+ // Axis ticks
206
+ ctx.fillStyle = "rgba(255,255,255,.55)";
207
+ ctx.font = "11px " + getComputedStyle(document.body).fontFamily;
208
+
209
+ for (let i = 0; i <= 5; i++) {
210
+ const p = minY + (maxY - minY) * (1 - i / 5);
211
+ const y = padT + plotH * (i / 5);
212
+ ctx.fillText(p.toFixed(0), 12, y + 4);
213
+ }
214
+
215
+ for (let i = 0; i <= 6; i++) {
216
+ const r = r0 + (r1 - r0) * (i / 6);
217
+ const x = padL + plotW * (i / 6);
218
+ ctx.fillText(r.toFixed(0), x - 14, padT + plotH + 18);
219
+ }
220
+
221
+ // Marker at baseline range
222
+ const rangeBase = $("#rangeBase");
223
+ const baseR = d.baseRange_m || (rangeBase ? +rangeBase.value : 1500);
224
+ const xb = xMap(clamp(baseR, r0, r1));
225
+ ctx.strokeStyle = "rgba(255,255,255,.28)";
226
+ ctx.setLineDash([6, 6]);
227
+ ctx.beginPath();
228
+ ctx.moveTo(xb, padT);
229
+ ctx.lineTo(xb, padT + plotH);
230
+ ctx.stroke();
231
+ ctx.setLineDash([]);
232
+ };
233
+
234
+ APP.ui.trade.updateHeadlines = function (sys, bestTarget) {
235
+ const { $ } = APP.core.utils;
236
+
237
+ const mMaxP = $("#m-maxp");
238
+ const mReqP = $("#m-reqp");
239
+ const mMargin = $("#m-margin");
240
+ const mMaxPSub = $("#m-maxp-sub");
241
+ const mPlanSub = $("#m-plan-sub");
242
+
243
  if (!mMaxP) return;
244
 
245
+ if (mMaxP) mMaxP.textContent = sys.maxP ? `${sys.maxP} kW` : "—";
246
+ if (mReqP) mReqP.textContent = sys.reqP ? `${sys.reqP} kW` : "—";
247
+
248
+ if (mMargin) {
249
+ const margin = sys.margin || 0;
250
+ mMargin.textContent = `${margin > 0 ? "+" : ""}${margin} kW`;
251
+ mMargin.style.color = margin >= 0 ? "rgba(34,197,94,.95)" : "rgba(239,68,68,.95)";
252
+ }
253
+
254
+ if (mMaxPSub) mMaxPSub.textContent = "Calculated by external HEL engine";
255
 
256
  if (bestTarget && bestTarget.pkill > 0) {
257
+ const mPlan = $("#m-plan");
258
+ if (mPlan) mPlan.textContent = `${bestTarget.id} Engage`;
259
+ if (mPlanSub) mPlanSub.textContent = "Highest P(kill) target";
 
 
260
  }
261
+ };
262
+
263
+ APP.ui.trade.snapshotTrade = function () {
264
+ const { state } = APP.core;
265
+ const { $, log } = APP.core.utils;
266
+ const { log: uiLog } = APP.ui.logging;
267
+
268
+ if (!state.detections.length) return;
269
+
270
+ const tradeTarget = $("#tradeTarget");
271
+ const helPower = $("#helPower");
272
+ const atmVis = $("#atmVis");
273
+ const atmCn2 = $("#atmCn2");
274
+ const aoQ = $("#aoQ");
275
+
276
+ const id = tradeTarget ? tradeTarget.value : state.selectedId;
277
+ const d = state.detections.find(x => x.id === id) || state.detections[0];
278
+
279
+ const snap = {
280
+ target: id,
281
+ helPower_kW: helPower ? +helPower.value : 0,
282
+ vis_km: atmVis ? +atmVis.value : 0,
283
+ cn2: atmCn2 ? +atmCn2.value : 0,
284
+ ao: aoQ ? +aoQ.value : 0,
285
+ baseRange_m: d.baseRange_m,
286
+ reqP_kW: d.reqP_kW,
287
+ baseDwell_s: d.baseDwell_s
288
+ };
289
+
290
+ uiLog("SNAPSHOT: " + JSON.stringify(snap), "t");
291
+ };
frontend/style.css CHANGED
@@ -467,6 +467,8 @@ input[type="number"]:focus {
467
  width: 100%;
468
  height: 100%;
469
  display: block;
 
 
470
  }
471
 
472
  /* Always show the engage video feed */
@@ -475,10 +477,16 @@ input[type="number"]:focus {
475
  opacity: 1;
476
  }
477
 
 
478
  .viewbox .overlay {
479
  position: absolute;
480
- inset: 0;
 
 
 
481
  pointer-events: none;
 
 
482
  }
483
 
484
  /* Make engage overlay visible as main display (not just overlay) */
 
467
  width: 100%;
468
  height: 100%;
469
  display: block;
470
+ object-fit: contain;
471
+ /* Maintain aspect ratio, letterbox if needed */
472
  }
473
 
474
  /* Always show the engage video feed */
 
477
  opacity: 1;
478
  }
479
 
480
+ /* Overlay must use same object-fit as the main canvas to align properly */
481
  .viewbox .overlay {
482
  position: absolute;
483
+ top: 0;
484
+ left: 0;
485
+ width: 100%;
486
+ height: 100%;
487
  pointer-events: none;
488
+ object-fit: contain;
489
+ /* CRITICAL: match frameCanvas object-fit */
490
  }
491
 
492
  /* Make engage overlay visible as main display (not just overlay) */
inference.py CHANGED
@@ -269,6 +269,176 @@ def _build_detection_records(
269
  return detections
270
 
271
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
272
  _MODEL_LOCKS: Dict[str, RLock] = {}
273
  _MODEL_LOCKS_GUARD = RLock()
274
  _DEPTH_SCALE = float(os.getenv("DEPTH_SCALE", "25.0"))
@@ -671,6 +841,7 @@ def run_inference(
671
  job_id: Optional[str] = None,
672
  depth_estimator_name: Optional[str] = None,
673
  depth_scale: float = 1.0,
 
674
  stream_queue: Optional[Queue] = None,
675
  ) -> Tuple[str, List[List[Dict[str, Any]]]]:
676
 
@@ -866,10 +1037,10 @@ def run_inference(
866
  _attach_depth_from_result(detections, dep_res, depth_scale)
867
  except: pass
868
 
869
- # B. Render Boxes
870
- display_labels = [_build_display_label(d) for d in detections]
871
- if d_res:
872
- processed = draw_boxes(processed, d_res.boxes, label_names=display_labels)
873
 
874
  # 3. Output
875
  while True:
@@ -933,6 +1104,10 @@ def run_inference(
933
  next_idx = 0
934
  buffer = {}
935
 
 
 
 
 
936
  try:
937
  with VideoWriter(output_video_path, fps, width, height) as writer:
938
  while next_idx < total_frames:
@@ -956,10 +1131,87 @@ def run_inference(
956
 
957
  # Write next_idx
958
  p_frame, dets = buffer.pop(next_idx)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
959
  writer.write(p_frame)
960
 
961
  if stream_queue:
962
  try:
 
 
 
 
 
 
963
  stream_queue.put(p_frame, timeout=0.01)
964
  except:
965
  pass
 
269
  return detections
270
 
271
 
272
+ class SimpleTracker:
273
+ def __init__(self, max_age: int = 30, iou_thresh: float = 0.3):
274
+ self.tracks = {} # id -> {bbox, label, history, missed_frames, filter}
275
+ self.next_id = 1
276
+ self.max_age = max_age
277
+ self.iou_thresh = iou_thresh
278
+
279
+ def update(self, detections: List[Dict[str, Any]]):
280
+ # detection: {bbox: [x1,y1,x2,y2], label, score}
281
+
282
+ # 1. Predict new locations (simple constant velocity or just last pos)
283
+ # For simple IOU tracker, prediction is just previous position.
284
+
285
+ # 2. Match
286
+ active_tracks = [t for t in self.tracks.values() if t['missed_frames'] < self.max_age]
287
+
288
+ matched_track_indices = set()
289
+ matched_det_indices = set()
290
+
291
+ # Greedy matching by IOU
292
+ # O(N*M) but N,M are small
293
+ matches = [] # (track_id, det_idx, iou)
294
+
295
+ for t_id, track in self.tracks.items():
296
+ if track['missed_frames'] >= self.max_age: continue
297
+
298
+ t_box = track['bbox']
299
+ for d_idx, det in enumerate(detections):
300
+ if d_idx in matched_det_indices: continue
301
+ d_box = det['bbox']
302
+
303
+ # Check label consistency (optional, but good for stability)
304
+ if track['label'] != det['label']: continue
305
+
306
+ iou = self._calculate_iou(t_box, d_box)
307
+ if iou > self.iou_thresh:
308
+ matches.append((t_id, d_idx, iou))
309
+
310
+ # Sort by IOU desc
311
+ matches.sort(key=lambda x: x[2], reverse=True)
312
+
313
+ used_tracks = set()
314
+ used_dets = set()
315
+
316
+ for t_id, d_idx, iou in matches:
317
+ if t_id in used_tracks or d_idx in used_dets: continue
318
+
319
+ # Update Track
320
+ track = self.tracks[t_id]
321
+ track['bbox'] = detections[d_idx]['bbox']
322
+ track['score'] = detections[d_idx]['score']
323
+ track['missed_frames'] = 0
324
+ track['history'].append(track['bbox'])
325
+ if len(track['history']) > 30: track['history'].pop(0)
326
+
327
+ # Persist GPT attributes from track to detection (propagate forward)
328
+ for key in ['gpt_distance_m', 'gpt_direction', 'gpt_description']:
329
+ if key in track:
330
+ detections[d_idx][key] = track[key]
331
+
332
+ # Persist GPT attributes from detection to track (update from source)
333
+ for key in ['gpt_distance_m', 'gpt_direction', 'gpt_description']:
334
+ if key in detections[d_idx]:
335
+ track[key] = detections[d_idx][key]
336
+
337
+ detections[d_idx]['track_id'] = f"T{str(t_id).zfill(2)}"
338
+
339
+ # Attach speed/direction state (to be computed by SpeedEstimator)
340
+ detections[d_idx]['history'] = track['history']
341
+
342
+ used_tracks.add(t_id)
343
+ used_dets.add(d_idx)
344
+
345
+ # 3. Create new tracks
346
+ for d_idx, det in enumerate(detections):
347
+ if d_idx not in used_dets:
348
+ t_id = self.next_id
349
+ self.next_id += 1
350
+ self.tracks[t_id] = {
351
+ 'bbox': det['bbox'],
352
+ 'label': det['label'],
353
+ 'score': det['score'],
354
+ 'missed_frames': 0,
355
+ 'history': [det['bbox']]
356
+ }
357
+ # Initialize GPT attributes if present
358
+ for key in ['gpt_distance_m', 'gpt_direction', 'gpt_description']:
359
+ if key in det:
360
+ self.tracks[t_id][key] = det[key]
361
+
362
+ det['track_id'] = f"T{str(t_id).zfill(2)}"
363
+ det['history'] = [det['bbox']]
364
+
365
+ # 4. Age out
366
+ for t_id in list(self.tracks.keys()):
367
+ if t_id not in used_tracks:
368
+ self.tracks[t_id]['missed_frames'] += 1
369
+ if self.tracks[t_id]['missed_frames'] > self.max_age:
370
+ del self.tracks[t_id]
371
+
372
+ def _calculate_iou(self, boxA, boxB):
373
+ xA = max(boxA[0], boxB[0])
374
+ yA = max(boxA[1], boxB[1])
375
+ xB = min(boxA[2], boxB[2])
376
+ yB = min(boxA[3], boxB[3])
377
+ interArea = max(0, xB - xA) * max(0, yB - yA)
378
+ boxAArea = (boxA[2] - boxA[0]) * (boxA[3] - boxA[1])
379
+ boxBArea = (boxB[2] - boxB[0]) * (boxB[3] - boxB[1])
380
+ return interArea / float(boxAArea + boxBArea - interArea + 1e-6)
381
+
382
+
383
+ class SpeedEstimator:
384
+ def __init__(self, fps: float = 30.0):
385
+ self.fps = fps
386
+ self.pixel_scale_map = {} # label -> pixels_per_meter (heuristic)
387
+
388
+ def estimate(self, detections: List[Dict[str, Any]]):
389
+ for det in detections:
390
+ history = det.get('history', [])
391
+ if len(history) < 5: continue
392
+
393
+ # Simple heuristic: Speed based on pixel movement
394
+ # We assume a base depth or size.
395
+ # Delta over last 5 frames
396
+ curr = history[-1]
397
+ prev = history[-5]
398
+
399
+ # Centroids
400
+ cx1 = (curr[0] + curr[2]) / 2
401
+ cy1 = (curr[1] + curr[3]) / 2
402
+ cx2 = (prev[0] + prev[2]) / 2
403
+ cy2 = (prev[1] + prev[3]) / 2
404
+
405
+ dist_px = np.sqrt((cx1-cx2)**2 + (cy1-cy2)**2)
406
+
407
+ # Heuristic scale: Assume car is ~4m long? Or just arbitrary pixel scale
408
+ # If we had GPT distance, we could calibrate.
409
+ # For now, let's use a dummy scale: 50px = 1m (very rough)
410
+ # Speed = (dist_px / 50) meters / (5 frames / 30 fps) seconds
411
+ # = (dist_px / 50) / (0.166) m/s
412
+ # = (dist_px * 0.12) m/s
413
+ # = * 3.6 km/h
414
+
415
+ scale = 50.0
416
+ dt = 5.0 / self.fps
417
+
418
+ speed_mps = (dist_px / scale) / dt
419
+ speed_kph = speed_mps * 3.6
420
+
421
+ # Smoothing
422
+ det['speed_kph'] = speed_kph
423
+
424
+ # Direction
425
+ dx = cx1 - cx2
426
+ dy = cy1 - cy2
427
+ angle = np.degrees(np.arctan2(dy, dx)) # 0 is right, 90 is down
428
+
429
+ # Map to clock direction (12 is up = -90 deg)
430
+ # -90 (up) -> 12
431
+ # 0 (right) -> 3
432
+ # 90 (down) -> 6
433
+ # 180 (left) -> 9
434
+
435
+ # Adjust so 12 is up (negative Y)
436
+ # angle -90 is 12
437
+ clock_hour = ((angle + 90) / 30 + 12) % 12
438
+ if clock_hour == 0: clock_hour = 12.0
439
+ det['direction_clock'] = f"{int(round(clock_hour))} o'clock"
440
+
441
+
442
  _MODEL_LOCKS: Dict[str, RLock] = {}
443
  _MODEL_LOCKS_GUARD = RLock()
444
  _DEPTH_SCALE = float(os.getenv("DEPTH_SCALE", "25.0"))
 
841
  job_id: Optional[str] = None,
842
  depth_estimator_name: Optional[str] = None,
843
  depth_scale: float = 1.0,
844
+ enable_gpt: bool = True,
845
  stream_queue: Optional[Queue] = None,
846
  ) -> Tuple[str, List[List[Dict[str, Any]]]]:
847
 
 
1037
  _attach_depth_from_result(detections, dep_res, depth_scale)
1038
  except: pass
1039
 
1040
+ # B. Render Boxes - DEFERRED TO WRITER THREAD FOR SEQUENTIAL TRACKING
1041
+ # display_labels = [_build_display_label(d) for d in detections]
1042
+ # if d_res:
1043
+ # processed = draw_boxes(processed, d_res.boxes, label_names=display_labels)
1044
 
1045
  # 3. Output
1046
  while True:
 
1104
  next_idx = 0
1105
  buffer = {}
1106
 
1107
+ # Initialize Tracker & Speed Estimator
1108
+ tracker = SimpleTracker()
1109
+ speed_est = SpeedEstimator(fps=fps)
1110
+
1111
  try:
1112
  with VideoWriter(output_video_path, fps, width, height) as writer:
1113
  while next_idx < total_frames:
 
1131
 
1132
  # Write next_idx
1133
  p_frame, dets = buffer.pop(next_idx)
1134
+
1135
+ # --- GPT ESTIMATION (Frame 0 Only) ---
1136
+ if next_idx == 0 and enable_gpt and dets:
1137
+ try:
1138
+ logging.info("Running GPT estimation for video start (Frame 0)...")
1139
+ with tempfile.NamedTemporaryFile(suffix=".jpg", delete=False) as tmp:
1140
+ cv2.imwrite(tmp.name, p_frame) # Use processed frame (boxes not yet drawn)
1141
+ # Wait, p_frame might have heatmaps if depth enabled? No, draw_boxes comes later.
1142
+ # Actually, colorize_depth_map might have happened in worker.
1143
+ # But raw image is better? We don't have raw image here easily without stashing.
1144
+ # p_frame is 'processed'. If depth map enabled, it's a heatmap. Not good for GPT.
1145
+ # GPT needs RGB image.
1146
+ # Worker: processed = frame.copy() -> colorize -> draw_boxes (removed).
1147
+ # So processed is potentially modified.
1148
+ # Ideally we want original.
1149
+ # But let's assume for now processed is fine (if depth disabled) or GPT can handle it.
1150
+ # If depth is enabled, processed is a heatmap. GPT will fail to see car color/details.
1151
+
1152
+ # FIX: We need access to original frame?
1153
+ # worker sends (idx, processed, detections).
1154
+ # It does NOT send original frame.
1155
+ # We should change worker to send original? Or assume GPT runs on processed?
1156
+ # If processed is heatmap, it's bad.
1157
+ # But User Objective says "legacy depth estimation" is optional/deprecated.
1158
+ # If depth_estimator_name is None, processed is just frame.
1159
+
1160
+ gpt_res = estimate_distance_gpt(tmp.name, dets)
1161
+ os.remove(tmp.name)
1162
+
1163
+ # Merge
1164
+ # Helper to match IDs?
1165
+ # estimate_distance_gpt expects us to pass detections list, output keyed by T01..
1166
+ # But detections don't have IDs yet! SimpleTracker assigns them.
1167
+ # We assign temporary IDs T01.. based on index for GPT matching?
1168
+ # gpt_distance.py generates IDs if not present.
1169
+ # Let's inspect gpt_distance.py... assume it matches by index T01, T02...
1170
+
1171
+ for i, d in enumerate(dets):
1172
+ oid = f"T{str(i+1).zfill(2)}"
1173
+ if oid in gpt_res:
1174
+ d.update(gpt_res[oid])
1175
+
1176
+ except Exception as e:
1177
+ logging.error("GPT failed for Frame 0: %s", e)
1178
+
1179
+ # --- SEQUENTIAL TRACKING ---
1180
+ # Update tracker with current frame detections
1181
+ tracker.update(dets)
1182
+ speed_est.estimate(dets)
1183
+
1184
+ # --- RENDER BOXES & OVERLAYS ---
1185
+ # We need to convert list of dicts back to boxes array for draw_boxes
1186
+ if dets:
1187
+ display_boxes = np.array([d['bbox'] for d in dets])
1188
+ display_labels = []
1189
+ for d in dets:
1190
+ lbl = d.get('label', 'obj')
1191
+ # Append Track ID
1192
+ if 'track_id' in d:
1193
+ lbl = f"{d['track_id']} {lbl}"
1194
+ # Append Speed/Direction if available
1195
+ if 'speed_kph' in d and d['speed_kph'] > 1.0: # Threshold static
1196
+ lbl += f" {int(d['speed_kph'])}km/h"
1197
+ # Append GPT Distance if available (from first frame persistence)
1198
+ if d.get('gpt_distance_m'):
1199
+ lbl += f" {int(d['gpt_distance_m'])}m"
1200
+
1201
+ display_labels.append(lbl)
1202
+
1203
+ p_frame = draw_boxes(p_frame, display_boxes, label_names=display_labels)
1204
+
1205
  writer.write(p_frame)
1206
 
1207
  if stream_queue:
1208
  try:
1209
+ # Send TRACKED detections to frontend for overlay
1210
+ # We need to attach them to the frame or send separately?
1211
+ # The stream_queue expects 'p_frame' which is an image.
1212
+ # The frontend polls for 'async job' status which returns video, but
1213
+ # we also want live updates during streaming?
1214
+ # Currently streaming is just Mjpeg of p_frame.
1215
  stream_queue.put(p_frame, timeout=0.01)
1216
  except:
1217
  pass
jobs/background.py CHANGED
@@ -50,6 +50,7 @@ async def process_video_async(job_id: str) -> None:
50
  job_id,
51
  job.depth_estimator_name, # Pass depth estimator to trigger unified loop
52
  job.depth_scale,
 
53
  stream_queue,
54
  )
55
  detection_path, detections_list = result_pkg
 
50
  job_id,
51
  job.depth_estimator_name, # Pass depth estimator to trigger unified loop
52
  job.depth_scale,
53
+ job.enable_gpt,
54
  stream_queue,
55
  )
56
  detection_path, detections_list = result_pkg
jobs/models.py CHANGED
@@ -33,3 +33,4 @@ class JobInfo:
33
  first_frame_depth_path: Optional[str] = None
34
  partial_success: bool = False # True if one component failed but job completed
35
  depth_error: Optional[str] = None # Error message if depth failed
 
 
33
  first_frame_depth_path: Optional[str] = None
34
  partial_success: bool = False # True if one component failed but job completed
35
  depth_error: Optional[str] = None # Error message if depth failed
36
+ enable_gpt: bool = True # Whether to use GPT for distance estimation