WebashalarForML commited on
Commit
1f3eae4
·
verified ·
1 Parent(s): f7dfa10

Upload 9 files

Browse files
app.py ADDED
@@ -0,0 +1,673 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, render_template, request, jsonify, Response, send_from_directory
2
+ from flask_socketio import SocketIO, emit
3
+ import cv2
4
+ import numpy as np
5
+ import os
6
+ import json
7
+ import uuid
8
+ import threading
9
+ import queue
10
+ import torch
11
+ import time
12
+ from datetime import datetime
13
+ from collections import Counter
14
+ import base64
15
+ import sys
16
+ sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
17
+
18
+ from rfdetr import RFDETRMedium
19
+ import supervision as sv
20
+ from gradio_client import Client, handle_file
21
+
22
+ app = Flask(__name__)
23
+ app.config['SECRET_KEY'] = 'your-secret-key-here'
24
+ socketio = SocketIO(app, cors_allowed_origins="*", async_mode='threading')
25
+
26
+ # --- CONFIG ---
27
+ BASE_DIR = os.path.dirname(os.path.abspath(__file__))
28
+ UPLOAD_FOLDER = os.path.join(BASE_DIR, 'static/uploads')
29
+ RESULTS_FOLDER = os.path.join(BASE_DIR, 'static/results')
30
+ os.makedirs(UPLOAD_FOLDER, exist_ok=True)
31
+ os.makedirs(RESULTS_FOLDER, exist_ok=True)
32
+
33
+ # Model Paths
34
+ HELMET_WEIGHTS = os.path.join(BASE_DIR, "Model/v2_helmet/checkpoint_best_ema.pth")
35
+ PLATE_WEIGHTS = os.path.join(BASE_DIR, "Model/models_v1/checkpoint_best_ema_plate.pth")
36
+ HELMET_CLASSES = ["motorbike and helmet", "motorbike and no helmet"]
37
+
38
+ # Thresholds (matching test.py)
39
+ CONF_RIDER = 0.25
40
+ CONF_PLATE = 0.30
41
+ CONF_HELMET_CONFIRM = 0.40
42
+ CONF_NO_HELMET_TRIGGER = 0.35
43
+
44
+ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
45
+ print(f"[INIT] Targeted Device: {device}")
46
+
47
+ # --- MODEL INITIALIZATION ---
48
+ print("[INIT] Loading Rider Detection Model...")
49
+ rider_model = RFDETRMedium(num_classes=len(HELMET_CLASSES), pretrain_weights=HELMET_WEIGHTS)
50
+ try:
51
+ rider_model.optimize_for_inference(compile=True, batch_size=1)
52
+ if device.type == "cuda": rider_model.model.half()
53
+ except Exception as e: print(f"[WARNING] Rider model opt failed: {e}")
54
+
55
+ print("[INIT] Loading License Plate Model...")
56
+ plate_model = RFDETRMedium(num_classes=1, pretrain_weights=PLATE_WEIGHTS)
57
+ try:
58
+ plate_model.optimize_for_inference(compile=True, batch_size=1)
59
+ if device.type == "cuda": plate_model.model.half()
60
+ except Exception as e: print(f"[WARNING] Plate model opt failed: {e}")
61
+
62
+ # --- GLOBAL STATE & WORKERS ---
63
+ # We use a dictionary to separate state by session_id in a real app,
64
+ # but for simplicity here we reset globals on upload or use a simplified structure.
65
+ current_session_data = {
66
+ "violations": {}, # {track_id: {data}}
67
+ "ocr_queue": queue.Queue(),
68
+ "track_plate_cache": {},
69
+ "track_capture_count": {},
70
+ "track_ocr_history": {}
71
+ }
72
+
73
+ # Live Camera Session State
74
+ live_camera_sessions = {} # {session_id: {tracker, history, etc}}
75
+
76
+ json_lock = threading.Lock()
77
+
78
def get_best_consensus(results):
    """Return the most likely plate string from multiple OCR attempts.

    Filters out error/placeholder entries, then performs per-position
    character majority voting across the remaining strings.

    Args:
        results: list of raw OCR strings for one track.

    Returns:
        The consensus string, or "PENDING..." when no usable attempt exists.
    """
    # Drop placeholder/error entries and normalise newlines/whitespace.
    # (Counter is already imported at module level; the original re-imported
    # it locally on every call.)
    cleaned = [r.replace("\n", " ").strip() for r in results if r not in ["API_ERROR", "PENDING...", ""]]
    if not cleaned:
        return "PENDING..."
    if len(cleaned) == 1:
        return cleaned[0]
    # Character-level consensus: most common character at each index wins.
    max_len = max(len(r) for r in cleaned)
    final_chars = []
    for i in range(max_len):
        char_pool = [r[i] for r in cleaned if i < len(r)]
        if char_pool:
            final_chars.append(Counter(char_pool).most_common(1)[0][0])
    return "".join(final_chars).strip()
90
+
91
def clamp_box(box, w, h):
    """Clip an [x1, y1, x2, y2] box (any numeric type) to a w x h image."""
    x1, y1, x2, y2 = (int(v) for v in box)
    left = max(0, x1)
    top = max(0, y1)
    right = min(w - 1, x2)
    bottom = min(h - 1, y2)
    return [left, top, right, bottom]
94
+
95
def expand_box(box, w, h):
    """Widen a rider box by 10% of its width on each side and shift both
    y-edges down by 40% of its height, clamped to the image bounds.

    NOTE(review): both y1 and y2 move down by the same offset, so the box
    is shifted (not grown) vertically — presumably to target the plate
    region below the rider; confirm against test.py.
    """
    x1, y1, x2, y2 = (int(v) for v in box)
    width = x2 - x1
    height = y2 - y1
    shifted = [
        x1 - width * 0.1,
        y1 + height * 0.4,
        x2 + width * 0.1,
        y2 + height * 0.4,
    ]
    return clamp_box(shifted, w, h)
99
+
100
def background_ocr_worker():
    """Daemon loop: consume (track_id, plate_path, session_id) tasks from
    the shared OCR queue, send the plate crop to a remote Gradio OCR
    space, and fold the result into the consensus plate for that track.

    Runs forever; returns early only if the initial client connection fails.
    """
    print("[OCR] Worker Thread Started")
    try:
        # Initialize Gradio Client (remote OCR service on HF Spaces).
        client = Client("WebashalarForML/demo-glm-ocr")
    except Exception as e:
        print(f"[OCR] Connection Failed: {e}")
        return

    while True:
        try:
            # Blocking get: waits until a plate crop is queued.
            task = current_session_data["ocr_queue"].get()
            if task is None: continue  # Keep alive

            track_id, plate_path, session_id = task
            print(f"[OCR] Processing ID {track_id}...")

            try:
                # Call external API; any failure becomes "API_ERROR" so the
                # consensus step can ignore it rather than crash the worker.
                result = client.predict(image=handle_file(plate_path), api_name="/proses_intelijen")
                plate_text = str(result).strip()
            except Exception as e:
                print(f"[OCR] API Error: {e}")
                plate_text = "API_ERROR"

            # Update per-track history of OCR attempts.
            if track_id not in current_session_data["track_ocr_history"]:
                current_session_data["track_ocr_history"][track_id] = []

            if plate_text not in ["API_ERROR", ""]:
                current_session_data["track_ocr_history"][track_id].append(plate_text)

            # Majority-vote consensus across all attempts for this track.
            final_plate = get_best_consensus(current_session_data["track_ocr_history"][track_id])
            current_session_data["track_plate_cache"][track_id] = final_plate

            # Update the main violation record and persist, under the lock.
            with json_lock:
                if track_id in current_session_data["violations"]:
                    current_session_data["violations"][track_id]["plate_number"] = final_plate
                    current_session_data["violations"][track_id]["ocr_attempts"] = current_session_data["track_ocr_history"][track_id]

                    # Save to JSON file for persistence.
                    json_path = os.path.join(RESULTS_FOLDER, f"session_{session_id}.json")
                    with open(json_path, 'w') as f:
                        json.dump(list(current_session_data["violations"].values()), f, indent=4)

            # NOTE(review): task_done() is skipped when an exception fires
            # above; harmless here since nothing calls ocr_queue.join().
            current_session_data["ocr_queue"].task_done()
        except Exception as e:
            print(f"[OCR] Loop Error: {e}")
150
+
151
+ # Start OCR Thread
152
+ threading.Thread(target=background_ocr_worker, daemon=True).start()
153
+
154
def parse_preds(preds, W, H):
    """Extract (boxes, scores, labels) numpy arrays from a detection result.

    Torch tensors are moved to CPU numpy; boxes detected as normalized
    ([0, 1]) are scaled to pixel coordinates using the frame size W x H.
    Returns three empty arrays when `preds` has no `xyxy` attribute.
    """
    def _as_numpy(value):
        # ndarrays pass through untouched; tensors need .cpu().numpy().
        return value if isinstance(value, np.ndarray) else value.cpu().numpy()

    boxes = np.array([])
    scores = np.array([])
    labels = np.array([])

    if hasattr(preds, "xyxy"):
        boxes = _as_numpy(preds.xyxy)
        scores = _as_numpy(preds.confidence)
        labels = _as_numpy(preds.class_id)

    # Heuristic: max coordinate <= 1.01 means normalized coords — rescale.
    if boxes.size > 0 and boxes.max() <= 1.01:
        boxes = boxes.copy()
        boxes[:, [0, 2]] *= W
        boxes[:, [1, 3]] *= H

    return boxes, scores, labels
165
+
166
# NOTE(review): duplicate definition — token-identical to the expand_box
# defined earlier in this file. This redefinition harmlessly shadows the
# first one; one of the two copies should be deleted.
def expand_box(box, w, h):
    # Widen by 10% of width each side; shift both y-edges down by 40% of
    # the box height (a vertical shift, not a grow), then clamp to image.
    x1, y1, x2, y2 = map(int, box)
    bw, bh = x2 - x1, y2 - y1
    return clamp_box([x1 - bw * 0.1, y1 + bh * 0.4, x2 + bw * 0.1, y2 + bh * 0.4], w, h)
170
+
171
def process_video_gen(video_path, session_id):
    """Generator: read an uploaded video, run the rider + plate detection
    pipeline per frame, record violations into the global session state,
    and yield annotated frames as an MJPEG multipart stream.

    Args:
        video_path: path of the uploaded video file.
        session_id: short id returned by /upload; used in filenames.

    Yields:
        bytes chunks in multipart/x-mixed-replace format (one per frame).
    """
    cap = cv2.VideoCapture(video_path)
    tracker = sv.ByteTrack()

    # Session specific tracking (matching test.py structure).
    track_class_history = {}      # tid -> rolling list of {"class", "conf"}
    track_violation_memory = {}   # tid -> True once flagged (sticky)
    track_last_seen = {}          # tid -> last frame index seen
    dead_ids = set()              # tids retired after 50 unseen frames
    frame_idx = 0

    while cap.isOpened():
        ret, frame = cap.read()
        if not ret: break
        frame_idx += 1

        # Cleanup dead tracks: unseen for >50 frames get retired.
        to_kill = [tid for tid, last in track_last_seen.items() if frame_idx - last > 50 and tid not in dead_ids]
        for tk in to_kill:
            dead_ids.add(tk)

        rgb_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        h_orig, w_orig = frame.shape[:2]

        # 1. RIDER DETECTION
        with torch.no_grad():
            rider_preds = rider_model.predict(rgb_frame, conf=CONF_RIDER, iou=0.5)

        r_boxes, r_scores, r_labels = parse_preds(rider_preds, w_orig, h_orig)

        if r_boxes.size > 0:
            detections = sv.Detections(
                xyxy=r_boxes.astype(np.float32),
                confidence=r_scores.astype(np.float32),
                class_id=r_labels.astype(np.int32)
            )
        else:
            detections = sv.Detections.empty()

        # 2. TRACKING: assign stable ids across frames.
        detections = tracker.update_with_detections(detections)

        for i, (xyxy, mask, confidence, class_id, tracker_id, data) in enumerate(detections):
            if tracker_id is None: continue
            tid = int(tracker_id)
            track_last_seen[tid] = frame_idx
            x1, y1, x2, y2 = map(int, xyxy)
            cls_idx = int(class_id)

            # Rolling window (max 20 entries) of class observations.
            if tid not in track_class_history:
                track_class_history[tid] = []
            track_class_history[tid].append({"class": cls_idx, "conf": confidence})
            if len(track_class_history[tid]) > 20:
                track_class_history[tid].pop(0)

            # Robust helmet state mapping; class 1 is disambiguated via
            # history. NOTE(review): assumes ids 0=helmet, 2=no-helmet —
            # confirm against the training class map.
            is_nh_current = (cls_idx == 2)
            is_h_current = (cls_idx == 0)
            if cls_idx == 1:
                hist = [h['class'] for h in track_class_history[tid]]
                if 2 in hist:
                    is_h_current = True
                else:
                    is_nh_current = True

            # Violation memory: once flagged, a track stays a violation.
            if tid not in track_violation_memory:
                if is_nh_current and confidence >= CONF_NO_HELMET_TRIGGER:
                    track_violation_memory[tid] = True
                hist = [h['class'] for h in track_class_history[tid]]
                nh_hits = sum(1 for c in hist if c == 2 or (c == 1 and 2 not in hist))
                if nh_hits > 3:
                    track_violation_memory[tid] = True

            is_no_helmet = track_violation_memory.get(tid, False)

            # Display name logic — confidence-tiered labels, BGR colours.
            if is_no_helmet:
                display_name, color = "VIOLATION: NO HELMET", (0, 0, 255)
            elif is_nh_current and confidence >= CONF_NO_HELMET_TRIGGER:
                display_name, color = "WARNING: NO HELMET", (0, 165, 255)
            elif is_nh_current and confidence > 0.15:
                display_name, color = "ANALYZING...", (0, 255, 255)
            elif is_h_current and confidence >= CONF_HELMET_CONFIRM:
                display_name, color = "HELMET", (0, 255, 0)
            else:
                display_name, color = f"TRACKING (C{cls_idx})", (180, 180, 180)

            # 3. LOG VIOLATION & CROP (first sighting of this tid only).
            if is_no_helmet and tid not in dead_ids:
                with json_lock:
                    if tid not in current_session_data["violations"]:
                        ts = datetime.now()

                        # Save rider crop for the dashboard gallery.
                        rider_img_name = f"viol_{session_id}_{tid}_rider.jpg"
                        rider_path = os.path.join(RESULTS_FOLDER, rider_img_name)
                        cv2.imwrite(rider_path, frame[y1:y2, x1:x2])

                        # Initialize record; plate fields filled later by
                        # the OCR worker thread.
                        current_session_data["violations"][tid] = {
                            "id": tid,
                            "timestamp": ts.strftime('%H:%M:%S'),
                            "type": "No Helmet",
                            "plate_number": "Scanning...",
                            "image_url": f"/static/results/{rider_img_name}",
                            "plate_image_url": None,  # Will fill later
                            "ocr_attempts": [],
                            "raw": {
                                "confidence": float(confidence),
                                "box": xyxy.tolist()
                            }
                        }
                        current_session_data["track_capture_count"][tid] = 0

                # 4. PLATE DETECTION (only if violation confirmed).
                # Expand/shift rider box to the likely plate region.
                eb = expand_box(xyxy, w_orig, h_orig)
                rider_crop = frame[eb[1]:eb[3], eb[0]:eb[2]]

                # At most 3 plate snapshots per track id.
                if rider_crop.size > 0 and current_session_data["track_capture_count"].get(tid, 0) < 3:
                    # Run plate model on the crop only (cheaper than full frame).
                    with torch.no_grad():
                        plate_preds = plate_model.predict(cv2.cvtColor(rider_crop, cv2.COLOR_BGR2RGB), conf=CONF_PLATE, iou=0.5)

                    pb, ps, pl = parse_preds(plate_preds, rider_crop.shape[1], rider_crop.shape[0])

                    if pb.size > 0:
                        # Keep only the highest-confidence plate.
                        best_idx = np.argmax(ps)
                        px1, py1, px2, py2 = map(int, pb[best_idx])

                        # Reject degenerate/tiny crops before saving.
                        plate_crop = rider_crop[py1:py2, px1:px2]
                        if plate_crop.size > 0 and plate_crop.shape[0] > 10 and plate_crop.shape[1] > 20:
                            # Save plate snapshot (numbered per capture).
                            s_idx = current_session_data['track_capture_count'][tid] + 1
                            plate_img_name = f"viol_{session_id}_{tid}_plate_snap{s_idx}.jpg"
                            plate_path = os.path.join(RESULTS_FOLDER, plate_img_name)
                            cv2.imwrite(plate_path, plate_crop)

                            # Update JSON with plate image URL (use the latest one).
                            with json_lock:
                                current_session_data["violations"][tid]["plate_image_url"] = f"/static/results/{plate_img_name}"

                            # Trigger OCR asynchronously via the worker queue.
                            current_session_data["ocr_queue"].put((tid, plate_path, session_id))
                            current_session_data["track_capture_count"][tid] += 1

                # Draw UI for violations: box, label, cached plate text.
                plate_text = current_session_data["track_plate_cache"].get(tid, "")
                cv2.rectangle(frame, (x1, y1), (x2, y2), color, 2)
                cv2.putText(frame, f"ID:{tid} {display_name} {confidence:.2f}", (x1, y1 - 10),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2)
                if plate_text:
                    cv2.putText(frame, f"Plate: {plate_text}", (x1, y2 + 20),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 2)

            else:
                # Normal helmet/tracking drawing (no plate pipeline).
                cv2.rectangle(frame, (x1, y1), (x2, y2), color, 2)
                cv2.putText(frame, f"ID:{tid} {display_name} {confidence:.2f}", (x1, y1 - 10),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2)

        # Encode annotated frame for MJPEG streaming.
        _, buffer = cv2.imencode('.jpg', frame)
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + buffer.tobytes() + b'\r\n')

    cap.release()
342
+
343
+ # --- ROUTES ---
344
+
345
@app.route('/')
def index():
    """Landing page."""
    return render_template('landing.html')

@app.route('/dashboard')
def dashboard():
    """Main detection dashboard."""
    return render_template('dashboard.html')

@app.route('/camera_debug')
def camera_debug():
    """Diagnostics page for the live-camera pipeline."""
    return render_template('camera_debug.html')

@app.route('/test_simple')
def test_simple():
    """Serve a standalone test page from the app root directory."""
    return send_from_directory('.', 'test_simple.html')

@app.route('/test_socket_echo')
def test_socket_echo():
    """Serve a Socket.IO echo test page from the app root directory."""
    return send_from_directory('.', 'test_socket_echo.html')
364
+
365
@app.route('/upload', methods=['POST'])
def upload_video():
    """Accept a video upload, reset the (single) global detection session,
    and return the stored filename plus a fresh session id.

    Returns:
        JSON {"filename", "session_id"} on success, or a 400 error JSON
        when no file was provided.
    """
    if 'file' not in request.files:
        return jsonify({"error": "No file part"}), 400
    file = request.files['file']
    if file.filename == '':
        return jsonify({"error": "No selected file"}), 400

    # 1. Generate a short unique session id for this upload.
    session_id = str(uuid.uuid4())[:8]

    # 2. Reset the shared session state (app supports one active session).
    with json_lock:
        current_session_data["violations"] = {}
        current_session_data["track_plate_cache"] = {}
        current_session_data["track_capture_count"] = {}
        current_session_data["track_ocr_history"] = {}
        # Drain stale OCR tasks. Catch queue.Empty specifically (the
        # original bare `except: pass` hid every error) and loop until
        # empty rather than checking .empty() first, which is racy.
        while True:
            try:
                current_session_data["ocr_queue"].get_nowait()
            except queue.Empty:
                break

    # Keep only the basename of the client-supplied filename to prevent
    # path traversal into directories outside UPLOAD_FOLDER.
    filename = f"{session_id}_{os.path.basename(file.filename)}"
    filepath = os.path.join(UPLOAD_FOLDER, filename)
    file.save(filepath)

    return jsonify({"filename": filename, "session_id": session_id})
392
+
393
@app.route('/get_violations')
def get_violations():
    """Return the current session's violation records, newest first."""
    # Snapshot the shared dict under the lock, then serialise outside it.
    with json_lock:
        records = list(current_session_data["violations"].values())
    # Newest-first ordering (insertion order reversed).
    records.reverse()
    return jsonify(records)
401
+
402
@app.route('/video_feed/<filename>/<session_id>')
def video_feed(filename, session_id):
    """Stream the processed upload as a multipart MJPEG response.

    NOTE(review): `filename` is joined into a path without sanitisation —
    confirm callers only ever pass the value returned by /upload.
    """
    filepath = os.path.join(UPLOAD_FOLDER, filename)
    return Response(process_video_gen(filepath, session_id),
                    mimetype='multipart/x-mixed-replace; boundary=frame')

@app.route('/mobile/<session_id>')
def mobile_node(session_id):
    """Mobile capture page bound to an existing live session id."""
    return render_template('mobile.html', session_id=session_id)

@app.route('/upload_frame/<session_id>', methods=['POST'])
def upload_frame(session_id):
    """Stub endpoint for HTTP frame uploads; currently a no-op ack."""
    # (Simplified for now - can extend to run detection on mobile frames too)
    return jsonify({"status": "received"})
416
+
417
+ # --- SOCKET.IO HANDLERS FOR LIVE CAMERA ---
418
+
419
@socketio.on('connect')
def handle_connect():
    """Acknowledge a new Socket.IO client connection."""
    print(f"[SOCKET] Client connected: {request.sid}")
    emit('connection_response', {'status': 'connected'})

@socketio.on('disconnect')
def handle_disconnect():
    """Drop any live-camera state tied to the disconnecting client."""
    print(f"[SOCKET] Client disconnected: {request.sid}")
    # Cleanup session if exists (sessions are keyed by Socket.IO sid).
    if request.sid in live_camera_sessions:
        del live_camera_sessions[request.sid]
430
+
431
@socketio.on('start_camera_session')
def handle_start_camera(data):
    """Create fresh per-connection tracking state for a live camera feed.

    Args:
        data: dict from the client; may carry an existing 'session_id',
              otherwise a new short uuid is generated.
    """
    session_id = data.get('session_id', str(uuid.uuid4())[:8])
    print(f"[SOCKET] Starting camera session: {session_id}")

    # Initialize session state, keyed by the Socket.IO connection id so
    # each client gets its own tracker and violation store.
    live_camera_sessions[request.sid] = {
        'session_id': session_id,
        'tracker': sv.ByteTrack(),       # fresh tracker per connection
        'track_class_history': {},
        'track_violation_memory': {},
        'track_last_seen': {},
        'dead_ids': set(),
        'frame_idx': 0,
        'violations': {},
        'track_plate_cache': {},
        'track_capture_count': {},
        'track_ocr_history': {}
    }

    emit('camera_session_started', {'session_id': session_id})
452
+
453
@socketio.on('camera_frame')
def handle_camera_frame(data):
    """Decode one base64 JPEG frame from the client, run the detection
    pipeline on it, and emit the annotated frame plus any newly recorded
    violations back to the sending client only.
    """
    if request.sid not in live_camera_sessions:
        print(f"[SOCKET] No session found for {request.sid}")
        emit('error', {'message': 'No active session'})
        return

    try:
        print(f"[SOCKET] Received frame from {request.sid}, size: {len(data.get('frame', ''))} bytes")

        # Decode base64 frame payload into an OpenCV BGR image.
        frame_data = data['frame'].split(',')[1]  # Remove data:image/jpeg;base64,
        frame_bytes = base64.b64decode(frame_data)
        nparr = np.frombuffer(frame_bytes, np.uint8)
        frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)

        if frame is None:
            # Corrupt/unsupported image data — silently skip this frame.
            print("[SOCKET] Failed to decode frame")
            return

        print(f"[SOCKET] Frame decoded: {frame.shape}")

        # Run the per-connection detection/tracking pipeline.
        session = live_camera_sessions[request.sid]
        session_id = session['session_id']
        processed_frame, new_violations = process_live_frame(frame, session, session_id)

        print(f"[SOCKET] Frame processed, violations: {len(new_violations)}")

        # Re-encode the annotated frame as JPEG (quality 85) for transport.
        _, buffer = cv2.imencode('.jpg', processed_frame, [cv2.IMWRITE_JPEG_QUALITY, 85])
        processed_b64 = base64.b64encode(buffer).decode('utf-8')

        print(f"[SOCKET] Encoded frame size: {len(processed_b64)} bytes")

        # Send back processed frame (use explicit room targeting so only
        # the originating client receives it).
        socketio.emit('processed_frame', {
            'frame': f'data:image/jpeg;base64,{processed_b64}',
            'violations': new_violations
        }, room=request.sid)

        print(f"[SOCKET] Emitted processed_frame to client {request.sid}")

    except Exception as e:
        # Report any pipeline failure back to the client instead of
        # dropping the connection.
        print(f"[SOCKET] Frame processing error: {e}")
        import traceback
        traceback.print_exc()
        emit('error', {'message': str(e)})
501
+
502
def process_live_frame(frame, session, session_id):
    """Process a single frame from live camera feed.

    Mirrors process_video_gen's per-frame pipeline, but keeps all tracker
    and violation state inside the per-connection `session` dict.

    Args:
        frame: BGR image (numpy array) decoded from the client.
        session: per-connection state dict created by handle_start_camera.
        session_id: the client-visible session id (used in filenames).

    Returns:
        (annotated_frame, new_violations) where new_violations holds only
        records first created on this frame.
    """
    tracker = session['tracker']
    track_class_history = session['track_class_history']
    track_violation_memory = session['track_violation_memory']
    track_last_seen = session['track_last_seen']
    dead_ids = session['dead_ids']
    frame_idx = session['frame_idx']

    # Advance the per-session frame counter (re-read after increment).
    session['frame_idx'] += 1
    frame_idx = session['frame_idx']

    # Cleanup dead tracks: unseen for >50 frames get retired.
    to_kill = [tid for tid, last in track_last_seen.items() if frame_idx - last > 50 and tid not in dead_ids]
    for tk in to_kill:
        dead_ids.add(tk)

    rgb_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    h_orig, w_orig = frame.shape[:2]

    # 1. RIDER DETECTION
    with torch.no_grad():
        rider_preds = rider_model.predict(rgb_frame, conf=CONF_RIDER, iou=0.5)

    r_boxes, r_scores, r_labels = parse_preds(rider_preds, w_orig, h_orig)

    if r_boxes.size > 0:
        detections = sv.Detections(
            xyxy=r_boxes.astype(np.float32),
            confidence=r_scores.astype(np.float32),
            class_id=r_labels.astype(np.int32)
        )
    else:
        detections = sv.Detections.empty()

    # 2. TRACKING: assign stable ids across frames.
    detections = tracker.update_with_detections(detections)

    new_violations = []

    for i, (xyxy, mask, confidence, class_id, tracker_id, data) in enumerate(detections):
        if tracker_id is None: continue
        tid = int(tracker_id)
        track_last_seen[tid] = frame_idx
        x1, y1, x2, y2 = map(int, xyxy)
        cls_idx = int(class_id)

        # Rolling window (max 20 entries) of class observations.
        if tid not in track_class_history:
            track_class_history[tid] = []
        track_class_history[tid].append({"class": cls_idx, "conf": confidence})
        if len(track_class_history[tid]) > 20:
            track_class_history[tid].pop(0)

        # Robust helmet state mapping; class 1 is disambiguated via
        # history. NOTE(review): assumes ids 0=helmet, 2=no-helmet —
        # confirm against the training class map.
        is_nh_current = (cls_idx == 2)
        is_h_current = (cls_idx == 0)
        if cls_idx == 1:
            hist = [h['class'] for h in track_class_history[tid]]
            if 2 in hist:
                is_h_current = True
            else:
                is_nh_current = True

        # Violation memory: once flagged, a track stays a violation.
        if tid not in track_violation_memory:
            if is_nh_current and confidence >= CONF_NO_HELMET_TRIGGER:
                track_violation_memory[tid] = True
            hist = [h['class'] for h in track_class_history[tid]]
            nh_hits = sum(1 for c in hist if c == 2 or (c == 1 and 2 not in hist))
            if nh_hits > 3:
                track_violation_memory[tid] = True

        is_no_helmet = track_violation_memory.get(tid, False)

        # Display name logic — confidence-tiered labels, BGR colours.
        if is_no_helmet:
            display_name, color = "VIOLATION: NO HELMET", (0, 0, 255)
        elif is_nh_current and confidence >= CONF_NO_HELMET_TRIGGER:
            display_name, color = "WARNING: NO HELMET", (0, 165, 255)
        elif is_nh_current and confidence > 0.15:
            display_name, color = "ANALYZING...", (0, 255, 255)
        elif is_h_current and confidence >= CONF_HELMET_CONFIRM:
            display_name, color = "HELMET", (0, 255, 0)
        else:
            display_name, color = f"TRACKING (C{cls_idx})", (180, 180, 180)

        # LOG VIOLATION & CROP (first sighting of this tid only).
        if is_no_helmet and tid not in dead_ids:
            with json_lock:
                if tid not in session['violations']:
                    ts = datetime.now()

                    # Save rider crop for the dashboard gallery.
                    rider_img_name = f"viol_live_{session_id}_{tid}_rider.jpg"
                    rider_path = os.path.join(RESULTS_FOLDER, rider_img_name)
                    cv2.imwrite(rider_path, frame[y1:y2, x1:x2])

                    # Initialize record; plate fields filled in later by
                    # the OCR worker thread.
                    violation_record = {
                        "id": tid,
                        "timestamp": ts.strftime('%H:%M:%S'),
                        "type": "No Helmet",
                        "plate_number": "Scanning...",
                        "image_url": f"/static/results/{rider_img_name}",
                        "plate_image_url": None,
                        "ocr_attempts": [],
                        "raw": {
                            "confidence": float(confidence),
                            "box": xyxy.tolist()
                        }
                    }
                    session['violations'][tid] = violation_record
                    session['track_capture_count'][tid] = 0
                    new_violations.append(violation_record)

            # PLATE DETECTION inside the expanded/shifted rider region.
            eb = expand_box(xyxy, w_orig, h_orig)
            rider_crop = frame[eb[1]:eb[3], eb[0]:eb[2]]

            # At most 3 plate snapshots per track id.
            if rider_crop.size > 0 and session['track_capture_count'].get(tid, 0) < 3:
                with torch.no_grad():
                    plate_preds = plate_model.predict(cv2.cvtColor(rider_crop, cv2.COLOR_BGR2RGB), conf=CONF_PLATE, iou=0.5)

                pb, ps, pl = parse_preds(plate_preds, rider_crop.shape[1], rider_crop.shape[0])

                if pb.size > 0:
                    # Keep only the highest-confidence plate.
                    best_idx = np.argmax(ps)
                    px1, py1, px2, py2 = map(int, pb[best_idx])

                    # Reject degenerate/tiny crops before saving.
                    plate_crop = rider_crop[py1:py2, px1:px2]
                    if plate_crop.size > 0 and plate_crop.shape[0] > 10 and plate_crop.shape[1] > 20:
                        s_idx = session['track_capture_count'][tid] + 1
                        plate_img_name = f"viol_live_{session_id}_{tid}_plate_snap{s_idx}.jpg"
                        plate_path = os.path.join(RESULTS_FOLDER, plate_img_name)
                        cv2.imwrite(plate_path, plate_crop)

                        with json_lock:
                            session['violations'][tid]["plate_image_url"] = f"/static/results/{plate_img_name}"

                        # Trigger OCR (using shared queue); the "live_"
                        # prefix keeps live sessions' JSON files distinct.
                        current_session_data["ocr_queue"].put((tid, plate_path, f"live_{session_id}"))
                        session['track_capture_count'][tid] += 1

            # Draw UI for violations: box, label, cached plate text.
            plate_text = session['track_plate_cache'].get(tid, "")
            cv2.rectangle(frame, (x1, y1), (x2, y2), color, 2)
            cv2.putText(frame, f"ID:{tid} {display_name} {confidence:.2f}", (x1, y1 - 10),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2)
            if plate_text:
                cv2.putText(frame, f"Plate: {plate_text}", (x1, y2 + 20),
                            cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 0, 0), 2)
        else:
            # Normal helmet/tracking drawing (no plate pipeline).
            cv2.rectangle(frame, (x1, y1), (x2, y2), color, 2)
            cv2.putText(frame, f"ID:{tid} {display_name} {confidence:.2f}", (x1, y1 - 10),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, color, 2)

    return frame, new_violations
660
+
661
@app.route('/get_live_violations/<session_id>')
def get_live_violations(session_id):
    """Return violations for a specific live camera session, newest first.

    Returns an empty JSON list when no session matches the id.
    """
    # The keys (socket sids) are irrelevant here — scan session values.
    for session in live_camera_sessions.values():
        if session['session_id'] != session_id:
            continue
        with json_lock:
            data = list(session['violations'].values())
        data.reverse()
        return jsonify(data)
    return jsonify([])
671
+
672
if __name__ == '__main__':
    # Development entry point: Socket.IO server on all interfaces.
    # NOTE(review): debug=True and allow_unsafe_werkzeug=True are for
    # local development only — disable both in production deployments.
    socketio.run(app, host='0.0.0.0', debug=True, port=7860, allow_unsafe_werkzeug=True)
requirements.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ flask
2
+ flask-socketio
3
+ opencv-python
4
+ numpy
5
+ torch
6
+ supervision
7
+ gradio-client
8
+ rfdetr
9
+ python-socketio
10
+ eventlet
static/css/style.css ADDED
@@ -0,0 +1,810 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
/* Design tokens shared by the landing page and dashboard themes. */
:root {
    --primary: #8b5cf6;                      /* violet accent */
    --secondary: #ec4899;                    /* pink accent */
    --accent: #f43f5e;                       /* rose accent */
    --orange: #f97316;
    --bg-dark: #000000;
    --sidebar-bg: rgba(18, 18, 18, 0.4);     /* translucent glass panels */
    --card-bg: rgba(25, 25, 25, 0.5);
    --text-main: #ffffff;
    --text-dim: rgba(255, 255, 255, 0.6);    /* secondary/dimmed text */
    --glass-border: rgba(255, 255, 255, 0.08);
    --purple-main: #8a4fff;                  /* nav hover/active fill */
}
14
+
15
+ * {
16
+ margin: 0;
17
+ padding: 0;
18
+ box-sizing: border-box;
19
+ font-family: 'Outfit', sans-serif;
20
+ }
21
+
22
+ body {
23
+ background-color: #000;
24
+ color: var(--text-main);
25
+ overflow-x: hidden;
26
+ min-height: 100vh;
27
+ width: 100%;
28
+ max-width: 100vw;
29
+ }
30
+
31
+ /* Background - Shared with Landing */
32
+ .liquid-container {
33
+ position: fixed;
34
+ top: 0;
35
+ left: 0;
36
+ width: 100vw;
37
+ height: 100vh;
38
+ z-index: -1;
39
+ overflow: hidden;
40
+ background: #000;
41
+ }
42
+
43
+ .liquid-bg {
44
+ position: absolute;
45
+ width: 200%;
46
+ height: 200%;
47
+ top: -50%;
48
+ left: -50%;
49
+ background: radial-gradient(circle at 20% 30%, var(--primary) 0%, transparent 40%),
50
+ radial-gradient(circle at 80% 20%, var(--secondary) 0%, transparent 45%),
51
+ radial-gradient(circle at 40% 80%, var(--accent) 0%, transparent 50%),
52
+ radial-gradient(circle at 90% 90%, var(--orange) 0%, transparent 40%);
53
+ filter: blur(100px);
54
+ animation: liquidMove 40s linear infinite;
55
+ opacity: 0.9;
56
+ }
57
+
58
+ .grain-overlay {
59
+ position: absolute;
60
+ top: 0;
61
+ left: 0;
62
+ width: 100%;
63
+ height: 100%;
64
+ opacity: 0.18;
65
+ pointer-events: none;
66
+ z-index: 1000;
67
+ background-image: url("data:image/svg+xml,%3Csvg viewBox='0 0 200 200' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='noiseFilter'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='0.65' numOctaves='3' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23noiseFilter)'/%3E%3C/svg%3E");
68
+ }
69
+
70
+ @keyframes liquidMove {
71
+ 0% {
72
+ transform: translate(0, 0) rotate(0deg);
73
+ }
74
+
75
+ 33% {
76
+ transform: translate(5%, 5%) rotate(5deg);
77
+ }
78
+
79
+ 66% {
80
+ transform: translate(-5%, 8%) rotate(-5deg);
81
+ }
82
+
83
+ 100% {
84
+ transform: translate(0, 0) rotate(0deg);
85
+ }
86
+ }
87
+
88
+ /* Landing Page Styles */
89
+ #canvas-container {
90
+ position: fixed;
91
+ top: 0;
92
+ left: 0;
93
+ width: 100%;
94
+ height: 100%;
95
+ z-index: -1;
96
+ }
97
+
98
+ .hero-content {
99
+ position: relative;
100
+ z-index: 10;
101
+ height: 100vh;
102
+ display: flex;
103
+ flex-direction: column;
104
+ justify-content: center;
105
+ align-items: center;
106
+ text-align: center;
107
+ color: #fff;
108
+ padding: 0 1rem;
109
+ }
110
+
111
+ .made-by {
112
+ font-size: 0.8rem;
113
+ /* default */
114
+ letter-spacing: 0.5em;
115
+ /* default */
116
+ opacity: 0.6;
117
+ text-transform: uppercase;
118
+ }
119
+
120
+ .made-by.top {
121
+ margin-bottom: 2rem;
122
+ }
123
+
124
+ .made-by.bottom {
125
+ margin-top: 5rem;
126
+ font-size: 0.6rem;
127
+ letter-spacing: 0.8em;
128
+ opacity: 0.4;
129
+ }
130
+
131
+ .landing-title {
132
+ font-size: clamp(3rem, 15vw, 10rem);
133
+ line-height: 0.85;
134
+ margin-bottom: 2rem;
135
+ font-weight: 800;
136
+ background: linear-gradient(to bottom, #fff 50%, rgba(255, 255, 255, 0.2));
137
+ -webkit-background-clip: text;
138
+ background-clip: text;
139
+ -webkit-text-fill-color: transparent;
140
+ }
141
+
142
+ .landing-subtitle {
143
+ max-width: 600px;
144
+ line-height: 1.6;
145
+ opacity: 0.7;
146
+ margin-bottom: 3rem;
147
+ /* Adjusted for spacing */
148
+ }
149
+
150
+ .enter-btn {
151
+ /* margin-top: 3rem; -- handled by subtitle margin */
152
+ padding: 1.2rem 3rem;
153
+ background: rgba(255, 255, 255, 0.1);
154
+ backdrop-filter: blur(10px);
155
+ border: 1px solid rgba(255, 255, 255, 0.2);
156
+ color: #fff;
157
+ font-weight: 700;
158
+ font-size: 1.1rem;
159
+ border-radius: 100px;
160
+ cursor: pointer;
161
+ transition: all 0.4s cubic-bezier(0.16, 1, 0.3, 1);
162
+ text-decoration: none;
163
+ letter-spacing: 0.1em;
164
+ display: inline-block;
165
+ }
166
+
167
+ .enter-btn:hover {
168
+ background: #fff;
169
+ color: #000;
170
+ transform: scale(1.05);
171
+ box-shadow: 0 20px 40px rgba(0, 0, 0, 0.3);
172
+ }
173
+
174
+ /* Page Layout */
175
+ .dashboard-container {
176
+ display: flex;
177
+ width: 100vw;
178
+ height: 100vh;
179
+ position: relative;
180
+ z-index: 1;
181
+ }
182
+
183
+ /* Sidebar - Glass Styling */
184
+ .sidebar {
185
+ width: 80px;
186
+ background: var(--sidebar-bg);
187
+ backdrop-filter: blur(20px);
188
+ display: flex;
189
+ flex-direction: column;
190
+ align-items: center;
191
+ padding: 2rem 0;
192
+ gap: 2rem;
193
+ border-right: 1px solid var(--glass-border);
194
+ transition: width 0.3s ease;
195
+ }
196
+
197
+ .nav-icon {
198
+ font-size: 1.4rem;
199
+ color: var(--text-dim);
200
+ cursor: pointer;
201
+ transition: all 0.3s ease;
202
+ padding: 12px;
203
+ border-radius: 16px;
204
+ display: flex;
205
+ align-items: center;
206
+ justify-content: center;
207
+ text-decoration: none;
208
+ }
209
+
210
+ .nav-icon:hover,
211
+ .nav-icon.active {
212
+ color: #fff;
213
+ background: var(--purple-main);
214
+ box-shadow: 0 0 20px rgba(138, 79, 255, 0.4);
215
+ }
216
+
217
+ /* Main Area */
218
+ .main-wrapper {
219
+ flex: 1;
220
+ display: flex;
221
+ flex-direction: column;
222
+ padding: 2rem;
223
+ overflow-y: auto;
224
+ }
225
+
226
+ .dashboard-header {
227
+ display: flex;
228
+ justify-content: space-between;
229
+ align-items: center;
230
+ margin-bottom: 2rem;
231
+ gap: 2rem;
232
+ }
233
+
234
+ .search-bar {
235
+ background: rgba(255, 255, 255, 0.05);
236
+ backdrop-filter: blur(10px);
237
+ border: 1px solid var(--glass-border);
238
+ padding: 0.8rem 1.5rem 0.8rem 3rem;
239
+ border-radius: 14px;
240
+ color: #fff;
241
+ width: 300px;
242
+ outline: none;
243
+ }
244
+
245
+ .add-btn {
246
+ background: var(--purple-main);
247
+ color: #fff;
248
+ border: none;
249
+ padding: 0.8rem 1.8rem;
250
+ border-radius: 14px;
251
+ font-weight: 700;
252
+ cursor: pointer;
253
+ transition: transform 0.2s ease;
254
+ }
255
+
256
+ /* Dashboard Grid - Responsive */
257
+ .dashboard-grid {
258
+ display: grid;
259
+ grid-template-columns: minmax(0, 1.3fr) minmax(0, 0.7fr);
260
+ gap: 1.5rem;
261
+ width: 100%;
262
+ max-width: 100%;
263
+ }
264
+
265
+ .card {
266
+ background: var(--card-bg);
267
+ backdrop-filter: blur(30px) saturate(160%);
268
+ border: 1px solid var(--glass-border);
269
+ border-radius: 32px;
270
+ padding: 1.8rem;
271
+ height: fit-content;
272
+ }
273
+
274
+ /* Video Feed */
275
+ .feed-card {
276
+ grid-column: span 1;
277
+ background: #000;
278
+ border-radius: 32px;
279
+ position: relative;
280
+ overflow: hidden;
281
+ min-height: 450px;
282
+ max-height: 70vh;
283
+ display: flex;
284
+ align-items: center;
285
+ justify-content: center;
286
+ }
287
+
288
+ /* When overlay is visible, allow content to fit */
289
+ .feed-card.overlay-active {
290
+ overflow: visible;
291
+ aspect-ratio: unset;
292
+ height: auto;
293
+ }
294
+
295
+ /* When video is playing, maintain aspect ratio */
296
+ .feed-card.video-active {
297
+ aspect-ratio: 16/9;
298
+ overflow: hidden;
299
+ }
300
+
301
+ .feed-card img {
302
+ width: 100%;
303
+ height: 100%;
304
+ object-fit: contain;
305
+ position: relative;
306
+ z-index: 1;
307
+ }
308
+
309
+ #drop-zone-overlay {
310
+ position: absolute;
311
+ inset: 0;
312
+ display: flex;
313
+ flex-direction: column;
314
+ justify-content: center;
315
+ align-items: center;
316
+ background: rgba(0, 0, 0, 0.6);
317
+ text-align: center;
318
+ padding: 2.5rem 2rem;
319
+ z-index: 10;
320
+ min-height: 100%;
321
+ }
322
+
323
+ #drop-zone-overlay .mode-btn-wrap {
324
+ display: flex;
325
+ gap: 1rem;
326
+ margin-bottom: 1rem;
327
+ z-index: 10;
328
+ position: relative;
329
+ }
330
+
331
+ .live-badge {
332
+ position: absolute;
333
+ top: 20px;
334
+ left: 20px;
335
+ z-index: 20;
336
+ background: rgba(0, 0, 0, 0.7);
337
+ backdrop-filter: blur(10px);
338
+ padding: 8px 16px;
339
+ border-radius: 20px;
340
+ font-size: 0.85rem;
341
+ font-weight: 600;
342
+ display: flex;
343
+ align-items: center;
344
+ gap: 8px;
345
+ }
346
+
347
+ @keyframes pulse {
348
+
349
+ 0%,
350
+ 100% {
351
+ opacity: 1;
352
+ }
353
+
354
+ 50% {
355
+ opacity: 0.3;
356
+ }
357
+ }
358
+
359
+ /* Mobile Optimizations */
360
+ @media (max-width: 1024px) {
361
+ .dashboard-grid {
362
+ grid-template-columns: 1fr;
363
+ }
364
+
365
+ .sidebar {
366
+ width: 70px;
367
+ }
368
+
369
+ .feed-card {
370
+ min-height: 400px;
371
+ max-height: 60vh;
372
+ }
373
+
374
+ .feed-card.overlay-active {
375
+ min-height: 450px;
376
+ }
377
+ }
378
+
379
+ @media (max-width: 768px) {
380
+ .dashboard-container {
381
+ flex-direction: column-reverse;
382
+ height: auto;
383
+ min-height: 100vh;
384
+ }
385
+
386
+ .sidebar {
387
+ width: 100%;
388
+ height: 64px;
389
+ flex-direction: row;
390
+ justify-content: space-around;
391
+ padding: 0 1rem;
392
+ border-right: none;
393
+ border-top: 1px solid var(--glass-border);
394
+ position: fixed;
395
+ bottom: 0;
396
+ left: 0;
397
+ z-index: 100;
398
+ background: rgba(10, 10, 10, 0.85);
399
+ backdrop-filter: blur(20px);
400
+ }
401
+
402
+ .nav-icon {
403
+ font-size: 1.1rem;
404
+ padding: 10px;
405
+ }
406
+
407
+ .main-wrapper {
408
+ padding: 1rem;
409
+ padding-bottom: 80px;
410
+ overflow-x: hidden;
411
+ width: 100%;
412
+ }
413
+
414
+ .dashboard-header {
415
+ flex-direction: column;
416
+ align-items: flex-start;
417
+ gap: 0.75rem;
418
+ margin-bottom: 1rem;
419
+ }
420
+
421
+ .dashboard-header h2 {
422
+ font-size: 1.3rem;
423
+ }
424
+
425
+ .system-stats {
426
+ display: flex;
427
+ gap: 0.75rem;
428
+ flex-wrap: wrap;
429
+ }
430
+
431
+ .search-bar {
432
+ width: 100%;
433
+ }
434
+
435
+ .device-grid {
436
+ grid-template-columns: 1fr;
437
+ }
438
+
439
+ .device-tabs {
440
+ overflow-x: auto;
441
+ padding-bottom: 15px;
442
+ }
443
+
444
+ .dashboard-header .header-actions {
445
+ width: 100%;
446
+ justify-content: space-between;
447
+ }
448
+
449
+ /* Feed Card - Mobile Fix */
450
+ .feed-card {
451
+ min-height: 350px !important;
452
+ max-height: 50vh !important;
453
+ height: auto !important;
454
+ border-radius: 20px;
455
+ width: 100%;
456
+ }
457
+
458
+ .feed-card.overlay-active {
459
+ min-height: 400px !important;
460
+ height: auto !important;
461
+ }
462
+
463
+ .feed-card.video-active {
464
+ aspect-ratio: unset !important;
465
+ }
466
+
467
+ .feed-card #processed-stream {
468
+ object-fit: contain !important;
469
+ width: 100% !important;
470
+ height: auto !important;
471
+ max-height: 50vh !important;
472
+ position: relative !important;
473
+ min-height: 200px !important;
474
+ }
475
+
476
+ .feed-card .live-badge {
477
+ top: 10px !important;
478
+ left: 10px !important;
479
+ font-size: 0.75rem !important;
480
+ padding: 6px 12px !important;
481
+ }
482
+
483
/* Drop zone buttons on mobile */
/* NOTE: two identical #drop-zone-overlay rules were declared here; both used
   !important at equal specificity so the later one (padding: 1rem) always won.
   Merged into a single rule with the effective value. */
#drop-zone-overlay {
padding: 1rem !important;
}
491
+
492
+ #drop-zone-overlay h3 {
493
+ font-size: 1rem;
494
+ }
495
+
496
+ #drop-zone-overlay .mode-btn-wrap {
497
+ flex-direction: column;
498
+ gap: 0.6rem;
499
+ width: 100%;
500
+ max-width: 260px;
501
+ }
502
+
503
+ #drop-zone-overlay .mode-btn-wrap button {
504
+ width: 100%;
505
+ }
506
+
507
+ /* Results Panel */
508
+ #results-panel {
509
+ max-height: 260px !important;
510
+ }
511
+
512
+ .log-row-title {
513
+ font-size: 0.8rem !important;
514
+ }
515
+
516
+ .log-row-meta {
517
+ font-size: 0.72rem !important;
518
+ }
519
+
520
+ /* Evidence Cache - 3 cols on mobile */
521
+ .mini-feed-gallery {
522
+ grid-template-columns: repeat(3, 1fr) !important;
523
+ gap: 8px !important;
524
+ }
525
+
526
+ /* Cards */
527
+ .card {
528
+ border-radius: 20px;
529
+ padding: 1.2rem;
530
+ width: 100%;
531
+ overflow: hidden;
532
+ }
533
+
534
+ .side-panel-wrapper {
535
+ display: flex;
536
+ flex-direction: column;
537
+ gap: 1rem;
538
+ width: 100%;
539
+ }
540
+
541
+ .dashboard-grid {
542
+ width: 100%;
543
+ overflow-x: hidden;
544
+ }
545
+ }
546
+
547
+ /* Extra Small Mobile (Portrait Phones) */
548
+ @media (max-width: 480px) {
549
+ .feed-card {
550
+ min-height: 180px !important;
551
+ max-height: 45vh !important;
552
+ }
553
+
554
+ .feed-card #processed-stream {
555
+ min-height: 180px !important;
556
+ max-height: 45vh !important;
557
+ }
558
+
559
+ .dashboard-header h2 {
560
+ font-size: 1.1rem !important;
561
+ }
562
+
563
+ .system-stats {
564
+ font-size: 0.75rem;
565
+ }
566
+
567
+ .stat-item {
568
+ font-size: 0.7rem;
569
+ }
570
+ }
571
+
572
+ /* === JSON Syntax Highlighting === */
573
+ .json-viewer {
574
+ font-family: 'JetBrains Mono', monospace;
575
+ font-size: 0.72rem;
576
+ line-height: 1.7;
577
+ white-space: pre-wrap;
578
+ word-break: break-word;
579
+ overflow-wrap: break-word;
580
+ }
581
+
582
+ .json-key {
583
+ color: #c084fc;
584
+ }
585
+
586
+ .json-str {
587
+ color: #86efac;
588
+ }
589
+
590
+ .json-num {
591
+ color: #67e8f9;
592
+ }
593
+
594
+ .json-bool {
595
+ color: #fbbf24;
596
+ }
597
+
598
+ .json-null {
599
+ color: #f87171;
600
+ }
601
+
602
+ .json-url {
603
+ color: #38bdf8;
604
+ text-decoration: underline;
605
+ cursor: pointer;
606
+ }
607
+
608
+ /* JSON Table View */
609
+ .json-table {
610
+ width: 100%;
611
+ border-collapse: collapse;
612
+ font-family: 'JetBrains Mono', monospace;
613
+ font-size: 0.75rem;
614
+ }
615
+
616
+ .json-table tr {
617
+ border-bottom: 1px solid rgba(255, 255, 255, 0.08);
618
+ }
619
+
620
+ .json-table td {
621
+ padding: 0.6rem 0.8rem;
622
+ vertical-align: top;
623
+ word-break: break-word;
624
+ }
625
+
626
+ .json-table td:first-child {
627
+ color: #c084fc;
628
+ font-weight: 600;
629
+ width: 35%;
630
+ min-width: 100px;
631
+ }
632
+
633
+ .json-table td:last-child {
634
+ color: #86efac;
635
+ width: 65%;
636
+ }
637
+
638
+ /* === Modal Mobile === */
639
+ @media (max-width: 768px) {
640
+ #detail-modal>div {
641
+ width: 100% !important;
642
+ max-width: 100% !important;
643
+ margin: 0 !important;
644
+ height: 100vh !important;
645
+ border-radius: 0 !important;
646
+ padding: 1.2rem !important;
647
+ overflow-y: auto;
648
+ }
649
+
650
+ .modal-inner-grid {
651
+ grid-template-columns: 1fr !important;
652
+ gap: 1rem !important;
653
+ overflow: visible !important;
654
+ }
655
+
656
+ .modal-main-img-wrap {
657
+ max-height: 220px;
658
+ min-height: 160px;
659
+ }
660
+
661
+ #modal-thumb {
662
+ max-height: 220px;
663
+ width: 100%;
664
+ }
665
+
666
+ .modal-plate-box {
667
+ padding: 0.8rem !important;
668
+ }
669
+
670
+ #modal-plate-text {
671
+ font-size: 1.1rem !important;
672
+ }
673
+
674
+ .modal-json-box {
675
+ max-height: none !important;
676
+ overflow-y: auto;
677
+ }
678
+
679
+ .json-viewer {
680
+ font-size: 0.68rem !important;
681
+ }
682
+
683
+ .json-table {
684
+ font-size: 0.7rem !important;
685
+ }
686
+
687
+ .json-table td {
688
+ padding: 0.5rem 0.6rem !important;
689
+ }
690
+
691
+ .json-table td:first-child {
692
+ width: 40% !important;
693
+ }
694
+ }
695
+
696
+ /* === Log Row Styles === */
697
+ .log-row {
698
+ background: rgba(255, 255, 255, 0.05);
699
+ padding: 12px 14px;
700
+ border-radius: 10px;
701
+ border-left: 4px solid #ef4444;
702
+ cursor: pointer;
703
+ transition: background 0.2s;
704
+ position: relative;
705
+ }
706
+
707
+ .log-row:hover {
708
+ background: rgba(255, 255, 255, 0.1);
709
+ }
710
+
711
+ .log-row-header {
712
+ display: flex;
713
+ justify-content: space-between;
714
+ align-items: center;
715
+ gap: 0.5rem;
716
+ flex-wrap: wrap;
717
+ }
718
+
719
+ .log-row-title {
720
+ font-weight: 700;
721
+ font-size: 0.85rem;
722
+ color: #fff;
723
+ }
724
+
725
+ .log-row-time {
726
+ font-size: 0.7rem;
727
+ opacity: 0.55;
728
+ white-space: nowrap;
729
+ }
730
+
731
+ .log-row-meta {
732
+ margin-top: 5px;
733
+ font-size: 0.78rem;
734
+ display: flex;
735
+ justify-content: space-between;
736
+ align-items: center;
737
+ gap: 0.5rem;
738
+ flex-wrap: wrap;
739
+ }
740
+
741
+ .log-row-id {
742
+ opacity: 0.6;
743
+ font-family: 'JetBrains Mono', monospace;
744
+ }
745
+
746
+ .log-row-plate {
747
+ color: #facc15;
748
+ font-family: 'JetBrains Mono', monospace;
749
+ font-weight: 700;
750
+ letter-spacing: 1px;
751
+ }
752
+
753
+ /* Switches & Controls */
754
+ .switch {
755
+ width: 48px;
756
+ height: 26px;
757
+ background: rgba(255, 255, 255, 0.1);
758
+ border-radius: 100px;
759
+ position: relative;
760
+ cursor: pointer;
761
+ transition: background 0.3s ease;
762
+ }
763
+
764
+ .switch.on {
765
+ background: var(--purple-main);
766
+ }
767
+
768
+ .switch::after {
769
+ content: '';
770
+ position: absolute;
771
+ top: 3px;
772
+ left: 3px;
773
+ width: 20px;
774
+ height: 20px;
775
+ background: white;
776
+ border-radius: 50%;
777
+ transition: all 0.3s cubic-bezier(0.16, 1, 0.3, 1);
778
+ }
779
+
780
+ .switch.on::after {
781
+ left: 25px;
782
+ }
783
+
784
+ /* Detections Gallery */
785
+ .mini-feed-gallery {
786
+ display: grid;
787
+ grid-template-columns: repeat(4, 1fr);
788
+ gap: 12px;
789
+ margin-top: 1.5rem;
790
+ }
791
+
792
+ .mini-thumb {
793
+ aspect-ratio: 1/1;
794
+ background: #1a1a1a;
795
+ border-radius: 12px;
796
+ overflow: hidden;
797
+ border: 1px solid var(--glass-border);
798
+ cursor: pointer;
799
+ transition: transform 0.2s ease;
800
+ }
801
+
802
+ .mini-thumb:hover {
803
+ transform: scale(1.05);
804
+ }
805
+
806
+ .mini-thumb img {
807
+ width: 100%;
808
+ height: 100%;
809
+ object-fit: cover;
810
+ }
static/js/main.js ADDED
@@ -0,0 +1,541 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
// Boot: the dashboard markup is identified by its processed-stream element;
// the landing page lacks it, so we only wire up the dashboard when present.
document.addEventListener('DOMContentLoaded', function () {
    const onDashboard = document.getElementById('processed-stream') !== null;
    if (onDashboard) {
        initDashboard();
    }
});
7
+
8
+ function initDashboard() {
9
+ const dropZone = document.getElementById('drop-zone-overlay');
10
+ const fileInput = document.getElementById('file-input');
11
+ const streamImg = document.getElementById('processed-stream');
12
+ const resultsPanel = document.getElementById('results-panel');
13
+ const miniResultsPanel = document.getElementById('mini-results-panel');
14
+ const violationBadge = document.getElementById('violation-count-badge');
15
+
16
+ // Camera elements
17
+ const cameraVideo = document.getElementById('camera-video');
18
+ const cameraCanvas = document.getElementById('camera-canvas');
19
+
20
+ // Modal Elements
21
+ const modal = document.getElementById('detail-modal');
22
+ const modalThumb = document.getElementById('modal-thumb');
23
+ const modalPlateThumb = document.getElementById('modal-plate-thumb');
24
+ const modalPlateText = document.getElementById('modal-plate-text');
25
+ const modalJson = document.getElementById('modal-json');
26
+ const closeModal = document.querySelector('.close-modal');
27
+
28
+ let knownDetections = new Map(); // Store full object
29
+ let pollInterval = null;
30
+ let socket = null;
31
+ let cameraStream = null;
32
+ let cameraMode = false;
33
+ let sessionId = null;
34
+
35
+ // --- MODE SELECTION (Using Event Delegation) ---
36
// Wire the two source-selection buttons inside the drop zone.
// The drop-zone markup (and therefore these buttons) is re-created by
// resetDropZone(), so this must be re-run after every reset.
function attachModeButtons() {
    const uploadBtn = document.getElementById('upload-mode-btn');
    const cameraBtn = document.getElementById('camera-mode-btn');

    console.log('[DEBUG] Attaching mode buttons...', { uploadBtn, cameraBtn });

    if (!uploadBtn) {
        console.error('[DEBUG] Upload button not found!');
    } else {
        uploadBtn.onclick = (evt) => {
            evt.stopPropagation();
            console.log('[DEBUG] Upload mode clicked');

            // Stop any active camera stream before opening the file picker.
            if (cameraMode) {
                stopCameraMode();
                resetDropZone(); // re-shows the buttons in case the user cancels
            }

            fileInput.click();
        };
        console.log('[DEBUG] Upload button attached');
    }

    if (!cameraBtn) {
        console.error('[DEBUG] Camera button not found!');
    } else {
        cameraBtn.onclick = (evt) => {
            evt.stopPropagation();
            console.log('[DEBUG] Camera mode clicked');
            startCameraMode();
        };
        console.log('[DEBUG] Camera button attached');
    }
}
71
+
72
+
73
+ // Attach buttons on load
74
+ console.log('[DEBUG] Initializing dashboard...');
75
+ attachModeButtons();
76
+
77
+ // --- CAMERA MODE ---
78
// Enter live-camera mode: request the webcam, swap the drop-zone overlay
// for the processed-stream image, open a Socket.IO connection, and register
// handlers that display server-processed frames and collect violations.
// On permission failure the drop zone shows an error and resets after 3s.
async function startCameraMode() {
    try {
        const statusLabel = document.getElementById('live-status');
        const statusDot = document.getElementById('live-indicator');

        // Small helpers: the status widgets may be absent from the page.
        const setStatus = (text) => { if (statusLabel) statusLabel.textContent = text; };
        const setDot = (color) => { if (statusDot) statusDot.style.background = color; };

        dropZone.innerHTML = '<i class="fas fa-spinner fa-spin" style="font-size:3rem; color:var(--purple-main);"></i><p style="margin-top:10px">Requesting Camera Access...</p>';
        setStatus('REQUESTING CAMERA...');
        setDot('#facc15');

        // Prefer the rear camera at 720p when available.
        cameraStream = await navigator.mediaDevices.getUserMedia({
            video: {
                facingMode: 'environment',
                width: { ideal: 1280 },
                height: { ideal: 720 }
            }
        });

        console.log('[DEBUG] Camera stream obtained:', cameraStream);

        cameraVideo.srcObject = cameraStream;
        cameraMode = true;

        // Explicitly start playback (autoplay can be blocked on some browsers).
        cameraVideo.play().catch(err => console.error('[DEBUG] Video play failed:', err));

        setStatus('CAMERA ACTIVE');
        setDot('#10b981');

        // Swap the drop-zone overlay for the live stream image.
        dropZone.style.display = 'none';
        const feedCard = document.querySelector('.feed-card');
        feedCard.classList.remove('overlay-active');
        feedCard.classList.add('video-active');
        streamImg.style.display = 'block';
        streamImg.style.opacity = '1';
        streamImg.style.visibility = 'visible';
        streamImg.style.zIndex = '5';

        console.log('[DEBUG] Stream image element:', streamImg);
        console.log('[DEBUG] Stream image display:', streamImg.style.display);
        console.log('[DEBUG] Stream image visibility:', streamImg.style.visibility);

        // Fresh session: wipe any previous detection state from the UI.
        knownDetections.clear();
        resultsPanel.innerHTML = '';
        miniResultsPanel.innerHTML = '';
        violationBadge.innerText = '0';

        console.log('[DEBUG] Initializing Socket.IO...');
        socket = io();

        console.log('[DEBUG] Registering socket event handlers...');

        socket.on('connect', () => {
            console.log('[SOCKET] Connected, socket ID:', socket.id);
            setStatus('SOCKET CONNECTED');
            sessionId = generateSessionId();
            console.log('[SOCKET] Emitting start_camera_session with ID:', sessionId);
            socket.emit('start_camera_session', { session_id: sessionId });
        });

        socket.on('camera_session_started', (payload) => {
            console.log('[SOCKET] Session started:', payload.session_id);
            console.log('[SOCKET] Socket connected:', socket.connected);
            setStatus('LIVE CAMERA STREAM');
            sessionId = payload.session_id;

            // Make sure the stream image is ready before frames arrive.
            streamImg.style.display = 'block';
            streamImg.style.visibility = 'visible';
            streamImg.style.zIndex = '5';
            console.log('[SOCKET] Stream image prepared for display');

            startCameraCapture();
        });

        socket.on('processed_frame', (payload) => {
            console.log('[SOCKET] ========== RECEIVED PROCESSED FRAME ==========');
            console.log('[SOCKET] Data keys:', Object.keys(payload));
            console.log('[SOCKET] Frame length:', payload.frame ? payload.frame.length : 'no frame');
            console.log('[SOCKET] Violations:', payload.violations ? payload.violations.length : 'no violations');
            console.log('[SOCKET] Stream img element:', streamImg);
            console.log('[SOCKET] Stream img parent:', streamImg.parentElement);

            // Display the processed frame the server sent back.
            if (!payload.frame) {
                console.error('[SOCKET] No frame data in response!');
            } else {
                console.log('[SOCKET] Setting image src...');
                streamImg.src = payload.frame;
                streamImg.style.display = 'block';
                streamImg.style.visibility = 'visible';
                streamImg.style.zIndex = '5';
                console.log('[SOCKET] Image updated - src length:', payload.frame.length);
                console.log('[SOCKET] Image display:', streamImg.style.display);
                console.log('[SOCKET] Image visibility:', streamImg.style.visibility);
                console.log('[SOCKET] Image computed display:', window.getComputedStyle(streamImg).display);
                console.log('[SOCKET] Image computed visibility:', window.getComputedStyle(streamImg).visibility);

                void streamImg.offsetHeight; // force a reflow
                console.log('[SOCKET] ========== FRAME UPDATE COMPLETE ==========');
            }

            // Merge any new violations into the shared detection map.
            if (payload.violations && payload.violations.length > 0) {
                console.log('[SOCKET] Processing violations:', payload.violations);
                for (const violation of payload.violations) {
                    if (!knownDetections.has(violation.id)) {
                        knownDetections.set(violation.id, violation);
                        updateUI(violation, true);
                    }
                }
                violationBadge.innerText = knownDetections.size;
            }
        });

        socket.on('error', (payload) => {
            console.error('[SOCKET] Error:', payload.message);
            setStatus('ERROR: ' + payload.message);
            setDot('#ef4444');
        });

        socket.on('disconnect', () => {
            console.log('[SOCKET] Disconnected');
            setStatus('DISCONNECTED');
            setDot('#ef4444');

            // Attempt a reconnect after a short delay if the camera is still live.
            setTimeout(() => {
                if (cameraMode && cameraStream) {
                    console.log('[SOCKET] Attempting to reconnect...');
                    setStatus('RECONNECTING...');
                    socket.connect();
                }
            }, 2000);
        });

    } catch (err) {
        console.error('Camera access denied:', err);
        const statusLabel = document.getElementById('live-status');
        const statusDot = document.getElementById('live-indicator');
        if (statusLabel) statusLabel.textContent = 'CAMERA DENIED';
        if (statusDot) statusDot.style.background = '#ef4444';

        dropZone.innerHTML = '<i class="fas fa-exclamation-triangle" style="font-size:3rem; color:#ef4444;"></i><p>Camera Access Denied</p><p style="font-size:0.8rem; opacity:0.6;">Please allow camera permissions</p>';
        setTimeout(() => {
            resetDropZone();
            if (statusLabel) statusLabel.textContent = 'LIVE INFERENCE';
            if (statusDot) statusDot.style.background = '#fff';
        }, 3000);
    }
}
233
+
234
// Continuously capture frames from the local <video> element onto a hidden
// canvas, JPEG-encode them, and ship them to the server over Socket.IO at
// roughly 10 FPS. The loop self-terminates when cameraMode is cleared.
function startCameraCapture() {
    const ctx = cameraCanvas.getContext('2d');

    console.log('[DEBUG] Starting camera capture...');
    console.log('[DEBUG] Video element:', cameraVideo);
    console.log('[DEBUG] Video ready state:', cameraVideo.readyState);

    // FIX: previously both the 'loadedmetadata' handler AND the 1-second
    // fallback timer could call captureFrame(), spawning two interleaved
    // capture loops and doubling the frame-send rate. Guard so the loop
    // starts exactly once.
    let loopStarted = false;
    function startLoopOnce(message) {
        if (loopStarted) return;
        loopStarted = true;
        console.log(message);
        captureFrame();
    }

    function captureFrame() {
        // Stop silently once camera mode is torn down.
        if (!cameraMode || !cameraStream) {
            console.log('[DEBUG] Camera mode stopped');
            return;
        }

        // Check if video is ready AND has valid dimensions.
        if (cameraVideo.readyState < 2 || cameraVideo.videoWidth === 0) {
            console.log('[DEBUG] Video not ready or width is 0, waiting...');
            setTimeout(captureFrame, 100);
            return;
        }

        // Resize the canvas only when the video dimensions change.
        if (cameraCanvas.width !== cameraVideo.videoWidth || cameraCanvas.height !== cameraVideo.videoHeight) {
            cameraCanvas.width = cameraVideo.videoWidth;
            cameraCanvas.height = cameraVideo.videoHeight;
            console.log('[DEBUG] Canvas size set:', cameraCanvas.width, 'x', cameraCanvas.height);
        }

        // Draw current frame and encode as base64 JPEG (quality 0.8).
        ctx.drawImage(cameraVideo, 0, 0);
        const frameData = cameraCanvas.toDataURL('image/jpeg', 0.8);

        console.log('[DEBUG] Sending frame, size:', frameData.length, 'bytes');

        // Send to server only when the socket is live.
        if (socket && socket.connected) {
            socket.emit('camera_frame', { frame: frameData });
            console.log('[DEBUG] Frame emitted to server, socket ID:', socket.id);
        } else {
            console.error('[DEBUG] Socket not connected! Socket state:', socket ? socket.connected : 'socket is null');
        }

        // Capture next frame (adjust FPS here - currently ~10 FPS).
        setTimeout(captureFrame, 100);
    }

    // Start immediately if the video is ready, otherwise wait for metadata.
    if (cameraVideo.readyState >= 2) {
        startLoopOnce('[DEBUG] Video already ready, starting capture');
    } else {
        console.log('[DEBUG] Waiting for video metadata...');
        cameraVideo.addEventListener('loadedmetadata', () => {
            startLoopOnce('[DEBUG] Video metadata loaded, starting capture');
        }, { once: true });

        // Fallback: start after 1 second anyway (guard prevents a double start).
        setTimeout(() => {
            if (cameraVideo.readyState >= 2) {
                startLoopOnce('[DEBUG] Fallback: starting capture after timeout');
            }
        }, 1000);
    }
}
302
+
303
// Tear down live-camera mode: clearing cameraMode makes the capture loop
// exit on its next tick; then release every media track and drop the socket.
function stopCameraMode() {
    cameraMode = false;

    if (cameraStream) {
        for (const track of cameraStream.getTracks()) {
            track.stop();
        }
        cameraStream = null;
    }

    if (socket) {
        socket.disconnect();
        socket = null;
    }
}
314
+
315
// Return a random 8-character lowercase-alphanumeric session id.
// FIX: the previous Math.random().toString(36).substring(2, 10) could yield
// FEWER than 8 characters (e.g. Math.random() === 0 gives "", 0.5 gives "5"),
// so the id is now built character-by-character to a guaranteed length.
function generateSessionId() {
    const alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789';
    let id = '';
    for (let i = 0; i < 8; i++) {
        id += alphabet[Math.floor(Math.random() * alphabet.length)];
    }
    return id;
}
318
+
319
// Restore the drop-zone overlay to its initial "choose a source" state and
// re-wire the freshly created mode buttons (innerHTML replacement destroys
// any previously attached handlers).
function resetDropZone() {
    console.log('[DEBUG] Resetting drop zone...');
    dropZone.innerHTML = `
        <i class="fas fa-plus-circle" style="font-size:3.5rem; color: var(--purple-main); margin-bottom:1rem;"></i>
        <h3 style="font-weight: 700;">Deploy Synaptic Node</h3>
        <p style="opacity: 0.6; font-size: 0.9rem; margin-bottom: 1.5rem;">Choose your input source</p>
        <div style="display: flex; gap: 1rem; margin-bottom: 1rem; z-index: 10; position: relative;">
            <button id="upload-mode-btn" class="mode-btn" style="padding: 0.8rem 1.5rem; background: rgba(138, 79, 255, 0.2); border: 2px solid var(--purple-main); border-radius: 10px; color: #fff; cursor: pointer; font-weight: 600; transition: all 0.3s; font-size: 0.9rem;"
                onmouseover="this.style.background='rgba(138, 79, 255, 0.4)'"
                onmouseout="this.style.background='rgba(138, 79, 255, 0.2)'">
                <i class="fas fa-upload"></i> Upload Video
            </button>
            <button id="camera-mode-btn" class="mode-btn" style="padding: 0.8rem 1.5rem; background: rgba(138, 79, 255, 0.2); border: 2px solid var(--purple-main); border-radius: 10px; color: #fff; cursor: pointer; font-weight: 600; transition: all 0.3s; font-size: 0.9rem;"
                onmouseover="this.style.background='rgba(138, 79, 255, 0.4)'"
                onmouseout="this.style.background='rgba(138, 79, 255, 0.2)'">
                <i class="fas fa-video"></i> Live Camera
            </button>
        </div>
    `;
    dropZone.style.display = 'flex';

    const feedCard = document.querySelector('.feed-card');
    feedCard.classList.add('overlay-active');
    feedCard.classList.remove('video-active');

    console.log('[DEBUG] Re-attaching buttons after reset...');
    attachModeButtons();
}
344
+
345
// --- UPLOAD HANDLING ---
// Drag-and-drop / file-picker wiring. Drag highlighting and drops are
// ignored while the live camera owns the feed.
dropZone.addEventListener('dragover', (evt) => {
    evt.preventDefault();
    if (cameraMode) return;
    dropZone.style.background = 'rgba(138, 79, 255, 0.2)';
});

dropZone.addEventListener('dragleave', () => {
    if (cameraMode) return;
    dropZone.style.background = 'rgba(0,0,0,0.6)';
});

dropZone.addEventListener('drop', (evt) => {
    evt.preventDefault();
    const dropped = evt.dataTransfer.files;
    if (dropped.length && !cameraMode) {
        handleUpload(dropped[0]);
    }
});

fileInput.addEventListener('change', (evt) => {
    const chosen = evt.target.files;
    if (chosen.length) {
        handleUpload(chosen[0]);
    }
});
367
+
368
// POST the chosen video file to /upload, then point the stream image at the
// server's processed MJPEG feed and begin polling for violations.
async function handleUpload(file) {
    const uploadBody = new FormData();
    uploadBody.append('file', file);

    // Stop the camera if it's running before starting an upload session.
    if (cameraMode) {
        stopCameraMode();
    }

    dropZone.innerHTML = '<i class="fas fa-spinner fa-spin" style="font-size:3rem; color:var(--purple-main);"></i><p style="margin-top:10px">Ingesting Neural Feed...</p>';

    try {
        const response = await fetch('/upload', { method: 'POST', body: uploadBody });
        const data = await response.json();

        if (data.filename) {
            // Hide the overlay and switch the card into video layout.
            dropZone.style.display = 'none';
            const feedCard = document.querySelector('.feed-card');
            feedCard.classList.remove('overlay-active');
            feedCard.classList.add('video-active');

            // Explicitly make the stream image visible (same as camera mode).
            streamImg.src = `/video_feed/${data.filename}/${data.session_id}`;
            streamImg.style.display = 'block';
            streamImg.style.opacity = '1';
            streamImg.style.visibility = 'visible';
            streamImg.style.zIndex = '5';

            console.log('[DEBUG] Upload successful, showing stream:', streamImg.src);

            // Fresh session: wipe previous detections from the UI.
            knownDetections.clear();
            resultsPanel.innerHTML = '';
            miniResultsPanel.innerHTML = '';
            violationBadge.innerText = '0';

            // Poll the server once a second for new violations.
            if (pollInterval) clearInterval(pollInterval);
            pollInterval = setInterval(pollViolations, 1000);
        }
    } catch (err) {
        console.error(err);
        dropZone.innerHTML = '<i class="fas fa-exclamation-triangle" style="font-size:3rem; color:#ef4444;"></i><p>Connection Refused</p>';
    }
}
413
+
414
+ // --- POLLING LOGIC ---
415
/**
 * Poll the backend for the current violation list and merge it into the UI.
 * A record triggers a render when it has not been seen before, or when its
 * plate number changed (i.e. OCR completed after the initial detection).
 */
async function pollViolations() {
    try {
        const res = await fetch('/get_violations');
        const violations = await res.json(); // Array of violations

        for (const violation of violations) {
            const previous = knownDetections.get(violation.id);
            const firstSighting = previous === undefined;
            const plateChanged = !firstSighting && previous.plate_number !== violation.plate_number;

            // Skip records that are neither new nor updated.
            if (!firstSighting && !plateChanged) continue;

            knownDetections.set(violation.id, violation);
            updateUI(violation, firstSighting);
        }

        violationBadge.innerText = knownDetections.size;
    } catch (err) { console.error("Sync Error", err); }
}
434
+
435
/**
 * Render one violation into the log list and, for first sightings, into the
 * mini evidence gallery. Updated records replace their existing row in place.
 *
 * FIX: dynamic fields (plate text from OCR, timestamp, id, image URL) were
 * interpolated into innerHTML unescaped, allowing markup injection if any of
 * them ever contains HTML metacharacters. All interpolations are now escaped.
 *
 * @param {Object}  v     Violation record from /get_violations.
 * @param {boolean} isNew True on first sighting, false for an update
 *                        (e.g. the OCR result arrived after detection).
 */
function updateUI(v, isNew) {
    // HTML-escape a value before embedding it in generated markup.
    const esc = (s) => String(s)
        .replace(/&/g, '&amp;').replace(/</g, '&lt;')
        .replace(/>/g, '&gt;').replace(/"/g, '&quot;');

    // If it's an update, remove the old row first so the new one replaces it.
    if (!isNew) {
        const oldRow = document.getElementById(`log-${v.id}`);
        if (oldRow) oldRow.remove();
    }

    // Create log row.
    const row = document.createElement('div');
    row.id = `log-${v.id}`;

    // "Scanning..." is the server's placeholder while OCR is still running.
    const plateDisplay = v.plate_number === "Scanning..." ?
        `<span class="log-row-plate" style="color:var(--text-dim);"><i class="fas fa-circle-notch fa-spin"></i> Reading...</span>` :
        `<span class="log-row-plate">${esc(v.plate_number)}</span>`;

    row.className = 'log-row';
    row.innerHTML = `
        <div class="log-row-header">
            <span class="log-row-title">⚠ NO HELMET DETECTED</span>
            <span class="log-row-time">${esc(v.timestamp)}</span>
        </div>
        <div class="log-row-meta">
            <span class="log-row-id">ID: ${esc(v.id)}</span>
            ${plateDisplay}
        </div>
    `;

    row.onclick = () => showDetail(v);

    // Newest entries appear on top.
    resultsPanel.prepend(row);

    // Add to mini gallery (only on first sighting, so thumbs aren't duplicated).
    if (isNew) {
        const thumb = document.createElement('div');
        thumb.className = 'mini-thumb';
        thumb.innerHTML = `<img src="${esc(v.image_url)}" style="width:100%; height:100%; object-fit:cover;">`;
        thumb.onclick = () => showDetail(knownDetections.get(v.id)); // get latest data on click
        miniResultsPanel.prepend(thumb);

        // Cap the gallery at 8 thumbnails.
        if (miniResultsPanel.children.length > 8) {
            miniResultsPanel.removeChild(miniResultsPanel.lastChild);
        }
    }
}
481
+
482
+ function syntaxHighlightJSON(obj) {
483
+ // Create a table view for better mobile responsiveness
484
+ let tableHTML = '<table class="json-table">';
485
+
486
+ for (const [key, value] of Object.entries(obj)) {
487
+ let displayValue = value;
488
+ let valueClass = 'json-str';
489
+
490
+ if (typeof value === 'number') {
491
+ valueClass = 'json-num';
492
+ } else if (typeof value === 'boolean') {
493
+ valueClass = 'json-bool';
494
+ } else if (value === null) {
495
+ valueClass = 'json-null';
496
+ displayValue = 'null';
497
+ } else if (typeof value === 'string' && value.startsWith('http')) {
498
+ valueClass = 'json-url';
499
+ displayValue = `<a href="${value}" target="_blank" style="color: #38bdf8; text-decoration: underline;">${value}</a>`;
500
+ } else if (typeof value === 'object') {
501
+ displayValue = JSON.stringify(value, null, 2);
502
+ }
503
+
504
+ tableHTML += `
505
+ <tr>
506
+ <td><span class="json-key">${key}</span></td>
507
+ <td><span class="${valueClass}">${displayValue}</span></td>
508
+ </tr>
509
+ `;
510
+ }
511
+
512
+ tableHTML += '</table>';
513
+ return tableHTML;
514
+ }
515
+
516
/**
 * Open the detail modal for a violation, always reading the freshest record
 * from the cache (OCR may have updated it after the row was first rendered).
 *
 * @param {Object} v Violation record whose id is used for the cache lookup.
 */
function showDetail(v) {
    // Refetch latest from the map in case OCR updated while the modal was closed.
    const record = knownDetections.get(v.id);

    modalThumb.src = record.image_url;

    // Show the cropped plate image only when one was captured.
    const hasPlateImage = Boolean(record.plate_image_url);
    modalPlateThumb.src = hasPlateImage ? record.plate_image_url : '';
    modalPlateThumb.style.display = hasPlateImage ? 'block' : 'none';

    modalPlateText.innerText = record.plate_number || "----";

    // Syntax-highlighted JSON dump of the full record.
    modalJson.innerHTML = syntaxHighlightJSON(record);

    modal.style.display = 'flex';
}
538
+
539
// Close the modal via the × button, or by clicking the darkened backdrop.
closeModal.onclick = () => modal.style.display = 'none';
window.onclick = (e) => { if (e.target == modal) modal.style.display = 'none'; };
541
+ }
templates/camera_debug.html ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html>
3
+
4
+ <head>
5
+ <title>Camera Debug</title>
6
+ <style>
7
+ body {
8
+ background: #000;
9
+ color: #fff;
10
+ font-family: monospace;
11
+ padding: 20px;
12
+ }
13
+
14
+ .container {
15
+ display: grid;
16
+ grid-template-columns: 1fr 1fr;
17
+ gap: 20px;
18
+ }
19
+
20
+ video,
21
+ img {
22
+ width: 100%;
23
+ border: 2px solid #0f0;
24
+ }
25
+
26
+ .info {
27
+ background: #222;
28
+ padding: 10px;
29
+ margin: 10px 0;
30
+ }
31
+
32
+ button {
33
+ padding: 10px 20px;
34
+ font-size: 16px;
35
+ cursor: pointer;
36
+ }
37
+ </style>
38
+ </head>
39
+
40
+ <body>
41
+ <h1>Camera Stream Debug</h1>
42
+ <button onclick="startTest()">Start Camera Test</button>
43
+ <button onclick="stopTest()">Stop Test</button>
44
+
45
+ <div class="container">
46
+ <div>
47
+ <h3>Raw Camera Feed</h3>
48
+ <video id="camera" autoplay playsinline muted></video>
49
+ <div class="info" id="camera-info">Not started</div>
50
+ </div>
51
+
52
+ <div>
53
+ <h3>Processed Stream (from server)</h3>
54
+ <img id="processed" src="" alt="Waiting for processed frames...">
55
+ <div class="info" id="processed-info">Not started</div>
56
+ </div>
57
+ </div>
58
+
59
+ <div class="info">
60
+ <h3>Console Log</h3>
61
+ <div id="log" style="max-height: 300px; overflow-y: auto;"></div>
62
+ </div>
63
+
64
+ <canvas id="canvas" style="display:none;"></canvas>
65
+
66
+ <script src="https://cdn.socket.io/4.5.4/socket.io.min.js"></script>
67
+ <script>
68
// Debug harness: streams raw webcam frames to the server over Socket.IO and
// displays the processed frames the server emits back, logging every step.
let stream = null;     // active MediaStream from getUserMedia, null when stopped
let socket = null;     // Socket.IO connection, null when disconnected
let capturing = false; // gate for the captureFrame() self-scheduling loop

const camera = document.getElementById('camera');
const processed = document.getElementById('processed');
const canvas = document.getElementById('canvas');
const ctx = canvas.getContext('2d');
const logDiv = document.getElementById('log');

// Append a timestamped line to the on-page log (and mirror to the console).
function log(msg) {
    const time = new Date().toLocaleTimeString();
    logDiv.innerHTML += `[${time}] ${msg}<br>`;
    logDiv.scrollTop = logDiv.scrollHeight;
    console.log(msg);
}

// Acquire the camera, connect Socket.IO, and register the server event
// handlers. Frame capture only starts once the server acknowledges the
// session ('camera_session_started'), so no frames are sent into the void.
async function startTest() {
    try {
        log('Requesting camera access...');
        stream = await navigator.mediaDevices.getUserMedia({
            video: { width: 640, height: 480 }
        });

        camera.srcObject = stream;
        document.getElementById('camera-info').textContent = 'Camera active';
        log('Camera started successfully');

        // Connect socket.
        log('Connecting to Socket.IO...');
        socket = io();

        socket.on('connect', () => {
            log('Socket connected: ' + socket.id);
            socket.emit('start_camera_session', { session_id: 'debug_test' });
        });

        socket.on('camera_session_started', (data) => {
            log('Session started: ' + data.session_id);
            startCapture();
        });

        // Server pushes annotated frames back as data URLs.
        socket.on('processed_frame', (data) => {
            log('Received processed frame, size: ' + data.frame.length);
            processed.src = data.frame;
            document.getElementById('processed-info').textContent = 'Last update: ' + new Date().toLocaleTimeString();
        });

        socket.on('error', (data) => {
            log('ERROR: ' + data.message);
        });

    } catch (err) {
        // Covers both camera-permission denial and socket setup failures.
        log('ERROR: ' + err.message);
    }
}

// Enable the capture gate and kick off the frame loop.
function startCapture() {
    capturing = true;
    log('Starting frame capture...');
    captureFrame();
}

// Self-scheduling capture loop: grab a frame, send it, re-arm via setTimeout.
function captureFrame() {
    if (!capturing) return;

    // readyState < 2 means no current frame data yet — retry shortly.
    if (camera.readyState < 2) {
        setTimeout(captureFrame, 100);
        return;
    }

    canvas.width = camera.videoWidth;
    canvas.height = camera.videoHeight;
    ctx.drawImage(camera, 0, 0);

    const frameData = canvas.toDataURL('image/jpeg', 0.8);

    // Only emit while the socket is actually connected; the loop keeps
    // running either way so it resumes after a reconnect.
    if (socket && socket.connected) {
        socket.emit('camera_frame', { frame: frameData });
        log('Sent frame: ' + frameData.length + ' bytes');
    }

    setTimeout(captureFrame, 500); // 2 FPS for debugging
}

// Tear down: stop the capture loop, release camera tracks, drop the socket.
function stopTest() {
    capturing = false;
    if (stream) {
        stream.getTracks().forEach(track => track.stop());
        stream = null;
    }
    if (socket) {
        socket.disconnect();
        socket = null;
    }
    log('Test stopped');
}
165
+ </script>
166
+ </body>
167
+
168
+ </html>
templates/dashboard.html ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+
4
+ <head>
5
+ <meta charset="UTF-8">
6
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
+ <title>Dashboard | AI Vision</title>
8
+ <link rel="preconnect" href="https://fonts.googleapis.com">
9
+ <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
10
+ <link
11
+ href="https://fonts.googleapis.com/css2?family=Outfit:wght@300;400;600;800&family=JetBrains+Mono:wght@400;700&display=swap"
12
+ rel="stylesheet">
13
+ <link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
14
+ <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
15
+ </head>
16
+
17
+ <body class="dashboard-body">
18
+ <div class="liquid-container">
19
+ <div class="liquid-bg"></div>
20
+ </div>
21
+ <div class="grain-overlay"></div>
22
+
23
+ <div class="dashboard-container">
24
+ <!-- Sidebar -->
25
+ <aside class="sidebar">
26
+ <a href="/" class="nav-icon"><i class="fas fa-house"></i></a>
27
+ <div class="nav-icon active"><i class="fas fa-th-large"></i></div>
28
+ <div class="nav-icon"><i class="fas fa-chart-line"></i></div>
29
+ <div class="nav-icon mobile-hide"><i class="fas fa-gear"></i></div>
30
+ </aside>
31
+
32
+ <main class="main-wrapper">
33
+ <!-- Header -->
34
+ <header class="dashboard-header">
35
+ <div class="user-info">
36
+ <h2 style="font-weight: 800; letter-spacing: -0.02em;">Synaptic Dashboard</h2>
37
+ <div class="system-stats">
38
+ <div class="stat-item"><i class="fas fa-memory" style="color: #60a5fa;"></i> RF-DETR ACTIVE
39
+ </div>
40
+ <div class="stat-item"><i class="fas fa-eye" style="color: #facc15;"></i> OCR ON</div>
41
+ </div>
42
+ </div>
43
+ <div class="header-actions">
44
+ <button class="add-btn mobile-hide">EXPORT LOGS</button>
45
+ </div>
46
+ </header>
47
+
48
+ <div class="dashboard-grid">
49
+ <!-- Video Feed -->
50
+ <div class="feed-card card overlay-active" style="position: relative;">
51
+ <div class="live-badge" style="position: absolute; top: 20px; left: 20px; z-index: 20;">
52
+ <span id="live-indicator"
53
+ style="display:inline-block; width:8px; height:8px; background:#fff; border-radius:50%; animation: pulse 1.5s infinite;"></span>
54
+ <span id="live-status">LIVE INFERENCE</span>
55
+ </div>
56
+ <img id="processed-stream" src="" alt="Neural Stream Output"
57
+ style="width:100%; height:100%; object-fit:contain; background: #000; display: none; position: absolute; top: 0; left: 0; z-index: 1;">
58
+ <canvas id="camera-canvas" style="display:none;"></canvas>
59
+ <video id="camera-video" autoplay playsinline muted
60
+ style="position: absolute; width: 1px; height: 1px; opacity: 0; pointer-events: none; z-index: -1;"></video>
61
+
62
+ <!-- Upload Overlay -->
63
+ <div id="drop-zone-overlay"
64
+ style="position:absolute; inset:0; display:flex; flex-direction:column; justify-content:center; align-items:center; background:rgba(0,0,0,0.6); text-align:center; padding: 2rem; z-index: 10;">
65
+ <i class="fas fa-plus-circle"
66
+ style="font-size:3.5rem; color: var(--purple-main); margin-bottom:1rem;"></i>
67
+ <h3 style="font-weight: 700;">Deploy Synaptic Node</h3>
68
+ <p style="opacity: 0.6; font-size: 0.9rem; margin-bottom: 1.5rem;">Choose your input source</p>
69
+
70
+ <!-- Mode Selection Buttons -->
71
+ <div class="mode-btn-wrap"
72
+ style="display: flex; gap: 1rem; margin-bottom: 1rem; z-index: 10; position: relative;">
73
+ <button id="upload-mode-btn" class="mode-btn"
74
+ style="padding: 0.8rem 1.5rem; background: rgba(138, 79, 255, 0.2); border: 2px solid var(--purple-main); border-radius: 10px; color: #fff; cursor: pointer; font-weight: 600; transition: all 0.3s; font-size: 0.9rem;"
75
+ onmouseover="this.style.background='rgba(138, 79, 255, 0.4)'"
76
+ onmouseout="this.style.background='rgba(138, 79, 255, 0.2)'">
77
+ <i class="fas fa-upload"></i> Upload Video
78
+ </button>
79
+ <button id="camera-mode-btn" class="mode-btn"
80
+ style="padding: 0.8rem 1.5rem; background: rgba(138, 79, 255, 0.2); border: 2px solid var(--purple-main); border-radius: 10px; color: #fff; cursor: pointer; font-weight: 600; transition: all 0.3s; font-size: 0.9rem;"
81
+ onmouseover="this.style.background='rgba(138, 79, 255, 0.4)'"
82
+ onmouseout="this.style.background='rgba(138, 79, 255, 0.2)'">
83
+ <i class="fas fa-video"></i> Live Camera
84
+ </button>
85
+ </div>
86
+
87
+ <input type="file" id="file-input" hidden accept="video/*">
88
+ </div>
89
+ </div>
90
+
91
+ <!-- Side Panel (Logs) -->
92
+ <div class="side-panel-wrapper" style="display:flex; flex-direction:column; gap:1.5rem;">
93
+ <div class="chart-card card">
94
+ <div style="display:flex; justify-content:space-between; margin-bottom:1rem;">
95
+ <h3 style="font-size: 1.1rem; font-weight: 700;">Real-time Analytics</h3>
96
+ <span id="violation-count-badge"
97
+ style="background: rgba(239, 68, 68, 0.2); color: #ef4444; padding: 2px 8px; border-radius: 100px; font-size: 0.7rem; font-weight: 800;">0</span>
98
+ </div>
99
+
100
+ <!-- Scrollable Log List -->
101
+ <div id="results-panel"
102
+ style="flex:1; overflow-y:auto; display:flex; flex-direction:column; gap:10px; max-height: 400px; padding-right: 5px;">
103
+ <!-- Items injected by JS -->
104
+ <div style="text-align:center; opacity:0.5; padding:20px; font-size:0.8rem;">
105
+ Waiting for data stream...
106
+ </div>
107
+ </div>
108
+ </div>
109
+
110
+ <!-- Mini Gallery -->
111
+ <div class="side-card card" style="height: auto; min-height: 150px;">
112
+ <h4
113
+ style="font-size: 0.85rem; text-transform: uppercase; letter-spacing: 0.1em; color: var(--text-dim); margin-bottom:10px;">
114
+ Evidence Cache</h4>
115
+ <div class="mini-feed-gallery" id="mini-results-panel">
116
+ <!-- Thumbs injected by JS -->
117
+ </div>
118
+ </div>
119
+ </div>
120
+ </div>
121
+ </main>
122
+ </div>
123
+
124
+ <!-- Enhanced Modal -->
125
+ <div id="detail-modal" class="modal"
126
+ style="display:none; position:fixed; inset:0; background:rgba(0,0,0,0.95); z-index:3000; backdrop-filter:blur(10px);">
127
+ <div
128
+ style="width:90%; max-width:1100px; margin:5vh auto; background: #1a1a1a; border: 1px solid var(--glass-border); padding:2rem; border-radius:30px; position:relative; display: flex; flex-direction: column; height: 90vh;">
129
+
130
+ <span class="close-modal"
131
+ style="position:absolute; top:20px; right:30px; font-size:2rem; cursor:pointer; color: #fff;">&times;</span>
132
+ <h2 style="margin-bottom:1.5rem; font-weight: 800; color:#fff;">Violation Detail View</h2>
133
+
134
+ <div class="modal-inner-grid"
135
+ style="display:grid; grid-template-columns: 2fr 1fr; gap:2rem; flex: 1; overflow: hidden;">
136
+ <!-- Main Rider Image -->
137
+ <div class="modal-main-img-wrap"
138
+ style="background: #000; border-radius: 20px; overflow: hidden; border: 1px solid var(--glass-border); display: flex; align-items: center; justify-content: center;">
139
+ <img id="modal-thumb" src="" style="max-width:100%; max-height: 100%; object-fit: contain;">
140
+ </div>
141
+
142
+ <!-- Right Sidebar: Plate & Data -->
143
+ <div style="display: flex; flex-direction: column; gap: 1.5rem; overflow: hidden;">
144
+
145
+ <!-- Plate Zoom -->
146
+ <div class="modal-plate-box"
147
+ style="background: #252525; padding: 1rem; border-radius: 15px; border: 1px solid var(--glass-border);">
148
+ <h4
149
+ style="color: var(--text-dim); font-size: 0.8rem; margin-bottom: 0.5rem; text-transform: uppercase;">
150
+ License Plate OCR</h4>
151
+ <div
152
+ style="height: 100px; background: #000; border-radius: 8px; margin-bottom: 0.5rem; overflow: hidden; display: flex; align-items: center; justify-content: center;">
153
+ <img id="modal-plate-thumb" src="" alt="No Plate Detected"
154
+ style="max-height: 100%; max-width: 100%; object-fit: contain;">
155
+ </div>
156
+ <div id="modal-plate-text"
157
+ style="font-family: 'JetBrains Mono', monospace; font-size: 1.5rem; font-weight: 700; color: #facc15; text-align: center; letter-spacing: 2px;">
158
+ ----
159
+ </div>
160
+ </div>
161
+
162
+ <!-- JSON Data -->
163
+ <div class="modal-json-box"
164
+ style="flex: 1; background:rgba(0,0,0,0.45); padding:1rem; border-radius:15px; border: 1px solid var(--glass-border); overflow-y: auto; min-height: 0;">
165
+ <div
166
+ style="font-size: 0.65rem; text-transform: uppercase; letter-spacing: 0.1em; color: var(--text-dim); margin-bottom: 0.5rem;">
167
+ Detection Data</div>
168
+ <div id="modal-json" class="json-viewer"></div>
169
+ </div>
170
+ </div>
171
+ </div>
172
+ </div>
173
+ </div>
174
+
175
+ <script src="https://cdn.socket.io/4.5.4/socket.io.min.js"></script>
176
+ <script src="{{ url_for('static', filename='js/main.js') }}"></script>
177
+ </body>
178
+
179
+ </html>
templates/index.html ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+
4
+ <head>
5
+ <meta charset="UTF-8">
6
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
+ <title>Liquid Vision | RF-DETR</title>
8
+ <link rel="preconnect" href="https://fonts.googleapis.com">
9
+ <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
10
+ <link
11
+ href="https://fonts.googleapis.com/css2?family=Outfit:wght@300;400;600;800&family=JetBrains+Mono:wght@400;700&display=swap"
12
+ rel="stylesheet">
13
+ <link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
14
+ <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
15
+ </head>
16
+
17
+ <body>
18
+ <div class="grain-overlay"></div>
19
+
20
+ <div class="liquid-container">
21
+ <div class="liquid-bg"></div>
22
+ </div>
23
+
24
+ <!-- Landing Page Hero -->
25
+ <section class="hero">
26
+ <p class="made-by" style="margin-bottom: 2rem;">AI</p>
27
+ <h1>Computer<br>Vision</h1>
28
+ <p class="subtitle" style="max-width: 600px; margin: 0 auto; line-height: 1.6;">
29
+ Empowering traffic safety with high-precision RF-DETR analytics.
30
+ Real-time helmet detection, plate recognition, and mobile nodes.
31
+ </p>
32
+ <div class="made-by" style="margin-top: 3rem;">MADE BY WEBASHLAR</div>
33
+ <div class="scroll-hint" onclick="document.querySelector('.container').scrollIntoView({behavior: 'smooth'})">
34
+ <i class="fas fa-chevron-down"></i>
35
+ </div>
36
+ </section>
37
+
38
+ <!-- Main Dashboard -->
39
+ <div class="container" id="dashboard">
40
+ <div class="dashboard-grid">
41
+ <!-- Central Feed Section -->
42
+ <div class="main-feed">
43
+ <section class="card vision-card">
44
+ <div class="card-header">
45
+ <h2><i class="fas fa-microchip"></i> Neural Core</h2>
46
+ <div class="live-indicator"><span class="dot"></span> LIVE</div>
47
+ </div>
48
+
49
+ <!-- Mode Selector -->
50
+ <div class="mode-selector" style="display: flex; gap: 10px; margin-bottom: 1rem;">
51
+ <button class="mode-btn active" id="upload-mode-btn" onclick="switchInputMode('upload')">
52
+ <i class="fas fa-cloud-upload-alt"></i> Upload Video
53
+ </button>
54
+ <button class="mode-btn" id="camera-mode-btn" onclick="switchInputMode('camera')">
55
+ <i class="fas fa-video"></i> Live Camera
56
+ </button>
57
+ </div>
58
+
59
+ <!-- Upload Mode -->
60
+ <div class="upload-area" id="drop-zone">
61
+ <div class="upload-icon">
62
+ <i class="fas fa-cloud-upload-alt"></i>
63
+ </div>
64
+ <p class="upload-title">Ingest Video Evidence</p>
65
+ <p class="upload-subtitle">Drag & Drop or Click to Ingest</p>
66
+ <input type="file" id="file-input" hidden accept="video/*">
67
+ </div>
68
+
69
+ <!-- Camera Mode -->
70
+ <div class="camera-area" id="camera-zone" style="display: none;">
71
+ <video id="camera-preview" autoplay playsinline
72
+ style="width: 100%; border-radius: 16px; background: #000;"></video>
73
+ <canvas id="camera-canvas" style="display: none;"></canvas>
74
+ <div style="display: flex; gap: 10px; margin-top: 1rem;">
75
+ <button class="btn btn-primary" id="start-camera-btn" onclick="startCamera()">
76
+ <i class="fas fa-play"></i> Start Camera
77
+ </button>
78
+ <button class="btn btn-danger" id="stop-camera-btn" onclick="stopCamera()"
79
+ style="display: none;">
80
+ <i class="fas fa-stop"></i> Stop Camera
81
+ </button>
82
+ </div>
83
+ </div>
84
+
85
+ <div class="video-preview-wrapper" id="video-wrapper">
86
+ <img id="processed-stream" src="" alt="Neural Stream Output">
87
+ </div>
88
+ </section>
89
+
90
+ <section class="card log-card">
91
+ <h2><i class="fas fa-terminal"></i> System Logs</h2>
92
+ <div id="logs">
93
+ <div class="log-entry system">[SYSTEM] Kernel initialized. Awaiting neural link...</div>
94
+ </div>
95
+ </section>
96
+ </div>
97
+
98
+ <!-- Side Panels -->
99
+ <div class="side-panels">
100
+ <!-- Live Detection Panel -->
101
+ <section class="card results-card">
102
+ <div class="card-header">
103
+ <h2><i class="fas fa-bolt"></i> Detections</h2>
104
+ <div id="violation-count" class="count-badge">0</div>
105
+ </div>
106
+ <div id="results-panel" class="results-list">
107
+ <!-- Detection cards will be injected here -->
108
+ <div class="empty-state">
109
+ <i class="fas fa-radar"></i>
110
+ <p>Scanning for violations...</p>
111
+ </div>
112
+ </div>
113
+ </section>
114
+
115
+ <!-- Mobile Connection Card -->
116
+ <section class="card mobile-card">
117
+ <h2><i class="fas fa-mobile-screen-button"></i> Mobile Node</h2>
118
+ <div class="qr-container">
119
+ <div class="qr-box">
120
+ <i class="fas fa-qrcode"></i>
121
+ </div>
122
+ <div class="qr-label">NODE HANDSHAKE</div>
123
+ </div>
124
+ <div class="connection-status">
125
+ <div class="status-row">
126
+ <span>SURVEILLANCE LINK</span>
127
+ <span class="status-offline">OFFLINE</span>
128
+ </div>
129
+ <div class="progress-bar">
130
+ <div class="progress-fill"></div>
131
+ </div>
132
+ </div>
133
+ </section>
134
+ </div>
135
+ </div>
136
+ </div>
137
+
138
+ <!-- Event Detail Modal -->
139
+ <div id="detail-modal" class="modal">
140
+ <div class="modal-content card">
141
+ <span class="close-modal">&times;</span>
142
+ <h2><i class="fas fa-search-plus"></i> Violation Detail</h2>
143
+ <div class="modal-body">
144
+ <div class="modal-image">
145
+ <img id="modal-thumb" src="" alt="Violation Full Resolution">
146
+ </div>
147
+ <div class="modal-info">
148
+ <div id="modal-json" class="json-preview"></div>
149
+ </div>
150
+ </div>
151
+ </div>
152
+ </div>
153
+
154
+ <script src="https://cdn.socket.io/4.5.4/socket.io.min.js"></script>
155
+ <script src="{{ url_for('static', filename='js/main.js') }}"></script>
156
+ </body>
157
+
158
+ </html>
templates/landing.html ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+
4
+ <head>
5
+ <meta charset="UTF-8">
6
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
+ <title>AI Computer Vision | Webashlar</title>
8
+ <link rel="preconnect" href="https://fonts.googleapis.com">
9
+ <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
10
+ <link href="https://fonts.googleapis.com/css2?family=Outfit:wght@300;400;600;800&display=swap" rel="stylesheet">
11
+ <link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
12
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js"></script>
13
+ </head>
14
+
15
+ <body>
16
+ <div id="canvas-container"></div>
17
+ <div class="grain-overlay"></div>
18
+
19
+ <section class="hero-content">
20
+ <p class="made-by top">AI</p>
21
+ <h1 class="landing-title">
22
+ Computer<br>Vision</h1>
23
+ <p class="landing-subtitle">
24
+ Next-gen traffic safety with high-precision RF-DETR analytics.
25
+ Real-time helmet detection and smart mobile nodes.
26
+ </p>
27
+ <a href="/dashboard" class="enter-btn">ENTER DASHBOARD</a>
28
+ <div class="made-by bottom">MADE BY
29
+ WEBASHLAR</div>
30
+ </section>
31
+
32
+ <script>
33
// Simple Three.js Liquid Gradient Shader
// Full-screen animated background: a single quad rendered with an orthographic
// camera and a fragment shader that blends four warm/violet blobs over time.
const scene = new THREE.Scene();
// Ortho camera spanning exactly [-1,1]^2 so the 2x2 plane fills the viewport.
const camera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
const renderer = new THREE.WebGLRenderer({ alpha: true });
document.getElementById('canvas-container').appendChild(renderer.domElement);

// u_time drives the animation; u_resolution corrects the aspect ratio.
const uniforms = {
    u_time: { value: 0 },
    u_resolution: { value: new THREE.Vector2() }
};

const geometry = new THREE.PlaneGeometry(2, 2);
// NOTE(review): in the fragment shader, d3 uses cos(t*0.4*0.8) as the blob's
// y-coordinate amplitude-less term — possibly intended as cos(t*0.4)*0.8 to
// match d1/d2; purely cosmetic either way. Shader strings left untouched.
const material = new THREE.ShaderMaterial({
    uniforms: uniforms,
    vertexShader: `
        varying vec2 vUv;
        void main() {
            vUv = uv;
            gl_Position = vec4(position, 1.0);
        }
    `,
    fragmentShader: `
        uniform float u_time;
        uniform vec2 u_resolution;
        varying vec2 vUv;

        void main() {
            vec2 p = vUv * 2.0 - 1.0;
            p.x *= u_resolution.x / u_resolution.y;

            float t = u_time * 0.2;

            vec3 col1 = vec3(0.54, 0.36, 0.96); // #8b5cf6
            vec3 col2 = vec3(0.93, 0.28, 0.60); // #ec4899
            vec3 col3 = vec3(0.96, 0.25, 0.37); // #f43f5e
            vec3 col4 = vec3(0.98, 0.45, 0.09); // #f97316

            float d1 = length(p - vec2(sin(t)*0.7, cos(t*0.7)*0.5));
            float d2 = length(p - vec2(cos(t*0.8)*0.8, sin(t*0.6)*0.6));
            float d3 = length(p - vec2(sin(t*1.2)*0.5, cos(t*0.4*0.8)));

            float v = 0.0;
            v += 1.0 / (1.0 + d1 * 2.0);
            v += 1.0 / (1.0 + d2 * 1.5);
            v += 1.0 / (1.0 + d3 * 2.0);

            vec3 color = mix(col1, col2, sin(d1 + t) * 0.5 + 0.5);
            color = mix(color, col3, sin(d2 - t*0.5) * 0.5 + 0.5);
            color = mix(color, col4, 1.0 / (1.0 + length(p) * 2.0));

            gl_FragColor = vec4(color * (v * 0.5 + 0.5), 1.0);
        }
    `
});

const mesh = new THREE.Mesh(geometry, material);
scene.add(mesh);

// Keep the drawing buffer and aspect-correction uniform in sync with the window.
function resize() {
    const w = window.innerWidth;
    const h = window.innerHeight;
    renderer.setSize(w, h);
    uniforms.u_resolution.value.set(w, h);
}

window.addEventListener('resize', resize);
resize();

// rAF loop: t is the DOMHighResTimeStamp in ms, converted to seconds for the shader.
function animate(t) {
    uniforms.u_time.value = t * 0.001;
    renderer.render(scene, camera);
    requestAnimationFrame(animate);
}
requestAnimationFrame(animate);
107
+ </script>
108
+ </body>
109
+
110
+ </html>
templates/mobile.html ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+
4
+ <head>
5
+ <meta charset="UTF-8">
6
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
7
+ <title>Mobile Vision Node</title>
8
+ <link rel="preconnect" href="https://fonts.googleapis.com">
9
+ <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
10
+ <link href="https://fonts.googleapis.com/css2?family=Outfit:wght@300;400;600;800&display=swap" rel="stylesheet">
11
+ <link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
12
+ <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
13
+ <style>
14
+ body {
15
+ margin: 0;
16
+ background: #000;
17
+ color: #fff;
18
+ overflow: hidden;
19
+ }
20
+
21
+ video {
22
+ width: 100vw;
23
+ height: 100vh;
24
+ object-fit: cover;
25
+ position: absolute;
26
+ z-index: 1;
27
+ }
28
+
29
+ .overlay {
30
+ position: fixed;
31
+ top: 20px;
32
+ left: 20px;
33
+ z-index: 10;
34
+ background: rgba(0, 0, 0, 0.5);
35
+ backdrop-filter: blur(10px);
36
+ padding: 15px;
37
+ border-radius: 12px;
38
+ border-left: 4px solid #8b5cf6;
39
+ border: 1px solid rgba(255, 255, 255, 0.1);
40
+ }
41
+ </style>
42
+ </head>
43
+
44
+ <body>
45
+ <div class="grain-overlay" style="z-index: 5;"></div>
46
+ <div class="overlay">
47
+ <div id="status" style="font-weight: 800; font-size: 0.9rem; letter-spacing: 0.05em;"><i
48
+ class="fas fa-circle-notch fa-spin"></i> INITIALIZING NODE</div>
49
+ <div style="font-size: 0.7rem; opacity: 0.7; margin-top: 6px; font-family: 'JetBrains Mono', monospace;">ID: {{
50
+ session_id }}</div>
51
+ </div>
52
+ <video id="webcam" autoplay playsinline></video>
53
+ <canvas id="snapshot" style="display: none;"></canvas>
54
+
55
+ <script>
56
+ const video = document.getElementById('webcam');
57
+ const canvas = document.getElementById('snapshot');
58
+ const status = document.getElementById('status');
59
+ const sessionId = "{{ session_id }}";
60
+
61
+ async function startNode() {
62
+ try {
63
+ const stream = await navigator.mediaDevices.getUserMedia({ video: { facingMode: 'environment' } });
64
+ video.srcObject = stream;
65
+ status.innerHTML = '<i class="fas fa-wifi" style="margin-right:5px;"></i> STREAMING ACTIVE';
66
+ status.style.color = "#10b981";
67
+
68
+ setInterval(sendFrame, 200); // 5 FPS
69
+ } catch (err) {
70
+ status.innerHTML = '<i class="fas fa-exclamation-triangle"></i> ACCESS DENIED';
71
+ status.style.color = "#ef4444";
72
+ }
73
+ }
74
+
75
+ function sendFrame() {
76
+ const context = canvas.getContext('2d');
77
+ canvas.width = video.videoWidth / 2; // Resize for speed
78
+ canvas.height = video.videoHeight / 2;
79
+ context.drawImage(video, 0, 0, canvas.width, canvas.height);
80
+
81
+ canvas.toBlob(async (blob) => {
82
+ const formData = new FormData();
83
+ formData.append('frame', blob);
84
+ try {
85
+ await fetch(`/upload_frame/${sessionId}`, { method: 'POST', body: formData });
86
+ } catch (e) { }
87
+ }, 'image/jpeg', 0.6);
88
+ }
89
+
90
+ startNode();
91
+ </script>
92
+ </body>
93
+
94
+ </html>