eeeeelin committed on
Commit
f2d45f2
·
1 Parent(s): 43b7807

implemented Live Cam feature

Browse files
app/__pycache__/config.cpython-311.pyc CHANGED
Binary files a/app/__pycache__/config.cpython-311.pyc and b/app/__pycache__/config.cpython-311.pyc differ
 
app/__pycache__/models.cpython-311.pyc CHANGED
Binary files a/app/__pycache__/models.cpython-311.pyc and b/app/__pycache__/models.cpython-311.pyc differ
 
app/__pycache__/state.cpython-311.pyc CHANGED
Binary files a/app/__pycache__/state.cpython-311.pyc and b/app/__pycache__/state.cpython-311.pyc differ
 
app/config.py CHANGED
@@ -28,4 +28,10 @@ class Config:
28
  'Motorcycle': {'min': 1, 'max': 2},
29
  'Bus': {'min': 1, 'max': 50},
30
  'Truck': {'min': 1, 'max': 3}
31
- }
 
 
 
 
 
 
 
28
  'Motorcycle': {'min': 1, 'max': 2},
29
  'Bus': {'min': 1, 'max': 50},
30
  'Truck': {'min': 1, 'max': 3}
31
+ }
32
+
33
+ # Live Stream Settings
34
+ LIVE_STREAM_RETRY_ATTEMPTS = 5
35
+ LIVE_STREAM_RETRY_DELAY = 2 # seconds between reconnection attempts
36
+ LIVE_STREAM_CONNECTION_TIMEOUT = 10 # seconds to wait for initial connection
37
+ LIVE_STREAM_BUFFER_SIZE = 1 # minimize latency by keeping buffer small
app/routes/__pycache__/dashboard.cpython-311.pyc CHANGED
Binary files a/app/routes/__pycache__/dashboard.cpython-311.pyc and b/app/routes/__pycache__/dashboard.cpython-311.pyc differ
 
app/routes/__pycache__/setup.cpython-311.pyc CHANGED
Binary files a/app/routes/__pycache__/setup.cpython-311.pyc and b/app/routes/__pycache__/setup.cpython-311.pyc differ
 
app/routes/setup.py CHANGED
@@ -4,7 +4,7 @@ from datetime import datetime
4
  import os
5
  import uuid
6
  from app.services.firebase_service import FirebaseService
7
- from app.services.processing_service import start_processing, get_job_status
8
  from app.config import Config
9
  import cv2
10
  import base64
@@ -144,6 +144,7 @@ def start_processing_route():
144
  camera_data = cameras.get(camera_role, {})
145
  video_path = camera_data.get('video_path')
146
  line_points = camera_data.get('line_points')
 
147
 
148
  if not all([session_id, video_path, line_points]):
149
  return jsonify({'error': f'Missing configuration for {camera_role} camera'}), 400
@@ -169,7 +170,8 @@ def start_processing_route():
169
  line_points=line_points,
170
  location=location,
171
  video_start_time=video_start_time,
172
- camera_role=camera_role
 
173
  )
174
 
175
  return jsonify({
@@ -177,12 +179,113 @@ def start_processing_route():
177
  'session_id': session_id,
178
  'camera_role': camera_role,
179
  'status': job.status,
 
180
  'message': f'Processing started for {camera_role} camera'
181
  })
182
 
183
  except Exception as e:
184
  return jsonify({'error': str(e)}), 500
185
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
186
  @setup_bp.route('/processing-status/<session_id>')
187
  def processing_status(session_id):
188
  """Get the status of a processing job"""
 
4
  import os
5
  import uuid
6
  from app.services.firebase_service import FirebaseService
7
+ from app.services.processing_service import start_processing, get_job_status, stop_processing
8
  from app.config import Config
9
  import cv2
10
  import base64
 
144
  camera_data = cameras.get(camera_role, {})
145
  video_path = camera_data.get('video_path')
146
  line_points = camera_data.get('line_points')
147
+ is_live_stream = camera_data.get('is_live_stream', False)
148
 
149
  if not all([session_id, video_path, line_points]):
150
  return jsonify({'error': f'Missing configuration for {camera_role} camera'}), 400
 
170
  line_points=line_points,
171
  location=location,
172
  video_start_time=video_start_time,
173
+ camera_role=camera_role,
174
+ is_live_stream=is_live_stream
175
  )
176
 
177
  return jsonify({
 
179
  'session_id': session_id,
180
  'camera_role': camera_role,
181
  'status': job.status,
182
+ 'is_live_stream': is_live_stream,
183
  'message': f'Processing started for {camera_role} camera'
184
  })
185
 
186
  except Exception as e:
187
  return jsonify({'error': str(e)}), 500
188
 
189
+
190
+ @setup_bp.route('/configure-stream', methods=['POST'])
191
+ def configure_stream():
192
+ """Configure a live camera stream (RTSP/HTTP)"""
193
+ try:
194
+ data = request.get_json()
195
+ stream_url = data.get('stream_url')
196
+ camera_role = data.get('camera_role', 'ENTRY')
197
+
198
+ if not stream_url:
199
+ return jsonify({'error': 'Stream URL is required'}), 400
200
+
201
+ # Validate URL format
202
+ if not stream_url.lower().startswith(('rtsp://', 'http://', 'https://', 'rtmp://')):
203
+ return jsonify({'error': 'Invalid stream URL. Must start with rtsp://, http://, https://, or rtmp://'}), 400
204
+
205
+ print(f"Connecting to stream: {stream_url} for {camera_role} camera")
206
+
207
+ # Attempt to connect and capture first frame
208
+ cap = cv2.VideoCapture(stream_url)
209
+ cap.set(cv2.CAP_PROP_BUFFERSIZE, Config.LIVE_STREAM_BUFFER_SIZE)
210
+
211
+ if not cap.isOpened():
212
+ return jsonify({'error': 'Cannot connect to stream. Please check the URL.'}), 400
213
+
214
+ # Try to read a frame with timeout
215
+ ret, frame = cap.read()
216
+ cap.release()
217
+
218
+ if not ret or frame is None:
219
+ return jsonify({'error': 'Connected but cannot read from stream.'}), 400
220
+
221
+ # Create/get session ID
222
+ session_id = session.get('current_session')
223
+ if not session_id:
224
+ session_id = str(uuid.uuid4())
225
+ session['current_session'] = session_id
226
+
227
+ # Initialize camera storage if needed
228
+ if 'cameras' not in session:
229
+ session['cameras'] = {'ENTRY': {}, 'EXIT': {}}
230
+
231
+ # Store stream configuration
232
+ session['cameras'][camera_role]['video_path'] = stream_url
233
+ session['cameras'][camera_role]['is_live_stream'] = True
234
+ session['cameras'][camera_role]['has_video'] = True
235
+ session.modified = True
236
+
237
+ # Prepare frame for line drawing
238
+ frame = cv2.resize(frame, (Config.FRAME_WIDTH, Config.FRAME_HEIGHT))
239
+ _, buffer = cv2.imencode('.jpg', frame)
240
+ frame_base64 = base64.b64encode(buffer).decode('utf-8')
241
+
242
+ # Get existing line points if any
243
+ line_points = session['cameras'][camera_role].get('line_points')
244
+
245
+ print(f"Stream configured successfully for {camera_role} camera")
246
+ return jsonify({
247
+ 'success': True,
248
+ 'session_id': session_id,
249
+ 'camera_role': camera_role,
250
+ 'frame': frame_base64,
251
+ 'width': Config.FRAME_WIDTH,
252
+ 'height': Config.FRAME_HEIGHT,
253
+ 'line_points': line_points,
254
+ 'is_live_stream': True
255
+ })
256
+
257
+ except Exception as e:
258
+ print(f"Stream configuration error: {str(e)}")
259
+ import traceback
260
+ traceback.print_exc()
261
+ return jsonify({'error': str(e)}), 500
262
+
263
+
264
+ @setup_bp.route('/stop-processing', methods=['POST'])
265
+ def stop_processing_route():
266
+ """Stop a running processing job"""
267
+ try:
268
+ session_id = session.get('current_session')
269
+ data = request.get_json()
270
+ camera_role = data.get('camera_role') # None = stop all
271
+
272
+ if not session_id:
273
+ return jsonify({'error': 'No active session'}), 400
274
+
275
+ stopped = stop_processing(session_id, camera_role)
276
+
277
+ if stopped:
278
+ return jsonify({
279
+ 'success': True,
280
+ 'message': f'Stop signal sent for {camera_role or "all cameras"}'
281
+ })
282
+ else:
283
+ return jsonify({'error': 'No active processing job found'}), 404
284
+
285
+ except Exception as e:
286
+ return jsonify({'error': str(e)}), 500
287
+
288
+
289
  @setup_bp.route('/processing-status/<session_id>')
290
  def processing_status(session_id):
291
  """Get the status of a processing job"""
app/services/__pycache__/firebase_service.cpython-311.pyc CHANGED
Binary files a/app/services/__pycache__/firebase_service.cpython-311.pyc and b/app/services/__pycache__/firebase_service.cpython-311.pyc differ
 
app/services/__pycache__/processing_service.cpython-311.pyc CHANGED
Binary files a/app/services/__pycache__/processing_service.cpython-311.pyc and b/app/services/__pycache__/processing_service.cpython-311.pyc differ
 
app/services/__pycache__/video_processor.cpython-311.pyc CHANGED
Binary files a/app/services/__pycache__/video_processor.cpython-311.pyc and b/app/services/__pycache__/video_processor.cpython-311.pyc differ
 
app/services/processing_service.py CHANGED
@@ -38,6 +38,7 @@ class ProcessingStatus(str, Enum):
38
  PENDING = 'pending'
39
  PROCESSING = 'processing'
40
  COMPLETED = 'completed'
 
41
  ERROR = 'error'
42
 
43
 
@@ -47,6 +48,12 @@ class CameraRole(str, Enum):
47
  EXIT = 'EXIT'
48
 
49
 
 
 
 
 
 
 
50
  @dataclass
51
  class ProcessingConfig:
52
  """Configuration constants for video processing."""
@@ -107,6 +114,10 @@ class ProcessingJob:
107
  progress: int = 0
108
  error: Optional[str] = None
109
  thread: Optional[threading.Thread] = None
 
 
 
 
110
 
111
  def to_dict(self) -> dict:
112
  """Convert job to dictionary for serialization."""
@@ -116,9 +127,15 @@ class ProcessingJob:
116
  'progress': self.progress,
117
  'error': self.error,
118
  'location': self.location,
119
- 'camera_role': self.camera_role
 
 
120
  }
121
 
 
 
 
 
122
  @property
123
  def line_points_int(self) -> Tuple[Tuple[int, int], Tuple[int, int]]:
124
  """Get line points as integer tuples."""
@@ -138,18 +155,20 @@ def start_processing(
138
  line_points: List[List[float]],
139
  location: str,
140
  video_start_time: datetime = None,
141
- camera_role: str = CameraRole.ENTRY.value
 
142
  ) -> ProcessingJob:
143
  """
144
  Start video processing in a background thread.
145
 
146
  Args:
147
  session_id: Unique identifier for the session
148
- video_path: Path to the video file
149
  line_points: Counting line coordinates [[x1,y1], [x2,y2]]
150
  location: Location name for the session
151
  video_start_time: Timestamp for the video start
152
  camera_role: 'ENTRY' or 'EXIT' camera designation
 
153
 
154
  Returns:
155
  ProcessingJob instance for tracking progress
@@ -157,6 +176,10 @@ def start_processing(
157
  # Clear stale frames from queue
158
  _clear_frame_queue(camera_role)
159
 
 
 
 
 
160
  # Create job
161
  job = ProcessingJob(
162
  session_id=session_id,
@@ -164,7 +187,8 @@ def start_processing(
164
  line_points=line_points,
165
  location=location,
166
  camera_role=camera_role,
167
- video_start_time=video_start_time or datetime.now()
 
168
  )
169
 
170
  # Store job in global registry
@@ -184,6 +208,44 @@ def start_processing(
184
  return job
185
 
186
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
187
  def get_job_status(session_id: str) -> Optional[Dict[str, dict]]:
188
  """
189
  Get the status of all processing jobs for a session.
@@ -226,15 +288,21 @@ def _run_processing_job(job: ProcessingJob) -> None:
226
  session_data = SessionData(job.session_id, job.location)
227
  session_data.line_coordinates = job.line_points
228
 
229
- # Process the video
230
- _process_video(processor, firebase, job, session_data)
 
 
 
231
 
232
  # Save final results
233
  firebase.save_session(session_data, camera_role=job.camera_role)
234
 
235
- # Mark complete
236
- job.status = ProcessingStatus.COMPLETED.value
237
- job.progress = 100
 
 
 
238
 
239
  _emit_status_update(job)
240
  _emit_processing_complete(job, session_data.get_statistics())
@@ -336,6 +404,170 @@ def _process_video(
336
  return output_path
337
 
338
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
339
  def _process_single_frame(
340
  frame: np.ndarray,
341
  processor: VideoProcessor,
 
38
  PENDING = 'pending'
39
  PROCESSING = 'processing'
40
  COMPLETED = 'completed'
41
+ STOPPED = 'stopped'
42
  ERROR = 'error'
43
 
44
 
 
48
  EXIT = 'EXIT'
49
 
50
 
51
+ class VideoSource(str, Enum):
52
+ """Video input source types."""
53
+ FILE = 'file'
54
+ LIVE_STREAM = 'live_stream'
55
+
56
+
57
  @dataclass
58
  class ProcessingConfig:
59
  """Configuration constants for video processing."""
 
114
  progress: int = 0
115
  error: Optional[str] = None
116
  thread: Optional[threading.Thread] = None
117
+ # Live stream support
118
+ is_live_stream: bool = False
119
+ should_stop: bool = False
120
+ frames_processed: int = 0
121
 
122
  def to_dict(self) -> dict:
123
  """Convert job to dictionary for serialization."""
 
127
  'progress': self.progress,
128
  'error': self.error,
129
  'location': self.location,
130
+ 'camera_role': self.camera_role,
131
+ 'is_live_stream': self.is_live_stream,
132
+ 'frames_processed': self.frames_processed
133
  }
134
 
135
+ def stop(self) -> None:
136
+ """Signal the job to stop processing."""
137
+ self.should_stop = True
138
+
139
  @property
140
  def line_points_int(self) -> Tuple[Tuple[int, int], Tuple[int, int]]:
141
  """Get line points as integer tuples."""
 
155
  line_points: List[List[float]],
156
  location: str,
157
  video_start_time: datetime = None,
158
+ camera_role: str = CameraRole.ENTRY.value,
159
+ is_live_stream: bool = False
160
  ) -> ProcessingJob:
161
  """
162
  Start video processing in a background thread.
163
 
164
  Args:
165
  session_id: Unique identifier for the session
166
+ video_path: Path to the video file or stream URL
167
  line_points: Counting line coordinates [[x1,y1], [x2,y2]]
168
  location: Location name for the session
169
  video_start_time: Timestamp for the video start
170
  camera_role: 'ENTRY' or 'EXIT' camera designation
171
+ is_live_stream: Whether the source is a live stream (RTSP/HTTP)
172
 
173
  Returns:
174
  ProcessingJob instance for tracking progress
 
176
  # Clear stale frames from queue
177
  _clear_frame_queue(camera_role)
178
 
179
+ # Auto-detect live stream if not explicitly set
180
+ if not is_live_stream:
181
+ is_live_stream = _is_live_stream(video_path)
182
+
183
  # Create job
184
  job = ProcessingJob(
185
  session_id=session_id,
 
187
  line_points=line_points,
188
  location=location,
189
  camera_role=camera_role,
190
+ video_start_time=video_start_time or datetime.now(),
191
+ is_live_stream=is_live_stream
192
  )
193
 
194
  # Store job in global registry
 
208
  return job
209
 
210
 
211
+ def stop_processing(session_id: str, camera_role: str = None) -> bool:
212
+ """
213
+ Stop a running processing job.
214
+
215
+ Args:
216
+ session_id: The session ID to stop
217
+ camera_role: Specific camera to stop, or None to stop all
218
+
219
+ Returns:
220
+ True if job(s) were signaled to stop, False if not found
221
+ """
222
+ if session_id not in processing_jobs:
223
+ return False
224
+
225
+ stopped = False
226
+ jobs = processing_jobs[session_id]
227
+
228
+ if camera_role:
229
+ # Stop specific camera
230
+ if camera_role in jobs:
231
+ jobs[camera_role].stop()
232
+ stopped = True
233
+ else:
234
+ # Stop all cameras in session
235
+ for job in jobs.values():
236
+ job.stop()
237
+ stopped = True
238
+
239
+ return stopped
240
+
241
+
242
+ def _is_live_stream(video_path: str) -> bool:
243
+ """Check if the video source is a live stream URL."""
244
+ if not video_path:
245
+ return False
246
+ return video_path.lower().startswith(('rtsp://', 'http://', 'https://', 'rtmp://'))
247
+
248
+
249
  def get_job_status(session_id: str) -> Optional[Dict[str, dict]]:
250
  """
251
  Get the status of all processing jobs for a session.
 
288
  session_data = SessionData(job.session_id, job.location)
289
  session_data.line_coordinates = job.line_points
290
 
291
+ # Process based on source type
292
+ if job.is_live_stream:
293
+ _process_live_stream(processor, firebase, job, session_data)
294
+ else:
295
+ _process_video(processor, firebase, job, session_data)
296
 
297
  # Save final results
298
  firebase.save_session(session_data, camera_role=job.camera_role)
299
 
300
+ # Mark complete (or stopped for live streams)
301
+ if job.should_stop:
302
+ job.status = ProcessingStatus.STOPPED.value
303
+ else:
304
+ job.status = ProcessingStatus.COMPLETED.value
305
+ job.progress = 100
306
 
307
  _emit_status_update(job)
308
  _emit_processing_complete(job, session_data.get_statistics())
 
404
  return output_path
405
 
406
 
407
+ def _process_live_stream(
408
+ processor: VideoProcessor,
409
+ firebase: FirebaseService,
410
+ job: ProcessingJob,
411
+ session_data: SessionData
412
+ ) -> None:
413
+ """
414
+ Process live stream continuously until stopped.
415
+
416
+ Key differences from file processing:
417
+ - No total_frames (runs indefinitely)
418
+ - Handles reconnection on failure
419
+ - Runs until job.should_stop is True
420
+ - No video file output (streaming only)
421
+
422
+ Args:
423
+ processor: VideoProcessor instance for detection
424
+ firebase: FirebaseService for persistence
425
+ job: Current processing job
426
+ session_data: Session data container
427
+ """
428
+ import time
429
+
430
+ cap = None
431
+ retry_count = 0
432
+ max_retries = Config.LIVE_STREAM_RETRY_ATTEMPTS
433
+ retry_delay = Config.LIVE_STREAM_RETRY_DELAY
434
+
435
+ # Initialize tracker and annotators
436
+ fps = 30.0 # Default for live streams
437
+ tracker = _create_tracker(fps)
438
+ annotators = _create_annotators()
439
+ tracking = TrackingState()
440
+
441
+ frame_queue = frame_queues.get(job.camera_role)
442
+ frame_idx = 0
443
+ last_event_count = 0
444
+ last_save_time = datetime.now()
445
+
446
+ print(f"Starting live stream processing for {job.camera_role}: {job.video_path}")
447
+
448
+ try:
449
+ while not job.should_stop:
450
+ # Connect/reconnect to stream
451
+ if cap is None or not cap.isOpened():
452
+ if retry_count >= max_retries:
453
+ raise ConnectionError(f"Lost connection to stream after {max_retries} retries")
454
+
455
+ if retry_count > 0:
456
+ print(f"Reconnecting to stream (attempt {retry_count + 1}/{max_retries})...")
457
+ _emit_status_update(job) # Notify client of reconnection attempt
458
+ time.sleep(retry_delay)
459
+
460
+ cap = cv2.VideoCapture(job.video_path)
461
+ cap.set(cv2.CAP_PROP_BUFFERSIZE, Config.LIVE_STREAM_BUFFER_SIZE)
462
+
463
+ if not cap.isOpened():
464
+ retry_count += 1
465
+ cap = None
466
+ continue
467
+
468
+ # Successfully connected
469
+ actual_fps = cap.get(cv2.CAP_PROP_FPS)
470
+ if actual_fps > 0:
471
+ fps = actual_fps
472
+ tracker = _create_tracker(fps)
473
+
474
+ retry_count = 0
475
+ print(f"Connected to live stream at {fps:.1f} FPS")
476
+
477
+ # Read frame
478
+ ret, frame = cap.read()
479
+ if not ret:
480
+ retry_count += 1
481
+ cap.release()
482
+ cap = None
483
+ continue
484
+
485
+ # Reset retry count on successful read
486
+ retry_count = 0
487
+
488
+ # Process frame
489
+ annotated_frame = _process_single_frame(
490
+ frame=frame,
491
+ processor=processor,
492
+ tracker=tracker,
493
+ annotators=annotators,
494
+ tracking=tracking,
495
+ job=job,
496
+ session_data=session_data,
497
+ frame_idx=frame_idx,
498
+ fps=fps
499
+ )
500
+
501
+ # Stream frame for live display
502
+ _stream_frame(frame_queue, annotated_frame, frame_idx)
503
+
504
+ # Update job stats
505
+ job.frames_processed = frame_idx
506
+
507
+ # Periodic updates (every 10 frames)
508
+ if frame_idx % PROC_CONFIG.PROGRESS_UPDATE_INTERVAL == 0:
509
+ last_event_count = _handle_live_stream_updates(
510
+ job=job,
511
+ session_data=session_data,
512
+ firebase=firebase,
513
+ frame_idx=frame_idx,
514
+ last_event_count=last_event_count
515
+ )
516
+
517
+ # Save to Firebase periodically (every 30 seconds)
518
+ if (datetime.now() - last_save_time).total_seconds() > 30:
519
+ firebase.save_session(session_data, update_events=False, camera_role=job.camera_role)
520
+ last_save_time = datetime.now()
521
+
522
+ frame_idx += 1
523
+
524
+ finally:
525
+ if cap:
526
+ cap.release()
527
+ print(f"Live stream processing stopped for {job.camera_role}. Frames processed: {frame_idx}")
528
+
529
+
530
+ def _handle_live_stream_updates(
531
+ job: ProcessingJob,
532
+ session_data: SessionData,
533
+ firebase: FirebaseService,
534
+ frame_idx: int,
535
+ last_event_count: int
536
+ ) -> int:
537
+ """
538
+ Handle periodic updates for live stream processing.
539
+
540
+ Returns:
541
+ Updated event count
542
+ """
543
+ # For live streams, emit a "live" status instead of progress percentage
544
+ socketio.emit(
545
+ 'processing_progress',
546
+ {
547
+ 'session_id': job.session_id,
548
+ 'progress': -1, # -1 indicates live stream
549
+ 'camera_role': job.camera_role,
550
+ 'frames_processed': frame_idx,
551
+ 'is_live': True
552
+ },
553
+ room=job.session_id,
554
+ namespace='/'
555
+ )
556
+
557
+ # Check for new events
558
+ current_count = len(session_data.events)
559
+ if current_count > last_event_count:
560
+ # Emit new events
561
+ for event in session_data.events[last_event_count:]:
562
+ _emit_vehicle_event(job, event)
563
+ firebase.save_event(job.session_id, event)
564
+
565
+ # Emit updated statistics
566
+ _emit_statistics_update(job, session_data.get_statistics())
567
+
568
+ return current_count
569
+
570
+
571
  def _process_single_frame(
572
  frame: np.ndarray,
573
  processor: VideoProcessor,
app/static/css/style.css CHANGED
@@ -296,6 +296,15 @@ body {
296
  background-color: #16a34a;
297
  }
298
 
 
 
 
 
 
 
 
 
 
299
  .btn-outline {
300
  background-color: transparent;
301
  border: 2px solid var(--border-color);
@@ -1400,6 +1409,42 @@ body {
1400
  box-shadow: 0 1px 2px rgba(0,0,0,0.1);
1401
  }
1402
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1403
  /* Config Card */
1404
  .source-config-card {
1405
  background-color: #f8fafc;
@@ -1611,6 +1656,12 @@ body {
1611
  color: var(--primary-color);
1612
  }
1613
 
 
 
 
 
 
 
1614
  .camera-preview-card .video-wrapper {
1615
  min-height: 280px;
1616
  aspect-ratio: 3/2;
 
296
  background-color: #16a34a;
297
  }
298
 
299
+ .btn-danger {
300
+ background-color: var(--danger-color);
301
+ color: white;
302
+ }
303
+
304
+ .btn-danger:hover {
305
+ background-color: #dc2626;
306
+ }
307
+
308
  .btn-outline {
309
  background-color: transparent;
310
  border: 2px solid var(--border-color);
 
1409
  box-shadow: 0 1px 2px rgba(0,0,0,0.1);
1410
  }
1411
 
1412
+ /* Source Type Toggle */
1413
+ .source-toggle {
1414
+ display: flex;
1415
+ background-color: var(--background-color);
1416
+ padding: 0.25rem;
1417
+ border-radius: 0.5rem;
1418
+ margin-bottom: 0.75rem;
1419
+ }
1420
+
1421
+ .toggle-btn {
1422
+ flex: 1;
1423
+ border: none;
1424
+ background: transparent;
1425
+ padding: 0.5rem 0.75rem;
1426
+ border-radius: 0.375rem;
1427
+ color: var(--text-secondary);
1428
+ font-weight: 500;
1429
+ font-size: 0.875rem;
1430
+ cursor: pointer;
1431
+ transition: all 0.2s ease;
1432
+ display: flex;
1433
+ align-items: center;
1434
+ justify-content: center;
1435
+ gap: 0.375rem;
1436
+ }
1437
+
1438
+ .toggle-btn:hover {
1439
+ color: var(--text-primary);
1440
+ }
1441
+
1442
+ .toggle-btn.active {
1443
+ background-color: #ffffff;
1444
+ color: var(--primary-color);
1445
+ box-shadow: 0 1px 2px rgba(0,0,0,0.1);
1446
+ }
1447
+
1448
  /* Config Card */
1449
  .source-config-card {
1450
  background-color: #f8fafc;
 
1656
  color: var(--primary-color);
1657
  }
1658
 
1659
+ .camera-status.live {
1660
+ color: var(--danger-color);
1661
+ font-weight: 600;
1662
+ animation: pulse 1.5s ease-in-out infinite;
1663
+ }
1664
+
1665
  .camera-preview-card .video-wrapper {
1666
  min-height: 280px;
1667
  aspect-ratio: 3/2;
app/static/js/dashboard.js CHANGED
@@ -19,9 +19,15 @@ const PROCESSING_STATUS = Object.freeze({
19
  PENDING: 'pending',
20
  PROCESSING: 'processing',
21
  COMPLETED: 'completed',
 
22
  ERROR: 'error'
23
  });
24
 
 
 
 
 
 
25
  const CONFIG = Object.freeze({
26
  MAX_LOG_EVENTS: 50,
27
  NOTIFICATION_DURATION_MS: 5000,
@@ -88,7 +94,8 @@ const createCameraConfig = () => ({
88
  hasLine: false,
89
  videoName: '',
90
  processingStatus: PROCESSING_STATUS.PENDING,
91
- progress: 0
 
92
  });
93
 
94
  /**
@@ -488,6 +495,8 @@ class WorkbenchManager {
488
  constructor() {
489
  this.currentCameraRole = CAMERA_ROLES.ENTRY;
490
  this.sessionCompleted = false;
 
 
491
 
492
  // LineDrawer instances per camera
493
  this.lineDrawers = {
@@ -538,13 +547,21 @@ class WorkbenchManager {
538
  videoUpload: getElement('video-upload'),
539
  fileName: getElement('file-name-display'),
540
  startBtn: getElement('btn-start-analysis'),
 
541
  clearLineBtn: getElement('clear-line-btn'),
542
  configStatusText: getElement('config-status-text'),
543
  configStatusDot: getElement('config-status-dot'),
544
  processingStatus: getElement('processing-status'),
545
  locationInput: getElement('location-input'),
546
  modeLabel: getElement('mode-label'),
547
- liveBadge: getElement('live-badge')
 
 
 
 
 
 
 
548
  };
549
  }
550
 
@@ -559,7 +576,7 @@ class WorkbenchManager {
559
  // -------------------------------------------------------------------------
560
 
561
  setupEventListeners() {
562
- const { videoUpload, clearLineBtn, startBtn } = this.globalElements;
563
 
564
  if (videoUpload) {
565
  videoUpload.addEventListener('change', (e) => this.handleVideoUpload(e));
@@ -570,6 +587,132 @@ class WorkbenchManager {
570
  if (startBtn) {
571
  startBtn.addEventListener('click', () => this.startAnalysis());
572
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
573
  }
574
 
575
  // -------------------------------------------------------------------------
@@ -639,21 +782,21 @@ class WorkbenchManager {
639
  const statusEl = this.cameraElements[role].statusText;
640
  if (!statusEl) return;
641
 
642
- const statusConfig = {
643
- [PROCESSING_STATUS.PROCESSING]: {
644
- text: `Processing ${config.progress}%`,
645
- className: 'camera-status processing'
646
- },
647
- [PROCESSING_STATUS.COMPLETED]: {
648
- text: 'Completed',
649
- className: 'camera-status completed'
650
  }
651
- };
652
-
653
- if (statusConfig[config.processingStatus]) {
654
- const { text, className } = statusConfig[config.processingStatus];
655
- statusEl.textContent = text;
656
- statusEl.className = className;
657
  } else if (config.hasLine) {
658
  statusEl.textContent = 'Ready';
659
  statusEl.className = 'camera-status ready';
@@ -669,10 +812,16 @@ class WorkbenchManager {
669
  updateCameraProgress(role) {
670
  const config = this.cameraConfigs[role];
671
  const { progressBar, progressText } = this.cameraElements[role];
672
- const progress = config.progress || 0;
673
 
674
- if (progressBar) progressBar.style.width = `${progress}%`;
675
- if (progressText) progressText.textContent = `${progress}%`;
 
 
 
 
 
 
 
676
  }
677
 
678
  // -------------------------------------------------------------------------
@@ -704,7 +853,14 @@ class WorkbenchManager {
704
  const config = this.cameraConfigs[data.camera_role];
705
  if (!config) return;
706
 
707
- config.progress = data.progress;
 
 
 
 
 
 
 
708
  if (config.processingStatus === PROCESSING_STATUS.PENDING) {
709
  config.processingStatus = PROCESSING_STATUS.PROCESSING;
710
  }
@@ -719,11 +875,24 @@ class WorkbenchManager {
719
 
720
  checkAllCompleted() {
721
  const allDone = Object.values(CAMERA_ROLES).every(
722
- role => this.cameraConfigs[role].processingStatus === PROCESSING_STATUS.COMPLETED
 
723
  );
724
 
 
 
 
 
 
 
 
 
 
 
 
725
  if (allDone) {
726
  this.sessionCompleted = true;
 
727
 
728
  // Disable continue mode
729
  if (window.dashboardManager) {
@@ -732,6 +901,7 @@ class WorkbenchManager {
732
 
733
  this.globalElements.startBtn.innerHTML = `<i data-feather="check"></i> Analysis Complete`;
734
  this.globalElements.startBtn.disabled = true;
 
735
  this.globalElements.processingStatus.classList.add('hidden');
736
  feather.replace();
737
  }
@@ -922,6 +1092,13 @@ class WorkbenchManager {
922
  if (data.success) {
923
  if (!sessionId) sessionId = data.session_id;
924
  this.cameraConfigs[role].processingStatus = PROCESSING_STATUS.PROCESSING;
 
 
 
 
 
 
 
925
  this.updateCameraStatusText(role);
926
  this.setAnalysisMode(true, role);
927
  } else {
@@ -929,6 +1106,11 @@ class WorkbenchManager {
929
  }
930
  }
931
 
 
 
 
 
 
932
  // Join socket room
933
  if (window.dashboardManager?.socket && sessionId) {
934
  window.dashboardManager.sessionId = sessionId;
@@ -963,6 +1145,54 @@ class WorkbenchManager {
963
  this.globalElements.modeLabel.textContent = 'Live Analysis';
964
  }
965
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
966
  // -------------------------------------------------------------------------
967
  // Session Management
968
  // -------------------------------------------------------------------------
 
19
  PENDING: 'pending',
20
  PROCESSING: 'processing',
21
  COMPLETED: 'completed',
22
+ STOPPED: 'stopped',
23
  ERROR: 'error'
24
  });
25
 
26
+ const SOURCE_TYPE = Object.freeze({
27
+ FILE: 'file',
28
+ STREAM: 'stream'
29
+ });
30
+
31
  const CONFIG = Object.freeze({
32
  MAX_LOG_EVENTS: 50,
33
  NOTIFICATION_DURATION_MS: 5000,
 
94
  hasLine: false,
95
  videoName: '',
96
  processingStatus: PROCESSING_STATUS.PENDING,
97
+ progress: 0,
98
+ isLiveStream: false
99
  });
100
 
101
  /**
 
495
  constructor() {
496
  this.currentCameraRole = CAMERA_ROLES.ENTRY;
497
  this.sessionCompleted = false;
498
+ this.sourceType = SOURCE_TYPE.FILE; // Current source type selection
499
+ this.hasLiveStreams = false; // Track if any camera is using live stream
500
 
501
  // LineDrawer instances per camera
502
  this.lineDrawers = {
 
547
  videoUpload: getElement('video-upload'),
548
  fileName: getElement('file-name-display'),
549
  startBtn: getElement('btn-start-analysis'),
550
+ stopBtn: getElement('btn-stop-analysis'),
551
  clearLineBtn: getElement('clear-line-btn'),
552
  configStatusText: getElement('config-status-text'),
553
  configStatusDot: getElement('config-status-dot'),
554
  processingStatus: getElement('processing-status'),
555
  locationInput: getElement('location-input'),
556
  modeLabel: getElement('mode-label'),
557
+ liveBadge: getElement('live-badge'),
558
+ // Source type elements
559
+ sourceFileBtn: getElement('source-file-btn'),
560
+ sourceStreamBtn: getElement('source-stream-btn'),
561
+ fileSourceConfig: getElement('file-source-config'),
562
+ streamSourceConfig: getElement('stream-source-config'),
563
+ streamUrlInput: getElement('stream-url-input'),
564
+ streamStatusDisplay: getElement('stream-status-display')
565
  };
566
  }
567
 
 
576
  // -------------------------------------------------------------------------
577
 
578
  setupEventListeners() {
579
+ const { videoUpload, clearLineBtn, startBtn, stopBtn } = this.globalElements;
580
 
581
  if (videoUpload) {
582
  videoUpload.addEventListener('change', (e) => this.handleVideoUpload(e));
 
587
  if (startBtn) {
588
  startBtn.addEventListener('click', () => this.startAnalysis());
589
  }
590
+ if (stopBtn) {
591
+ stopBtn.addEventListener('click', () => this.stopAnalysis());
592
+ }
593
+ }
594
+
595
+ // -------------------------------------------------------------------------
596
+ // Source Type Management
597
+ // -------------------------------------------------------------------------
598
+
599
+ /**
600
+ * Switch between file upload and live stream source types
601
+ * @param {string} type - 'file' or 'stream'
602
+ */
603
+ setSourceType(type) {
604
+ this.sourceType = type;
605
+ const { sourceFileBtn, sourceStreamBtn, fileSourceConfig, streamSourceConfig } = this.globalElements;
606
+
607
+ if (type === SOURCE_TYPE.FILE) {
608
+ sourceFileBtn?.classList.add('active');
609
+ sourceStreamBtn?.classList.remove('active');
610
+ fileSourceConfig?.classList.remove('hidden');
611
+ streamSourceConfig?.classList.add('hidden');
612
+ } else {
613
+ sourceFileBtn?.classList.remove('active');
614
+ sourceStreamBtn?.classList.add('active');
615
+ fileSourceConfig?.classList.add('hidden');
616
+ streamSourceConfig?.classList.remove('hidden');
617
+ }
618
+
619
+ feather.replace();
620
+ }
621
+
622
+ /**
623
+ * Connect to a live stream and capture first frame
624
+ */
625
+ async connectStream() {
626
+ const { streamUrlInput, streamStatusDisplay } = this.globalElements;
627
+ const streamUrl = streamUrlInput?.value?.trim();
628
+
629
+ if (!streamUrl) {
630
+ alert('Please enter a stream URL');
631
+ return;
632
+ }
633
+
634
+ // Validate URL format
635
+ if (!streamUrl.match(/^(rtsp|http|https|rtmp):\/\//i)) {
636
+ alert('Invalid URL. Must start with rtsp://, http://, https://, or rtmp://');
637
+ return;
638
+ }
639
+
640
+ streamStatusDisplay.textContent = 'Connecting...';
641
+
642
+ try {
643
+ const response = await fetch('/setup/configure-stream', {
644
+ method: 'POST',
645
+ headers: { 'Content-Type': 'application/json' },
646
+ body: JSON.stringify({
647
+ stream_url: streamUrl,
648
+ camera_role: this.currentCameraRole
649
+ })
650
+ });
651
+ const data = await response.json();
652
+
653
+ if (data.success) {
654
+ const config = this.cameraConfigs[this.currentCameraRole];
655
+ config.hasVideo = true;
656
+ config.videoName = 'Live Stream';
657
+ config.isLiveStream = true;
658
+ this.hasLiveStreams = true;
659
+
660
+ streamStatusDisplay.textContent = 'Connected';
661
+ streamStatusDisplay.style.color = 'var(--success-color)';
662
+
663
+ // Load the captured frame for line drawing
664
+ await this.loadStreamFrame(this.currentCameraRole, data.frame, data.line_points);
665
+ this.updateStartButtonState();
666
+ } else {
667
+ streamStatusDisplay.textContent = data.error || 'Connection failed';
668
+ streamStatusDisplay.style.color = 'var(--danger-color)';
669
+ }
670
+ } catch (error) {
671
+ streamStatusDisplay.textContent = 'Connection error';
672
+ streamStatusDisplay.style.color = 'var(--danger-color)';
673
+ console.error('Stream connection error:', error);
674
+ }
675
+ }
676
+
677
+ /**
678
+ * Load a stream frame for line drawing
679
+ */
680
+ async loadStreamFrame(role, frameBase64, existingLinePoints) {
681
+ const camEls = this.cameraElements[role];
682
+ const config = this.cameraConfigs[role];
683
+
684
+ // Update visibility
685
+ if (camEls.placeholder) camEls.placeholder.style.display = 'none';
686
+ if (camEls.canvas) camEls.canvas.style.display = 'block';
687
+ if (camEls.liveFeed) camEls.liveFeed.classList.add('hidden');
688
+
689
+ // Create LineDrawer
690
+ const canvasId = `${role.toLowerCase()}-canvas`;
691
+ this.lineDrawers[role] = new LineDrawer(canvasId);
692
+ await this.lineDrawers[role].loadImage('data:image/jpeg;base64,' + frameBase64);
693
+
694
+ // Restore existing line if present
695
+ if (existingLinePoints) {
696
+ this.lineDrawers[role].setLinePoints(existingLinePoints);
697
+ config.hasLine = true;
698
+ this.updateCameraStatusText(role);
699
+ this.updateStartButtonState();
700
+ }
701
+
702
+ // Set up line completion callback
703
+ this.lineDrawers[role].onLineComplete = (points) => {
704
+ config.hasLine = true;
705
+ this.updateCameraStatusText(role);
706
+ this.updateStartButtonState();
707
+ this.saveLine(role, points);
708
+
709
+ if (this.currentCameraRole === role) {
710
+ this.globalElements.clearLineBtn.disabled = false;
711
+ this.updateSidebarStatus();
712
+ }
713
+ };
714
+
715
+ this.updateCameraStatusText(role);
716
  }
717
 
718
  // -------------------------------------------------------------------------
 
782
  const statusEl = this.cameraElements[role].statusText;
783
  if (!statusEl) return;
784
 
785
+ // Handle processing status
786
+ if (config.processingStatus === PROCESSING_STATUS.PROCESSING) {
787
+ if (config.isLiveStream || config.progress === -1) {
788
+ statusEl.textContent = 'LIVE';
789
+ statusEl.className = 'camera-status live';
790
+ } else {
791
+ statusEl.textContent = `Processing ${config.progress}%`;
792
+ statusEl.className = 'camera-status processing';
793
  }
794
+ } else if (config.processingStatus === PROCESSING_STATUS.COMPLETED) {
795
+ statusEl.textContent = 'Completed';
796
+ statusEl.className = 'camera-status completed';
797
+ } else if (config.processingStatus === PROCESSING_STATUS.STOPPED) {
798
+ statusEl.textContent = 'Stopped';
799
+ statusEl.className = 'camera-status completed';
800
  } else if (config.hasLine) {
801
  statusEl.textContent = 'Ready';
802
  statusEl.className = 'camera-status ready';
 
812
  updateCameraProgress(role) {
813
  const config = this.cameraConfigs[role];
814
  const { progressBar, progressText } = this.cameraElements[role];
 
815
 
816
+ // Handle live stream display
817
+ if (config.isLiveStream || config.progress === -1) {
818
+ if (progressBar) progressBar.style.width = '100%';
819
+ if (progressText) progressText.textContent = 'LIVE';
820
+ } else {
821
+ const progress = config.progress || 0;
822
+ if (progressBar) progressBar.style.width = `${progress}%`;
823
+ if (progressText) progressText.textContent = `${progress}%`;
824
+ }
825
  }
826
 
827
  // -------------------------------------------------------------------------
 
853
  const config = this.cameraConfigs[data.camera_role];
854
  if (!config) return;
855
 
856
+ // Handle live stream progress differently
857
+ if (data.is_live || data.progress === -1) {
858
+ config.progress = -1; // -1 indicates live stream
859
+ config.isLiveStream = true;
860
+ } else {
861
+ config.progress = data.progress;
862
+ }
863
+
864
  if (config.processingStatus === PROCESSING_STATUS.PENDING) {
865
  config.processingStatus = PROCESSING_STATUS.PROCESSING;
866
  }
 
875
 
876
  checkAllCompleted() {
877
  const allDone = Object.values(CAMERA_ROLES).every(
878
+ role => this.cameraConfigs[role].processingStatus === PROCESSING_STATUS.COMPLETED ||
879
+ this.cameraConfigs[role].processingStatus === PROCESSING_STATUS.STOPPED
880
  );
881
 
882
+ const anyProcessing = Object.values(CAMERA_ROLES).some(
883
+ role => this.cameraConfigs[role].processingStatus === PROCESSING_STATUS.PROCESSING
884
+ );
885
+
886
+ // Show/hide stop button based on processing state and live streams
887
+ if (anyProcessing && this.hasLiveStreams) {
888
+ this.globalElements.stopBtn?.classList.remove('hidden');
889
+ } else {
890
+ this.globalElements.stopBtn?.classList.add('hidden');
891
+ }
892
+
893
  if (allDone) {
894
  this.sessionCompleted = true;
895
+ this.hasLiveStreams = false;
896
 
897
  // Disable continue mode
898
  if (window.dashboardManager) {
 
901
 
902
  this.globalElements.startBtn.innerHTML = `<i data-feather="check"></i> Analysis Complete`;
903
  this.globalElements.startBtn.disabled = true;
904
+ this.globalElements.stopBtn?.classList.add('hidden');
905
  this.globalElements.processingStatus.classList.add('hidden');
906
  feather.replace();
907
  }
 
1092
  if (data.success) {
1093
  if (!sessionId) sessionId = data.session_id;
1094
  this.cameraConfigs[role].processingStatus = PROCESSING_STATUS.PROCESSING;
1095
+
1096
+ // Track if this is a live stream
1097
+ if (data.is_live_stream) {
1098
+ this.cameraConfigs[role].isLiveStream = true;
1099
+ this.hasLiveStreams = true;
1100
+ }
1101
+
1102
  this.updateCameraStatusText(role);
1103
  this.setAnalysisMode(true, role);
1104
  } else {
 
1106
  }
1107
  }
1108
 
1109
+ // Show stop button for live streams
1110
+ if (this.hasLiveStreams) {
1111
+ this.globalElements.stopBtn?.classList.remove('hidden');
1112
+ }
1113
+
1114
  // Join socket room
1115
  if (window.dashboardManager?.socket && sessionId) {
1116
  window.dashboardManager.sessionId = sessionId;
 
1145
  this.globalElements.modeLabel.textContent = 'Live Analysis';
1146
  }
1147
 
1148
+ /**
1149
+ * Stop all running analysis (for live streams)
1150
+ */
1151
+ async stopAnalysis() {
1152
+ const { stopBtn, startBtn, processingStatus } = this.globalElements;
1153
+
1154
+ stopBtn.disabled = true;
1155
+ stopBtn.innerHTML = `<i data-feather="loader" class="spin"></i> Stopping...`;
1156
+ feather.replace();
1157
+
1158
+ try {
1159
+ const response = await fetch('/setup/stop-processing', {
1160
+ method: 'POST',
1161
+ headers: { 'Content-Type': 'application/json' },
1162
+ body: JSON.stringify({}) // Stop all cameras
1163
+ });
1164
+ const data = await response.json();
1165
+
1166
+ if (data.success) {
1167
+ console.log('Stop signal sent successfully');
1168
+
1169
+ // Update UI - the actual completion will come via socket events
1170
+ Object.values(CAMERA_ROLES).forEach(role => {
1171
+ const config = this.cameraConfigs[role];
1172
+ if (config.processingStatus === PROCESSING_STATUS.PROCESSING) {
1173
+ config.processingStatus = PROCESSING_STATUS.STOPPED;
1174
+ this.updateCameraStatusText(role);
1175
+ }
1176
+ });
1177
+
1178
+ this.sessionCompleted = true;
1179
+ this.hasLiveStreams = false;
1180
+
1181
+ stopBtn.classList.add('hidden');
1182
+ startBtn.innerHTML = `<i data-feather="check"></i> Analysis Stopped`;
1183
+ startBtn.disabled = true;
1184
+ processingStatus.classList.add('hidden');
1185
+ }
1186
+ } catch (error) {
1187
+ console.error('Failed to stop analysis:', error);
1188
+ alert('Failed to stop analysis: ' + error.message);
1189
+ }
1190
+
1191
+ stopBtn.disabled = false;
1192
+ stopBtn.innerHTML = `<i data-feather="square"></i> Stop Analysis`;
1193
+ feather.replace();
1194
+ }
1195
+
1196
  // -------------------------------------------------------------------------
1197
  // Session Management
1198
  // -------------------------------------------------------------------------
app/templates/dashboard.html CHANGED
@@ -38,7 +38,20 @@
38
  </div>
39
 
40
  <div class="config-group mt-2">
41
- <label class="form-label">Video Source</label>
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  <input type="file" id="video-upload" accept="video/*" class="form-input" style="display: none;">
43
  <button class="btn btn-outline btn-full" onclick="document.getElementById('video-upload').click()">
44
  <i data-feather="upload"></i> Upload Video File
@@ -46,10 +59,20 @@
46
  <div id="file-name-display" class="file-name text-truncate mt-1">No file selected</div>
47
  </div>
48
 
 
 
 
 
 
 
 
 
 
 
49
  <div class="config-group">
50
  <label class="form-label">Instructions</label>
51
  <ul class="instruction-list">
52
- <li>1. Upload a video.</li>
53
  <li>2. Draw the crossing line on the canvas.</li>
54
  <li>3. Repeat for other camera.</li>
55
  </ul>
@@ -63,6 +86,9 @@
63
  <button id="btn-start-analysis" class="btn btn-primary btn-full btn-lg" disabled>
64
  <i data-feather="play"></i> Start Analysis
65
  </button>
 
 
 
66
  <div id="processing-status" class="hidden mt-2">
67
  <div class="dual-progress">
68
  <div class="progress-item">
 
38
  </div>
39
 
40
  <div class="config-group mt-2">
41
+ <label class="form-label">Source Type</label>
42
+ <div class="source-toggle">
43
+ <button class="toggle-btn active" id="source-file-btn" onclick="window.workbenchManager?.setSourceType('file')">
44
+ <i data-feather="upload"></i> File
45
+ </button>
46
+ <button class="toggle-btn" id="source-stream-btn" onclick="window.workbenchManager?.setSourceType('stream')">
47
+ <i data-feather="video"></i> Live Stream
48
+ </button>
49
+ </div>
50
+ </div>
51
+
52
+ <!-- File Upload (shown when source type is 'file') -->
53
+ <div class="config-group" id="file-source-config">
54
+ <label class="form-label">Video File</label>
55
  <input type="file" id="video-upload" accept="video/*" class="form-input" style="display: none;">
56
  <button class="btn btn-outline btn-full" onclick="document.getElementById('video-upload').click()">
57
  <i data-feather="upload"></i> Upload Video File
 
59
  <div id="file-name-display" class="file-name text-truncate mt-1">No file selected</div>
60
  </div>
61
 
62
+ <!-- Live Stream URL (shown when source type is 'stream') -->
63
+ <div class="config-group hidden" id="stream-source-config">
64
+ <label class="form-label">Stream URL</label>
65
+ <input type="text" id="stream-url-input" class="form-input" placeholder="rtsp://192.168.1.100:554/stream">
66
+ <button class="btn btn-outline btn-full mt-1" id="btn-connect-stream" onclick="window.workbenchManager?.connectStream()">
67
+ <i data-feather="link"></i> Connect to Stream
68
+ </button>
69
+ <div id="stream-status-display" class="file-name text-truncate mt-1">Not connected</div>
70
+ </div>
71
+
72
  <div class="config-group">
73
  <label class="form-label">Instructions</label>
74
  <ul class="instruction-list">
75
+ <li>1. Select source type and configure.</li>
76
  <li>2. Draw the crossing line on the canvas.</li>
77
  <li>3. Repeat for other camera.</li>
78
  </ul>
 
86
  <button id="btn-start-analysis" class="btn btn-primary btn-full btn-lg" disabled>
87
  <i data-feather="play"></i> Start Analysis
88
  </button>
89
+ <button id="btn-stop-analysis" class="btn btn-danger btn-full btn-lg hidden mt-2">
90
+ <i data-feather="square"></i> Stop Analysis
91
+ </button>
92
  <div id="processing-status" class="hidden mt-2">
93
  <div class="dual-progress">
94
  <div class="progress-item">