Spaces:
Sleeping
Sleeping
Upload 9 files
Browse files- Dockerfile +27 -0
- app.py +132 -0
- config.py +65 -0
- requirements.txt +6 -0
- utils.py +274 -0
- vehicle_tracker.py +360 -0
- web_app.html +523 -0
- yolo11n.pt +3 -0
- yolo11x.pt +3 -0
Dockerfile
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM python:3.11-slim

# Install system dependencies for OpenCV and FFmpeg.
# NOTE: Debian bookworm (the base of python:3.11-slim) dropped the
# transitional libgl1-mesa-glx package; libgl1 is the package that
# actually provides libGL.so.1 needed by OpenCV.
RUN apt-get update && apt-get install -y --no-install-recommends \
    libgl1 \
    libglib2.0-0 \
    ffmpeg \
    && rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /app

# Create necessary directories with write permissions
# (Hugging Face Spaces runs the container as a non-root user)
RUN mkdir -p uploads outputs && chmod 777 uploads outputs

# Copy requirements first to leverage the Docker layer cache
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY . .

# Expose Hugging Face default port
EXPOSE 7860

# Run the application
CMD ["python", "app.py"]
|
app.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Hugging Face Spaces - Vehicle Tracking App
|
| 3 |
+
"""
|
| 4 |
+
from flask import Flask, request, jsonify, send_file, send_from_directory
|
| 5 |
+
from werkzeug.utils import secure_filename
|
| 6 |
+
import os
|
| 7 |
+
import subprocess
|
| 8 |
+
import threading
|
| 9 |
+
import time
|
| 10 |
+
import shutil
|
| 11 |
+
|
| 12 |
+
app = Flask(__name__)
|
| 13 |
+
|
| 14 |
+
# Configuration
|
| 15 |
+
UPLOAD_FOLDER = 'uploads'
|
| 16 |
+
OUTPUT_FOLDER = 'outputs'
|
| 17 |
+
ALLOWED_EXTENSIONS = {'mp4', 'avi', 'mov', 'mkv'}
|
| 18 |
+
|
| 19 |
+
# Create folders
|
| 20 |
+
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
|
| 21 |
+
os.makedirs(OUTPUT_FOLDER, exist_ok=True)
|
| 22 |
+
|
| 23 |
+
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
|
| 24 |
+
app.config['OUTPUT_FOLDER'] = OUTPUT_FOLDER
|
| 25 |
+
app.config['MAX_CONTENT_LENGTH'] = 500 * 1024 * 1024 # 500 MB limit
|
| 26 |
+
|
| 27 |
+
# Global status
|
| 28 |
+
processing_status = {
|
| 29 |
+
'status': 'idle',
|
| 30 |
+
'progress': 0,
|
| 31 |
+
'message': '',
|
| 32 |
+
'output_file': None
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
def allowed_file(filename):
    """Return True if *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
|
| 37 |
+
|
| 38 |
+
def process_video_background(input_path, output_path):
    """Run vehicle_tracker.py on *input_path* in a subprocess.

    Updates the module-level ``processing_status`` dict as "Progress:" lines
    appear on the child's stdout, and records success or failure when the
    child exits.

    Args:
        input_path: Path of the uploaded video.
        output_path: Path where the tracker writes the annotated video.
    """
    global processing_status
    try:
        processing_status['status'] = 'processing'
        processing_status['progress'] = 0
        processing_status['message'] = 'Starting processing...'

        # Run tracker headless; -u keeps the child's stdout unbuffered so
        # progress lines arrive as they are printed.
        cmd = [
            'python', '-u', 'vehicle_tracker.py',
            '--video', input_path,
            '--output', output_path,
            '--no-display'
        ]

        # Merge stderr into stdout: reading two separate PIPEs sequentially
        # can deadlock once the unread pipe's OS buffer fills up.
        process = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            bufsize=1,
            universal_newlines=True
        )

        # Keep the last few output lines so errors can be reported.
        tail = []
        for line in process.stdout:
            tail.append(line)
            if len(tail) > 20:
                tail.pop(0)
            if "Progress:" in line:
                # Expected format: "Progress: 42.0% | Frame: ... | FPS: ..."
                try:
                    parts = line.split('|')[0]
                    percent = float(parts.split(':')[1].strip().replace('%', ''))
                    processing_status['progress'] = percent
                    processing_status['message'] = f'Processing: {percent:.1f}%'
                except (IndexError, ValueError):
                    # Malformed progress line; keep the last known value.
                    pass

        process.wait()

        if process.returncode == 0:
            processing_status['status'] = 'complete'
            processing_status['progress'] = 100
            processing_status['message'] = 'Processing complete!'
            processing_status['output_file'] = os.path.basename(output_path)
        else:
            stderr = ''.join(tail)
            processing_status['status'] = 'error'
            processing_status['message'] = f'Error: {stderr[:200]}'

    except Exception as e:
        processing_status['status'] = 'error'
        processing_status['message'] = f'System Error: {str(e)}'
|
| 87 |
+
|
| 88 |
+
@app.route('/')
def index():
    """Serve the single-page front-end."""
    return send_file('web_app.html')
|
| 91 |
+
|
| 92 |
+
@app.route('/upload', methods=['POST'])
def upload_file():
    """Accept a video upload and start background processing.

    Resets the global status, validates the uploaded file, saves it under a
    timestamped name, and launches the tracker in a daemon thread.

    Returns:
        JSON with the stored filename on success, or an error with HTTP 400.
    """
    global processing_status
    processing_status = {'status': 'idle', 'progress': 0, 'message': '', 'output_file': None}

    if 'video' not in request.files:
        return jsonify({'error': 'No file part'}), 400
    file = request.files['video']
    if file.filename == '':
        return jsonify({'error': 'No selected file'}), 400

    if file and allowed_file(file.filename):
        filename = secure_filename(file.filename)
        # Prefix with a timestamp to avoid collisions while keeping the
        # sanitized original name (the original code dropped `filename`
        # entirely, so two uploads in the same second collided).
        timestamp = int(time.time())
        input_filename = f"{timestamp}_{filename}"
        input_path = os.path.join(app.config['UPLOAD_FOLDER'], input_filename)
        file.save(input_path)

        output_filename = f"tracked_{input_filename}"
        output_path = os.path.join(app.config['OUTPUT_FOLDER'], output_filename)

        thread = threading.Thread(target=process_video_background, args=(input_path, output_path))
        thread.daemon = True
        thread.start()

        return jsonify({'success': True, 'filename': input_filename})

    return jsonify({'error': 'Invalid file type'}), 400
|
| 121 |
+
|
| 122 |
+
@app.route('/status')
def get_status():
    """Return the current background-processing status as JSON."""
    return jsonify(processing_status)
|
| 125 |
+
|
| 126 |
+
@app.route('/download/<filename>')
def download_file(filename):
    """Serve a processed video from OUTPUT_FOLDER as a download.

    send_from_directory rejects path-traversal attempts in *filename*.
    """
    return send_from_directory(app.config['OUTPUT_FOLDER'], filename, as_attachment=True)
|
| 129 |
+
|
| 130 |
+
if __name__ == '__main__':
    # Hugging Face Spaces expects the app to listen on port 7860.
    app.run(host='0.0.0.0', port=7860)
|
config.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Configuration file for Vehicle Tracking and Counting System
|
| 3 |
+
Customize these settings based on your requirements
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# YOLO Model Configuration
|
| 7 |
+
MODEL_NAME = "yolo11x.pt" # Using Extra Large model for maximum accuracy
|
| 8 |
+
CONFIDENCE_THRESHOLD = 0.25 # Lower threshold to catch more vehicles
|
| 9 |
+
IOU_THRESHOLD = 0.5 # IoU threshold for NMS
|
| 10 |
+
|
| 11 |
+
# Vehicle Classes (COCO dataset class IDs)
|
| 12 |
+
VEHICLE_CLASSES = {
|
| 13 |
+
2: "car",
|
| 14 |
+
3: "motorcycle",
|
| 15 |
+
5: "bus",
|
| 16 |
+
7: "truck"
|
| 17 |
+
}
|
| 18 |
+
|
| 19 |
+
# Counting Line Configuration
|
| 20 |
+
# Orientation: "horizontal" (for up/down traffic) or "vertical" (for left/right traffic)
|
| 21 |
+
COUNTING_LINE_ORIENTATION = "vertical"
|
| 22 |
+
|
| 23 |
+
# Line position as percentage of frame size (0.0 to 1.0)
|
| 24 |
+
# For horizontal: percentage of height (0.5 = middle height)
|
| 25 |
+
# For vertical: percentage of width (0.5 = middle width)
|
| 26 |
+
COUNTING_LINE_POSITION = 0.5
|
| 27 |
+
|
| 28 |
+
# Or use absolute pixel coordinates (will override percentage if not None)
|
| 29 |
+
# Format: (x1, y1, x2, y2)
|
| 30 |
+
COUNTING_LINE_COORDS = None
|
| 31 |
+
|
| 32 |
+
# Visualization Settings
|
| 33 |
+
COLORS = {
|
| 34 |
+
"car": (0, 255, 0), # Green
|
| 35 |
+
"truck": (255, 0, 0), # Blue
|
| 36 |
+
"bus": (0, 165, 255), # Orange
|
| 37 |
+
"motorcycle": (255, 0, 255) # Magenta
|
| 38 |
+
}
|
| 39 |
+
|
| 40 |
+
LINE_COLOR = (0, 255, 255) # Yellow for counting line
|
| 41 |
+
LINE_THICKNESS = 3
|
| 42 |
+
BOX_THICKNESS = 2
|
| 43 |
+
FONT_SCALE = 0.6
|
| 44 |
+
FONT_THICKNESS = 2
|
| 45 |
+
|
| 46 |
+
# Display Settings
|
| 47 |
+
SHOW_CONFIDENCE = True
|
| 48 |
+
SHOW_TRACK_ID = True
|
| 49 |
+
SHOW_CLASS_NAME = True
|
| 50 |
+
DISPLAY_WINDOW = True # Set to False for headless mode
|
| 51 |
+
|
| 52 |
+
# Video Settings
|
| 53 |
+
VIDEO_INPUT = "input_video.mp4" # Path to input video
|
| 54 |
+
VIDEO_OUTPUT = "output_video.mp4" # Path to save output video
|
| 55 |
+
RESIZE_WIDTH = None # Set to resize video width (None = original size)
|
| 56 |
+
RESIZE_HEIGHT = None # Set to resize video height (None = original size)
|
| 57 |
+
|
| 58 |
+
# Performance Settings
|
| 59 |
+
SKIP_FRAMES = 0 # Process every Nth frame (0 = process all frames)
|
| 60 |
+
USE_GPU = True # Use GPU if available
|
| 61 |
+
|
| 62 |
+
# Statistics Display
|
| 63 |
+
STATS_POSITION = (10, 30) # Position for statistics text
|
| 64 |
+
STATS_COLOR = (255, 255, 255) # White
|
| 65 |
+
STATS_BG_COLOR = (0, 0, 0) # Black background
|
requirements.txt
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
flask
|
| 2 |
+
ultralytics
|
| 3 |
+
opencv-python-headless
|
| 4 |
+
numpy
|
| 5 |
+
torch
|
| 6 |
+
torchvision
|
utils.py
ADDED
|
@@ -0,0 +1,274 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Utility functions for vehicle tracking and counting system
|
| 3 |
+
"""
|
| 4 |
+
import cv2
|
| 5 |
+
import numpy as np
|
| 6 |
+
from typing import Tuple, Dict
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def get_center_point(bbox: Tuple[int, int, int, int]) -> Tuple[int, int]:
    """
    Return the integer midpoint of a bounding box.

    Args:
        bbox: Bounding box coordinates (x1, y1, x2, y2)

    Returns:
        Tuple of (center_x, center_y)
    """
    left, top, right, bottom = bbox
    return int((left + right) / 2), int((top + bottom) / 2)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def check_line_crossing(
|
| 26 |
+
curr_pos: int,
|
| 27 |
+
prev_pos: int,
|
| 28 |
+
line_pos: int,
|
| 29 |
+
margin: int = 5
|
| 30 |
+
) -> bool:
|
| 31 |
+
"""
|
| 32 |
+
Check if an object has crossed the counting line
|
| 33 |
+
|
| 34 |
+
Args:
|
| 35 |
+
curr_pos: Current coordinate (X or Y)
|
| 36 |
+
prev_pos: Previous coordinate (X or Y)
|
| 37 |
+
line_pos: Coordinate of counting line
|
| 38 |
+
margin: Margin of error for line crossing
|
| 39 |
+
|
| 40 |
+
Returns:
|
| 41 |
+
True if object crossed the line (in either direction)
|
| 42 |
+
"""
|
| 43 |
+
# Check crossing in positive direction (e.g., left to right or top to bottom)
|
| 44 |
+
if prev_pos < line_pos - margin and curr_pos >= line_pos + margin:
|
| 45 |
+
return True
|
| 46 |
+
|
| 47 |
+
# Check crossing in negative direction (e.g., right to left or bottom to top)
|
| 48 |
+
if prev_pos > line_pos + margin and curr_pos <= line_pos - margin:
|
| 49 |
+
return True
|
| 50 |
+
|
| 51 |
+
return False
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def draw_counting_line(
    frame: np.ndarray,
    line_coords: Tuple[int, int, int, int],
    color: Tuple[int, int, int],
    thickness: int
) -> np.ndarray:
    """
    Draw the counting line and its label on the frame.

    Args:
        frame: Video frame
        line_coords: Line coordinates (x1, y1, x2, y2)
        color: Line color (B, G, R)
        thickness: Line thickness

    Returns:
        Frame with line drawn
    """
    x1, y1, x2, y2 = line_coords
    cv2.line(frame, (x1, y1), (x2, y2), color, thickness)

    # Keep the label inside the frame: for a vertical counting line the
    # start point has y1 == 0, so the previous fixed "y1 - 10" placed the
    # text above the visible area.
    label_y = y1 - 10 if y1 - 10 > 20 else y1 + 25

    # Add text label for the line
    cv2.putText(
        frame,
        "COUNTING LINE",
        (x1 + 10, label_y),
        cv2.FONT_HERSHEY_SIMPLEX,
        0.7,
        color,
        2
    )

    return frame
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def draw_statistics(
    frame: np.ndarray,
    counts: Dict[str, int],
    position: Tuple[int, int],
    font_scale: float = 0.8,
    color: Tuple[int, int, int] = (255, 255, 255),
    bg_color: Tuple[int, int, int] = (0, 0, 0)
) -> np.ndarray:
    """
    Draw counting statistics on the frame.

    Renders a filled background box, a yellow TOTAL line, then one line
    per vehicle class, mutating *frame* in place.

    Args:
        frame: Video frame
        counts: Dictionary of vehicle counts by class
        position: Position to draw statistics (x, y)
        font_scale: Font scale
        color: Text color (B, G, R)
        bg_color: Background color (B, G, R)

    Returns:
        Frame with statistics drawn
    """
    x, y = position
    line_height = 30

    # Draw background rectangle sized for TOTAL plus one row per class.
    # NOTE(review): width is fixed at 250 px — long class names or large
    # counts may overflow the box.
    total_lines = len(counts) + 1
    cv2.rectangle(
        frame,
        (x - 5, y - 25),
        (x + 250, y + line_height * total_lines + 5),
        bg_color,
        -1
    )

    # Draw total count
    total = sum(counts.values())
    cv2.putText(
        frame,
        f"TOTAL: {total}",
        (x, y),
        cv2.FONT_HERSHEY_SIMPLEX,
        font_scale,
        (0, 255, 255),  # Yellow for total
        2
    )

    # Draw individual class counts, one row each below the total
    y_offset = y + line_height
    for vehicle_class, count in counts.items():
        cv2.putText(
            frame,
            f"{vehicle_class.upper()}: {count}",
            (x, y_offset),
            cv2.FONT_HERSHEY_SIMPLEX,
            font_scale,
            color,
            2
        )
        y_offset += line_height

    return frame
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def draw_bounding_box(
    frame: np.ndarray,
    bbox: Tuple[int, int, int, int],
    track_id: int,
    class_name: str,
    confidence: float,
    color: Tuple[int, int, int],
    thickness: int = 2,
    show_id: bool = True,
    show_confidence: bool = True
) -> np.ndarray:
    """
    Draw bounding box with label on the frame.

    Draws the box, a filled label strip above it containing the class name
    (optionally track ID and confidence), and the box's center point.
    Mutates *frame* in place.

    Args:
        frame: Video frame
        bbox: Bounding box coordinates (x1, y1, x2, y2)
        track_id: Tracking ID
        class_name: Class name
        confidence: Detection confidence
        color: Box color (B, G, R)
        thickness: Box thickness
        show_id: Whether to show track ID
        show_confidence: Whether to show confidence score

    Returns:
        Frame with bounding box drawn
    """
    x1, y1, x2, y2 = bbox

    # Draw bounding box
    cv2.rectangle(frame, (x1, y1), (x2, y2), color, thickness)

    # Prepare label text, e.g. "car | ID:7 | 0.83"
    label_parts = [class_name]
    if show_id:
        label_parts.append(f"ID:{track_id}")
    if show_confidence:
        label_parts.append(f"{confidence:.2f}")

    label = " | ".join(label_parts)

    # Calculate label size
    (label_width, label_height), baseline = cv2.getTextSize(
        label,
        cv2.FONT_HERSHEY_SIMPLEX,
        0.6,
        2
    )

    # Draw label background above the box.
    # NOTE(review): for detections near the top edge (small y1) this strip
    # extends above the frame; cv2 clips it silently.
    cv2.rectangle(
        frame,
        (x1, y1 - label_height - baseline - 5),
        (x1 + label_width + 5, y1),
        color,
        -1
    )

    # Draw label text
    cv2.putText(
        frame,
        label,
        (x1 + 2, y1 - baseline - 2),
        cv2.FONT_HERSHEY_SIMPLEX,
        0.6,
        (0, 0, 0),  # Black text
        2
    )

    # Draw center point (used by the line-crossing logic)
    center_x, center_y = get_center_point(bbox)
    cv2.circle(frame, (center_x, center_y), 4, color, -1)

    return frame
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
def format_time(seconds: float) -> str:
    """
    Format an elapsed duration as an HH:MM:SS string.

    Args:
        seconds: Time in seconds

    Returns:
        Formatted time string
    """
    whole = int(seconds)
    hours = whole // 3600
    minutes, secs = divmod(whole % 3600, 60)
    return f"{hours:02d}:{minutes:02d}:{secs:02d}"
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def get_counting_line_coords(
    frame_width: int,
    frame_height: int,
    line_position: float = 0.5,
    custom_coords: Tuple[int, int, int, int] = None,
    orientation: str = "horizontal"
) -> Tuple[int, int, int, int]:
    """
    Compute counting-line endpoints for a frame.

    Args:
        frame_width: Width of video frame
        frame_height: Height of video frame
        line_position: Position as percentage (0.0 to 1.0)
        custom_coords: Explicit line coordinates; when given, returned
            unchanged and line_position is ignored
        orientation: "horizontal" or "vertical"

    Returns:
        Line coordinates (x1, y1, x2, y2)
    """
    # Explicit coordinates win over the percentage-based placement.
    if custom_coords is not None:
        return custom_coords

    if orientation == "vertical":
        # Full-height line at a fraction of the width.
        x = int(frame_width * line_position)
        return (x, 0, x, frame_height)

    # Default: full-width horizontal line at a fraction of the height.
    y = int(frame_height * line_position)
    return (0, y, frame_width, y)
|
vehicle_tracker.py
ADDED
|
@@ -0,0 +1,360 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Real-Time Vehicle Tracking & Counting System using YOLO11
|
| 3 |
+
Detects, tracks, and counts vehicles crossing a virtual line in video
|
| 4 |
+
"""
|
| 5 |
+
import cv2
|
| 6 |
+
import argparse
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
from ultralytics import YOLO
|
| 9 |
+
import time
|
| 10 |
+
from collections import defaultdict
|
| 11 |
+
|
| 12 |
+
# Import configuration and utilities
|
| 13 |
+
import config
|
| 14 |
+
from utils import (
|
| 15 |
+
get_center_point,
|
| 16 |
+
check_line_crossing,
|
| 17 |
+
draw_counting_line,
|
| 18 |
+
draw_statistics,
|
| 19 |
+
draw_bounding_box,
|
| 20 |
+
get_counting_line_coords,
|
| 21 |
+
format_time
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class VehicleTracker:
    """Main class for vehicle tracking and counting.

    Wraps a YOLO tracker around an OpenCV capture/writer pair, counts
    vehicles whose center crosses the configured counting line, and writes
    an annotated output video.
    """

    def __init__(self, model_path: str = None, video_path: str = None):
        """
        Initialize the vehicle tracker

        Args:
            model_path: Path to YOLO model (default from config)
            video_path: Path to input video (default from config)
        """
        self.model_path = model_path or config.MODEL_NAME
        self.video_path = video_path or config.VIDEO_INPUT

        # Initialize YOLO model
        print(f"Loading YOLO model: {self.model_path}")
        self.model = YOLO(self.model_path)

        # Set device
        # NOTE(review): this only prints — no device is actually selected
        # here; presumably ultralytics picks GPU automatically. Confirm
        # whether an explicit device move is intended.
        if config.USE_GPU:
            print("GPU enabled (if available)")

        # Tracking data
        self.vehicle_counts = defaultdict(int)  # per-class crossing totals
        self.tracked_objects = {}  # Store previous positions
        self.counted_ids = set()  # IDs that have been counted

        # Video properties (filled in by initialize_video)
        self.cap = None
        self.out = None
        self.frame_width = 0
        self.frame_height = 0
        self.fps = 0
        self.total_frames = 0

        # Counting line coordinates
        self.line_coords = None

    def initialize_video(self, output_path: str = None):
        """
        Initialize video capture and writer

        Args:
            output_path: Path to save output video (default from config)

        Raises:
            ValueError: If the input video cannot be opened.
        """
        self.cap = cv2.VideoCapture(self.video_path)

        if not self.cap.isOpened():
            raise ValueError(f"Error opening video file: {self.video_path}")

        # Get video properties
        self.frame_width = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
        self.frame_height = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
        self.fps = int(self.cap.get(cv2.CAP_PROP_FPS))
        self.total_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT))

        print(f"\nVideo Properties:")
        print(f"  Resolution: {self.frame_width}x{self.frame_height}")
        print(f"  FPS: {self.fps}")
        print(f"  Total Frames: {self.total_frames}")

        # Apply resize if configured
        if config.RESIZE_WIDTH and config.RESIZE_HEIGHT:
            self.frame_width = config.RESIZE_WIDTH
            self.frame_height = config.RESIZE_HEIGHT
            print(f"  Resizing to: {self.frame_width}x{self.frame_height}")

        # Set counting line coordinates
        self.line_coords = get_counting_line_coords(
            self.frame_width,
            self.frame_height,
            config.COUNTING_LINE_POSITION,
            config.COUNTING_LINE_COORDS,
            config.COUNTING_LINE_ORIENTATION
        )
        print(f"  Counting Line: {self.line_coords} ({config.COUNTING_LINE_ORIENTATION})\n")

        # Initialize video writer
        output_path = output_path or config.VIDEO_OUTPUT
        fourcc = cv2.VideoWriter_fourcc(*'mp4v')
        self.out = cv2.VideoWriter(
            output_path,
            fourcc,
            self.fps,
            (self.frame_width, self.frame_height)
        )

        print(f"Output will be saved to: {output_path}\n")

    def process_frame(self, frame):
        """
        Process a single frame: detect, track, and count vehicles

        Args:
            frame: Input video frame

        Returns:
            Processed frame with annotations
        """
        # Resize frame if configured
        if config.RESIZE_WIDTH and config.RESIZE_HEIGHT:
            frame = cv2.resize(frame, (self.frame_width, self.frame_height))

        # Run YOLO tracking; persist=True keeps track IDs stable across
        # successive calls on the same video stream.
        results = self.model.track(
            frame,
            persist=True,
            conf=config.CONFIDENCE_THRESHOLD,
            iou=config.IOU_THRESHOLD,
            classes=list(config.VEHICLE_CLASSES.keys()),
            verbose=False
        )

        # Draw counting line
        frame = draw_counting_line(
            frame,
            self.line_coords,
            config.LINE_COLOR,
            config.LINE_THICKNESS
        )

        # Process detections (boxes.id is None until the tracker has IDs)
        if results[0].boxes is not None and results[0].boxes.id is not None:
            boxes = results[0].boxes.xyxy.cpu().numpy().astype(int)
            track_ids = results[0].boxes.id.cpu().numpy().astype(int)
            classes = results[0].boxes.cls.cpu().numpy().astype(int)
            confidences = results[0].boxes.conf.cpu().numpy()

            for box, track_id, cls, conf in zip(boxes, track_ids, classes, confidences):
                # Get class name
                class_name = config.VEHICLE_CLASSES.get(cls, "unknown")

                # Get center point
                center_x, center_y = get_center_point(box)

                # Determine coordinate to check based on orientation:
                # X against the line's x for vertical lines, Y against the
                # line's y for horizontal ones.
                curr_pos = center_x if config.COUNTING_LINE_ORIENTATION == "vertical" else center_y
                line_pos = self.line_coords[0] if config.COUNTING_LINE_ORIENTATION == "vertical" else self.line_coords[1]

                # Check for line crossing (needs a previous position)
                if track_id in self.tracked_objects:
                    prev_pos = self.tracked_objects[track_id]['pos']

                    # Check if crossed and not already counted; counted_ids
                    # ensures each track is counted at most once.
                    if (track_id not in self.counted_ids and
                            check_line_crossing(curr_pos, prev_pos, line_pos)):

                        self.vehicle_counts[class_name] += 1
                        self.counted_ids.add(track_id)
                        print(f"✓ Counted: {class_name.upper()} (ID: {track_id}) - Total: {sum(self.vehicle_counts.values())}")

                # Update tracked object position
                self.tracked_objects[track_id] = {
                    'pos': curr_pos,
                    'class': class_name
                }

                # Draw bounding box and label
                color = config.COLORS.get(class_name, (255, 255, 255))
                frame = draw_bounding_box(
                    frame,
                    box,
                    track_id,
                    class_name,
                    conf,
                    color,
                    config.BOX_THICKNESS,
                    config.SHOW_TRACK_ID,
                    config.SHOW_CONFIDENCE
                )

        # Draw statistics
        frame = draw_statistics(
            frame,
            dict(self.vehicle_counts),
            config.STATS_POSITION,
            config.FONT_SCALE,
            config.STATS_COLOR,
            config.STATS_BG_COLOR
        )

        return frame

    def run(self, output_path: str = None):
        """
        Run the vehicle tracking and counting system

        Reads frames until the stream ends (or the user presses 'q' when a
        display window is enabled), writes annotated frames to the output
        video, and prints final per-class counts.

        Args:
            output_path: Path to save output video
        """
        try:
            # Initialize video
            self.initialize_video(output_path)

            print("Starting vehicle tracking and counting...")
            print("Press 'q' to quit early\n")

            frame_count = 0
            start_time = time.time()

            while True:
                ret, frame = self.cap.read()

                if not ret:
                    break

                frame_count += 1

                # Skip frames if configured (processes every SKIP_FRAMES+1-th
                # frame; skipped frames are not written to the output either)
                if config.SKIP_FRAMES > 0 and frame_count % (config.SKIP_FRAMES + 1) != 0:
                    continue

                # Process frame
                processed_frame = self.process_frame(frame)

                # Write to output video
                self.out.write(processed_frame)

                # Display frame
                if config.DISPLAY_WINDOW:
                    cv2.imshow('Vehicle Tracking & Counting', processed_frame)

                    if cv2.waitKey(1) & 0xFF == ord('q'):
                        print("\nStopped by user")
                        break

                # Print progress every 30 frames (parsed by app.py's
                # background worker — keep the "Progress:" format stable)
                if frame_count % 30 == 0:
                    elapsed = time.time() - start_time
                    fps = frame_count / elapsed
                    progress = (frame_count / self.total_frames) * 100
                    print(f"Progress: {progress:.1f}% | Frame: {frame_count}/{self.total_frames} | FPS: {fps:.1f}")

            # Final statistics
            elapsed_time = time.time() - start_time
            avg_fps = frame_count / elapsed_time

            print("\n" + "="*50)
            print("PROCESSING COMPLETE")
            print("="*50)
            print(f"Frames Processed: {frame_count}")
            print(f"Time Elapsed: {format_time(elapsed_time)}")
            print(f"Average FPS: {avg_fps:.2f}")
            print("\nVehicle Counts:")
            print("-"*30)

            total = 0
            for vehicle_class in sorted(self.vehicle_counts.keys()):
                count = self.vehicle_counts[vehicle_class]
                total += count
                print(f"  {vehicle_class.upper()}: {count}")

            print("-"*30)
            print(f"  TOTAL: {total}")
            print("="*50)

        finally:
            # Cleanup: release capture/writer and any display window
            if self.cap:
                self.cap.release()
            if self.out:
                self.out.release()
            if config.DISPLAY_WINDOW:
                cv2.destroyAllWindows()
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
def main():
    """Parse command-line arguments and run the vehicle tracker.

    CLI flags override the defaults declared in ``config``; a
    ``VehicleTracker`` is then constructed and run over the whole
    input video, writing an annotated copy to the output path.
    """
    parser = argparse.ArgumentParser(
        description='Real-Time Vehicle Tracking & Counting System using YOLO11'
    )

    parser.add_argument(
        '--video',
        type=str,
        default=config.VIDEO_INPUT,
        help='Path to input video file'
    )

    parser.add_argument(
        '--output',
        type=str,
        default=config.VIDEO_OUTPUT,
        help='Path to save output video'
    )

    parser.add_argument(
        '--model',
        type=str,
        default=config.MODEL_NAME,
        help='YOLO model to use (e.g., yolo11n.pt, yolo11m.pt)'
    )

    parser.add_argument(
        '--line-y',
        type=int,
        default=None,
        help='Y coordinate for counting line (overrides config)'
    )

    parser.add_argument(
        '--no-display',
        action='store_true',
        help='Run without displaying video window (headless mode)'
    )

    parser.add_argument(
        '--conf',
        type=float,
        default=config.CONFIDENCE_THRESHOLD,
        help='Confidence threshold for detection'
    )

    args = parser.parse_args()

    # Override config with command-line arguments.
    # NOTE: explicit ``is not None`` checks so falsy-but-valid values
    # (``--line-y 0``, ``--conf 0.0``) are not silently ignored, which a
    # plain truthiness test would do.
    if args.line_y is not None:
        # Horizontal line across the full frame; 10000 is an x large
        # enough to span any realistic video resolution.
        config.COUNTING_LINE_COORDS = (0, args.line_y, 10000, args.line_y)

    if args.no_display:
        config.DISPLAY_WINDOW = False

    if args.conf is not None:
        config.CONFIDENCE_THRESHOLD = args.conf

    # Create tracker and process the video end-to-end.
    tracker = VehicleTracker(
        model_path=args.model,
        video_path=args.video
    )

    tracker.run(output_path=args.output)


if __name__ == "__main__":
    main()
|
web_app.html
ADDED
|
@@ -0,0 +1,523 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<!DOCTYPE html>
|
| 2 |
+
<html lang="en">
|
| 3 |
+
|
| 4 |
+
<head>
|
| 5 |
+
<meta charset="UTF-8">
|
| 6 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
| 7 |
+
<title>🚗 Vehicle Tracking - Web App</title>
|
| 8 |
+
<style>
|
| 9 |
+
* {
|
| 10 |
+
margin: 0;
|
| 11 |
+
padding: 0;
|
| 12 |
+
box-sizing: border-box;
|
| 13 |
+
}
|
| 14 |
+
|
| 15 |
+
body {
|
| 16 |
+
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
| 17 |
+
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
|
| 18 |
+
min-height: 100vh;
|
| 19 |
+
padding: 20px;
|
| 20 |
+
}
|
| 21 |
+
|
| 22 |
+
.container {
|
| 23 |
+
max-width: 900px;
|
| 24 |
+
margin: 0 auto;
|
| 25 |
+
background: rgba(255, 255, 255, 0.98);
|
| 26 |
+
border-radius: 20px;
|
| 27 |
+
padding: 40px;
|
| 28 |
+
box-shadow: 0 20px 60px rgba(0, 0, 0, 0.3);
|
| 29 |
+
}
|
| 30 |
+
|
| 31 |
+
h1 {
|
| 32 |
+
text-align: center;
|
| 33 |
+
color: #333;
|
| 34 |
+
margin-bottom: 10px;
|
| 35 |
+
font-size: 2.5em;
|
| 36 |
+
}
|
| 37 |
+
|
| 38 |
+
.subtitle {
|
| 39 |
+
text-align: center;
|
| 40 |
+
color: #666;
|
| 41 |
+
margin-bottom: 40px;
|
| 42 |
+
font-size: 1.1em;
|
| 43 |
+
}
|
| 44 |
+
|
| 45 |
+
.drop-zone {
|
| 46 |
+
border: 3px dashed #667eea;
|
| 47 |
+
border-radius: 15px;
|
| 48 |
+
padding: 80px 40px;
|
| 49 |
+
text-align: center;
|
| 50 |
+
cursor: pointer;
|
| 51 |
+
transition: all 0.3s ease;
|
| 52 |
+
background: linear-gradient(135deg, rgba(102, 126, 234, 0.05) 0%, rgba(118, 75, 162, 0.05) 100%);
|
| 53 |
+
margin-bottom: 30px;
|
| 54 |
+
}
|
| 55 |
+
|
| 56 |
+
.drop-zone:hover {
|
| 57 |
+
border-color: #764ba2;
|
| 58 |
+
background: linear-gradient(135deg, rgba(102, 126, 234, 0.1) 0%, rgba(118, 75, 162, 0.1) 100%);
|
| 59 |
+
transform: translateY(-2px);
|
| 60 |
+
}
|
| 61 |
+
|
| 62 |
+
.drop-zone.dragover {
|
| 63 |
+
border-color: #4CAF50;
|
| 64 |
+
background: rgba(76, 175, 80, 0.1);
|
| 65 |
+
transform: scale(1.02);
|
| 66 |
+
}
|
| 67 |
+
|
| 68 |
+
.drop-zone.processing {
|
| 69 |
+
opacity: 0.5;
|
| 70 |
+
pointer-events: none;
|
| 71 |
+
}
|
| 72 |
+
|
| 73 |
+
.drop-zone-icon {
|
| 74 |
+
font-size: 5em;
|
| 75 |
+
margin-bottom: 20px;
|
| 76 |
+
animation: float 3s ease-in-out infinite;
|
| 77 |
+
}
|
| 78 |
+
|
| 79 |
+
@keyframes float {
|
| 80 |
+
|
| 81 |
+
0%,
|
| 82 |
+
100% {
|
| 83 |
+
transform: translateY(0px);
|
| 84 |
+
}
|
| 85 |
+
|
| 86 |
+
50% {
|
| 87 |
+
transform: translateY(-10px);
|
| 88 |
+
}
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
.drop-zone-text {
|
| 92 |
+
font-size: 1.5em;
|
| 93 |
+
color: #667eea;
|
| 94 |
+
font-weight: 600;
|
| 95 |
+
margin-bottom: 10px;
|
| 96 |
+
}
|
| 97 |
+
|
| 98 |
+
.drop-zone-subtext {
|
| 99 |
+
color: #999;
|
| 100 |
+
font-size: 1em;
|
| 101 |
+
}
|
| 102 |
+
|
| 103 |
+
.file-input {
|
| 104 |
+
display: none;
|
| 105 |
+
}
|
| 106 |
+
|
| 107 |
+
.file-info {
|
| 108 |
+
display: none;
|
| 109 |
+
background: #f8f9fa;
|
| 110 |
+
padding: 20px;
|
| 111 |
+
border-radius: 10px;
|
| 112 |
+
margin-bottom: 20px;
|
| 113 |
+
border-left: 4px solid #667eea;
|
| 114 |
+
}
|
| 115 |
+
|
| 116 |
+
.file-info.show {
|
| 117 |
+
display: block;
|
| 118 |
+
animation: slideIn 0.3s ease;
|
| 119 |
+
}
|
| 120 |
+
|
| 121 |
+
@keyframes slideIn {
|
| 122 |
+
from {
|
| 123 |
+
opacity: 0;
|
| 124 |
+
transform: translateY(-10px);
|
| 125 |
+
}
|
| 126 |
+
|
| 127 |
+
to {
|
| 128 |
+
opacity: 1;
|
| 129 |
+
transform: translateY(0);
|
| 130 |
+
}
|
| 131 |
+
}
|
| 132 |
+
|
| 133 |
+
.file-name {
|
| 134 |
+
font-weight: 600;
|
| 135 |
+
color: #333;
|
| 136 |
+
margin-bottom: 8px;
|
| 137 |
+
font-size: 1.1em;
|
| 138 |
+
}
|
| 139 |
+
|
| 140 |
+
.file-size {
|
| 141 |
+
color: #666;
|
| 142 |
+
font-size: 0.95em;
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
.progress-container {
|
| 146 |
+
display: none;
|
| 147 |
+
margin: 30px 0;
|
| 148 |
+
}
|
| 149 |
+
|
| 150 |
+
.progress-container.show {
|
| 151 |
+
display: block;
|
| 152 |
+
}
|
| 153 |
+
|
| 154 |
+
.progress-bar {
|
| 155 |
+
width: 100%;
|
| 156 |
+
height: 40px;
|
| 157 |
+
background: #e9ecef;
|
| 158 |
+
border-radius: 20px;
|
| 159 |
+
overflow: hidden;
|
| 160 |
+
margin-bottom: 15px;
|
| 161 |
+
box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.1);
|
| 162 |
+
}
|
| 163 |
+
|
| 164 |
+
.progress-fill {
|
| 165 |
+
height: 100%;
|
| 166 |
+
background: linear-gradient(90deg, #667eea 0%, #764ba2 100%);
|
| 167 |
+
width: 0%;
|
| 168 |
+
transition: width 0.5s ease;
|
| 169 |
+
display: flex;
|
| 170 |
+
align-items: center;
|
| 171 |
+
justify-content: center;
|
| 172 |
+
color: white;
|
| 173 |
+
font-weight: 600;
|
| 174 |
+
font-size: 1.1em;
|
| 175 |
+
}
|
| 176 |
+
|
| 177 |
+
.status-text {
|
| 178 |
+
text-align: center;
|
| 179 |
+
color: #666;
|
| 180 |
+
font-size: 1.05em;
|
| 181 |
+
font-weight: 500;
|
| 182 |
+
}
|
| 183 |
+
|
| 184 |
+
.results {
|
| 185 |
+
display: none;
|
| 186 |
+
background: linear-gradient(135deg, #d4edda 0%, #c3e6cb 100%);
|
| 187 |
+
border-left: 4px solid #28a745;
|
| 188 |
+
padding: 25px;
|
| 189 |
+
border-radius: 10px;
|
| 190 |
+
margin-top: 20px;
|
| 191 |
+
}
|
| 192 |
+
|
| 193 |
+
.results.show {
|
| 194 |
+
display: block;
|
| 195 |
+
animation: slideIn 0.3s ease;
|
| 196 |
+
}
|
| 197 |
+
|
| 198 |
+
.results h3 {
|
| 199 |
+
color: #155724;
|
| 200 |
+
margin-bottom: 15px;
|
| 201 |
+
font-size: 1.3em;
|
| 202 |
+
}
|
| 203 |
+
|
| 204 |
+
.results p {
|
| 205 |
+
color: #155724;
|
| 206 |
+
margin-bottom: 15px;
|
| 207 |
+
line-height: 1.6;
|
| 208 |
+
}
|
| 209 |
+
|
| 210 |
+
.btn {
|
| 211 |
+
padding: 15px 30px;
|
| 212 |
+
border: none;
|
| 213 |
+
border-radius: 10px;
|
| 214 |
+
font-size: 1.1em;
|
| 215 |
+
font-weight: 600;
|
| 216 |
+
cursor: pointer;
|
| 217 |
+
transition: all 0.3s ease;
|
| 218 |
+
text-decoration: none;
|
| 219 |
+
display: inline-block;
|
| 220 |
+
}
|
| 221 |
+
|
| 222 |
+
.btn-download {
|
| 223 |
+
background: linear-gradient(135deg, #28a745 0%, #20c997 100%);
|
| 224 |
+
color: white;
|
| 225 |
+
}
|
| 226 |
+
|
| 227 |
+
.btn-download:hover {
|
| 228 |
+
transform: translateY(-2px);
|
| 229 |
+
box-shadow: 0 10px 25px rgba(40, 167, 69, 0.4);
|
| 230 |
+
}
|
| 231 |
+
|
| 232 |
+
.btn-new {
|
| 233 |
+
background: #f8f9fa;
|
| 234 |
+
color: #667eea;
|
| 235 |
+
border: 2px solid #667eea;
|
| 236 |
+
margin-left: 10px;
|
| 237 |
+
}
|
| 238 |
+
|
| 239 |
+
.btn-new:hover {
|
| 240 |
+
background: #667eea;
|
| 241 |
+
color: white;
|
| 242 |
+
}
|
| 243 |
+
|
| 244 |
+
.error {
|
| 245 |
+
display: none;
|
| 246 |
+
background: linear-gradient(135deg, #f8d7da 0%, #f5c6cb 100%);
|
| 247 |
+
border-left: 4px solid #dc3545;
|
| 248 |
+
padding: 20px;
|
| 249 |
+
border-radius: 10px;
|
| 250 |
+
margin-top: 20px;
|
| 251 |
+
color: #721c24;
|
| 252 |
+
}
|
| 253 |
+
|
| 254 |
+
.error.show {
|
| 255 |
+
display: block;
|
| 256 |
+
animation: slideIn 0.3s ease;
|
| 257 |
+
}
|
| 258 |
+
|
| 259 |
+
.spinner {
|
| 260 |
+
display: inline-block;
|
| 261 |
+
width: 20px;
|
| 262 |
+
height: 20px;
|
| 263 |
+
border: 3px solid rgba(255, 255, 255, .3);
|
| 264 |
+
border-radius: 50%;
|
| 265 |
+
border-top-color: #fff;
|
| 266 |
+
animation: spin 1s ease-in-out infinite;
|
| 267 |
+
}
|
| 268 |
+
|
| 269 |
+
@keyframes spin {
|
| 270 |
+
to {
|
| 271 |
+
transform: rotate(360deg);
|
| 272 |
+
}
|
| 273 |
+
}
|
| 274 |
+
|
| 275 |
+
.features {
|
| 276 |
+
display: grid;
|
| 277 |
+
grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
|
| 278 |
+
gap: 20px;
|
| 279 |
+
margin-top: 30px;
|
| 280 |
+
}
|
| 281 |
+
|
| 282 |
+
.feature {
|
| 283 |
+
text-align: center;
|
| 284 |
+
padding: 20px;
|
| 285 |
+
background: #f8f9fa;
|
| 286 |
+
border-radius: 10px;
|
| 287 |
+
}
|
| 288 |
+
|
| 289 |
+
.feature-icon {
|
| 290 |
+
font-size: 2.5em;
|
| 291 |
+
margin-bottom: 10px;
|
| 292 |
+
}
|
| 293 |
+
|
| 294 |
+
.feature-text {
|
| 295 |
+
color: #666;
|
| 296 |
+
font-size: 0.95em;
|
| 297 |
+
}
|
| 298 |
+
</style>
|
| 299 |
+
</head>
|
| 300 |
+
|
| 301 |
+
<body>
|
| 302 |
+
<div class="container">
|
| 303 |
+
<h1>🚗 Vehicle Tracking System</h1>
|
| 304 |
+
<p class="subtitle">Upload your traffic video and get AI-powered vehicle tracking</p>
|
| 305 |
+
|
| 306 |
+
<div class="drop-zone" id="dropZone">
|
| 307 |
+
<div class="drop-zone-icon">🎥</div>
|
| 308 |
+
<div class="drop-zone-text">Drop your video here</div>
|
| 309 |
+
<div class="drop-zone-subtext">or click to browse (MP4, AVI, MOV, MKV)</div>
|
| 310 |
+
</div>
|
| 311 |
+
|
| 312 |
+
<input type="file" id="fileInput" class="file-input" accept="video/*">
|
| 313 |
+
|
| 314 |
+
<div class="file-info" id="fileInfo">
|
| 315 |
+
<div class="file-name" id="fileName"></div>
|
| 316 |
+
<div class="file-size" id="fileSize"></div>
|
| 317 |
+
</div>
|
| 318 |
+
|
| 319 |
+
<div class="progress-container" id="progressContainer">
|
| 320 |
+
<div class="progress-bar">
|
| 321 |
+
<div class="progress-fill" id="progressFill">
|
| 322 |
+
<span id="progressText">0%</span>
|
| 323 |
+
</div>
|
| 324 |
+
</div>
|
| 325 |
+
<div class="status-text" id="statusText">
|
| 326 |
+
<span class="spinner"></span> Processing...
|
| 327 |
+
</div>
|
| 328 |
+
</div>
|
| 329 |
+
|
| 330 |
+
<div class="results" id="results">
|
| 331 |
+
<h3>✅ Processing Complete!</h3>
|
| 332 |
+
<p id="resultsText"></p>
|
| 333 |
+
<a href="#" id="downloadBtn" class="btn btn-download">📥 Download Tracked Video</a>
|
| 334 |
+
<button class="btn btn-new" onclick="location.reload()">🔄 Process Another Video</button>
|
| 335 |
+
</div>
|
| 336 |
+
|
| 337 |
+
<div class="error" id="error">
|
| 338 |
+
<strong>❌ Error:</strong> <span id="errorText"></span>
|
| 339 |
+
</div>
|
| 340 |
+
|
| 341 |
+
<div class="features">
|
| 342 |
+
<div class="feature">
|
| 343 |
+
<div class="feature-icon">🎯</div>
|
| 344 |
+
<div class="feature-text">Detects Cars, Trucks, Buses & Motorcycles</div>
|
| 345 |
+
</div>
|
| 346 |
+
<div class="feature">
|
| 347 |
+
<div class="feature-icon">🔢</div>
|
| 348 |
+
<div class="feature-text">Tracks with Unique IDs</div>
|
| 349 |
+
</div>
|
| 350 |
+
<div class="feature">
|
| 351 |
+
<div class="feature-icon">📊</div>
|
| 352 |
+
<div class="feature-text">Counts Vehicles Automatically</div>
|
| 353 |
+
</div>
|
| 354 |
+
<div class="feature">
|
| 355 |
+
<div class="feature-icon">⚡</div>
|
| 356 |
+
<div class="feature-text">Powered by YOLO11 AI</div>
|
| 357 |
+
</div>
|
| 358 |
+
</div>
|
| 359 |
+
</div>
|
| 360 |
+
|
| 361 |
+
<script>
|
| 362 |
+
// --- DOM references ---------------------------------------------------
const byId = (id) => document.getElementById(id);

const dropZone = byId('dropZone');
const fileInput = byId('fileInput');
const fileInfo = byId('fileInfo');
const fileName = byId('fileName');
const fileSize = byId('fileSize');
const progressContainer = byId('progressContainer');
const progressFill = byId('progressFill');
const progressText = byId('progressText');
const statusText = byId('statusText');
const results = byId('results');
const resultsText = byId('resultsText');
const downloadBtn = byId('downloadBtn');
const error = byId('error');
const errorText = byId('errorText');

// Currently selected video file and the status-polling timer handle.
let selectedFile = null;
let statusCheckInterval = null;

// While a job is running the drop zone carries the 'processing' class
// and must ignore further interaction.
const isBusy = () => dropZone.classList.contains('processing');

// Open the file picker when the drop zone is clicked (unless busy).
dropZone.addEventListener('click', () => {
    if (isBusy()) return;
    fileInput.click();
});

// Highlight the drop zone while a file is dragged over it.
dropZone.addEventListener('dragover', (e) => {
    e.preventDefault();
    if (!isBusy()) dropZone.classList.add('dragover');
});

dropZone.addEventListener('dragleave', () => {
    dropZone.classList.remove('dragover');
});

// Accept a dropped file and hand it to the shared entry point.
dropZone.addEventListener('drop', (e) => {
    e.preventDefault();
    dropZone.classList.remove('dragover');
    if (isBusy()) return;
    const dropped = e.dataTransfer.files;
    if (dropped.length > 0) handleFile(dropped[0]);
});

// Same entry point for files chosen via the browse dialog.
fileInput.addEventListener('change', (e) => {
    const chosen = e.target.files;
    if (chosen.length > 0) handleFile(chosen[0]);
});
|
| 417 |
+
|
| 418 |
+
function handleFile(file) {
    // Validate the selection. Accept anything the browser reports as
    // video/*, but fall back to a filename-extension check because some
    // platforms report an empty MIME type for containers such as .mkv —
    // which the UI explicitly advertises as supported.
    const looksLikeVideo =
        file.type.startsWith('video/') ||
        /\.(mp4|avi|mov|mkv)$/i.test(file.name);
    if (!looksLikeVideo) {
        showError('Please select a video file');
        return;
    }

    selectedFile = file;

    // Display file name and size (MB, two decimals).
    fileName.textContent = `📁 ${file.name}`;
    const sizeMB = (file.size / (1024 * 1024)).toFixed(2);
    fileSize.textContent = `Size: ${sizeMB} MB`;
    fileInfo.classList.add('show');

    // Hide any results/errors left over from a previous run.
    results.classList.remove('show');
    error.classList.remove('show');

    // Upload immediately — there is no separate "start" button.
    uploadAndProcess();
}
|
| 441 |
+
|
| 442 |
+
async function uploadAndProcess() {
    // Nothing to do until a file has been chosen.
    if (!selectedFile) return;

    // Lock the UI and reveal the progress bar.
    dropZone.classList.add('processing');
    progressContainer.classList.add('show');
    updateProgress(0, 'Uploading video...');

    const payload = new FormData();
    payload.append('video', selectedFile);

    try {
        // POST the file to the backend.
        const response = await fetch('/upload', { method: 'POST', body: payload });
        const data = await response.json();

        if (!response.ok) {
            showError(data.error || 'Upload failed');
            dropZone.classList.remove('processing');
            return;
        }

        // Upload accepted — start polling the server for progress.
        checkStatus();
    } catch (err) {
        showError('Network error: ' + err.message);
        dropZone.classList.remove('processing');
    }
}
|
| 474 |
+
|
| 475 |
+
function checkStatus() {
    // Poll the backend once a second until it reports a terminal state.
    statusCheckInterval = setInterval(async () => {
        let status;
        try {
            const response = await fetch('/status');
            status = await response.json();
        } catch (err) {
            // Transient network hiccup — log it and keep polling.
            console.error('Status check error:', err);
            return;
        }

        updateProgress(status.progress, status.message);

        if (status.status === 'complete') {
            clearInterval(statusCheckInterval);
            showResults(status.output_file);
        } else if (status.status === 'error') {
            clearInterval(statusCheckInterval);
            showError(status.message);
            dropZone.classList.remove('processing');
        }
    }, 1000);
}
|
| 496 |
+
|
| 497 |
+
function updateProgress(percent, message) {
    // Sync the bar width, the percentage label, and the status line.
    progressFill.style.width = `${percent}%`;
    progressText.textContent = `${percent}%`;
    statusText.innerHTML = `<span class="spinner"></span> ${message}`;
}
|
| 502 |
+
|
| 503 |
+
function showResults(outputFile) {
    // Unlock the UI and swap the progress bar for the results panel.
    dropZone.classList.remove('processing');
    progressContainer.classList.remove('show');

    resultsText.textContent = 'Your video has been processed successfully! The tracked video includes bounding boxes, vehicle IDs, and counting statistics.';
    // Encode the filename so spaces or special characters survive in the
    // download URL instead of producing a broken link.
    downloadBtn.href = `/download/${encodeURIComponent(outputFile)}`;
    results.classList.add('show');
}
|
| 511 |
+
|
| 512 |
+
function showError(message) {
    // Show the error banner and hide the progress bar.
    errorText.textContent = message;
    error.classList.add('show');
    progressContainer.classList.remove('show');

    // Reset the auto-hide timer so a timeout armed by an earlier error
    // cannot dismiss a newer message prematurely.
    if (showError._hideTimer) {
        clearTimeout(showError._hideTimer);
    }
    showError._hideTimer = setTimeout(() => {
        error.classList.remove('show');
    }, 8000);
}
|
| 520 |
+
</script>
|
| 521 |
+
</body>
|
| 522 |
+
|
| 523 |
+
</html>
|
yolo11n.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0ebbc80d4a7680d14987a577cd21342b65ecfd94632bd9a8da63ae6417644ee1
|
| 3 |
+
size 5613764
|
yolo11x.pt
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7bc158aa95c0ebfdd87f70f01653c1131b93e92522dbe15c228bcd742e773a24
|
| 3 |
+
size 114636239
|