show_only_wrong
#14
by
nishanth-saka
- opened
app.py
CHANGED
|
@@ -1,5 +1,5 @@
|
|
| 1 |
# ============================================================
|
| 2 |
-
# 🚦 Stage 3 – Wrong Direction Detection (Improved)
|
| 3 |
# ============================================================
|
| 4 |
|
| 5 |
import os, cv2, json, tempfile, numpy as np, gradio as gr
|
|
@@ -44,7 +44,7 @@ class Track:
|
|
| 44 |
return [x, y]
|
| 45 |
|
| 46 |
# ============================================================
|
| 47 |
-
# ⚙️
|
| 48 |
# ============================================================
|
| 49 |
def compute_cosine_similarity(v1, v2):
|
| 50 |
v1 = v1 / (np.linalg.norm(v1) + 1e-6)
|
|
@@ -64,7 +64,7 @@ def smooth_direction(points, window=5):
|
|
| 64 |
# ============================================================
|
| 65 |
# 🧠 Wrong-Direction Detection Core
|
| 66 |
# ============================================================
|
| 67 |
-
def process_video(video_file, stage2_json):
|
| 68 |
data = json.load(open(stage2_json))
|
| 69 |
lane_flows = np.array(data.get("flow_centers", [[1,0]]))
|
| 70 |
drive_zone = np.array(data.get("drive_zone", []))
|
|
@@ -131,16 +131,19 @@ def process_video(video_file, stage2_json):
|
|
| 131 |
sims = [compute_cosine_similarity(motion, f) for f in lane_flows]
|
| 132 |
best_sim = max(sims)
|
| 133 |
|
| 134 |
-
# only classify after some frames
|
| 135 |
if trk.frames_seen > DELAY_FRAMES:
|
| 136 |
if best_sim < SIM_THRESH:
|
| 137 |
trk.status = "WRONG"
|
| 138 |
-
color = (0, 0, 255)
|
| 139 |
else:
|
| 140 |
trk.status = "OK"
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 144 |
|
| 145 |
out.write(frame)
|
| 146 |
|
|
@@ -153,16 +156,18 @@ def process_video(video_file, stage2_json):
|
|
| 153 |
# ============================================================
|
| 154 |
description = """
|
| 155 |
### 🚦 Stage 3 – Wrong Direction Detection (Improved)
|
| 156 |
-
-
|
| 157 |
-
- Lane-wise flow support for curved roads
|
| 158 |
-
- Temporal smoothing & delayed classification
|
|
|
|
| 159 |
"""
|
| 160 |
|
| 161 |
demo = gr.Interface(
|
| 162 |
fn=process_video,
|
| 163 |
inputs=[
|
| 164 |
gr.File(label="Input Video"),
|
| 165 |
-
gr.File(label="Stage 2 Flow JSON")
|
|
|
|
| 166 |
],
|
| 167 |
outputs=gr.Video(label="Output (with WRONG/OK labels)"),
|
| 168 |
title="🚗 Stage 3 – Improved Wrong-Direction Detection",
|
|
|
|
| 1 |
# ============================================================
|
| 2 |
+
# 🚦 Stage 3 – Wrong Direction Detection (Improved + Toggle)
|
| 3 |
# ============================================================
|
| 4 |
|
| 5 |
import os, cv2, json, tempfile, numpy as np, gradio as gr
|
|
|
|
| 44 |
return [x, y]
|
| 45 |
|
| 46 |
# ============================================================
|
| 47 |
+
# ⚙️ Utility Functions
|
| 48 |
# ============================================================
|
| 49 |
def compute_cosine_similarity(v1, v2):
|
| 50 |
v1 = v1 / (np.linalg.norm(v1) + 1e-6)
|
|
|
|
| 64 |
# ============================================================
|
| 65 |
# 🧠 Wrong-Direction Detection Core
|
| 66 |
# ============================================================
|
| 67 |
+
def process_video(video_file, stage2_json, show_only_wrong=False):
|
| 68 |
data = json.load(open(stage2_json))
|
| 69 |
lane_flows = np.array(data.get("flow_centers", [[1,0]]))
|
| 70 |
drive_zone = np.array(data.get("drive_zone", []))
|
|
|
|
| 131 |
sims = [compute_cosine_similarity(motion, f) for f in lane_flows]
|
| 132 |
best_sim = max(sims)
|
| 133 |
|
| 134 |
+
# only classify after some frames
|
| 135 |
if trk.frames_seen > DELAY_FRAMES:
|
| 136 |
if best_sim < SIM_THRESH:
|
| 137 |
trk.status = "WRONG"
|
|
|
|
| 138 |
else:
|
| 139 |
trk.status = "OK"
|
| 140 |
+
|
| 141 |
+
# draw depending on toggle
|
| 142 |
+
if (not show_only_wrong) or (trk.status == "WRONG"):
|
| 143 |
+
color = (0, 0, 255) if trk.status == "WRONG" else (0, 255, 0)
|
| 144 |
+
cv2.putText(frame, f"ID:{tid} {trk.status}",
|
| 145 |
+
tuple(np.int32(pos)),
|
| 146 |
+
cv2.FONT_HERSHEY_SIMPLEX, 0.6, color, 2)
|
| 147 |
|
| 148 |
out.write(frame)
|
| 149 |
|
|
|
|
| 156 |
# ============================================================
|
| 157 |
description = """
|
| 158 |
### 🚦 Stage 3 – Wrong Direction Detection (Improved)
|
| 159 |
+
- ✅ Cosine similarity instead of raw angle
|
| 160 |
+
- ✅ Lane-wise flow support for curved roads
|
| 161 |
+
- ✅ Temporal smoothing & delayed classification
|
| 162 |
+
- ✅ Toggle to show only WRONG vehicles
|
| 163 |
"""
|
| 164 |
|
| 165 |
demo = gr.Interface(
|
| 166 |
fn=process_video,
|
| 167 |
inputs=[
|
| 168 |
gr.File(label="Input Video"),
|
| 169 |
+
gr.File(label="Stage 2 Flow JSON"),
|
| 170 |
+
gr.Checkbox(label="Show ONLY Wrong Labels Overlay", value=False)
|
| 171 |
],
|
| 172 |
outputs=gr.Video(label="Output (with WRONG/OK labels)"),
|
| 173 |
title="🚗 Stage 3 – Improved Wrong-Direction Detection",
|