Confidence Level Slider

#17
Files changed (1)
  1. app.py +17 -10
app.py CHANGED
@@ -1,5 +1,5 @@
1
  # ============================================================
2
- # 🚦 Stage 3 — Wrong Direction Detection (Stable + Confidence + Hysteresis)
3
  # ============================================================
4
 
5
  import os, cv2, json, tempfile, numpy as np, gradio as gr
@@ -34,7 +34,7 @@ class Track:
34
  self.status = "OK"
35
  self.status_history = []
36
  self.confidence = 1.0
37
- self.ema_sim = 1.0 # for exponential smoothing
38
 
39
  def update(self, bbox):
40
  self.kf.predict()
@@ -78,7 +78,7 @@ def smooth_direction(points, window=5):
78
  # ============================================================
79
  # 🧭 Wrong-Direction Detection Core
80
  # ============================================================
81
- def process_video(video_file, stage2_json, show_only_wrong=False):
82
  data = json.load(open(stage2_json))
83
  lane_flows = np.array(data.get("flow_centers", [[1,0]]))
84
  drive_zone = np.array(data.get("drive_zone", []))
@@ -91,7 +91,6 @@ def process_video(video_file, stage2_json, show_only_wrong=False):
91
  out = cv2.VideoWriter(out_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (w, h))
92
 
93
  tracks, next_id = {}, 0
94
- SIM_THRESH = 0.5 # base reference
95
  DELAY_FRAMES = 8
96
  MIN_FLOW_SPEED = 1.2
97
  HYST_OK = 0.55
@@ -160,7 +159,14 @@ def process_video(video_file, stage2_json, show_only_wrong=False):
160
 
161
  trk.stable_status(new_status, new_conf=trk.ema_sim, window=10, agree_ratio=0.6)
162
 
163
- if (not show_only_wrong) or (trk.status == "WRONG"):
 
 
 
 
 
 
 
164
  color = (0, 0, 255) if trk.status == "WRONG" else (0, 255, 0)
165
  label = f"ID:{tid} {trk.status} ({trk.confidence:.2f})"
166
  cv2.putText(frame, label, tuple(np.int32(pos)),
@@ -176,12 +182,12 @@ def process_video(video_file, stage2_json, show_only_wrong=False):
176
  # 🎛️ Gradio Interface
177
  # ============================================================
178
  description = """
179
- ### 🚦 Stage 3 — Wrong Direction Detection (Stable + Confidence + Hysteresis)
180
  - ✅ Cosine similarity with exponential smoothing
181
  - ✅ Hysteresis (OK≥0.55 / WRONG≤0.45) for stability
182
  - ✅ 10-frame consensus voting (flicker-free)
183
- - ✅ Confidence score beside each ID
184
- - ✅ Optional “Show Only Wrong Labels” toggle
185
  """
186
 
187
  demo = gr.Interface(
@@ -189,10 +195,11 @@ demo = gr.Interface(
189
  inputs=[
190
  gr.File(label="Input Video"),
191
  gr.File(label="Stage 2 Flow JSON"),
192
- gr.Checkbox(label="Show ONLY Wrong Labels Overlay", value=False)
 
193
  ],
194
  outputs=gr.Video(label="Output Video"),
195
- title="🚗 Stage 3 – Stable Wrong-Direction Detection (with Confidence)",
196
  description=description
197
  )
198
 
 
1
  # ============================================================
2
+ # 🚦 Stage 3 — Wrong Direction Detection (Stable + Confidence + Hysteresis + Filter)
3
  # ============================================================
4
 
5
  import os, cv2, json, tempfile, numpy as np, gradio as gr
 
34
  self.status = "OK"
35
  self.status_history = []
36
  self.confidence = 1.0
37
+ self.ema_sim = 1.0
38
 
39
  def update(self, bbox):
40
  self.kf.predict()
 
78
  # ============================================================
79
  # 🧭 Wrong-Direction Detection Core
80
  # ============================================================
81
+ def process_video(video_file, stage2_json, show_only_wrong=False, conf_threshold=0.0):
82
  data = json.load(open(stage2_json))
83
  lane_flows = np.array(data.get("flow_centers", [[1,0]]))
84
  drive_zone = np.array(data.get("drive_zone", []))
 
91
  out = cv2.VideoWriter(out_path, cv2.VideoWriter_fourcc(*'mp4v'), fps, (w, h))
92
 
93
  tracks, next_id = {}, 0
 
94
  DELAY_FRAMES = 8
95
  MIN_FLOW_SPEED = 1.2
96
  HYST_OK = 0.55
 
159
 
160
  trk.stable_status(new_status, new_conf=trk.ema_sim, window=10, agree_ratio=0.6)
161
 
162
+ # --- Filter by UI controls ---
163
+ show_label = True
164
+ if trk.confidence < conf_threshold:
165
+ show_label = False
166
+ if show_only_wrong and trk.status != "WRONG":
167
+ show_label = False
168
+
169
+ if show_label:
170
  color = (0, 0, 255) if trk.status == "WRONG" else (0, 255, 0)
171
  label = f"ID:{tid} {trk.status} ({trk.confidence:.2f})"
172
  cv2.putText(frame, label, tuple(np.int32(pos)),
 
182
  # 🎛️ Gradio Interface
183
  # ============================================================
184
  description = """
185
+ ### 🚦 Stage 3 — Wrong Direction Detection (Stable + Confidence + Filter)
186
  - ✅ Cosine similarity with exponential smoothing
187
  - ✅ Hysteresis (OK≥0.55 / WRONG≤0.45) for stability
188
  - ✅ 10-frame consensus voting (flicker-free)
189
+ - ✅ Confidence-based label filtering
190
+ - ✅ “Show Only Wrong” toggle
191
  """
192
 
193
  demo = gr.Interface(
 
195
  inputs=[
196
  gr.File(label="Input Video"),
197
  gr.File(label="Stage 2 Flow JSON"),
198
+ gr.Checkbox(label="Show ONLY Wrong Labels Overlay", value=False),
199
+ gr.Slider(0.0, 1.0, value=0.0, step=0.05, label="Confidence Level Filter (Show ≥ this value)")
200
  ],
201
  outputs=gr.Video(label="Output Video"),
202
+ title="🚗 Stage 3 – Stable Wrong-Direction Detection (with Confidence Filter)",
203
  description=description
204
  )
205