mastefan committed on
Commit
a5961f0
·
verified ·
1 Parent(s): a4c154c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +87 -21
app.py CHANGED
@@ -20,6 +20,8 @@ import numpy as np
20
  import pathlib
21
  import zipfile
22
  import shutil
 
 
23
  from ultralytics import YOLO
24
  from autogluon.tabular import TabularPredictor
25
  from huggingface_hub import hf_hub_download
@@ -61,9 +63,11 @@ def ag_predictor():
61
  shutil.rmtree(extract_dir)
62
  with zipfile.ZipFile(z, "r") as zip_ref:
63
  zip_ref.extractall(extract_dir)
64
- _ag_predictor = TabularPredictor.load(str(extract_dir),
65
- require_version_match=False,
66
- require_py_version_match=False)
 
 
67
  return _ag_predictor
68
 
69
  # -------------------
@@ -117,6 +121,82 @@ def isolate_scoreboard_color(frame_bgr: np.ndarray,
117
 
118
  return gray
119
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
120
  # -------------------
121
  # Progress bar builder
122
  # -------------------
@@ -130,51 +210,37 @@ def _make_progress_bar(percent: int, final_text: str = None):
130
  """
131
 
132
  # -------------------
133
- # Main pipeline
134
  # -------------------
135
- def extract_score_clips(video_path: str, debug: bool = False):
136
- print("[INFO] Starting frame extraction...")
137
- # Placeholder for real processing code
138
- # This would extract frames, isolate scoreboard, run predictor, detect events, cut clips
139
- # For now we mock it as a "success" to show UI
140
- return [], "⚠️ No touches confidently detected in this video."
141
-
142
  def wrapped_run(video_file):
143
  if not video_file:
144
  yield gr.update(value=[], visible=False), "Please upload a video file.", gr.update(value="", visible=False)
145
  return
146
 
147
- # Fake step progress (jumping updates)
148
  yield gr.update(value=[], visible=False), "Processing started...", gr.update(value=_make_progress_bar(10), visible=True)
149
  yield gr.update(value=[], visible=False), "Extracting frames...", gr.update(value=_make_progress_bar(40), visible=True)
150
  yield gr.update(value=[], visible=False), "Running predictor...", gr.update(value=_make_progress_bar(70), visible=True)
151
 
152
  clips, status_msg = extract_score_clips(video_file, debug=False)
153
-
154
  final_bar = _make_progress_bar(100, "✅ Done")
155
  yield gr.update(value=clips, visible=bool(clips)), status_msg, gr.update(value=final_bar, visible=True)
156
 
157
  # -------------------
158
- # Build Gradio UI
159
  # -------------------
160
  with gr.Blocks() as demo:
161
  gr.Markdown("## 🤺 Fencing Score Detector\nUpload a bout video and detect touches.")
162
 
163
  in_video = gr.Video(label="Upload Bout Video", type="filepath")
164
-
165
  run_btn = gr.Button("Detect Touches", elem_id="detect-btn")
166
 
167
  status = gr.Markdown("Status messages will appear here.")
168
  progress_html = gr.HTML("")
169
  gallery = gr.Gallery(label="Detected Clips", visible=False)
170
 
171
- run_btn.click(
172
- fn=wrapped_run,
173
- inputs=in_video,
174
- outputs=[gallery, status, progress_html],
175
- )
176
 
177
- # Launch with queue
178
  if __name__ == "__main__":
179
  demo.queue(max_size=20)
180
  demo.launch(debug=True)
 
 
20
  import pathlib
21
  import zipfile
22
  import shutil
23
+ import pandas as pd
24
+ import subprocess
25
  from ultralytics import YOLO
26
  from autogluon.tabular import TabularPredictor
27
  from huggingface_hub import hf_hub_download
 
63
  shutil.rmtree(extract_dir)
64
  with zipfile.ZipFile(z, "r") as zip_ref:
65
  zip_ref.extractall(extract_dir)
66
+ _ag_predictor = TabularPredictor.load(
67
+ str(extract_dir),
68
+ require_version_match=False,
69
+ require_py_version_match=False
70
+ )
71
  return _ag_predictor
72
 
73
  # -------------------
 
121
 
122
  return gray
123
 
124
# -------------------
# Event picking
# -------------------
def pick_events(df: pd.DataFrame, score: pd.Series, fps: float) -> list:
    """Pick touch-event timestamps from a per-frame score signal.

    A frame is a candidate when its score clears either an absolute
    cutoff (70% of the global max) or a rolling z-score cutoff, AND it
    is a strict local maximum. Candidates inside the first second are
    discarded, and surviving times closer than GROUP_GAP_S seconds to
    the previously kept time are merged into a single event.

    Args:
        df: frame table; only the "timestamp" column is read.
        score: per-frame event score, row-aligned with df.
        fps: frame rate (currently unused; kept for interface stability).

    Returns:
        List of event timestamps in seconds, in temporal order.
    """
    # simple hybrid threshold (as tuned earlier)
    max_score = score.max()
    raw_cutoff = 0.7 * max_score if max_score > 0 else 0.4
    # Hoist the rolling window: the original rebuilt it twice (mean and std).
    roll = score.rolling(45, min_periods=1)
    z = (score - roll.mean()) / (roll.std() + 1e-9)
    z_cutoff = max(2.0, 0.6 * z.max())

    out_times = []
    for i in range(1, len(score) - 1):
        ts = float(df.iloc[i]["timestamp"])
        if (score.iloc[i] > raw_cutoff) or (z.iloc[i] > z_cutoff):
            # keep strict local maxima only
            if score.iloc[i] > score.iloc[i - 1] and score.iloc[i] > score.iloc[i + 1]:
                if ts >= 1.0:  # guard against first second
                    out_times.append(ts)

    # collapse bursts of nearby candidates into one event per GROUP_GAP_S window
    grouped = []
    for t in out_times:
        if (not grouped) or (t - grouped[-1]) > GROUP_GAP_S:
            grouped.append(t)
    return grouped
147
+
148
# -------------------
# Video clipping
# -------------------
def cut_clip(video_path, start, end, out_path):
    """Cut the [start, end] window (seconds) of video_path into out_path.

    Runs ffmpeg with stream copy (-c copy): fast and lossless, but cut
    points snap to the nearest keyframes, so boundaries are approximate.

    Args:
        video_path: source video path.
        start: clip start time in seconds.
        end: clip end time in seconds.
        out_path: destination file path (overwritten via -y).

    Returns:
        True if ffmpeg exited with status 0, False otherwise.
    """
    cmd = [
        "ffmpeg", "-y", "-i", str(video_path),
        "-ss", str(start), "-to", str(end),
        "-c", "copy", str(out_path)
    ]
    # The original discarded the exit status, silently swallowing ffmpeg
    # failures; surface them so missing/partial clips are diagnosable.
    result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if result.returncode != 0:
        print(f"[WARN] ffmpeg failed for {out_path}: "
              f"{result.stderr.decode(errors='replace')[-500:]}")
        return False
    return True
158
+
159
def extract_score_clips(video_path: str, debug: bool = False):
    """End-to-end pipeline: video -> per-frame feature -> events -> clips.

    Reads every frame, isolates the scoreboard colour, computes a crude
    red-ratio feature, scores frames with the AutoGluon predictor, picks
    event timestamps, and cuts a padded clip around each event.

    Args:
        video_path: path to the input bout video.
        debug: forwarded to isolate_scoreboard_color for per-frame dumps.

    Returns:
        (clips, status_message) where clips is a list of
        (path, caption) tuples suitable for the Gradio gallery.
    """
    cap = cv2.VideoCapture(video_path)
    if not cap.isOpened():
        return [], "❌ Could not open video."

    fps = cap.get(cv2.CAP_PROP_FPS)
    # Some containers report 0 (or NaN) FPS; the original divided by it
    # blindly and would raise ZeroDivisionError on the first frame.
    if not fps or fps <= 0:
        cap.release()
        return [], "❌ Could not determine video frame rate."

    frames = []
    timestamps = []
    idx = 0
    while True:
        ret, frame = cap.read()
        if not ret:
            break
        ts = idx / fps
        masked = isolate_scoreboard_color(frame, debug=debug, frame_id=idx if debug else None)
        # NOTE(review): isolate_scoreboard_color appears to end with
        # "return gray"; indexing channel 2 assumes a 3-channel array —
        # confirm its output shape before trusting this feature.
        red_ratio = float((masked[:,:,2] > 150).mean())  # crude feature
        frames.append([ts, red_ratio])
        timestamps.append(ts)
        idx += 1
    cap.release()

    if not frames:
        return [], "⚠️ No frames processed."

    df = pd.DataFrame(frames, columns=["timestamp","red_ratio"])
    # predict_proba: column 1 is assumed to be the positive ("touch") class
    pred = ag_predictor().predict_proba(df[["red_ratio"]])
    score = pd.Series(pred[1].values, index=df.index)

    events = pick_events(df, score, fps)
    if not events:
        return [], "⚠️ No touches confidently detected in this video."

    clips = []
    for i, t in enumerate(events, 1):
        # pad each event symmetrically, clamped to the video's time range
        s = max(0.0, t - CLIP_PAD_S)
        e = min(df["timestamp"].max(), t + CLIP_PAD_S)
        out_path = f"clip_{i}.mp4"
        cut_clip(video_path, s, e, out_path)
        clips.append((out_path, f"Touch at {t:.2f}s"))
    return clips, f"✅ Detected {len(events)} touches."
199
+
200
  # -------------------
201
  # Progress bar builder
202
  # -------------------
 
210
  """
211
 
212
  # -------------------
213
+ # Wrapped run (step-based)
214
  # -------------------
 
 
 
 
 
 
 
215
def wrapped_run(video_file):
    """Drive the UI as a generator: stream (gallery, status, progress) updates.

    Emits coarse step-based progress while the real pipeline runs at the
    end, then a final update with the detected clips.
    """
    if not video_file:
        yield gr.update(value=[], visible=False), "Please upload a video file.", gr.update(value="", visible=False)
        return

    # Staged progress shown before the (blocking) pipeline call below.
    stages = (
        (10, "Processing started..."),
        (40, "Extracting frames..."),
        (70, "Running predictor..."),
    )
    for pct, message in stages:
        yield gr.update(value=[], visible=False), message, gr.update(value=_make_progress_bar(pct), visible=True)

    clips, status_msg = extract_score_clips(video_file, debug=False)
    final_bar = _make_progress_bar(100, "✅ Done")
    yield gr.update(value=clips, visible=bool(clips)), status_msg, gr.update(value=final_bar, visible=True)
227
 
228
# -------------------
# Gradio UI
# -------------------
with gr.Blocks() as demo:
    gr.Markdown("## 🤺 Fencing Score Detector\nUpload a bout video and detect touches.")

    # NOTE(review): recent Gradio versions removed the `type` kwarg on
    # gr.Video (filepath is the default) — confirm against the pinned version.
    in_video = gr.Video(label="Upload Bout Video", type="filepath")
    run_btn = gr.Button("Detect Touches", elem_id="detect-btn")

    status = gr.Markdown("Status messages will appear here.")
    progress_html = gr.HTML("")
    gallery = gr.Gallery(label="Detected Clips", visible=False)

    # wrapped_run is a generator, so these three outputs update incrementally
    run_btn.click(fn=wrapped_run, inputs=in_video, outputs=[gallery, status, progress_html])
 
 
 
 
242
 
 
243
if __name__ == "__main__":
    # queue() enables generator streaming and caps concurrent jobs at 20
    demo.queue(max_size=20)
    demo.launch(debug=True)
246
+