andytaylor-smg committed on
Commit
1f3bac1
·
1 Parent(s): ecbaf10

restoring missing clock reset detection functions

Browse files
src/config/session.py CHANGED
@@ -255,7 +255,7 @@ def _save_timeout_config(config: SessionConfig, output_dir: Path, basename: str)
255
  def save_session_config(
256
  config: SessionConfig,
257
  output_dir: Path,
258
- selected_frame: Tuple[float, np.ndarray] = None,
259
  ) -> Tuple[str, str]:
260
  """
261
  Save session configuration and generate template image.
 
255
  def save_session_config(
256
  config: SessionConfig,
257
  output_dir: Path,
258
+ selected_frame: Tuple[float, np.ndarray] | None = None,
259
  ) -> Tuple[str, str]:
260
  """
261
  Save session configuration and generate template image.
src/detection/models.py CHANGED
@@ -5,7 +5,7 @@ These models represent the results of detecting various elements in video frames
5
  scorebug presence and timeout indicators.
6
  """
7
 
8
- from typing import Optional, Tuple, List
9
 
10
  from pydantic import BaseModel, Field
11
 
@@ -27,7 +27,7 @@ class TimeoutRegionConfig(BaseModel):
27
  team_name: str = Field(..., description="'home' or 'away'")
28
  bbox: Tuple[int, int, int, int] = Field(..., description="x, y, width, height for the 3-oval group")
29
 
30
- def to_dict(self) -> dict:
31
  """Convert to dictionary for JSON serialization."""
32
  return {
33
  "team_name": self.team_name,
@@ -35,7 +35,7 @@ class TimeoutRegionConfig(BaseModel):
35
  }
36
 
37
  @classmethod
38
- def from_dict(cls, data: dict) -> "TimeoutRegionConfig":
39
  """Create from dictionary."""
40
  bbox = (data["bbox"]["x"], data["bbox"]["y"], data["bbox"]["width"], data["bbox"]["height"])
41
  return cls(team_name=data["team_name"], bbox=bbox)
 
5
  scorebug presence and timeout indicators.
6
  """
7
 
8
+ from typing import Any, Optional, Tuple, List
9
 
10
  from pydantic import BaseModel, Field
11
 
 
27
  team_name: str = Field(..., description="'home' or 'away'")
28
  bbox: Tuple[int, int, int, int] = Field(..., description="x, y, width, height for the 3-oval group")
29
 
30
+ def to_dict(self) -> dict[str, object]:
31
  """Convert to dictionary for JSON serialization."""
32
  return {
33
  "team_name": self.team_name,
 
35
  }
36
 
37
  @classmethod
38
+ def from_dict(cls, data: dict[str, Any]) -> "TimeoutRegionConfig":
39
  """Create from dictionary."""
40
  bbox = (data["bbox"]["x"], data["bbox"]["y"], data["bbox"]["width"], data["bbox"]["height"])
41
  return cls(team_name=data["team_name"], bbox=bbox)
src/pipeline/orchestrator.py CHANGED
@@ -13,9 +13,10 @@ import logging
13
  from pathlib import Path
14
  from typing import Any, Dict
15
 
16
- from config.session import SessionConfig, MIN_PLAY_DURATION
17
  from detection import DetectTimeouts
18
- from pipeline.play_detector import DetectionConfig, PlayDetector, format_detection_result_dict
 
19
 
20
  logger = logging.getLogger(__name__)
21
 
@@ -165,7 +166,7 @@ def print_results_summary(
165
  video_basename: str,
166
  generate_individual: bool,
167
  expected_plays: int = 12,
168
- ):
169
  """
170
  Print the final results summary.
171
 
 
13
  from pathlib import Path
14
  from typing import Any, Dict
15
 
16
+ from config import SessionConfig, MIN_PLAY_DURATION
17
  from detection import DetectTimeouts
18
+ from pipeline import DetectionConfig
19
+ from pipeline.play_detector import PlayDetector, format_detection_result_dict
20
 
21
  logger = logging.getLogger(__name__)
22
 
 
166
  video_basename: str,
167
  generate_individual: bool,
168
  expected_plays: int = 12,
169
+ ) -> None:
170
  """
171
  Print the final results summary.
172
 
src/pipeline/parallel.py CHANGED
@@ -175,13 +175,14 @@ def _read_video_frames(cap: Any, start_frame: int, end_frame: int, frame_skip: i
175
 
176
  return frames, io_time
177
 
 
178
  # pylint: disable=too-many-locals
179
  def _process_chunk(
180
  chunk_id: int,
181
  config: ParallelProcessingConfig,
182
  chunk_start: float,
183
  chunk_end: float,
184
- progress_dict: Optional[Dict] = None,
185
  ) -> ChunkResult:
186
  """
187
  Process a single video chunk using OpenCV.
@@ -309,7 +310,7 @@ def _calculate_chunk_boundaries(start_time: float, end_time: float, num_workers:
309
  return chunks
310
 
311
 
312
- def _create_progress_monitor(progress_dict: Dict, num_workers: int) -> Tuple[threading.Thread, threading.Event]:
313
  """
314
  Create a progress monitoring thread.
315
 
@@ -322,7 +323,7 @@ def _create_progress_monitor(progress_dict: Dict, num_workers: int) -> Tuple[thr
322
  """
323
  stop_monitor = threading.Event()
324
 
325
- def monitor_progress():
326
  """Monitor and display progress from workers."""
327
  while not stop_monitor.is_set():
328
  _display_progress(progress_dict, num_workers)
@@ -332,7 +333,7 @@ def _create_progress_monitor(progress_dict: Dict, num_workers: int) -> Tuple[thr
332
  return monitor_thread, stop_monitor
333
 
334
 
335
- def _display_progress(progress_dict: Dict, num_workers: int) -> None:
336
  """
337
  Build and display current progress string.
338
 
@@ -385,8 +386,8 @@ def _submit_chunk_jobs(
385
  executor: ProcessPoolExecutor,
386
  chunks: List[Tuple[int, float, float]],
387
  config: ParallelProcessingConfig,
388
- progress_dict: Dict,
389
- ) -> Dict[Future, int]:
390
  """
391
  Submit all chunk processing jobs to the executor.
392
 
@@ -413,7 +414,7 @@ def _submit_chunk_jobs(
413
  return futures
414
 
415
 
416
- def _collect_chunk_results(futures: Dict[Future, int]) -> Dict[int, Optional[ChunkResult]]:
417
  """
418
  Collect results from all chunk futures as they complete.
419
 
 
175
 
176
  return frames, io_time
177
 
178
+
179
  # pylint: disable=too-many-locals
180
  def _process_chunk(
181
  chunk_id: int,
182
  config: ParallelProcessingConfig,
183
  chunk_start: float,
184
  chunk_end: float,
185
+ progress_dict: Optional[Dict[int, Any]] = None,
186
  ) -> ChunkResult:
187
  """
188
  Process a single video chunk using OpenCV.
 
310
  return chunks
311
 
312
 
313
+ def _create_progress_monitor(progress_dict: Dict[int, Any], num_workers: int) -> Tuple[threading.Thread, threading.Event]:
314
  """
315
  Create a progress monitoring thread.
316
 
 
323
  """
324
  stop_monitor = threading.Event()
325
 
326
+ def monitor_progress() -> None:
327
  """Monitor and display progress from workers."""
328
  while not stop_monitor.is_set():
329
  _display_progress(progress_dict, num_workers)
 
333
  return monitor_thread, stop_monitor
334
 
335
 
336
+ def _display_progress(progress_dict: Dict[int, Any], num_workers: int) -> None:
337
  """
338
  Build and display current progress string.
339
 
 
386
  executor: ProcessPoolExecutor,
387
  chunks: List[Tuple[int, float, float]],
388
  config: ParallelProcessingConfig,
389
+ progress_dict: Dict[int, Any],
390
+ ) -> Dict[Future[ChunkResult], int]:
391
  """
392
  Submit all chunk processing jobs to the executor.
393
 
 
414
  return futures
415
 
416
 
417
+ def _collect_chunk_results(futures: Dict[Future[ChunkResult], int]) -> Dict[int, Optional[ChunkResult]]:
418
  """
419
  Collect results from all chunk futures as they complete.
420
 
src/pipeline/play_detector.py CHANGED
@@ -449,17 +449,21 @@ class PlayDetector:
449
  context: VideoContext,
450
  stats: Dict[str, Any],
451
  timing: Dict[str, float],
 
452
  ) -> DetectionResult:
453
  """
454
- Finalize detection: apply filtering and build result.
455
 
456
- Clock reset classification is now handled inline by TrackPlayState during
457
- the streaming pass, so we just need to merge/filter and build the result.
 
 
458
 
459
  Args:
460
  context: Video context
461
  stats: Processing stats
462
  timing: Timing breakdown
 
463
 
464
  Returns:
465
  Final DetectionResult
@@ -467,23 +471,39 @@ class PlayDetector:
467
  # Log timing breakdown
468
  self._log_timing_breakdown(timing)
469
 
470
- # Get plays from state machine (clock reset classification already done inline)
471
  state_machine_plays = self.state_machine.get_plays()
472
  play_stats = self.state_machine.get_stats()
473
 
474
- # Log clock reset stats from state machine
475
- clock_reset_stats = play_stats.get("clock_reset_events", {})
476
  logger.info(
477
- "Clock reset classification: %d total, %d weird (rejected), %d timeouts, %d special plays",
478
  clock_reset_stats.get("total", 0),
479
  clock_reset_stats.get("weird_clock", 0),
480
  clock_reset_stats.get("timeout", 0),
481
  clock_reset_stats.get("special", 0),
482
  )
483
 
484
- # Use PlayMerger to deduplicate and apply quiet time filter
485
- merger = PlayMerger()
486
- plays = merger.merge(state_machine_plays)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
487
 
488
  result = DetectionResult(
489
  video=Path(self.config.video_path).name,
@@ -518,6 +538,287 @@ class PlayDetector:
518
  logger.info(" TOTAL: %.2fs", total_time)
519
  logger.info("=" * 50)
520
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
521
  def detect(self) -> DetectionResult:
522
  """
523
  Run play detection on the video segment.
@@ -565,11 +866,11 @@ class PlayDetector:
565
 
566
  # Streaming detection pass: read frames + template match + state machine (all in one)
567
  # Uses threaded video I/O to overlap reading with processing
568
- # Note: Return value unused - state machine is updated inline during the pass
569
- _ = self._streaming_detection_pass(context, stats, timing)
570
 
571
- # Finalize: Clock reset classification and result building
572
- return self._finalize_detection(context, stats, timing)
573
 
574
  # pylint: disable=too-many-locals
575
  def detect_parallel(self, num_workers: int = 2, output_dir: Optional[Path] = None) -> DetectionResult:
@@ -706,8 +1007,8 @@ class PlayDetector:
706
  "frames_with_clock": stats["frames_with_clock"],
707
  }
708
 
709
- # Finalize: Clock reset classification and result building
710
- return self._finalize_detection(context, stats_dict, timing)
711
 
712
  def _log_detection_mode(self) -> None:
713
  """Log the detection mode being used."""
 
449
  context: VideoContext,
450
  stats: Dict[str, Any],
451
  timing: Dict[str, float],
452
+ frame_data: List[Dict[str, Any]],
453
  ) -> DetectionResult:
454
  """
455
+ Finalize detection: run post-hoc clock reset detection and build result.
456
 
457
+ Performs 3-class classification for 40→25 clock reset events:
458
+ - Class A (weird_clock): 25 counts down immediately rejected
459
+ - Class B (timeout): Timeout indicator changed → tracked as timeout
460
+ - Class C (special): Neither A nor B → special play (punt/FG/XP)
461
 
462
  Args:
463
  context: Video context
464
  stats: Processing stats
465
  timing: Timing breakdown
466
+ frame_data: List of frame data dicts with clock values and timeout counts
467
 
468
  Returns:
469
  Final DetectionResult
 
471
  # Log timing breakdown
472
  self._log_timing_breakdown(timing)
473
 
474
+ # Get plays from state machine (normal 40-second plays)
475
  state_machine_plays = self.state_machine.get_plays()
476
  play_stats = self.state_machine.get_stats()
477
 
478
+ # Run post-hoc clock reset detection (40→25 transitions)
479
+ clock_reset_plays, clock_reset_stats = self._detect_clock_resets(frame_data)
480
  logger.info(
481
+ "Clock reset detection: %d total, %d weird (rejected), %d timeouts, %d special plays",
482
  clock_reset_stats.get("total", 0),
483
  clock_reset_stats.get("weird_clock", 0),
484
  clock_reset_stats.get("timeout", 0),
485
  clock_reset_stats.get("special", 0),
486
  )
487
 
488
+ # Merge clock reset stats into play stats
489
+ play_stats["clock_reset_events"] = clock_reset_stats
490
+
491
+ # Merge state machine plays with clock reset plays (handles deduplication)
492
+ plays = self._merge_plays(state_machine_plays, clock_reset_plays)
493
+
494
+ # Recalculate stats from merged plays
495
+ start_methods = {}
496
+ end_methods = {}
497
+ play_types = {}
498
+ for play in plays:
499
+ start_methods[play.start_method] = start_methods.get(play.start_method, 0) + 1
500
+ end_methods[play.end_method] = end_methods.get(play.end_method, 0) + 1
501
+ play_types[play.play_type] = play_types.get(play.play_type, 0) + 1
502
+
503
+ play_stats["total_plays"] = len(plays)
504
+ play_stats["start_methods"] = start_methods
505
+ play_stats["end_methods"] = end_methods
506
+ play_stats["play_types"] = play_types
507
 
508
  result = DetectionResult(
509
  video=Path(self.config.video_path).name,
 
538
  logger.info(" TOTAL: %.2fs", total_time)
539
  logger.info("=" * 50)
540
 
541
+ # =========================================================================
542
+ # Clock Reset Detection (Post-hoc 40→25 analysis)
543
+ # =========================================================================
544
+
545
+ def _detect_clock_resets(self, frame_data: List[Dict[str, Any]]) -> Tuple[List[PlayEvent], Dict[str, int]]:
546
+ """
547
+ Detect and classify 40 -> 25 clock reset events.
548
+
549
+ Classification:
550
+ - Class A (weird_clock): 25 counts down immediately -> rejected
551
+ - Class B (timeout): Timeout indicator changed -> tracked as timeout
552
+ - Class C (special): Neither A nor B -> special play with extension
553
+
554
+ Args:
555
+ frame_data: List of frame data with clock values and timeout counts
556
+
557
+ Returns:
558
+ Tuple of (list of PlayEvent for valid clock resets, stats dict)
559
+ """
560
+ plays = []
561
+ stats = {"total": 0, "weird_clock": 0, "timeout": 0, "special": 0}
562
+
563
+ # Parameters
564
+ immediate_countdown_window = 2.0 # Seconds to check if 25 counts down
565
+ special_play_extension = 10.0 # Extension for Class C plays
566
+
567
+ prev_clock = None
568
+ saw_40_at = None
569
+
570
+ for i, frame in enumerate(frame_data):
571
+ clock_value = frame.get("clock_value")
572
+ timestamp = frame["timestamp"]
573
+
574
+ if clock_value is not None:
575
+ # Detect 40 -> 25 transition
576
+ if prev_clock == 40 and clock_value == 25:
577
+ stats["total"] += 1
578
+
579
+ # Check if 25 immediately counts down (Class A: weird clock)
580
+ is_immediate_countdown = self._check_immediate_countdown(frame_data, i, immediate_countdown_window)
581
+
582
+ # Check if timeout changed (Class B: team timeout)
583
+ timeout_team = self._check_timeout_change(frame_data, i)
584
+
585
+ if is_immediate_countdown:
586
+ # Class A: Weird clock behavior - reject
587
+ stats["weird_clock"] += 1
588
+ logger.debug("Clock reset at %.1fs: weird_clock (25 counts down immediately)", timestamp)
589
+ elif timeout_team:
590
+ # Class B: Team timeout - record but mark as timeout
591
+ stats["timeout"] += 1
592
+ play_end = self._find_clock_reset_play_end(frame_data, i, max_duration=15.0)
593
+ play = PlayEvent(
594
+ play_number=0,
595
+ start_time=timestamp,
596
+ end_time=play_end,
597
+ confidence=0.8,
598
+ start_method=f"timeout_{timeout_team}",
599
+ end_method="timeout_end",
600
+ direct_end_time=play_end,
601
+ start_clock_value=prev_clock,
602
+ end_clock_value=25,
603
+ play_type="timeout",
604
+ )
605
+ plays.append(play)
606
+ logger.debug("Clock reset at %.1fs: timeout (%s team)", timestamp, timeout_team)
607
+ else:
608
+ # Class C: Special play (injury/punt/FG/XP)
609
+ stats["special"] += 1
610
+ play_end = self._find_clock_reset_play_end(frame_data, i, max_duration=special_play_extension)
611
+ play_duration = play_end - timestamp
612
+ end_method = "max_duration" if play_duration >= special_play_extension - 0.1 else "scorebug_disappeared"
613
+ play = PlayEvent(
614
+ play_number=0,
615
+ start_time=timestamp,
616
+ end_time=play_end,
617
+ confidence=0.8,
618
+ start_method="clock_reset_special",
619
+ end_method=end_method,
620
+ direct_end_time=play_end,
621
+ start_clock_value=prev_clock,
622
+ end_clock_value=25,
623
+ play_type="special",
624
+ )
625
+ plays.append(play)
626
+ logger.debug("Clock reset at %.1fs: special play (%.1fs duration)", timestamp, play_end - timestamp)
627
+
628
+ # Track when 40 first appeared
629
+ if clock_value == 40 and prev_clock != 40:
630
+ saw_40_at = timestamp
631
+
632
+ prev_clock = clock_value
633
+
634
+ return plays, stats
635
+
636
+ def _check_immediate_countdown(self, frame_data: List[Dict[str, Any]], frame_idx: int, window: float) -> bool:
637
+ """Check if 25 immediately starts counting down (indicates weird clock behavior)."""
638
+ reset_timestamp = frame_data[frame_idx]["timestamp"]
639
+
640
+ for j in range(frame_idx + 1, len(frame_data)):
641
+ frame = frame_data[j]
642
+ elapsed = frame["timestamp"] - reset_timestamp
643
+ if elapsed > window:
644
+ break
645
+ clock_value = frame.get("clock_value")
646
+ if clock_value is not None and clock_value < 25:
647
+ return True # 25 counted down - weird clock
648
+
649
+ return False
650
+
651
+ def _check_timeout_change(self, frame_data: List[Dict[str, Any]], frame_idx: int) -> Optional[str]:
652
+ """Check if a timeout indicator changed around the reset."""
653
+ # Get timeout counts before reset
654
+ before_home = None
655
+ before_away = None
656
+
657
+ for j in range(frame_idx - 1, max(0, frame_idx - 20), -1):
658
+ frame = frame_data[j]
659
+ if frame.get("home_timeouts") is not None:
660
+ before_home = frame.get("home_timeouts", 3)
661
+ before_away = frame.get("away_timeouts", 3)
662
+ break
663
+
664
+ if before_home is None:
665
+ return None
666
+
667
+ # Look forward for timeout change (up to 15 seconds)
668
+ frame_interval = frame_data[1]["timestamp"] - frame_data[0]["timestamp"] if len(frame_data) > 1 else 0.5
669
+ max_frames_forward = int(15.0 / frame_interval) if frame_interval > 0 else 30
670
+
671
+ for j in range(frame_idx, min(len(frame_data), frame_idx + max_frames_forward)):
672
+ frame = frame_data[j]
673
+ if frame.get("home_timeouts") is not None:
674
+ after_home = frame.get("home_timeouts", 3)
675
+ after_away = frame.get("away_timeouts", 3)
676
+
677
+ if after_home < before_home:
678
+ return "home"
679
+ if after_away < before_away:
680
+ return "away"
681
+
682
+ return None
683
+
684
+ def _find_clock_reset_play_end(self, frame_data: List[Dict[str, Any]], frame_idx: int, max_duration: float) -> float:
685
+ """
686
+ Find the end time for a clock reset play (Class C special play).
687
+
688
+ The play ends when EITHER:
689
+ - Scorebug disappears (cut to commercial/replay)
690
+ - max_duration seconds have elapsed since play START
691
+
692
+ Args:
693
+ frame_data: Frame data list
694
+ frame_idx: Index of the frame where 40->25 reset occurred
695
+ max_duration: Maximum play duration from start
696
+
697
+ Returns:
698
+ Play end timestamp
699
+ """
700
+ start_timestamp = frame_data[frame_idx]["timestamp"]
701
+ max_end_time = start_timestamp + max_duration
702
+
703
+ # Look for scorebug disappearance (but cap at max_duration from start)
704
+ for j in range(frame_idx + 1, len(frame_data)):
705
+ frame = frame_data[j]
706
+ timestamp = frame["timestamp"]
707
+
708
+ # If we've exceeded max_duration, end the play at max_duration
709
+ if timestamp >= max_end_time:
710
+ return max_end_time
711
+
712
+ # Check for play clock disappearance
713
+ clock_available = frame.get("clock_detected", frame.get("scorebug_detected", False))
714
+ if not clock_available:
715
+ return timestamp
716
+
717
+ # Default: end at max_duration (or end of data if shorter)
718
+ return min(max_end_time, frame_data[-1]["timestamp"] if frame_data else max_end_time)
719
+
720
+ def _merge_plays(self, state_machine_plays: List[PlayEvent], clock_reset_plays: List[PlayEvent]) -> List[PlayEvent]:
721
+ """
722
+ Merge plays from state machine and clock reset detection, removing overlaps and duplicates.
723
+
724
+ Handles two types of duplicates:
725
+ 1. Overlapping plays (start_time < last.end_time)
726
+ 2. Close plays (start times within proximity_threshold) representing the same event
727
+
728
+ Args:
729
+ state_machine_plays: Plays from the state machine
730
+ clock_reset_plays: Plays from clock reset detection
731
+
732
+ Returns:
733
+ Merged list of plays sorted by start time
734
+ """
735
+ all_plays = list(state_machine_plays) + list(clock_reset_plays)
736
+ all_plays.sort(key=lambda p: p.start_time)
737
+
738
+ if not all_plays:
739
+ return []
740
+
741
+ # Proximity threshold: plays within this time are considered the same event
742
+ proximity_threshold = 5.0 # seconds
743
+
744
+ # Remove overlapping and close plays (keep state machine plays over clock reset plays)
745
+ filtered = [all_plays[0]]
746
+ for play in all_plays[1:]:
747
+ last = filtered[-1]
748
+
749
+ # Check for overlap OR proximity (both indicate same event)
750
+ is_overlapping = play.start_time < last.end_time
751
+ is_close = abs(play.start_time - last.start_time) < proximity_threshold
752
+
753
+ if is_overlapping or is_close:
754
+ # Same event detected twice - keep the better one
755
+ # Priority: normal > special > timeout (normal plays are most reliable)
756
+ type_priority = {"normal": 3, "special": 2, "timeout": 1}
757
+ last_priority = type_priority.get(last.play_type, 0)
758
+ play_priority = type_priority.get(play.play_type, 0)
759
+
760
+ if play_priority > last_priority:
761
+ filtered[-1] = play # Replace with higher priority play
762
+ elif play_priority == last_priority and play.confidence > last.confidence:
763
+ filtered[-1] = play # Same priority, but higher confidence
764
+ # else: keep existing
765
+ else:
766
+ filtered.append(play)
767
+
768
+ # Apply quiet time filter to remove false positives after normal plays
769
+ filtered = self._apply_quiet_time_filter(filtered)
770
+
771
+ # Renumber plays
772
+ for i, play in enumerate(filtered, 1):
773
+ play.play_number = i
774
+
775
+ return filtered
776
+
777
+ def _apply_quiet_time_filter(self, plays: List[PlayEvent], quiet_time: float = 10.0) -> List[PlayEvent]:
778
+ """
779
+ Apply quiet time filter after normal plays.
780
+
781
+ After a normal play ends, no new special/timeout plays can start for quiet_time seconds.
782
+ This filters out false positives from penalties during plays.
783
+
784
+ Args:
785
+ plays: List of plays sorted by start time
786
+ quiet_time: Seconds of quiet time after normal plays
787
+
788
+ Returns:
789
+ Filtered list of plays
790
+ """
791
+ if not plays:
792
+ return []
793
+
794
+ filtered = []
795
+ last_normal_end = -999.0 # Track when last normal play ended
796
+
797
+ for play in plays:
798
+ # Check if this play starts during quiet time after a normal play
799
+ if play.start_time < last_normal_end + quiet_time and play.play_type != "normal":
800
+ # This non-normal play starts during quiet time - filter it out
801
+ time_since_normal = play.start_time - last_normal_end
802
+ logger.debug(
803
+ "Quiet time filter: Removing %s play at %.1fs (%.1fs after normal play ended)",
804
+ play.play_type,
805
+ play.start_time,
806
+ time_since_normal,
807
+ )
808
+ continue
809
+
810
+ filtered.append(play)
811
+
812
+ # Update last normal play end time
813
+ if play.play_type == "normal":
814
+ last_normal_end = play.end_time
815
+
816
+ removed_count = len(plays) - len(filtered)
817
+ if removed_count > 0:
818
+ logger.info("Quiet time filter removed %d plays", removed_count)
819
+
820
+ return filtered
821
+
822
  def detect(self) -> DetectionResult:
823
  """
824
  Run play detection on the video segment.
 
866
 
867
  # Streaming detection pass: read frames + template match + state machine (all in one)
868
  # Uses threaded video I/O to overlap reading with processing
869
+ # Returns frame_data needed for post-hoc clock reset detection
870
+ frame_data = self._streaming_detection_pass(context, stats, timing)
871
 
872
+ # Finalize: Post-hoc clock reset classification (Class A/B/C) and result building
873
+ return self._finalize_detection(context, stats, timing, frame_data)
874
 
875
  # pylint: disable=too-many-locals
876
  def detect_parallel(self, num_workers: int = 2, output_dir: Optional[Path] = None) -> DetectionResult:
 
1007
  "frames_with_clock": stats["frames_with_clock"],
1008
  }
1009
 
1010
+ # Finalize: Post-hoc clock reset classification (Class A/B/C) and result building
1011
+ return self._finalize_detection(context, stats_dict, timing, frame_data)
1012
 
1013
  def _log_detection_mode(self) -> None:
1014
  """Log the detection mode being used."""
src/readers/playclock.py CHANGED
@@ -19,7 +19,7 @@ from typing import List, Tuple
19
  import cv2
20
  import numpy as np
21
 
22
- from setup import DigitTemplateLibrary
23
  from utils import extract_center_region, extract_far_left_region, extract_left_region, extract_right_region, preprocess_playclock_region
24
  from .models import PlayClockReading, TemplateMatchResult, TemplatePlayClockReading
25
 
@@ -81,7 +81,7 @@ class ReadPlayClock:
81
  """
82
  return preprocess_playclock_region(region, self.scale_factor)
83
 
84
- def match_digit(self, region: np.ndarray, templates: List) -> TemplateMatchResult:
85
  """
86
  Match a region against a set of digit templates.
87
 
 
19
  import cv2
20
  import numpy as np
21
 
22
+ from setup import DigitTemplate, DigitTemplateLibrary
23
  from utils import extract_center_region, extract_far_left_region, extract_left_region, extract_right_region, preprocess_playclock_region
24
  from .models import PlayClockReading, TemplateMatchResult, TemplatePlayClockReading
25
 
 
81
  """
82
  return preprocess_playclock_region(region, self.scale_factor)
83
 
84
+ def match_digit(self, region: np.ndarray, templates: List[DigitTemplate]) -> TemplateMatchResult:
85
  """
86
  Match a region against a set of digit templates.
87
 
src/setup/template_builder.py CHANGED
@@ -9,7 +9,7 @@ Region extraction and preprocessing utilities are shared from utils to eliminate
9
  """
10
 
11
  import logging
12
- from typing import Dict, List, Optional, Tuple
13
 
14
  import cv2
15
  import numpy as np
@@ -296,7 +296,7 @@ class DigitTemplateBuilder:
296
 
297
  return library
298
 
299
- def get_coverage_status(self) -> Dict[str, any]:
300
  """Get current sample coverage status."""
301
  # Get keys for samples that have at least one entry
302
  keys_with_samples = [key for key, samples in self.samples.items() if len(samples) >= 1]
 
9
  """
10
 
11
  import logging
12
+ from typing import Any, Dict, List, Optional, Tuple
13
 
14
  import cv2
15
  import numpy as np
 
296
 
297
  return library
298
 
299
+ def get_coverage_status(self) -> Dict[str, Any]:
300
  """Get current sample coverage status."""
301
  # Get keys for samples that have at least one entry
302
  keys_with_samples = [key for key, samples in self.samples.items() if len(samples) >= 1]
src/setup/template_library.py CHANGED
@@ -37,7 +37,7 @@ class DigitTemplateLibrary:
37
  TENS_DIGITS = [-1, 1, 2, 3, 4] # -1 = blank, 1-4 for 10-40
38
  POSITIONS = ["left", "center", "right"]
39
 
40
- def __init__(self):
41
  """Initialize empty template library."""
42
  # Templates: {(is_tens, digit_value, position): DigitTemplate}
43
  self.templates: Dict[Tuple[bool, int, str], DigitTemplate] = {}
@@ -107,20 +107,20 @@ class DigitTemplateLibrary:
107
  status = calculate_coverage_status(ones_center_have, ones_right_have, tens_have, has_blank, total_items=len(self.templates))
108
 
109
  # Convert -1 to "blank" for display
110
- def format_tens(digits):
111
  return sorted(["blank" if d == -1 else d for d in digits], key=lambda x: (isinstance(x, str), x))
112
 
113
  # Add legacy fields for backward compatibility
114
  status["ones_have"] = sorted(ones_center_have | ones_right_have)
115
  status["ones_missing"] = sorted((ONES_DIGITS - ones_center_have) & (ONES_DIGITS - ones_right_have))
116
  status["tens_have_formatted"] = format_tens(tens_have | ({-1} if has_blank else set()))
117
- status["tens_missing_formatted"] = format_tens((status["tens_missing"]) | (set() if has_blank else {-1}))
118
 
119
  return status
120
 
121
  def is_complete(self) -> bool:
122
  """Check if all required templates are present."""
123
- return self.get_coverage_status()["is_complete"]
124
 
125
  def save(self, output_path: str) -> None:
126
  """
@@ -132,7 +132,7 @@ class DigitTemplateLibrary:
132
  output_dir = Path(output_path)
133
  output_dir.mkdir(parents=True, exist_ok=True)
134
 
135
- metadata = {"templates": [], "version": 2} # Version 2 includes position
136
 
137
  for (is_tens, digit, position), template in self.templates.items():
138
  # Use "blank" instead of -1 for the empty tens digit in filenames
@@ -140,7 +140,7 @@ class DigitTemplateLibrary:
140
  position_suffix = f"_{position}" if position != "left" or not is_tens else ""
141
  filename = f"{'tens' if is_tens else 'ones'}_{digit_str}{position_suffix}.png"
142
  cv2.imwrite(str(output_dir / filename), template.template)
143
- metadata["templates"].append(
144
  {
145
  "filename": filename,
146
  "digit_value": digit_str, # Use "blank" for display
@@ -151,6 +151,8 @@ class DigitTemplateLibrary:
151
  }
152
  )
153
 
 
 
154
  with open(output_dir / "templates_metadata.json", "w", encoding="utf-8") as f:
155
  json.dump(metadata, f, indent=2)
156
 
 
37
  TENS_DIGITS = [-1, 1, 2, 3, 4] # -1 = blank, 1-4 for 10-40
38
  POSITIONS = ["left", "center", "right"]
39
 
40
+ def __init__(self) -> None:
41
  """Initialize empty template library."""
42
  # Templates: {(is_tens, digit_value, position): DigitTemplate}
43
  self.templates: Dict[Tuple[bool, int, str], DigitTemplate] = {}
 
107
  status = calculate_coverage_status(ones_center_have, ones_right_have, tens_have, has_blank, total_items=len(self.templates))
108
 
109
  # Convert -1 to "blank" for display
110
+ def format_tens(digits: set[int]) -> list[str | int]:
111
  return sorted(["blank" if d == -1 else d for d in digits], key=lambda x: (isinstance(x, str), x))
112
 
113
  # Add legacy fields for backward compatibility
114
  status["ones_have"] = sorted(ones_center_have | ones_right_have)
115
  status["ones_missing"] = sorted((ONES_DIGITS - ones_center_have) & (ONES_DIGITS - ones_right_have))
116
  status["tens_have_formatted"] = format_tens(tens_have | ({-1} if has_blank else set()))
117
+ status["tens_missing_formatted"] = format_tens(set(status["tens_missing"]) | (set() if has_blank else {-1}))
118
 
119
  return status
120
 
121
  def is_complete(self) -> bool:
122
  """Check if all required templates are present."""
123
+ return bool(self.get_coverage_status()["is_complete"])
124
 
125
  def save(self, output_path: str) -> None:
126
  """
 
132
  output_dir = Path(output_path)
133
  output_dir.mkdir(parents=True, exist_ok=True)
134
 
135
+ templates_list: list[dict[str, object]] = []
136
 
137
  for (is_tens, digit, position), template in self.templates.items():
138
  # Use "blank" instead of -1 for the empty tens digit in filenames
 
140
  position_suffix = f"_{position}" if position != "left" or not is_tens else ""
141
  filename = f"{'tens' if is_tens else 'ones'}_{digit_str}{position_suffix}.png"
142
  cv2.imwrite(str(output_dir / filename), template.template)
143
+ templates_list.append(
144
  {
145
  "filename": filename,
146
  "digit_value": digit_str, # Use "blank" for display
 
151
  }
152
  )
153
 
154
+ metadata = {"templates": templates_list, "version": 2} # Version 2 includes position
155
+
156
  with open(output_dir / "templates_metadata.json", "w", encoding="utf-8") as f:
157
  json.dump(metadata, f, indent=2)
158
 
src/tracking/play_state.py CHANGED
@@ -315,7 +315,7 @@ class TrackPlayState:
315
  self._start_play(timestamp, "clock_reset_special", 40)
316
 
317
  # Play tracking is started, not completed - no PlayEvent to return
318
- # (implicitly returns None)
319
 
320
  def _check_timeout_change(self, timeout_info: Optional[TimeoutInfo] = None) -> Optional[str]:
321
  """
@@ -373,6 +373,8 @@ class TrackPlayState:
373
 
374
  def _check_play_timeout(self, timestamp: float, clock_value: int) -> Optional[PlayEvent]:
375
  """Check if play duration has exceeded maximum allowed time."""
 
 
376
  play_duration = timestamp - self._state.current_play_start_time
377
  if play_duration > self.config.max_play_duration:
378
  # Cap the end time at start + max_duration to avoid inflated durations
@@ -702,7 +704,7 @@ class TrackPlayState:
702
  """Get current state."""
703
  return self._state.state
704
 
705
- def get_stats(self) -> dict:
706
  """Get statistics about detected plays."""
707
  if not self._state.plays:
708
  return {"total_plays": 0, "clock_reset_events": self._state.clock_reset_stats.model_dump()}
 
315
  self._start_play(timestamp, "clock_reset_special", 40)
316
 
317
  # Play tracking is started, not completed - no PlayEvent to return
318
+ return None
319
 
320
  def _check_timeout_change(self, timeout_info: Optional[TimeoutInfo] = None) -> Optional[str]:
321
  """
 
373
 
374
  def _check_play_timeout(self, timestamp: float, clock_value: int) -> Optional[PlayEvent]:
375
  """Check if play duration has exceeded maximum allowed time."""
376
+ if self._state.current_play_start_time is None:
377
+ return None # No play in progress
378
  play_duration = timestamp - self._state.current_play_start_time
379
  if play_duration > self.config.max_play_duration:
380
  # Cap the end time at start + max_duration to avoid inflated durations
 
704
  """Get current state."""
705
  return self._state.state
706
 
707
+ def get_stats(self) -> dict[str, object]:
708
  """Get statistics about detected plays."""
709
  if not self._state.plays:
710
  return {"total_plays": 0, "clock_reset_events": self._state.clock_reset_stats.model_dump()}
src/ui/api.py CHANGED
@@ -19,7 +19,7 @@ from .sessions import PlayClockSelectionSession, ScorebugSelectionSession, Timeo
19
  logger = logging.getLogger(__name__)
20
 
21
 
22
- def print_banner():
23
  """Print the welcome banner for CFB40."""
24
  print("\n" + "=" * 60)
25
  print(" CFB40 - College Football Play Detection Pipeline")
@@ -31,7 +31,7 @@ def extract_sample_frames_for_selection(video_path: str, start_time: float, num_
31
  return extract_sample_frames(video_path, start_time, num_frames, interval)
32
 
33
 
34
- def select_scorebug_region(frames: List[Tuple[float, np.ndarray]], video_path: str = None) -> Tuple[Optional[Tuple[int, int, int, int]], Optional[Tuple[float, np.ndarray]]]:
35
  """
36
  Interactive selection of scorebug region.
37
 
 
19
  logger = logging.getLogger(__name__)
20
 
21
 
22
+ def print_banner() -> None:
23
  """Print the welcome banner for CFB40."""
24
  print("\n" + "=" * 60)
25
  print(" CFB40 - College Football Play Detection Pipeline")
 
31
  return extract_sample_frames(video_path, start_time, num_frames, interval)
32
 
33
 
34
+ def select_scorebug_region(frames: List[Tuple[float, np.ndarray]], video_path: str | None = None) -> Tuple[Optional[Tuple[int, int, int, int]], Optional[Tuple[float, np.ndarray]]]:
35
  """
36
  Interactive selection of scorebug region.
37
 
src/ui/selector.py CHANGED
@@ -41,7 +41,7 @@ class RegionSelector:
41
 
42
  self.selection_complete = False
43
 
44
- def mouse_callback(self, event, x, y, _flags, _param):
45
  """Handle mouse events for region selection."""
46
  if self.mode == "two_click":
47
  if event == cv2.EVENT_MOUSEMOVE:
@@ -79,7 +79,7 @@ class RegionSelector:
79
  return None
80
  return bbox
81
 
82
- def reset(self):
83
  """Reset the selection state."""
84
  self.points = []
85
  self.current_point = None
 
41
 
42
  self.selection_complete = False
43
 
44
+ def mouse_callback(self, event: int, x: int, y: int, _flags: int, _param: object) -> None:
45
  """Handle mouse events for region selection."""
46
  if self.mode == "two_click":
47
  if event == cv2.EVENT_MOUSEMOVE:
 
79
  return None
80
  return bbox
81
 
82
+ def reset(self) -> None:
83
  """Reset the selection state."""
84
  self.points = []
85
  self.current_point = None
src/utils/color.py CHANGED
@@ -41,7 +41,7 @@ def detect_red_digits(region: np.ndarray) -> bool:
41
 
42
  # Red digits: high red channel, very low green/blue, red > 2x green
43
  max_gb = max(g_mean, b_mean)
44
- is_red = r_mean > 15 > max_gb and r_mean > g_mean * 2
45
 
46
  if is_red:
47
  logger.debug("Red digits detected: R=%.1f, G=%.1f, B=%.1f", r_mean, g_mean, b_mean)
 
41
 
42
  # Red digits: high red channel, very low green/blue, red > 2x green
43
  max_gb = max(g_mean, b_mean)
44
+ is_red = bool(r_mean > 15 > max_gb and r_mean > g_mean * 2)
45
 
46
  if is_red:
47
  logger.debug("Red digits detected: R=%.1f, G=%.1f, B=%.1f", r_mean, g_mean, b_mean)
src/video/ffmpeg_ops.py CHANGED
@@ -146,7 +146,7 @@ def extract_clip_reencode(
146
  return (False, error_msg)
147
 
148
 
149
- def _extract_clip_for_parallel(args: Tuple[int, Dict, str, Path, float]) -> Tuple[int, Path, bool, str]:
150
  """
151
  Extract a single clip using stream copy (for parallel execution).
152
 
@@ -273,7 +273,7 @@ def _generate_clips_reencode(
273
  return clip_paths
274
 
275
 
276
- def concatenate_clips(clip_paths: List[Path], output_path: Path, working_dir: Path = None) -> Tuple[bool, str]:
277
  """
278
  Concatenate multiple video clips into a single video.
279
 
 
146
  return (False, error_msg)
147
 
148
 
149
+ def _extract_clip_for_parallel(args: Tuple[int, Dict[str, Any], str, Path, float]) -> Tuple[int, Path, bool, str]:
150
  """
151
  Extract a single clip using stream copy (for parallel execution).
152
 
 
273
  return clip_paths
274
 
275
 
276
+ def concatenate_clips(clip_paths: List[Path], output_path: Path, working_dir: Path | None = None) -> Tuple[bool, str]:
277
  """
278
  Concatenate multiple video clips into a single video.
279
 
src/video/frame_extractor.py CHANGED
@@ -7,7 +7,7 @@ getting video metadata, and other video-related utilities.
7
 
8
  import logging
9
  import subprocess
10
- from typing import List, Optional, Tuple
11
 
12
  import cv2
13
  import numpy as np
@@ -161,7 +161,7 @@ def extract_frames_with_callback(
161
  start_time: float,
162
  end_time: Optional[float],
163
  interval: float,
164
- callback,
165
  ) -> int:
166
  """
167
  Extract frames from a video and call a callback function for each frame.
 
7
 
8
  import logging
9
  import subprocess
10
+ from typing import Callable, List, Optional, Tuple
11
 
12
  import cv2
13
  import numpy as np
 
161
  start_time: float,
162
  end_time: Optional[float],
163
  interval: float,
164
+ callback: Callable[[float, np.ndarray], bool],
165
  ) -> int:
166
  """
167
  Extract frames from a video and call a callback function for each frame.