Spaces:
Sleeping
Sleeping
Commit Β·
8abcfef
1
Parent(s): 719b8f7
clarifying detect meaning
Browse files- main.py +7 -7
- src/pipeline/__init__.py +6 -6
- src/pipeline/orchestrator.py +17 -17
- src/pipeline/{play_detector.py β play_extractor.py} +57 -334
- src/tracking/__init__.py +3 -0
- src/tracking/clock_reset_identifier.py +243 -0
main.py
CHANGED
|
@@ -58,7 +58,7 @@ from ui import (
|
|
| 58 |
)
|
| 59 |
from ui.api import extract_sample_frames_for_selection
|
| 60 |
from video import generate_clips
|
| 61 |
-
from pipeline import
|
| 62 |
|
| 63 |
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
| 64 |
logger = logging.getLogger(__name__)
|
|
@@ -368,19 +368,19 @@ def _print_region_summary(config: SessionConfig) -> None:
|
|
| 368 |
# =============================================================================
|
| 369 |
|
| 370 |
|
| 371 |
-
def
|
| 372 |
"""
|
| 373 |
-
Phase 3: Run play
|
| 374 |
|
| 375 |
Args:
|
| 376 |
session_config: Configuration with video path and regions.
|
| 377 |
num_workers: Number of parallel workers.
|
| 378 |
|
| 379 |
Returns:
|
| 380 |
-
|
| 381 |
"""
|
| 382 |
print("\n" + "=" * 60)
|
| 383 |
-
return
|
| 384 |
|
| 385 |
|
| 386 |
# =============================================================================
|
|
@@ -492,8 +492,8 @@ def main() -> int:
|
|
| 492 |
if session_config is None:
|
| 493 |
return 1
|
| 494 |
|
| 495 |
-
# Phase 3:
|
| 496 |
-
results =
|
| 497 |
|
| 498 |
# Phase 4: Clip Generation
|
| 499 |
clip_timing = {}
|
|
|
|
| 58 |
)
|
| 59 |
from ui.api import extract_sample_frames_for_selection
|
| 60 |
from video import generate_clips
|
| 61 |
+
from pipeline import run_extraction, print_results_summary
|
| 62 |
|
| 63 |
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
|
| 64 |
logger = logging.getLogger(__name__)
|
|
|
|
| 368 |
# =============================================================================
|
| 369 |
|
| 370 |
|
| 371 |
+
def _phase3_extraction(session_config: SessionConfig, num_workers: int) -> Dict[str, Any]:
|
| 372 |
"""
|
| 373 |
+
Phase 3: Run play extraction on the video.
|
| 374 |
|
| 375 |
Args:
|
| 376 |
session_config: Configuration with video path and regions.
|
| 377 |
num_workers: Number of parallel workers.
|
| 378 |
|
| 379 |
Returns:
|
| 380 |
+
Extraction results dictionary.
|
| 381 |
"""
|
| 382 |
print("\n" + "=" * 60)
|
| 383 |
+
return run_extraction(session_config, OUTPUT_DIR, num_workers=num_workers)
|
| 384 |
|
| 385 |
|
| 386 |
# =============================================================================
|
|
|
|
| 492 |
if session_config is None:
|
| 493 |
return 1
|
| 494 |
|
| 495 |
+
# Phase 3: Extraction
|
| 496 |
+
results = _phase3_extraction(session_config, args.parallel)
|
| 497 |
|
| 498 |
# Phase 4: Clip Generation
|
| 499 |
clip_timing = {}
|
src/pipeline/__init__.py
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
"""Pipeline modules for video processing and
|
| 2 |
|
| 3 |
Note: OCR-based clock reading has been removed in favor of template matching.
|
| 4 |
Streaming processing is used for optimal performance.
|
|
@@ -13,8 +13,8 @@ from .models import (
|
|
| 13 |
)
|
| 14 |
|
| 15 |
# Pipeline classes and functions
|
| 16 |
-
from .
|
| 17 |
-
from .orchestrator import
|
| 18 |
from .template_builder_pass import TemplateBuildingPass
|
| 19 |
|
| 20 |
__all__ = [
|
|
@@ -23,9 +23,9 @@ __all__ = [
|
|
| 23 |
"DetectionResult",
|
| 24 |
"VideoContext",
|
| 25 |
# Pipeline
|
| 26 |
-
"
|
| 27 |
-
"
|
| 28 |
-
"
|
| 29 |
"print_results_summary",
|
| 30 |
"TemplateBuildingPass",
|
| 31 |
]
|
|
|
|
| 1 |
+
"""Pipeline modules for video processing and play extraction orchestration.
|
| 2 |
|
| 3 |
Note: OCR-based clock reading has been removed in favor of template matching.
|
| 4 |
Streaming processing is used for optimal performance.
|
|
|
|
| 13 |
)
|
| 14 |
|
| 15 |
# Pipeline classes and functions
|
| 16 |
+
from .play_extractor import PlayExtractor, format_extraction_result_dict
|
| 17 |
+
from .orchestrator import run_extraction, print_results_summary
|
| 18 |
from .template_builder_pass import TemplateBuildingPass
|
| 19 |
|
| 20 |
__all__ = [
|
|
|
|
| 23 |
"DetectionResult",
|
| 24 |
"VideoContext",
|
| 25 |
# Pipeline
|
| 26 |
+
"PlayExtractor",
|
| 27 |
+
"format_extraction_result_dict",
|
| 28 |
+
"run_extraction",
|
| 29 |
"print_results_summary",
|
| 30 |
"TemplateBuildingPass",
|
| 31 |
]
|
src/pipeline/orchestrator.py
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
"""
|
| 2 |
-
Pipeline orchestration for play
|
| 3 |
|
| 4 |
-
This module provides the high-level functions for running play
|
| 5 |
including result filtering and summary printing.
|
| 6 |
|
| 7 |
Supports both sequential and parallel processing modes:
|
|
@@ -16,14 +16,14 @@ from typing import Any, Dict
|
|
| 16 |
from config import SessionConfig, MIN_PLAY_DURATION
|
| 17 |
from detection import DetectTimeouts
|
| 18 |
from pipeline import DetectionConfig
|
| 19 |
-
from pipeline.
|
| 20 |
|
| 21 |
logger = logging.getLogger(__name__)
|
| 22 |
|
| 23 |
|
| 24 |
-
def
|
| 25 |
"""
|
| 26 |
-
Run play
|
| 27 |
|
| 28 |
Uses 3-class classification for 40->25 clock resets:
|
| 29 |
- Class A (weird_clock): 25 counts down immediately -> rejected
|
|
@@ -36,18 +36,18 @@ def run_detection(config: SessionConfig, output_dir: Path, num_workers: int = 1)
|
|
| 36 |
num_workers: Number of parallel workers (1=sequential, 2+=parallel).
|
| 37 |
|
| 38 |
Returns:
|
| 39 |
-
|
| 40 |
- video: Video path
|
| 41 |
- segment: Start/end times
|
| 42 |
- processing: Frame processing stats
|
| 43 |
- timing: Timing breakdown
|
| 44 |
-
- plays: List of
|
| 45 |
- stats: Play statistics
|
| 46 |
"""
|
| 47 |
if num_workers > 1:
|
| 48 |
-
print(f"\n[Phase 3] Running
|
| 49 |
else:
|
| 50 |
-
print("\n[Phase 3] Running
|
| 51 |
print("-" * 50)
|
| 52 |
|
| 53 |
basename = config.video_basename
|
|
@@ -87,14 +87,14 @@ def run_detection(config: SessionConfig, output_dir: Path, num_workers: int = 1)
|
|
| 87 |
else:
|
| 88 |
logger.info("No timeout tracker config found - clock reset classification will be limited")
|
| 89 |
|
| 90 |
-
# Initialize
|
| 91 |
-
|
| 92 |
|
| 93 |
-
# Run
|
| 94 |
if num_workers > 1:
|
| 95 |
-
result =
|
| 96 |
else:
|
| 97 |
-
result =
|
| 98 |
|
| 99 |
# Filter out plays shorter than minimum duration (e.g., clock operator errors)
|
| 100 |
original_play_count = len(result.plays)
|
|
@@ -153,10 +153,10 @@ def run_detection(config: SessionConfig, output_dir: Path, num_workers: int = 1)
|
|
| 153 |
|
| 154 |
# Save results with video-specific name
|
| 155 |
results_path = output_dir / f"{basename}_plays.json"
|
| 156 |
-
|
| 157 |
|
| 158 |
# Convert to dictionary for return
|
| 159 |
-
return
|
| 160 |
|
| 161 |
|
| 162 |
def print_results_summary(
|
|
@@ -171,7 +171,7 @@ def print_results_summary(
|
|
| 171 |
Print the final results summary.
|
| 172 |
|
| 173 |
Args:
|
| 174 |
-
results:
|
| 175 |
testing_mode: Whether running in testing mode.
|
| 176 |
clip_timing: Timing information from clip generation.
|
| 177 |
video_basename: Base name for output files.
|
|
|
|
| 1 |
"""
|
| 2 |
+
Pipeline orchestration for play extraction.
|
| 3 |
|
| 4 |
+
This module provides the high-level functions for running play extraction,
|
| 5 |
including result filtering and summary printing.
|
| 6 |
|
| 7 |
Supports both sequential and parallel processing modes:
|
|
|
|
| 16 |
from config import SessionConfig, MIN_PLAY_DURATION
|
| 17 |
from detection import DetectTimeouts
|
| 18 |
from pipeline import DetectionConfig
|
| 19 |
+
from pipeline.play_extractor import PlayExtractor, format_extraction_result_dict
|
| 20 |
|
| 21 |
logger = logging.getLogger(__name__)
|
| 22 |
|
| 23 |
|
| 24 |
+
def run_extraction(config: SessionConfig, output_dir: Path, num_workers: int = 1) -> Dict[str, Any]:
|
| 25 |
"""
|
| 26 |
+
Run play extraction using the configured regions.
|
| 27 |
|
| 28 |
Uses 3-class classification for 40->25 clock resets:
|
| 29 |
- Class A (weird_clock): 25 counts down immediately -> rejected
|
|
|
|
| 36 |
num_workers: Number of parallel workers (1=sequential, 2+=parallel).
|
| 37 |
|
| 38 |
Returns:
|
| 39 |
+
Extraction results dictionary with keys:
|
| 40 |
- video: Video path
|
| 41 |
- segment: Start/end times
|
| 42 |
- processing: Frame processing stats
|
| 43 |
- timing: Timing breakdown
|
| 44 |
+
- plays: List of extracted plays
|
| 45 |
- stats: Play statistics
|
| 46 |
"""
|
| 47 |
if num_workers > 1:
|
| 48 |
+
print(f"\n[Phase 3] Running Extraction (parallel: {num_workers} workers)...")
|
| 49 |
else:
|
| 50 |
+
print("\n[Phase 3] Running Extraction...")
|
| 51 |
print("-" * 50)
|
| 52 |
|
| 53 |
basename = config.video_basename
|
|
|
|
| 87 |
else:
|
| 88 |
logger.info("No timeout tracker config found - clock reset classification will be limited")
|
| 89 |
|
| 90 |
+
# Initialize extractor - fixed coordinates mode is configured via DetectionConfig
|
| 91 |
+
extractor = PlayExtractor(detection_config, timeout_tracker=timeout_tracker)
|
| 92 |
|
| 93 |
+
# Run extraction - parallel or sequential based on num_workers
|
| 94 |
if num_workers > 1:
|
| 95 |
+
result = extractor.extract_parallel(num_workers=num_workers, output_dir=output_dir)
|
| 96 |
else:
|
| 97 |
+
result = extractor.extract()
|
| 98 |
|
| 99 |
# Filter out plays shorter than minimum duration (e.g., clock operator errors)
|
| 100 |
original_play_count = len(result.plays)
|
|
|
|
| 153 |
|
| 154 |
# Save results with video-specific name
|
| 155 |
results_path = output_dir / f"{basename}_plays.json"
|
| 156 |
+
extractor.save_results(result, str(results_path))
|
| 157 |
|
| 158 |
# Convert to dictionary for return
|
| 159 |
+
return format_extraction_result_dict(result)
|
| 160 |
|
| 161 |
|
| 162 |
def print_results_summary(
|
|
|
|
| 171 |
Print the final results summary.
|
| 172 |
|
| 173 |
Args:
|
| 174 |
+
results: Extraction results dictionary from run_extraction().
|
| 175 |
testing_mode: Whether running in testing mode.
|
| 176 |
clip_timing: Timing information from clip generation.
|
| 177 |
video_basename: Base name for output files.
|
src/pipeline/{play_detector.py β play_extractor.py}
RENAMED
|
@@ -1,11 +1,12 @@
|
|
| 1 |
"""
|
| 2 |
-
Play
|
| 3 |
|
| 4 |
-
This module orchestrates the complete play
|
| 5 |
1. Video frame extraction
|
| 6 |
2. Scorebug detection
|
| 7 |
3. Play clock reading via template matching
|
| 8 |
4. Play state machine processing
|
|
|
|
| 9 |
|
| 10 |
Performance optimizations:
|
| 11 |
- Streaming processing: read frame -> process immediately (no intermediate storage)
|
|
@@ -28,7 +29,7 @@ import numpy as np
|
|
| 28 |
from detection import DetectScoreBug, ScorebugDetection, DetectTimeouts
|
| 29 |
from readers import ReadPlayClock, PlayClockReading
|
| 30 |
from setup import DigitTemplateBuilder, DigitTemplateLibrary, PlayClockRegionConfig, PlayClockRegionExtractor
|
| 31 |
-
from tracking import TrackPlayState, PlayEvent, PlayMerger, TimeoutInfo
|
| 32 |
from utils import create_frame_result
|
| 33 |
from video import ThreadedFrameReader
|
| 34 |
from .models import DetectionConfig, DetectionResult, ParallelProcessingConfig, VideoContext
|
|
@@ -38,7 +39,7 @@ from .template_builder_pass import TemplateBuildingPass
|
|
| 38 |
logger = logging.getLogger(__name__)
|
| 39 |
|
| 40 |
|
| 41 |
-
def
|
| 42 |
"""
|
| 43 |
Format a DetectionResult into a dictionary for JSON serialization or API return.
|
| 44 |
|
|
@@ -62,20 +63,21 @@ def format_detection_result_dict(result: DetectionResult) -> Dict[str, Any]:
|
|
| 62 |
}
|
| 63 |
|
| 64 |
|
| 65 |
-
class
|
| 66 |
"""
|
| 67 |
-
Main pipeline for
|
| 68 |
|
| 69 |
-
This class orchestrates all
|
| 70 |
- DetectScoreBug: Locates scorebug in frames
|
| 71 |
- ReadPlayClock: Reads play clock digits via template matching
|
| 72 |
- TrackPlayState: Determines play boundaries
|
| 73 |
- DetectTimeouts: Tracks timeout indicators for 3-class clock reset classification
|
|
|
|
| 74 |
"""
|
| 75 |
|
| 76 |
def __init__(self, config: DetectionConfig, timeout_tracker: Optional[DetectTimeouts] = None):
|
| 77 |
"""
|
| 78 |
-
Initialize the play
|
| 79 |
|
| 80 |
Args:
|
| 81 |
config: Detection configuration
|
|
@@ -111,11 +113,11 @@ class PlayDetector:
|
|
| 111 |
raise FileNotFoundError(f"Clock region config not found: {self.config.clock_region_config_path}")
|
| 112 |
|
| 113 |
def _initialize_components(self) -> None:
|
| 114 |
-
"""Initialize
|
| 115 |
-
logger.info("Initializing play
|
| 116 |
|
| 117 |
# Determine if we're using fixed coordinates mode
|
| 118 |
-
# In this mode, we still use the same
|
| 119 |
use_fixed_coords = self.config.fixed_playclock_coords is not None
|
| 120 |
|
| 121 |
if use_fixed_coords:
|
|
@@ -176,7 +178,7 @@ class PlayDetector:
|
|
| 176 |
self.template_reader = ReadPlayClock(self.template_library, region_w, region_h)
|
| 177 |
else:
|
| 178 |
self.template_library = None
|
| 179 |
-
logger.info("Could not load templates, will build during
|
| 180 |
|
| 181 |
# Initialize template builder for collection phase if no templates loaded
|
| 182 |
if self.template_library is None:
|
|
@@ -262,9 +264,9 @@ class PlayDetector:
|
|
| 262 |
|
| 263 |
return self.template_library is not None
|
| 264 |
|
| 265 |
-
def
|
| 266 |
"""
|
| 267 |
-
Streaming
|
| 268 |
|
| 269 |
This combines the old Pass 1 (frame extraction) and Pass 2 (template matching) into
|
| 270 |
a single streaming pass. Each frame is:
|
|
@@ -284,7 +286,7 @@ class PlayDetector:
|
|
| 284 |
Returns:
|
| 285 |
List of frame data dictionaries with all processing results
|
| 286 |
"""
|
| 287 |
-
logger.info("Streaming
|
| 288 |
|
| 289 |
logger.info(
|
| 290 |
"Threaded reading: frame_skip=%d (%.2f fps effective), frames %d-%d",
|
|
@@ -337,7 +339,7 @@ class PlayDetector:
|
|
| 337 |
# Progress logging
|
| 338 |
if stats["total_frames"] % progress_interval == 0:
|
| 339 |
progress_pct = 100 * (current_time - context.start_time) / (context.end_time - context.start_time)
|
| 340 |
-
logger.info("
|
| 341 |
|
| 342 |
finally:
|
| 343 |
# Stop the reader thread and get I/O timing
|
|
@@ -346,7 +348,7 @@ class PlayDetector:
|
|
| 346 |
context.cap.release()
|
| 347 |
|
| 348 |
logger.info(
|
| 349 |
-
"Streaming
|
| 350 |
stats["total_frames"],
|
| 351 |
stats["frames_with_scorebug"],
|
| 352 |
stats["frames_with_clock"],
|
|
@@ -446,7 +448,7 @@ class PlayDetector:
|
|
| 446 |
|
| 447 |
return frame_result
|
| 448 |
|
| 449 |
-
def
|
| 450 |
self,
|
| 451 |
context: VideoContext,
|
| 452 |
stats: Dict[str, Any],
|
|
@@ -454,9 +456,9 @@ class PlayDetector:
|
|
| 454 |
frame_data: List[Dict[str, Any]],
|
| 455 |
) -> DetectionResult:
|
| 456 |
"""
|
| 457 |
-
Finalize
|
| 458 |
|
| 459 |
-
|
| 460 |
- Class A (weird_clock): 25 counts down immediately β rejected
|
| 461 |
- Class B (timeout): Timeout indicator changed β tracked as timeout
|
| 462 |
- Class C (special): Neither A nor B β special play (punt/FG/XP)
|
|
@@ -477,10 +479,11 @@ class PlayDetector:
|
|
| 477 |
state_machine_plays = self.state_machine.get_plays()
|
| 478 |
play_stats = self.state_machine.get_stats()
|
| 479 |
|
| 480 |
-
# Run post-hoc clock reset
|
| 481 |
-
|
|
|
|
| 482 |
logger.info(
|
| 483 |
-
"Clock reset
|
| 484 |
clock_reset_stats.get("total", 0),
|
| 485 |
clock_reset_stats.get("weird_clock", 0),
|
| 486 |
clock_reset_stats.get("timeout", 0),
|
|
@@ -490,8 +493,9 @@ class PlayDetector:
|
|
| 490 |
# Merge clock reset stats into play stats
|
| 491 |
play_stats["clock_reset_events"] = clock_reset_stats
|
| 492 |
|
| 493 |
-
# Merge state machine plays with clock reset plays
|
| 494 |
-
|
|
|
|
| 495 |
|
| 496 |
# Recalculate stats from merged plays
|
| 497 |
start_methods: Dict[str, int] = {}
|
|
@@ -520,16 +524,16 @@ class PlayDetector:
|
|
| 520 |
)
|
| 521 |
|
| 522 |
# Log final summary
|
| 523 |
-
logger.info("
|
| 524 |
logger.info("Processed %d frames", stats["total_frames"])
|
| 525 |
logger.info("Frames with scorebug: %d (%.1f%%)", stats["frames_with_scorebug"], 100 * stats["frames_with_scorebug"] / max(1, stats["total_frames"]))
|
| 526 |
logger.info("Frames with clock: %d (%.1f%%)", stats["frames_with_clock"], 100 * stats["frames_with_clock"] / max(1, stats["total_frames"]))
|
| 527 |
-
logger.info("Plays
|
| 528 |
|
| 529 |
return result
|
| 530 |
|
| 531 |
def _log_timing_breakdown(self, timing: Dict[str, float]) -> None:
|
| 532 |
-
"""Log the timing breakdown for the
|
| 533 |
total_time = sum(timing.values())
|
| 534 |
logger.info("=" * 50)
|
| 535 |
logger.info("TIMING BREAKDOWN")
|
|
@@ -540,304 +544,23 @@ class PlayDetector:
|
|
| 540 |
logger.info(" TOTAL: %.2fs", total_time)
|
| 541 |
logger.info("=" * 50)
|
| 542 |
|
| 543 |
-
|
| 544 |
-
# Clock Reset Detection (Post-hoc 40β25 analysis)
|
| 545 |
-
# =========================================================================
|
| 546 |
-
|
| 547 |
-
def _detect_clock_resets(self, frame_data: List[Dict[str, Any]]) -> Tuple[List[PlayEvent], Dict[str, int]]:
|
| 548 |
-
"""
|
| 549 |
-
Detect and classify 40 -> 25 clock reset events.
|
| 550 |
-
|
| 551 |
-
Classification:
|
| 552 |
-
- Class A (weird_clock): 25 counts down immediately -> rejected
|
| 553 |
-
- Class B (timeout): Timeout indicator changed -> tracked as timeout
|
| 554 |
-
- Class C (special): Neither A nor B -> special play with extension
|
| 555 |
-
|
| 556 |
-
Args:
|
| 557 |
-
frame_data: List of frame data with clock values and timeout counts
|
| 558 |
-
|
| 559 |
-
Returns:
|
| 560 |
-
Tuple of (list of PlayEvent for valid clock resets, stats dict)
|
| 561 |
-
"""
|
| 562 |
-
plays = []
|
| 563 |
-
stats = {"total": 0, "weird_clock": 0, "timeout": 0, "special": 0}
|
| 564 |
-
|
| 565 |
-
# Parameters
|
| 566 |
-
immediate_countdown_window = 2.0 # Seconds to check if 25 counts down
|
| 567 |
-
special_play_extension = 10.0 # Extension for Class C plays
|
| 568 |
-
|
| 569 |
-
prev_clock = None
|
| 570 |
-
saw_40_at = None
|
| 571 |
-
|
| 572 |
-
for i, frame in enumerate(frame_data):
|
| 573 |
-
clock_value = frame.get("clock_value")
|
| 574 |
-
timestamp = frame["timestamp"]
|
| 575 |
-
|
| 576 |
-
if clock_value is not None:
|
| 577 |
-
# Detect 40 -> 25 transition
|
| 578 |
-
if prev_clock == 40 and clock_value == 25:
|
| 579 |
-
stats["total"] += 1
|
| 580 |
-
|
| 581 |
-
# Check if 25 immediately counts down (Class A: weird clock)
|
| 582 |
-
is_immediate_countdown = self._check_immediate_countdown(frame_data, i, immediate_countdown_window)
|
| 583 |
-
|
| 584 |
-
# Check if timeout changed (Class B: team timeout)
|
| 585 |
-
timeout_team = self._check_timeout_change(frame_data, i)
|
| 586 |
-
|
| 587 |
-
if is_immediate_countdown:
|
| 588 |
-
# Class A: Weird clock behavior - reject
|
| 589 |
-
stats["weird_clock"] += 1
|
| 590 |
-
logger.debug("Clock reset at %.1fs: weird_clock (25 counts down immediately)", timestamp)
|
| 591 |
-
elif timeout_team:
|
| 592 |
-
# Class B: Team timeout - record but mark as timeout
|
| 593 |
-
stats["timeout"] += 1
|
| 594 |
-
play_end = self._find_clock_reset_play_end(frame_data, i, max_duration=15.0)
|
| 595 |
-
play = PlayEvent(
|
| 596 |
-
play_number=0,
|
| 597 |
-
start_time=timestamp,
|
| 598 |
-
end_time=play_end,
|
| 599 |
-
confidence=0.8,
|
| 600 |
-
start_method=f"timeout_{timeout_team}",
|
| 601 |
-
end_method="timeout_end",
|
| 602 |
-
direct_end_time=play_end,
|
| 603 |
-
start_clock_value=prev_clock,
|
| 604 |
-
end_clock_value=25,
|
| 605 |
-
play_type="timeout",
|
| 606 |
-
)
|
| 607 |
-
plays.append(play)
|
| 608 |
-
logger.debug("Clock reset at %.1fs: timeout (%s team)", timestamp, timeout_team)
|
| 609 |
-
else:
|
| 610 |
-
# Class C: Special play (injury/punt/FG/XP)
|
| 611 |
-
stats["special"] += 1
|
| 612 |
-
play_end = self._find_clock_reset_play_end(frame_data, i, max_duration=special_play_extension)
|
| 613 |
-
play_duration = play_end - timestamp
|
| 614 |
-
end_method = "max_duration" if play_duration >= special_play_extension - 0.1 else "scorebug_disappeared"
|
| 615 |
-
play = PlayEvent(
|
| 616 |
-
play_number=0,
|
| 617 |
-
start_time=timestamp,
|
| 618 |
-
end_time=play_end,
|
| 619 |
-
confidence=0.8,
|
| 620 |
-
start_method="clock_reset_special",
|
| 621 |
-
end_method=end_method,
|
| 622 |
-
direct_end_time=play_end,
|
| 623 |
-
start_clock_value=prev_clock,
|
| 624 |
-
end_clock_value=25,
|
| 625 |
-
play_type="special",
|
| 626 |
-
)
|
| 627 |
-
plays.append(play)
|
| 628 |
-
logger.debug("Clock reset at %.1fs: special play (%.1fs duration)", timestamp, play_end - timestamp)
|
| 629 |
-
|
| 630 |
-
# Track when 40 first appeared
|
| 631 |
-
if clock_value == 40 and prev_clock != 40:
|
| 632 |
-
saw_40_at = timestamp
|
| 633 |
-
|
| 634 |
-
prev_clock = clock_value
|
| 635 |
-
|
| 636 |
-
return plays, stats
|
| 637 |
-
|
| 638 |
-
def _check_immediate_countdown(self, frame_data: List[Dict[str, Any]], frame_idx: int, window: float) -> bool:
|
| 639 |
-
"""Check if 25 immediately starts counting down (indicates weird clock behavior)."""
|
| 640 |
-
reset_timestamp = frame_data[frame_idx]["timestamp"]
|
| 641 |
-
|
| 642 |
-
for j in range(frame_idx + 1, len(frame_data)):
|
| 643 |
-
frame = frame_data[j]
|
| 644 |
-
elapsed = frame["timestamp"] - reset_timestamp
|
| 645 |
-
if elapsed > window:
|
| 646 |
-
break
|
| 647 |
-
clock_value = frame.get("clock_value")
|
| 648 |
-
if clock_value is not None and clock_value < 25:
|
| 649 |
-
return True # 25 counted down - weird clock
|
| 650 |
-
|
| 651 |
-
return False
|
| 652 |
-
|
| 653 |
-
def _check_timeout_change(self, frame_data: List[Dict[str, Any]], frame_idx: int) -> Optional[str]:
|
| 654 |
-
"""Check if a timeout indicator changed around the reset."""
|
| 655 |
-
# Get timeout counts before reset
|
| 656 |
-
before_home = None
|
| 657 |
-
before_away = None
|
| 658 |
-
|
| 659 |
-
for j in range(frame_idx - 1, max(0, frame_idx - 20), -1):
|
| 660 |
-
frame = frame_data[j]
|
| 661 |
-
if frame.get("home_timeouts") is not None:
|
| 662 |
-
before_home = frame.get("home_timeouts", 3)
|
| 663 |
-
before_away = frame.get("away_timeouts", 3)
|
| 664 |
-
break
|
| 665 |
-
|
| 666 |
-
if before_home is None:
|
| 667 |
-
return None
|
| 668 |
-
|
| 669 |
-
# Look forward for timeout change (up to 15 seconds)
|
| 670 |
-
frame_interval = frame_data[1]["timestamp"] - frame_data[0]["timestamp"] if len(frame_data) > 1 else 0.5
|
| 671 |
-
max_frames_forward = int(15.0 / frame_interval) if frame_interval > 0 else 30
|
| 672 |
-
|
| 673 |
-
for j in range(frame_idx, min(len(frame_data), frame_idx + max_frames_forward)):
|
| 674 |
-
frame = frame_data[j]
|
| 675 |
-
if frame.get("home_timeouts") is not None:
|
| 676 |
-
after_home = frame.get("home_timeouts", 3)
|
| 677 |
-
after_away = frame.get("away_timeouts", 3)
|
| 678 |
-
|
| 679 |
-
if after_home < before_home:
|
| 680 |
-
return "home"
|
| 681 |
-
if after_away < before_away:
|
| 682 |
-
return "away"
|
| 683 |
-
|
| 684 |
-
return None
|
| 685 |
-
|
| 686 |
-
def _find_clock_reset_play_end(self, frame_data: List[Dict[str, Any]], frame_idx: int, max_duration: float) -> float:
|
| 687 |
-
"""
|
| 688 |
-
Find the end time for a clock reset play (Class C special play).
|
| 689 |
-
|
| 690 |
-
The play ends when EITHER:
|
| 691 |
-
- Scorebug disappears (cut to commercial/replay)
|
| 692 |
-
- max_duration seconds have elapsed since play START
|
| 693 |
-
|
| 694 |
-
Args:
|
| 695 |
-
frame_data: Frame data list
|
| 696 |
-
frame_idx: Index of the frame where 40->25 reset occurred
|
| 697 |
-
max_duration: Maximum play duration from start
|
| 698 |
-
|
| 699 |
-
Returns:
|
| 700 |
-
Play end timestamp
|
| 701 |
-
"""
|
| 702 |
-
start_timestamp: float = frame_data[frame_idx]["timestamp"]
|
| 703 |
-
max_end_time = start_timestamp + max_duration
|
| 704 |
-
|
| 705 |
-
# Look for scorebug disappearance (but cap at max_duration from start)
|
| 706 |
-
for j in range(frame_idx + 1, len(frame_data)):
|
| 707 |
-
frame = frame_data[j]
|
| 708 |
-
timestamp: float = frame["timestamp"]
|
| 709 |
-
|
| 710 |
-
# If we've exceeded max_duration, end the play at max_duration
|
| 711 |
-
if timestamp >= max_end_time:
|
| 712 |
-
return max_end_time
|
| 713 |
-
|
| 714 |
-
# Check for play clock disappearance
|
| 715 |
-
clock_available = frame.get("clock_detected", frame.get("scorebug_detected", False))
|
| 716 |
-
if not clock_available:
|
| 717 |
-
return timestamp
|
| 718 |
-
|
| 719 |
-
# Default: end at max_duration (or end of data if shorter)
|
| 720 |
-
return min(max_end_time, float(frame_data[-1]["timestamp"]) if frame_data else max_end_time)
|
| 721 |
-
|
| 722 |
-
def _merge_plays(self, state_machine_plays: List[PlayEvent], clock_reset_plays: List[PlayEvent]) -> List[PlayEvent]:
|
| 723 |
-
"""
|
| 724 |
-
Merge plays from state machine and clock reset detection, removing overlaps and duplicates.
|
| 725 |
-
|
| 726 |
-
Handles two types of duplicates:
|
| 727 |
-
1. Overlapping plays (start_time < last.end_time)
|
| 728 |
-
2. Close plays (start times within proximity_threshold) representing the same event
|
| 729 |
-
|
| 730 |
-
Args:
|
| 731 |
-
state_machine_plays: Plays from the state machine
|
| 732 |
-
clock_reset_plays: Plays from clock reset detection
|
| 733 |
-
|
| 734 |
-
Returns:
|
| 735 |
-
Merged list of plays sorted by start time
|
| 736 |
-
"""
|
| 737 |
-
all_plays = list(state_machine_plays) + list(clock_reset_plays)
|
| 738 |
-
all_plays.sort(key=lambda p: p.start_time)
|
| 739 |
-
|
| 740 |
-
if not all_plays:
|
| 741 |
-
return []
|
| 742 |
-
|
| 743 |
-
# Proximity threshold: plays within this time are considered the same event
|
| 744 |
-
proximity_threshold = 5.0 # seconds
|
| 745 |
-
|
| 746 |
-
# Remove overlapping and close plays (keep state machine plays over clock reset plays)
|
| 747 |
-
filtered = [all_plays[0]]
|
| 748 |
-
for play in all_plays[1:]:
|
| 749 |
-
last = filtered[-1]
|
| 750 |
-
|
| 751 |
-
# Check for overlap OR proximity (both indicate same event)
|
| 752 |
-
is_overlapping = play.start_time < last.end_time
|
| 753 |
-
is_close = abs(play.start_time - last.start_time) < proximity_threshold
|
| 754 |
-
|
| 755 |
-
if is_overlapping or is_close:
|
| 756 |
-
# Same event detected twice - keep the better one
|
| 757 |
-
# Priority: normal > special > timeout (normal plays are most reliable)
|
| 758 |
-
type_priority = {"normal": 3, "special": 2, "timeout": 1}
|
| 759 |
-
last_priority = type_priority.get(last.play_type, 0)
|
| 760 |
-
play_priority = type_priority.get(play.play_type, 0)
|
| 761 |
-
|
| 762 |
-
if play_priority > last_priority:
|
| 763 |
-
filtered[-1] = play # Replace with higher priority play
|
| 764 |
-
elif play_priority == last_priority and play.confidence > last.confidence:
|
| 765 |
-
filtered[-1] = play # Same priority, but higher confidence
|
| 766 |
-
# else: keep existing
|
| 767 |
-
else:
|
| 768 |
-
filtered.append(play)
|
| 769 |
-
|
| 770 |
-
# Apply quiet time filter to remove false positives after normal plays
|
| 771 |
-
filtered = self._apply_quiet_time_filter(filtered)
|
| 772 |
-
|
| 773 |
-
# Renumber plays
|
| 774 |
-
for i, play in enumerate(filtered, 1):
|
| 775 |
-
play.play_number = i
|
| 776 |
-
|
| 777 |
-
return filtered
|
| 778 |
-
|
| 779 |
-
def _apply_quiet_time_filter(self, plays: List[PlayEvent], quiet_time: float = 10.0) -> List[PlayEvent]:
|
| 780 |
-
"""
|
| 781 |
-
Apply quiet time filter after normal plays.
|
| 782 |
-
|
| 783 |
-
After a normal play ends, no new special/timeout plays can start for quiet_time seconds.
|
| 784 |
-
This filters out false positives from penalties during plays.
|
| 785 |
-
|
| 786 |
-
Args:
|
| 787 |
-
plays: List of plays sorted by start time
|
| 788 |
-
quiet_time: Seconds of quiet time after normal plays
|
| 789 |
-
|
| 790 |
-
Returns:
|
| 791 |
-
Filtered list of plays
|
| 792 |
-
"""
|
| 793 |
-
if not plays:
|
| 794 |
-
return []
|
| 795 |
-
|
| 796 |
-
filtered = []
|
| 797 |
-
last_normal_end = -999.0 # Track when last normal play ended
|
| 798 |
-
|
| 799 |
-
for play in plays:
|
| 800 |
-
# Check if this play starts during quiet time after a normal play
|
| 801 |
-
if play.start_time < last_normal_end + quiet_time and play.play_type != "normal":
|
| 802 |
-
# This non-normal play starts during quiet time - filter it out
|
| 803 |
-
time_since_normal = play.start_time - last_normal_end
|
| 804 |
-
logger.debug(
|
| 805 |
-
"Quiet time filter: Removing %s play at %.1fs (%.1fs after normal play ended)",
|
| 806 |
-
play.play_type,
|
| 807 |
-
play.start_time,
|
| 808 |
-
time_since_normal,
|
| 809 |
-
)
|
| 810 |
-
continue
|
| 811 |
-
|
| 812 |
-
filtered.append(play)
|
| 813 |
-
|
| 814 |
-
# Update last normal play end time
|
| 815 |
-
if play.play_type == "normal":
|
| 816 |
-
last_normal_end = play.end_time
|
| 817 |
-
|
| 818 |
-
removed_count = len(plays) - len(filtered)
|
| 819 |
-
if removed_count > 0:
|
| 820 |
-
logger.info("Quiet time filter removed %d plays", removed_count)
|
| 821 |
-
|
| 822 |
-
return filtered
|
| 823 |
-
|
| 824 |
-
def detect(self) -> DetectionResult:
|
| 825 |
"""
|
| 826 |
-
Run play
|
| 827 |
|
| 828 |
Uses streaming processing for optimal performance:
|
| 829 |
- Pass 0 (if needed): Build digit templates using OCR on scorebug-verified frames
|
| 830 |
- Streaming pass: Read frame -> extract region -> template match -> state machine update
|
| 831 |
(threaded video I/O overlaps reading with processing)
|
| 832 |
-
- Finalize: Clock reset
|
| 833 |
|
| 834 |
When fixed coordinates are provided, the scorebug detection step simply verifies
|
| 835 |
the scorebug is present at the known location (faster than searching).
|
| 836 |
|
| 837 |
Returns:
|
| 838 |
-
DetectionResult with all
|
| 839 |
"""
|
| 840 |
-
logger.info("Starting play
|
| 841 |
logger.info("Video: %s", self.config.video_path)
|
| 842 |
logger.info("Segment: %.1fs to %s", self.config.start_time, self.config.end_time or "end")
|
| 843 |
|
|
@@ -858,26 +581,26 @@ class PlayDetector:
|
|
| 858 |
if not self.template_reader and self.template_builder:
|
| 859 |
success = self._pass0_build_templates_with_real_detection(timing)
|
| 860 |
if not success:
|
| 861 |
-
logger.warning("Pass 0 failed to build templates,
|
| 862 |
|
| 863 |
# Log mode info (after Pass 0 so we can show if templates were built)
|
| 864 |
-
self.
|
| 865 |
|
| 866 |
# Initialize video and get processing context
|
| 867 |
context, stats, _ = self._open_video_and_get_context()
|
| 868 |
|
| 869 |
-
# Streaming
|
| 870 |
# Uses threaded video I/O to overlap reading with processing
|
| 871 |
-
# Returns frame_data needed for post-hoc clock reset
|
| 872 |
-
frame_data = self.
|
| 873 |
|
| 874 |
-
# Finalize: Post-hoc clock reset
|
| 875 |
-
return self.
|
| 876 |
|
| 877 |
# pylint: disable=too-many-locals
|
| 878 |
-
def
|
| 879 |
"""
|
| 880 |
-
Run play
|
| 881 |
|
| 882 |
This provides ~26% speedup over sequential processing by using multiple
|
| 883 |
processes to read and process different segments of the video simultaneously.
|
|
@@ -887,16 +610,16 @@ class PlayDetector:
|
|
| 887 |
2. Save templates to disk for worker processes to load
|
| 888 |
3. Parallel pass: Each worker processes a video chunk independently
|
| 889 |
4. Merge: Combine frame data from all chunks in chronological order
|
| 890 |
-
5. State machine: Process merged data to
|
| 891 |
|
| 892 |
Args:
|
| 893 |
num_workers: Number of parallel workers (default 2).
|
| 894 |
output_dir: Output directory for templates (required).
|
| 895 |
|
| 896 |
Returns:
|
| 897 |
-
DetectionResult with all
|
| 898 |
"""
|
| 899 |
-
logger.info("Starting parallel play
|
| 900 |
logger.info("Video: %s", self.config.video_path)
|
| 901 |
logger.info("Segment: %.1fs to %s", self.config.start_time, self.config.end_time or "end")
|
| 902 |
|
|
@@ -914,7 +637,7 @@ class PlayDetector:
|
|
| 914 |
if not self.template_reader and self.template_builder:
|
| 915 |
success = self._pass0_build_templates_with_real_detection(timing)
|
| 916 |
if not success:
|
| 917 |
-
logger.warning("Pass 0 failed to build templates,
|
| 918 |
|
| 919 |
# Save templates to disk for worker processes
|
| 920 |
template_path = None
|
|
@@ -1012,11 +735,11 @@ class PlayDetector:
|
|
| 1012 |
"frames_with_clock": stats["frames_with_clock"],
|
| 1013 |
}
|
| 1014 |
|
| 1015 |
-
# Finalize: Post-hoc clock reset
|
| 1016 |
-
return self.
|
| 1017 |
|
| 1018 |
-
def
|
| 1019 |
-
"""Log the
|
| 1020 |
use_fixed_region = self.scorebug_detector and self.scorebug_detector.is_fixed_region_mode
|
| 1021 |
|
| 1022 |
if use_fixed_region:
|
|
@@ -1050,16 +773,16 @@ class PlayDetector:
|
|
| 1050 |
|
| 1051 |
def save_results(self, result: DetectionResult, output_path: str) -> None:
|
| 1052 |
"""
|
| 1053 |
-
Save
|
| 1054 |
|
| 1055 |
Args:
|
| 1056 |
-
result:
|
| 1057 |
output_path: Path to output file
|
| 1058 |
"""
|
| 1059 |
output = Path(output_path)
|
| 1060 |
output.parent.mkdir(parents=True, exist_ok=True)
|
| 1061 |
|
| 1062 |
-
data =
|
| 1063 |
|
| 1064 |
# Include configuration if provided (for reproducibility)
|
| 1065 |
if result.config:
|
|
|
|
| 1 |
"""
|
| 2 |
+
Play extractor pipeline module.
|
| 3 |
|
| 4 |
+
This module orchestrates the complete play extraction pipeline:
|
| 5 |
1. Video frame extraction
|
| 6 |
2. Scorebug detection
|
| 7 |
3. Play clock reading via template matching
|
| 8 |
4. Play state machine processing
|
| 9 |
+
5. Post-hoc clock reset identification (timeout/special plays)
|
| 10 |
|
| 11 |
Performance optimizations:
|
| 12 |
- Streaming processing: read frame -> process immediately (no intermediate storage)
|
|
|
|
| 29 |
from detection import DetectScoreBug, ScorebugDetection, DetectTimeouts
|
| 30 |
from readers import ReadPlayClock, PlayClockReading
|
| 31 |
from setup import DigitTemplateBuilder, DigitTemplateLibrary, PlayClockRegionConfig, PlayClockRegionExtractor
|
| 32 |
+
from tracking import TrackPlayState, PlayEvent, PlayMerger, TimeoutInfo, ClockResetIdentifier
|
| 33 |
from utils import create_frame_result
|
| 34 |
from video import ThreadedFrameReader
|
| 35 |
from .models import DetectionConfig, DetectionResult, ParallelProcessingConfig, VideoContext
|
|
|
|
| 39 |
logger = logging.getLogger(__name__)
|
| 40 |
|
| 41 |
|
| 42 |
+
def format_extraction_result_dict(result: DetectionResult) -> Dict[str, Any]:
|
| 43 |
"""
|
| 44 |
Format a DetectionResult into a dictionary for JSON serialization or API return.
|
| 45 |
|
|
|
|
| 63 |
}
|
| 64 |
|
| 65 |
|
| 66 |
+
class PlayExtractor:
|
| 67 |
"""
|
| 68 |
+
Main pipeline for extracting plays from video.
|
| 69 |
|
| 70 |
+
This class orchestrates all extraction components:
|
| 71 |
- DetectScoreBug: Locates scorebug in frames
|
| 72 |
- ReadPlayClock: Reads play clock digits via template matching
|
| 73 |
- TrackPlayState: Determines play boundaries
|
| 74 |
- DetectTimeouts: Tracks timeout indicators for 3-class clock reset classification
|
| 75 |
+
- ClockResetIdentifier: Post-hoc identification of timeout/special plays
|
| 76 |
"""
|
| 77 |
|
| 78 |
def __init__(self, config: DetectionConfig, timeout_tracker: Optional[DetectTimeouts] = None):
|
| 79 |
"""
|
| 80 |
+
Initialize the play extractor pipeline.
|
| 81 |
|
| 82 |
Args:
|
| 83 |
config: Detection configuration
|
|
|
|
| 113 |
raise FileNotFoundError(f"Clock region config not found: {self.config.clock_region_config_path}")
|
| 114 |
|
| 115 |
def _initialize_components(self) -> None:
|
| 116 |
+
"""Initialize extraction components."""
|
| 117 |
+
logger.info("Initializing play extractor components...")
|
| 118 |
|
| 119 |
# Determine if we're using fixed coordinates mode
|
| 120 |
+
# In this mode, we still use the same logic but with pre-set regions
|
| 121 |
use_fixed_coords = self.config.fixed_playclock_coords is not None
|
| 122 |
|
| 123 |
if use_fixed_coords:
|
|
|
|
| 178 |
self.template_reader = ReadPlayClock(self.template_library, region_w, region_h)
|
| 179 |
else:
|
| 180 |
self.template_library = None
|
| 181 |
+
logger.info("Could not load templates, will build during extraction")
|
| 182 |
|
| 183 |
# Initialize template builder for collection phase if no templates loaded
|
| 184 |
if self.template_library is None:
|
|
|
|
| 264 |
|
| 265 |
return self.template_library is not None
|
| 266 |
|
| 267 |
+
def _streaming_extraction_pass(self, context: VideoContext, stats: Dict[str, Any], timing: Dict[str, float]) -> List[Dict[str, Any]]:
|
| 268 |
"""
|
| 269 |
+
Streaming extraction pass: Read frames, process immediately, no intermediate storage.
|
| 270 |
|
| 271 |
This combines the old Pass 1 (frame extraction) and Pass 2 (template matching) into
|
| 272 |
a single streaming pass. Each frame is:
|
|
|
|
| 286 |
Returns:
|
| 287 |
List of frame data dictionaries with all processing results
|
| 288 |
"""
|
| 289 |
+
logger.info("Streaming extraction pass: frame extraction + template matching...")
|
| 290 |
|
| 291 |
logger.info(
|
| 292 |
"Threaded reading: frame_skip=%d (%.2f fps effective), frames %d-%d",
|
|
|
|
| 339 |
# Progress logging
|
| 340 |
if stats["total_frames"] % progress_interval == 0:
|
| 341 |
progress_pct = 100 * (current_time - context.start_time) / (context.end_time - context.start_time)
|
| 342 |
+
logger.info("Extraction progress: %.1fs / %.1fs (%.0f%%)", current_time, context.end_time, progress_pct)
|
| 343 |
|
| 344 |
finally:
|
| 345 |
# Stop the reader thread and get I/O timing
|
|
|
|
| 348 |
context.cap.release()
|
| 349 |
|
| 350 |
logger.info(
|
| 351 |
+
"Streaming extraction complete: %d frames processed, %d with scorebug, %d with clock",
|
| 352 |
stats["total_frames"],
|
| 353 |
stats["frames_with_scorebug"],
|
| 354 |
stats["frames_with_clock"],
|
|
|
|
| 448 |
|
| 449 |
return frame_result
|
| 450 |
|
| 451 |
+
def _finalize_extraction(
|
| 452 |
self,
|
| 453 |
context: VideoContext,
|
| 454 |
stats: Dict[str, Any],
|
|
|
|
| 456 |
frame_data: List[Dict[str, Any]],
|
| 457 |
) -> DetectionResult:
|
| 458 |
"""
|
| 459 |
+
Finalize extraction: run post-hoc clock reset identification and build result.
|
| 460 |
|
| 461 |
+
Uses ClockResetIdentifier for 3-class classification of 40→25 clock reset events:
|
| 462 |
- Class A (weird_clock): 25 counts down immediately → rejected
|
| 463 |
- Class B (timeout): Timeout indicator changed → tracked as timeout
|
| 464 |
- Class C (special): Neither A nor B → special play (punt/FG/XP)
|
|
|
|
| 479 |
state_machine_plays = self.state_machine.get_plays()
|
| 480 |
play_stats = self.state_machine.get_stats()
|
| 481 |
|
| 482 |
+
# Run post-hoc clock reset identification (40→25 transitions)
|
| 483 |
+
clock_reset_identifier = ClockResetIdentifier()
|
| 484 |
+
clock_reset_plays, clock_reset_stats = clock_reset_identifier.identify(frame_data)
|
| 485 |
logger.info(
|
| 486 |
+
"Clock reset identification: %d total, %d weird (rejected), %d timeouts, %d special plays",
|
| 487 |
clock_reset_stats.get("total", 0),
|
| 488 |
clock_reset_stats.get("weird_clock", 0),
|
| 489 |
clock_reset_stats.get("timeout", 0),
|
|
|
|
| 493 |
# Merge clock reset stats into play stats
|
| 494 |
play_stats["clock_reset_events"] = clock_reset_stats
|
| 495 |
|
| 496 |
+
# Merge state machine plays with clock reset plays using PlayMerger
|
| 497 |
+
merger = PlayMerger()
|
| 498 |
+
plays = merger.merge(state_machine_plays, clock_reset_plays)
|
| 499 |
|
| 500 |
# Recalculate stats from merged plays
|
| 501 |
start_methods: Dict[str, int] = {}
|
|
|
|
| 524 |
)
|
| 525 |
|
| 526 |
# Log final summary
|
| 527 |
+
logger.info("Extraction complete!")
|
| 528 |
logger.info("Processed %d frames", stats["total_frames"])
|
| 529 |
logger.info("Frames with scorebug: %d (%.1f%%)", stats["frames_with_scorebug"], 100 * stats["frames_with_scorebug"] / max(1, stats["total_frames"]))
|
| 530 |
logger.info("Frames with clock: %d (%.1f%%)", stats["frames_with_clock"], 100 * stats["frames_with_clock"] / max(1, stats["total_frames"]))
|
| 531 |
+
logger.info("Plays extracted: %d", len(plays))
|
| 532 |
|
| 533 |
return result
|
| 534 |
|
| 535 |
def _log_timing_breakdown(self, timing: Dict[str, float]) -> None:
|
| 536 |
+
"""Log the timing breakdown for the extraction run."""
|
| 537 |
total_time = sum(timing.values())
|
| 538 |
logger.info("=" * 50)
|
| 539 |
logger.info("TIMING BREAKDOWN")
|
|
|
|
| 544 |
logger.info(" TOTAL: %.2fs", total_time)
|
| 545 |
logger.info("=" * 50)
|
| 546 |
|
| 547 |
+
def extract(self) -> DetectionResult:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 548 |
"""
|
| 549 |
+
Run play extraction on the video segment.
|
| 550 |
|
| 551 |
Uses streaming processing for optimal performance:
|
| 552 |
- Pass 0 (if needed): Build digit templates using OCR on scorebug-verified frames
|
| 553 |
- Streaming pass: Read frame -> extract region -> template match -> state machine update
|
| 554 |
(threaded video I/O overlaps reading with processing)
|
| 555 |
+
- Finalize: Clock reset identification and result building
|
| 556 |
|
| 557 |
When fixed coordinates are provided, the scorebug detection step simply verifies
|
| 558 |
the scorebug is present at the known location (faster than searching).
|
| 559 |
|
| 560 |
Returns:
|
| 561 |
+
DetectionResult with all extracted plays
|
| 562 |
"""
|
| 563 |
+
logger.info("Starting play extraction...")
|
| 564 |
logger.info("Video: %s", self.config.video_path)
|
| 565 |
logger.info("Segment: %.1fs to %s", self.config.start_time, self.config.end_time or "end")
|
| 566 |
|
|
|
|
| 581 |
if not self.template_reader and self.template_builder:
|
| 582 |
success = self._pass0_build_templates_with_real_detection(timing)
|
| 583 |
if not success:
|
| 584 |
+
logger.warning("Pass 0 failed to build templates, extraction may fail or be inaccurate")
|
| 585 |
|
| 586 |
# Log mode info (after Pass 0 so we can show if templates were built)
|
| 587 |
+
self._log_extraction_mode()
|
| 588 |
|
| 589 |
# Initialize video and get processing context
|
| 590 |
context, stats, _ = self._open_video_and_get_context()
|
| 591 |
|
| 592 |
+
# Streaming extraction pass: read frames + template match + state machine (all in one)
|
| 593 |
# Uses threaded video I/O to overlap reading with processing
|
| 594 |
+
# Returns frame_data needed for post-hoc clock reset identification
|
| 595 |
+
frame_data = self._streaming_extraction_pass(context, stats, timing)
|
| 596 |
|
| 597 |
+
# Finalize: Post-hoc clock reset identification (Class A/B/C) and result building
|
| 598 |
+
return self._finalize_extraction(context, stats, timing, frame_data)
|
| 599 |
|
| 600 |
# pylint: disable=too-many-locals
|
| 601 |
+
def extract_parallel(self, num_workers: int = 2, output_dir: Optional[Path] = None) -> DetectionResult:
|
| 602 |
"""
|
| 603 |
+
Run play extraction using parallel chunk processing.
|
| 604 |
|
| 605 |
This provides ~26% speedup over sequential processing by using multiple
|
| 606 |
processes to read and process different segments of the video simultaneously.
|
|
|
|
| 610 |
2. Save templates to disk for worker processes to load
|
| 611 |
3. Parallel pass: Each worker processes a video chunk independently
|
| 612 |
4. Merge: Combine frame data from all chunks in chronological order
|
| 613 |
+
5. State machine: Process merged data to extract plays
|
| 614 |
|
| 615 |
Args:
|
| 616 |
num_workers: Number of parallel workers (default 2).
|
| 617 |
output_dir: Output directory for templates (required).
|
| 618 |
|
| 619 |
Returns:
|
| 620 |
+
DetectionResult with all extracted plays
|
| 621 |
"""
|
| 622 |
+
logger.info("Starting parallel play extraction (%d workers)...", num_workers)
|
| 623 |
logger.info("Video: %s", self.config.video_path)
|
| 624 |
logger.info("Segment: %.1fs to %s", self.config.start_time, self.config.end_time or "end")
|
| 625 |
|
|
|
|
| 637 |
if not self.template_reader and self.template_builder:
|
| 638 |
success = self._pass0_build_templates_with_real_detection(timing)
|
| 639 |
if not success:
|
| 640 |
+
logger.warning("Pass 0 failed to build templates, extraction may fail or be inaccurate")
|
| 641 |
|
| 642 |
# Save templates to disk for worker processes
|
| 643 |
template_path = None
|
|
|
|
| 735 |
"frames_with_clock": stats["frames_with_clock"],
|
| 736 |
}
|
| 737 |
|
| 738 |
+
# Finalize: Post-hoc clock reset identification (Class A/B/C) and result building
|
| 739 |
+
return self._finalize_extraction(context, stats_dict, timing, frame_data)
|
| 740 |
|
| 741 |
+
def _log_extraction_mode(self) -> None:
|
| 742 |
+
"""Log the extraction mode being used."""
|
| 743 |
use_fixed_region = self.scorebug_detector and self.scorebug_detector.is_fixed_region_mode
|
| 744 |
|
| 745 |
if use_fixed_region:
|
|
|
|
| 773 |
|
| 774 |
def save_results(self, result: DetectionResult, output_path: str) -> None:
|
| 775 |
"""
|
| 776 |
+
Save extraction results to a JSON file.
|
| 777 |
|
| 778 |
Args:
|
| 779 |
+
result: Extraction results
|
| 780 |
output_path: Path to output file
|
| 781 |
"""
|
| 782 |
output = Path(output_path)
|
| 783 |
output.parent.mkdir(parents=True, exist_ok=True)
|
| 784 |
|
| 785 |
+
data = format_extraction_result_dict(result)
|
| 786 |
|
| 787 |
# Include configuration if provided (for reproducibility)
|
| 788 |
if result.config:
|
src/tracking/__init__.py
CHANGED
|
@@ -7,6 +7,7 @@ to detect play boundaries and other temporal events.
|
|
| 7 |
from .models import PlayEvent, PlayState, PlayTrackingState, TrackPlayStateConfig, TimeoutInfo, ClockResetStats
|
| 8 |
from .play_state import TrackPlayState
|
| 9 |
from .play_merger import PlayMerger
|
|
|
|
| 10 |
|
| 11 |
__all__ = [
|
| 12 |
# Models
|
|
@@ -20,4 +21,6 @@ __all__ = [
|
|
| 20 |
"TrackPlayState",
|
| 21 |
# Merger
|
| 22 |
"PlayMerger",
|
|
|
|
|
|
|
| 23 |
]
|
|
|
|
| 7 |
from .models import PlayEvent, PlayState, PlayTrackingState, TrackPlayStateConfig, TimeoutInfo, ClockResetStats
|
| 8 |
from .play_state import TrackPlayState
|
| 9 |
from .play_merger import PlayMerger
|
| 10 |
+
from .clock_reset_identifier import ClockResetIdentifier
|
| 11 |
|
| 12 |
__all__ = [
|
| 13 |
# Models
|
|
|
|
| 21 |
"TrackPlayState",
|
| 22 |
# Merger
|
| 23 |
"PlayMerger",
|
| 24 |
+
# Clock reset identification
|
| 25 |
+
"ClockResetIdentifier",
|
| 26 |
]
|
src/tracking/clock_reset_identifier.py
ADDED
|
@@ -0,0 +1,243 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Clock reset identifier module for post-hoc 40→25 transition analysis.
|
| 3 |
+
|
| 4 |
+
This module identifies and classifies 40→25 play clock reset events by analyzing
|
| 5 |
+
frame data after the initial extraction pass. It complements the real-time
|
| 6 |
+
TrackPlayState by catching timeout and special plays that the state machine
|
| 7 |
+
may miss or classify differently.
|
| 8 |
+
|
| 9 |
+
Classification (Class A/B/C):
|
| 10 |
+
- Class A (weird_clock): 25 counts down immediately → rejected (false positive)
|
| 11 |
+
- Class B (timeout): Timeout indicator changed → tracked as timeout play
|
| 12 |
+
- Class C (special): Neither A nor B → special play (punt/FG/XP/injury)
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
import logging
|
| 16 |
+
from typing import Any, Dict, List, Optional, Tuple
|
| 17 |
+
|
| 18 |
+
from .models import PlayEvent
|
| 19 |
+
|
| 20 |
+
logger = logging.getLogger(__name__)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class ClockResetIdentifier:
    """
    Identifies and classifies 40→25 clock reset events from frame data.

    This performs post-hoc analysis on extracted frame data to find timeout
    and special plays by looking for 40→25 clock transitions and classifying
    them based on subsequent behavior and timeout indicator changes:

    - Class A (weird_clock): 25 counts down immediately -> rejected (false positive)
    - Class B (timeout): a team's timeout count dropped -> timeout play
    - Class C (special): neither A nor B -> special play (punt/FG/XP/injury)
    """

    def __init__(
        self,
        immediate_countdown_window: float = 2.0,
        special_play_extension: float = 10.0,
        timeout_max_duration: float = 15.0,
    ):
        """
        Initialize the clock reset identifier.

        Args:
            immediate_countdown_window: Seconds to check if 25 counts down (Class A filter)
            special_play_extension: Max duration for special plays (Class C)
            timeout_max_duration: Max duration for timeout plays (Class B)
        """
        self.immediate_countdown_window = immediate_countdown_window
        self.special_play_extension = special_play_extension
        self.timeout_max_duration = timeout_max_duration

    def identify(self, frame_data: List[Dict[str, Any]]) -> Tuple[List[PlayEvent], Dict[str, int]]:
        """
        Identify and classify 40→25 clock reset events in frame data.

        Scans through frame_data looking for 40→25 transitions and classifies each:
        - Class A (weird_clock): 25 counts down immediately -> rejected
        - Class B (timeout): timeout indicator changed -> timeout play
        - Class C (special): neither A nor B -> special play

        Args:
            frame_data: List of frame data dicts with clock_value, timestamp,
                        home_timeouts, away_timeouts, etc.

        Returns:
            Tuple of (list of PlayEvent for valid clock resets, stats dict)
        """
        plays: List[PlayEvent] = []
        stats = {"total": 0, "weird_clock": 0, "timeout": 0, "special": 0}

        prev_clock: Optional[int] = None

        for i, frame in enumerate(frame_data):
            clock_value = frame.get("clock_value")
            timestamp: float = frame["timestamp"]

            if clock_value is not None:
                # A 40 -> 25 transition marks a candidate clock reset event
                if prev_clock == 40 and clock_value == 25:
                    stats["total"] += 1

                    # Class A check: 25 immediately counts down (weird clock behavior)
                    is_immediate_countdown = self._check_immediate_countdown(frame_data, i)

                    # Class B check: a team's timeout count dropped around the reset
                    timeout_team = self._check_timeout_change(frame_data, i)

                    if is_immediate_countdown:
                        # Class A: weird clock behavior - reject as a false positive
                        stats["weird_clock"] += 1
                        logger.debug("Clock reset at %.1fs: weird_clock (25 counts down immediately)", timestamp)

                    elif timeout_team:
                        # Class B: team timeout
                        stats["timeout"] += 1
                        play_end = self._find_play_end(frame_data, i, max_duration=self.timeout_max_duration)
                        play = PlayEvent(
                            play_number=0,  # renumbered later when merged with state-machine plays
                            start_time=timestamp,
                            end_time=play_end,
                            confidence=0.8,
                            start_method=f"timeout_{timeout_team}",
                            end_method="timeout_end",
                            direct_end_time=play_end,
                            start_clock_value=prev_clock,
                            end_clock_value=25,
                            play_type="timeout",
                        )
                        plays.append(play)
                        logger.debug("Clock reset at %.1fs: timeout (%s team)", timestamp, timeout_team)

                    else:
                        # Class C: special play (punt/FG/XP/injury)
                        stats["special"] += 1
                        play_end = self._find_play_end(frame_data, i, max_duration=self.special_play_extension)
                        play_duration = play_end - timestamp
                        # 0.1s tolerance distinguishes "capped at max duration" from a real scorebug cut
                        end_method = "max_duration" if play_duration >= self.special_play_extension - 0.1 else "scorebug_disappeared"
                        play = PlayEvent(
                            play_number=0,  # renumbered later when merged with state-machine plays
                            start_time=timestamp,
                            end_time=play_end,
                            confidence=0.8,
                            start_method="clock_reset_special",
                            end_method=end_method,
                            direct_end_time=play_end,
                            start_clock_value=prev_clock,
                            end_clock_value=25,
                            play_type="special",
                        )
                        plays.append(play)
                        logger.debug("Clock reset at %.1fs: special play (%.1fs duration)", timestamp, play_end - timestamp)

                # Only update on readable frames so brief OCR dropouts don't
                # break 40->25 detection across a few unreadable frames.
                prev_clock = clock_value

        return plays, stats

    def _check_immediate_countdown(self, frame_data: List[Dict[str, Any]], frame_idx: int) -> bool:
        """
        Check if 25 immediately starts counting down (Class A filter).

        If the clock shows a value < 25 within the countdown window after
        the reset, this indicates weird clock behavior (false positive).

        Args:
            frame_data: Frame data list
            frame_idx: Index of frame where 40→25 reset occurred

        Returns:
            True if 25 counts down immediately (Class A), False otherwise
        """
        reset_timestamp: float = frame_data[frame_idx]["timestamp"]

        for j in range(frame_idx + 1, len(frame_data)):
            frame = frame_data[j]
            elapsed = frame["timestamp"] - reset_timestamp
            if elapsed > self.immediate_countdown_window:
                break
            clock_value = frame.get("clock_value")
            if clock_value is not None and clock_value < 25:
                return True  # 25 counted down - weird clock

        return False

    def _check_timeout_change(self, frame_data: List[Dict[str, Any]], frame_idx: int) -> Optional[str]:
        """
        Check if a timeout indicator changed around the reset (Class B check).

        Compares timeout counts before and after the reset to determine
        if a team timeout was called.

        Args:
            frame_data: Frame data list
            frame_idx: Index of frame where 40→25 reset occurred

        Returns:
            "home" or "away" if timeout was used, None otherwise
        """
        # Get timeout counts before reset (look back up to 20 frames).
        # The stop bound is exclusive, so frame_idx - 21 (floored at -1) yields a
        # full 20-frame window and still reaches frame 0 near the start of the
        # data; range(..., max(0, frame_idx - 20), -1) would skip the boundary frame.
        before_home: Optional[int] = None
        before_away: Optional[int] = None

        for j in range(frame_idx - 1, max(frame_idx - 21, -1), -1):
            frame = frame_data[j]
            if frame.get("home_timeouts") is not None:
                before_home = frame.get("home_timeouts", 3)
                before_away = frame.get("away_timeouts", 3)
                break

        if before_home is None:
            # No readable timeout indicators before the reset - cannot classify as Class B
            return None

        # Look forward for timeout change (up to 15 seconds).
        # Frame spacing is estimated from the first two frames; assumes a roughly
        # constant sampling interval across the data.
        frame_interval = frame_data[1]["timestamp"] - frame_data[0]["timestamp"] if len(frame_data) > 1 else 0.5
        max_frames_forward = int(15.0 / frame_interval) if frame_interval > 0 else 30

        for j in range(frame_idx, min(len(frame_data), frame_idx + max_frames_forward)):
            frame = frame_data[j]
            if frame.get("home_timeouts") is not None:
                after_home = frame.get("home_timeouts", 3)
                after_away = frame.get("away_timeouts", 3)

                if after_home < before_home:
                    return "home"
                if after_away < before_away:
                    return "away"

        return None

    def _find_play_end(self, frame_data: List[Dict[str, Any]], frame_idx: int, max_duration: float) -> float:
        """
        Find the end time for a clock reset play.

        The play ends when EITHER:
        - Scorebug/clock disappears (cut to commercial/replay)
        - max_duration seconds have elapsed since the reset

        Whichever comes first.

        Args:
            frame_data: Frame data list
            frame_idx: Index of frame where 40→25 reset occurred
            max_duration: Maximum play duration from reset

        Returns:
            Play end timestamp
        """
        start_timestamp: float = frame_data[frame_idx]["timestamp"]
        max_end_time = start_timestamp + max_duration

        # Look for scorebug disappearance (but cap at max_duration)
        for j in range(frame_idx + 1, len(frame_data)):
            frame = frame_data[j]
            timestamp: float = frame["timestamp"]

            # If we've exceeded max_duration, end at max_duration
            if timestamp >= max_end_time:
                return max_end_time

            # Check for clock/scorebug disappearance
            clock_available = frame.get("clock_detected", frame.get("scorebug_detected", False))
            if not clock_available:
                return timestamp

        # Default: end at max_duration, or at the last frame if the data ends sooner.
        # frame_data is non-empty here because frame_data[frame_idx] was read above.
        return min(max_end_time, float(frame_data[-1]["timestamp"]))
|