andytaylor-smg committed on
Commit
719b8f7
·
1 Parent(s): 1f3bac1

perfect mypy

Browse files
pyproject.toml CHANGED
@@ -19,6 +19,7 @@ dev = [
19
  "black>=24.0.0",
20
  "pylint>=3.0.0",
21
  "pylint-pydantic>=0.3.0",
 
22
  ]
23
 
24
  [build-system]
@@ -36,6 +37,12 @@ python_version = "3.13"
36
  strict = true
37
  ignore_missing_imports = true
38
  exclude = ["tests", "scripts", "examples", "static"]
 
 
 
 
 
 
39
 
40
  [tool.black]
41
  line-length = 180
 
19
  "black>=24.0.0",
20
  "pylint>=3.0.0",
21
  "pylint-pydantic>=0.3.0",
22
+ "mypy>=1.0.0",
23
  ]
24
 
25
  [build-system]
 
37
  strict = true
38
  ignore_missing_imports = true
39
  exclude = ["tests", "scripts", "examples", "static"]
40
+ plugins = ["pydantic.mypy"]
41
+
42
+ [tool.pydantic-mypy]
43
+ init_forbid_extra = true
44
+ init_typed = true
45
+ warn_required_dynamic_aliases = true
46
 
47
  [tool.black]
48
  line-length = 180
src/config/session.py CHANGED
@@ -8,7 +8,7 @@ including region selections, video paths, and project constants.
8
  import json
9
  import logging
10
  from pathlib import Path
11
- from typing import Tuple
12
 
13
  import cv2
14
  import numpy as np
@@ -127,7 +127,7 @@ def _save_config_json(config: SessionConfig, output_dir: Path, basename: str) ->
127
  return config_path
128
 
129
 
130
- def _extract_frame_from_video(video_path: str, timestamp: float) -> np.ndarray | None:
131
  """
132
  Extract a single frame from a video at the given timestamp.
133
 
@@ -151,7 +151,7 @@ def _extract_frame_from_video(video_path: str, timestamp: float) -> np.ndarray |
151
  return frame
152
 
153
 
154
- def _save_template_image(config: SessionConfig, output_dir: Path, basename: str, frame: np.ndarray) -> Path:
155
  """
156
  Extract the scorebug region from a frame and save it as the template image.
157
 
@@ -255,7 +255,7 @@ def _save_timeout_config(config: SessionConfig, output_dir: Path, basename: str)
255
  def save_session_config(
256
  config: SessionConfig,
257
  output_dir: Path,
258
- selected_frame: Tuple[float, np.ndarray] | None = None,
259
  ) -> Tuple[str, str]:
260
  """
261
  Save session configuration and generate template image.
@@ -276,6 +276,7 @@ def save_session_config(
276
  config_path = _save_config_json(config, output_dir, basename)
277
 
278
  # Get frame for template generation
 
279
  if selected_frame is not None:
280
  timestamp, frame = selected_frame
281
  logger.info("Using selected frame @ %.1fs for template generation", timestamp)
 
8
  import json
9
  import logging
10
  from pathlib import Path
11
+ from typing import Any, Optional, Tuple
12
 
13
  import cv2
14
  import numpy as np
 
127
  return config_path
128
 
129
 
130
+ def _extract_frame_from_video(video_path: str, timestamp: float) -> np.ndarray[Any, Any] | None:
131
  """
132
  Extract a single frame from a video at the given timestamp.
133
 
 
151
  return frame
152
 
153
 
154
+ def _save_template_image(config: SessionConfig, output_dir: Path, basename: str, frame: np.ndarray[Any, Any]) -> Path:
155
  """
156
  Extract the scorebug region from a frame and save it as the template image.
157
 
 
255
  def save_session_config(
256
  config: SessionConfig,
257
  output_dir: Path,
258
+ selected_frame: Tuple[float, np.ndarray[Any, Any]] | None = None,
259
  ) -> Tuple[str, str]:
260
  """
261
  Save session configuration and generate template image.
 
276
  config_path = _save_config_json(config, output_dir, basename)
277
 
278
  # Get frame for template generation
279
+ frame: Optional[np.ndarray[Any, Any]] = None
280
  if selected_frame is not None:
281
  timestamp, frame = selected_frame
282
  logger.info("Using selected frame @ %.1fs for template generation", timestamp)
src/detection/scorebug.py CHANGED
@@ -8,7 +8,7 @@ This module provides functions to detect the presence and location of the scoreb
8
  import json
9
  import logging
10
  from pathlib import Path
11
- from typing import Optional, Tuple
12
 
13
  import cv2
14
  import numpy as np
@@ -55,15 +55,15 @@ class DetectScoreBug:
55
  fixed_region_config_path: Path to JSON config with fixed region (alternative to fixed_region)
56
  use_split_detection: Enable split-half detection for robustness to partial overlays (default: True)
57
  """
58
- self.template = None
59
  self.template_path = template_path
60
  self.fixed_region = fixed_region
61
  self._use_fixed_region = fixed_region is not None
62
  self.use_split_detection = use_split_detection
63
 
64
  # Pre-computed template halves for split detection (populated when template is loaded)
65
- self._template_left = None
66
- self._template_right = None
67
 
68
  if template_path:
69
  self.load_template(template_path)
@@ -126,7 +126,7 @@ class DetectScoreBug:
126
  self._template_right.shape[0],
127
  )
128
 
129
- def detect(self, frame: np.ndarray) -> ScorebugDetection:
130
  """
131
  Detect scorebug in a frame.
132
 
@@ -167,7 +167,7 @@ class DetectScoreBug:
167
  return detection
168
 
169
  # pylint: disable=too-many-locals
170
- def _detect_in_fixed_region(self, frame: np.ndarray) -> ScorebugDetection:
171
  """
172
  Detect scorebug by checking only the fixed known location.
173
 
@@ -185,6 +185,10 @@ class DetectScoreBug:
185
  Returns:
186
  Detection result
187
  """
 
 
 
 
188
  x, y, _, _ = self.fixed_region
189
  th, tw = self.template.shape[:2]
190
 
@@ -244,7 +248,7 @@ class DetectScoreBug:
244
  return ScorebugDetection(detected=False, confidence=full_confidence, bbox=(x, y, tw, th), method="fixed_region")
245
 
246
  # pylint: disable=too-many-locals
247
- def _detect_by_template_fullsearch(self, frame: np.ndarray) -> ScorebugDetection:
248
  """
249
  Detect scorebug using full-frame template matching.
250
 
@@ -351,7 +355,7 @@ class DetectScoreBug:
351
 
352
  logger.info("Saved fixed region config to: %s", config_path)
353
 
354
- def discover_and_lock_region(self, frame: np.ndarray) -> bool:
355
  """
356
  Discover scorebug location using full search, then lock to fixed region mode.
357
 
@@ -376,7 +380,7 @@ class DetectScoreBug:
376
  self._use_fixed_region = old_use_fixed
377
  return False
378
 
379
- def visualize_detection(self, frame: np.ndarray, detection: ScorebugDetection) -> np.ndarray:
380
  """
381
  Draw detection results on frame for visualization.
382
 
@@ -403,7 +407,7 @@ class DetectScoreBug:
403
  return vis_frame
404
 
405
 
406
- def create_template_from_frame(frame: np.ndarray, bbox: Tuple[int, int, int, int], output_path: str) -> None:
407
  """
408
  Extract a region from a frame to use as a template.
409
 
 
8
  import json
9
  import logging
10
  from pathlib import Path
11
+ from typing import Any, Optional, Tuple
12
 
13
  import cv2
14
  import numpy as np
 
55
  fixed_region_config_path: Path to JSON config with fixed region (alternative to fixed_region)
56
  use_split_detection: Enable split-half detection for robustness to partial overlays (default: True)
57
  """
58
+ self.template: Optional[np.ndarray[Any, Any]] = None
59
  self.template_path = template_path
60
  self.fixed_region = fixed_region
61
  self._use_fixed_region = fixed_region is not None
62
  self.use_split_detection = use_split_detection
63
 
64
  # Pre-computed template halves for split detection (populated when template is loaded)
65
+ self._template_left: Optional[np.ndarray[Any, Any]] = None
66
+ self._template_right: Optional[np.ndarray[Any, Any]] = None
67
 
68
  if template_path:
69
  self.load_template(template_path)
 
126
  self._template_right.shape[0],
127
  )
128
 
129
+ def detect(self, frame: np.ndarray[Any, Any]) -> ScorebugDetection:
130
  """
131
  Detect scorebug in a frame.
132
 
 
167
  return detection
168
 
169
  # pylint: disable=too-many-locals
170
+ def _detect_in_fixed_region(self, frame: np.ndarray[Any, Any]) -> ScorebugDetection:
171
  """
172
  Detect scorebug by checking only the fixed known location.
173
 
 
185
  Returns:
186
  Detection result
187
  """
188
+ # Asserts: this method should only be called when fixed_region and template are set
189
+ assert self.fixed_region is not None
190
+ assert self.template is not None
191
+
192
  x, y, _, _ = self.fixed_region
193
  th, tw = self.template.shape[:2]
194
 
 
248
  return ScorebugDetection(detected=False, confidence=full_confidence, bbox=(x, y, tw, th), method="fixed_region")
249
 
250
  # pylint: disable=too-many-locals
251
+ def _detect_by_template_fullsearch(self, frame: np.ndarray[Any, Any]) -> ScorebugDetection:
252
  """
253
  Detect scorebug using full-frame template matching.
254
 
 
355
 
356
  logger.info("Saved fixed region config to: %s", config_path)
357
 
358
+ def discover_and_lock_region(self, frame: np.ndarray[Any, Any]) -> bool:
359
  """
360
  Discover scorebug location using full search, then lock to fixed region mode.
361
 
 
380
  self._use_fixed_region = old_use_fixed
381
  return False
382
 
383
+ def visualize_detection(self, frame: np.ndarray[Any, Any], detection: ScorebugDetection) -> np.ndarray[Any, Any]:
384
  """
385
  Draw detection results on frame for visualization.
386
 
 
407
  return vis_frame
408
 
409
 
410
+ def create_template_from_frame(frame: np.ndarray[Any, Any], bbox: Tuple[int, int, int, int], output_path: str) -> None:
411
  """
412
  Extract a region from a frame to use as a template.
413
 
src/detection/timeouts.py CHANGED
@@ -9,7 +9,7 @@ Detecting when an oval changes from white to dark indicates a timeout was called
9
  import json
10
  import logging
11
  from pathlib import Path
12
- from typing import Optional, Tuple, List
13
 
14
  import cv2
15
  import numpy as np
@@ -128,7 +128,7 @@ class DetectTimeouts:
128
  self._configured = True
129
  logger.info("Timeout regions set: home=%s, away=%s", home_region.bbox, away_region.bbox)
130
 
131
- def _extract_oval_bright_ratios(self, frame: np.ndarray, region: TimeoutRegionConfig) -> List[float]:
132
  """
133
  Extract the ratio of bright pixels for each oval in a region.
134
 
@@ -194,7 +194,7 @@ class DetectTimeouts:
194
  """Count how many timeouts are available (white ovals)."""
195
  return sum(1 for state in oval_states if state)
196
 
197
- def read_timeouts(self, frame: np.ndarray) -> TimeoutReading:
198
  """
199
  Read the current timeout count for each team.
200
 
@@ -208,6 +208,10 @@ class DetectTimeouts:
208
  logger.warning("Timeout tracker not configured")
209
  return TimeoutReading(home_timeouts=3, away_timeouts=3, confidence=0.0)
210
 
 
 
 
 
211
  # Read home team timeouts using bright pixel ratio
212
  home_bright_ratios = self._extract_oval_bright_ratios(frame, self.home_region)
213
  home_states = self._classify_ovals(home_bright_ratios)
@@ -291,7 +295,7 @@ class DetectTimeouts:
291
  self._prev_reading = curr_reading
292
  return result
293
 
294
- def update(self, frame: np.ndarray) -> Tuple[TimeoutReading, Optional[str]]:
295
  """
296
  Read timeouts and detect any change in one call.
297
 
@@ -310,7 +314,7 @@ class DetectTimeouts:
310
  self._prev_reading = None
311
  logger.debug("Timeout tracking reset")
312
 
313
- def visualize(self, frame: np.ndarray, reading: Optional[TimeoutReading] = None) -> np.ndarray:
314
  """
315
  Draw timeout regions and states on frame for visualization.
316
 
 
9
  import json
10
  import logging
11
  from pathlib import Path
12
+ from typing import Any, Optional, Tuple, List
13
 
14
  import cv2
15
  import numpy as np
 
128
  self._configured = True
129
  logger.info("Timeout regions set: home=%s, away=%s", home_region.bbox, away_region.bbox)
130
 
131
+ def _extract_oval_bright_ratios(self, frame: np.ndarray[Any, Any], region: TimeoutRegionConfig) -> List[float]:
132
  """
133
  Extract the ratio of bright pixels for each oval in a region.
134
 
 
194
  """Count how many timeouts are available (white ovals)."""
195
  return sum(1 for state in oval_states if state)
196
 
197
+ def read_timeouts(self, frame: np.ndarray[Any, Any]) -> TimeoutReading:
198
  """
199
  Read the current timeout count for each team.
200
 
 
208
  logger.warning("Timeout tracker not configured")
209
  return TimeoutReading(home_timeouts=3, away_timeouts=3, confidence=0.0)
210
 
211
+ # Asserts: _configured guarantees regions are set
212
+ assert self.home_region is not None
213
+ assert self.away_region is not None
214
+
215
  # Read home team timeouts using bright pixel ratio
216
  home_bright_ratios = self._extract_oval_bright_ratios(frame, self.home_region)
217
  home_states = self._classify_ovals(home_bright_ratios)
 
295
  self._prev_reading = curr_reading
296
  return result
297
 
298
+ def update(self, frame: np.ndarray[Any, Any]) -> Tuple[TimeoutReading, Optional[str]]:
299
  """
300
  Read timeouts and detect any change in one call.
301
 
 
314
  self._prev_reading = None
315
  logger.debug("Timeout tracking reset")
316
 
317
+ def visualize(self, frame: np.ndarray[Any, Any], reading: Optional[TimeoutReading] = None) -> np.ndarray[Any, Any]:
318
  """
319
  Draw timeout regions and states on frame for visualization.
320
 
src/pipeline/parallel.py CHANGED
@@ -13,7 +13,7 @@ import time
13
  from concurrent.futures import Future, ProcessPoolExecutor, as_completed
14
  from multiprocessing import Manager
15
  from pathlib import Path
16
- from typing import Any, Dict, List, Optional, Tuple
17
 
18
  from utils import create_frame_result
19
  from .models import ChunkResult, ParallelProcessingConfig
@@ -182,7 +182,7 @@ def _process_chunk(
182
  config: ParallelProcessingConfig,
183
  chunk_start: float,
184
  chunk_end: float,
185
- progress_dict: Optional[Dict[int, Any]] = None,
186
  ) -> ChunkResult:
187
  """
188
  Process a single video chunk using OpenCV.
@@ -310,7 +310,7 @@ def _calculate_chunk_boundaries(start_time: float, end_time: float, num_workers:
310
  return chunks
311
 
312
 
313
- def _create_progress_monitor(progress_dict: Dict[int, Any], num_workers: int) -> Tuple[threading.Thread, threading.Event]:
314
  """
315
  Create a progress monitoring thread.
316
 
@@ -333,7 +333,7 @@ def _create_progress_monitor(progress_dict: Dict[int, Any], num_workers: int) ->
333
  return monitor_thread, stop_monitor
334
 
335
 
336
- def _display_progress(progress_dict: Dict[int, Any], num_workers: int) -> None:
337
  """
338
  Build and display current progress string.
339
 
@@ -386,7 +386,7 @@ def _submit_chunk_jobs(
386
  executor: ProcessPoolExecutor,
387
  chunks: List[Tuple[int, float, float]],
388
  config: ParallelProcessingConfig,
389
- progress_dict: Dict[int, Any],
390
  ) -> Dict[Future[ChunkResult], int]:
391
  """
392
  Submit all chunk processing jobs to the executor.
 
13
  from concurrent.futures import Future, ProcessPoolExecutor, as_completed
14
  from multiprocessing import Manager
15
  from pathlib import Path
16
+ from typing import Any, Dict, List, MutableMapping, Optional, Tuple
17
 
18
  from utils import create_frame_result
19
  from .models import ChunkResult, ParallelProcessingConfig
 
182
  config: ParallelProcessingConfig,
183
  chunk_start: float,
184
  chunk_end: float,
185
+ progress_dict: Optional[MutableMapping[int, Any]] = None,
186
  ) -> ChunkResult:
187
  """
188
  Process a single video chunk using OpenCV.
 
310
  return chunks
311
 
312
 
313
+ def _create_progress_monitor(progress_dict: MutableMapping[int, Any], num_workers: int) -> Tuple[threading.Thread, threading.Event]:
314
  """
315
  Create a progress monitoring thread.
316
 
 
333
  return monitor_thread, stop_monitor
334
 
335
 
336
+ def _display_progress(progress_dict: MutableMapping[int, Any], num_workers: int) -> None:
337
  """
338
  Build and display current progress string.
339
 
 
386
  executor: ProcessPoolExecutor,
387
  chunks: List[Tuple[int, float, float]],
388
  config: ParallelProcessingConfig,
389
+ progress_dict: MutableMapping[int, Any],
390
  ) -> Dict[Future[ChunkResult], int]:
391
  """
392
  Submit all chunk processing jobs to the executor.
src/pipeline/play_detector.py CHANGED
@@ -82,17 +82,15 @@ class PlayDetector:
82
  timeout_tracker: Optional timeout tracker for clock reset classification
83
  """
84
  self.config = config
85
- self.scorebug_detector: Optional[DetectScoreBug] = None
86
- self.clock_reader: Optional[PlayClockRegionExtractor] = None
87
- self.state_machine: Optional[TrackPlayState] = None
88
  self.timeout_tracker = timeout_tracker
89
 
90
- # Template-based clock reading components
91
  self.template_builder: Optional[DigitTemplateBuilder] = None
92
  self.template_library: Optional[DigitTemplateLibrary] = None
93
  self.template_reader: Optional[ReadPlayClock] = None
94
 
95
  self._validate_config()
 
96
  self._initialize_components()
97
 
98
  def _validate_config(self) -> None:
@@ -125,6 +123,7 @@ class PlayDetector:
125
  logger.info("Fixed coordinates mode - regions pre-configured")
126
 
127
  # Compute play clock offset relative to scorebug from absolute coordinates
 
128
  pc_x, pc_y, pc_w, pc_h = self.config.fixed_playclock_coords
129
  if self.config.fixed_scorebug_coords:
130
  sb_x, sb_y, _, _ = self.config.fixed_scorebug_coords
@@ -138,7 +137,7 @@ class PlayDetector:
138
  playclock_config = PlayClockRegionConfig(x_offset=x_offset, y_offset=y_offset, width=pc_w, height=pc_h, source_video="", scorebug_template="", samples_used=0)
139
 
140
  # Initialize scorebug detector (will set fixed region below)
141
- self.scorebug_detector = DetectScoreBug(template_path=None, use_split_detection=self.config.use_split_detection)
142
 
143
  # Set the fixed region immediately so no template matching is needed
144
  if self.config.fixed_scorebug_coords:
@@ -146,7 +145,7 @@ class PlayDetector:
146
  logger.info("Scorebug fixed region set: %s", self.config.fixed_scorebug_coords)
147
 
148
  # Initialize play clock region extractor with the derived config
149
- self.clock_reader = PlayClockRegionExtractor(region_config=playclock_config)
150
  logger.info("Play clock region extractor initialized with offset=(%d, %d), size=(%d, %d)", x_offset, y_offset, pc_w, pc_h)
151
  else:
152
  # Standard mode: use template and config files
@@ -158,7 +157,7 @@ class PlayDetector:
158
  logger.info("Play clock region extractor initialized")
159
 
160
  # Initialize state machine
161
- self.state_machine = TrackPlayState()
162
  logger.info("State machine initialized")
163
 
164
  # Initialize template matching components
@@ -249,6 +248,8 @@ class PlayDetector:
249
  True if templates were built successfully, False otherwise
250
  """
251
  # Use the extracted TemplateBuildingPass module
 
 
252
  template_pass = TemplateBuildingPass(
253
  config=self.config,
254
  clock_reader=self.clock_reader,
@@ -355,7 +356,7 @@ class PlayDetector:
355
 
356
  def _process_frame_streaming(
357
  self,
358
- frame: np.ndarray,
359
  current_time: float,
360
  timing: Dict[str, float],
361
  stats: Dict[str, Any],
@@ -410,6 +411,7 @@ class PlayDetector:
410
 
411
  # Extract play clock region and run template matching immediately
412
  t_start = time.perf_counter()
 
413
  play_clock_region = self.clock_reader.extract_region(frame, scorebug.bbox)
414
  timing["preprocessing"] += time.perf_counter() - t_start
415
 
@@ -492,9 +494,9 @@ class PlayDetector:
492
  plays = self._merge_plays(state_machine_plays, clock_reset_plays)
493
 
494
  # Recalculate stats from merged plays
495
- start_methods = {}
496
- end_methods = {}
497
- play_types = {}
498
  for play in plays:
499
  start_methods[play.start_method] = start_methods.get(play.start_method, 0) + 1
500
  end_methods[play.end_method] = end_methods.get(play.end_method, 0) + 1
@@ -697,13 +699,13 @@ class PlayDetector:
697
  Returns:
698
  Play end timestamp
699
  """
700
- start_timestamp = frame_data[frame_idx]["timestamp"]
701
  max_end_time = start_timestamp + max_duration
702
 
703
  # Look for scorebug disappearance (but cap at max_duration from start)
704
  for j in range(frame_idx + 1, len(frame_data)):
705
  frame = frame_data[j]
706
- timestamp = frame["timestamp"]
707
 
708
  # If we've exceeded max_duration, end the play at max_duration
709
  if timestamp >= max_end_time:
@@ -715,7 +717,7 @@ class PlayDetector:
715
  return timestamp
716
 
717
  # Default: end at max_duration (or end of data if shorter)
718
- return min(max_end_time, frame_data[-1]["timestamp"] if frame_data else max_end_time)
719
 
720
  def _merge_plays(self, state_machine_plays: List[PlayEvent], clock_reset_plays: List[PlayEvent]) -> List[PlayEvent]:
721
  """
@@ -942,6 +944,9 @@ class PlayDetector:
942
  t_parallel_start = time.perf_counter()
943
 
944
  # Create parallel processing config
 
 
 
945
  parallel_config = ParallelProcessingConfig(
946
  video_path=self.config.video_path,
947
  start_time=self.config.start_time,
 
82
  timeout_tracker: Optional timeout tracker for clock reset classification
83
  """
84
  self.config = config
 
 
 
85
  self.timeout_tracker = timeout_tracker
86
 
87
+ # Template-based clock reading components (conditionally initialized)
88
  self.template_builder: Optional[DigitTemplateBuilder] = None
89
  self.template_library: Optional[DigitTemplateLibrary] = None
90
  self.template_reader: Optional[ReadPlayClock] = None
91
 
92
  self._validate_config()
93
+ # Core components are initialized here (scorebug_detector, clock_reader, state_machine)
94
  self._initialize_components()
95
 
96
  def _validate_config(self) -> None:
 
123
  logger.info("Fixed coordinates mode - regions pre-configured")
124
 
125
  # Compute play clock offset relative to scorebug from absolute coordinates
126
+ assert self.config.fixed_playclock_coords is not None # Already checked above, helps mypy
127
  pc_x, pc_y, pc_w, pc_h = self.config.fixed_playclock_coords
128
  if self.config.fixed_scorebug_coords:
129
  sb_x, sb_y, _, _ = self.config.fixed_scorebug_coords
 
137
  playclock_config = PlayClockRegionConfig(x_offset=x_offset, y_offset=y_offset, width=pc_w, height=pc_h, source_video="", scorebug_template="", samples_used=0)
138
 
139
  # Initialize scorebug detector (will set fixed region below)
140
+ self.scorebug_detector: DetectScoreBug = DetectScoreBug(template_path=None, use_split_detection=self.config.use_split_detection)
141
 
142
  # Set the fixed region immediately so no template matching is needed
143
  if self.config.fixed_scorebug_coords:
 
145
  logger.info("Scorebug fixed region set: %s", self.config.fixed_scorebug_coords)
146
 
147
  # Initialize play clock region extractor with the derived config
148
+ self.clock_reader: PlayClockRegionExtractor = PlayClockRegionExtractor(region_config=playclock_config)
149
  logger.info("Play clock region extractor initialized with offset=(%d, %d), size=(%d, %d)", x_offset, y_offset, pc_w, pc_h)
150
  else:
151
  # Standard mode: use template and config files
 
157
  logger.info("Play clock region extractor initialized")
158
 
159
  # Initialize state machine
160
+ self.state_machine: TrackPlayState = TrackPlayState()
161
  logger.info("State machine initialized")
162
 
163
  # Initialize template matching components
 
248
  True if templates were built successfully, False otherwise
249
  """
250
  # Use the extracted TemplateBuildingPass module
251
+ # Assert: template_builder is initialized when this method is called
252
+ assert self.template_builder is not None
253
  template_pass = TemplateBuildingPass(
254
  config=self.config,
255
  clock_reader=self.clock_reader,
 
356
 
357
  def _process_frame_streaming(
358
  self,
359
+ frame: np.ndarray[Any, Any],
360
  current_time: float,
361
  timing: Dict[str, float],
362
  stats: Dict[str, Any],
 
411
 
412
  # Extract play clock region and run template matching immediately
413
  t_start = time.perf_counter()
414
+ assert scorebug.bbox is not None # scorebug.detected implies bbox is set
415
  play_clock_region = self.clock_reader.extract_region(frame, scorebug.bbox)
416
  timing["preprocessing"] += time.perf_counter() - t_start
417
 
 
494
  plays = self._merge_plays(state_machine_plays, clock_reset_plays)
495
 
496
  # Recalculate stats from merged plays
497
+ start_methods: Dict[str, int] = {}
498
+ end_methods: Dict[str, int] = {}
499
+ play_types: Dict[str, int] = {}
500
  for play in plays:
501
  start_methods[play.start_method] = start_methods.get(play.start_method, 0) + 1
502
  end_methods[play.end_method] = end_methods.get(play.end_method, 0) + 1
 
699
  Returns:
700
  Play end timestamp
701
  """
702
+ start_timestamp: float = frame_data[frame_idx]["timestamp"]
703
  max_end_time = start_timestamp + max_duration
704
 
705
  # Look for scorebug disappearance (but cap at max_duration from start)
706
  for j in range(frame_idx + 1, len(frame_data)):
707
  frame = frame_data[j]
708
+ timestamp: float = frame["timestamp"]
709
 
710
  # If we've exceeded max_duration, end the play at max_duration
711
  if timestamp >= max_end_time:
 
717
  return timestamp
718
 
719
  # Default: end at max_duration (or end of data if shorter)
720
+ return min(max_end_time, float(frame_data[-1]["timestamp"]) if frame_data else max_end_time)
721
 
722
  def _merge_plays(self, state_machine_plays: List[PlayEvent], clock_reset_plays: List[PlayEvent]) -> List[PlayEvent]:
723
  """
 
944
  t_parallel_start = time.perf_counter()
945
 
946
  # Create parallel processing config
947
+ # Asserts: validated by _validate_config, parallel mode requires fixed coords
948
+ assert self.config.fixed_playclock_coords is not None
949
+ assert self.config.fixed_scorebug_coords is not None
950
  parallel_config = ParallelProcessingConfig(
951
  video_path=self.config.video_path,
952
  start_time=self.config.start_time,
src/pipeline/template_builder_pass.py CHANGED
@@ -14,7 +14,7 @@ import logging
14
  import time
15
  from functools import lru_cache
16
  from pathlib import Path
17
- from typing import Optional, Tuple
18
 
19
  import cv2
20
  import easyocr
@@ -56,7 +56,7 @@ def _validate_config(config: DetectionConfig) -> bool:
56
 
57
 
58
  def _process_scorebug_frame(
59
- frame: np.ndarray,
60
  scorebug_bbox: Tuple[int, int, int, int],
61
  current_time: float,
62
  clock_reader: PlayClockRegionExtractor,
@@ -167,6 +167,7 @@ def _scan_video(
167
 
168
  if scorebug_result.detected:
169
  frames_with_scorebug += 1
 
170
  valid_samples += _process_scorebug_frame(frame, scorebug_result.bbox, current_time, clock_reader, template_builder, reader)
171
 
172
  # Progress logging every 200 frames
@@ -238,6 +239,9 @@ def run_template_building_pass(
238
  if not _validate_config(config):
239
  return None, None, time.perf_counter() - t_build_start
240
 
 
 
 
241
  # Create temporary scorebug detector for Pass 0
242
  sb_x, sb_y, sb_w, sb_h = config.fixed_scorebug_coords
243
  logger.info(" Scorebug region: (%d, %d, %d, %d)", sb_x, sb_y, sb_w, sb_h)
 
14
  import time
15
  from functools import lru_cache
16
  from pathlib import Path
17
+ from typing import Any, Optional, Tuple
18
 
19
  import cv2
20
  import easyocr
 
56
 
57
 
58
  def _process_scorebug_frame(
59
+ frame: np.ndarray[Any, Any],
60
  scorebug_bbox: Tuple[int, int, int, int],
61
  current_time: float,
62
  clock_reader: PlayClockRegionExtractor,
 
167
 
168
  if scorebug_result.detected:
169
  frames_with_scorebug += 1
170
+ assert scorebug_result.bbox is not None # detected implies bbox is set
171
  valid_samples += _process_scorebug_frame(frame, scorebug_result.bbox, current_time, clock_reader, template_builder, reader)
172
 
173
  # Progress logging every 200 frames
 
239
  if not _validate_config(config):
240
  return None, None, time.perf_counter() - t_build_start
241
 
242
+ # Assert: _validate_config guarantees fixed_scorebug_coords is set
243
+ assert config.fixed_scorebug_coords is not None
244
+
245
  # Create temporary scorebug detector for Pass 0
246
  sb_x, sb_y, sb_w, sb_h = config.fixed_scorebug_coords
247
  logger.info(" Scorebug region: (%d, %d, %d, %d)", sb_x, sb_y, sb_w, sb_h)
src/readers/playclock.py CHANGED
@@ -14,7 +14,7 @@ Performance comparison (from ocr_benchmark.md):
14
  """
15
 
16
  import logging
17
- from typing import List, Tuple
18
 
19
  import cv2
20
  import numpy as np
@@ -67,7 +67,7 @@ class ReadPlayClock:
67
 
68
  logger.info("ReadPlayClock initialized")
69
 
70
- def preprocess_region(self, region: np.ndarray) -> np.ndarray:
71
  """
72
  Preprocess play clock region for template matching.
73
 
@@ -81,7 +81,7 @@ class ReadPlayClock:
81
  """
82
  return preprocess_playclock_region(region, self.scale_factor)
83
 
84
- def match_digit(self, region: np.ndarray, templates: List[DigitTemplate]) -> TemplateMatchResult:
85
  """
86
  Match a region against a set of digit templates.
87
 
@@ -124,7 +124,7 @@ class ReadPlayClock:
124
 
125
  return TemplateMatchResult(digit_value=best_digit, confidence=best_confidence, is_valid=is_valid)
126
 
127
- def _try_double_digit(self, preprocessed: np.ndarray) -> TemplatePlayClockReading:
128
  """
129
  Try to read as double-digit display (10-40): tens on left, ones on right.
130
 
@@ -194,7 +194,7 @@ class ReadPlayClock:
194
  method="template_double",
195
  )
196
 
197
- def _try_single_digit(self, preprocessed: np.ndarray) -> TemplatePlayClockReading:
198
  """
199
  Try to read as single-digit display (0-9): digit is centered.
200
 
@@ -268,7 +268,7 @@ class ReadPlayClock:
268
  method="template_single",
269
  )
270
 
271
- def read(self, region: np.ndarray) -> TemplatePlayClockReading:
272
  """
273
  Read the play clock value from a region using dual-mode template matching.
274
 
@@ -306,7 +306,7 @@ class ReadPlayClock:
306
 
307
  def read_from_frame(
308
  self,
309
- frame: np.ndarray,
310
  scorebug_bbox: Tuple[int, int, int, int],
311
  clock_region_offset: Tuple[int, int, int, int],
312
  ) -> TemplatePlayClockReading:
@@ -347,7 +347,7 @@ class ReadPlayClock:
347
 
348
  def read_from_fixed_location(
349
  self,
350
- frame: np.ndarray,
351
  absolute_coords: Tuple[int, int, int, int],
352
  ) -> TemplatePlayClockReading:
353
  """
 
14
  """
15
 
16
  import logging
17
+ from typing import Any, List, Tuple
18
 
19
  import cv2
20
  import numpy as np
 
67
 
68
  logger.info("ReadPlayClock initialized")
69
 
70
+ def preprocess_region(self, region: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
71
  """
72
  Preprocess play clock region for template matching.
73
 
 
81
  """
82
  return preprocess_playclock_region(region, self.scale_factor)
83
 
84
+ def match_digit(self, region: np.ndarray[Any, Any], templates: List[DigitTemplate]) -> TemplateMatchResult:
85
  """
86
  Match a region against a set of digit templates.
87
 
 
124
 
125
  return TemplateMatchResult(digit_value=best_digit, confidence=best_confidence, is_valid=is_valid)
126
 
127
+ def _try_double_digit(self, preprocessed: np.ndarray[Any, Any]) -> TemplatePlayClockReading:
128
  """
129
  Try to read as double-digit display (10-40): tens on left, ones on right.
130
 
 
194
  method="template_double",
195
  )
196
 
197
+ def _try_single_digit(self, preprocessed: np.ndarray[Any, Any]) -> TemplatePlayClockReading:
198
  """
199
  Try to read as single-digit display (0-9): digit is centered.
200
 
 
268
  method="template_single",
269
  )
270
 
271
+ def read(self, region: np.ndarray[Any, Any]) -> TemplatePlayClockReading:
272
  """
273
  Read the play clock value from a region using dual-mode template matching.
274
 
 
306
 
307
  def read_from_frame(
308
  self,
309
+ frame: np.ndarray[Any, Any],
310
  scorebug_bbox: Tuple[int, int, int, int],
311
  clock_region_offset: Tuple[int, int, int, int],
312
  ) -> TemplatePlayClockReading:
 
347
 
348
  def read_from_fixed_location(
349
  self,
350
+ frame: np.ndarray[Any, Any],
351
  absolute_coords: Tuple[int, int, int, int],
352
  ) -> TemplatePlayClockReading:
353
  """
src/setup/models.py CHANGED
@@ -6,6 +6,8 @@ OCR-labeled samples are collected and averaged into digit templates.
6
  Also includes region configuration models for setup.
7
  """
8
 
 
 
9
  import numpy as np
10
  from pydantic import BaseModel, ConfigDict, Field
11
 
@@ -18,7 +20,7 @@ class DigitSample(BaseModel):
18
  digit_value: int = Field(..., description="0-9 for ones digit, 0-4 for tens digit, -1 for blank")
19
  is_tens_digit: bool = Field(..., description="True if this is the tens place digit")
20
  position: str = Field(..., description="'left', 'center', or 'right' - where digit appears in region")
21
- image: np.ndarray = Field(..., description="The digit image (grayscale, preprocessed)")
22
  source_clock_value: int = Field(..., description="The full clock value this was extracted from")
23
  timestamp: float = Field(..., description="Video timestamp where this was captured")
24
  confidence: float = Field(..., description="OCR confidence for this sample")
@@ -32,7 +34,7 @@ class DigitTemplate(BaseModel):
32
  digit_value: int = Field(..., description="0-9 for ones, 0-4 for tens, -1 for blank")
33
  is_tens_digit: bool = Field(..., description="True if this is a tens place template")
34
  position: str = Field(..., description="'left', 'center', or 'right' - where digit appears in region")
35
- template: np.ndarray = Field(..., description="The template image (grayscale)")
36
  sample_count: int = Field(..., description="Number of samples used to build this template")
37
  avg_confidence: float = Field(..., description="Average OCR confidence of source samples")
38
 
 
6
  Also includes region configuration models for setup.
7
  """
8
 
9
+ from typing import Any
10
+
11
  import numpy as np
12
  from pydantic import BaseModel, ConfigDict, Field
13
 
 
20
  digit_value: int = Field(..., description="0-9 for ones digit, 0-4 for tens digit, -1 for blank")
21
  is_tens_digit: bool = Field(..., description="True if this is the tens place digit")
22
  position: str = Field(..., description="'left', 'center', or 'right' - where digit appears in region")
23
+ image: np.ndarray[Any, Any] = Field(..., description="The digit image (grayscale, preprocessed)")
24
  source_clock_value: int = Field(..., description="The full clock value this was extracted from")
25
  timestamp: float = Field(..., description="Video timestamp where this was captured")
26
  confidence: float = Field(..., description="OCR confidence for this sample")
 
34
  digit_value: int = Field(..., description="0-9 for ones, 0-4 for tens, -1 for blank")
35
  is_tens_digit: bool = Field(..., description="True if this is a tens place template")
36
  position: str = Field(..., description="'left', 'center', or 'right' - where digit appears in region")
37
+ template: np.ndarray[Any, Any] = Field(..., description="The template image (grayscale)")
38
  sample_count: int = Field(..., description="Number of samples used to build this template")
39
  avg_confidence: float = Field(..., description="Average OCR confidence of source samples")
40
 
src/setup/playclock_region.py CHANGED
@@ -14,7 +14,7 @@ Color detection utilities are shared from utils.color to eliminate code duplicat
14
  import json
15
  import logging
16
  from pathlib import Path
17
- from typing import Optional, Tuple
18
 
19
  import cv2
20
  import numpy as np
@@ -90,7 +90,7 @@ class PlayClockRegionExtractor:
90
  self.config.height,
91
  )
92
 
93
- def extract_region(self, frame: np.ndarray, scorebug_bbox: Tuple[int, int, int, int]) -> Optional[np.ndarray]:
94
  """
95
  Extract the play clock region from the frame.
96
 
@@ -131,7 +131,7 @@ class PlayClockRegionExtractor:
131
  region = frame[pc_y : pc_y + pc_h, pc_x : pc_x + pc_w].copy()
132
  return region
133
 
134
- def preprocess_for_ocr(self, region: np.ndarray) -> np.ndarray:
135
  """
136
  Preprocess the play clock region for OCR (used during template building).
137
 
@@ -167,7 +167,7 @@ class PlayClockRegionExtractor:
167
 
168
  if is_red:
169
  # For red digits, use percentile-based threshold on the red channel
170
- threshold_value = np.percentile(scaled, 90)
171
  _, binary = cv2.threshold(scaled, threshold_value, 255, cv2.THRESH_BINARY)
172
  logger.debug("Red digit threshold (90th percentile): %.1f", threshold_value)
173
 
@@ -182,7 +182,7 @@ class PlayClockRegionExtractor:
182
  _, binary = cv2.threshold(scaled, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
183
 
184
  # Determine if we need to invert
185
- mean_intensity = np.mean(binary)
186
  if mean_intensity < 128:
187
  binary = cv2.bitwise_not(binary)
188
 
 
14
  import json
15
  import logging
16
  from pathlib import Path
17
+ from typing import Any, Optional, Tuple
18
 
19
  import cv2
20
  import numpy as np
 
90
  self.config.height,
91
  )
92
 
93
+ def extract_region(self, frame: np.ndarray[Any, Any], scorebug_bbox: Tuple[int, int, int, int]) -> Optional[np.ndarray[Any, Any]]:
94
  """
95
  Extract the play clock region from the frame.
96
 
 
131
  region = frame[pc_y : pc_y + pc_h, pc_x : pc_x + pc_w].copy()
132
  return region
133
 
134
+ def preprocess_for_ocr(self, region: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
135
  """
136
  Preprocess the play clock region for OCR (used during template building).
137
 
 
167
 
168
  if is_red:
169
  # For red digits, use percentile-based threshold on the red channel
170
+ threshold_value = float(np.percentile(np.asarray(scaled), 90))
171
  _, binary = cv2.threshold(scaled, threshold_value, 255, cv2.THRESH_BINARY)
172
  logger.debug("Red digit threshold (90th percentile): %.1f", threshold_value)
173
 
 
182
  _, binary = cv2.threshold(scaled, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
183
 
184
  # Determine if we need to invert
185
+ mean_intensity = np.mean(np.asarray(binary))
186
  if mean_intensity < 128:
187
  binary = cv2.bitwise_not(binary)
188
 
src/setup/template_builder.py CHANGED
@@ -57,11 +57,11 @@ class DigitTemplateBuilder:
57
  self.samples: Dict[Tuple[bool, int, str], List[DigitSample]] = {}
58
 
59
  # Track raw clock region images for potential reprocessing
60
- self.raw_regions: List[Tuple[float, int, np.ndarray]] = [] # (timestamp, clock_value, region)
61
 
62
  logger.info("DigitTemplateBuilder initialized (region: %dx%d)", region_width, region_height)
63
 
64
- def preprocess_region(self, region: np.ndarray) -> np.ndarray:
65
  """
66
  Preprocess play clock region for template extraction.
67
 
@@ -75,7 +75,9 @@ class DigitTemplateBuilder:
75
  """
76
  return preprocess_playclock_region(region, scale_factor=4)
77
 
78
- def extract_digits(self, preprocessed: np.ndarray, clock_value: int) -> Tuple[Optional[np.ndarray], Optional[np.ndarray], Optional[np.ndarray], Optional[np.ndarray]]:
 
 
79
  """
80
  Extract individual digit images from preprocessed play clock region.
81
 
@@ -97,7 +99,7 @@ class DigitTemplateBuilder:
97
  # Single-digit: far-left is blank (truly empty), ones is centered
98
  return None, None, extract_center_region(preprocessed), extract_far_left_region(preprocessed)
99
 
100
- def add_sample(self, region: np.ndarray, clock_value: int, timestamp: float, confidence: float = 1.0) -> None:
101
  """
102
  Add a play clock sample for template building.
103
 
@@ -130,6 +132,7 @@ class DigitTemplateBuilder:
130
 
131
  if clock_value >= 10:
132
  # Double-digit display (10-40): tens on left, ones on right
 
133
  # Store tens sample (left position)
134
  tens_sample = DigitSample(
135
  digit_value=tens_digit,
@@ -146,6 +149,7 @@ class DigitTemplateBuilder:
146
  self.samples[tens_key].append(tens_sample)
147
 
148
  # Store ones sample (right position)
 
149
  ones_sample = DigitSample(
150
  digit_value=ones_digit,
151
  is_tens_digit=False,
@@ -170,6 +174,7 @@ class DigitTemplateBuilder:
170
  else:
171
  # Single-digit display (0-9): digit is centered, tens position is blank
172
  # Store blank sample (far-left position - should be truly empty)
 
173
  blank_sample = DigitSample(
174
  digit_value=-1, # blank
175
  is_tens_digit=True,
@@ -185,6 +190,7 @@ class DigitTemplateBuilder:
185
  self.samples[blank_key].append(blank_sample)
186
 
187
  # Store ones sample (center position)
 
188
  ones_sample = DigitSample(
189
  digit_value=ones_digit,
190
  is_tens_digit=False,
 
57
  self.samples: Dict[Tuple[bool, int, str], List[DigitSample]] = {}
58
 
59
  # Track raw clock region images for potential reprocessing
60
+ self.raw_regions: List[Tuple[float, int, np.ndarray[Any, Any]]] = [] # (timestamp, clock_value, region)
61
 
62
  logger.info("DigitTemplateBuilder initialized (region: %dx%d)", region_width, region_height)
63
 
64
+ def preprocess_region(self, region: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
65
  """
66
  Preprocess play clock region for template extraction.
67
 
 
75
  """
76
  return preprocess_playclock_region(region, scale_factor=4)
77
 
78
+ def extract_digits(
79
+ self, preprocessed: np.ndarray[Any, Any], clock_value: int
80
+ ) -> Tuple[Optional[np.ndarray[Any, Any]], Optional[np.ndarray[Any, Any]], Optional[np.ndarray[Any, Any]], Optional[np.ndarray[Any, Any]]]:
81
  """
82
  Extract individual digit images from preprocessed play clock region.
83
 
 
99
  # Single-digit: far-left is blank (truly empty), ones is centered
100
  return None, None, extract_center_region(preprocessed), extract_far_left_region(preprocessed)
101
 
102
+ def add_sample(self, region: np.ndarray[Any, Any], clock_value: int, timestamp: float, confidence: float = 1.0) -> None:
103
  """
104
  Add a play clock sample for template building.
105
 
 
132
 
133
  if clock_value >= 10:
134
  # Double-digit display (10-40): tens on left, ones on right
135
+ assert tens_img is not None # Asserts: validated by extract_digits
136
  # Store tens sample (left position)
137
  tens_sample = DigitSample(
138
  digit_value=tens_digit,
 
149
  self.samples[tens_key].append(tens_sample)
150
 
151
  # Store ones sample (right position)
152
+ assert ones_right_img is not None # Asserts: validated by extract_digits
153
  ones_sample = DigitSample(
154
  digit_value=ones_digit,
155
  is_tens_digit=False,
 
174
  else:
175
  # Single-digit display (0-9): digit is centered, tens position is blank
176
  # Store blank sample (far-left position - should be truly empty)
177
+ assert blank_img is not None # Asserts: validated by extract_digits
178
  blank_sample = DigitSample(
179
  digit_value=-1, # blank
180
  is_tens_digit=True,
 
190
  self.samples[blank_key].append(blank_sample)
191
 
192
  # Store ones sample (center position)
193
+ assert ones_center_img is not None # Asserts: validated by extract_digits
194
  ones_sample = DigitSample(
195
  digit_value=ones_digit,
196
  is_tens_digit=False,
src/ui/api.py CHANGED
@@ -7,7 +7,7 @@ that are intended to be called by external code.
7
 
8
  import logging
9
  from pathlib import Path
10
- from typing import List, Optional, Tuple
11
 
12
  import numpy as np
13
 
@@ -26,12 +26,14 @@ def print_banner() -> None:
26
  print("=" * 60 + "\n")
27
 
28
 
29
- def extract_sample_frames_for_selection(video_path: str, start_time: float, num_frames: int = 5, interval: float = 2.0) -> List[Tuple[float, np.ndarray]]:
30
  """Extract sample frames from the video for region selection."""
31
  return extract_sample_frames(video_path, start_time, num_frames, interval)
32
 
33
 
34
- def select_scorebug_region(frames: List[Tuple[float, np.ndarray]], video_path: str | None = None) -> Tuple[Optional[Tuple[int, int, int, int]], Optional[Tuple[float, np.ndarray]]]:
 
 
35
  """
36
  Interactive selection of scorebug region.
37
 
@@ -70,7 +72,7 @@ def select_scorebug_region(frames: List[Tuple[float, np.ndarray]], video_path: s
70
  return bbox.to_tuple(), selected_frame
71
 
72
 
73
- def select_playclock_region(selected_frame: Tuple[float, np.ndarray], scorebug_bbox: Tuple[int, int, int, int]) -> Optional[Tuple[int, int, int, int]]:
74
  """
75
  Interactive selection of play clock region within the scorebug.
76
 
@@ -105,7 +107,7 @@ def select_playclock_region(selected_frame: Tuple[float, np.ndarray], scorebug_b
105
  return bbox.to_tuple() if bbox else None
106
 
107
 
108
- def select_timeout_region(selected_frame: Tuple[float, np.ndarray], scorebug_bbox: Tuple[int, int, int, int], team: str) -> Optional[Tuple[int, int, int, int]]:
109
  """
110
  Interactive selection of timeout indicator region for a team.
111
 
 
7
 
8
  import logging
9
  from pathlib import Path
10
+ from typing import Any, List, Optional, Tuple
11
 
12
  import numpy as np
13
 
 
26
  print("=" * 60 + "\n")
27
 
28
 
29
+ def extract_sample_frames_for_selection(video_path: str, start_time: float, num_frames: int = 5, interval: float = 2.0) -> List[Tuple[float, np.ndarray[Any, Any]]]:
30
  """Extract sample frames from the video for region selection."""
31
  return extract_sample_frames(video_path, start_time, num_frames, interval)
32
 
33
 
34
+ def select_scorebug_region(
35
+ frames: List[Tuple[float, np.ndarray[Any, Any]]], video_path: str | None = None
36
+ ) -> Tuple[Optional[Tuple[int, int, int, int]], Optional[Tuple[float, np.ndarray[Any, Any]]]]:
37
  """
38
  Interactive selection of scorebug region.
39
 
 
72
  return bbox.to_tuple(), selected_frame
73
 
74
 
75
+ def select_playclock_region(selected_frame: Tuple[float, np.ndarray[Any, Any]], scorebug_bbox: Tuple[int, int, int, int]) -> Optional[Tuple[int, int, int, int]]:
76
  """
77
  Interactive selection of play clock region within the scorebug.
78
 
 
107
  return bbox.to_tuple() if bbox else None
108
 
109
 
110
+ def select_timeout_region(selected_frame: Tuple[float, np.ndarray[Any, Any]], scorebug_bbox: Tuple[int, int, int, int], team: str) -> Optional[Tuple[int, int, int, int]]:
111
  """
112
  Interactive selection of timeout indicator region for a team.
113
 
src/ui/models.py CHANGED
@@ -5,7 +5,7 @@ This module contains all the data models used for region selection,
5
  including bounding boxes, view configurations, and selection state.
6
  """
7
 
8
- from typing import List, Optional, Tuple
9
 
10
  import numpy as np
11
  from pydantic import BaseModel, ConfigDict
@@ -75,13 +75,13 @@ class SelectionState(BaseModel):
75
  model_config = ConfigDict(arbitrary_types_allowed=True)
76
 
77
  frame_idx: int = 0
78
- frames: List[Tuple[float, np.ndarray]] = []
79
  should_quit: bool = False
80
  should_confirm: bool = False
81
  video_path: Optional[str] = None
82
 
83
  @property
84
- def current_frame(self) -> Tuple[float, np.ndarray]:
85
  """Get the current (timestamp, frame) tuple."""
86
  return self.frames[self.frame_idx]
87
 
@@ -91,6 +91,6 @@ class SelectionState(BaseModel):
91
  return self.frames[self.frame_idx][0]
92
 
93
  @property
94
- def frame(self) -> np.ndarray:
95
  """Get the current frame."""
96
  return self.frames[self.frame_idx][1]
 
5
  including bounding boxes, view configurations, and selection state.
6
  """
7
 
8
+ from typing import Any, List, Optional, Tuple
9
 
10
  import numpy as np
11
  from pydantic import BaseModel, ConfigDict
 
75
  model_config = ConfigDict(arbitrary_types_allowed=True)
76
 
77
  frame_idx: int = 0
78
+ frames: List[Tuple[float, np.ndarray[Any, Any]]] = []
79
  should_quit: bool = False
80
  should_confirm: bool = False
81
  video_path: Optional[str] = None
82
 
83
  @property
84
+ def current_frame(self) -> Tuple[float, np.ndarray[Any, Any]]:
85
  """Get the current (timestamp, frame) tuple."""
86
  return self.frames[self.frame_idx]
87
 
 
91
  return self.frames[self.frame_idx][0]
92
 
93
  @property
94
+ def frame(self) -> np.ndarray[Any, Any]:
95
  """Get the current frame."""
96
  return self.frames[self.frame_idx][1]
src/ui/sessions.py CHANGED
@@ -7,7 +7,7 @@ This module provides session classes for different types of region selection:
7
  - TimeoutSelectionSession: For selecting timeout indicator regions
8
  """
9
 
10
- from typing import Dict, List, Optional, Tuple
11
 
12
  import cv2
13
  import numpy as np
@@ -16,7 +16,7 @@ from .models import BBox, SelectionState, SelectionViewConfig
16
  from .selector import KeyHandler, RegionSelector
17
 
18
 
19
- def _extract_sample_frames_for_selection(video_path: str, start_time: float, num_frames: int = 5, interval: float = 2.0) -> List[Tuple[float, np.ndarray]]:
20
  """Extract sample frames for interactive selection.
21
 
22
  Note: Import is inside function to avoid circular imports.
@@ -42,7 +42,7 @@ class InteractiveSelectionSession:
42
  def __init__(
43
  self,
44
  window_name: str,
45
- frames: List[Tuple[float, np.ndarray]],
46
  view_config: Optional[SelectionViewConfig] = None,
47
  video_path: Optional[str] = None,
48
  ):
@@ -130,15 +130,15 @@ class InteractiveSelectionSession:
130
  # Display Methods (to be overridden by subclasses)
131
  # -------------------------------------------------------------------------
132
 
133
- def _render_frame(self, display_frame: np.ndarray) -> np.ndarray:
134
  """Render overlays on the display frame. Override in subclasses."""
135
  return display_frame
136
 
137
- def _get_display_frame(self) -> np.ndarray:
138
  """Get the frame to display. Override in subclasses for custom views."""
139
  return self.state.frame.copy()
140
 
141
- def _draw_selection_overlay(self, display_frame: np.ndarray) -> np.ndarray:
142
  """Draw the selection points and rectangles on the frame."""
143
  # Draw clicked points
144
  for i, point in enumerate(self.selector.points):
@@ -196,7 +196,7 @@ class InteractiveSelectionSession:
196
  class ScorebugSelectionSession(InteractiveSelectionSession):
197
  """Interactive session for selecting the scorebug region."""
198
 
199
- def __init__(self, frames: List[Tuple[float, np.ndarray]], video_path: Optional[str] = None):
200
  """Initialize scorebug selection session."""
201
  super().__init__(
202
  window_name="Select Scorebug Region",
@@ -211,7 +211,7 @@ class ScorebugSelectionSession(InteractiveSelectionSession):
211
  self._key_handlers[ord("S")] = self._handle_skip_5min
212
  self._key_handlers[13] = self._handle_confirm # Enter key
213
 
214
- def _render_frame(self, display_frame: np.ndarray) -> np.ndarray:
215
  """Render scorebug-specific overlays and instructions."""
216
  # Draw point labels
217
  for i, point in enumerate(self.selector.points):
@@ -242,7 +242,7 @@ class ScorebugSelectionSession(InteractiveSelectionSession):
242
  class PlayClockSelectionSession(InteractiveSelectionSession):
243
  """Interactive session for selecting the play clock region within a scorebug."""
244
 
245
- def __init__(self, frame: np.ndarray, scorebug_bbox: BBox, scale_factor: int = 3):
246
  """Initialize play clock selection session."""
247
  self.scorebug_bbox = scorebug_bbox
248
  self.scale_factor = scale_factor
@@ -268,7 +268,7 @@ class PlayClockSelectionSession(InteractiveSelectionSession):
268
  )
269
  self._key_handlers[13] = self._handle_confirm # Enter key
270
 
271
- def _render_frame(self, display_frame: np.ndarray) -> np.ndarray:
272
  """Render play clock selection overlays."""
273
  # Draw grid for reference
274
  grid_spacing = 50 * self.scale_factor
@@ -293,7 +293,7 @@ class PlayClockSelectionSession(InteractiveSelectionSession):
293
  class TimeoutSelectionSession(InteractiveSelectionSession):
294
  """Interactive session for selecting timeout indicator regions."""
295
 
296
- def __init__(self, frame: np.ndarray, scorebug_bbox: BBox, team: str, scale_factor: int = 3, padding: int = 50):
297
  """Initialize timeout selection session."""
298
  self.scorebug_bbox = scorebug_bbox
299
  self.scale_factor = scale_factor
@@ -328,7 +328,7 @@ class TimeoutSelectionSession(InteractiveSelectionSession):
328
  )
329
  self._key_handlers[13] = self._handle_confirm # Enter key
330
 
331
- def _render_frame(self, display_frame: np.ndarray) -> np.ndarray:
332
  """Render timeout selection overlays."""
333
  # Draw scorebug boundary for reference
334
  sb_scaled = self.scorebug_offset.scaled(self.scale_factor)
 
7
  - TimeoutSelectionSession: For selecting timeout indicator regions
8
  """
9
 
10
+ from typing import Any, Dict, List, Optional, Tuple
11
 
12
  import cv2
13
  import numpy as np
 
16
  from .selector import KeyHandler, RegionSelector
17
 
18
 
19
+ def _extract_sample_frames_for_selection(video_path: str, start_time: float, num_frames: int = 5, interval: float = 2.0) -> List[Tuple[float, np.ndarray[Any, Any]]]:
20
  """Extract sample frames for interactive selection.
21
 
22
  Note: Import is inside function to avoid circular imports.
 
42
  def __init__(
43
  self,
44
  window_name: str,
45
+ frames: List[Tuple[float, np.ndarray[Any, Any]]],
46
  view_config: Optional[SelectionViewConfig] = None,
47
  video_path: Optional[str] = None,
48
  ):
 
130
  # Display Methods (to be overridden by subclasses)
131
  # -------------------------------------------------------------------------
132
 
133
+ def _render_frame(self, display_frame: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
134
  """Render overlays on the display frame. Override in subclasses."""
135
  return display_frame
136
 
137
+ def _get_display_frame(self) -> np.ndarray[Any, Any]:
138
  """Get the frame to display. Override in subclasses for custom views."""
139
  return self.state.frame.copy()
140
 
141
+ def _draw_selection_overlay(self, display_frame: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
142
  """Draw the selection points and rectangles on the frame."""
143
  # Draw clicked points
144
  for i, point in enumerate(self.selector.points):
 
196
  class ScorebugSelectionSession(InteractiveSelectionSession):
197
  """Interactive session for selecting the scorebug region."""
198
 
199
+ def __init__(self, frames: List[Tuple[float, np.ndarray[Any, Any]]], video_path: Optional[str] = None):
200
  """Initialize scorebug selection session."""
201
  super().__init__(
202
  window_name="Select Scorebug Region",
 
211
  self._key_handlers[ord("S")] = self._handle_skip_5min
212
  self._key_handlers[13] = self._handle_confirm # Enter key
213
 
214
+ def _render_frame(self, display_frame: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
215
  """Render scorebug-specific overlays and instructions."""
216
  # Draw point labels
217
  for i, point in enumerate(self.selector.points):
 
242
  class PlayClockSelectionSession(InteractiveSelectionSession):
243
  """Interactive session for selecting the play clock region within a scorebug."""
244
 
245
+ def __init__(self, frame: np.ndarray[Any, Any], scorebug_bbox: BBox, scale_factor: int = 3):
246
  """Initialize play clock selection session."""
247
  self.scorebug_bbox = scorebug_bbox
248
  self.scale_factor = scale_factor
 
268
  )
269
  self._key_handlers[13] = self._handle_confirm # Enter key
270
 
271
+ def _render_frame(self, display_frame: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
272
  """Render play clock selection overlays."""
273
  # Draw grid for reference
274
  grid_spacing = 50 * self.scale_factor
 
293
  class TimeoutSelectionSession(InteractiveSelectionSession):
294
  """Interactive session for selecting timeout indicator regions."""
295
 
296
+ def __init__(self, frame: np.ndarray[Any, Any], scorebug_bbox: BBox, team: str, scale_factor: int = 3, padding: int = 50):
297
  """Initialize timeout selection session."""
298
  self.scorebug_bbox = scorebug_bbox
299
  self.scale_factor = scale_factor
 
328
  )
329
  self._key_handlers[13] = self._handle_confirm # Enter key
330
 
331
+ def _render_frame(self, display_frame: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
332
  """Render timeout selection overlays."""
333
  # Draw scorebug boundary for reference
334
  sb_scaled = self.scorebug_offset.scaled(self.scale_factor)
src/utils/color.py CHANGED
@@ -11,6 +11,7 @@ These utilities are shared across:
11
  """
12
 
13
  import logging
 
14
 
15
  import cv2
16
  import numpy as np
@@ -18,7 +19,7 @@ import numpy as np
18
  logger = logging.getLogger(__name__)
19
 
20
 
21
- def detect_red_digits(region: np.ndarray) -> bool:
22
  """
23
  Detect if the play clock digits are red.
24
 
@@ -31,13 +32,13 @@ def detect_red_digits(region: np.ndarray) -> bool:
31
  Returns:
32
  True if red digits detected, False otherwise
33
  """
34
- # Split into BGR channels
35
  b, g, r = cv2.split(region)
36
 
37
  # Calculate mean values for each channel
38
- r_mean = np.mean(r)
39
- g_mean = np.mean(g)
40
- b_mean = np.mean(b)
41
 
42
  # Red digits: high red channel, very low green/blue, red > 2x green
43
  max_gb = max(g_mean, b_mean)
@@ -49,7 +50,7 @@ def detect_red_digits(region: np.ndarray) -> bool:
49
  return is_red
50
 
51
 
52
- def normalize_to_grayscale(region: np.ndarray) -> np.ndarray:
53
  """
54
  Normalize a play clock region to grayscale, handling both red and white digits.
55
 
 
11
  """
12
 
13
  import logging
14
+ from typing import Any
15
 
16
  import cv2
17
  import numpy as np
 
19
  logger = logging.getLogger(__name__)
20
 
21
 
22
+ def detect_red_digits(region: np.ndarray[Any, Any]) -> bool:
23
  """
24
  Detect if the play clock digits are red.
25
 
 
32
  Returns:
33
  True if red digits detected, False otherwise
34
  """
35
+ # Split into BGR channels (np.asarray normalizes cv2.Mat type for numpy)
36
  b, g, r = cv2.split(region)
37
 
38
  # Calculate mean values for each channel
39
+ r_mean = np.mean(np.asarray(r))
40
+ g_mean = np.mean(np.asarray(g))
41
+ b_mean = np.mean(np.asarray(b))
42
 
43
  # Red digits: high red channel, very low green/blue, red > 2x green
44
  max_gb = max(g_mean, b_mean)
 
50
  return is_red
51
 
52
 
53
+ def normalize_to_grayscale(region: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
54
  """
55
  Normalize a play clock region to grayscale, handling both red and white digits.
56
 
src/utils/regions.py CHANGED
@@ -13,13 +13,15 @@ These utilities are shared across:
13
  - setup/template_builder.py (template building)
14
  """
15
 
 
 
16
  import cv2
17
  import numpy as np
18
 
19
  from .color import normalize_to_grayscale
20
 
21
 
22
- def extract_left_region(preprocessed: np.ndarray) -> np.ndarray:
23
  """
24
  Extract left region for tens digit in double-digit displays.
25
 
@@ -34,7 +36,7 @@ def extract_left_region(preprocessed: np.ndarray) -> np.ndarray:
34
  return preprocessed[:, : mid_x - 2] # Small gap in middle
35
 
36
 
37
- def extract_right_region(preprocessed: np.ndarray) -> np.ndarray:
38
  """
39
  Extract right region for ones digit in double-digit displays.
40
 
@@ -49,7 +51,7 @@ def extract_right_region(preprocessed: np.ndarray) -> np.ndarray:
49
  return preprocessed[:, mid_x + 2 :]
50
 
51
 
52
- def extract_center_region(preprocessed: np.ndarray) -> np.ndarray:
53
  """
54
  Extract center region for ones digit in single-digit displays.
55
 
@@ -67,7 +69,7 @@ def extract_center_region(preprocessed: np.ndarray) -> np.ndarray:
67
  return preprocessed[:, center_start:center_end]
68
 
69
 
70
- def extract_far_left_region(preprocessed: np.ndarray) -> np.ndarray:
71
  """
72
  Extract far left region for blank detection in single-digit displays.
73
 
@@ -85,7 +87,7 @@ def extract_far_left_region(preprocessed: np.ndarray) -> np.ndarray:
85
  return preprocessed[:, :far_left_end]
86
 
87
 
88
- def preprocess_playclock_region(region: np.ndarray, scale_factor: int = 4) -> np.ndarray:
89
  """
90
  Preprocess play clock region for template matching or building.
91
 
@@ -109,7 +111,7 @@ def preprocess_playclock_region(region: np.ndarray, scale_factor: int = 4) -> np
109
  _, binary = cv2.threshold(scaled, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
110
 
111
  # Ensure white digits on black background (digits should be bright)
112
- mean_intensity = np.mean(binary)
113
  if mean_intensity > 128:
114
  binary = cv2.bitwise_not(binary)
115
 
 
13
  - setup/template_builder.py (template building)
14
  """
15
 
16
+ from typing import Any
17
+
18
  import cv2
19
  import numpy as np
20
 
21
  from .color import normalize_to_grayscale
22
 
23
 
24
+ def extract_left_region(preprocessed: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
25
  """
26
  Extract left region for tens digit in double-digit displays.
27
 
 
36
  return preprocessed[:, : mid_x - 2] # Small gap in middle
37
 
38
 
39
+ def extract_right_region(preprocessed: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
40
  """
41
  Extract right region for ones digit in double-digit displays.
42
 
 
51
  return preprocessed[:, mid_x + 2 :]
52
 
53
 
54
+ def extract_center_region(preprocessed: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
55
  """
56
  Extract center region for ones digit in single-digit displays.
57
 
 
69
  return preprocessed[:, center_start:center_end]
70
 
71
 
72
+ def extract_far_left_region(preprocessed: np.ndarray[Any, Any]) -> np.ndarray[Any, Any]:
73
  """
74
  Extract far left region for blank detection in single-digit displays.
75
 
 
87
  return preprocessed[:, :far_left_end]
88
 
89
 
90
+ def preprocess_playclock_region(region: np.ndarray[Any, Any], scale_factor: int = 4) -> np.ndarray[Any, Any]:
91
  """
92
  Preprocess play clock region for template matching or building.
93
 
 
111
  _, binary = cv2.threshold(scaled, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
112
 
113
  # Ensure white digits on black background (digits should be bright)
114
+ mean_intensity = np.mean(np.asarray(binary))
115
  if mean_intensity > 128:
116
  binary = cv2.bitwise_not(binary)
117
 
src/video/frame_extractor.py CHANGED
@@ -7,7 +7,7 @@ getting video metadata, and other video-related utilities.
7
 
8
  import logging
9
  import subprocess
10
- from typing import Callable, List, Optional, Tuple
11
 
12
  import cv2
13
  import numpy as np
@@ -87,7 +87,7 @@ def get_video_frame_count(video_path: str) -> int:
87
  return frame_count
88
 
89
 
90
- def extract_sample_frames(video_path: str, start_time: float, num_frames: int = 5, interval: float = 2.0) -> List[Tuple[float, np.ndarray]]:
91
  """
92
  Extract sample frames from a video file at specified intervals.
93
 
@@ -126,7 +126,7 @@ def extract_sample_frames(video_path: str, start_time: float, num_frames: int =
126
  return frames
127
 
128
 
129
- def extract_frame_at_time(video_path: str, timestamp: float) -> Optional[np.ndarray]:
130
  """
131
  Extract a single frame from a video at a specific timestamp.
132
 
@@ -161,7 +161,7 @@ def extract_frames_with_callback(
161
  start_time: float,
162
  end_time: Optional[float],
163
  interval: float,
164
- callback: Callable[[float, np.ndarray], bool],
165
  ) -> int:
166
  """
167
  Extract frames from a video and call a callback function for each frame.
@@ -175,7 +175,7 @@ def extract_frames_with_callback(
175
  end_time: End time in seconds, or None to process until end.
176
  interval: Time interval between frames in seconds.
177
  callback: Function to call for each frame. Signature:
178
- callback(timestamp: float, frame: np.ndarray) -> bool
179
  Return False to stop processing early.
180
 
181
  Returns:
 
7
 
8
  import logging
9
  import subprocess
10
+ from typing import Any, Callable, List, Optional, Tuple
11
 
12
  import cv2
13
  import numpy as np
 
87
  return frame_count
88
 
89
 
90
+ def extract_sample_frames(video_path: str, start_time: float, num_frames: int = 5, interval: float = 2.0) -> List[Tuple[float, np.ndarray[Any, Any]]]:
91
  """
92
  Extract sample frames from a video file at specified intervals.
93
 
 
126
  return frames
127
 
128
 
129
+ def extract_frame_at_time(video_path: str, timestamp: float) -> Optional[np.ndarray[Any, Any]]:
130
  """
131
  Extract a single frame from a video at a specific timestamp.
132
 
 
161
  start_time: float,
162
  end_time: Optional[float],
163
  interval: float,
164
+ callback: Callable[[float, np.ndarray[Any, Any]], bool],
165
  ) -> int:
166
  """
167
  Extract frames from a video and call a callback function for each frame.
 
175
  end_time: End time in seconds, or None to process until end.
176
  interval: Time interval between frames in seconds.
177
  callback: Function to call for each frame. Signature:
178
+ callback(timestamp: float, frame: np.ndarray[Any, Any]) -> bool
179
  Return False to stop processing early.
180
 
181
  Returns:
src/video/frame_reader.py CHANGED
@@ -9,7 +9,7 @@ import logging
9
  import queue
10
  import threading
11
  import time
12
- from typing import Optional, Tuple
13
 
14
  import cv2
15
  import numpy as np
@@ -45,8 +45,8 @@ class ThreadedFrameReader:
45
  self.frame_skip = frame_skip
46
  self.queue_size = queue_size
47
 
48
- # Frame queue: (frame_number, frame_data) or (frame_number, None) for read failures
49
- self.frame_queue: queue.Queue = queue.Queue(maxsize=queue_size)
50
 
51
  # Control flags
52
  self.stop_flag = threading.Event()
@@ -76,7 +76,7 @@ class ThreadedFrameReader:
76
  self.reader_thread.join(timeout=2.0)
77
  logger.debug("Threaded frame reader stopped (read %d frames, %.2fs I/O)", self.frames_read, self.io_time)
78
 
79
- def get_frame(self, timeout: float = 5.0) -> Optional[Tuple[int, Optional[np.ndarray]]]:
80
  """
81
  Get the next frame from the queue.
82
 
 
9
  import queue
10
  import threading
11
  import time
12
+ from typing import Any, Optional, Tuple
13
 
14
  import cv2
15
  import numpy as np
 
45
  self.frame_skip = frame_skip
46
  self.queue_size = queue_size
47
 
48
+ # Frame queue: (frame_number, frame_data) or None as end-of-stream sentinel
49
+ self.frame_queue: queue.Queue[Tuple[int, np.ndarray[Any, Any] | None] | None] = queue.Queue(maxsize=queue_size)
50
 
51
  # Control flags
52
  self.stop_flag = threading.Event()
 
76
  self.reader_thread.join(timeout=2.0)
77
  logger.debug("Threaded frame reader stopped (read %d frames, %.2fs I/O)", self.frames_read, self.io_time)
78
 
79
+ def get_frame(self, timeout: float = 5.0) -> Optional[Tuple[int, Optional[np.ndarray[Any, Any]]]]:
80
  """
81
  Get the next frame from the queue.
82