# Author: Zhen Ye
# Commit 5c36daa: Implement Batch Inference & Queue Backpressure Fixes
from typing import NamedTuple, Optional, Sequence
import numpy as np
class DetectionResult(NamedTuple):
    """Immutable record holding the detections produced for one frame.

    The sequences are parallel: index ``i`` of ``scores`` and ``labels``
    refers to the same detection as row ``i`` of ``boxes`` (presumably —
    exact box layout depends on the concrete detector; confirm at call site).
    """

    # Detected box coordinates as a numpy array.
    boxes: np.ndarray
    # Per-detection confidence scores.
    scores: Sequence[float]
    # Per-detection integer class ids.
    labels: Sequence[int]
    # Optional human-readable names matching ``labels``; None when unavailable.
    label_names: Optional[Sequence[str]] = None
class ObjectDetector:
    """Abstract detector interface so inference code stays agnostic to
    the underlying model implementation.

    Class attributes:
        name: identifier for the concrete detector (set by subclasses).
        supports_batch: True when the model can score several frames in one call.
        max_batch_size: largest batch the model accepts when batching.
    """

    name: str
    supports_batch: bool = False
    max_batch_size: int = 1

    def predict(self, frame: np.ndarray, queries: Sequence[str]) -> DetectionResult:
        """Run detection on a single frame. Concrete subclasses must override."""
        raise NotImplementedError

    def predict_batch(self, frames: Sequence[np.ndarray], queries: Sequence[str]) -> Sequence[DetectionResult]:
        """Score each frame in turn via :meth:`predict`.

        This default is a sequential fallback; subclasses with real batch
        support should override it with a single batched forward pass.
        """
        results = []
        for single_frame in frames:
            results.append(self.predict(single_frame, queries))
        return results