# Author: Zhen Ye
# Commit: Implement Batch Inference & Queue Backpressure Fixes (5c36daa)
from typing import NamedTuple, Optional, Sequence, List
import numpy as np
class SegmentationResult(NamedTuple):
    """Result from segmentation inference.

    Immutable record bundling the per-instance masks produced by a
    segmenter with optional confidence scores and bounding boxes.
    """

    masks: np.ndarray  # NxHxW binary or soft masks, one entry per instance
    scores: Optional[np.ndarray] = None  # Confidence scores; presumably length N — TODO confirm against producers
    boxes: Optional[np.ndarray] = None  # Bounding boxes (xyxy)
class Segmenter:
    """Abstract base class for segmentation model backends.

    Subclasses must override :meth:`predict`. :meth:`predict_batch` ships
    with a default fallback that applies ``predict`` one frame at a time,
    so backends without native batching support work out of the box.
    """

    # Identifier for this backend (set by subclasses).
    name: str
    # Whether the backend can process several frames in one forward pass.
    supports_batch: bool = False
    # Upper bound on frames per batched call (meaningful when supports_batch).
    max_batch_size: int = 1

    def predict(self, frame: np.ndarray, text_prompts: Optional[list] = None) -> SegmentationResult:
        """
        Run segmentation on a single frame.
        Args:
            frame: Input image as numpy array (HxWxC)
            text_prompts: Optional list of text prompts for segmentation
        Returns:
            SegmentationResult with masks and optional metadata
        Raises:
            NotImplementedError: always, in this base class; subclasses
                supply the actual inference logic.
        """
        raise NotImplementedError

    def predict_batch(self, frames: Sequence[np.ndarray], text_prompts: Optional[list] = None) -> Sequence[SegmentationResult]:
        """Segment every frame in *frames*.

        Default implementation: sequential per-frame calls to
        :meth:`predict`; batch-capable backends should override this.
        """
        results: List[SegmentationResult] = []
        for single_frame in frames:
            results.append(self.predict(single_frame, text_prompts))
        return results