File size: 2,128 Bytes
ea93121
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
"""Runtime helpers for offline model loading and image decoding."""

from __future__ import annotations

import base64
import io
import logging
from dataclasses import dataclass
from pathlib import Path
from typing import Optional

import numpy as np

logger = logging.getLogger(__name__)

# Pillow is an optional dependency: decode_image_b64 checks `Image is not None`
# and falls back to a raw-bytes path when it is missing, so any import failure
# is deliberately tolerated here rather than crashing module import.
try:
    from PIL import Image
except Exception:  # pragma: no cover - optional dependency
    Image = None


@dataclass
class RuntimeConfig:
    """Model runtime configuration bound to local artifact paths."""

    # Filesystem path to the local model artifact (e.g. an ONNX file).
    model_path: Path
    # Execution provider name as understood by ONNX Runtime; the default
    # matches the provider used by maybe_load_onnx below.
    provider: str = "CPUExecutionProvider"


def decode_image_b64(image_b64: str, size: int = 112) -> np.ndarray:
    """Decode a base64-encoded image into a (size, size, 3) float32 array.

    Args:
        image_b64: Base64 string containing the image bytes.
        size: Side length of the square output; defaults to 112.

    Returns:
        RGB array of shape (size, size, 3), dtype float32, values in [0, 1].

    Raises:
        ValueError: If the decoded payload is empty.
    """
    payload = base64.b64decode(image_b64)
    if not payload:
        raise ValueError("empty image payload")

    if Image is None:
        # Pillow-free fallback: reinterpret the raw bytes directly as pixel
        # data, zero-padding if the payload is shorter than one full frame.
        n_bytes = size * size * 3
        flat = np.frombuffer(payload[:n_bytes], dtype=np.uint8)
        if flat.size < n_bytes:
            flat = np.pad(flat, (0, n_bytes - flat.size), mode="constant")
        return flat.reshape(size, size, 3).astype(np.float32) / 255.0

    resized = Image.open(io.BytesIO(payload)).convert("RGB").resize((size, size))
    return np.asarray(resized, dtype=np.float32) / 255.0


def maybe_load_onnx(model_path: Path, provider: str = "CPUExecutionProvider"):
    """Return an ONNX Runtime session for *model_path*, or None.

    None is returned (with a warning logged) when the model file does not
    exist, or when onnxruntime is missing or fails to build a session.

    Args:
        model_path: Path to the .onnx artifact on disk.
        provider: Execution provider handed to InferenceSession.
    """
    if not model_path.exists():
        logger.warning("ONNX model not found: %s", model_path)
        return None

    try:
        import onnxruntime as ort  # type: ignore

        session = ort.InferenceSession(str(model_path), providers=[provider])
        logger.info("Loaded ONNX model: %s", model_path)
    except Exception as exc:  # pragma: no cover - optional dependency
        logger.warning("ONNX runtime unavailable or failed to load %s: %s", model_path, exc)
        return None
    return session


def l2_normalize(vec: np.ndarray, eps: float = 1e-9, axis: Optional[int] = None) -> np.ndarray:
    """L2 normalize an array.

    Generalized to support batched input: pass ``axis`` to normalize each
    slice along that axis independently (e.g. ``axis=1`` for row vectors in
    a 2-D embedding matrix). The default ``axis=None`` keeps the original
    behavior of dividing by the norm of the flattened array.

    Args:
        vec: Input array.
        eps: Stabilizer added to the norm to avoid division by zero.
        axis: Axis along which to compute the norm, or None for the whole
            array.

    Returns:
        Array of the same shape as ``vec`` scaled to unit L2 norm (per
        ``axis`` slice when given).
    """
    # keepdims=True so the norm broadcasts back against vec for any axis.
    norm = np.linalg.norm(vec, axis=axis, keepdims=True)
    return vec / (norm + eps)