#!/usr/bin/env python3
"""
Embedders service (deterministic hashing embeddings; no external deps).

Endpoint:
  POST /embed {text: str} or {texts: [str]} -> {vector: [...]} or {vectors: [[...]]}

Environment:
  PORT (default 7013), DIM (default 384)
"""
import os
import hashlib
from typing import List, Dict, Any

from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from prometheus_client import Counter, Histogram, make_asgi_app
import uvicorn

PORT = int(os.getenv("PORT", "7013"))
DIM = int(os.getenv("DIM", "384"))

app = FastAPI(title="Nova Embedders", version="0.1.0")
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

REQUESTS = Counter("embed_requests_total", "Embedders requests", ["route"])
LATENCY = Histogram("embed_request_latency_seconds", "Latency", ["route"])


def _embed_one(text: str) -> List[float]:
    """Embed *text* as a DIM-dimensional hashed bag-of-words vector.

    Deterministic: each lowercased whitespace token is hashed (SHA-256)
    to a bucket index and counted, then the count vector is L2-normalized.
    Empty/whitespace-only input yields the all-zero vector (the norm
    guard skips division by zero).
    """
    vec = [0.0] * DIM
    for tok in text.lower().split():
        h = int(hashlib.sha256(tok.encode()).hexdigest(), 16)
        vec[h % DIM] += 1.0
    # L2 normalize (skip when the vector is all zeros)
    norm = sum(v * v for v in vec) ** 0.5
    if norm > 0:
        vec = [v / norm for v in vec]
    return vec


@app.get("/health")
def health() -> Dict[str, Any]:
    """Liveness probe; reports configured port and embedding dimension."""
    REQUESTS.labels(route="health").inc()
    return {"status": "ok", "port": PORT, "dim": DIM}


@app.post("/embed")
async def embed(req: Request) -> JSONResponse:
    """Embed a single 'text' or a batch 'texts'.

    Returns 400 (instead of an unhandled 500) when the request body is
    not valid JSON, is not a JSON object, or 'texts' is not a list.
    """
    with LATENCY.labels(route="embed").time():
        REQUESTS.labels(route="embed").inc()
        try:
            body = await req.json()
        except Exception:
            # Malformed or absent JSON body: report a client error
            # rather than letting the parse failure surface as a 500.
            return JSONResponse(status_code=400, content={"error": "invalid JSON body"})
        if not isinstance(body, dict):
            # A bare list/string/number body has no 'text'/'texts' keys.
            return JSONResponse(status_code=400, content={"error": "provide 'text' or 'texts'"})
        if "text" in body:
            return JSONResponse(status_code=200, content={"vector": _embed_one(str(body["text"]))})
        if "texts" in body:
            texts = body["texts"]
            if not isinstance(texts, list):
                # Guard: iterating a plain string here would silently
                # embed it character by character.
                return JSONResponse(status_code=400, content={"error": "'texts' must be a list"})
            return JSONResponse(status_code=200, content={"vectors": [_embed_one(str(t)) for t in texts]})
        return JSONResponse(status_code=400, content={"error": "provide 'text' or 'texts'"})


# Prometheus metrics (mounted as a sub-app scrape target)
metrics_app = make_asgi_app()
app.mount("/metrics", metrics_app)

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=PORT)