| """ |
| Embedding utilities - Generate vector embeddings using DashScope. |
| """ |
| from __future__ import annotations |
|
|
| from http import HTTPStatus |
| import os |
| import time |
| from typing import List, Optional, Dict, Any |
| import dashscope |
| from dashscope import TextEmbedding |
| import numpy as np |
| import config |
| from utils.benchmark_logging import current_call_context, log_api_call, now_iso |
|
|
|
|
class EmbeddingModel:
    """
    Embedding model backed by DashScope's text-embedding-v4 API.
    """

    # Detected vector dimensions cached per model name so repeated
    # instantiations do not re-probe the API.
    _DIMENSION_CACHE: Dict[str, int] = {}

    def __init__(self, model_name: Optional[str] = None, api_key: Optional[str] = None):
        self.model_name = model_name or config.EMBEDDING_MODEL
        self.api_key = (
            api_key
            or getattr(config, "DASHSCOPE_API_KEY", None)
            or os.getenv("DASHSCOPE_API_KEY")
            or os.getenv("OPENAI_API_KEY")
        )
        if not self.api_key:
            raise ValueError("DASHSCOPE_API_KEY or OPENAI_API_KEY must be set for EmbeddingModel")
        self.dimension: Optional[int] = None
        self.model_type = "dashscope_text_embedding"
        self.batch_size = max(1, getattr(config, "EMBEDDING_BATCH_SIZE", 8))

        dashscope.api_key = self.api_key
        print(f"Initializing DashScope embedding model: {self.model_name}")
        cached_dimension = self._DIMENSION_CACHE.get(self.model_name)
        if cached_dimension is not None:
            self.dimension = cached_dimension
            print(f"Using cached DashScope embedding dimension: {self.dimension}")
        else:
            self._probe_dimension()

    def encode(self, texts: List[str], is_query: bool = False) -> np.ndarray:
        """
        Encode a list of texts into a matrix of vectors.
        """
        if isinstance(texts, str):
            texts = [texts]
        if not texts:
            width = self.dimension or 0
            return np.empty((0, width), dtype=np.float32)

        # Split the inputs into API-sized batches.
        batches = []
        for start in range(0, len(texts), self.batch_size):
            batches.append(texts[start:start + self.batch_size])

        encoded_batches = []
        for batch in batches:
            encoded_batches.append(self._encode_batch(batch, is_query=is_query))
        return np.vstack(encoded_batches)

    def encode_single(self, text: str, is_query: bool = False) -> np.ndarray:
        """
        Encode a single text.
        """
        return self.encode([text], is_query=is_query)[0]

    def encode_query(self, queries: List[str]) -> np.ndarray:
        """
        Encode queries.
        """
        return self.encode(queries, is_query=True)

    def encode_documents(self, documents: List[str]) -> np.ndarray:
        """
        Encode documents.
        """
        return self.encode(documents, is_query=False)

    def _probe_dimension(self) -> None:
        # Issue a one-off embedding call to discover the vector dimension for this model.
        vectors, usage, request_id = self._embed_batch(["dimension probe"], token_stage="setup_embedding_probe")
        if not vectors or not vectors[0]:
            raise RuntimeError("DashScope embedding dimension probe returned an empty vector")
        self.dimension = len(vectors[0])
        self._DIMENSION_CACHE[self.model_name] = self.dimension
        print(f"Detected embedding dimension from DashScope: {self.dimension}")
        self._log_embedding_call(
            texts=["dimension probe"],
            vectors=vectors,
            usage=usage,
            request_id=request_id,
            success=True,
            token_stage="setup_embedding_probe",
        )

    def _encode_batch(self, texts: List[str], is_query: bool = False) -> np.ndarray:
        vectors, usage, request_id = self._embed_batch(
            texts,
            token_stage=None,
            is_query=is_query,
        )
        matrix = np.array(vectors, dtype=np.float32)
        matrix = self._normalize(matrix)
        self._log_embedding_call(
            texts=texts,
            vectors=vectors,
            usage=usage,
            request_id=request_id,
            success=True,
            is_query=is_query,
        )
        return matrix

    def _embed_batch(
        self,
        texts: List[str],
        token_stage: Optional[str],
        is_query: bool = False,
    ) -> tuple[List[List[float]], Dict[str, Any], Optional[str]]:
        last_exception = None
        max_retries = getattr(config, "EMBEDDING_MAX_RETRIES", 5)
        for attempt in range(max_retries):
            try:
                response = TextEmbedding.call(
                    model=self.model_name,
                    input=texts,
                    api_key=self.api_key,
                )
                status = getattr(response, "status_code", None)
                if status not in (HTTPStatus.OK, 200):
                    raise RuntimeError(f"DashScope embedding call failed with status {status}: {response}")

                output = getattr(response, "output", None) or response.get("output") or {}
                embeddings = output.get("embeddings") or []
                # Re-order the returned vectors by text_index so they line up with the inputs.
                ordered = [None] * len(texts)
                for item in embeddings:
                    index = item.get("text_index", 0)
                    ordered[index] = item.get("embedding")
                if any(vector is None for vector in ordered):
                    raise RuntimeError(f"DashScope embedding response missing vectors for some inputs: {response}")

                usage = self._usage_to_dict(getattr(response, "usage", None) or response.get("usage"))
                request_id = getattr(response, "request_id", None) or response.get("request_id")
                return ordered, usage, request_id
            except Exception as exc:
                last_exception = exc
                self._log_embedding_call(
                    texts=texts,
                    vectors=[],
                    usage={},
                    request_id=None,
                    success=False,
                    error=exc,
                    token_stage=token_stage,
                    is_query=is_query,
                    attempt=attempt + 1,
                )
                if attempt < max_retries - 1:
                    # Exponential backoff: 1s, 2s, 4s, ...
                    wait_time = 2 ** attempt
                    print(f"Embedding API call failed (attempt {attempt + 1}/{max_retries}): {exc}")
                    print(f"Retrying in {wait_time} seconds...")
                    time.sleep(wait_time)
                else:
                    print(f"Embedding API call failed after {max_retries} attempts: {exc}")
        raise last_exception

    def _usage_to_dict(self, usage: Any) -> Dict[str, Any]:
        if usage is None:
            return {}
        if hasattr(usage, "items"):
            return dict(usage)
        return {"total_tokens": getattr(usage, "total_tokens", None)}

    def _normalize(self, matrix: np.ndarray) -> np.ndarray:
        # L2-normalize each row so dot products behave as cosine similarities;
        # zero-norm rows are left unchanged to avoid division by zero.
        norms = np.linalg.norm(matrix, axis=1, keepdims=True)
        norms = np.where(norms == 0, 1.0, norms)
        return matrix / norms

    def _log_embedding_call(
        self,
        *,
        texts: List[str],
        vectors: List[List[float]],
        usage: Dict[str, Any],
        request_id: Optional[str],
        success: bool,
        error: Optional[Exception] = None,
        token_stage: Optional[str] = None,
        is_query: bool = False,
        attempt: int = 1,
    ) -> None:
        context = current_call_context()
        log_api_call(
            {
                "timestamp": now_iso(),
                "success": success,
                "provider": "dashscope",
                "api_kind": "embedding",
                "stage": context.get("stage") if token_stage is None else "setup",
                "token_stage": token_stage or context.get("embedding_stage") or context.get("stage"),
                "patient_id": context.get("patient_id"),
                "question_no": context.get("question_no"),
                "text_id": context.get("text_id"),
                "operation_id": context.get("operation_id"),
                "attempt": attempt,
                "model": self.model_name,
                "request_id": request_id,
                "input_count": len(texts),
                "inputs": texts,
                "is_query": is_query,
                "vector_dimension": self.dimension or (len(vectors[0]) if vectors else None),
                "usage": usage,
                "error": repr(error) if error is not None else None,
            }
        )
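

# Example usage - an illustrative sketch, not part of the production pipeline.
# It assumes DASHSCOPE_API_KEY is set in the environment (or in config) and that
# config.EMBEDDING_MODEL names a DashScope embedding model such as text-embedding-v4;
# the sample sentences below are hypothetical.
if __name__ == "__main__":
    model = EmbeddingModel()
    doc_vectors = model.encode_documents([
        "Aspirin is commonly used to reduce fever.",
        "The patient reported no known drug allergies.",
    ])
    query_vectors = model.encode_query(["What reduces fever?"])
    # Rows are L2-normalized, so the dot product equals cosine similarity.
    similarities = doc_vectors @ query_vectors.T
    print(f"Embedding dimension: {model.dimension}")
    print(f"Query/document cosine similarities: {similarities.ravel()}")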