"""Embedding Engine - generation of facial embedding vectors."""
from deepface import DeepFace
import numpy as np
from loguru import logger


class EmbeddingEngine:
    """Generates facial embeddings using deep-learning models.

    Wraps DeepFace.represent with a fixed model choice; detection is
    assumed to have been done upstream (e.g. MTCNN), so the detector
    backend is skipped.
    """

    # Model names accepted by DeepFace.represent.
    SUPPORTED_MODELS = [
        "VGG-Face", "Facenet", "Facenet512", "OpenFace",
        "DeepFace", "DeepID", "ArcFace", "Dlib", "SFace",
    ]

    def __init__(self, model: str = "ArcFace") -> None:
        """Initialize the embedding engine.

        Args:
            model: Model to use (default: ArcFace). An unsupported name
                falls back to ArcFace with a warning instead of raising,
                so construction never fails on a bad model string.
        """
        if model not in self.SUPPORTED_MODELS:
            logger.warning(f"Modelo {model} no soportado, usando ArcFace")
            model = "ArcFace"
        self.model_name = model
        logger.info(f"Embedding Engine inicializado con modelo: {model}")

    def generate_embedding(self, face_image):
        """Generate an embedding vector for a single face.

        Args:
            face_image: Face image as an RGB numpy array (presumably
                already cropped to the face, e.g. 160x160 — confirm
                against the upstream detector).

        Returns:
            1-D numpy array with the embedding, or None on failure.
        """
        # Fast-path guard: avoid the expensive raise-and-catch inside
        # DeepFace when the caller passed no image at all.
        if face_image is None:
            logger.error("Error generando embedding: imagen vacía")
            return None

        try:
            embedding_obj = DeepFace.represent(
                img_path=face_image,  # DeepFace accepts a numpy array here
                model_name=self.model_name,
                enforce_detection=False,
                # Detection was already done upstream (MTCNN), skip it.
                detector_backend='skip',
            )

            # represent() returns a list of dicts, one per detected face;
            # with 'skip' there is exactly one entry for the whole image.
            embedding = np.array(embedding_obj[0]["embedding"])

            logger.debug(f"Embedding generado: {len(embedding)} dimensiones")
            return embedding

        except Exception as e:
            # Broad catch is deliberate: an embedding failure must not
            # crash the pipeline; callers treat None as "no embedding".
            logger.error(f"Error generando embedding: {e}")
            return None