# startech-api / backend / main.py
# (Hugging Face viewer metadata: persee-tech — "Update backend/main.py",
#  commit 0a5c8ec, 6.23 kB)
import os
import socketio
import uvicorn
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
import asyncio
import base64
import cv2
import numpy as np
import random
from deepface import DeepFace
from supabase import create_client, Client
import time
# --- 1. CONFIGURATION ---
SUPABASE_URL = os.getenv("SUPABASE_URL", "https://gwjrwejdjpctizolfkcz.supabase.co")
SUPABASE_KEY = os.getenv("SUPABASE_KEY")

# Optional persistence layer. The app must keep working without a database,
# so the client defaults to None and connection errors are logged instead of
# crashing the server (callers already wrap DB access in try/except).
supabase = None  # Optional[Client]
if SUPABASE_KEY:
    try:
        supabase = create_client(SUPABASE_URL, SUPABASE_KEY)
        print("☁️ Supabase Connecté")
    except Exception as e:
        print(f"⚠️ Supabase connection failed: {e}")
# --- 2. SOCKET.IO (optimized) ---
# Async Socket.IO server mounted on top of the FastAPI app. CORS is fully
# open because the frontend may be served from any origin; both loggers are
# disabled to keep the hot frame-processing path quiet.
sio = socketio.AsyncServer(async_mode='asgi', cors_allowed_origins='*', logger=False, engineio_logger=False, always_connect=True)
app = FastAPI()
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"])
# socket_app is the ASGI entry point (wraps both Socket.IO and FastAPI).
socket_app = socketio.ASGIApp(sio, app)
# --- 3. LIGHTWEIGHT ENGINE (for the green face box) ---
# Ultra-fast OpenCV Haar-cascade face detector; cheap enough to run on
# every incoming frame, unlike the DeepFace model.
face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
# --- 4. GLOBAL STATE ---
# Per-client session state, keyed by Socket.IO sid (created on connect,
# removed on disconnect).
sessions = {}
def get_default_kpis():
    """Return the baseline KPI dict shown before any analysis has run."""
    kpis = {"engagement": 0}
    kpis.update(dict.fromkeys(("satisfaction", "trust", "loyalty", "opinion"), 50))
    kpis.update(lbl_eng="En attente...", lbl_sat="Calibration...")
    return kpis
# --- 5. BACKGROUND TASK (HEAVY) ---
def deepface_task(frame):
    """Blocking DeepFace emotion analysis (can take 1-2 s per frame).

    Runs in a worker thread (see run_deepface_background). Returns a dict
    with the dominant emotion plus derived KPI metrics, or None when the
    analysis fails for any reason.
    """
    try:
        objs = DeepFace.analyze(frame, actions=['emotion'], enforce_detection=False, silent=True)
        # DeepFace returns a list in recent versions, a single dict in older ones.
        data = objs[0] if isinstance(objs, list) else objs
        emo = data['dominant_emotion']
        # Map the dominant emotion onto a crude valence/arousal pair, then
        # derive simulated KPIs from it (plus a little jitter on engagement).
        val = 0.8 if emo == "happy" else (-0.6 if emo in ["sad", "angry", "fear"] else 0.0)
        aro = 0.8 if emo in ["angry", "fear", "surprise"] else 0.3

        def clamp(n):
            return max(0, min(100, int(n)))

        return {
            "emotion": emo,
            "metrics": {
                "engagement": clamp((aro * 100) + random.uniform(0, 10)),
                "satisfaction": clamp(((val + 1) / 2) * 100),
                "trust": clamp(50 + (val * 20)),
                "loyalty": clamp(50 + (val * 10)),
                "opinion": clamp(((val + 1) / 2) * 100),
                "lbl_eng": "Fort 🔥" if aro > 0.6 else "Moyen",
                "lbl_sat": "Positif 😃" if val > 0.2 else "Neutre 😐"
            }
        }
    except Exception as e:
        # Best-effort analysis: log and signal failure with None instead of
        # silently swallowing every exception (the old bare `except` also
        # caught KeyboardInterrupt/SystemExit).
        print(f"⚠️ DeepFace analysis failed: {e}")
        return None
# --- 6. EVENTS ---
@sio.event
async def connect(sid, environ):
    """Register a fresh session state for a newly connected client."""
    print(f"✅ CONNECTÉ: {sid}")
    state = {
        "is_recording": False,
        "session_time": 0,
        "db_id": None,
        "face_coords": None,
        "emotion": "neutral",
        "metrics": get_default_kpis(),
        "last_deepface_time": 0,
    }
    sessions[sid] = state
@sio.event
async def disconnect(sid):
    """Drop all state held for the disconnecting client (no-op if unknown)."""
    sessions.pop(sid, None)
@sio.event
async def start_session(sid, data):
    """Start recording for this client and best-effort persist a session row.

    `data` may carry optional 'firstName' / 'lastName' / 'clientId' keys
    from the frontend form; missing keys default to empty strings.
    """
    if sid not in sessions:
        return
    sessions[sid]["is_recording"] = True
    sessions[sid]["session_time"] = 0
    # Persistence is best-effort: live analysis must not break when the
    # database is unreachable or the Supabase client was never configured,
    # but failures are now logged instead of silently swallowed.
    try:
        res = supabase.table('sessions').insert({
            "first_name": data.get('firstName', ''),
            "last_name": data.get('lastName', ''),
            "client_id": data.get('clientId', ''),
        }).execute()
        sessions[sid]["db_id"] = res.data[0]['id']
    except Exception as e:
        print(f"⚠️ Could not persist session: {e}")
@sio.event
async def stop_session(sid):
    """Clear the recording flag for this client, if it is still connected."""
    session = sessions.get(sid)
    if session is not None:
        session["is_recording"] = False
@sio.event
async def process_frame(sid, data_uri):
    """Handle one webcam frame sent as a base64 data URI.

    Fast path: Haar-cascade face detection on every frame (updates the
    green-box coordinates). Slow path: DeepFace emotion analysis, throttled
    to one run every 1.5 s and executed off the event loop.
    """
    if sid not in sessions:
        return
    try:
        # 1. Fast decode — tolerate a bare base64 payload that lacks the
        # "data:image/...;base64," prefix (the old split(',')[1] raised
        # IndexError in that case).
        _, _, encoded_data = data_uri.partition(',')
        if not encoded_data:
            encoded_data = data_uri
        nparr = np.frombuffer(base64.b64decode(encoded_data), np.uint8)
        frame = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
        if frame is None:
            return  # corrupt/truncated image payload — skip this frame
        # 2. FAST DETECTION (green box) — roughly 0.01 s per frame.
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        faces = face_cascade.detectMultiScale(gray, 1.1, 4)
        if len(faces) > 0:
            (x, y, w, h) = faces[0]  # first detected face wins
            sessions[sid]["face_coords"] = {'x': int(x), 'y': int(y), 'w': int(w), 'h': int(h)}
        else:
            sessions[sid]["face_coords"] = None  # no face -> no box
        # 3. SLOW ANALYSIS (emotions) — at most once every 1.5 seconds.
        now = time.time()
        if now - sessions[sid]["last_deepface_time"] > 1.5:
            sessions[sid]["last_deepface_time"] = now
            # Run the heavy model in the background without blocking.
            asyncio.create_task(run_deepface_background(sid, frame))
    except Exception as e:
        print(f"Erreur Frame: {e}")
async def run_deepface_background(sid, frame):
    """Run the blocking DeepFace analysis in a worker thread and merge the
    result into the client's session state (if the client is still connected).

    deepface_task returns None on failure, in which case the previous
    emotion/metrics are kept.
    """
    try:
        result = await asyncio.to_thread(deepface_task, frame)
        if result and sid in sessions:
            sessions[sid]["emotion"] = result["emotion"]
            sessions[sid]["metrics"] = result["metrics"]
    except Exception as e:
        # Best-effort: log instead of silently swallowing (the old bare
        # `except: pass` also hid CancelledError and programming errors).
        print(f"⚠️ DeepFace background task failed: {e}")
# --- 7. BROADCAST LOOP ---
async def broadcast_loop():
    """Push the latest per-client state to every connected client, twice
    per second, forever. Runs as a background task started at app startup."""
    while True:
        await asyncio.sleep(0.5)  # smooth updates (2x per second)
        # Snapshot the keys: clients may connect/disconnect mid-iteration.
        for sid in list(sessions.keys()):
            sess = sessions.get(sid)
            if sess is None:
                continue  # disconnected since the snapshot was taken
            if sess["is_recording"]:
                sess["session_time"] += 0.5  # advance the timer by one tick
            # Send everything we have: face box is fresh, the emotion may
            # lag by up to ~1.5 s (DeepFace throttling).
            payload = {
                "emotion": sess["emotion"],
                "face_coords": sess["face_coords"],  # drives the green box
                "metrics": sess["metrics"],
                "session_time": int(sess["session_time"]),
                "is_recording": sess["is_recording"],
            }
            try:
                await sio.emit('metrics_update', payload, room=sid)
            except Exception as e:
                # One failed client must not stop the broadcast for others;
                # log instead of silently swallowing.
                print(f"⚠️ Emit failed for {sid}: {e}")
# --- 8. STARTUP & ENTRY POINT ---
# The broadcast loop is registered as a startup hook at import time, so it
# also runs when the app is launched by an external ASGI server (e.g.
# `uvicorn main:socket_app`) — previously the hook was registered inside
# the __main__ guard and never fired in that deployment mode.
# NOTE(review): @app.on_event is deprecated in recent FastAPI; consider
# migrating to a lifespan context manager.
@app.on_event("startup")
async def startup():
    """Kick off the periodic metrics broadcast task."""
    asyncio.create_task(broadcast_loop())

if __name__ == "__main__":
    uvicorn.run(socket_app, host="0.0.0.0", port=7860)