# AskLAQ3 / deploy / Deploy_AskLAQ3.py — deployment entry point (Hugging Face Space).
import os, torch, pandas as pd, gradio as gr, uvicorn, nest_asyncio
from flask import Flask, render_template, request, jsonify
from sentence_transformers import SentenceTransformer, util
from fastapi import FastAPI
from fastapi.middleware.wsgi import WSGIMiddleware
# Configuration: anchor all file lookups on the directory containing this
# script so the app works regardless of the process's current directory.
BASE_DIR = os.path.abspath(os.path.dirname(__file__))

# Flask serves the HTML front-end (templates/ and static/ live next to this file).
flask_app = Flask(__name__,
template_folder=os.path.join(BASE_DIR, "templates"),
static_folder=os.path.join(BASE_DIR, "static"))

# Load the sentence-embedding model once at import time (CPU only).
# NOTE(review): trust_remote_code=True executes code shipped with the model
# repo — acceptable only because the model source is pinned and trusted.
model = SentenceTransformer("OrdalieTech/Solon-embeddings-mini-beta-1.1", device="cpu", trust_remote_code=True)
@flask_app.route("/")
def index():
    """Serve the single-page front-end of the app."""
    page = "index.html"
    return render_template(page)
# Per-process cache so the dataset and the precomputed embeddings are loaded
# from disk once, not on every request.
_RESOURCES = {}

def _load_resources():
    """Load and memoize the answer dataset and precomputed question embeddings.

    Returns:
        (df, emb_base): the pandas DataFrame of answers and the CPU tensor of
        precomputed question embeddings.
    """
    if not _RESOURCES:
        # Anchor on BASE_DIR (like the Flask template/static folders) instead
        # of the process's current working directory.
        _RESOURCES["df"] = pd.read_csv(os.path.join(BASE_DIR, "dataset_2026.csv"))
        _RESOURCES["emb"] = torch.load(
            os.path.join(BASE_DIR, "embeddings_questions.pt"), map_location="cpu"
        )
    return _RESOURCES["df"], _RESOURCES["emb"]

@flask_app.route("/ask", methods=["POST"])
def ask():
    """Answer a question by nearest-neighbour search over precomputed embeddings.

    Expects a JSON body {"question": str}; returns JSON with the best-matching
    "response" (the dataset's "rationale" column) and a 0-100 "confidence"
    derived from the cosine similarity. Returns 400 on a missing/empty
    question and 500 (with an "error" field) on any internal failure.
    """
    try:
        # silent=True -> None instead of an exception on a malformed body.
        data = request.get_json(silent=True) or {}
        question = data.get("question", "")
        if not question.strip():
            return jsonify({"error": "question is required"}), 400

        df, emb_base = _load_resources()
        emb_q = model.encode(question, convert_to_tensor=True, normalize_embeddings=True)
        scores = util.pytorch_cos_sim(emb_q, emb_base)[0]
        idx = torch.argmax(scores).item()
        return jsonify({
            "response": df["rationale"].iloc[idx],
            # Cosine similarity of normalized embeddings, scaled to a percentage.
            "confidence": int(scores[idx].item() * 100),
        })
    except Exception as e:
        # Top-level route boundary: report the failure as a 500 instead of a
        # misleading 200 with an error payload.
        return jsonify({"error": str(e)}), 500
# Wrap the Flask (WSGI) app in a FastAPI (ASGI) shell so the Hugging Face
# Space's ASGI server can serve it; every path is delegated to Flask.
app = FastAPI()
app.mount("/", WSGIMiddleware(flask_app))

if __name__ == "__main__":
    # nest_asyncio allows uvicorn's event loop to start even when a loop is
    # already running (e.g. inside a notebook); harmless in a plain process.
    nest_asyncio.apply()
    # Port 7860 is the conventional Hugging Face Spaces port.
    uvicorn.run(app, host="0.0.0.0", port=7860)