# CIAG001-chatbot / app.py
# Decli-Tech — "Update app.py" (commit 160ff03, verified)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# ─── Model loading ────────────────────────────────────────────────────────────
# DialoGPT-medium: a conversational causal-LM checkpoint from Microsoft.
# Both the tokenizer and the model are loaded once at import time and shared
# by every request handled by repondre() below.
MODEL_NAME = "microsoft/DialoGPT-medium"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
def repondre(message: str, historique: list):
    """Generate a DialoGPT reply and append the exchange to the chat history.

    Parameters
    ----------
    message : str
        The user's input text.
    historique : list | None
        Chat history as ``{"role": ..., "content": ...}`` dicts (the format
        used by ``gr.Chatbot(type="messages")``).  May be ``None`` on the
        very first call.

    Returns
    -------
    tuple[str, list]
        An empty string (clears the input textbox) and the updated history.
    """
    # Gradio can hand us None before the first exchange; normalize so the
    # .append() calls at the end cannot raise AttributeError.
    historique = historique if historique is not None else []

    if not message.strip():
        return "", historique

    # Encode the new user turn, terminated by EOS as DialoGPT expects.
    new_input_ids = tokenizer.encode(
        message + tokenizer.eos_token, return_tensors="pt"
    )

    if historique:
        # Rebuild the conversational context from the last 6 messages
        # (i.e. the 3 most recent user/assistant exchanges), each turn
        # terminated by the EOS token.
        context = "".join(
            m["content"] + tokenizer.eos_token for m in historique[-6:]
        )
        context_ids = tokenizer.encode(context, return_tensors="pt")
        bot_input_ids = torch.cat([context_ids, new_input_ids], dim=-1)
    else:
        bot_input_ids = new_input_ids

    # Pure inference: disable autograd bookkeeping to save time and memory.
    with torch.no_grad():
        output_ids = model.generate(
            bot_input_ids,
            # There is no padding, so every position is real input; passing
            # the mask explicitly silences the transformers warning about an
            # ambiguous attention mask when pad_token_id == eos_token_id.
            attention_mask=torch.ones_like(bot_input_ids),
            max_new_tokens=120,
            pad_token_id=tokenizer.eos_token_id,
            no_repeat_ngram_size=3,
            do_sample=True,
            top_k=100,
            top_p=0.7,
            temperature=0.8,
        )

    # Keep only the newly generated tokens (everything after the prompt).
    reponse = tokenizer.decode(
        output_ids[:, bot_input_ids.shape[-1]:][0],
        skip_special_tokens=True,
    )
    if not reponse.strip():
        reponse = "I didn't quite get that — could you rephrase?"

    historique.append({"role": "user", "content": message})
    historique.append({"role": "assistant", "content": reponse})
    return "", historique
# ─── Gradio Blocks interface ──────────────────────────────────────────────────
# NOTE: Blocks.launch() has no `theme` parameter — the theme must be passed to
# the gr.Blocks() constructor, otherwise launch() raises a TypeError.
with gr.Blocks(
    title="CIAG001 — Chatbot IA Générative", theme=gr.themes.Soft()
) as demo:
    gr.Markdown(
        """
# 🤖 CIAG001 — Chatbot IA Générative
**Module :** Coding et IA Générative  |  **Niveau :** Débutant  |  **Durée :** 1h30
Conversez avec un chatbot propulsé par **DialoGPT-medium** (Microsoft).
> 💡 Le modèle est optimisé pour l'anglais. Il conserve les 3 derniers échanges en mémoire.
"""
    )
    with gr.Row():
        with gr.Column(scale=3):
            # type="messages" matches the {"role": ..., "content": ...} dicts
            # that repondre() appends to the history (the default tuple format
            # would not render them).
            chatbot_ui = gr.Chatbot(
                label="Conversation", height=420, type="messages"
            )
            with gr.Row():
                msg_input = gr.Textbox(
                    placeholder="Tapez votre message en anglais…",
                    label="Votre message",
                    scale=5,
                )
                btn_envoyer = gr.Button("Envoyer 📨", variant="primary", scale=1)
            btn_effacer = gr.Button("🗑️ Nouvelle conversation", size="sm")
        with gr.Column(scale=1):
            gr.Markdown(
                """
### 📊 Modèle
- **DialoGPT-medium** (Microsoft)
- Tâche : `text-generation`
- 345M paramètres — CPU OK
### 💡 Exemples
- *What is artificial intelligence?*
- *Tell me a joke.*
- *What do you think about space?*
### 🎓 Compétences
- `AutoModelForCausalLM`
- `AutoTokenizer`
- Gestion historique
- Gradio Blocks + Chatbot
"""
            )

    # Send on Enter or on the button; both clear the textbox and refresh the
    # chat history.
    msg_input.submit(repondre, [msg_input, chatbot_ui], [msg_input, chatbot_ui])
    btn_envoyer.click(repondre, [msg_input, chatbot_ui], [msg_input, chatbot_ui])
    # Reset: empty history, empty input box.
    btn_effacer.click(lambda: ([], ""), outputs=[chatbot_ui, msg_input])

demo.launch()