# dorna-chat / app.py
# (Hugging Face Space page residue — author: omid99, commit 2eabd74 "Update app.py" —
#  kept here as a comment so the file remains valid Python.)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
# Fine-tuned diabetes-domain chat model hosted on the Hugging Face Hub.
model_name = "rinrikatoki/dorna-merged-diabetes"
# Load the model and tokenizer.
# NOTE(review): downloads weights on first run; float16 + device_map="auto"
# places layers on GPU when available — assumes accelerate is installed (TODO confirm).
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto")
# Response-generation function
def chat(history, user_input):
    """Generate a model reply for *user_input* given the conversation *history*.

    Parameters
    ----------
    history : list[tuple[str, str]] | None
        Previous (user, ai) message pairs held in the Gradio state.
    user_input : str
        The new user message.

    Returns
    -------
    tuple
        (updated_history, updated_history) — returned twice because the
        Interface wires one copy to the Chatbot display and one to gr.State.
    """
    history = history or []  # guard: state may arrive as None on the first call
    # Rebuild the conversation as a plain-text prompt.
    turns = [f"User: {u}\nAI: {a}" for u, a in history]
    turns.append(f"User: {user_input}\nAI:")
    prompt = "\n".join(turns)
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    # Inference only — no_grad avoids building an autograd graph.
    with torch.no_grad():
        output = model.generate(
            **inputs,
            max_new_tokens=200,
            do_sample=True,
            temperature=0.7,
            pad_token_id=tokenizer.eos_token_id,
        )
    # Decode only the newly generated tokens. The previous approach decoded
    # the whole sequence and split on "AI:", which breaks as soon as any
    # earlier message happens to contain that substring.
    new_tokens = output[0][inputs["input_ids"].shape[1]:]
    response = tokenizer.decode(new_tokens, skip_special_tokens=True).strip()
    history.append((user_input, response))
    return history, history
# Gradio user interface: wire the chat function to a Chatbot display,
# a text box for the user message, and a State slot carrying the history.
chatbot = gr.Chatbot()
history_in = gr.State([])
message_box = gr.Textbox(placeholder="پیام خود را بنویسید...")
history_out = gr.State()

iface = gr.Interface(
    fn=chat,
    inputs=[history_in, message_box],
    outputs=[chatbot, history_out],
    title="چت‌بات دیابت با مدل Dorna",
)
iface.launch()