import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer


# Hugging Face model repo for the DailyDialog-finetuned causal LM.
model_name = "moses132/dailydialog-chatbot"

# Loaded eagerly at import time: the first run downloads the weights,
# subsequent runs read from the local HF cache.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Module-level conversation transcript, alternating "User: ..." / "AI: ..."
# entries. NOTE(review): shared across ALL Gradio sessions and never
# truncated — every visitor sees/extends the same conversation and the
# prompt grows without bound. Fine for a single-user demo; use
# gr.State for per-session history in a multi-user deployment.
chat_history = []
|
|
def moses_chat(user_message):
    """Append *user_message* to the running transcript, generate a model
    reply, and return the full conversation formatted for ``gr.Chatbot``.

    Args:
        user_message: Raw text typed by the user.

    Returns:
        A list of ``(user_text, ai_text)`` pairs — the format
        ``gr.Chatbot`` expects (the original code returned
        ``("User", text)`` / ``("AI", text)`` tuples, which made the
        literal word "User" render as the user bubble).

    Side effects:
        Mutates the module-level ``chat_history`` list (two entries per call).
    """
    chat_history.append(f"User: {user_message}")

    # The whole transcript is the prompt; "AI:" cues the model to answer.
    prompt = "\n".join(chat_history) + "\nAI:"

    inputs = tokenizer.encode(prompt, return_tensors="pt")

    outputs = model.generate(
        inputs,
        # max_new_tokens bounds only the reply. The original max_length=200
        # counted the prompt too, so generation broke once the growing
        # history passed ~200 tokens.
        max_new_tokens=100,
        do_sample=True,
        top_p=0.9,
        temperature=0.8,
        pad_token_id=tokenizer.eos_token_id,
    )

    # Decode only the newly generated tokens. Slicing by token count is
    # robust, unlike str.replace(prompt, ...): decode(encode(prompt)) is
    # not guaranteed to round-trip byte-identically, in which case the
    # echoed prompt would leak into the reply.
    new_tokens = outputs[0][inputs.shape[-1]:]
    reply_text = tokenizer.decode(new_tokens, skip_special_tokens=True).strip()

    chat_history.append(f"AI: {reply_text}")

    # Fold the flat "User:"/"AI:" transcript into (user, ai) pairs.
    messages = []
    for entry in chat_history:
        if entry.startswith("User:"):
            messages.append([entry[len("User:"):].strip(), None])
        elif messages:
            messages[-1][1] = entry[len("AI:"):].strip()
    return [tuple(pair) for pair in messages]
|
|
| |
| |
| |
# UI layout: header, chat transcript, and a textbox + button row.
with gr.Blocks(title="Moses Chat") as demo:
    gr.Markdown("<h1 style='text-align:center; color:#4B0082;'>Moses Chat</h1>")

    with gr.Row():
        # height is a constructor kwarg: the chained .style(height=500)
        # API was deprecated in Gradio 3.x and removed in 4.x.
        chatbot = gr.Chatbot(elem_id="moses-chatbot", height=500)

    with gr.Row():
        msg = gr.Textbox(placeholder="Type your message here...", lines=1)
        submit = gr.Button("Send")

    def respond(message):
        # Returns (chat pairs, "") — the second output clears the
        # textbox so the user doesn't have to delete their last message.
        return moses_chat(message), ""

    # Same handler for the Send button and for pressing Enter in the box.
    submit.click(respond, inputs=msg, outputs=[chatbot, msg])
    msg.submit(respond, inputs=msg, outputs=[chatbot, msg])


demo.launch()