|
|
import gradio as gr |
|
|
from transformers import AutoModelForCausalLM, AutoTokenizer |
|
|
|
|
|
|
|
|
# HuggingFace model id for the conversational model used by this app.
model_name = "microsoft/DialoGPT-medium"

# Load model weights and tokenizer once at import time (downloads on first
# run, then served from the local HuggingFace cache).
model = AutoModelForCausalLM.from_pretrained(model_name)

tokenizer = AutoTokenizer.from_pretrained(model_name)

# Running transcript of the chat, alternating "User: ..." / "Bot: ..." lines.
# NOTE(review): module-level mutable state — shared across all users of the
# Gradio app; fine for a single-user demo, not for multi-user deployment.
conversation_history = []
|
|
|
|
|
|
|
|
|
|
|
def respond_to_input(user_input):
    """Generate a chatbot reply and return the updated transcript.

    Appends the user's message to the module-level ``conversation_history``,
    builds a DialoGPT prompt from the last few turns, generates a reply, and
    appends that too.

    Parameters
    ----------
    user_input : str
        The text typed by the user.

    Returns
    -------
    tuple[str, str]
        ``(chat_history, "")`` — the last 10 transcript lines joined with
        newlines, plus an empty string for the second output component.
    """
    global conversation_history

    # Skip empty/whitespace-only submissions instead of burning a model call.
    if not user_input or not user_input.strip():
        return "\n".join(conversation_history[-10:]), ""

    conversation_history.append(f"User: {user_input}")

    # DialoGPT is trained on turns separated by the EOS token, so join the
    # recent turns with eos_token for the model prompt.  The "User:"/"Bot:"
    # display prefixes are stripped here so they don't leak into generation.
    recent_turns = [line.split(": ", 1)[-1] for line in conversation_history[-5:]]
    input_text = tokenizer.eos_token.join(recent_turns)

    encoded = tokenizer(input_text + tokenizer.eos_token, return_tensors="pt")
    input_ids = encoded["input_ids"]

    # max_new_tokens bounds only the reply length, so generation keeps working
    # after the running context grows past a fixed total (the original
    # max_length=1000 capped prompt+reply together and eventually failed).
    # attention_mask is passed explicitly because pad_token_id == eos_token_id
    # makes the mask ambiguous otherwise (transformers warns about this).
    response_ids = model.generate(
        input_ids,
        attention_mask=encoded["attention_mask"],
        max_new_tokens=200,
        pad_token_id=tokenizer.eos_token_id,
    )

    # Decode only the newly generated tokens (everything past the prompt).
    bot_response = tokenizer.decode(
        response_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True
    )

    conversation_history.append(f"Bot: {bot_response}")

    chat_history = "\n".join(conversation_history[-10:])
    return chat_history, ""
|
|
|
|
|
|
|
|
|
|
|
# Build the web UI.  The original passed live=True, which makes Gradio call
# respond_to_input on EVERY keystroke — one full model generation per
# character typed, and conversation_history polluted with partial inputs.
# Submitting explicitly (Enter / the Submit button) is the intended flow.
iface = gr.Interface(
    fn=respond_to_input,
    inputs=gr.Textbox(label="", placeholder="Type here...", lines=1, scale=2),
    # Two output components to match respond_to_input's (history, "") return;
    # the second textbox only ever receives the empty string.
    outputs=[
        gr.Textbox(label="Conversation History", lines=15, interactive=False),
        gr.Textbox(),
    ],
    title="ChatGPT-like Chatbot",
    description="Chat with a bot powered by DialoGPT. Type your question below!",
    theme="default",
    allow_flagging="never",
    css=".output-textbox { height: 400px; }",
)

# Start the local Gradio server (blocks until the process is stopped).
iface.launch()