# Hugging Face Space header (copied from the Spaces page — status: Sleeping)
import gradio as gr
from transformers import BlenderbotSmallTokenizer, BlenderbotSmallForConditionalGeneration

# Load the tokenizer and the 90M-parameter BlenderBot-small checkpoint once at
# module import time, so every chat request reuses the same in-memory weights.
model_name = "facebook/blenderbot-90M"
tokenizer = BlenderbotSmallTokenizer.from_pretrained(model_name)
model = BlenderbotSmallForConditionalGeneration.from_pretrained(model_name)
def chat_with_bot(message, history):
    """Generate a BlenderBot reply for *message* given the chat *history*.

    Parameters
    ----------
    message : str
        The user's latest message.
    history : list[dict] | None
        Prior turns as Gradio "messages"-style dicts with "role" and
        "content" keys (roles handled here: "user" and "assistant").

    Returns
    -------
    str
        The model's reply, or a canned greeting when *message* is empty.
    """
    # Guard: don't call the model on empty or whitespace-only input.
    if not message or not message.strip():
        return "Hi there! 👋 Ask me something to get started."

    # Flatten the structured history into the plain-text transcript prompt
    # format ("User: ...\nBot: ..."), ending with the new message and an
    # open "Bot:" cue for the model to continue.
    parts = []
    for turn in history or []:
        role = turn.get("role")
        content = turn.get("content")
        if role == "user":
            parts.append(f"User: {content}\n")
        elif role == "assistant":
            parts.append(f"Bot: {content}\n")
    conversation = "".join(parts) + f"User: {message}\nBot:"

    # Tokenize without padding — a single sequence needs none, and padding
    # to max_length only wasted compute. Keep the *last* 512 tokens so the
    # most recent turns (including the new message) survive on long chats;
    # plain truncation=True cut from the right, dropping the newest turn.
    inputs = tokenizer(conversation, return_tensors="pt")
    inputs = {key: tensor[:, -512:] for key, tensor in inputs.items()}

    # Bound the reply by newly generated tokens; max_length counted the
    # whole output sequence and is the deprecated way to cap generation.
    reply_ids = model.generate(**inputs, max_new_tokens=120)
    return tokenizer.decode(reply_ids[0], skip_special_tokens=True)
# Seed message shown in the chat window before the first user turn, in the
# same {"role", "content"} "messages" format the Chatbot component expects.
initial_messages = [
    {"role": "assistant", "content": "👋 Hello! I’m your chatbot. Ask me anything to start our conversation!"}
]
# Wire the chat function into a Gradio chat UI. type="messages" makes Gradio
# pass history as a list of {"role", "content"} dicts — the shape
# chat_with_bot expects — and the pre-seeded Chatbot shows a greeting
# before the first user turn.
demo = gr.ChatInterface(
    fn=chat_with_bot,
    title="🤖 Mini Chatbot (Facebook BlenderBot-90M)",
    description="Hi 👋 I’m a small conversational chatbot powered by Facebook’s BlenderBot-90M.",
    theme="soft",
    type="messages",
    chatbot=gr.Chatbot(value=initial_messages, type="messages"),
)

if __name__ == "__main__":
    demo.launch()