# Spaces: Sleeping — Hugging Face Spaces status badge, residue from page extraction (not program code).
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration
import gradio as gr

# Model checkpoint: distilled 400M-parameter BlenderBot conversational model.
model_name = "facebook/blenderbot-400M-distill"
tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
model = BlenderbotForConditionalGeneration.from_pretrained(model_name)

# Running transcript of alternating user/bot turns.
# NOTE(review): vanilla_chatbot appends to this list but never feeds it back
# into generation, so the bot is effectively stateless — confirm if multi-turn
# context was intended.
conversation_history = []
# Function to interact with the chatbot.
def vanilla_chatbot(message, history=None):
    """Generate a single-turn BlenderBot reply to *message*.

    Parameters
    ----------
    message : str
        The user's latest utterance.
    history : optional
        Chat history as supplied by Gradio chat wrappers. Unused by this
        implementation — only the current message is passed to the model, so
        replies are stateless. Defaults to None so the function also works
        when called with a single argument (e.g. by a plain ``gr.Interface``
        with one input component, as this script does — without the default,
        every submission would raise TypeError).

    Returns
    -------
    str
        The decoded model reply with special tokens removed.
    """
    global conversation_history

    # Record the user turn (bookkeeping only; not used for generation).
    conversation_history.append(message)

    # Tokenize just the current message into PyTorch tensors.
    inputs = tokenizer([message], return_tensors='pt')

    # Generate the reply ids and decode the first (only) sequence.
    reply_ids = model.generate(**inputs)
    bot_response = tokenizer.batch_decode(reply_ids, skip_special_tokens=True)[0]

    # Record the bot turn.
    conversation_history.append(bot_response)

    return bot_response
# Build a simple text-in/text-out Gradio UI around the chatbot function.
# (A gr.Interface with one input component calls fn with a single argument.)
demo_chatbot = gr.Interface(
    fn=vanilla_chatbot,
    inputs=gr.Textbox(lines=2, placeholder="Enter your message here..."),
    outputs=gr.Textbox(placeholder="Bot response will appear here..."),
    title="Mashdemy Chatbot",
    description="Enter text to start chatting.",
)

# Launch the app; share=True additionally exposes a temporary public URL.
demo_chatbot.launch(share=True)
# cey-riof-zjr — Hugging Face Space slug, residue from page extraction (not program code).