Hugging Face Spaces: build error — the app source below was pasted wrapped in markdown-table pipes (`| … | |`), which is not valid Python and causes a SyntaxError at startup. Reconstructed source follows.
import gradio as gr
from transformers import BlenderbotTokenizer, BlenderbotForConditionalGeneration

# Model checkpoint backing the chatbot.
MODEL_NAME = "facebook/blenderbot-400M-distill"

# Load the model and tokenizer once at startup.
try:
    tokenizer = BlenderbotTokenizer.from_pretrained(MODEL_NAME)
    model = BlenderbotForConditionalGeneration.from_pretrained(MODEL_NAME)
    print("Model and tokenizer loaded successfully!")
except Exception as e:
    # Best-effort startup: log and continue so the UI still comes up.
    # Fix: the original left `tokenizer`/`model` undefined on failure,
    # which made chat_bot raise NameError; bind them to None so a load
    # failure surfaces as a handled error instead.
    tokenizer = None
    model = None
    print(f"Error loading model or tokenizer: {e}")
# Function to generate chatbot response
def chat_bot(user_input, history):
    """Generate a reply to *user_input* and append it to the chat history.

    Args:
        user_input: The message typed by the user.
        history: List of (user_message, bot_reply) tuples maintained by
            gr.Chatbot; may be None on the very first call.

    Returns:
        The updated history list. On any error, the history received so far
        is returned unchanged so the UI keeps the conversation visible.
    """
    # Robustness: gr.Chatbot usually supplies [], but guard against None.
    history = history or []
    try:
        # Tokenize the user input. Fix: the original passed max_length=1000,
        # but BlenderBot-400M-distill's positional embeddings only cover 128
        # tokens; omitting max_length lets truncation use the tokenizer's
        # own model_max_length, so the model never sees an un-embeddable
        # sequence.
        inputs = tokenizer([user_input], return_tensors="pt", truncation=True)

        # Generate the response using the model.
        reply_ids = model.generate(**inputs)
        response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)
        print(f"Generated response: {response}")  # Debugging: Print generated response

        # Update the history.
        history.append((user_input, response))
        print(f"Updated history: {history}")  # Debugging: Print updated history
        return history
    except Exception as e:
        # Top-level UI boundary: report and keep the conversation intact.
        print(f"Error in chat_bot function: {e}")
        return history  # Return the history even if there's an error
# Gradio chat interface.
with gr.Blocks() as app:
    gr.Markdown("<h1 style='text-align: center;'>💬 Open-Source Chatbot</h1>")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type your message here...", label="Your Message")
    clear = gr.Button("Clear")

    # Auto-update conversation: Enter in the textbox sends the message;
    # the chatbot component's value doubles as the running history.
    msg.submit(chat_bot, inputs=[msg, chatbot], outputs=[chatbot])
    # Reset the conversation to an empty history.
    clear.click(lambda: [], None, chatbot, queue=False)

# Launch app in Hugging Face Spaces. NOTE(review): share=True is ignored on
# Spaces (it only creates a public tunnel when run locally) — harmless but
# worth confirming it's intentional.
app.launch(share=True)