# Hugging Face Space: simple Gradio chatbot demo.
| import gradio as gr | |
| from transformers import pipeline | |
# Step 1: Load the pre-trained model.
# BUG FIX: the original requested the "conversational" task but then read the
# result as `conversation[0]["generated_text"]` — that is the output shape of
# the "text-generation" task. The conversational pipeline takes/returns
# Conversation objects (and was removed entirely in transformers 4.42), so the
# original call raised at runtime. "text-generation" matches the indexing.
chatbot = pipeline("text-generation", model="EleutherAI/gpt-neo-2.7B")


# Step 2: Define the function that processes user input and returns the
# model's response.
def respond(input_text: str) -> str:
    """Generate a reply to *input_text* with the loaded model.

    Args:
        input_text: The user's message.

    Returns:
        The model's generated text as a plain string.
    """
    # Pass a single string (not a one-element list): a string input yields a
    # flat list of dicts, [{"generated_text": ...}]; a list input would yield
    # a nested list and break the indexing below.
    outputs = chatbot(input_text)
    return outputs[0]["generated_text"]
# Step 3: Wire the chatbot into a Gradio UI — one text box in, text out.
iface = gr.Interface(
    respond,  # called on every user submission
    inputs="text",
    outputs="text",
    title="HugChat - Your AI Chatbot",
    description="Chat with an AI-powered bot based on a pre-trained model.",
)

# Step 4: Start the web server only when executed as a script, never on import.
if __name__ == "__main__":
    iface.launch()