import gradio as gr
from transformers import pipeline, Conversation

# --- Load model ---
# Use a lighter model for HF Spaces CPU (replace with a custom HF model if available).
chatbot_model_id = "microsoft/DialoGPT-small"
chatbot = pipeline("conversational", model=chatbot_model_id)


# --- Define response function ---
def respond(message, history=None):
    """Run one chat turn and return the updated (user, bot) history.

    Args:
        message: The latest user message from the Textbox.
        history: Prior turns as a list of (user, bot) tuples; None or []
            on the first turn.

    Returns:
        A (history, history) pair — one copy feeds the Chatbot display,
        the other the gr.State component.
    """
    # No mutable default: a shared [] default would leak conversation
    # state across sessions/users.
    history = [] if history is None else history

    # Feed prior turns back into the Conversation; a bare
    # Conversation(message) gives the model no memory of earlier turns.
    conv = Conversation(
        message,
        past_user_inputs=[user for user, _ in history],
        generated_responses=[bot for _, bot in history],
    )
    result = chatbot(conv)
    reply = result.generated_responses[-1]  # Last generated reply

    # Return a new list rather than mutating the gr.State value in place.
    history = history + [(message, reply)]
    return history, history


# --- Build Gradio interface ---
with gr.Blocks() as demo:
    chat_history = gr.State([])  # Maintains per-session conversation

    with gr.Row():
        user_input = gr.Textbox(label="You", placeholder="Type here...")
        send_button = gr.Button("Send")

    chat_display = gr.Chatbot(label="Mental Health Bot")

    send_button.click(
        fn=respond,
        inputs=[user_input, chat_history],
        outputs=[chat_display, chat_history],
    )

# Launch app
demo.launch()