# Hugging Face Space: appointment-scheduling assistant (Space status at capture time: Sleeping)
import gradio as gr
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, AIMessage

from database.db_handler import init_db
from langchain_logic.agent_setup import create_agent_executor

# Load environment variables from .env for local development.
# On Hugging Face, this line will do nothing, which is what we want
# (secrets are injected via the Space settings instead).
load_dotenv()

# --- App Setup ---
# Initialize the database and table if they don't exist.
print("Initializing database...")
init_db()
print("Database initialized.")

# Create the agent executor that will service every chat turn.
agent_executor = create_agent_executor()
print("Agent Executor created.")
# --- Gradio Interface ---
# Gradio owns the chat history; it is threaded through each call below.
def respond(message, chat_history):
    """Handle one user turn: run the agent and append the new exchange.

    Args:
        message: The user's latest input text.
        chat_history: Gradio-format history, a list of (human, ai) tuples.

    Returns:
        A tuple of ("", updated history); the empty string clears the textbox.
    """
    # Rebuild the prior conversation as LangChain message objects.
    lc_history = []
    for user_turn, bot_turn in chat_history:
        lc_history.extend(
            (HumanMessage(content=user_turn), AIMessage(content=bot_turn))
        )

    # Run the agent with the new message plus the prior context.
    result = agent_executor.invoke(
        {"input": message, "chat_history": lc_history}
    )

    # Record the new exchange and clear the input box.
    chat_history.append((message, result['output']))
    return "", chat_history
# Build the Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("# Appointment Scheduling Assistant")

    chatbot = gr.Chatbot()
    msg = gr.Textbox(
        label="Your Message",
        placeholder="Type your request here (e.g., 'show all appointments', 'book a haircut for Jane Doe')",
    )
    clear = gr.Button("Clear")

    # Submitting the textbox routes through the agent; Clear empties the chat.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)

if __name__ == "__main__":
    demo.launch(debug=True)  # debug=True is for local testing