# Gradio app for the Alfred LangGraph agent (Hugging Face Space).
# Standard library
import os

# Third-party
import gradio as gr
from langchain_core.messages import AIMessage, AnyMessage, HumanMessage

# Local application
from agent import build_graph
def create_qna_interface():
    """Create the Q&A chatbot interface.

    Builds a ``gr.Blocks`` app wired to a LangGraph agent ("Alfred").

    Returns:
        gr.Blocks: the assembled (not yet launched) Gradio application.

    Note:
        The agent graph is built lazily on the first user message and then
        reused for every later turn — the previous version rebuilt the whole
        graph (and re-read ``HF_TOKEN``) on every submission.
    """
    # ======================================
    # 1) Lazily-built, shared agent graph
    # ======================================
    # One cache per interface instance; populated on first use.
    graph_cache: dict[str, object] = {}

    def _get_graph():
        """Build the agent graph once and return the cached instance afterwards."""
        if "graph" not in graph_cache:
            # API key for the model backend; may be None if HF_TOKEN is unset.
            graph_cache["graph"] = build_graph(hf_token=os.getenv("HF_TOKEN"))
        return graph_cache["graph"]

    # ======================================
    # 2) Helper functions for the Gradio UI
    # ======================================
    def _msg_content_to_str(msg: AnyMessage) -> str:
        """
        Coerce LangChain message content (which might contain tool call structures)
        into displayable text for the Chatbot.
        """
        # Most often, content is a string already
        content = getattr(msg, "content", "")
        if isinstance(content, str):
            return content
        # If it's a list of parts (e.g., tool call traces), join any text parts
        if isinstance(content, list):
            texts = []
            for part in content:
                if isinstance(part, dict) and "text" in part:
                    texts.append(part["text"])
                elif isinstance(part, str):
                    texts.append(part)
            return "\n".join(texts) if texts else str(content)
        # Fallback
        return str(content)

    def startup_state() -> list[AnyMessage]:
        """Start with an empty conversation."""
        return []

    # The Chatbot below uses type="messages", so Gradio's history is a list of
    # {"role": ..., "content": ...} dicts (NOT (user, bot) tuples).
    def submit_user_message(
        user_text: str,
        chat_history: list[dict[str, str]],
        agent_messages: list[AnyMessage],
    ):
        """
        1) Append HumanMessage to agent state
        2) Run Alfred
        3) Extract last AIMessage and append to chat_history

        Returns (textbox_update, chat_history, agent_messages) matching the
        event wiring's output components.
        """
        # Ignore empty / whitespace-only submissions without touching state.
        if not user_text or user_text.strip() == "":
            return gr.update(), chat_history, agent_messages
        # Step 1: add HumanMessage to state (copy so we never mutate Gradio's
        # own state object in place).
        agent_messages = list(agent_messages or [])
        agent_messages.append(HumanMessage(content=user_text))
        # Step 2: run the cached graph (built once, reused across turns)
        alfred = _get_graph()
        out = alfred.invoke({"messages": agent_messages})
        # The graph returns a new messages list *including* the latest
        # assistant/tool steps. Keep the full state for the next turn.
        new_msgs: list[AnyMessage] = out["messages"]
        agent_messages = new_msgs
        # Step 3: find the last assistant message to show in the UI
        ai_text = ""
        for m in reversed(new_msgs):
            if isinstance(m, AIMessage):
                ai_text = _msg_content_to_str(m)
                break
        if not ai_text:
            # fallback: in rare cases of only tool messages, show a generic note
            ai_text = "I processed your request using my tools."
        chat_history = list(chat_history or [])
        chat_history.append({"role": "user", "content": user_text})
        chat_history.append({"role": "assistant", "content": ai_text})
        # "" clears the textbox; the other two update chatbot and agent state.
        return "", chat_history, agent_messages

    def clear_chat():
        """Reset the Gradio UI and agent state."""
        return [], startup_state()

    # ========================
    # 3) Gradio App UI layout
    # ========================
    with gr.Blocks(title="Alfred — LangGraph Agent") as demo:
        # Markdown content is left unindented so it is not rendered as a
        # Markdown code block.
        gr.Markdown(
            """
# 🎩 Alfred — Your LangGraph Agent
Ask questions and Alfred will respond, using:
- a vector search tool over the guest list
- DuckDuckGo web search
"""
        )
        with gr.Row():
            chatbot = gr.Chatbot(
                label="Conversation",
                type="messages",
                height=500,
                show_copy_button=True,
                avatar_images=(None, None),  # customize if you like
            )
        with gr.Row():
            txt = gr.Textbox(
                label="Your message",
                placeholder="Ask anything…",
                autofocus=True,
                scale=4,
            )
            send_btn = gr.Button("Send", variant="primary", scale=1)
            clear_btn = gr.Button("Clear")
        # Hidden state: the agent’s full message list (LangChain messages)
        agent_state = gr.State(startup_state())
        # Wire up events: Enter in the textbox and the Send button are equivalent.
        txt.submit(submit_user_message, [txt, chatbot, agent_state], [txt, chatbot, agent_state])
        send_btn.click(submit_user_message, [txt, chatbot, agent_state], [txt, chatbot, agent_state])
        clear_btn.click(clear_chat, outputs=[chatbot, agent_state])
    return demo
# Entry point
if __name__ == "__main__":
    # Build the interface and serve it in one step.
    create_qna_interface().launch()