File size: 4,764 Bytes
7592d9f
 
a948051
2976609
7592d9f
2976609
 
7592d9f
2976609
 
 
a948051
2976609
 
 
 
7592d9f
2976609
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7592d9f
2976609
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7592d9f
2976609
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7592d9f
 
2976609
 
 
 
 
 
 
 
7592d9f
2976609
 
 
 
 
 
 
 
 
 
 
 
7592d9f
2976609
 
 
 
7592d9f
2976609
a948051
7592d9f
 
 
2976609
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
import os
import gradio as gr
from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
from agent import build_graph

def create_qna_interface():
    """Build and return the Gradio Blocks app for the Alfred Q&A chatbot.

    Defines the UI layout (chatbot, textbox, buttons) plus the event
    handlers that route each user turn through the LangGraph agent
    produced by `build_graph`.
    """

    # ======================================
    # 2) Helper functions for the Gradio UI
    # ======================================

    def _msg_content_to_str(msg: AnyMessage) -> str:
        """
        Flatten a LangChain message's content into displayable text.

        Content is usually a plain string, but tool-call traces can arrive
        as a list of parts (dicts carrying a "text" key, or bare strings).
        Anything unrecognized falls back to ``str()``.
        """
        content = getattr(msg, "content", "")

        # Common case: already a plain string.
        if isinstance(content, str):
            return content

        # List of parts: keep dict parts that carry "text", plus bare strings.
        if isinstance(content, list):
            pieces = [
                part["text"] if isinstance(part, dict) else part
                for part in content
                if (isinstance(part, dict) and "text" in part) or isinstance(part, str)
            ]
            return "\n".join(pieces) if pieces else str(content)

        # Anything else: stringify.
        return str(content)

    def startup_state() -> list[AnyMessage]:
        """Return the initial agent message history: an empty list."""
        return list()
    
    # Chatbot is type="messages": history entries are {"role", "content"} dicts
    def submit_user_message(
        user_text: str,
        chat_history: list[dict[str, str]],
        agent_messages: list[AnyMessage],
    ):
        """
        Handle one chat turn.

        1) Append a HumanMessage for *user_text* to the agent state.
        2) Run the Alfred graph over the full message history.
        3) Append the user/assistant pair to the UI chat history.

        Returns (textbox_value, chat_history, agent_messages); the textbox
        is cleared ("") on success and left untouched on empty input.
        Note: the Chatbot is type="messages", so each history entry is a
        {"role": ..., "content": ...} dict, not a (user, bot) tuple.
        """
        # Ignore empty / whitespace-only submissions without touching state.
        if not user_text or user_text.strip() == "":
            return gr.update(), chat_history, agent_messages

        # Step 1: add HumanMessage to state (copy so we never mutate the
        # list Gradio handed us in place).
        agent_messages = list(agent_messages or [])
        agent_messages.append(HumanMessage(content=user_text))

        # get API key
        api_key = os.getenv('HF_TOKEN')

        # Step 2: run the graph. Building the graph is comparatively
        # expensive, so cache the compiled graph on the function object and
        # only rebuild if the token changes (fixes a rebuild-per-message
        # inefficiency in the original).
        cache = getattr(submit_user_message, "_alfred_cache", None)
        if cache is None or cache[0] != api_key:
            cache = (api_key, build_graph(hf_token=api_key))
            submit_user_message._alfred_cache = cache
        alfred = cache[1]
        out = alfred.invoke({"messages": agent_messages})

        # The graph returns a new messages list *including* the latest assistant/tool steps.
        # We use the last AIMessage as the displayed reply.
        new_msgs: list[AnyMessage] = out["messages"]
        agent_messages = new_msgs  # keep full state for the next turn

        # Find the last assistant message to show in the UI
        ai_text = ""
        for m in reversed(new_msgs):
            if isinstance(m, AIMessage):
                ai_text = _msg_content_to_str(m)
                break
        if not ai_text:
            # fallback: in rare cases of only tool messages, show a generic note
            ai_text = "I processed your request using my tools."

        # Step 3: append the turn as role/content dicts (type="messages").
        chat_history = list(chat_history or [])
        chat_history.append({"role": "user", "content": user_text})
        chat_history.append({"role": "assistant", "content": ai_text})
        return "", chat_history, agent_messages

    def clear_chat():
        """Reset both the visible chat and the agent's message state."""
        # startup_state() simply yields an empty list, so inline it:
        # a brand-new empty UI history plus a brand-new empty agent state.
        empty_history: list = []
        empty_state: list = []
        return empty_history, empty_state
    
    # ========================
    # 3) Gradio App UI layout
    # ========================

    with gr.Blocks(title="Alfred — LangGraph Agent") as demo:
        # Page header / usage notes.
        gr.Markdown(
            """
            # 🎩 Alfred — Your LangGraph Agent
            Ask questions and Alfred will respond, using:
            - a vector search tool over the guest list
            - DuckDuckGo web search
            """
        )

        with gr.Row():
            # type="messages" means history entries are {"role", "content"} dicts.
            chatbot = gr.Chatbot(
                label="Conversation",
                type="messages",
                height=500,
                show_copy_button=True,
                avatar_images=(None, None),  # customize if you like
            )

        with gr.Row():
            txt = gr.Textbox(
                label="Your message",
                placeholder="Ask anything…",
                autofocus=True,
                scale=4,
            )
            send_btn = gr.Button("Send", variant="primary", scale=1)
            clear_btn = gr.Button("Clear")

        # Hidden state: the agent’s full message list (LangChain messages)
        agent_state = gr.State(startup_state())

        # Wire up events: Enter in the textbox and the Send button run the
        # same handler; each returns (textbox, chat history, agent state).
        txt.submit(submit_user_message, [txt, chatbot, agent_state], [txt, chatbot, agent_state])
        send_btn.click(submit_user_message, [txt, chatbot, agent_state], [txt, chatbot, agent_state])
        clear_btn.click(clear_chat, outputs=[chatbot, agent_state])

        # NOTE(review): returning from inside the `with` still runs Blocks'
        # __exit__, so the layout is finalized before `demo` is handed back.
        return demo


# Entry point
if __name__ == "__main__":
    # Create and launch the interface
    # (launch() blocks until the Gradio server is stopped).
    demo = create_qna_interface()
    demo.launch()