File size: 2,359 Bytes
9a62fc6
cb1a7f5
 
38812af
cb1a7f5
38812af
7ac8208
38812af
cb1a7f5
7ac8208
 
 
 
38812af
cb1a7f5
 
38812af
cb1a7f5
 
 
 
 
38812af
cb1a7f5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38812af
cb1a7f5
 
 
 
 
9a62fc6
501c4dc
cb1a7f5
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
import gradio as gr
from langgraph.graph import StateGraph, END
from langchain_core.runnables import RunnableLambda

# Import tools
from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
from retriever import load_guest_dataset,GuestInfoTool

# Initialize all tools.
# NOTE(review): the original code bound the tool *classes* rather than
# instances, so every later `tool.invoke(query)` call would have passed the
# query string as `self`. Instantiating here matches the LangChain tool
# contract — confirm each constructor takes no required arguments.
search_tool = DuckDuckGoSearchTool()
weather_tool = WeatherInfoTool()
hub_stats_tool = HubStatsTool()
guest_info_tool = GuestInfoTool()

# Shared state container threaded through the graph.
class AppState(dict):
    """Plain ``dict`` subclass used as the graph state.

    Nodes read the user query from ``state["input"]`` and write their
    result to ``state["output"]``.
    """

# Define tool handlers
def handle_guest_info(state: AppState):
    """Graph node: answer a guest-information query via the guest tool."""
    question = state.get("input", "")
    answer = guest_info_tool.invoke(question)
    return AppState(input=question, output=answer)

def handle_weather(state: AppState):
    """Graph node: answer a weather query via the weather tool."""
    question = state.get("input", "")
    answer = weather_tool.invoke(question)
    return AppState(input=question, output=answer)

def handle_search(state: AppState):
    """Graph node: fall back to a DuckDuckGo web search."""
    question = state.get("input", "")
    answer = search_tool.invoke(question)
    return AppState(input=question, output=answer)

def handle_hub_stats(state: AppState):
    """Graph node: answer a Hugging Face Hub statistics query."""
    question = state.get("input", "")
    answer = hub_stats_tool.invoke(question)
    return AppState(input=question, output=answer)

# Build the graph: register one node per tool handler.
builder = StateGraph(AppState)
_node_handlers = {
    "guest_info": handle_guest_info,
    "weather": handle_weather,
    "search": handle_search,
    "hub_stats": handle_hub_stats,
}
for _name, _handler in _node_handlers.items():
    builder.add_node(_name, RunnableLambda(_handler))

# Define entry and routing logic
def router(state: AppState):
    query = state.get("input", "").lower()
    if "weather" in query:
        return "weather"
    elif "guest" in query:
        return "guest_info"
    elif "hub" in query:
        return "hub_stats"
    else:
        return "search"

# Wire the graph: route the incoming query to one tool node, then stop.
# NOTE(review): `set_entry_point` expects a node *name* string; routing
# with a callable at the start of the graph must go through
# `set_conditional_entry_point`, which the original code misused.
builder.set_conditional_entry_point(router)

# Every tool node is terminal. A plain edge to END replaces the original
# degenerate conditional edges (`add_conditional_edges(..., lambda _: END)`).
for _terminal in ("guest_info", "weather", "search", "hub_stats"):
    builder.add_edge(_terminal, END)

# Compile the graph into a runnable.
graph = builder.compile()

# Define Gradio UI
def chatbot_fn(user_input, history=None):
    """Gradio chat callback: run the user's message through the graph.

    Parameters
    ----------
    user_input : str
        The latest user message.
    history : list | None
        Prior chat turns. ``gr.ChatInterface`` always invokes its callback
        as ``fn(message, history)``, so this parameter is required — the
        original one-argument signature raised a TypeError on every chat
        turn. It is accepted but unused here.

    Returns
    -------
    str
        The routed tool's answer, or a fallback message if no node
        populated "output".
    """
    initial_state = AppState({"input": user_input})
    result = graph.invoke(initial_state)
    # .get avoids a KeyError surfacing in the UI if "output" is missing.
    return result.get("output", "Sorry, I could not produce an answer.")

if __name__ == "__main__":
    gr.ChatInterface(fn=chatbot_fn, title="LangGraph Alfred Assistant").launch()