# PRONGS-CHIRAG — resolved errors (commit 7ac8208)
import gradio as gr
from langgraph.graph import StateGraph, END
from langchain_core.runnables import RunnableLambda
# Import tools
from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
from retriever import load_guest_dataset,GuestInfoTool
# Initialize all tools.
# BUG FIX: the original bound the *classes* themselves (e.g.
# `search_tool = DuckDuckGoSearchTool`), so every later
# `tool.invoke(query)` call would fail on an unbound method.
# Each tool must be instantiated.
search_tool = DuckDuckGoSearchTool()
weather_tool = WeatherInfoTool()
hub_stats_tool = HubStatsTool()
# NOTE(review): GuestInfoTool may need the guest dataset —
# `load_guest_dataset` is imported above but never used; confirm
# whether its result should be passed to this constructor.
guest_info_tool = GuestInfoTool()
# Define simple state container
class AppState(dict):
    """Plain ``dict`` subclass used as the graph's state container.

    Keys used by this app: ``"input"`` (the user's query) and
    ``"output"`` (the selected tool's result).
    """
# Define tool handlers
def handle_guest_info(state: AppState):
    """Answer the current query with the guest-info tool.

    Reads ``state["input"]`` (defaulting to ``""``) and returns a fresh
    ``AppState`` carrying the original input plus the tool's output.
    """
    question = state.get("input", "")
    answer = guest_info_tool.invoke(question)
    return AppState({"input": question, "output": answer})
def handle_weather(state: AppState):
    """Answer the current query with the weather tool.

    Reads ``state["input"]`` (defaulting to ``""``) and returns a fresh
    ``AppState`` carrying the original input plus the tool's output.
    """
    question = state.get("input", "")
    answer = weather_tool.invoke(question)
    return AppState({"input": question, "output": answer})
def handle_search(state: AppState):
    """Answer the current query with the web-search tool.

    Reads ``state["input"]`` (defaulting to ``""``) and returns a fresh
    ``AppState`` carrying the original input plus the tool's output.
    """
    question = state.get("input", "")
    answer = search_tool.invoke(question)
    return AppState({"input": question, "output": answer})
def handle_hub_stats(state: AppState):
    """Answer the current query with the Hub-stats tool.

    Reads ``state["input"]`` (defaulting to ``""``) and returns a fresh
    ``AppState`` carrying the original input plus the tool's output.
    """
    question = state.get("input", "")
    answer = hub_stats_tool.invoke(question)
    return AppState({"input": question, "output": answer})
# Build the graph: register one node per tool handler.
builder = StateGraph(AppState)
_node_handlers = {
    "guest_info": handle_guest_info,
    "weather": handle_weather,
    "search": handle_search,
    "hub_stats": handle_hub_stats,
}
for _node_name, _handler in _node_handlers.items():
    builder.add_node(_node_name, RunnableLambda(_handler))
# Define entry and routing logic
def router(state: AppState):
    """Pick the tool node to run, based on keywords in the query.

    Checks the lower-cased input for, in priority order: "weather",
    "guest", then "hub"; anything else falls through to web search.
    """
    text = state.get("input", "").lower()
    routes = (
        ("weather", "weather"),
        ("guest", "guest_info"),
        ("hub", "hub_stats"),
    )
    for keyword, node in routes:
        if keyword in text:
            return node
    return "search"
# Route each request to exactly one tool node, then stop.
# BUG FIX: ``set_entry_point`` expects a *node name* string, not a
# callable — a routing function must be installed as a conditional
# entry point instead.
builder.set_conditional_entry_points(router) if False else builder.set_conditional_entry_point(router)
# Every tool node is terminal: a plain edge to END is the idiomatic
# form (the original wrapped a constant ``lambda _: END`` in
# ``add_conditional_edges``, which also omitted the required path map).
for _terminal in ("guest_info", "weather", "search", "hub_stats"):
    builder.add_edge(_terminal, END)
# Compile the graph into a runnable.
graph = builder.compile()
# Define Gradio UI
def chatbot_fn(user_input, history=None):
    """Gradio chat callback: run the graph on ``user_input``.

    Args:
        user_input: The user's latest message.
        history: Chat history supplied by ``gr.ChatInterface``. Unused,
            but required — ChatInterface calls ``fn(message, history)``,
            so the original one-argument signature raised ``TypeError``
            on every turn. The default keeps direct one-argument calls
            working.

    Returns:
        The ``"output"`` value produced by the graph run.
    """
    result = graph.invoke(AppState({"input": user_input}))
    return result["output"]


if __name__ == "__main__":
    gr.ChatInterface(fn=chatbot_fn, title="LangGraph Alfred Assistant").launch()