"""
main.py
-------
Entry point - runs the CLI chat loop.
Gradio frontend will replace this file in the next phase.
"""
from langchain_core.messages import HumanMessage
from app.graph.builder import build_graph
from app.state import AgentState
def main() -> None:
    """Run an interactive CLI chat loop against the compiled LangGraph agent.

    Keeps the full message history across turns and feeds it into every
    graph invocation so the agent has conversational context. Exit with
    'quit'/'exit'/'q', or Ctrl-C / Ctrl-D.
    """
    graph = build_graph()
    # A stable thread_id lets LangGraph's checkpointer associate all
    # invocations in this session with one persisted thread.
    thread_config = {"configurable": {"thread_id": "session-001"}}
    conversation_history = []

    print("\nLangGraph Agent ready. Type 'quit' to exit.")
    print("=" * 50)
    print("Try:")
    print("  - 'What is RAG?'      -> RAG route")
    print("  - 'Calculate 15 * 8'  -> Tool route")
    print("  - 'Weather in Pune'   -> Tool route")
    print("  - 'Tell me a joke'    -> General route")
    print("=" * 50 + "\n")

    while True:
        try:
            user_input = input("You: ").strip()
        except (EOFError, KeyboardInterrupt):
            # Treat Ctrl-D / Ctrl-C as a normal quit instead of a traceback.
            print("\nGoodbye!")
            break

        if not user_input:
            continue
        if user_input.lower() in ("quit", "exit", "q"):
            print("Goodbye!")
            break

        conversation_history.append(HumanMessage(content=user_input))

        # Build a fresh per-turn state; only `messages` carries over between
        # turns — every other field is reset to its neutral default.
        initial_state: AgentState = {
            "messages": conversation_history.copy(),
            "query": user_input,
            "route": "",
            "rag_context": "",
            "tool_calls": [],
            "tool_results": [],
            "response": "",
            "retry_count": 0,
            "hitl_approved": False,
            "evaluation_score": 0.0,
            "guardrail_passed": True,
            "memory_summary": "",
        }

        final_state = graph.invoke(initial_state, config=thread_config)
        # Adopt the graph's updated message list (now including the AI reply)
        # so the next turn sees the full conversation.
        conversation_history = final_state["messages"]
# Standard script entry guard: run the chat loop only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()