# NOTE: the "Spaces: / Sleeping / Sleeping" lines that preceded this file were
# Hugging Face Spaces page residue from a copy-paste, not part of the program.
"""
main.py
───────
Entry point — runs the CLI chat loop.
Gradio frontend will replace this file in the next phase.
"""
| from langchain_core.messages import HumanMessage | |
| from app.graph.builder import build_graph | |
| from app.state import AgentState | |
def main():
    """Run an interactive CLI chat loop against the compiled LangGraph agent.

    Reads user turns from stdin, pushes each turn through the graph with the
    accumulated message history, and carries the graph's returned messages
    forward into the next turn. Type 'quit', 'exit', or 'q' to leave.
    """
    graph = build_graph()
    # Fixed thread id: every run of this script resumes the same checkpointed
    # session (relevant only if build_graph() attached a checkpointer).
    thread_config = {"configurable": {"thread_id": "session-001"}}
    conversation_history = []

    print("\n🚀 LangGraph Agent ready. Type 'quit' to exit.")
    print("━" * 50)
    print("Try:")
    print("  • 'What is RAG?'     → RAG route")
    print("  • 'Calculate 15 * 8' → Tool route")
    print("  • 'Weather in Pune'  → Tool route")
    print("  • 'Tell me a joke'   → General route")
    print("━" * 50 + "\n")

    while True:
        user_input = input("You: ").strip()
        if not user_input:
            continue
        if user_input.lower() in ("quit", "exit", "q"):
            print("Goodbye! 👋")
            break

        conversation_history.append(HumanMessage(content=user_input))

        # Fresh per-turn state: only `messages` persists between turns; all
        # other channels are reset to their neutral defaults each iteration.
        initial_state: AgentState = {
            "messages": conversation_history.copy(),
            "query": user_input,
            "route": "",
            "rag_context": "",
            "tool_calls": [],
            "tool_results": [],
            "response": "",
            "retry_count": 0,
            "hitl_approved": False,
            "evaluation_score": 0.0,
            "guardrail_passed": True,
            "memory_summary": "",
        }

        # NOTE(review): if the graph's `messages` channel uses an additive
        # reducer together with a checkpointer, re-sending the full history
        # each turn can duplicate messages — confirm against builder's reducer.
        final_state = graph.invoke(initial_state, config=thread_config)
        conversation_history = final_state["messages"]


if __name__ == "__main__":
    main()