| | """Plan Node - Initial ReAct planning loop""" |
| | from typing import Dict, Any |
| | from langchain_core.messages import SystemMessage, HumanMessage, AIMessage |
| | from langchain_groq import ChatGroq |
| | from src.tracing import get_langfuse_callback_handler |
| |
|
| |
|
def load_system_prompt() -> str:
    """Load the system prompt text from ``./prompts/system_prompt.txt``.

    Returns:
        The stripped file contents, or a generic GAIA-assistant fallback
        prompt when the file cannot be read.
    """
    try:
        with open("./prompts/system_prompt.txt", "r", encoding="utf-8") as f:
            return f.read().strip()
    except OSError:
        # Broadened from FileNotFoundError: a permission error, bad mount,
        # or directory-in-place-of-file should also fall back gracefully
        # rather than crash the planning node.
        return "You are a helpful assistant tasked with answering GAIA benchmark questions."
| |
|
| |
|
def plan_node(state: Dict[str, Any]) -> Dict[str, Any]:
    """Initial ReAct planning node.

    Prepends the system prompt to the conversation and, when the most
    recent message is from the human, asks the LLM for a brief analysis
    and plan before handing off to the router.

    Args:
        state: Graph state dict; reads the "messages" list (LangChain
            message objects with a ``.type`` attribute).

    Returns:
        Partial state update containing "messages", plus "plan_complete"
        and "current_step" flags (except for the empty-state seed case,
        which returns only the system message).
    """
    print("Plan Node: Processing query")

    try:
        system_prompt = load_system_prompt()

        # Empty state: seed the conversation with just the system prompt.
        messages = state.get("messages", [])
        if not messages:
            return {"messages": [SystemMessage(content=system_prompt)]}

        # Rebuild the transcript with exactly one system message up front,
        # dropping any stale system messages from earlier turns.
        plan_messages = [SystemMessage(content=system_prompt)]
        plan_messages.extend(msg for msg in messages if msg.type != "system")

        planning_instruction = """
Analyze this query and prepare a plan for answering it. Consider:
1. What type of information or processing is needed?
2. What tools or agents would be most appropriate?
3. What is the expected output format?

Provide a brief analysis and initial plan.
"""

        # Only invoke the LLM when the user just spoke; otherwise pass the
        # rebuilt transcript through unchanged.
        if plan_messages[-1].type == "human":
            # Construct the LLM client and tracing callbacks lazily so the
            # no-op paths above never pay for (or fail on) Groq client
            # initialization.
            llm = ChatGroq(model="qwen-qwq-32b", temperature=0.1)
            callback_handler = get_langfuse_callback_handler()
            callbacks = [callback_handler] if callback_handler else []

            analysis_messages = plan_messages + [HumanMessage(content=planning_instruction)]
            response = llm.invoke(analysis_messages, config={"callbacks": callbacks})
            plan_messages.append(response)

        return {
            "messages": plan_messages,
            "plan_complete": True,
            "current_step": "routing",
        }

    except Exception as e:
        # Boundary catch: a planning failure must not kill the graph, so
        # fall back to the original messages prefixed with the system prompt.
        print(f"Plan Node Error: {e}")
        system_prompt = load_system_prompt()
        return {
            "messages": [SystemMessage(content=system_prompt)] + state.get("messages", []),
            "plan_complete": True,
            "current_step": "routing",
        }