kamorou commited on
Commit
527c902
·
verified ·
1 Parent(s): 985ea2a

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +117 -0
agent.py CHANGED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
from typing import Annotated, List, TypedDict

import gradio as gr
from dotenv import load_dotenv
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
from langchain_experimental.tools import PythonREPLTool
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
from langgraph.graph import END, StateGraph
from langgraph.prebuilt import ToolNode

# --- 1. LOAD API KEYS ---
# Pull secrets from a local .env file (if present) into the process environment.
load_dotenv()
hf_token = os.getenv("HF_TOKEN")
tavily_api_key = os.getenv("TAVILY_API_KEY")

# Fail fast, naming exactly which variable(s) are missing instead of raising a
# generic "one of them is unset" error.
_missing = [
    name
    for name, value in (("HF_TOKEN", hf_token), ("TAVILY_API_KEY", tavily_api_key))
    if not value
]
if _missing:
    raise ValueError(
        f"Missing required environment variable(s): {', '.join(_missing)}"
    )

# TavilySearchResults reads its key from the environment, so export it explicitly.
os.environ["TAVILY_API_KEY"] = tavily_api_key

# --- 2. DEFINE TOOLS and INITIALIZE LLM ---
# Web search (top-3 hits) plus a Python REPL for calculations / code execution.
tools = [TavilySearchResults(max_results=3), PythonREPLTool()]
tool_node = ToolNode(tools)

repo_id = "meta-llama/Meta-Llama-3-8B-Instruct"
llm = HuggingFaceEndpoint(
    repo_id=repo_id,
    huggingfacehub_api_token=hf_token,
    temperature=0.1,  # near-deterministic output for factual QA
    max_new_tokens=1024,
)
# BUG FIX: HuggingFaceEndpoint is a plain text-completion LLM and does not
# support .bind_tools(); tool calling requires wrapping it in the
# ChatHuggingFace chat-model adapter and binding the tools on that.
chat_model = ChatHuggingFace(llm=llm)
llm_with_tools = chat_model.bind_tools(tools)

# --- 3. DEFINE THE AGENT'S STATE ---
class AgentState(TypedDict):
    """Graph state: the running conversation transcript.

    The Annotated reducer concatenates each node's newly produced messages
    onto the accumulated history instead of overwriting it.
    """

    # `old + new` list concatenation preserves every prior message.
    messages: Annotated[List[BaseMessage], lambda old, new: old + new]

# SYSTEM PROMPT
# GAIA-style answer-formatting contract for the model: every reply must end
# with a "FINAL ANSWER: ..." line obeying the number/string/list rules below.
# NOTE(review): this string must be injected into the message list (e.g. as a
# SystemMessage) for the rules to take effect — confirm the caller does so.
system_prompt = """
You are a helpful assistant tasked with answering questions using a set of tools.
Now, I will ask you a question. Report your thoughts, and finish your answer with the following template:
FINAL ANSWER: [YOUR FINAL ANSWER].
YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings. If you are asked for a number, don't use comma to write your number neither use units such as $ or percent sign unless specified otherwise. If you are asked for a string, don't use articles, neither abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise. If you are asked for a comma separated list, apply the above rules depending of whether the element to be put in the list is a number or a string.
Your answer should only start with "FINAL ANSWER: ", then follows with the answer.
"""

# --- 4. DEFINE THE NODES OF THE GRAPH ---
def agent_node(state):
    """Invoke the tool-enabled LLM on the current transcript.

    Returns a partial state update; the reducer declared on ``messages``
    appends the reply to the history rather than replacing it.
    """
    reply = llm_with_tools.invoke(state["messages"])
    return {"messages": [reply]}

# --- 5. DEFINE THE EDGES OF THE GRAPH ---
def should_continue(state):
    """Route to the tool node while the latest reply requests tool calls."""
    newest = state["messages"][-1]
    return "tools" if newest.tool_calls else END

# --- 6. ASSEMBLE THE GRAPH ---
workflow = StateGraph(AgentState)

workflow.add_node("agent", agent_node)
workflow.add_node("tools", tool_node)

workflow.set_entry_point("agent")

# BUG FIX: should_continue returns either "tools" or the END sentinel, but the
# original path map only had keys "tools" and the literal string "end", so the
# END return value had no matching route and raised at runtime. Map END itself.
workflow.add_conditional_edges(
    "agent",
    should_continue,
    {
        "tools": "tools",
        END: END,
    },
)

# After any tool execution, hand control back to the agent for the next step.
workflow.add_edge("tools", "agent")

app = workflow.compile()

# --- 7. CREATE THE USER INTERFACE (UI) ---
def run_agent(query: str):
    """Run the agent graph on *query* and return its final text answer.

    Streams graph steps; the last "agent" output observed is the final LLM
    reply. Any failure is reported as a string rather than raised, so a UI
    caller never crashes.
    """
    try:
        # BUG FIX: the module-level system_prompt was defined but never sent
        # to the model; prepend it so the answer-format rules actually apply.
        inputs = {
            "messages": [
                SystemMessage(content=system_prompt),
                HumanMessage(content=query),
            ]
        }
        final_response = None
        # Stream to completion; recursion_limit caps agent<->tools round trips.
        for step in app.stream(inputs, {"recursion_limit": 10}):
            if "agent" in step:
                final_response = step["agent"]["messages"][-1].content
        return final_response if final_response else "Agent did not produce a final answer."
    except Exception as e:
        # Swallow-and-report: the UI displays the error text instead of crashing.
        return f"An error occurred: {e}"

103
+ # iface = gr.Interface(
104
+ # fn=run_agent,
105
+ # inputs=gr.Textbox(lines=2, placeholder="Ask the agent anything..."),
106
+ # outputs="markdown",
107
+ # title="GAIA Agent v0.3 (LangGraph + Code Interpreter)",
108
+ # description="This agent can use web search and a Python code interpreter.",
109
+ # examples=[
110
+ # ["What is the square root of the number of states in the USA?"],
111
+ # ["What is the total number of letters in the names of the first three planets in our solar system?"]
112
+ # ],
113
+ # )
114
+
115
+
116
+ # # --- 8. LAUNCH THE APP ---
117
+ # iface.launch()