hhhhmmmm committed on
Commit
9e26969
·
verified ·
1 Parent(s): 84d5dce

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +100 -0
agent.py CHANGED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import json
import os
import re
import urllib.request

from ddgs import DDGS  # Updated DuckDuckGo Search
from dotenv import load_dotenv
from langchain.agents import AgentExecutor, create_react_agent
from langchain.prompts import PromptTemplate
from langchain.schema import SystemMessage
from langchain.tools import tool
from langchain_openai import ChatOpenAI  # OpenAI-compatible for Groq API
10
+
11
# Load environment variables from a local .env file (e.g. provider API keys)
load_dotenv()
13
+
14
# --- Define Tools ---
@tool
def python_code_executor(code: str) -> str:
    """Execute Python code and return the result as a string.

    The snippet must assign its output to a variable named ``result``;
    use this tool for calculations or data processing.

    SECURITY NOTE: ``exec`` runs arbitrary code in-process with no
    sandboxing — only expose this tool to trusted models/deployments.
    """
    try:
        # Use ONE shared namespace for globals and locals. With the
        # original separate dicts (exec(code, {}, local_vars)), functions
        # defined inside the snippet could not see the snippet's own
        # top-level names and raised NameError.
        namespace: dict = {}
        exec(code, namespace)
        return str(namespace.get("result", "No result defined. Set 'result' variable."))
    except Exception as e:
        return f"Error: {str(e)}"
24
+
25
@tool
def download_file(url: str) -> str:
    """Download a file from URL and return its content (text if possible).

    Returns at most the first 1000 characters of the body decoded as
    UTF-8 (undecodable bytes replaced), or an error string on failure.
    """
    try:
        # The original called `requests`, which is never imported anywhere
        # in this file and raised NameError at runtime. urllib is stdlib;
        # urlopen raises HTTPError for 4xx/5xx, matching the old
        # raise_for_status() behavior via the except clause below.
        with urllib.request.urlopen(url, timeout=10) as response:
            body = response.read()
        return body.decode("utf-8", errors="replace")[:1000]  # Truncate for brevity
    except Exception as e:
        return f"Error downloading: {str(e)}"
34
+
35
@tool
def duckduckgo_search(query: str) -> str:
    """Perform a DuckDuckGo search and return top results as a short summary.

    Returns a JSON list of {"title", "snippet"} objects for up to three
    results, or a short error string on failure / no results.
    """
    try:
        with DDGS() as ddgs:
            results = list(ddgs.text(query, max_results=3))
        if not results:
            return "No good results found."
        # .get() guards against results missing "title"/"body" keys, which
        # would otherwise raise KeyError and be reported as a search error.
        return json.dumps(
            [{"title": r.get("title", ""), "snippet": r.get("body", "")} for r in results]
        )
    except Exception as e:
        return f"Search error: {str(e)}"
46
+
47
+
48
# Read the agent's system prompt from disk.
with open("system_prompt.txt", "r", encoding="utf-8") as prompt_file:
    system_prompt = prompt_file.read()

# Wrap it as a SystemMessage so it can be prepended to every conversation.
sys_msg = SystemMessage(content=system_prompt)

# Tools exposed to the LLM.
tools = [python_code_executor, download_file, duckduckgo_search]
60
+
61
# Build graph function
def build_graph(provider: str = "google"):
    """Build and compile the agent's LangGraph.

    Args:
        provider: LLM backend to use — "google", "groq" or "huggingface".

    Returns:
        A compiled StateGraph that alternates between the assistant (LLM)
        node and a tool-execution node until no more tool calls are made.

    Raises:
        ValueError: if ``provider`` is not one of the supported names.
    """
    # Select the chat model. The provider classes and the langgraph names
    # below were referenced but never imported in the original file, which
    # made every call to build_graph() fail with NameError. They are
    # imported lazily here — after provider validation — so only the
    # chosen backend's package is required at call time.
    if provider == "google":
        # Google Gemini
        from langchain_google_genai import ChatGoogleGenerativeAI
        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
    elif provider == "groq":
        # Groq https://console.groq.com/docs/models
        from langchain_groq import ChatGroq
        llm = ChatGroq(model="qwen-qwq-32b", temperature=0)  # optional : qwen-qwq-32b gemma2-9b-it
    elif provider == "huggingface":
        from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint
        llm = ChatHuggingFace(
            llm=HuggingFaceEndpoint(repo_id="Qwen/Qwen2.5-Coder-32B-Instruct"),
        )
    else:
        raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")

    from langgraph.graph import START, MessagesState, StateGraph
    from langgraph.prebuilt import ToolNode, tools_condition

    # Bind tools to LLM so it can emit structured tool calls.
    llm_with_tools = llm.bind_tools(tools)

    # Node
    def assistant(state: MessagesState):
        """Assistant node: run the LLM on system prompt + conversation."""
        return {"messages": [llm_with_tools.invoke([sys_msg] + state["messages"])]}

    builder = StateGraph(MessagesState)
    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))
    builder.add_edge(START, "assistant")
    # Route to "tools" when the last message contains tool calls, else END.
    builder.add_conditional_edges(
        "assistant",
        tools_condition,
    )
    builder.add_edge("tools", "assistant")

    # Compile graph
    return builder.compile()