Ramja committed on
Commit
98cb5d3
·
verified ·
1 Parent(s): 10ef737

Update agents.py

Browse files

Agents added for attempt 3

Files changed (1) hide show
  1. agents.py +46 -7
agents.py CHANGED
@@ -1,12 +1,51 @@
1
- from smolagents import CodeAgent, HfApiModel
2
- from tools import duck_search_tool, google_search_tool
3
 
4
 
5
 
6
  # Initialize the model for agent_1
7
- model_1 = HfApiModel()
8
- agent_1_instructions = "Search for and retrieve relevant information about the question asked. Do not respond with any thought just the final answer:\n"
9
- agent_1 = CodeAgent(
10
- model = model_1,
11
- tools=[duck_search_tool, google_search_tool]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
12
  )
 
1
from smolagents import CodeAgent, HfApiModel, LiteLLMModel, InferenceClientModel
from tools import duck_search_tool, visit_web_page_tool

# Shared configuration for every web-search agent defined below.
_WEB_AGENT_TOOLS = [
    duck_search_tool,
    visit_web_page_tool,
]
_WEB_AGENT_DESCRIPTION = (
    "Searches the web and retrieve relevant information about the question asked"
)


def _make_web_agent(model, name):
    """Build a CodeAgent wired with the shared web-search tool set.

    Args:
        model: Any smolagents model backend (HfApiModel,
            InferenceClientModel, LiteLLMModel, ...).
        name: Unique agent name used to identify this agent.

    Returns:
        A configured CodeAgent instance.
    """
    return CodeAgent(
        model=model,
        # Fresh list per agent so the agents never share one mutable list.
        tools=list(_WEB_AGENT_TOOLS),
        name=name,
        description=_WEB_AGENT_DESCRIPTION,
        verbosity_level=0,  # keep logs quiet; raise when debugging
        max_steps=10,
    )


# Agent 1: Qwen coder model served via the Together provider.
# NOTE(review): max_tokens=8096 looks like a typo for 8192 — confirm before changing.
model_1 = HfApiModel(
    "Qwen/Qwen2.5-Coder-32B-Instruct", provider="together", max_tokens=8096
)
web_agent_1 = _make_web_agent(model_1, "web_agent_1")

# Agent 2: Llama 3.3 70B through the Hugging Face inference client.
model_2 = InferenceClientModel(model_id="meta-llama/Llama-3.3-70B-Instruct")
web_agent_2 = _make_web_agent(model_2, "web_agent_2")

# Agent 3: Claude 3.5 Sonnet via LiteLLM.
model_3 = LiteLLMModel(model_id="anthropic/claude-3-5-sonnet-latest")
web_agent_3 = _make_web_agent(model_3, "web_agent_3")