# Source: a Hugging Face Space (status banner from the scrape removed).
# Multi-agent web-search setup using smolagents.
# Third-party agent framework.
from smolagents import CodeAgent, LiteLLMModel, InferenceClientModel

# Project-local tool implementations.
from tools import duck_search_tool, visit_web_page_tool, youtube_transcript_search
# --- Agent 1: web + YouTube-transcript search agent ---

# Instruction prefix for queries sent to agent_1 (presumably prepended by a
# caller elsewhere in the file — not used in this chunk).
agent_1_instructions = (
    "Search for and retrieve relevant information about the question asked. "
    "Do not respond with any thought just the final answer:\n"
)

# Qwen coder model served through the "together" inference provider.
# NOTE(review): max_tokens=8096 looks like a typo for 8192 — left as-is; confirm.
model_1 = InferenceClientModel(
    model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    provider="together",
    max_tokens=8096,
)

web_agent_1 = CodeAgent(
    model=model_1,
    tools=[
        duck_search_tool,
        visit_web_page_tool,
        youtube_transcript_search,  # only this agent gets transcript search
    ],
    name="web_agent_1",
    description="Searches youtube transcripts and retrieve relevant information about the question asked",
    verbosity_level=0,  # suppress step-by-step logging
    max_steps=10,  # cap the agent's reasoning/tool-call loop
)
# --- Agent 2: general web-search agent backed by Llama 3.3 70B ---
model_2 = InferenceClientModel(model_id="meta-llama/Llama-3.3-70B-Instruct")

web_agent_2 = CodeAgent(
    model=model_2,
    tools=[
        duck_search_tool,
        visit_web_page_tool,
    ],
    name="web_agent_2",
    description="Searches the web and retrieve relevant information about the question asked",
    verbosity_level=0,  # suppress step-by-step logging
    max_steps=10,  # cap the agent's reasoning/tool-call loop
)
# --- Agent 3: general web-search agent backed by Claude 3.5 Sonnet via LiteLLM ---
model_3 = LiteLLMModel(model_id="anthropic/claude-3-5-sonnet-latest")

web_agent_3 = CodeAgent(
    model=model_3,
    tools=[
        duck_search_tool,
        visit_web_page_tool,
    ],
    name="web_agent_3",
    description="Searches the web and retrieve relevant information about the question asked",
    verbosity_level=0,  # suppress step-by-step logging
    max_steps=10,  # cap the agent's reasoning/tool-call loop
)