"""Multi-agent setup built on smolagents.

Defines a manager CodeAgent that orchestrates two managed agents:
  * web_search_agent -- a ToolCallingAgent for web search / page visits,
  * code_agent       -- a CodeAgent that writes and runs Python, with
                        Wikipedia helper tools.

All agents run against local models served by Ollama via LiteLLM.
"""

from smolagents import (
    CodeAgent,
    ToolCallingAgent,
    # InferenceClientModel,
    # WebSearchTool,
    OpenAIServerModel,
    LiteLLMModel,
)

# Stdlib imports.
import os
import time

# Third-party packages; several are only imported so that the sandboxed
# CodeAgent interpreter has them available (see additional_authorized_imports).
import markdownify
import numpy
import pandas
import requests
import wikipedia
import xlrd

# Project-local tool collections.
from tools import (
    python_wikipedia_tools,
    smolagents_code_tools,
    smolagents_speech_tools,
    smolagents_web_tools,
)

# --- Alternative hosted model backends, kept for reference ------------------
# model = InferenceClientModel(
#     max_tokens=2096,
#     temperature=0.5,
#     model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
#     custom_role_conversions=None,
# )
# model = OpenAIServerModel(
#     model_id="gemini-2.0-flash",
#     api_base="https://generativelanguage.googleapis.com/v1beta/openai/",
#     api_key=os.getenv("GEMINI_API_KEY"),
# )

# --- Local models served by Ollama ------------------------------------------
model_gemma = LiteLLMModel(
    model_id="ollama/gemma3:27b",
    api_base="http://localhost:11434",
)

# Defined but not wired to an agent below; presumably kept as a drop-in
# alternative coding model -- TODO confirm it is used elsewhere.
model_devstral = LiteLLMModel(
    model_id="ollama/devstral:latest",
    api_base="http://localhost:11434",
)

# --- Agents ------------------------------------------------------------------
# Tool-calling agent dedicated to web search and page retrieval.
web_search_agent = ToolCallingAgent(
    tools=smolagents_web_tools,
    model=model_gemma,
    max_steps=10,
    # additional_authorized_imports=["wikipedia", "markdownify", "requests"],
    name="web_search_agent",
    description=(
        "A tool calling agent which can perform web searches. It has "
        "available tools including GoogleSearchTool, VisitWebpageTool and "
        "WikipediaSearchTool"
    ),
)

# Code-writing agent with Python interpreter and Wikipedia helper tools.
code_agent = CodeAgent(
    tools=smolagents_code_tools + python_wikipedia_tools,
    model=model_gemma,
    max_steps=10,
    additional_authorized_imports=[
        "time",
        "numpy",
        "pandas",
        "wikipedia",
        "xlrd",
        "markdownify",
        "requests",
    ],
    name="code_agent",
    description=(
        "A code agent which can write and interpret code. It has available "
        "tools including PythonInterpreterTool, search_wikipedia_page, "
        "get_wikipedia_page and get_wikipedia_summary"
    ),
)

# Top-level orchestrator: delegates work to the two managed agents above.
manager_agent = CodeAgent(
    tools=[],
    model=model_gemma,
    managed_agents=[web_search_agent, code_agent],
    max_steps=10,
    verbosity_level=1,
    name="manager_agent",
    description=(
        "Manages the web_search_agent and code_agent. Can perform web "
        "searches with the ToolCallingAgent called web_search_agent. Can use "
        "the CodeAgent called code_agent to run python code."
    ),
)