import os

import dotenv
import yaml
from smolagents import (
    CodeAgent,
    DuckDuckGoSearchTool,
    LiteLLMModel,
    OpenAIServerModel,
    VisitWebpageTool,
)

from tools import (
    FinalAnswerTool,
    ToolDownloadImage,
    ToolReadFiles,
    ToolReverseString,
)

# Load API keys (GEMINI_API_KEY, ...) from a local .env file into the environment.
dotenv.load_dotenv()


class Agent:
    """Thin wrapper around a smolagents ``CodeAgent`` backed by a Gemini model.

    The agent is configured once at construction time; queries are submitted
    via :meth:`ask` or by calling the instance directly.
    """

    def __init__(self) -> None:
        """Build the LiteLLM Gemini model and the CodeAgent around it."""
        self.gemini_key: str | None = os.getenv("GEMINI_API_KEY")
        self.model = LiteLLMModel(
            model_id="gemini/gemini-2.0-flash-lite",
            api_key=self.gemini_key,
        )
        self.agent = CodeAgent(
            model=self.model,
            add_base_tools=True,
            # NOTE(review): three tools are passed as bare names while
            # FinalAnswerTool is instantiated — confirm the ``tools`` module
            # exports ready-made instances; if these are classes they need
            # to be constructed (``ToolReadFiles()`` etc.) before being
            # handed to CodeAgent.
            tools=[
                ToolReadFiles,
                ToolReverseString,
                ToolDownloadImage,
                FinalAnswerTool(self.model),
            ],
            max_steps=10,
            # Entries must be *importable module names*: 'beautifulsoup4' is
            # the PyPI distribution name and can never be imported — the
            # module is 'bs4'. ('wiki' looks dubious too — presumably
            # 'wikipedia' was intended; TODO confirm.)
            additional_authorized_imports=["numpy", "bs4", "wiki", "re", "pandas"],
        )

    def __call__(self, query: str, file: str) -> str:
        """Run *query*, optionally tagging an attached *file* path/name.

        A non-empty *file* is appended to the query with a ``[FILE]`` marker
        so the agent's tools can pick it up.
        """
        if file:
            query = f"{query} [FILE] {file}"
        return self.ask(query)

    def ask(self, query: str) -> str:
        """Submit *query* to the underlying CodeAgent and return its answer."""
        return self.agent.run(query)


if __name__ == "__main__":
    otto = Agent()
    response = otto.ask("Who was President of the United States in 1957?")
    print(response)