# agent.py — smolagents-based agent definition.
# NOTE(review): this file was exported from the Hugging Face file viewer;
# the original header lines (commit feb9394, 2.01 kB) were page chrome,
# not Python, and have been converted into this comment so the file parses.
import os
import dotenv
import yaml
from smolagents import CodeAgent, LiteLLMModel, OpenAIServerModel, HfApiModel, DuckDuckGoSearchTool, VisitWebpageTool
from tools import ToolReadFiles, ToolReverseString, ToolDownloadImage, FinalAnswerTool
dotenv.load_dotenv()
# HF_API_KEY = os.getenv("HF_API_KEY")
# OPENAI_KEY = os.getenv("OPEN_AI_KEY")
# Create prompt template
# with open("prompts.yaml", 'r') as stream:
# prompt_templates = yaml.safe_load(stream)
class Agent:
    """Thin wrapper around a smolagents CodeAgent backed by a Hugging Face
    Inference API model (Qwen/Qwen3-235B-A22B).

    Reads the API token from the HF_API_KEY environment variable (loaded via
    dotenv at module import).
    """

    def __init__(self):
        self.hf_token = os.getenv("HF_API_KEY")
        self.model = HfApiModel(
            model_id="Qwen/Qwen3-235B-A22B",
            token=self.hf_token,
        )
        self.agent = CodeAgent(
            model=self.model,
            add_base_tools=True,
            # Fix: smolagents expects tool *instances*. The original list mixed
            # three bare classes with an instantiated FinalAnswerTool(), so the
            # missing "()" on the first three was accidental.
            tools=[ToolReadFiles(), ToolReverseString(), ToolDownloadImage(), FinalAnswerTool()],
            max_steps=10,
            # Fix: authorized imports must be *import* names — the
            # beautifulsoup4 package is imported as `bs4`, so the original
            # 'beautifulsoup4' entry could never match an import statement.
            # NOTE(review): 'wiki' looks unusual — possibly 'wikipedia' was
            # intended; left unchanged pending confirmation.
            additional_authorized_imports=['numpy', 'bs4', 'wiki', 're', 'pandas'],
        )

    def __call__(self, query: str, file: str) -> str:
        """Run the agent on `query`, appending a [FILE] marker when a file
        reference is supplied.

        Uses truthiness rather than `!= ""` so that a `None` file argument does
        not get formatted into the prompt as the literal text "None".
        """
        if file:
            query = f"{query} [FILE] {file}"
        return self.ask(query)

    def ask(self, query: str) -> str:
        """Execute one agent run and return its result."""
        return self.agent.run(query)
if __name__ == "__main__":
    # Quick manual smoke test: ask a single factual question and print the reply.
    otto = Agent()
    answer = otto.ask("Who was President of the United States in 1957?")
    print(answer)