# GAIA_v2 / agent.py
# Author: sajjadpsavoji
# Last change: "update prompt and allow for up to 5 steps" (commit ec5a2e5)
import logging
import os

import yaml
from smolagents import CodeAgent, InferenceClientModel

from tools.final_answer import FinalAnswerTool as FinalAnswer
from tools.visit_webpage import VisitWebpageTool as VisitWebpage
from tools.web_search import DuckDuckGoSearchTool as WebSearch
# Absolute directory containing this file; used to locate prompts.yaml.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
class Agent:
    """Question-answering agent wrapping a smolagents ``CodeAgent``.

    Builds a ``CodeAgent`` backed by a hosted Qwen coder model with
    web-search, webpage-visit, and final-answer tools, using the prompt
    templates stored in ``prompts.yaml`` next to this file.
    """

    def __init__(
        self,
        default_answer: str = "Sorry, I don’t have an answer for that."
    ):
        """Construct the underlying CodeAgent.

        Args:
            default_answer: Fallback string returned by ``__call__`` when
                the agent raises or produces an empty answer.
        """
        # select the LLM model to use
        model = InferenceClientModel(
            model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
        )
        # select the tools to use
        tools = [
            WebSearch(),
            VisitWebpage(),
            FinalAnswer(),
        ]
        # load the prompt templates from the prompts.yaml file
        with open(os.path.join(CURRENT_DIR, "prompts.yaml"), 'r') as stream:
            prompt_templates = yaml.safe_load(stream)
        # create the agent with the selected model, tools, and prompt templates
        self.agent = CodeAgent(
            model=model,
            tools=tools,
            managed_agents=[],
            max_steps=5,  # allow up to 5 reasoning/tool steps per question
            verbosity_level=1,
            grammar=None,
            planning_interval=None,
            name=None,
            description=None,
            executor_type='local',
            executor_kwargs={},
            max_print_outputs_length=None,
            prompt_templates=prompt_templates
        )
        # set the default answer to return if the agent fails to answer
        self.default_answer = default_answer

    def __call__(self, question: str) -> str:
        """Answer ``question``, falling back to ``default_answer`` on failure.

        Args:
            question: The natural-language question to answer.

        Returns:
            The agent's answer coerced to ``str``, or ``default_answer``
            when the run raises or yields ``None``/an empty result.
        """
        try:
            answer = self.agent.run(question)
        except Exception:
            # Log the traceback instead of silently swallowing it so
            # failures remain diagnosable, but still return the fallback
            # so the caller always gets a usable string.
            logging.getLogger(__name__).exception(
                "Agent run failed for question: %r", question
            )
            return self.default_answer
        # if the agent returns None or an empty string, also use fallback
        if not answer:
            return self.default_answer
        # agent.run may return a non-str result (e.g. a rich answer object);
        # the declared contract is str, so coerce before returning.
        return str(answer)