"""Launch a Gradio chat UI for a smolagents CodeAgent backed by an OpenAI model.

Reads prompt templates from prompts.yaml (located next to this file) and
registers the project's FinalAnswer tool so the agent can terminate runs
with a result.
"""
import os

import yaml
from smolagents import CodeAgent, GradioUI, InferenceClientModel

from tools.final_answer import FinalAnswerTool as FinalAnswer

# Directory containing this file, so prompts.yaml resolves regardless of
# the current working directory the script is launched from.
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))

model = InferenceClientModel(
    provider="openai",
    model_id="gpt-4o-mini",
    # None if the env var is unset; the client will then fail at request time.
    api_key=os.getenv("OPENAI_API_KEY"),
)

final_answer = FinalAnswer()

# safe_load: never construct arbitrary Python objects from the YAML file.
with open(os.path.join(CURRENT_DIR, "prompts.yaml"), "r", encoding="utf-8") as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    # Fix: final_answer was instantiated but never registered (tools=[]),
    # leaving the agent without the tool it needs to return a final answer.
    tools=[final_answer],
    managed_agents=[],
    max_steps=20,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    executor_type="local",
    executor_kwargs={},
    max_print_outputs_length=None,
    prompt_templates=prompt_templates,
)

if __name__ == "__main__":
    GradioUI(agent).launch()