"""Run a smolagents ToolCallingAgent over sample questions.

Loads Hugging Face credentials from the environment (or a .env file),
wires up web-search/utility tools plus a wrapped LangChain Wikipedia
tool, and — when executed as a script — runs the agent on the first
sample question and prints the answer.
"""

import os

from dotenv import load_dotenv
from huggingface_hub import login
from langchain_community.agent_toolkits.load_tools import load_tools
from smolagents import (
    CodeAgent,
    DuckDuckGoSearchTool,
    FinalAnswerTool,
    GoogleSearchTool,
    HfApiModel,
    InferenceClientModel,
    PythonInterpreterTool,
    Tool,
    ToolCallingAgent,
    VisitWebpageTool,
)

from tools.transcriber import transcribe_audio

# Authenticate with the Hugging Face Hub at import time.
# NOTE(review): raises KeyError if HF_API_KEY is unset — fail-fast is
# intentional; define it in the environment or in a .env file.
load_dotenv()
login(os.environ["HF_API_KEY"])

# Imported after login() to preserve the original ordering — presumably
# the sample data is independent of auth, but verify before hoisting.
from data.sample_questions import QUESTIONS  # noqa: E402

# --- Tools -----------------------------------------------------------------

# Wrap LangChain's Wikipedia tool so smolagents agents can call it.
wikipedia = Tool.from_langchain(load_tools(["wikipedia"])[0])

tools = [
    # GoogleSearchTool(),  # disabled — presumably missing API credentials; confirm
    DuckDuckGoSearchTool(),
    VisitWebpageTool(),
    PythonInterpreterTool(),
    FinalAnswerTool(),
    wikipedia,
    transcribe_audio,
]

# --- Models ----------------------------------------------------------------

# Primary LLM driving the agent; low temperature keeps tool calls stable.
model = HfApiModel(
    "Qwen/Qwen2.5-72B-Instruct",
    # "deepseek-ai/DeepSeek-R1",
    provider="together",
    # max_tokens=40096,
    temperature=0.1,
)

# NOTE(review): `llm` is constructed but never used below — the agent is
# built on `model`. Kept for backward compatibility with any importer,
# but it is a candidate for removal (it costs a model instantiation at
# import time).
llm = HfApiModel("meta-llama/Llama-3.2-3B-Instruct", temperature=0)

# --- Agent ------------------------------------------------------------------

hf_tool_calling_agent = ToolCallingAgent(
    model=model,
    tools=tools,
    max_steps=10,
    name="web_search_tool_calling_agent",
    description="Can perform web searches and can visit the websites",
    verbosity_level=2,
)
# Narrow the rich console so agent logs wrap in small terminals.
hf_tool_calling_agent.logger.console.width = 66


if __name__ == "__main__":
    question = QUESTIONS[0]
    answer = hf_tool_calling_agent.run(question)
    print(answer)