from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool, LiteLLMModel
import datetime
import requests
import pytz
import yaml
import os
from dotenv import load_dotenv
import json
import xml.etree.ElementTree as ET
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI

load_dotenv()
api_key = os.environ.get("GOOGLE_API_KEY")

# Atom XML namespace used by the arXiv API responses.
_ATOM_NS = '{http://www.w3.org/2005/Atom}'


@tool
def search_arxiv_papers(topic: str, max_results: int = 10) -> str:
    """Searches arXiv for papers related to the given topic and returns results as JSON.

    Args:
        topic: The search query for arXiv.
        max_results: Maximum number of results to return (default 10).
    """
    try:
        url = 'http://export.arxiv.org/api/query'
        params = {
            'search_query': f'all:{topic}',
            'start': 0,
            'max_results': max_results,
            'sortBy': 'submittedDate',
            'sortOrder': 'descending',
        }
        headers = {'User-Agent': 'Mozilla/5.0'}  # arXiv requires user agent
        # Fix: the headers dict was previously built but never passed to the
        # request; also add a timeout so the agent cannot hang indefinitely.
        response = requests.get(url, params=params, headers=headers, timeout=30)
        response.raise_for_status()

        root = ET.fromstring(response.content)
        entries = root.findall(f'{_ATOM_NS}entry')

        papers = []
        for entry in entries:
            title = entry.find(f'{_ATOM_NS}title').text.strip()
            authors = [
                author.find(f'{_ATOM_NS}name').text
                for author in entry.findall(f'{_ATOM_NS}author')
            ]
            summary = entry.find(f'{_ATOM_NS}summary').text.strip()
            link = entry.find(f'{_ATOM_NS}id').text
            papers.append({
                'title': title,
                'authors': authors,
                'summary': summary,
                'link': link,
            })
        return json.dumps(papers)
    except Exception as e:
        # Tool contract: return an error string rather than raise, so the
        # agent can observe the failure and retry or report it.
        return f"Error searching arXiv: {str(e)}"


@tool
def save_papers_to_folder(papers_json: str, folder: str = "downloads") -> str:
    """Saves papers data to specified folder as text files.

    Args:
        papers_json: JSON string containing papers data.
        folder: Target folder path (default 'downloads').
    """
    try:
        os.makedirs(folder, exist_ok=True)
        papers = json.loads(papers_json)

        for idx, paper in enumerate(papers, 1):
            # Handle missing fields gracefully
            title = paper.get('title', f'Untitled_{idx}')
            authors = paper.get('authors', ['Unknown'])
            link = paper.get('link', 'No link available')
            summary = paper.get('summary', 'No summary available')

            # Improved filename sanitization: keep alphanumerics, replace the
            # rest with underscores, and cap the length at 50 characters.
            clean_title = "".join([c if c.isalnum() else "_" for c in title])[:50]
            filename = os.path.join(folder, f"{clean_title or f'paper_{idx}'}.txt")

            # Fix: write the locally-resolved fallback values instead of
            # re-indexing paper['...'] directly, which raised KeyError on the
            # very missing-field case the .get() calls above were guarding.
            # Also: explicit UTF-8, since titles/summaries are often non-ASCII.
            with open(filename, 'w', encoding='utf-8') as f:
                f.write(f"Title: {title}\n")
                f.write(f"Authors: {', '.join(authors)}\n")
                f.write(f"Link: {link}\n")
                f.write(f"Summary: {summary}\n\n")

        return f"Saved {len(papers)} papers to {folder} directory."
    except Exception as e:
        return f"Error saving papers: {str(e)}"


# Below is an example of a tool that does nothing. Amaze us with your creativity !
@tool
def my_custom_tool(arg1: str, arg2: int) -> str:  # it's important to specify the return type
    # Keep this format for the description / args / args description but feel free to modify the tool
    """A tool that does nothing yet

    Args:
        arg1: the first argument
        arg2: the second argument
    """
    return "What magic will you build ?"


@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.

    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Create timezone object
        tz = pytz.timezone(timezone)
        # Get current time in that timezone
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"


final_answer = FinalAnswerTool()

# If the agent does not answer, the model is overloaded; use another model or the
# following Hugging Face Endpoint that also contains qwen2.5 coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'

# Alternative backends kept for reference:
# model = HfApiModel(
#     max_tokens=2096,
#     temperature=0.5,
#     model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
#     # model_id='google/gemma-3-1b-it',
#     custom_role_conversions=None,
# )
# model = LiteLLMModel(
#     model_id="ollama/qwen2:7b",
#     api_base="http://0.0.0.0:11434",
#     num_ctx=8192,
# )

model = LiteLLMModel(
    model_id="gemini/gemini-2.0-flash-lite",
    # api_base="https://generativelanguage.googleapis.com/v1beta/models",
    api_key=api_key,
    num_ctx=8192,
)

# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[final_answer, search_arxiv_papers, save_papers_to_folder],  # add your tools here (don't remove final answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)

GradioUI(agent).launch()