# micposso's picture
# add a tool
# bc40562 verified
from smolagents import CodeAgent, DuckDuckGoSearchTool, InferenceClientModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
# Below is an example of a tool that does nothing. Amaze us with your creativity!
import requests
from smolagents import tool
@tool
def city_team_weather(city: str, team: str) -> str:
    """
    Provides a fun weather-and-sports mashup based on the city and team name.

    Looks up the city's coordinates via the Open-Meteo geocoding API, fetches
    the current temperature, and returns a playful one-liner tying it to the team.

    Args:
        city: The city to get the weather for (e.g., "Boston")
        team: The sports team name (e.g., "Red Sox")

    Returns:
        A human-readable sentence with the current temperature, or an error message.
    """
    try:
        # Geocode the city first — Open-Meteo's forecast endpoint needs lat/lon.
        # params= URL-encodes city names containing spaces/accents correctly.
        geo_resp = requests.get(
            "https://geocoding-api.open-meteo.com/v1/search",
            params={"name": city, "count": 1},
            timeout=10,  # don't let a slow network hang the agent step
        ).json()
        if not geo_resp.get("results"):
            return f"Could not find weather data for {city}."
        lat = geo_resp["results"][0]["latitude"]
        lon = geo_resp["results"][0]["longitude"]
        # Open-Meteo current-weather lookup (no API key required).
        weather_resp = requests.get(
            "https://api.open-meteo.com/v1/forecast",
            params={"latitude": lat, "longitude": lon, "current_weather": "true"},
            timeout=10,
        ).json()
        temperature = weather_resp["current_weather"]["temperature"]
        return (
            f"In {city}, it's currently {temperature}°C. "
            f"The {team} are heating up too—maybe it's the weather, maybe it's just game day vibes!"
        )
    except Exception as e:
        # Best-effort tool: surface the failure as text so the agent can react
        # instead of crashing the run.
        return f"Error retrieving data: {str(e)}"
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.

    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # Resolve the IANA timezone name to a pytz tzinfo object.
        zone = pytz.timezone(timezone)
        # Format the zone-local "now" as a readable timestamp.
        now_there = datetime.datetime.now(zone)
        stamp = now_there.strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {stamp}"
    except Exception as e:
        # Unknown zone names raise inside pytz; report instead of crashing the agent.
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
final_answer = FinalAnswerTool()

model = InferenceClientModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    custom_role_conversions=None,
)

# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load system prompt from prompts.yaml file
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    # Register every tool defined/loaded above; previously only final_answer was
    # passed, so the custom tools were dead code. final_answer must stay in the list.
    tools=[
        final_answer,
        city_team_weather,
        get_current_time_in_timezone,
        image_generation_tool,
        DuckDuckGoSearchTool(),
    ],
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,  # Pass system prompt to CodeAgent
)

GradioUI(agent).launch()