from smolagents import CodeAgent, HfApiModel, load_tool, tool
from tools.final_answer import FinalAnswerTool
from duckduckgo_search import DDGS
import datetime
import pytz
import yaml
from Gradio_UI import GradioUI
@tool
def get_current_weather(city: str) -> str:
    """
    Get current weather for a city using the Open-Meteo API.

    Args:
        city (str): The city name.

    Returns:
        str: Current temperature and wind speed, or an error message if
        the city cannot be geocoded or a request fails.
    """
    import requests

    # Resolve the city name to coordinates via Open-Meteo's geocoding API.
    geo_url = f"https://geocoding-api.open-meteo.com/v1/search?name={city}"
    try:
        geo_response = requests.get(geo_url, timeout=10).json()
    except requests.RequestException as exc:
        return f"Could not look up city: {exc}"
    # "results" is absent (or can be empty) when the geocoder finds no match.
    if not geo_response.get("results"):
        return "City not found."
    lat = geo_response["results"][0]["latitude"]
    lon = geo_response["results"][0]["longitude"]

    # Fetch current conditions. BUG FIX: the original URL contained the
    # mojibake "¤t_weather" — "&curren" had been rendered as the HTML
    # entity ¤ — which broke the query string and silently dropped the
    # current_weather flag. Restored to "&current_weather=true".
    weather_url = (
        f"https://api.open-meteo.com/v1/forecast?"
        f"latitude={lat}&longitude={lon}&current_weather=true"
    )
    try:
        weather_data = requests.get(weather_url, timeout=10).json()
    except requests.RequestException as exc:
        return f"Could not fetch weather: {exc}"
    current = weather_data.get("current_weather", {})
    return (
        f"Current weather in {city}: "
        f"{current.get('temperature')}°C, "
        f"Wind {current.get('windspeed')} km/h"
    )
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """
    Report the current wall-clock time in a given IANA timezone.

    Args:
        timezone (str): A valid timezone string such as 'America/New_York'.

    Returns:
        str: The current local time formatted as YYYY-MM-DD HH:MM:SS.
    """
    zone = pytz.timezone(timezone)
    now = datetime.datetime.now(zone)
    stamp = now.strftime("%Y-%m-%d %H:%M:%S")
    return f"The current local time in {timezone} is {stamp}"
# Tool the agent calls to hand its final answer back to the user.
final_answer = FinalAnswerTool()
# Hosted inference model; Qwen2.5-Coder is a code-generation model,
# matching smolagents' code-acting agent loop.
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
)
# Prompt templates for the agent are kept in a separate YAML file.
with open("prompts.yaml", "r") as stream:
    prompt_templates = yaml.safe_load(stream)
# Wire the two custom tools plus the final-answer tool into a CodeAgent.
agent = CodeAgent(
    model=model,
    tools=[get_current_weather, get_current_time_in_timezone, final_answer],
    max_steps=6,  # cap on the reasoning/tool-call loop
    verbosity_level=1,
    prompt_templates=prompt_templates
)
# Launch the Gradio web UI; runs when the script is executed.
GradioUI(agent).launch()