# app.py
from smolagents import CodeAgent, DuckDuckGoSearchTool, load_tool, tool, InferenceClientModel
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool

# Weather-specific imports
import openmeteo_requests
import pandas as pd
import requests_cache
from retry_requests import retry

from Gradio_UI import GradioUI

# Example tool
@tool
def hourly_temperature(latitude: float, longitude: float) -> str:
    """Fetch the hourly 2 m temperature forecast from the Open-Meteo API.

    Args:
        latitude: Latitude of the location
        longitude: Longitude of the location

    Returns:
        A comma-separated string of hourly temperatures (°C) for the
        forecast window returned by the API.
    """
    # Cached session (1 h TTL) plus retry wrapper: avoids hammering the API
    # on repeated agent calls and rides out transient network failures.
    cache_session = requests_cache.CachedSession('.cache', expire_after=3600)
    retry_session = retry(cache_session, retries=5, backoff_factor=0.2)
    openmeteo = openmeteo_requests.Client(session=retry_session)

    url = "https://api.open-meteo.com/v1/forecast"
    params = {
        "latitude": latitude,
        "longitude": longitude,
        "hourly": "temperature_2m",
    }
    # weather_api() returns a *list* of responses (one per requested
    # location). The previous code called .Hourly() on the list itself,
    # which raises AttributeError — take the first (only) response.
    responses = openmeteo.weather_api(url, params=params)
    hourly = responses[0].Hourly()
    hourly_temperature_2m = hourly.Variables(0).ValuesAsNumpy()

    # The tool contract declares `-> str`, but the previous code returned a
    # NumPy array. Serialize so the annotation (and the agent framework's
    # expectation of text output) actually holds.
    return ", ".join(f"{t:.1f}" for t in hourly_temperature_2m)

@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """Fetch current local time for a given timezone.

    Args:
        timezone: A string representing a valid timezone (e.g., 'Africa/Kigali').

    Returns:
        A sentence containing the formatted local time, or an error message
        when the timezone name is not recognized.
    """
    try:
        tz = pytz.timezone(timezone)
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except pytz.UnknownTimeZoneError as e:
        # Narrowed from a blanket `except Exception`: an unknown timezone
        # name is the only expected failure here; any other exception is a
        # programming error and should surface instead of being swallowed.
        return f"Error fetching time for timezone '{timezone}': {str(e)}"


# Final answer tool
# FinalAnswerTool lets the agent emit its terminal answer; CodeAgent below
# requires it in the tools list.
final_answer = FinalAnswerTool()

# Create model using latest smolagents class
# Serverless Hugging Face inference endpoint. temperature=0.5 trades some
# determinism for variety; max_new_tokens caps each generation at 2048.
model = InferenceClientModel(
    model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    temperature=0.5,
    max_new_tokens=2048
)

# Load additional tool from Hub
# NOTE(review): trust_remote_code=True executes code fetched from the Hub
# repo — acceptable only while the source ("agents-course") is trusted.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load prompt templates
# prompts.yaml must live next to this script; safe_load avoids executing
# arbitrary YAML tags.
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

# Create agent
# CodeAgent writes and executes Python to call its tools; final_answer must
# be in the tools list so a run can terminate.
agent = CodeAgent(
    model=model,
    tools=[final_answer, hourly_temperature, get_current_time_in_timezone, image_generation_tool],
    max_steps=6,  # hard cap on think/act iterations per query
    verbosity_level=1,
    #grammar=None,
    planning_interval=None,  # no periodic re-planning step
    name="DormAI",
    description="AI agent for dorm management",
    prompt_templates=prompt_templates
)

# Test the tools
# NOTE(review): both return values are discarded, so these lines only
# smoke-test that the calls don't raise at import time; consider printing
# or logging the results (or removing the calls) before deployment.
hourly_temperature(32.9, 15)
get_current_time_in_timezone('Africa/Kigali')

# Launch UI
# Blocks here serving the Gradio web interface for the agent.
GradioUI(agent).launch()