File size: 2,117 Bytes
edc711d
9b5b26a
 
 
c19d193
6aae614
9b5b26a
1089234
 
9b5b26a
edc711d
9b5b26a
edc711d
 
9b5b26a
edc711d
 
9b5b26a
edc711d
9b5b26a
edc711d
9b5b26a
 
 
 
 
 
 
 
 
 
 
 
8c01ffb
edc711d
6aae614
ae7a494
1089234
4b401e5
 
 
1089234
 
4b401e5
e121372
1089234
edc711d
4b401e5
13d500a
4b401e5
edc711d
9b5b26a
8c01ffb
edc711d
861422e
 
edc711d
 
8c01ffb
8fe992b
edc711d
ec4df9d
47ad282
8c01ffb
 
 
 
861422e
8fe992b
 
edc711d
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
import os 


# Demo custom tool: echoes back both arguments in a single message.
@tool
def my_custom_tool(arg1: str, arg2: int) -> str:
    """A tool that combines a string and an integer and returns a formatted message.
    Args:
        arg1: The first argument as a string.
        arg2: The second argument as an integer.
    """
    # Same output text as before, assembled via str.format instead of an f-string.
    return "Your custom tool received: '{}' and the number {}.".format(arg1, arg2)

# Tool to get current time in a specific timezone
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    try:
        # pytz.timezone() is the only call here that predictably raises for
        # bad input; catch its specific error instead of a blanket Exception
        # so genuine bugs (e.g. typos in this function) are not swallowed.
        tz = pytz.timezone(timezone)
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except pytz.UnknownTimeZoneError as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"

# --- Module-level setup: tools, model, agent, and UI launch ---

# Final answer tool (required by smolagents agents to terminate a run)
final_answer = FinalAnswerTool()

# Model setup

# Ensure HF token is set in the environment.
# NOTE: os.environ values must be strings; the previous
# `os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")` crashed with an opaque
# TypeError when the variable was unset. Fail fast with a clear message instead.
hf_token = os.getenv("HF_TOKEN")
if hf_token is None:
    raise RuntimeError(
        "HF_TOKEN environment variable is not set; it is required to call the "
        "Hugging Face Inference API."
    )
os.environ["HF_TOKEN"] = hf_token

# Model setup (without api_key; HfApiModel reads HF_TOKEN from the environment)
model = HfApiModel(
    max_tokens=1024,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct'
)

# Import external tool from Hugging Face Hub.
# SECURITY NOTE: trust_remote_code=True executes code downloaded from the Hub;
# only keep this for repositories you trust (here: the official agents-course tool).
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load prompt templates (explicit encoding so parsing does not depend on locale)
with open("prompts.yaml", 'r', encoding="utf-8") as stream:
    prompt_templates = yaml.safe_load(stream)

# Create the agent with the added tools
agent = CodeAgent(
    model=model,
    tools=[final_answer, my_custom_tool, DuckDuckGoSearchTool(), image_generation_tool],
    max_steps=3,
    verbosity_level=0,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates
)

# Launch Gradio UI
GradioUI(agent).launch()