File size: 1,278 Bytes
98ad884
9b5b26a
 
 
c19d193
6aae614
8fe992b
9b5b26a
 
 
98ad884
 
9b5b26a
 
98ad884
9b5b26a
 
98ad884
9b5b26a
 
 
8c01ffb
6aae614
e4bd8a8
eda4b2d
98ad884
 
 
13d500a
8c01ffb
98ad884
8c01ffb
861422e
 
98ad884
 
8fe992b
98ad884
8c01ffb
 
 
 
 
 
861422e
8fe992b
 
98ad884
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
from smolagents import CodeAgent, DuckDuckGoSearchTool, FinalAnswerTool, InferenceClientModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool

from Gradio_UI import GradioUI

@tool
def my_custom_tool(arg1:str, arg2:int)->str:
    """A placeholder custom tool that echoes back its two arguments.

    Args:
        arg1: The first argument (a string).
        arg2: The second argument (an integer).
    """
    # Bug fix: the original had a bare `return` (returning None) despite the
    # `-> str` annotation, and smolagents' @tool decorator rejects a tool
    # without a docstring describing its arguments. Return a real string so
    # the tool is usable and matches its declared return type.
    return f"my_custom_tool received arg1={arg1!r} and arg2={arg2}"

@tool
def get_current_time_in_timezone(timezone:str)->str:
    """A tool that fetches the current local time in a specified timezone.

    Args:
        timezone: A string representing a valid IANA timezone name
            (e.g. 'America/New_York'). Required: smolagents' @tool decorator
            needs this docstring to register the tool's argument schema.
    """
    try:
        # pytz.timezone raises UnknownTimeZoneError (an Exception subclass)
        # for invalid names, which the except below turns into an error string.
        tz = pytz.timezone(timezone)
        # Wall-clock time in the requested zone, e.g. "2024-01-31 13:45:00".
        local_time=datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        # Deliberate best-effort: report the failure as a string rather than
        # raising, so the agent can read the error and retry.
        return f"Error fetching time for timezone '{timezone}': {str(e)}"

# --- Agent wiring (script top level) ---

# Tool the agent calls to deliver its final answer to the user.
final_answer = FinalAnswerTool()

# Hugging Face Inference API model backing the agent.
model = InferenceClientModel(
    max_tokens=2096,  # NOTE(review): 2096 looks like a typo for 2048 — confirm intent
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    custom_role_conversions=None,
)

# Text-to-image tool loaded from the Hub; trust_remote_code executes Hub code,
# acceptable here only because the repo is the official course tool.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# System/plan prompt templates consumed by CodeAgent.
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    # Bug fix: the CodeAgent parameter is `tools` (plural); the original
    # `tool=[final_answer]` would raise a TypeError at construction.
    tools=[final_answer],
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)

# Bug fix: the original referenced `.launch` without calling it, so the
# Gradio UI never actually started. Add the missing call parentheses.
GradioUI(agent).launch()