# NOTE(review): removed non-Python viewer artifacts here (Spaces header,
# "Runtime error" banner, commit hashes, line-number gutter) — they were
# paste residue and made this file unparseable.
from smolagents import CodeAgent, DuckDuckGoSearchTool, FinalAnswerTool, InferenceClientModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
@tool
def my_custom_tool(arg1: str, arg2: str) -> str:
    """A tool that does nothing yet.

    Args:
        arg1: the first argument
        arg2: the second argument
    """
    # Placeholder implementation — replace with real logic.
    # Fixed docstring: 'arg2"' lacked the colon smolagents needs to parse Args,
    # and "nothjing"/"maging" were typos.
    return "What magic will you build ?"
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.

    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York')
    """
    try:
        # pytz raises UnknownTimeZoneError for invalid names; caught below so
        # the agent gets an error string instead of an exception.
        tz = pytz.timezone(timezone)
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        # Fixed: the original message had a stray trailing apostrophe.
        return f"Error fetching timezone '{timezone}': {str(e)}"
final_answer = FinalAnswerTool()

# Text-generation backend for the agent.
model = InferenceClientModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    custom_role_conversions=None,  # fixed typo: was 'custom_role_conventions'
)
# Load the agent's prompt templates; fixed filename typo ("promps.yaml").
with open("prompts.yaml", "r") as stream:
    prompt_templates = yaml.safe_load(stream)
# Assemble the code-executing agent.
agent = CodeAgent(
    model=model,
    # NOTE(review): my_custom_tool and get_current_time_in_timezone are defined
    # above but never registered; add them here if the agent should call them.
    tools=[final_answer],
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,  # fixed typo: was 'prompt_template'
)
# Launch the Gradio web UI. 'GradIO' was an undefined name (NameError);
# the smolagents Space template ships a Gradio_UI.py exposing GradioUI —
# TODO confirm the module name matches this repository.
from Gradio_UI import GradioUI

GradioUI(agent).launch()