# NOTE(review): removed Hugging Face Spaces page-scrape residue that preceded the
# code (status text, file size, commit-hash column, and line-number gutter) —
# it was not Python and broke the file.
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool,LiteLLMModel
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from tools.visit_webpage import VisitWebpageTool
from tools.web_search import DuckDuckGoSearchTool
from Gradio_UI import GradioUI
# Below is an example of a tool that does nothing. Amaze us with your creativity !
@tool
def compare_tool(arg1: str, arg2: str) -> str:  # it's important to specify the return type
    # Keep this format for the description / args / args description but feel free to modify the tool
    """A tool comparing two values: numerically when both parse as floats,
    lexicographically otherwise.

    Args:
        arg1: the first argument
        arg2: the second argument
    """
    try:
        # Prefer numeric comparison when both arguments are parseable numbers.
        t_first = float(arg1)
        t_second = float(arg2)
    except ValueError:  # was a bare `except:` — only catch the parse failure
        # Fall back to comparing the raw strings lexicographically.
        t_first = arg1
        t_second = arg2
    # Fixed: original concatenated `" is" + "more "` producing "ismore"/"isless".
    # Equal values report "less", matching the original's two-way branch.
    comparison = "more" if t_first > t_second else "less"
    return f"{t_first} is {comparison} than {t_second}"
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.
    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').
    """
    # Docstring above is left untouched on purpose: smolagents uses it at runtime
    # as the tool description shown to the model.
    try:
        zone = pytz.timezone(timezone)          # raises UnknownTimeZoneError on bad names
        now = datetime.datetime.now(zone)       # timezone-aware current time
        stamp = now.strftime("%Y-%m-%d %H:%M:%S")
    except Exception as err:
        return f"Error fetching time for timezone '{timezone}': {str(err)}"
    return f"The current local time in {timezone} is: {stamp}"
# Instantiate the tools the agent will be able to call.
# NOTE(review): DuckDuckGoSearchTool is imported both from `smolagents` and from
# `tools.web_search` above; the local one wins — confirm that is intentional.
final_answer = FinalAnswerTool()
web_search = DuckDuckGoSearchTool()
visit_webpage = VisitWebpageTool()

# If the agent does not answer, the model is overloaded, please use another model
# or the following Hugging Face Endpoint that also contains qwen2.5 coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = LiteLLMModel(
    model_id="ollama_chat/hf.co/unsloth/Qwen2.5-Coder-7B-Instruct-128K-GGUF:Q4_K_M",
    # model_id="ollama_chat/qwen2.5:7b-instruct",  # Or try other Ollama-supported models
    # NOTE(review): Ollama's default port is 11434, not 4040 — confirm this
    # points at a proxy/tunnel and is not a typo.
    api_base="http://127.0.0.1:4040",
    num_ctx=8192,  # context window requested from the local server
)
# Alternative: hosted Hugging Face inference instead of local Ollama.
# model = HfApiModel(
#     max_tokens=2096,
#     temperature=0.5,
#     model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
#     model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
#     custom_role_conversions=None,
# )

# Import tool from Hub (text-to-image). trust_remote_code runs the tool's own
# code from the Hub repo — only acceptable because the source is a known course repo.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load the prompt templates that CodeAgent uses to steer the model.
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[final_answer, web_search, visit_webpage, compare_tool, get_current_time_in_timezone, image_generation_tool],  ## add your tools here (don't remove final answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)

# Launch the Gradio web UI for the agent (fixed: original line ended with a
# stray ` |` left over from page extraction, which is a syntax error).
GradioUI(agent).launch()