# app.py — smolagents CodeAgent demo for a Hugging Face Space
from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
import requests
from bs4 import BeautifulSoup
from Gradio_UI import GradioUI
# Initialize the Hugging Face web search model
# Used by my_custom_tool below to answer free-text "top sightseeing" queries.
search_model = HfApiModel(
"Intelligent-Internet/II-Search-4B",
return_text=True,  # NOTE(review): confirm HfApiModel accepts return_text/streaming kwargs
streaming=False # ensures the output is plain text
)
@tool
def get_top_sightseeing(city: str, count: int) -> str:
    """Searches the web for top sightseeing locations in a given city.

    Args:
        city: Name of the city.
        count: Number of top sightseeing locations to return.

    Returns:
        A formatted string listing the locations, or a not-found message.
    """
    search_tool = DuckDuckGoSearchTool()
    query = f"top {count} sightseeing locations in {city}"
    results = search_tool.run(query)
    if not results:
        return f"No sightseeing data found for {city}."
    # smolagents' DuckDuckGoSearchTool.run() returns a single formatted string;
    # the original code assumed a list of {'title': ...} dicts and would crash
    # (and results[:count] would slice *characters* of a string). Handle both.
    if isinstance(results, str):
        return f"Top sightseeing locations in {city}:\n{results}"
    numbered = "\n".join(
        f"{i}. {r['title']}" for i, r in enumerate(results[:count], start=1)
    )
    return f"Top sightseeing locations in {city}:\n" + numbered
@tool
def my_custom_tool(city: str, season: str) -> str:  # return type annotation is required by smolagents
    """
    Fetches the top 5 sightseeing events in a city for a given season using Hugging Face II-Search.

    Args:
        city: Name of the city (e.g., "Paris").
        season: Season or time of year (e.g., "summer").

    Returns:
        A formatted string listing the top 5 sightseeing events.
    """
    # Query the module-level search model.
    response_text = search_model(f"Top 5 sightseeing events in {city} during {season}")
    # NOTE(review): smolagents models may return a message object rather than a
    # plain string — coerce defensively so .split() below cannot fail.
    text = response_text if isinstance(response_text, str) else str(response_text)
    # Drop blank lines so padding in the model output doesn't consume top-5 slots
    # (the original enumerated raw lines, so empty lines could be listed as events).
    events = [line.strip() for line in text.split("\n") if line.strip()]
    # Format output as a readable numbered list.
    output = f"Top 5 sightseeing events in {city} during {season}:\n"
    for i, event in enumerate(events[:5], start=1):
        output += f"{i}. {event}\n"
    return output
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """A tool that fetches the current local time in a specified timezone.

    Args:
        timezone: A string representing a valid timezone (e.g., 'America/New_York').

    Returns:
        A sentence with the current local time, or an error message if the
        timezone name is not recognized.
    """
    # Stdlib zoneinfo (Python 3.9+) replaces the third-party pytz dependency;
    # imported locally so this edit is self-contained.
    from zoneinfo import ZoneInfo

    try:
        # Create timezone object; raises for unknown IANA names.
        tz = ZoneInfo(timezone)
        # Get current time in that timezone
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"
final_answer = FinalAnswerTool()

# If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # it is possible that this model may be overloaded
    custom_role_conversions=None,
)

# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

with open("prompts.yaml", 'r', encoding="utf-8") as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    # Register the tools defined above: they were previously defined but never
    # handed to the agent, so it could only ever call final_answer.
    tools=[
        final_answer,  # don't remove final_answer
        get_top_sightseeing,
        my_custom_tool,
        get_current_time_in_timezone,
        image_generation_tool,
    ],
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)

# Launch the UI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    GradioUI(agent).launch()