# VictorPerezCarrera's picture
# Upload app.py
# 2d08b53 verified
from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
# Fallback pool of Hugging Face Inference Endpoint URLs for the agent model.
# They are probed in order; the first endpoint that is not overloaded is used
# (see get_available_model).
MODEL_IDS = [
    'https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',
    'https://wxknx1kg971u7k1n.us-east-1.aws.endpoints.huggingface.cloud',
    'https://jc26mwg228mkj8dw.us-east-1.aws.endpoints.huggingface.cloud',
]
def is_model_overloaded(model_url):
    """Return True if the endpoint at *model_url* is overloaded or unreachable.

    Sends a tiny test inference request. A 503 (Service Unavailable) response
    or any request-level failure (connection error, timeout, ...) is treated
    as "overloaded" so the caller will try the next endpoint.
    """
    try:
        # Bounded timeout so a hung endpoint cannot stall startup forever;
        # a timeout raises requests.RequestException and falls through below.
        response = requests.post(model_url, json={"inputs": "Test"}, timeout=10)
        # 503 Service Unavailable = endpoint overloaded.
        return response.status_code == 503
    except requests.RequestException:
        # Any network error counts as unavailable.
        return True
def get_available_model():
    """Return the first endpoint URL in MODEL_IDS that is not overloaded.

    If every endpoint fails the availability check, fall back to the first
    entry in the list by default.
    """
    candidates = (url for url in MODEL_IDS if not is_model_overloaded(url))
    return next(candidates, MODEL_IDS[0])
# Selecting one available model endpoint from the fallback pool.
selected_model_id = get_available_model()

final_answer = FinalAnswerTool()

# NOTE(review): max_tokens=2096 looks like a typo for 2048 — confirm intent.
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id=selected_model_id,
    custom_role_conversions=None,
)

# Import the text-to-image tool from the Hugging Face Hub.
# NOTE: trust_remote_code=True executes code published in that Hub repo —
# only acceptable because the repo is trusted course material.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load the agent's prompt templates from the local YAML file.
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)
# Assemble the CodeAgent from the selected model, tools, and prompts.
agent = CodeAgent(
    model=model,
    tools=[final_answer, image_generation_tool],  # Add your tools here (don't remove final answer)
    max_steps=6,  # hard cap on reasoning/acting iterations per query
    verbosity_level=1,
    grammar=None,
    planning_interval=None,  # no periodic re-planning step
    name=None,
    description=None,
    prompt_templates=prompt_templates
)

# Start the Gradio web UI wrapping the agent (blocks until shut down).
GradioUI(agent).launch()