|
|
import os |
|
|
import gradio as gr |
|
|
import requests |
|
|
import pandas as pd |
|
|
from smolagents import CodeAgent, DuckDuckGoSearchTool, VisitWebpageTool |
|
|
|
|
|
from smolagents import InferenceClientModel |
|
|
|
|
|
|
|
|
# Base URL of the Agents Course Unit 4 scoring service
# (serves /questions, /files/<task_id>, and /submit endpoints).
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
|
|
|
|
def get_agent():
    """Build and return a web-search-capable ``CodeAgent``.

    The agent runs a Qwen coder model through the HF Inference API
    (token read from the ``HF_TOKEN`` environment variable) and is
    equipped with DuckDuckGo search plus a web-page visiting tool.

    Returns:
        A configured ``smolagents.CodeAgent`` instance.
    """
    llm = InferenceClientModel(
        model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
        token=os.getenv("HF_TOKEN"),
    )

    # Allow the generated code to use common data-wrangling libraries.
    return CodeAgent(
        tools=[DuckDuckGoSearchTool(), VisitWebpageTool()],
        model=llm,
        max_steps=20,
        verbosity_level=1,
        additional_authorized_imports=[
            "pandas",
            "openpyxl",
            "json",
            "csv",
            "datetime",
            "requests",
            "bs4",
        ],
    )
|
|
|
|
|
def run_and_submit_all(profile: gr.OAuthProfile | None):
    """Run the agent on every evaluation question and submit all answers.

    Args:
        profile: The logged-in Hugging Face OAuth profile, or ``None``
            when the user has not authenticated.

    Returns:
        A ``(status_message, results)`` tuple where ``results`` is a
        pandas DataFrame of per-task answers, or ``None`` when nothing
        was run (not logged in / startup error).
    """
    if not profile:
        return "⚠️ Por favor, loguéate arriba.", None
    username = f"{profile.username}"

    api_url = DEFAULT_API_URL
    questions_url = f"{api_url}/questions"
    submit_url = f"{api_url}/submit"
    # Link to this Space's source so the scoring service can record it.
    space_id = os.getenv("SPACE_ID")
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"

    try:
        agent = get_agent()
        print("✅ Agente (InferenceClientModel) iniciado.")
    except Exception as e:
        return f"Error iniciando agente: {e}", None

    try:
        response = requests.get(questions_url, timeout=15)
        # Fail fast on HTTP errors instead of trying to parse an error page.
        response.raise_for_status()
        questions_data = response.json()
    except Exception as e:
        return f"Error descargando preguntas: {e}", None

    if not questions_data:
        return "Error descargando preguntas: lista vacía.", None

    results_log = []
    answers_payload = []

    print(f"🚀 Procesando {len(questions_data)} preguntas...")

    for item in questions_data:
        task_id = item.get("task_id")
        question_text = item.get("question")
        file_name = item.get("file_name")

        local_msg = ""
        if file_name:
            # Some tasks ship an attachment; download it next to the app
            # so the agent's generated code can open it by name.
            try:
                f_url = f"{api_url}/files/{task_id}"
                content = requests.get(f_url, timeout=30).content
                with open(file_name, "wb") as f:
                    f.write(content)
                local_msg = f"\nARCHIVO LOCAL: '{file_name}'."
            except Exception as e:
                # Best-effort: a failed download must not abort the whole
                # run, but log it instead of swallowing it silently.
                print(f"Error descargando archivo de {task_id}: {e}")

        prompt = f"{question_text}{local_msg}\nRESPONDER MUY CORTO. SOLO EL DATO EXACTO."

        try:
            ans = str(agent.run(prompt)).strip()
            answers_payload.append({"task_id": task_id, "submitted_answer": ans})
            results_log.append({"Task ID": task_id, "Answer": ans})
            print(f"Task {task_id}: {ans}")
        except Exception as e:
            print(f"Error Task {task_id}: {e}")
            answers_payload.append({"task_id": task_id, "submitted_answer": "Error"})
            # Keep the displayed table consistent with what gets submitted.
            results_log.append({"Task ID": task_id, "Answer": "Error"})

    submission = {"username": username, "agent_code": agent_code, "answers": answers_payload}

    try:
        res = requests.post(submit_url, json=submission, timeout=60).json()
        return f"🎉 Score: {res.get('score')}%", pd.DataFrame(results_log)
    except Exception as e:
        return f"Error envío: {e}", pd.DataFrame(results_log)
|
|
|
|
|
# Gradio UI: HF login plus a one-click run over every GAIA question.
with gr.Blocks() as demo:
    gr.Markdown("# GAIA Agent - Final Version")
    gr.LoginButton()
    run_button = gr.Button("EJECUTAR")
    status_box = gr.Textbox()
    results_table = gr.DataFrame()
    # The OAuth profile is injected automatically by Gradio from the login.
    run_button.click(run_and_submit_all, outputs=[status_box, results_table])


if __name__ == "__main__":
    demo.launch()