Spaces:
Sleeping
Sleeping
File size: 3,765 Bytes
10e9b7d b86492c be2ea07 80601e2 10e9b7d be2ea07 dda189c be2ea07 3db6293 e80aab9 80601e2 31243f4 be2ea07 4289245 31243f4 be2ea07 4289245 be2ea07 4289245 be2ea07 3c83ee3 be2ea07 3c83ee3 be2ea07 3c83ee3 be2ea07 3c83ee3 80601e2 6e31989 be2ea07 3c83ee3 3c4371f be2ea07 80601e2 3c83ee3 e80aab9 31243f4 be2ea07 7d65c66 31243f4 7d65c66 be2ea07 e80aab9 be2ea07 80601e2 be2ea07 6e31989 3c83ee3 31243f4 be2ea07 31243f4 3c83ee3 be2ea07 3c83ee3 6e31989 e80aab9 be2ea07 3c83ee3 7d65c66 3c83ee3 e80aab9 be2ea07 e80aab9 3c83ee3 7e4a06b 3c83ee3 be2ea07 80601e2 e80aab9 80601e2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 |
import os
import time
import requests
import gradio as gr
import pandas as pd
# Gemini API configuration: the key is read from the environment; the model
# and endpoint are pinned to the v1 generateContent REST route.
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
GEMINI_MODEL = "gemini-2.5-flash"
GEMINI_API_URL = f"https://generativelanguage.googleapis.com/v1/models/{GEMINI_MODEL}:generateContent"
# Scoring server used by run_and_submit_all to fetch questions and submit answers.
DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
class GeminiAgent:
    """Minimal agent that answers a question via the Gemini generateContent REST API.

    Each call sends a single-turn request and retries up to 3 times on
    rate limiting (HTTP 429) or transient errors, returning a fallback
    error string when all attempts fail.
    """

    def __init__(self):
        # Fail fast at construction time if the API key is missing, rather
        # than on the first request.
        if not GEMINI_API_KEY:
            raise ValueError("❌ Переменная окружения GEMINI_API_KEY не установлена.")

    def __call__(self, question: str) -> str:
        """Return Gemini's text answer for *question* (stripped), or an error string."""
        headers = {"Content-Type": "application/json"}
        params = {"key": GEMINI_API_KEY}
        payload = {
            "contents": [{"parts": [{"text": question}]}]
        }
        for attempt in range(3):
            try:
                # timeout= prevents a hung connection from blocking the whole run
                response = requests.post(
                    GEMINI_API_URL,
                    headers=headers,
                    params=params,
                    json=payload,
                    timeout=60,
                )
                if response.status_code == 429:
                    print(f"⚠️ Попытка {attempt+1}: Превышен лимит. Ждём 5 сек.")
                    time.sleep(5)
                    continue
                response.raise_for_status()
                # Expected response shape: candidates[0].content.parts[0].text
                return response.json()["candidates"][0]["content"]["parts"][0]["text"].strip()
            except (requests.RequestException, KeyError, IndexError, ValueError) as e:
                # network failure, HTTP error status, or unexpected/invalid JSON shape
                print(f"❌ Ошибка: {e}")
                if attempt < 2:  # no point sleeping after the final attempt
                    time.sleep(5)
        return "Ошибка при получении ответа от Gemini API"
def run_and_submit_all(profile: gr.OAuthProfile | None):
    """Fetch all evaluation questions, answer them with GeminiAgent, and submit.

    Args:
        profile: OAuth profile injected by Gradio's login flow, or None when
            the user is not logged in.

    Returns:
        A (status message, results DataFrame or None) tuple for the Gradio UI.
    """
    space_id = os.getenv("SPACE_ID")
    if not profile:
        return "Пожалуйста, войдите в Hugging Face", None
    username = profile.username
    agent_code = f"https://huggingface.co/spaces/{space_id}/tree/main"
    print(f"Пользователь: {username}")

    # Fetch the question list from the scoring server.
    try:
        response = requests.get(f"{DEFAULT_API_URL}/questions", timeout=15)
        response.raise_for_status()
        questions_data = response.json()
    except Exception as e:
        return f"❌ Ошибка при загрузке вопросов: {e}", None

    # Surface a missing-API-key error as a status message instead of an
    # unhandled exception in the UI callback.
    try:
        agent = GeminiAgent()
    except ValueError as e:
        return f"{e}", None

    results_log = []
    answers_payload = []
    for item in questions_data:
        task_id = item.get("task_id")
        question_text = item.get("question")
        if not task_id or not question_text:
            continue  # skip malformed entries
        answer = agent(question_text)
        results_log.append({"Task ID": task_id, "Question": question_text, "Answer": answer})
        answers_payload.append({"task_id": task_id, "submitted_answer": answer})
        time.sleep(3)  # throttle to stay under the Gemini rate limit

    # Submit all answers in one request.
    try:
        submission_data = {
            "username": username,
            "agent_code": agent_code,
            "answers": answers_payload,
        }
        # timeout= keeps a hung scoring server from blocking the UI forever
        # (the questions GET above already uses one).
        submit_response = requests.post(
            f"{DEFAULT_API_URL}/submit", json=submission_data, timeout=60
        )
        submit_response.raise_for_status()
        result = submit_response.json()
        score = result.get("score", "N/A")
        return f"✅ Отправка завершена! Ваш результат: {score}%", pd.DataFrame(results_log)
    except Exception as e:
        return f"❌ Ошибка при отправке ответов: {e}", pd.DataFrame(results_log)
# Gradio UI: login button, a run button, a status textbox, and a table that
# run_and_submit_all fills with per-question results.
with gr.Blocks() as demo:
    gr.Markdown("## Gemini 2.5 Agent 🤖")
    gr.Markdown("Войдите в Hugging Face и нажмите кнопку для запуска агента.")
    gr.LoginButton()  # enables the OAuth login flow on the Space
    run_btn = gr.Button("▶️ Запустить агента")
    status = gr.Textbox(label="Статус")
    table = gr.DataFrame(label="Результаты")
    # No explicit inputs: Gradio supplies the OAuth profile argument itself
    # because run_and_submit_all's parameter is annotated gr.OAuthProfile
    # — NOTE(review): verify against the installed Gradio version's behavior.
    run_btn.click(fn=run_and_submit_all, outputs=[status, table])
# Script entry point: launch the Gradio app when run directly.
# (Removed a stray trailing "|" after demo.launch() that made the line a
# syntax error — a copy/scrape artifact, not code.)
if __name__ == "__main__":
    demo.launch()