"""Grady - General AI Assistant: Gradio UI entry point.

Provides a single-question interface backed by a CrewAI crew, with GAIA
benchmark example problems, and launches as an MCP server.
"""
import os, threading
import gradio as gr
from crew import run_parallel_crew
from crew import run_crew
from utils import get_questions
# Module-level lock: the API keys travel through process-global os.environ,
# so concurrent Gradio requests must not interleave their set/unset cycles.
# (A lock created inside the function would be per-call and guard nothing.)
_ENV_LOCK = threading.Lock()

def ask(question, openai_api_key, gemini_api_key, anthropic_api_key, file_name = ""):
    """
    Ask the General AI Assistant a question to answer.

    Args:
        question (str): The question to answer
        openai_api_key (str): OpenAI API key
        gemini_api_key (str): Gemini API key
        anthropic_api_key (str): Anthropic API key
        file_name (str): Optional name of a file under ``data/`` that the
            question refers to

    Returns:
        str: The answer to the question

    Raises:
        gr.Error: If a required input is missing or the crew run fails.
    """
    if not question:
        raise gr.Error("Question is required.")
    if not openai_api_key:
        raise gr.Error("OpenAI API Key is required.")
    if not gemini_api_key:
        raise gr.Error("Gemini API Key is required.")
    if not anthropic_api_key:
        raise gr.Error("Anthropic API Key is required.")
    # Build the data path without mutating the caller-visible parameter.
    file_path = f"data/{file_name}" if file_name else ""
    with _ENV_LOCK:
        try:
            os.environ["OPENAI_API_KEY"] = openai_api_key
            os.environ["GEMINI_API_KEY"] = gemini_api_key
            os.environ["MODEL_API_KEY"] = anthropic_api_key
            #answer = run_parallel_crew(question, file_path)
            return run_crew(question, file_path)
        except Exception as e:
            # gr.Error expects a message string; chain the cause for logs.
            raise gr.Error(str(e)) from e
        finally:
            # pop() with a default never raises, even if an assignment
            # above failed before all three variables were set.
            os.environ.pop("OPENAI_API_KEY", None)
            os.environ.pop("GEMINI_API_KEY", None)
            os.environ.pop("MODEL_API_KEY", None)
# Shut down any Gradio servers left over from a previous (hot-reload) run.
gr.close_all()

with gr.Blocks() as grady:
    gr.Markdown("## Grady - General AI Assistant")
    with gr.Tab("Solution"):
        # Intro text comes from the Space-level DESCRIPTION env var.
        gr.Markdown(os.environ.get("DESCRIPTION"))
        with gr.Row():
            with gr.Column(scale=3):
                with gr.Row():
                    question = gr.Textbox(
                        label="Question *",
                        placeholder="In the 2025 Gradio Agents & MCP Hackathon, what percentage of participants submitted a solution during the last 24 hours?",
                        interactive=True
                    )
                with gr.Row():
                    # Level / ground truth / file name are filled by the
                    # example tables below; only `question` and `file_name`
                    # feed the actual `ask` call.
                    level = gr.Radio(
                        choices=[1, 2, 3],
                        label="GAIA Benchmark Level",
                        interactive=True,
                        scale=1
                    )
                    ground_truth = gr.Textbox(
                        label="Ground Truth",
                        interactive=True,
                        scale=1
                    )
                    file_name = gr.Textbox(
                        label="File Name",
                        interactive=True,
                        scale=2
                    )
                with gr.Row():
                    openai_api_key = gr.Textbox(
                        label="OpenAI API Key *",
                        type="password",
                        placeholder="sk-...",  # was a non-ASCII U+2011 hyphen
                        interactive=True
                    )
                    gemini_api_key = gr.Textbox(
                        label="Gemini API Key *",
                        type="password",
                        interactive=True
                    )
                    anthropic_api_key = gr.Textbox(
                        label="Anthropic API Key *",
                        type="password",
                        placeholder="sk-ant-...",
                        interactive=True
                    )
                with gr.Row():
                    clear_btn = gr.ClearButton(
                        components=[question, level, ground_truth, file_name]
                    )
                    submit_btn = gr.Button("Submit", variant="primary")
            with gr.Column(scale=1):
                answer = gr.Textbox(
                    label="Answer",
                    lines=1,
                    interactive=False
                )
        submit_btn.click(
            fn=ask,
            inputs=[question, openai_api_key, gemini_api_key, anthropic_api_key, file_name],
            outputs=answer
        )
        QUESTION_FILE_PATH = "data/gaia_validation.jsonl"
        # One examples table per GAIA benchmark level (identical wiring).
        for lvl in (1, 2, 3):
            gr.Examples(
                label=f"GAIA Benchmark Level {lvl} Problems",
                examples=get_questions(QUESTION_FILE_PATH, lvl),
                inputs=[question, level, ground_truth, file_name, openai_api_key, gemini_api_key, anthropic_api_key],
                outputs=answer,
                cache_examples=False
            )
    with gr.Tab("Documentation"):
        gr.Markdown(os.environ.get("DOCUMENTATION"))

# mcp_server=True also exposes `ask` as an MCP tool for agent clients.
# (Removed stray trailing " |" that made this line a syntax error.)
grady.launch(mcp_server=True)