|
|
|
|
|
|
|
|
|
|
|
|
|
|
import gradio.utils, os |
|
|
import gradio as gr |
|
|
from agents.crew import run_crew |
|
|
from utils.utils import ( |
|
|
DATASET_TYPE_GAIA, |
|
|
DATASET_TYPE_HLE, |
|
|
get_dataset |
|
|
) |
|
|
|
|
|
|
|
|
|
|
|
# Hugging Face Space identifier (e.g. "user/space-name").
# NOTE(review): when running outside a Space this is None, and BASE_URL then
# contains the literal text "None" -- confirm that is acceptable for local runs.
SPACE_ID = os.getenv("SPACE_ID")

# Root URL for linking to files in this Space's repository.
BASE_URL = "https://huggingface.co/spaces/" + str(SPACE_ID) + "/blob/main"
|
|
|
|
|
|
|
|
|
|
|
def _resolve_key(provided, env_var):
    """Resolve an API key from user input or the environment.

    Args:
        provided (str): The key typed into the UI; "*" is the sentinel
            meaning "use the Space's own key from the environment".
        env_var (str): Name of the environment variable to fall back to.

    Returns:
        str | None: The resolved key, or None if neither source has one.
    """
    if provided and provided != "*":
        return provided
    return os.environ.get(env_var)


def ask(question, openai_api_key, gemini_api_key, anthropic_api_key, file_name=""):
    """
    Ask General AI Assistant a question to answer.

    Args:
        question (str): The question to answer
        openai_api_key (str): OpenAI API key, or "*" to use the Space's
            OPENAI_API_KEY environment variable (always used)
        gemini_api_key (str): Gemini API key, or "*" to use the Space's
            GEMINI_API_KEY environment variable (always used)
        anthropic_api_key (str): Anthropic API key, or "*" to use the Space's
            ANTHROPIC_API_KEY environment variable (only used by Stagehand tool)
        file_name (str): Optional file name (resolved under "files/")

    Returns:
        str: The answer to the question, or "" on missing input or error.
    """
    # Guard clauses: surface a UI warning and bail out early on missing input.
    required = (
        (question, "Question is required."),
        (openai_api_key, "OpenAI API Key is required."),
        (gemini_api_key, "Gemini API Key is required."),
        (anthropic_api_key, "Anthropic API Key is required."),
    )
    for value, message in required:
        if not value:
            gr.Warning(message)
            return ""

    if file_name:
        file_name = f"files/{file_name}"

    try:
        # "*" is the sentinel meaning "use the key configured in the env".
        keys = {
            "OPENAI_API_KEY": _resolve_key(openai_api_key, "OPENAI_API_KEY"),
            "GEMINI_API_KEY": _resolve_key(gemini_api_key, "GEMINI_API_KEY"),
            "ANTHROPIC_API_KEY": _resolve_key(anthropic_api_key, "ANTHROPIC_API_KEY"),
        }

        for env_var, key in keys.items():
            # Bug fix: the original assigned the fallback unconditionally, so a
            # "*" input with no corresponding env var executed
            # os.environ[...] = None, raising a TypeError that surfaced as a
            # cryptic warning. Report the missing key explicitly instead.
            if not key:
                gr.Warning(f"{env_var} is not configured.")
                return ""
            os.environ[env_var] = key

        return run_crew(question, file_name)
    except Exception as e:
        # Top-level UI boundary: report the error to the user, don't crash.
        gr.Warning(str(e))
        return ""
|
|
|
|
|
|
|
|
|
|
|
def update_file_link(file_name):
    """Return an HTML link to *file_name* in this Space's repo, or "" if blank."""
    if not file_name:
        return ""
    return f"<a href='{BASE_URL}/files/{file_name}' target='_blank'>Open File</a>"
|
|
|
|
|
def watchfn(*args, **kwargs):
    """No-op stand-in accepting any arguments and returning None.

    NOTE(review): installed over gradio.utils.watchfn_spaces below, which
    presumably disables Gradio's Spaces file-watch/auto-reload loop --
    confirm against the gradio.utils implementation.
    """
    return None


# Monkey-patch Gradio so the no-op runs instead of the built-in watcher.
gradio.utils.watchfn_spaces = watchfn
|
|
|
|
|
|
|
|
|
|
|
# Intro blurb rendered via gr.Markdown at the top of the UI.
# Bug fix: removed a stray closing </p> that had no matching opening tag;
# also dropped f-prefixes from segments containing no placeholders.
DESCRIPTION = (
    "<strong>Multi-agent, multi-modal, multi-model AI platform</strong> with high agency, "
    "including code generation & execution, browser automation, and multi-modal reasoning. "
    "The system can solve <a href='https://arxiv.org/pdf/2311.12983'>GAIA Benchmark</a> "
    "Level 1, 2, 3 and <a href='https://arxiv.org/pdf/2501.14249'>Humanity's Last Exam</a> "
    "problems. To get started with the <strong>GUI</strong>, select from the examples below. "
    "To use via <strong>API</strong> or <strong>MCP</strong>, see the link below. "
    "API keys are provided thanks to sponsor credit. "
    f"<a href='{BASE_URL}/README.md'>Documentation</a>"
)

# Pre-filled placeholder question shown in the Question textbox.
DEFAULT_QUESTION = (
    "In MCP's 1st Birthday Hackathon, hosted by Anthropic and Gradio, "
    "what percentage of participants submitted a solution so far?"
)
|
|
|
|
|
# Raw <style> block injected into the page via gr.HTML (see the Blocks UI
# below) so the app stretches to the full viewport width instead of the
# default constrained container; .content-padding restores horizontal padding.
CSS_FULL_WIDTH = """
<style>
html,
body,
main,
.gradio-app {
    width: 100% !important;
    max-width: 100% !important;
    margin: 0 !important;
    padding: 0 !important;
    overflow-x: hidden !important;
}

.full-width-app {
    width: 100% !important;
    max-width: 100% !important;
    margin: 0 !important;
    padding: 0 !important;
}

.content-padding {
    padding: 0 1.5rem 0;
}
</style>
"""
|
|
|
|
|
# ---------------------------------------------------------------------------
# UI definition: question/answer form, API-key inputs, and benchmark examples.
# ---------------------------------------------------------------------------
with gr.Blocks(elem_classes=["full-width-app"]) as gaia:
    gr.HTML(CSS_FULL_WIDTH)

    with gr.Column(elem_classes=["content-padding"]):
        gr.Markdown("## General AI Assistant")

        gr.Markdown(DESCRIPTION)

        with gr.Row():
            with gr.Column(scale=3):
                with gr.Row():
                    question = gr.Textbox(
                        label="Question *",
                        placeholder=DEFAULT_QUESTION,
                        interactive=True,
                        lines=2,
                        max_lines=5
                    )
                with gr.Row():
                    ground_truth = gr.Textbox(
                        label="Ground Truth",
                        interactive=True,
                        lines=1,
                        max_lines=2
                    )
                    file_name = gr.Textbox(
                        label="File Name",
                        interactive=True,
                        scale=2,
                        lines=1,
                        max_lines=2
                    )
                    file_link = gr.HTML(
                        label="File Link",
                        value=""
                    )
                with gr.Row():
                    openai_api_key = gr.Textbox(
                        label="OpenAI API Key *",
                        type="password",
                        # Bug fix: the original placeholder contained a Unicode
                        # non-breaking hyphen (U+2011) in "sk-..."; use a plain
                        # ASCII hyphen, consistent with "sk-ant-..." below.
                        placeholder="sk-...",
                        value="*",
                        interactive=True
                    )
                    gemini_api_key = gr.Textbox(
                        label="Gemini API Key *",
                        type="password",
                        value="*",
                        interactive=True
                    )
                    anthropic_api_key = gr.Textbox(
                        label="Anthropic API Key *",
                        type="password",
                        placeholder="sk-ant-...",
                        value="*",
                        interactive=True
                    )
                with gr.Row():
                    clear_btn = gr.ClearButton(
                        components=[question, ground_truth, file_name, file_link]
                    )
                    submit_btn = gr.Button("Submit", variant="primary")
            with gr.Column(scale=1):
                answer = gr.Textbox(
                    label="Answer",
                    interactive=False,
                    lines=2,
                    max_lines=5
                )

        submit_btn.click(
            fn=ask,
            inputs=[question, openai_api_key, gemini_api_key, anthropic_api_key, file_name],
            outputs=answer
        )

        # Keep the "Open File" link in sync with the File Name textbox.
        file_name.change(
            fn=update_file_link,
            inputs=[file_name],
            outputs=[file_link]
        )

        # One examples tab per benchmark; deduplicated from four copy-pasted
        # gr.Examples blocks that differed only in label/dataset/level.
        # NOTE(review): the trailing "" entries in `inputs` look like
        # placeholders for unused dataset columns -- gr.Examples normally
        # expects components here; confirm this renders as intended.
        example_tabs = [
            ("GAIA Benchmark Level 1", DATASET_TYPE_GAIA, 1),
            ("GAIA Benchmark Level 2", DATASET_TYPE_GAIA, 2),
            ("GAIA Benchmark Level 3", DATASET_TYPE_GAIA, 3),
            ("Humanity's Last Exam", DATASET_TYPE_HLE, 0),
        ]
        with gr.Tabs():
            for tab_label, dataset_type, level in example_tabs:
                with gr.TabItem(tab_label):
                    gr.Examples(
                        examples=get_dataset(dataset_type, level),
                        inputs=[question, ground_truth, file_name, "", "", ""],
                        examples_per_page=3,
                        cache_examples=False
                    )

# mcp_server=True also exposes the app's functions over MCP; ssr_mode=False
# disables server-side rendering.
gaia.launch(mcp_server=True, ssr_mode=False)