Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -9,10 +9,10 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
|
| 9 |
|
| 10 |
# Basic Agent Definition using HF Inference API
|
| 11 |
class BasicAgent:
|
| 12 |
-
def __init__(self):
|
| 13 |
print("BasicAgent initialized.")
|
| 14 |
-
token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
|
| 15 |
self.client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.1", token=token)
|
|
|
|
| 16 |
def __call__(self, question: str) -> str:
|
| 17 |
print(f"Agent received question: {question[:50]}...")
|
| 18 |
try:
|
|
@@ -25,7 +25,7 @@ class BasicAgent:
|
|
| 25 |
print(f"Error while querying HF model: {e}")
|
| 26 |
return f"Error: {e}"
|
| 27 |
|
| 28 |
-
def run_and_submit_all(profile: gr.OAuthProfile | None):
|
| 29 |
space_id = os.getenv("SPACE_ID")
|
| 30 |
|
| 31 |
if profile:
|
|
@@ -41,7 +41,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
|
|
| 41 |
|
| 42 |
# 1. Instantiate Agent
|
| 43 |
try:
|
| 44 |
-
agent = BasicAgent()
|
| 45 |
except Exception as e:
|
| 46 |
print(f"Error instantiating agent: {e}")
|
| 47 |
return f"Error initializing agent: {e}", None
|
|
@@ -132,16 +132,22 @@ with gr.Blocks() as demo:
|
|
| 132 |
**Instructions:**
|
| 133 |
1. Please clone this space, then modify the code to define your agent's logic, the tools, the necessary packages, etc ...
|
| 134 |
2. Log in to your Hugging Face account using the button below. This uses your HF username for submission.
|
| 135 |
-
3.
|
|
|
|
| 136 |
"""
|
| 137 |
)
|
| 138 |
|
| 139 |
gr.LoginButton()
|
|
|
|
| 140 |
run_button = gr.Button("Run Evaluation & Submit All Answers")
|
| 141 |
status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)
|
| 142 |
results_table = gr.DataFrame(label="Questions and Agent Answers", wrap=True)
|
| 143 |
|
| 144 |
-
run_button.click(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 145 |
|
| 146 |
if __name__ == "__main__":
|
| 147 |
demo.launch(debug=True, share=False)
|
|
|
|
| 9 |
|
| 10 |
# Basic Agent Definition using HF Inference API
|
| 11 |
class BasicAgent:
|
| 12 |
+
def __init__(self, token: str | None = None):
    """Create the agent and its HF Inference API client.

    Args:
        token: Hugging Face API token. When omitted (or empty), falls
            back to the ``HUGGINGFACEHUB_API_TOKEN`` environment
            variable, preserving the previous behaviour for callers
            that do not supply a token explicitly.
    """
    print("BasicAgent initialized.")
    # Prefer an explicitly supplied token; an empty string from a UI
    # textbox also falls through to the environment variable.
    token = token or os.getenv("HUGGINGFACEHUB_API_TOKEN")
    self.client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.1", token=token)
|
| 15 |
+
|
| 16 |
def __call__(self, question: str) -> str:
|
| 17 |
print(f"Agent received question: {question[:50]}...")
|
| 18 |
try:
|
|
|
|
| 25 |
print(f"Error while querying HF model: {e}")
|
| 26 |
return f"Error: {e}"
|
| 27 |
|
| 28 |
+
def run_and_submit_all(profile: gr.OAuthProfile | None, token_input: str):
|
| 29 |
space_id = os.getenv("SPACE_ID")
|
| 30 |
|
| 31 |
if profile:
|
|
|
|
| 41 |
|
| 42 |
# 1. Instantiate Agent
|
| 43 |
try:
|
| 44 |
+
agent = BasicAgent(token=token_input)
|
| 45 |
except Exception as e:
|
| 46 |
print(f"Error instantiating agent: {e}")
|
| 47 |
return f"Error initializing agent: {e}", None
|
|
|
|
| 132 |
**Instructions:**
|
| 133 |
1. Please clone this space, then modify the code to define your agent's logic, the tools, the necessary packages, etc ...
|
| 134 |
2. Log in to your Hugging Face account using the button below. This uses your HF username for submission.
|
| 135 |
+
3. Paste your Hugging Face token below.
|
| 136 |
+
4. Click 'Run Evaluation & Submit All Answers' to fetch questions, run your agent, submit answers, and see the score.
|
| 137 |
"""
|
| 138 |
)
|
| 139 |
|
| 140 |
# OAuth login; Gradio supplies the resulting gr.OAuthProfile to event
# handlers whose signature is annotated with it (see run_and_submit_all).
gr.LoginButton()
# Token entry for the HF Inference API; type="password" masks the value.
token_box = gr.Textbox(label="Enter your Hugging Face token", type="password", placeholder="hf_...")
# Triggers the fetch → answer → submit pipeline.
run_button = gr.Button("Run Evaluation & Submit All Answers")
# Read-only status/score text filled in by the run callback.
status_output = gr.Textbox(label="Run Status / Submission Result", lines=5, interactive=False)
# Per-question results table filled in by the run callback.
results_table = gr.DataFrame(label="Questions and Agent Answers", wrap=True)
|
| 145 |
|
| 146 |
+
# Wire the run button to the evaluation pipeline. NOTE: gr.OAuthProfile
# must NOT be listed in `inputs` — it is a type-annotation class, not a
# component. Gradio injects the logged-in user's profile automatically
# because run_and_submit_all annotates its first parameter as
# `gr.OAuthProfile | None`; only real components belong in `inputs`.
run_button.click(
    fn=run_and_submit_all,
    inputs=[token_box],
    outputs=[status_output, results_table],
)
|
| 151 |
|
| 152 |
# Launch the Gradio app only when executed as a script (not on import).
if __name__ == "__main__":
    demo.launch(debug=True, share=False)
|