Trial and error.
Browse files
- app.py: +2 −4
- requirements.txt: +1 −4
app.py
CHANGED
|
@@ -4,8 +4,6 @@ import requests
 import inspect
 import pandas as pd
 from huggingface_hub import InferenceClient  # Import Hugging Face InferenceClient
-import torch
-from transformers import AutoModelForCausalLM, AutoTokenizer

 # (Keep Constants as is)
 # --- Constants ---
@@ -17,7 +15,7 @@ class BasicAgent:
     def __init__(self):
         print("BasicAgent initialized.")

-        print("Loading
+        print("Loading huggingface default model...")
         self.client = InferenceClient(token=os.getenv("HF_TOKEN"))

     def __call__(self, question: str) -> str:
@@ -169,7 +167,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):

 # --- Build Gradio Interface using Blocks ---
 with gr.Blocks() as demo:
-    gr.Markdown("# Basic Agent Evaluation Runner #
+    gr.Markdown("# Basic Agent Evaluation Runner #7")
     gr.Markdown(
         """
         **Instructions:**

NOTE(review): the two removed lines above (`print("Loading` and `gr.Markdown("# Basic Agent Evaluation Runner #`) are truncated in this page capture; their full original text is not recoverable from this rendering and has been left as captured rather than guessed.
|
requirements.txt
CHANGED
|
@@ -1,6 +1,3 @@
 gradio
 requests
-huggingface_hub
-transformers
-torch
-accelerate
+huggingface_hub
|
|
|
|
|
|
|
|
|