Update app.py
Browse files
app.py
CHANGED
|
@@ -3,6 +3,7 @@ import gradio as gr
|
|
| 3 |
import requests
|
| 4 |
import inspect
|
| 5 |
import pandas as pd
|
|
|
|
| 6 |
|
| 7 |
# (Keep Constants as is)
|
| 8 |
# --- Constants ---
|
|
@@ -12,46 +13,18 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
|
| 12 |
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
|
| 13 |
class BasicAgent:
|
| 14 |
def __init__(self):
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
"nasa award number r. g. arendt": "80GSFC21M0002",
|
| 19 |
-
"vietnamese specimens nedoshivina": "Hanoi",
|
| 20 |
-
"least number of athletes 1928": "MLT",
|
| 21 |
-
"taishō tamai pitchers": "Tanaka, Yamamoto",
|
| 22 |
-
"menu items total sales": "10423.75",
|
| 23 |
-
"malko competition no longer exists": "Ivan",
|
| 24 |
-
"ozone highest values": "May",
|
| 25 |
-
"samantha octopus": "Cuttlefish",
|
| 26 |
-
"sunlight plants": "Chlorophyll",
|
| 27 |
-
"da vinci invention": "Helicopter",
|
| 28 |
-
"fastest land animal": "Cheetah",
|
| 29 |
-
"math graph": "Parabola",
|
| 30 |
-
"climate change report": "IPCC AR6",
|
| 31 |
-
"bonjour": "Hello",
|
| 32 |
-
"pi value": "3.14159",
|
| 33 |
-
"deep research": "OpenAI",
|
| 34 |
-
"timeline art movement": "Impressionism",
|
| 35 |
-
"attached pdf page number": "3",
|
| 36 |
-
"chess tournament winner": "Carlsen",
|
| 37 |
-
"fruit served breakfast ocean liner": "plums",
|
| 38 |
-
"first name malko recipient": "Ivan",
|
| 39 |
-
"earthquake deadliest century": "2004",
|
| 40 |
-
"capital of country that no longer exists": "Belgrade",
|
| 41 |
-
"painting artist signed bottom left": "Monet",
|
| 42 |
-
"company logo animal": "Puma",
|
| 43 |
-
"element in group 2 and period 3": "Magnesium",
|
| 44 |
-
"chemical formula for table salt": "NaCl",
|
| 45 |
-
"roman numeral for 2023": "MMXXIII",
|
| 46 |
-
"temperature scale used in us": "Fahrenheit"
|
| 47 |
-
}
|
| 48 |
|
| 49 |
def __call__(self, question: str) -> str:
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
|
|
|
|
|
|
| 55 |
|
| 56 |
|
| 57 |
|
|
|
|
| 3 |
import requests
|
| 4 |
import inspect
|
| 5 |
import pandas as pd
|
| 6 |
+
from transformers import pipeline
|
| 7 |
|
| 8 |
# (Keep Constants as is)
|
| 9 |
# --- Constants ---
|
|
|
|
| 13 |
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
|
| 14 |
class BasicAgent:
    """Minimal question-answering agent backed by a Hugging Face text-generation pipeline.

    The agent builds a short instruction prompt around the incoming question,
    runs it through the pipeline, and returns the first line of text the model
    produces after the trailing "Answer:" marker.
    """

    def __init__(self, model_name: str = "HuggingFaceH4/zephyr-7b-beta", max_new_tokens: int = 256):
        """Load the text-generation pipeline.

        Args:
            model_name: Hugging Face model id to load. Default preserves the
                original hard-coded behavior.
            max_new_tokens: Cap on generated tokens per call. Default preserves
                the original hard-coded value.
        """
        print("Loading Hugging Face model...")
        # NOTE(review): this loads a ~7B-parameter model at construction time;
        # it needs substantial RAM/GPU and can take minutes — confirm the
        # deployment target can handle it.
        self.pipeline = pipeline("text-generation", model=model_name, max_new_tokens=max_new_tokens)
        print("Model loaded.")

    def __call__(self, question: str) -> str:
        """Answer a single free-form question.

        Args:
            question: The user's question; surrounding whitespace is ignored.

        Returns:
            The first line of the model's completion after the final
            "Answer:" marker, stripped of whitespace (may be "" if the
            model generated nothing past the prompt).
        """
        print(f"Received question: {question[:60]}...")
        prompt = f"Answer the question clearly and concisely:\n{question.strip()}\nAnswer:"
        # text-generation pipelines echo the prompt by default, so the raw
        # output is prompt + completion.
        response = self.pipeline(prompt)[0]["generated_text"]
        # Keep only the completion: everything after the LAST "Answer:"
        # (the prompt's own marker, plus any the question itself contained),
        # then take just the first generated line.
        answer = response.split("Answer:")[-1].strip().split("\n")[0]
        print(f"Generated answer: {answer}")
        return answer
|
| 28 |
|
| 29 |
|
| 30 |
|