Update app.py
Browse files
app.py
CHANGED
|
@@ -12,16 +12,19 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
|
|
| 12 |
# --- Basic Agent Definition ---
|
| 13 |
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
|
| 14 |
class BasicAgent:
|
| 15 |
-
|
| 16 |
print("Loading Hugging Face model...")
|
| 17 |
-
self.pipeline = pipeline(
|
|
|
|
|
|
|
|
|
|
|
|
|
| 18 |
print("Model loaded.")
|
| 19 |
|
| 20 |
def __call__(self, question: str) -> str:
|
| 21 |
print(f"Received question: {question[:60]}...")
|
| 22 |
prompt = f"Answer the question clearly and concisely:\n{question.strip()}\nAnswer:"
|
| 23 |
response = self.pipeline(prompt)[0]["generated_text"]
|
| 24 |
-
# Strip the prompt from the output
|
| 25 |
answer = response.split("Answer:")[-1].strip().split("\n")[0]
|
| 26 |
print(f"Generated answer: {answer}")
|
| 27 |
return answer
|
|
|
|
| 12 |
# --- Basic Agent Definition ---
# ----- THIS IS WHERE YOU CAN BUILD WHAT YOU WANT ------
class BasicAgent:
    """Minimal QA agent backed by a Hugging Face text-generation pipeline.

    The agent builds a short instruction prompt ending in "Answer:", runs the
    model, and returns only the first line of the model's completion.
    """

    def __init__(
        self,
        model_name: str = "HuggingFaceH4/zephyr-7b-beta",
        max_new_tokens: int = 256,
    ):
        """Load the text-generation pipeline.

        Args:
            model_name: Hub id of the model to load. The default is fully
                public and unrestricted (no gated-access token needed).
            max_new_tokens: Generation budget per answer.
        """
        print("Loading Hugging Face model...")
        self.pipeline = pipeline(
            "text-generation",
            model=model_name,
            max_new_tokens=max_new_tokens,
        )
        print("Model loaded.")

    def __call__(self, question: str) -> str:
        """Answer *question* and return a single-line string.

        The prompt deliberately ends with "Answer:" so the completion can be
        isolated by splitting on that marker afterwards.
        """
        print(f"Received question: {question[:60]}...")
        prompt = f"Answer the question clearly and concisely:\n{question.strip()}\nAnswer:"
        # text-generation pipelines echo the prompt inside "generated_text".
        response = self.pipeline(prompt)[0]["generated_text"]
        # Strip the prompt from the output: keep everything after the final
        # "Answer:" marker, then only the first line of the completion.
        answer = response.split("Answer:")[-1].strip().split("\n")[0]
        print(f"Generated answer: {answer}")
        return answer
|