Hugging Face Space (status: Sleeping) — commit: "Update app.py"
File changed: app.py
@@ -69,7 +69,8 @@ def answer_question(question, documents):
     question_with_context = prompt.format(question=question, context=full_context)

     # Use the Hugging Face InferenceClient to generate the response
-    response = client.query(question_with_context)
+    # Replacing the `query` method with `predict` method
+    response = client.predict(question_with_context)

     # Assuming the response contains a 'generated_text' field with the model's output
     return response["generated_text"]

(Note: the removed line's content was lost in the page extraction; it is reconstructed
here as a `client.query(...)` call based on the commit's own comment describing the
change as replacing `query` with `predict`.)