Update src/qa.py
Browse files
src/qa.py — CHANGED
@@ -278,9 +278,13 @@ def truncate_context(context_text: str, max_tokens: int = 100000, model: str = "
 278          return truncated
 279      return context_text
 280
 281  def generate_answer(query: str, retrieved_chunks: list, reasoning_mode: bool = False):
 282      """
 283      Generates an English answer using GPT-4o (SAP Gen AI Hub proxy).
 284      """
 285      if not retrieved_chunks:
 286          return "Sorry, I couldn’t find relevant information in the document."
@@ -312,7 +316,24 @@ def generate_answer(query: str, retrieved_chunks: list, reasoning_mode: bool = F
 312
 313      try:
 314          response = chat_llm_local.invoke(messages)
-315          [removed line — original content not captured in this diff view]
 316      except Exception as e:
 317          print(f"⚠️ GPT-4o generation failed: {e}")
 318          return "⚠️ Error: Could not generate an answer."
 278          return truncated
 279      return context_text
 280
+281  # ==========================================================
+282  # 8️⃣ Answer Generation (English Only + Token-Safe + Smart Fallback)
+283  # ==========================================================
 284  def generate_answer(query: str, retrieved_chunks: list, reasoning_mode: bool = False):
 285      """
 286      Generates an English answer using GPT-4o (SAP Gen AI Hub proxy).
+287      Handles both strict and reasoning modes with smart fallback guidance.
 288      """
 289      if not retrieved_chunks:
 290          return "Sorry, I couldn’t find relevant information in the document."

 [lines 291–315 unchanged — not shown in this diff view]

 316
 317      try:
 318          response = chat_llm_local.invoke(messages)
+319          output = response.content.strip()
+320
+321          # 🔍 Smart fallback substitution
+322          if "I don't know based on the provided document" in output:
+323              if reasoning_mode:
+324                  output = (
+325                      "I couldn’t infer enough from the context. "
+326                      "Try rephrasing your question for a clearer reasoning path."
+327                  )
+328              else:
+329                  output = (
+330                      "I couldn’t find a clear answer in this document. "
+331                      "You can try rephrasing the query or switch to Extended Mode "
+332                      "(Document + General) for a broader explanation."
+333                  )
+334
+335          return output
+336
 337      except Exception as e:
 338          print(f"⚠️ GPT-4o generation failed: {e}")
 339          return "⚠️ Error: Could not generate an answer."