rdz-falcon committed on
Commit
19b88dc
·
verified ·
1 Parent(s): 0f292b9

Update src/rag.py

Browse files
Files changed (1) hide show
  1. src/rag.py +17 -2
src/rag.py CHANGED
@@ -466,5 +466,20 @@ class AACAssistant:
466
  response = self.chain.invoke(
467
  {"question": user_query, "emotion_analysis": emotion_analysis}
468
  )
469
-
470
- return response["answer"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
466
  response = self.chain.invoke(
467
  {"question": user_query, "emotion_analysis": emotion_analysis}
468
  )
469
+ assistant_marker = "</s> <|assistant|>"
470
+
471
+ # Find the last occurrence of the marker
472
+ marker_position = raw_full_answer.rfind(assistant_marker)
473
+
474
+ if marker_position != -1:
475
+ # Extract the text *after* the marker
476
+ actual_response = raw_full_answer[marker_position + len(assistant_marker):].strip()
477
+ else:
478
+ # Fallback if the marker is not found in the response.
479
+ # This might happen if the LLM's output is unexpected or if the prompt structure changed.
480
+ print(f"WARNING: Assistant marker '{assistant_marker}' not found in raw answer. Returning raw answer as fallback.")
481
+ actual_response = raw_full_answer.strip() # Or handle as an error
482
+
483
+ print(f"DEBUG: process_query - Extracted assistant response: '{actual_response}'")
484
+ return actual_response
485
+ # return response["answer"]