DocUA committed on
Commit
ef61786
·
1 Parent(s): 102fe82

feat: Add configurable content inclusion for spiritual care messages, allowing dynamic selection of patient profile, situation, distress, and conversation context.

Browse files
src/core/provider_summary_generator.py CHANGED
@@ -685,31 +685,37 @@ class ProviderSummaryGenerator:
685
  summary: ProviderSummary,
686
  language: str = "English",
687
  session_id: Optional[str] = None,
688
- model_override: Optional[str] = None
 
 
 
 
689
  ) -> str:
690
  """
691
  Generate a compassionate, concise message for spiritual care team using LLM.
692
 
693
- Creates a natural language summary that:
694
- - Humanizes the patient's situation
695
- - Highlights key emotional/spiritual concerns
696
- - Provides essential contact and context information
697
- - Uses empathetic, professional tone
698
-
699
  Args:
700
  summary: ProviderSummary with all case details
701
  language: Language hint (deprecated - LLM auto-detects from context)
702
  session_id: Optional session ID for session-specific prompt overrides
703
  model_override: Optional model name override for this generation
 
 
 
 
704
 
705
  Returns:
706
  Natural language message for spiritual care team
707
-
708
- Requirements: 7.1, 7.2, 7.3, 7.4, 7.5
709
  """
710
  # If no AI client available, return formatted structured message
711
  if not self.ai_client:
712
- return self._generate_fallback_care_message(summary, language)
 
 
 
 
 
 
713
 
714
  # Load system prompt using PromptController for session-aware loading
715
  try:
@@ -719,18 +725,15 @@ class ProviderSummaryGenerator:
719
  )
720
  system_prompt = prompt_config.base_prompt
721
 
722
- # Load user prompt template from file
723
- prompts_dir = os.path.join(
724
- os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
725
- 'src', 'config', 'prompts'
726
- )
727
- template_path = os.path.join(prompts_dir, 'spiritual_care_message_template.txt')
728
- with open(template_path, 'r', encoding='utf-8') as f:
729
- user_template = f.read().strip()
730
-
731
  except Exception as e:
732
  print(f"⚠️ Error loading prompt files: {e}")
733
- return self._generate_fallback_care_message(summary, language)
 
 
 
 
 
 
734
 
735
  # Build context for LLM
736
  indicators_text = "\n".join(f"- {ind}" for ind in summary.indicators)
@@ -767,75 +770,126 @@ class ProviderSummaryGenerator:
767
  if goal:
768
  patient_goal = str(goal)
769
 
770
- # Fill user prompt template
771
- user_prompt = user_template.format(
772
- patient_name=summary.patient_name,
773
- patient_phone=summary.patient_phone,
774
- patient_age=patient_age,
775
- patient_conditions=patient_conditions,
776
- patient_goal=patient_goal,
777
- situation_description=summary.situation_description,
778
- indicators=indicators_text,
779
- urgency_level=summary.urgency_level,
780
- follow_up_timeline=summary.follow_up_timeline,
781
- triage_context=triage_section,
782
- conversation_snippet=conversation_snippet
783
- )
784
-
785
- try:
786
- # Use DynamicAIClient with model override if specified
787
- from .ai_client import DynamicAIClient
788
 
789
- # Create AI client for spiritual care messages
790
- ai_client = DynamicAIClient(
791
- agent_name="SpiritualCareMessage",
792
- model_override=model_override
793
- )
 
 
 
 
 
 
 
 
 
794
 
795
- # Generate response
796
- response = ai_client.generate_response(
 
 
 
797
  system_prompt=system_prompt,
798
  user_prompt=user_prompt,
799
- temperature=0.7, # Slightly creative for natural language
800
- call_type="spiritual_care_message"
 
 
801
  )
802
 
803
  return response.strip()
804
 
805
  except Exception as e:
806
  print(f"⚠️ Error generating LLM message: {e}")
807
- return self._generate_fallback_care_message(summary, language)
 
 
 
 
 
 
808
 
809
- def _generate_fallback_care_message(self, summary: ProviderSummary, language: str = "English") -> str:
 
 
 
 
 
 
 
 
810
  """Generate fallback narrative message when LLM is unavailable."""
811
 
812
- # Prepare components for narrative
813
- conditions = "None listed"
814
- if summary.medical_context and summary.medical_context.get('conditions'):
815
- cond_list = summary.medical_context.get('conditions', [])
816
- conditions = ", ".join(cond_list)
817
-
818
- indicators = ", ".join(summary.indicators) if summary.indicators else "Unspecified distress"
819
 
820
- action = "Assess and support"
821
- if summary.recommended_actions:
822
- action = summary.recommended_actions[0]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
823
 
824
- # Construct narrative based on language
825
- if language.lower() in ['ukrainian', 'uk', 'укр', 'українська']:
826
- return (
827
- f"Пацієнт {summary.patient_name} ({summary.patient_phone}) виявляє ознаки духовного дистресу, "
828
- f"зокрема: {indicators}. Медичний контекст: {conditions}. "
829
- f"Клінічна оцінка вказує на рівень серйозності: {summary.severity_level}. "
830
- f"Рекомендована дія: {action}. Будь ласка, забезпечте подальший супровід {summary.follow_up_timeline}."
831
- )
832
- else:
833
- return (
834
- f"The patient {summary.patient_name} ({summary.patient_phone}) is presenting with spiritual distress "
835
- f"indicators including {indicators}. Relevant medical context includes {conditions}. "
836
- f"Clinical assessment indicates {summary.severity_level} severity with {summary.confidence:.0%} confidence. "
837
- f"Please review for {action} and follow up {summary.follow_up_timeline.lower()}."
838
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
839
 
840
  def generate_summary_with_validation(self, **kwargs) -> tuple[ProviderSummary, List[str]]:
841
  """
 
685
  summary: ProviderSummary,
686
  language: str = "English",
687
  session_id: Optional[str] = None,
688
+ model_override: Optional[str] = None,
689
+ include_patient_profile: bool = False,
690
+ include_situation_analysis: bool = False,
691
+ include_distress_indicators: bool = True,
692
+ include_conversation_context: bool = True
693
  ) -> str:
694
  """
695
  Generate a compassionate, concise message for spiritual care team using LLM.
696
 
 
 
 
 
 
 
697
  Args:
698
  summary: ProviderSummary with all case details
699
  language: Language hint (deprecated - LLM auto-detects from context)
700
  session_id: Optional session ID for session-specific prompt overrides
701
  model_override: Optional model name override for this generation
702
+ include_patient_profile: Whether to include name, phone, age, conditions (Default: False)
703
+ include_situation_analysis: Whether to include situation description (Default: False)
704
+ include_distress_indicators: Whether to include indicators and urgency (Default: True)
705
+ include_conversation_context: Whether to include triage exchanges and chat snippet (Default: True)
706
 
707
  Returns:
708
  Natural language message for spiritual care team
 
 
709
  """
710
  # If no AI client available, return formatted structured message
711
  if not self.ai_client:
712
+ return self._generate_fallback_care_message(
713
+ summary, language,
714
+ include_patient_profile=include_patient_profile,
715
+ include_situation_analysis=include_situation_analysis,
716
+ include_distress_indicators=include_distress_indicators,
717
+ include_conversation_context=include_conversation_context
718
+ )
719
 
720
  # Load system prompt using PromptController for session-aware loading
721
  try:
 
725
  )
726
  system_prompt = prompt_config.base_prompt
727
 
 
 
 
 
 
 
 
 
 
728
  except Exception as e:
729
  print(f"⚠️ Error loading prompt files: {e}")
730
+ return self._generate_fallback_care_message(
731
+ summary, language,
732
+ include_patient_profile=include_patient_profile,
733
+ include_situation_analysis=include_situation_analysis,
734
+ include_distress_indicators=include_distress_indicators,
735
+ include_conversation_context=include_conversation_context
736
+ )
737
 
738
  # Build context for LLM
739
  indicators_text = "\n".join(f"- {ind}" for ind in summary.indicators)
 
770
  if goal:
771
  patient_goal = str(goal)
772
 
773
+ # Construct User Prompt dynamically based on flags
774
+ prompt_parts = []
775
+
776
+ # 1. Patient Profile
777
+ if include_patient_profile:
778
+ prompt_parts.append(f"**PATIENT CONTACT:**\nName: {summary.patient_name}\nPhone: {summary.patient_phone}\nAge: {patient_age}")
779
+ prompt_parts.append(f"**PATIENT BACKGROUND:**\nMedical Conditions: {patient_conditions}\nPrimary Goal: {patient_goal}")
 
 
 
 
 
 
 
 
 
 
 
780
 
781
+ # 2. Situation Analysis
782
+ if include_situation_analysis:
783
+ prompt_parts.append(f"**SITUATION:**\n{summary.situation_description}")
784
+
785
+ # 3. Distress Indicators & Urgency
786
+ if include_distress_indicators:
787
+ prompt_parts.append(f"**DISTRESS INDICATORS:**\n{indicators_text}")
788
+ prompt_parts.append(f"**URGENCY:**\n{summary.urgency_level} - Follow up {summary.follow_up_timeline}")
789
+
790
+ # 4. Conversation Context (Triage + Chat)
791
+ if include_conversation_context:
792
+ if triage_section:
793
+ prompt_parts.append(triage_section.strip())
794
+ prompt_parts.append(f"**CONTEXT:**\n{conversation_snippet}")
795
 
796
+ user_prompt = "\n\n".join(prompt_parts)
797
+
798
+ try:
799
+ # Use existing ai_client (AIClientManager) to generate response with overrides
800
+ response = self.ai_client.generate_response(
801
  system_prompt=system_prompt,
802
  user_prompt=user_prompt,
803
+ temperature=0.7,
804
+ call_type="spiritual_care_message",
805
+ agent_name="SpiritualCareMessage",
806
+ model_override=model_override
807
  )
808
 
809
  return response.strip()
810
 
811
  except Exception as e:
812
  print(f"⚠️ Error generating LLM message: {e}")
813
+ return self._generate_fallback_care_message(
814
+ summary, language,
815
+ include_patient_profile=include_patient_profile,
816
+ include_situation_analysis=include_situation_analysis,
817
+ include_distress_indicators=include_distress_indicators,
818
+ include_conversation_context=include_conversation_context
819
+ )
820
 
821
+ def _generate_fallback_care_message(
822
+ self,
823
+ summary: ProviderSummary,
824
+ language: str = "English",
825
+ include_patient_profile: bool = False,
826
+ include_situation_analysis: bool = False,
827
+ include_distress_indicators: bool = True,
828
+ include_conversation_context: bool = True
829
+ ) -> str:
830
  """Generate fallback narrative message when LLM is unavailable."""
831
 
832
+ parts = []
 
 
 
 
 
 
833
 
834
+ # 1. Patient Profile
835
+ if include_patient_profile:
836
+ name_part = f"{summary.patient_name}"
837
+ if summary.patient_phone:
838
+ name_part += f" ({summary.patient_phone})"
839
+
840
+ if language.lower() in ['ukrainian', 'uk', 'укр', 'українська']:
841
+ parts.append(f"Пацієнт: {name_part}.")
842
+
843
+ conditions = "Не вказано"
844
+ if summary.medical_context and summary.medical_context.get('conditions'):
845
+ cond_list = summary.medical_context.get('conditions', [])
846
+ conditions = ", ".join(cond_list)
847
+ parts.append(f"Медичний контекст: {conditions}.")
848
+ else:
849
+ parts.append(f"Patient: {name_part}.")
850
+
851
+ conditions = "None listed"
852
+ if summary.medical_context and summary.medical_context.get('conditions'):
853
+ cond_list = summary.medical_context.get('conditions', [])
854
+ conditions = ", ".join(cond_list)
855
+ parts.append(f"Medical Context: {conditions}.")
856
 
857
+ # 2. Clinical Analysis (Situation)
858
+ # Note: Situation is usually a narrative, fallback might not have it pre-computed well if it depends on LLM summary.
859
+ # But summary.situation_description exists.
860
+ if include_situation_analysis and summary.situation_description:
861
+ if language.lower() in ['ukrainian', 'uk', 'укр', 'українська']:
862
+ parts.append(f"Ситуація: {summary.situation_description}")
863
+ else:
864
+ parts.append(f"Situation: {summary.situation_description}")
865
+
866
+ # 3. Distress Indicators & Urgency
867
+ if include_distress_indicators:
868
+ indicators = ", ".join(summary.indicators) if summary.indicators else "Unspecified"
869
+ action = summary.recommended_actions[0] if summary.recommended_actions else "Assess"
870
+
871
+ if language.lower() in ['ukrainian', 'uk', 'укр', 'українська']:
872
+ parts.append(f"Виявлено ознаки дистресу: {indicators}.")
873
+ parts.append(f"Оцінка: Рівень серйозності {summary.severity_level}.")
874
+ parts.append(f"План: {action}. Контроль: {summary.follow_up_timeline}.")
875
+ else:
876
+ parts.append(f"Distress Indicators: {indicators}.")
877
+ parts.append(f"Assessment: {summary.severity_level} severity level.")
878
+ parts.append(f"Plan: {action}. Follow-up: {summary.follow_up_timeline}.")
879
+
880
+ # 4. Conversation Context
881
+ if include_conversation_context:
882
+ context_snippet = summary.conversation_context[:200] + "..." if summary.conversation_context else ""
883
+ if context_snippet:
884
+ if language.lower() in ['ukrainian', 'uk', 'укр', 'українська']:
885
+ parts.append(f"Контекст чату: {context_snippet}")
886
+ else:
887
+ parts.append(f"Chat Context: {context_snippet}")
888
+
889
+ if not parts:
890
+ return "No information selected for display."
891
+
892
+ return " ".join(parts)
893
 
894
  def generate_summary_with_validation(self, **kwargs) -> tuple[ProviderSummary, List[str]]:
895
  """
src/core/simplified_medical_app.py CHANGED
@@ -761,22 +761,28 @@ Is there anything else I can help you with today?"""
761
  """
762
  return getattr(self, '_last_provider_summary', None)
763
 
764
- def generate_spiritual_care_message(self, language: str = "English", session_id: Optional[str] = None) -> Optional[str]:
 
 
 
 
 
 
 
 
765
  """
766
  Generate LLM-based message for spiritual care team.
767
 
768
- Creates a compassionate, concise message summarizing the patient's
769
- situation for the spiritual care team. Uses natural language and
770
- emphasizes emotional/spiritual aspects.
771
-
772
  Args:
773
  language: Language for the message (English/Ukrainian)
774
  session_id: Optional session ID for session-specific prompt overrides
 
 
 
 
775
 
776
  Returns:
777
  Generated message if provider summary exists, None otherwise
778
-
779
- Requirements: 7.1, 7.2, 7.3, 7.4, 7.5
780
  """
781
  summary = self.get_last_provider_summary()
782
  if not summary:
@@ -789,7 +795,11 @@ Is there anything else I can help you with today?"""
789
  summary=summary,
790
  language=language,
791
  session_id=session_id,
792
- model_override=model_override
 
 
 
 
793
  )
794
 
795
  def _detect_language(self, text: str) -> str:
 
761
  """
762
  return getattr(self, '_last_provider_summary', None)
763
 
764
+ def generate_spiritual_care_message(
765
+ self,
766
+ language: str = "English",
767
+ session_id: Optional[str] = None,
768
+ include_patient_profile: bool = False,
769
+ include_situation_analysis: bool = False,
770
+ include_distress_indicators: bool = True,
771
+ include_conversation_context: bool = True
772
+ ) -> Optional[str]:
773
  """
774
  Generate LLM-based message for spiritual care team.
775
 
 
 
 
 
776
  Args:
777
  language: Language for the message (English/Ukrainian)
778
  session_id: Optional session ID for session-specific prompt overrides
779
+ include_patient_profile: Include name, phone, age, conditions
780
+ include_situation_analysis: Include situation description
781
+ include_distress_indicators: Include indicators and urgency
782
+ include_conversation_context: Include triage, chat snippet
783
 
784
  Returns:
785
  Generated message if provider summary exists, None otherwise
 
 
786
  """
787
  summary = self.get_last_provider_summary()
788
  if not summary:
 
795
  summary=summary,
796
  language=language,
797
  session_id=session_id,
798
+ model_override=model_override,
799
+ include_patient_profile=include_patient_profile,
800
+ include_situation_analysis=include_situation_analysis,
801
+ include_distress_indicators=include_distress_indicators,
802
+ include_conversation_context=include_conversation_context
803
  )
804
 
805
  def _detect_language(self, text: str) -> str:
src/interface/simplified_gradio_app.py CHANGED
@@ -229,6 +229,27 @@ def create_simplified_interface():
229
  label="Message for Spiritual Care Team",
230
  interactive=False
231
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
232
  with gr.Row():
233
  generate_message_btn = gr.Button(
234
  "🔄 Regenerate Message",
@@ -2161,7 +2182,13 @@ Use the **Download Summary** button below to access the complete provider summar
2161
  """Clear provider summary panel."""
2162
  return gr.update(visible=False), "No provider summary available", "", ""
2163
 
2164
- def regenerate_spiritual_care_message(session: SimplifiedSessionData):
 
 
 
 
 
 
2165
  """Regenerate LLM-based spiritual care message."""
2166
  if session is None:
2167
  return ""
@@ -2169,7 +2196,11 @@ Use the **Download Summary** button below to access the complete provider summar
2169
  try:
2170
  message = session.app_instance.generate_spiritual_care_message(
2171
  language="English",
2172
- session_id=session.session_id
 
 
 
 
2173
  )
2174
  return message if message else "No provider summary available to generate message from."
2175
  except Exception as e:
@@ -2230,7 +2261,7 @@ Use the **Download Summary** button below to access the complete provider summar
2230
  # Spiritual care message handlers
2231
  generate_message_btn.click(
2232
  regenerate_spiritual_care_message,
2233
- inputs=[session_data],
2234
  outputs=[spiritual_care_message]
2235
  )
2236
 
 
229
  label="Message for Spiritual Care Team",
230
  interactive=False
231
  )
232
+ with gr.Row():
233
+ include_conversation_context = gr.Checkbox(
234
+ value=True,
235
+ label="Include Conversation Context (Chat & Triage)",
236
+ interactive=True
237
+ )
238
+ include_situation_analysis = gr.Checkbox(
239
+ value=False,
240
+ label="Include Situation Analysis",
241
+ interactive=True
242
+ )
243
+ include_distress_indicators = gr.Checkbox(
244
+ value=True,
245
+ label="Include Distress Indicators",
246
+ interactive=True
247
+ )
248
+ include_patient_profile = gr.Checkbox(
249
+ value=False,
250
+ label="Include Patient Profile (Name, Contact)",
251
+ interactive=True
252
+ )
253
  with gr.Row():
254
  generate_message_btn = gr.Button(
255
  "🔄 Regenerate Message",
 
2182
  """Clear provider summary panel."""
2183
  return gr.update(visible=False), "No provider summary available", "", ""
2184
 
2185
+ def regenerate_spiritual_care_message(
2186
+ session: SimplifiedSessionData,
2187
+ include_conversation: bool = True,
2188
+ include_situation: bool = False,
2189
+ include_indicators: bool = True
2190
+ include_profile: bool = False
2191
+ ):
2192
  """Regenerate LLM-based spiritual care message."""
2193
  if session is None:
2194
  return ""
 
2196
  try:
2197
  message = session.app_instance.generate_spiritual_care_message(
2198
  language="English",
2199
+ session_id=session.session_id,
2200
+ include_conversation_context=include_conversation,
2201
+ include_situation_analysis=include_situation,
2202
+ include_distress_indicators=include_indicators,
2203
+ include_patient_profile=include_profile
2204
  )
2205
  return message if message else "No provider summary available to generate message from."
2206
  except Exception as e:
 
2261
  # Spiritual care message handlers
2262
  generate_message_btn.click(
2263
  regenerate_spiritual_care_message,
2264
+ inputs=[session_data, include_conversation_context, include_situation_analysis, include_distress_indicators, include_patient_profile],
2265
  outputs=[spiritual_care_message]
2266
  )
2267