ageraustine committed on
Commit
8748dfd
·
verified ·
1 Parent(s): 5492df5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -10
app.py CHANGED
@@ -87,11 +87,11 @@ Section:"""
87
  def chat_response(self, message, history):
88
  """Two-tier LLM system: Router + Specialist response"""
89
  if not message.strip():
90
- return "Please ask me something about Legion Maria Youth Affairs!"
91
 
92
  try:
93
  if not self.data_content:
94
- return "I don't have information available to answer that question."
95
 
96
  # Step 1: Router LLM decides which section to use
97
  selected_section = self.route_query(message)
@@ -114,19 +114,20 @@ Section:"""
114
  conversation_context += "Current conversation:\n"
115
 
116
  # Step 3: Response LLM generates answer using only relevant data
117
- response_prompt = f"""You are a concise assistant for the Legion Maria Directorate of Youth Affairs. Provide brief, direct answers.
118
 
119
- Relevant Information:
120
  {json.dumps(relevant_data, indent=2)}
121
 
122
  {conversation_context}User: {message}
123
 
124
  Guidelines:
 
125
  - Keep responses SHORT (1-3 sentences maximum)
126
- - Be direct and to the point
127
- - Only include essential information
128
- - No unnecessary elaboration
129
- - If information is missing, briefly say so
130
 
131
  Answer:"""
132
 
@@ -136,14 +137,14 @@ Answer:"""
136
 
137
  except Exception as e:
138
  print(f"Error generating response: {str(e)}")
139
- return "I'm sorry, I encountered an error while processing your request. Please try again."
140
 
141
  def main():
142
  assistant = LegionMariaAssistant()
143
 
144
  # Initial greeting message
145
  initial_greeting = [
146
- [None, "👋 Hello! I'm your Legion Maria Youth Affairs assistant. I can help you with information about our mission, leadership, projects, and activities. What would you like to know?"]
147
  ]
148
 
149
  # Create mobile-optimized Gradio chat interface
 
87
  def chat_response(self, message, history):
88
  """Two-tier LLM system: Router + Specialist response"""
89
  if not message.strip():
90
+ return "Please ask me something about our Legion Maria Youth Affairs!"
91
 
92
  try:
93
  if not self.data_content:
94
+ return "I don't have that information available right now."
95
 
96
  # Step 1: Router LLM decides which section to use
97
  selected_section = self.route_query(message)
 
114
  conversation_context += "Current conversation:\n"
115
 
116
  # Step 3: Response LLM generates answer using only relevant data
117
+ response_prompt = f"""You are Santa Legion from the Legion Maria Directorate of Youth Affairs. Speak in first person as a member of the organization.
118
 
119
+ Your Knowledge:
120
  {json.dumps(relevant_data, indent=2)}
121
 
122
  {conversation_context}User: {message}
123
 
124
  Guidelines:
125
+ - You are Santa Legion, speak as "I" and "we" (the organization)
126
  - Keep responses SHORT (1-3 sentences maximum)
127
+ - Be direct and personal
128
+ - Never mention being provided documents or data
129
+ - Speak as if this is your natural knowledge
130
+ - Use "our mission", "we believe", "I can help you with"
131
 
132
  Answer:"""
133
 
 
137
 
138
  except Exception as e:
139
  print(f"Error generating response: {str(e)}")
140
+ return "I'm sorry, I'm having trouble right now. Please try again."
141
 
142
  def main():
143
  assistant = LegionMariaAssistant()
144
 
145
  # Initial greeting message
146
  initial_greeting = [
147
+ [None, "👋 Hello! I'm Santa Legion from the Legion Maria Youth Affairs. I'm here to help you learn about our mission, leadership, projects, and activities. What would you like to know?"]
148
  ]
149
 
150
  # Create mobile-optimized Gradio chat interface