KURUPRASATH-J committed on
Commit
4aca28f
·
verified ·
1 Parent(s): 289e83d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +56 -53
app.py CHANGED
@@ -101,20 +101,19 @@ class ChatbotWithMemoryAndRAG:
101
  self.vectorstore = None
102
  self.chat_history = []
103
  self.memory = {}
104
- # NEWLY ADDED: Simple user information storage
105
  self.user_name = None
106
- self.user_details = {}
107
  self.session_id = str(uuid.uuid4())
108
  self.last_rate_limit = None
109
  self.consecutive_rate_limits = 0
110
  self.prompts = juno_prompts
111
  logging.info(f"🤖 Juno AI initialized with session ID: {self.session_id}")
112
 
113
- def _extract_name_from_message(self, user_message, bot_response):
114
  """Simple name extraction from user messages"""
115
  message_lower = user_message.lower()
116
 
117
- # Pattern 1: "i am [name]" or "I'm [name]"
118
  patterns = [
119
  r"i am ([a-zA-Z]+)",
120
  r"i'm ([a-zA-Z]+)",
@@ -128,12 +127,11 @@ class ChatbotWithMemoryAndRAG:
128
  if match:
129
  name = match.group(1).capitalize()
130
  self.user_name = name
131
- self.user_details["name"] = name
132
- logging.info(f"Extracted user name: {name}")
133
  return name
134
  return None
135
 
136
- def _check_for_name_query(self, user_message):
137
  """Check if user is asking about their name"""
138
  message_lower = user_message.lower()
139
  name_queries = [
@@ -190,15 +188,7 @@ class ChatbotWithMemoryAndRAG:
190
  logging.warning(f"Generating fallback response for message: '{user_message[:50]}...'")
191
  fallback_templates = get_fallback_responses()
192
  template = random.choice(fallback_templates)
193
-
194
- # NEWLY ADDED: Include user name in fallback if available
195
- if self.user_name and self._check_for_name_query(user_message):
196
- response = f"Your name is {self.user_name}! I remember when you told me."
197
- elif self.user_name:
198
- response = template.format(user_message_preview=user_message[:50]).replace("Your", f"{self.user_name}'s")
199
- else:
200
- response = template.format(user_message_preview=user_message[:50])
201
-
202
  self.chat_history.append({"user": user_message, "bot": response, "timestamp": datetime.now().isoformat(), "fallback": True})
203
  return response
204
 
@@ -276,8 +266,11 @@ class ChatbotWithMemoryAndRAG:
276
 
277
  def generate_response(self, user_message, context=""):
278
  """Generate response using Juno AI prompts"""
279
- # NEWLY ADDED: Check for name queries first
280
- if self._check_for_name_query(user_message):
 
 
 
281
  if self.user_name:
282
  response = f"Your name is {self.user_name}! I remember when you told me."
283
  self.chat_history.append({"user": user_message, "bot": response, "timestamp": datetime.now().isoformat()})
@@ -295,21 +288,17 @@ class ChatbotWithMemoryAndRAG:
295
  if not exchange.get('fallback', False):
296
  conversation_history.append({'user': exchange['user'], 'bot': exchange['bot'], 'timestamp': exchange.get('timestamp', '')})
297
 
298
- # NEWLY ADDED: Include user name in prompt context
299
- user_context = ""
300
  if self.user_name:
301
- user_context = f"\nUSER NAME: {self.user_name} (remember to address them personally)"
302
 
303
- base_prompt = self.prompts.get_conversation_prompt(user_message=user_message, context=context, conversation_history=conversation_history, memory_context=self.memory)
304
- prompt = base_prompt + user_context
305
- return model.generate_content(prompt).text
306
 
307
  try:
308
  bot_response = self._retry_with_backoff(_generate)
309
  self.chat_history.append({"user": user_message, "bot": bot_response, "timestamp": datetime.now().isoformat()})
310
  self.update_memory(user_message, bot_response)
311
- # NEWLY ADDED: Extract name from conversation
312
- self._extract_name_from_message(user_message, bot_response)
313
  return bot_response
314
  except (ResourceExhausted, GoogleAPIError):
315
  return self._fallback_response(user_message)
@@ -361,8 +350,11 @@ class ChatbotWithMemoryAndRAG:
361
 
362
  def generate_rag_response(self, user_query, context, sources=None):
363
  """Generate RAG response using Juno AI prompts"""
364
- # NEWLY ADDED: Check for name queries first
365
- if self._check_for_name_query(user_query):
 
 
 
366
  if self.user_name:
367
  return f"Your name is {self.user_name}! I remember when you told me."
368
  else:
@@ -372,20 +364,14 @@ class ChatbotWithMemoryAndRAG:
372
  model = genai.GenerativeModel(GENERATIVE_MODEL)
373
  context_chunks = [context[i:i+2000] for i in range(0, len(context), 2000)]
374
 
375
- # NEWLY ADDED: Include user name in RAG prompt
376
- user_context = ""
377
  if self.user_name:
378
- user_context = f"\nUSER NAME: {self.user_name} (address them personally)"
379
 
380
- base_prompt = self.prompts.get_rag_response_prompt(user_query=user_query, retrieved_chunks=context_chunks[:3], source_info=sources)
381
- prompt = base_prompt + user_context
382
- return model.generate_content(prompt).text
383
 
384
  try:
385
- response = self._retry_with_backoff(_generate_rag)
386
- # NEWLY ADDED: Extract name from RAG conversations too
387
- self._extract_name_from_message(user_query, response)
388
- return response
389
  except (ResourceExhausted, GoogleAPIError):
390
  return self._fallback_response(user_query)
391
  except Exception as e:
@@ -434,8 +420,11 @@ class ChatbotWithMemoryAndRAG:
434
 
435
  def generate_streaming_response(self, user_message, context=""):
436
  """Generate streaming response using Juno AI prompts"""
437
- # NEWLY ADDED: Check for name queries first
438
- if self._check_for_name_query(user_message):
 
 
 
439
  if self.user_name:
440
  return f"Your name is {self.user_name}! I remember when you told me."
441
  else:
@@ -444,14 +433,11 @@ class ChatbotWithMemoryAndRAG:
444
  def _generate_stream():
445
  model = genai.GenerativeModel(GENERATIVE_MODEL)
446
 
447
- # NEWLY ADDED: Include user name in streaming prompt
448
- user_context = ""
449
  if self.user_name:
450
- user_context = f"\nUSER NAME: {self.user_name} (address them personally)"
451
 
452
- base_prompt = self.prompts.get_streaming_response_prompt(user_message, context)
453
- prompt = base_prompt + user_context
454
- return model.generate_content(prompt, stream=True)
455
 
456
  try:
457
  return self._retry_with_backoff(_generate_stream, max_retries=3, base_delay=1)
@@ -554,8 +540,7 @@ def summarize_document():
554
  def get_memory():
555
  return jsonify({
556
  'memory': chatbot.memory,
557
- 'user_name': chatbot.user_name, # NEWLY ADDED: Include user name
558
- 'user_details': chatbot.user_details, # NEWLY ADDED: Include user details
559
  'chat_history_length': len(chatbot.chat_history),
560
  'has_vectorstore': chatbot.vectorstore is not None,
561
  'session_id': chatbot.session_id
@@ -607,11 +592,31 @@ def chat_stream():
607
  if not user_message:
608
  return jsonify({'error': 'No message provided'}), 400
609
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
610
  context = chatbot.retrieve_relevant_context(user_message)
611
  streaming_response = chatbot.generate_streaming_response(user_message, context)
612
 
613
- if streaming_response is None:
614
- if context:
 
 
 
615
  bot_response = chatbot.generate_rag_response(user_message, context)
616
  else:
617
  bot_response = chatbot.generate_response(user_message, context)
@@ -642,8 +647,6 @@ def chat_stream():
642
 
643
  chatbot.chat_history.append({"user": user_message, "bot": full_response, "timestamp": datetime.now().isoformat()})
644
  chatbot.update_memory(user_message, full_response)
645
- # NEWLY ADDED: Extract name from streaming responses
646
- chatbot._extract_name_from_message(user_message, full_response)
647
 
648
  return jsonify({
649
  'response': full_response,
@@ -749,6 +752,6 @@ if __name__ == '__main__':
749
  logging.info("🚀 Starting Juno AI Server...")
750
  logging.info("🤖 Advanced AI Assistant with Document Processing, Web Scraping, and Memory")
751
  logging.info("🌟 Powered by Juno AI Prompts System")
752
- logging.info("🧠 Enhanced with User Name Memory")
753
  port = int(os.environ.get("PORT", 7860))
754
  app.run(debug=False, host='0.0.0.0', port=port)
 
101
  self.vectorstore = None
102
  self.chat_history = []
103
  self.memory = {}
104
+ # NEWLY ADDED: Simple user name storage
105
  self.user_name = None
 
106
  self.session_id = str(uuid.uuid4())
107
  self.last_rate_limit = None
108
  self.consecutive_rate_limits = 0
109
  self.prompts = juno_prompts
110
  logging.info(f"🤖 Juno AI initialized with session ID: {self.session_id}")
111
 
112
+ def extract_name_from_message(self, user_message):
113
  """Simple name extraction from user messages"""
114
  message_lower = user_message.lower()
115
 
116
+ # Pattern matching for name extraction
117
  patterns = [
118
  r"i am ([a-zA-Z]+)",
119
  r"i'm ([a-zA-Z]+)",
 
127
  if match:
128
  name = match.group(1).capitalize()
129
  self.user_name = name
130
+ logging.info(f"Extracted and stored user name: {name}")
 
131
  return name
132
  return None
133
 
134
+ def check_for_name_query(self, user_message):
135
  """Check if user is asking about their name"""
136
  message_lower = user_message.lower()
137
  name_queries = [
 
188
  logging.warning(f"Generating fallback response for message: '{user_message[:50]}...'")
189
  fallback_templates = get_fallback_responses()
190
  template = random.choice(fallback_templates)
191
+ response = template.format(user_message_preview=user_message[:50])
 
 
 
 
 
 
 
 
192
  self.chat_history.append({"user": user_message, "bot": response, "timestamp": datetime.now().isoformat(), "fallback": True})
193
  return response
194
 
 
266
 
267
  def generate_response(self, user_message, context=""):
268
  """Generate response using Juno AI prompts"""
269
+ # NEWLY ADDED: Extract name from message first
270
+ self.extract_name_from_message(user_message)
271
+
272
+ # NEWLY ADDED: Handle name queries directly
273
+ if self.check_for_name_query(user_message):
274
  if self.user_name:
275
  response = f"Your name is {self.user_name}! I remember when you told me."
276
  self.chat_history.append({"user": user_message, "bot": response, "timestamp": datetime.now().isoformat()})
 
288
  if not exchange.get('fallback', False):
289
  conversation_history.append({'user': exchange['user'], 'bot': exchange['bot'], 'timestamp': exchange.get('timestamp', '')})
290
 
291
+ # NEWLY ADDED: Include user name in prompt
292
+ base_prompt = self.prompts.get_conversation_prompt(user_message=user_message, context=context, conversation_history=conversation_history, memory_context=self.memory)
293
  if self.user_name:
294
+ base_prompt += f"\n\nIMPORTANT: The user's name is {self.user_name}. Address them personally when appropriate."
295
 
296
+ return model.generate_content(base_prompt).text
 
 
297
 
298
  try:
299
  bot_response = self._retry_with_backoff(_generate)
300
  self.chat_history.append({"user": user_message, "bot": bot_response, "timestamp": datetime.now().isoformat()})
301
  self.update_memory(user_message, bot_response)
 
 
302
  return bot_response
303
  except (ResourceExhausted, GoogleAPIError):
304
  return self._fallback_response(user_message)
 
350
 
351
  def generate_rag_response(self, user_query, context, sources=None):
352
  """Generate RAG response using Juno AI prompts"""
353
+ # NEWLY ADDED: Extract name from query first
354
+ self.extract_name_from_message(user_query)
355
+
356
+ # NEWLY ADDED: Handle name queries directly
357
+ if self.check_for_name_query(user_query):
358
  if self.user_name:
359
  return f"Your name is {self.user_name}! I remember when you told me."
360
  else:
 
364
  model = genai.GenerativeModel(GENERATIVE_MODEL)
365
  context_chunks = [context[i:i+2000] for i in range(0, len(context), 2000)]
366
 
367
+ base_prompt = self.prompts.get_rag_response_prompt(user_query=user_query, retrieved_chunks=context_chunks[:3], source_info=sources)
 
368
  if self.user_name:
369
+ base_prompt += f"\n\nIMPORTANT: The user's name is {self.user_name}. Address them personally when appropriate."
370
 
371
+ return model.generate_content(base_prompt).text
 
 
372
 
373
  try:
374
+ return self._retry_with_backoff(_generate_rag)
 
 
 
375
  except (ResourceExhausted, GoogleAPIError):
376
  return self._fallback_response(user_query)
377
  except Exception as e:
 
420
 
421
  def generate_streaming_response(self, user_message, context=""):
422
  """Generate streaming response using Juno AI prompts"""
423
+ # NEWLY ADDED: Extract name from message first
424
+ self.extract_name_from_message(user_message)
425
+
426
+ # NEWLY ADDED: Handle name queries directly - NO STREAMING for simple responses
427
+ if self.check_for_name_query(user_message):
428
  if self.user_name:
429
  return f"Your name is {self.user_name}! I remember when you told me."
430
  else:
 
433
  def _generate_stream():
434
  model = genai.GenerativeModel(GENERATIVE_MODEL)
435
 
436
+ base_prompt = self.prompts.get_streaming_response_prompt(user_message, context)
 
437
  if self.user_name:
438
+ base_prompt += f"\n\nIMPORTANT: The user's name is {self.user_name}. Address them personally when appropriate."
439
 
440
+ return model.generate_content(base_prompt, stream=True)
 
 
441
 
442
  try:
443
  return self._retry_with_backoff(_generate_stream, max_retries=3, base_delay=1)
 
540
  def get_memory():
541
  return jsonify({
542
  'memory': chatbot.memory,
543
+ 'user_name': chatbot.user_name, # NEWLY ADDED: Show stored user name
 
544
  'chat_history_length': len(chatbot.chat_history),
545
  'has_vectorstore': chatbot.vectorstore is not None,
546
  'session_id': chatbot.session_id
 
592
  if not user_message:
593
  return jsonify({'error': 'No message provided'}), 400
594
 
595
+ # NEWLY ADDED: Handle name queries without streaming to avoid errors
596
+ if chatbot.check_for_name_query(user_message):
597
+ chatbot.extract_name_from_message(user_message)
598
+ if chatbot.user_name:
599
+ bot_response = f"Your name is {chatbot.user_name}! I remember when you told me."
600
+ else:
601
+ bot_response = "I don't know your name yet. Would you like to tell me what it is?"
602
+
603
+ chatbot.chat_history.append({"user": user_message, "bot": bot_response, "timestamp": datetime.now().isoformat()})
604
+
605
+ return jsonify({
606
+ 'response': bot_response,
607
+ 'has_context': False,
608
+ 'session_id': chatbot.session_id,
609
+ 'streaming': False
610
+ })
611
+
612
  context = chatbot.retrieve_relevant_context(user_message)
613
  streaming_response = chatbot.generate_streaming_response(user_message, context)
614
 
615
+ if streaming_response is None or isinstance(streaming_response, str):
616
+ # Handle non-streaming response
617
+ if isinstance(streaming_response, str):
618
+ bot_response = streaming_response
619
+ elif context:
620
  bot_response = chatbot.generate_rag_response(user_message, context)
621
  else:
622
  bot_response = chatbot.generate_response(user_message, context)
 
647
 
648
  chatbot.chat_history.append({"user": user_message, "bot": full_response, "timestamp": datetime.now().isoformat()})
649
  chatbot.update_memory(user_message, full_response)
 
 
650
 
651
  return jsonify({
652
  'response': full_response,
 
752
  logging.info("🚀 Starting Juno AI Server...")
753
  logging.info("🤖 Advanced AI Assistant with Document Processing, Web Scraping, and Memory")
754
  logging.info("🌟 Powered by Juno AI Prompts System")
755
+ logging.info("🧠 Enhanced with Name Memory (Streaming-Safe)")
756
  port = int(os.environ.get("PORT", 7860))
757
  app.run(debug=False, host='0.0.0.0', port=port)