Spaces:
Running
Running
Asish Karthikeya Gogineni committed on
Commit ·
3f9d83d
1
Parent(s): d897d25
Fix: Removed duplicate chat method causing HTML leakage
Browse files
code_chatbot/retrieval/rag.py
CHANGED
|
@@ -488,26 +488,6 @@ class ChatEngine:
|
|
| 488 |
|
| 489 |
return messages, sources, context_text
|
| 490 |
|
| 491 |
-
def chat(self, question: str) -> tuple[str, list]:
|
| 492 |
-
"""Blocking chat method."""
|
| 493 |
-
messages, sources, _ = self._prepare_chat_context(question)
|
| 494 |
-
|
| 495 |
-
if not messages:
|
| 496 |
-
return "I don't have any information about this codebase. Please make sure the codebase has been indexed properly.", []
|
| 497 |
-
|
| 498 |
-
# Get response from LLM
|
| 499 |
-
response_msg = self.llm.invoke(messages)
|
| 500 |
-
answer = response_msg.content
|
| 501 |
-
|
| 502 |
-
# Update chat history
|
| 503 |
-
self.chat_history.append(HumanMessage(content=question))
|
| 504 |
-
self.chat_history.append(AIMessage(content=answer))
|
| 505 |
-
|
| 506 |
-
# Keep history manageable (last 20 messages)
|
| 507 |
-
if len(self.chat_history) > 20:
|
| 508 |
-
self.chat_history = self.chat_history[-20:]
|
| 509 |
-
|
| 510 |
-
return answer, sources
|
| 511 |
|
| 512 |
def stream_chat(self, question: str):
|
| 513 |
"""Streaming chat method returning (generator, sources)."""
|
|
@@ -530,7 +510,8 @@ class ChatEngine:
|
|
| 530 |
yield content
|
| 531 |
|
| 532 |
# Update history with AI message after generation
|
| 533 |
-
self.chat_history.append(AIMessage(content=full_response))
|
|
|
|
| 534 |
|
| 535 |
return response_generator(), sources
|
| 536 |
|
|
|
|
| 488 |
|
| 489 |
return messages, sources, context_text
|
| 490 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 491 |
|
| 492 |
def stream_chat(self, question: str):
|
| 493 |
"""Streaming chat method returning (generator, sources)."""
|
|
|
|
| 510 |
yield content
|
| 511 |
|
| 512 |
# Update history with AI message after generation
|
| 513 |
+
clean_full_response = self._clean_response(full_response)
|
| 514 |
+
self.chat_history.append(AIMessage(content=clean_full_response))
|
| 515 |
|
| 516 |
return response_generator(), sources
|
| 517 |
|