Update app.py
Browse files
app.py
CHANGED
|
@@ -44,11 +44,12 @@ def load_spacy_model():
|
|
| 44 |
nlp = load_spacy_model()
|
| 45 |
|
| 46 |
class EnhancedContextDrivenChatbot:
|
| 47 |
-
def __init__(self, history_size=10):
|
| 48 |
self.history = []
|
| 49 |
self.history_size = history_size
|
| 50 |
self.entity_tracker = {}
|
| 51 |
self.conversation_context = ""
|
|
|
|
| 52 |
|
| 53 |
def add_to_history(self, text):
|
| 54 |
self.history.append(text)
|
|
@@ -100,6 +101,9 @@ class EnhancedContextDrivenChatbot:
|
|
| 100 |
return question
|
| 101 |
|
| 102 |
def rephrase_query(self, question):
|
|
|
|
|
|
|
|
|
|
| 103 |
prompt = f"""
|
| 104 |
Given the conversation context and the current question, rephrase the question to include relevant context:
|
| 105 |
|
|
@@ -109,22 +113,18 @@ class EnhancedContextDrivenChatbot:
|
|
| 109 |
Rephrased question:
|
| 110 |
"""
|
| 111 |
|
| 112 |
-
|
| 113 |
-
rephrased_question = generate_chunked_response(model, prompt)
|
| 114 |
|
| 115 |
return rephrased_question.strip()
|
| 116 |
|
| 117 |
def process_question(self, question):
|
| 118 |
contextualized_question = self.get_most_relevant_context(question)
|
| 119 |
|
| 120 |
-
# Extract topics from the question
|
| 121 |
topics = self.extract_topics(question)
|
| 122 |
|
| 123 |
-
# Check if it's a follow-up question and rephrase if necessary
|
| 124 |
if self.is_follow_up_question(question):
|
| 125 |
contextualized_question = self.rephrase_query(contextualized_question)
|
| 126 |
|
| 127 |
-
# Add the new question to history
|
| 128 |
self.add_to_history(question)
|
| 129 |
|
| 130 |
return contextualized_question, topics, self.entity_tracker
|
|
@@ -309,6 +309,10 @@ def ask_question(question, temperature, top_p, repetition_penalty, web_search, c
|
|
| 309 |
return "Please enter a question."
|
| 310 |
|
| 311 |
model = get_model(temperature, top_p, repetition_penalty)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 312 |
embed = get_embeddings()
|
| 313 |
|
| 314 |
if os.path.exists("faiss_database"):
|
|
|
|
| 44 |
nlp = load_spacy_model()
|
| 45 |
|
| 46 |
class EnhancedContextDrivenChatbot:
|
| 47 |
+
def __init__(self, history_size=10, model=None):
    """Set up an empty conversation state.

    Args:
        history_size: Maximum number of past utterances to retain
            (stored for use by the history-management methods).
        model: Optional language model used by ``rephrase_query``;
            when ``None``, query rephrasing is a no-op.
    """
    # Conversation state starts empty; it is populated as questions arrive.
    self.history = []
    self.entity_tracker = {}
    self.conversation_context = ""
    # Configuration supplied by the caller.
    self.history_size = history_size
    self.model = model
|
| 53 |
|
| 54 |
def add_to_history(self, text):
|
| 55 |
self.history.append(text)
|
|
|
|
| 101 |
return question
|
| 102 |
|
| 103 |
def rephrase_query(self, question):
|
| 104 |
+
if not self.model:
|
| 105 |
+
return question # Return original question if no model is available
|
| 106 |
+
|
| 107 |
prompt = f"""
|
| 108 |
Given the conversation context and the current question, rephrase the question to include relevant context:
|
| 109 |
|
|
|
|
| 113 |
Rephrased question:
|
| 114 |
"""
|
| 115 |
|
| 116 |
+
rephrased_question = generate_chunked_response(self.model, prompt)
|
|
|
|
| 117 |
|
| 118 |
return rephrased_question.strip()
|
| 119 |
|
| 120 |
def process_question(self, question):
    """Resolve a question against the conversation so far.

    Pulls the most relevant stored context, extracts the question's
    topics, rewrites the question through the model when it looks like
    a follow-up, and records the raw question in history.

    Args:
        question: The user's raw question text.

    Returns:
        A ``(contextualized_question, topics, entity_tracker)`` tuple.
    """
    resolved = self.get_most_relevant_context(question)
    question_topics = self.extract_topics(question)

    # Follow-ups lean on prior turns, so run them through the rephraser.
    if self.is_follow_up_question(question):
        resolved = self.rephrase_query(resolved)

    # History records the question as the user typed it, not the rewrite.
    self.add_to_history(question)

    return resolved, question_topics, self.entity_tracker
|
|
|
|
| 309 |
return "Please enter a question."
|
| 310 |
|
| 311 |
model = get_model(temperature, top_p, repetition_penalty)
|
| 312 |
+
|
| 313 |
+
# Update the chatbot's model
|
| 314 |
+
chatbot.model = model
|
| 315 |
+
|
| 316 |
embed = get_embeddings()
|
| 317 |
|
| 318 |
if os.path.exists("faiss_database"):
|