RithikaChalam committed on
Commit
92310f5
·
verified ·
1 Parent(s): 0d15756

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -7
app.py CHANGED
@@ -11,7 +11,7 @@ with open("cool_mom_phrases.txt", "r", encoding="utf-8") as file:
11
  # Read the entire contents of the file and store it in a variable
12
  cool_mom_text = file.read()
13
 
14
- '''with open("tutor_mom_phrases.txt", "r", encoding="utf-8") as file:
15
  # Read the entire contents of the file and store it in a variable
16
  tutor_mom_text = file.read()
17
 
@@ -22,7 +22,7 @@ with open("strict_mom_phrases.txt", "r", encoding="utf-8") as file:
22
  with open("study_techniques.txt", "r", encoding="utf-8") as file:
23
  # Read the entire contents of the file and store it in a variable
24
  study_techniques_text = file.read()
25
- '''
26
 
27
  # STEP 3 FROM SEMANTIC SEARCH
28
  def preprocess_text(text):
@@ -46,8 +46,8 @@ def preprocess_text(text):
46
 
47
  # Call the preprocess_text function and store the result in a cleaned_chunks variable
48
  cleaned_cool_chunks = preprocess_text(cool_mom_text) # Complete this line
49
- '''cleaned_tutor_chunks = preprocess_text(tutor_mom_text)
50
- cleaned_strict_chunks = preprocess_text(strict_mom_text)'''
51
 
52
  #STEP 4 FROM SEMANTIC SEARCH
53
  # Load the pre-trained embedding model that converts text to vectors
@@ -62,8 +62,8 @@ def create_embeddings(text_chunks):
62
 
63
  # Call the create_embeddings function and store the result in a new chunk_embeddings variable
64
  cool_chunk_embeddings = create_embeddings(cleaned_cool_chunks) # Complete this line
65
- '''tutor_chunk_embeddings = create_embeddings(cleaned_tutor_chunks)
66
- strict_chunk_embeddings = create_embeddings(cleaned_strict_chunks)'''
67
 
68
  #STEP 5 FROM SEMANTIC SEARCH
69
  # Define a function to find the most relevant text chunks for a given query, chunk_embeddings, and text_chunks
@@ -116,7 +116,13 @@ def respond(messages, history, mom_type) :
116
  top_results = get_top_chunks(message, tutor_chunk_embeddings, cleaned_tutor_chunks)
117
  elif mom_type == "Strict Mom":
118
  top_results = get_top_chunks(message, strict_chunk_embeddings, cleaned_strict_chunks)
119
-
 
 
 
 
 
 
120
  if history:
121
  messages.extend(history)
122
 
 
11
  # Read the entire contents of the file and store it in a variable
12
  cool_mom_text = file.read()
13
 
14
+ with open("tutor_mom_phrases.txt", "r", encoding="utf-8") as file:
15
  # Read the entire contents of the file and store it in a variable
16
  tutor_mom_text = file.read()
17
 
 
22
  with open("study_techniques.txt", "r", encoding="utf-8") as file:
23
  # Read the entire contents of the file and store it in a variable
24
  study_techniques_text = file.read()
25
+
26
 
27
  # STEP 3 FROM SEMANTIC SEARCH
28
  def preprocess_text(text):
 
46
 
47
  # Call the preprocess_text function and store the result in a cleaned_chunks variable
48
  cleaned_cool_chunks = preprocess_text(cool_mom_text) # Complete this line
49
+ cleaned_tutor_chunks = preprocess_text(tutor_mom_text)
50
+ cleaned_strict_chunks = preprocess_text(strict_mom_text)
51
 
52
  #STEP 4 FROM SEMANTIC SEARCH
53
  # Load the pre-trained embedding model that converts text to vectors
 
62
 
63
  # Call the create_embeddings function and store the result in a new chunk_embeddings variable
64
  cool_chunk_embeddings = create_embeddings(cleaned_cool_chunks) # Complete this line
65
+ tutor_chunk_embeddings = create_embeddings(cleaned_tutor_chunks)
66
+ strict_chunk_embeddings = create_embeddings(cleaned_strict_chunks)
67
 
68
  #STEP 5 FROM SEMANTIC SEARCH
69
  # Define a function to find the most relevant text chunks for a given query, chunk_embeddings, and text_chunks
 
116
  top_results = get_top_chunks(message, tutor_chunk_embeddings, cleaned_tutor_chunks)
117
  elif mom_type == "Strict Mom":
118
  top_results = get_top_chunks(message, strict_chunk_embeddings, cleaned_strict_chunks)
119
+
120
+ messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely studious, tutor-like mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {mom_type}"},
121
+ {"role": "user",
122
+ "content": (
123
+ f"Context:\n{top_tutor_results}\n\n"
124
+ f"Question{message}"
125
+ )}]
126
  if history:
127
  messages.extend(history)
128