riyamalshe committed on
Commit
b516f6a
·
verified ·
1 Parent(s): cf5db68

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -6
app.py CHANGED
@@ -108,7 +108,7 @@ def respond_cool(message, history):
108
  top_cool_results = get_top_chunks(message, cool_chunk_embeddings, cleaned_cool_chunks) # Complete this line
109
  #str_chunks = "\n".join(best_chunks)
110
 
111
- messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's cool and super chill mom. Base your response on the provided context: {top_cool_results}"},
112
  {"role": "user",
113
  "content": (
114
  f"Context:\n{top_cool_results}\n\n"
@@ -122,7 +122,6 @@ def respond_cool(message, history):
122
 
123
  response = client.chat_completion(
124
  messages,
125
- max_tokens = 100,
126
  temperature = 0.2
127
  )
128
  return response['choices'][0]['message']['content'].strip()
@@ -131,7 +130,7 @@ def respond_tutor(message, history):
131
  top_tutor_results = get_top_chunks(message, tutor_chunk_embeddings, cleaned_tutor_chunks)
132
  #str_chunks = "\n".join(best_chunks)
133
 
134
- messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely studious, tutor-like mom. Base your response on the provided context: {top_tutor_results}"},
135
  {"role": "user",
136
  "content": (
137
  f"Context:\n{top_tutor_results}\n\n"
@@ -145,7 +144,6 @@ def respond_tutor(message, history):
145
 
146
  response = client.chat_completion(
147
  messages,
148
- max_tokens = 100,
149
  temperature = 0.2
150
  )
151
  return response['choices'][0]['message']['content'].strip()
@@ -154,7 +152,7 @@ def respond_strict(message, history):
154
  top_strict_results = get_top_chunks(message, strict_chunk_embeddings, cleaned_strict_chunks)
155
  #str_chunks = "\n".join(best_chunks)
156
 
157
- messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely strict mom. Base your response on the provided context: {top_strict_results}"},
158
  {"role": "user",
159
  "content": (
160
  f"Context:\n{top_strict_results}\n\n"
@@ -168,7 +166,6 @@ def respond_strict(message, history):
168
 
169
  response = client.chat_completion(
170
  messages,
171
- max_tokens = 100,
172
  temperature = 0.2
173
  )
174
  return response['choices'][0]['message']['content'].strip()
 
108
  top_cool_results = get_top_chunks(message, cool_chunk_embeddings, cleaned_cool_chunks) # Complete this line
109
  #str_chunks = "\n".join(best_chunks)
110
 
111
+ messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's cool and super chill mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {top_cool_results}"},
112
  {"role": "user",
113
  "content": (
114
  f"Context:\n{top_cool_results}\n\n"
 
122
 
123
  response = client.chat_completion(
124
  messages,
 
125
  temperature = 0.2
126
  )
127
  return response['choices'][0]['message']['content'].strip()
 
130
  top_tutor_results = get_top_chunks(message, tutor_chunk_embeddings, cleaned_tutor_chunks)
131
  #str_chunks = "\n".join(best_chunks)
132
 
133
+ messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely studious, tutor-like mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {top_tutor_results}"},
134
  {"role": "user",
135
  "content": (
136
  f"Context:\n{top_tutor_results}\n\n"
 
144
 
145
  response = client.chat_completion(
146
  messages,
 
147
  temperature = 0.2
148
  )
149
  return response['choices'][0]['message']['content'].strip()
 
152
  top_strict_results = get_top_chunks(message, strict_chunk_embeddings, cleaned_strict_chunks)
153
  #str_chunks = "\n".join(best_chunks)
154
 
155
+ messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely strict mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {top_strict_results}"},
156
  {"role": "user",
157
  "content": (
158
  f"Context:\n{top_strict_results}\n\n"
 
166
 
167
  response = client.chat_completion(
168
  messages,
 
169
  temperature = 0.2
170
  )
171
  return response['choices'][0]['message']['content'].strip()