RithikaChalam committed on
Commit
0d15756
·
verified ·
1 Parent(s): afd0487

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -25
app.py CHANGED
@@ -108,32 +108,15 @@ def get_top_chunks(query, chunk_embeddings, text_chunks):
108
 
109
  client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
110
 
111
- def respond(message, history, mom_type):
112
- top_cool_results = get_top_chunks(message, cool_chunk_embeddings, cleaned_cool_chunks) # Complete this line
113
- top_tutor_results = get_top_chunks(message, tutor_chunk_embeddings, cleaned_tutor_chunks)
114
- top_strict_results = get_top_chunks(message, strict_chunk_embeddings, cleaned_strict_chunks)
115
- #str_chunks = "\n".join(best_chunks)
116
- if mom_type == "Cool Mom":
117
- messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's cool and super chill mom. Respond in full sentences, don't cut yourself off. Use responses from this text file: {top_cool_results} and respond very kindly. Do not be mean or strict at all"},
118
- {"role": "user",
119
- "content": (
120
- f"Context:\n{top_cool_results}\n\n"
121
- f"Question{message}"
122
- )}]
123
- elif mom_type == "Tutor Mom":
124
- messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely studious, tutor-like mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {mom_type}"},
125
- {"role": "user",
126
- "content": (
127
- f"Context:\n{top_tutor_results}\n\n"
128
- f"Question{message}"
129
- )}]
130
  elif mom_type == "Strict Mom":
131
- messages = [{"role": "system", "content": f"You are chatbot that plays the role of the user's extremely strict mom. Respond in full sentences, don't cut yourself off. Base your response on the provided context: {top_strict_results}"},
132
- {"role": "user",
133
- "content": (
134
- f"Context:\n{top_strict_results}\n\n"
135
- f"Question{message}"
136
- )}]
137
  if history:
138
  messages.extend(history)
139
 
 
108
 
109
  client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
110
 
111
+ def respond(messages, history, mom_type) :
112
+ mom_type = []
113
+ if mom_type == "Cool Mom" :
114
+ top_results = get_top_chunks(message, cool_chunk_embeddings, cleaned_cool_chunks)
115
+ elif mom_type == "Tutor Mom" :
116
+ top_results = get_top_chunks(message, tutor_chunk_embeddings, cleaned_tutor_chunks)
 
 
 
 
 
 
 
 
 
 
 
 
 
117
  elif mom_type == "Strict Mom":
118
+ top_results = get_top_chunks(message, strict_chunk_embeddings, cleaned_strict_chunks)
119
+
 
 
 
 
120
  if history:
121
  messages.extend(history)
122