Spaces: Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -96,7 +96,7 @@ def get_top_chunks(query, chunk_embeddings, text_chunks):
| 96  |
| 97  |     # Loop through the top indices and retrieve the corresponding text chunks
| 98  |     for i in top_indices:
| 99  | -       top_chunks.append(i)
| 100 |     print(top_chunks)
| 101 |
| 102 |     # Return the list of most relevant chunks
|
@@ -133,7 +133,8 @@ def respond(message, history):
| 133 |     if history:
| 134 |         messages.extend(history)
| 135 |
| 136 | -   messages.append({"role":"user","content":
| 137 |
| 138 |
| 139 |     response = client.chat_completion(messages, max_tokens = 700, temperature = 0.2, top_p = 0.3)
| 96  |
| 97  |     # Loop through the top indices and retrieve the corresponding text chunks
| 98  |     for i in top_indices:
| 99  | +       top_chunks.append(text_chunks[i])
| 100 |     print(top_chunks)
| 101 |
| 102 |     # Return the list of most relevant chunks
| 133 |     if history:
| 134 |         messages.extend(history)
| 135 |
| 136 | +   messages.append({"role":"user","content": message})
| 137 | +
| 138 |
| 139 |
| 140 |     response = client.chat_completion(messages, max_tokens = 700, temperature = 0.2, top_p = 0.3)