Spaces:
Sleeping
Sleeping
changed messages and str_chunks
Browse files
app.py
CHANGED
|
@@ -115,7 +115,15 @@ print(top_results)
|
|
| 115 |
client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
|
| 116 |
|
| 117 |
def respond(message, history):
|
| 118 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 119 |
|
| 120 |
if history:
|
| 121 |
messages.extend(history)
|
|
|
|
| 115 |
client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
|
| 116 |
|
| 117 |
def respond(message, history):
|
| 118 |
+
best_chunks = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
|
| 119 |
+
print(best_chunks)
|
| 120 |
+
str_chunks = "\n".join(best_chunks)
|
| 121 |
+
messages = [{"role": "system", "content": f"You are a friendly chatbot. Base your response on the provided context: {str_chunks}"},
|
| 122 |
+
{"role": "user",
|
| 123 |
+
"content": (
|
| 124 |
+
f"Context:\n{str_chunks}\n\n"
|
| 125 |
+
f"Question: {message}"
|
| 126 |
+
)}]
|
| 127 |
|
| 128 |
if history:
|
| 129 |
messages.extend(history)
|