Spaces:
Sleeping
Sleeping
update
Browse files
app.py
CHANGED
|
@@ -123,12 +123,12 @@ chunk_embeddings = create_embeddings(cleaned_chunks)
|
|
| 123 |
client= InferenceClient("Qwen/Qwen2.5-7B-Instruct-1M")
|
| 124 |
response=""
|
| 125 |
# Defining the role of the AI and the user
|
|
|
|
|
|
|
| 126 |
def respond(message,history):
|
| 127 |
information = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
|
| 128 |
|
| 129 |
|
| 130 |
-
messages = [{"role": "assistant", "content": f"You are a friendly chatbot that gives advice to disadvantaged students about their education based on their question. When you give advice, keep in mind the following infromation {information}"}]
|
| 131 |
-
|
| 132 |
if history:
|
| 133 |
messages.extend(history) #keep adding history
|
| 134 |
|
|
|
|
| 123 |
client= InferenceClient("Qwen/Qwen2.5-7B-Instruct-1M")
|
| 124 |
response=""
|
| 125 |
# Defining the role of the AI and the user
|
| 126 |
+
messages = [{"role": "assistant", "content": f"You are a friendly chatbot that gives advice to disadvantaged students about their education based on their question. When you give advice, keep in mind the following infromation {information}"}]
|
| 127 |
+
|
| 128 |
def respond(message,history):
|
| 129 |
information = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
|
| 130 |
|
| 131 |
|
|
|
|
|
|
|
| 132 |
if history:
|
| 133 |
messages.extend(history) #keep adding history
|
| 134 |
|