Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -104,14 +104,14 @@ def get_top_chunks(query, chunk_embeddings, text_chunks):
|
|
| 104 |
client = InferenceClient('Qwen/Qwen2.5-7B-Instruct-1M')
|
| 105 |
def respond(message, history):
|
| 106 |
info = get_top_chunks(message, chunk_embeddings, cleaned_chunks)
|
| 107 |
-
messages = [{'role': 'system', 'content': f'You are a friendly chatbot using {info} to answer questions'}]
|
| 108 |
|
| 109 |
if history:
|
| 110 |
messages.extend(history)
|
| 111 |
|
| 112 |
messages.append({"role": "user", "content": message})
|
| 113 |
|
| 114 |
-
response = client.chat_completion(messages, max_tokens =
|
| 115 |
|
| 116 |
return response["choices"][0]["message"]["content"].strip()
|
| 117 |
def echo(message, history):
|
|
|
|
# Inference client for the hosted Qwen chat model used by respond().
client = InferenceClient('Qwen/Qwen2.5-7B-Instruct-1M')

def respond(message, history):
    """Generate a chat reply grounded in retrieved context chunks.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list | None
        Prior conversation turns as role/content dicts (OpenAI-style
        message format) — presumably supplied by a Gradio ChatInterface;
        may be empty or None on the first turn.

    Returns
    -------
    str
        The assistant's reply text with surrounding whitespace stripped.
    """
    # Retrieve the chunks most relevant to this query to ground the answer.
    # NOTE(review): get_top_chunks, chunk_embeddings and cleaned_chunks are
    # defined elsewhere in this file — confirm they are in scope here.
    info = get_top_chunks(message, chunk_embeddings, cleaned_chunks)

    # The system prompt embeds the retrieved chunks directly via the f-string.
    messages = [{'role': 'system', 'content': f'You are a friendly chatbot using {info} to answer questions. You love creating playlists and will give at least 5 songs as a response'}]

    # Carry earlier turns forward so the model sees the full conversation.
    if history:
        messages.extend(history)

    messages.append({"role": "user", "content": message})

    # Cap the reply length; 200 tokens keeps responses concise.
    response = client.chat_completion(messages, max_tokens=200)

    # chat_completion returns an OpenAI-style payload; take the first choice.
    return response["choices"][0]["message"]["content"].strip()
|
| 117 |
def echo(message, history):
|