Update main.py
main.py
CHANGED
@@ -27,10 +27,12 @@ max_new_tokens = 2048
 def generater(message, history, temperature, top_p, top_k):
     prompt = "<s>"
     for user_message, assistant_message in history:
-        prompt += model.config["systemPrompt"]
+        prompt += model.config["systemPrompt"]
+        prompt += model.config["promptTemplate"].format(user_message)
         prompt += assistant_message + "</s>"
     prompt += model.config["promptTemplate"].format(message)
     print("[prompt]",prompt)
+
     outputs = []
     for token in model.generate(prompt=prompt, temp=temperature, top_k = top_k, top_p = top_p, max_tokens = max_new_tokens, streaming=True):
         outputs.append(token)
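
In effect, the change replays each past user turn from the chat history into the prompt; before it, only the assistant replies were appended. A minimal sketch of what the loop now builds, using made-up systemPrompt/promptTemplate strings and history values rather than the Space's actual model config:

# Hypothetical example of the prompt this loop now assembles. The config
# strings, history, and message below are illustrative stand-ins, not the
# Space's real configuration.
config = {
    "systemPrompt": "You are a helpful assistant.\n",
    "promptTemplate": "[INST] {0} [/INST]",
}
history = [("Hi there", "Hello! How can I help?")]
message = "What is GPT4All?"

prompt = "<s>"
for user_message, assistant_message in history:
    prompt += config["systemPrompt"]
    prompt += config["promptTemplate"].format(user_message)  # the newly added line
    prompt += assistant_message + "</s>"
prompt += config["promptTemplate"].format(message)
print(prompt)
# <s>You are a helpful assistant.
# [INST] Hi there [/INST]Hello! How can I help?</s>[INST] What is GPT4All? [/INST]

Without the added line, the prompt passed to model.generate() would contain the earlier assistant replies but none of the earlier user messages, so the model would lose the conversational context.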