Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -19,7 +19,22 @@ generator = pipeline(
|
|
| 19 |
tokenizer=tokenizer,
|
| 20 |
device=device
|
| 21 |
)
|
| 22 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 23 |
def chat_with_model(user_message, chat_history, max_new_tokens=60, temperature=0.8, top_p=0.9):
|
| 24 |
|
| 25 |
if chat_history is None:
|
|
|
|
| 19 |
tokenizer=tokenizer,
|
| 20 |
device=device
|
| 21 |
)
|
| 22 |
+
def clean_reply(text):
    """Normalize a raw model generation into a single clean reply line.

    Strips surrounding whitespace, removes a leading role tag the model may
    have echoed back (e.g. "Assistant:" or "user:"), then returns the first
    non-empty line of what remains — or "" when nothing is left.
    """
    text = text.strip()
    # Drop a leading speaker prefix; each candidate is checked once, in order.
    for prefix in ("Assistant:", "assistant:", "User:", "user:"):
        if text.startswith(prefix):
            text = text[len(prefix):].strip()
    # Keep only non-empty lines; the reply is the first one.
    lines = [line.strip() for line in text.split("\n") if line.strip()]
    return lines[0] if lines else ""
|
| 37 |
+
|
| 38 |
def chat_with_model(user_message, chat_history, max_new_tokens=60, temperature=0.8, top_p=0.9):
|
| 39 |
|
| 40 |
if chat_history is None:
|