Update app.py
app.py CHANGED
@@ -50,24 +50,27 @@ def generate_response(
     ]
     for prompt, answer in history:
         conversation.extend([
-            {"role": "user", "content": prompt},
-            {"role": "assistant", "content": answer},
+            {"role": "user", "content": str(prompt)},
+            {"role": "assistant", "content": str(answer)},
         ])
-    conversation.append({"role": "user", "content": message})
+    conversation.append({"role": "user", "content": str(message)})
 
-
-
-
-
-
-
-
+    try:
+        response = client.chat.completions.create(
+            model="llama-3.1-8B-Instant",
+            messages=conversation,
+            temperature=temperature,
+            max_tokens=max_tokens,
+            stream=True
+        )
 
-
-
-
-
-
+        partial_message = ""
+        for chunk in response:
+            if chunk.choices[0].delta.content is not None:
+                partial_message += chunk.choices[0].delta.content
+                yield partial_message
+    except Exception as e:
+        yield f"An error occurred: {str(e)}"
 
 def clear_conversation():
     return [], None
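
For context, two minimal sketches of how the code in this hunk is typically used. Neither is part of the commit: the `llama-3.1-8B-Instant` model name and the `client.chat.completions.create(..., stream=True)` call suggest the Groq Python SDK (an OpenAI-compatible interface), but the client setup, the full `generate_response` signature, and the Space's UI layout are not shown in this diff.

    # Sketch only, not from the commit: presumed client setup and the chunk shape
    # that the streaming loop above relies on, assuming the Groq Python SDK.
    import os
    from groq import Groq

    client = Groq(api_key=os.environ["GROQ_API_KEY"])  # presumed module-level client in app.py

    response = client.chat.completions.create(
        model="llama-3.1-8B-Instant",   # model name as committed above
        messages=[{"role": "user", "content": "Say hello"}],
        temperature=0.7,
        max_tokens=64,
        stream=True,
    )
    for chunk in response:
        # Each streamed chunk carries a delta; content is None for role/stop chunks,
        # which is why the loop above guards on `is not None` before appending.
        if chunk.choices[0].delta.content is not None:
            print(chunk.choices[0].delta.content, end="")

Likewise, `clear_conversation` returning `([], None)` fits the usual Gradio pattern of a Clear button that empties a `Chatbot` and blanks a second component. A hypothetical wiring (the Space's real components and variable names are not in this hunk) looks like this:

    # Sketch only: hypothetical Gradio wiring; the Space's actual layout is not shown here.
    import gradio as gr

    def clear_conversation():   # same two-line helper as in the hunk above
        return [], None

    with gr.Blocks() as demo:
        chatbot = gr.Chatbot()
        msg = gr.Textbox(placeholder="Type a message...")
        clear_btn = gr.Button("Clear")
        # ([], None) maps onto the two outputs: an empty history for the Chatbot
        # and None to blank the Textbox.
        clear_btn.click(clear_conversation, outputs=[chatbot, msg])

    demo.launch()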