Spaces:
Runtime error
Runtime error
Update chat.py
Browse files
chat.py
CHANGED
|
@@ -118,17 +118,25 @@ async def post_message(
|
|
| 118 |
|
| 119 |
async def stream_generator():
|
| 120 |
full_response = ""
|
| 121 |
-
# Pass messages list as
|
| 122 |
for chunk in llm.stream(messages):
|
| 123 |
-
#
|
| 124 |
-
content = (
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
|
| 130 |
-
|
| 131 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 132 |
history.add_ai_message(full_response)
|
| 133 |
|
| 134 |
return StreamingResponse(stream_generator(), media_type="text/plain")
|
|
|
|
| 118 |
|
| 119 |
async def stream_generator():
    """Stream LLM text deltas to the client, then persist the full reply.

    Yields:
        str: each non-empty content delta as it arrives from the model.

    Side effects:
        After the stream is exhausted, the accumulated response is saved
        to the conversation history via ``history.add_ai_message``.
    """
    full_response = ""
    # Use the async streaming API: ``llm.stream()`` is synchronous, and a
    # plain ``for`` over it inside an ``async def`` blocks the event loop
    # for every other request while tokens arrive. LangChain chat models
    # expose ``astream`` as the awaitable counterpart.
    # Pass messages list as positional 'input' to .astream()
    async for chunk in llm.astream(messages):
        # 1) Try AIMessageChunk.content (LangChain message chunks).
        content = getattr(chunk, "content", None)
        # 2) Fallback to dict-based chunk (OpenAI-style delta payloads).
        if content is None and isinstance(chunk, dict):
            content = (
                chunk.get("content")
                or chunk.get("choices", [{}])[0]
                .get("delta", {})
                .get("content")
            )
        # Skip empty/None deltas (e.g. role-only or terminal chunks).
        if not content:
            continue
        # Yield to the client and accumulate for the history record.
        yield content
        full_response += content

    # Save final AI message once streaming completes.
    history.add_ai_message(full_response)
|
| 141 |
|
| 142 |
return StreamingResponse(stream_generator(), media_type="text/plain")
|