Spaces:
Paused
Paused
💎 [Feature] Support OpenAI streaming response format
Browse files
- apis/chat_api.py +2 -1
- conversations/conversation_connector.py +4 -0
- networks/message_outputer.py +56 -7
- networks/message_parser.py +7 -3
apis/chat_api.py
CHANGED
|
@@ -150,7 +150,8 @@ class ChatAPIApp:
|
|
| 150 |
prompt = item.messages[-1]["content"]
|
| 151 |
|
| 152 |
return StreamingResponse(
|
| 153 |
-
connector.stream_chat(prompt=prompt, yield_output=True)
|
|
|
|
| 154 |
)
|
| 155 |
|
| 156 |
def setup_routes(self):
|
|
|
|
| 150 |
prompt = item.messages[-1]["content"]
|
| 151 |
|
| 152 |
return StreamingResponse(
|
| 153 |
+
connector.stream_chat(prompt=prompt, yield_output=True),
|
| 154 |
+
media_type="text/plain",
|
| 155 |
)
|
| 156 |
|
| 157 |
def setup_routes(self):
|
conversations/conversation_connector.py
CHANGED
|
@@ -81,6 +81,10 @@ class ConversationConnector:
|
|
| 81 |
await self.connect()
|
| 82 |
await self.send_chathub_request(prompt)
|
| 83 |
message_parser = MessageParser(outputer=OpenaiStreamOutputer())
|
|
|
|
|
|
|
|
|
|
|
|
|
| 84 |
while not self.wss.closed:
|
| 85 |
response_lines_str = await self.wss.receive_str()
|
| 86 |
if isinstance(response_lines_str, str):
|
|
|
|
| 81 |
await self.connect()
|
| 82 |
await self.send_chathub_request(prompt)
|
| 83 |
message_parser = MessageParser(outputer=OpenaiStreamOutputer())
|
| 84 |
+
has_output_role_message = False
|
| 85 |
+
if yield_output and not has_output_role_message:
|
| 86 |
+
has_output_role_message = True
|
| 87 |
+
yield message_parser.outputer.output(content="", content_type="Role")
|
| 88 |
while not self.wss.closed:
|
| 89 |
response_lines_str = await self.wss.receive_str()
|
| 90 |
if isinstance(response_lines_str, str):
|
networks/message_outputer.py
CHANGED
|
@@ -2,13 +2,62 @@ import json
|
|
| 2 |
|
| 3 |
|
| 4 |
class OpenaiStreamOutputer:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
def output(self, content=None, content_type=None) -> bytes:
|
| 6 |
-
|
| 7 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
{
|
| 9 |
-
"
|
| 10 |
-
"
|
|
|
|
| 11 |
}
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
).encode("utf-8")
|
|
|
|
| 2 |
|
| 3 |
|
| 4 |
class OpenaiStreamOutputer:
|
| 5 |
+
"""
|
| 6 |
+
Create chat completion - OpenAI API Documentation
|
| 7 |
+
* https://platform.openai.com/docs/api-reference/chat/create
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
def data_to_string(self, data, content_type=""):
|
| 11 |
+
# return (json.dumps(data) + "\n").encode("utf-8")
|
| 12 |
+
data_str = f"data: {json.dumps(data)}\n"
|
| 13 |
+
if content_type == "Finished":
|
| 14 |
+
data_str += "data: [DONE]\n"
|
| 15 |
+
return data_str
|
| 16 |
+
|
| 17 |
def output(self, content=None, content_type=None) -> bytes:
|
| 18 |
+
data = {
|
| 19 |
+
"created": 1677825464,
|
| 20 |
+
"id": "chatcmpl-bing",
|
| 21 |
+
"object": "chat.completion.chunk",
|
| 22 |
+
# "content_type": content_type,
|
| 23 |
+
"model": "bing",
|
| 24 |
+
"choices": [],
|
| 25 |
+
}
|
| 26 |
+
if content_type == "Role":
|
| 27 |
+
data["choices"] = [
|
| 28 |
+
{
|
| 29 |
+
"index": 0,
|
| 30 |
+
"delta": {"role": "assistant"},
|
| 31 |
+
"finish_reason": None,
|
| 32 |
+
}
|
| 33 |
+
]
|
| 34 |
+
elif content_type in [
|
| 35 |
+
"Completions",
|
| 36 |
+
"InternalSearchQuery",
|
| 37 |
+
"InternalSearchResult",
|
| 38 |
+
"SuggestedResponses",
|
| 39 |
+
]:
|
| 40 |
+
data["choices"] = [
|
| 41 |
+
{
|
| 42 |
+
"index": 0,
|
| 43 |
+
"delta": {"content": content},
|
| 44 |
+
"finish_reason": None,
|
| 45 |
+
}
|
| 46 |
+
]
|
| 47 |
+
elif content_type == "Finished":
|
| 48 |
+
data["choices"] = [
|
| 49 |
+
{
|
| 50 |
+
"index": 0,
|
| 51 |
+
"delta": {},
|
| 52 |
+
"finish_reason": "stop",
|
| 53 |
+
}
|
| 54 |
+
]
|
| 55 |
+
else:
|
| 56 |
+
data["choices"] = [
|
| 57 |
{
|
| 58 |
+
"index": 0,
|
| 59 |
+
"delta": {"content": ""},
|
| 60 |
+
"finish_reason": None,
|
| 61 |
}
|
| 62 |
+
]
|
| 63 |
+
return self.data_to_string(data, content_type)
|
|
|
networks/message_parser.py
CHANGED
|
@@ -37,8 +37,12 @@ class MessageParser:
|
|
| 37 |
delta_content, content_type="Completions"
|
| 38 |
)
|
| 39 |
if message.get("suggestedResponses"):
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
output_bytes += self.outputer.output(
|
| 41 |
-
|
| 42 |
content_type="SuggestedResponses",
|
| 43 |
)
|
| 44 |
return output_bytes
|
|
@@ -46,7 +50,7 @@ class MessageParser:
|
|
| 46 |
# Message: Search Query
|
| 47 |
elif message_type in ["InternalSearchQuery"]:
|
| 48 |
message_hidden_text = message["hiddenText"]
|
| 49 |
-
search_str = f"\n[Searching: [{message_hidden_text}]]"
|
| 50 |
logger.note(search_str)
|
| 51 |
if return_output:
|
| 52 |
return self.outputer.output(
|
|
@@ -54,7 +58,7 @@ class MessageParser:
|
|
| 54 |
)
|
| 55 |
# Message: Internal Search Results
|
| 56 |
elif message_type in ["InternalSearchResult"]:
|
| 57 |
-
analysis_str = f"\n[Analyzing search results ...]"
|
| 58 |
logger.note(analysis_str)
|
| 59 |
if return_output:
|
| 60 |
return self.outputer.output(
|
|
|
|
| 37 |
delta_content, content_type="Completions"
|
| 38 |
)
|
| 39 |
if message.get("suggestedResponses"):
|
| 40 |
+
suggestion_texts_str = "\nSuggested Questions:\n"
|
| 41 |
+
suggestion_texts_str += "\n".join(
|
| 42 |
+
f"- {item}" for item in suggestion_texts
|
| 43 |
+
)
|
| 44 |
output_bytes += self.outputer.output(
|
| 45 |
+
suggestion_texts_str,
|
| 46 |
content_type="SuggestedResponses",
|
| 47 |
)
|
| 48 |
return output_bytes
|
|
|
|
| 50 |
# Message: Search Query
|
| 51 |
elif message_type in ["InternalSearchQuery"]:
|
| 52 |
message_hidden_text = message["hiddenText"]
|
| 53 |
+
search_str = f"\n[Searching: [{message_hidden_text}]]\n"
|
| 54 |
logger.note(search_str)
|
| 55 |
if return_output:
|
| 56 |
return self.outputer.output(
|
|
|
|
| 58 |
)
|
| 59 |
# Message: Internal Search Results
|
| 60 |
elif message_type in ["InternalSearchResult"]:
|
| 61 |
+
analysis_str = f"\n[Analyzing search results ...]\n"
|
| 62 |
logger.note(analysis_str)
|
| 63 |
if return_output:
|
| 64 |
return self.outputer.output(
|