Update llm/oai.py
Browse files- llm/oai.py +3 -3
llm/oai.py
CHANGED
|
@@ -160,8 +160,8 @@ class TextChatAtOAI(BaseFnCallModel):
|
|
| 160 |
for chunk in response:
|
| 161 |
if chunk.choices:
|
| 162 |
choice = chunk.choices[0]
|
| 163 |
-
if hasattr(choice.delta, 'reasoning_content') and choice.delta.reasoning_content:
|
| 164 |
-
full_reasoning_content += choice.delta.reasoning_content
|
| 165 |
if hasattr(choice.delta, 'content') and choice.delta.content:
|
| 166 |
full_response += choice.delta.content
|
| 167 |
# 兼容 map agent 模型
|
|
@@ -179,7 +179,7 @@ class TextChatAtOAI(BaseFnCallModel):
|
|
| 179 |
function_json = json.dumps(function_call, ensure_ascii=False)
|
| 180 |
logger.info(json.dumps(function_call, ensure_ascii=False, indent=4))
|
| 181 |
full_response += f'<tool_call>{function_json}</tool_call>'
|
| 182 |
-
yield [Message(role=ASSISTANT, content=full_response, reasoning_content=full_reasoning_content)]
|
| 183 |
logger.info(f'message chunk: {chunk}')
|
| 184 |
except OpenAIError as ex:
|
| 185 |
raise ModelServiceError(exception=ex)
|
|
|
|
| 160 |
for chunk in response:
|
| 161 |
if chunk.choices:
|
| 162 |
choice = chunk.choices[0]
|
| 163 |
+
if hasattr(choice.delta, 'reasoning') and choice.delta.reasoning:
|
| 164 |
+
full_reasoning_content += choice.delta.reasoning
|
| 165 |
if hasattr(choice.delta, 'content') and choice.delta.content:
|
| 166 |
full_response += choice.delta.content
|
| 167 |
# 兼容 map agent 模型
|
|
|
|
| 179 |
function_json = json.dumps(function_call, ensure_ascii=False)
|
| 180 |
logger.info(json.dumps(function_call, ensure_ascii=False, indent=4))
|
| 181 |
full_response += f'<tool_call>{function_json}</tool_call>'
|
| 182 |
+
yield [Message(role=ASSISTANT, content=full_response, reasoning=full_reasoning_content)]
|
| 183 |
logger.info(f'message chunk: {chunk}')
|
| 184 |
except OpenAIError as ex:
|
| 185 |
raise ModelServiceError(exception=ex)
|