sehsapneb committed on
Commit
0bdfb7a
·
verified ·
1 Parent(s): 1e42396

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -13
app.py CHANGED
@@ -25,9 +25,9 @@ def root():
25
  return jsonify({"message": "欢迎使用 GPT-OSS to OpenAI 格式代理API", "status": "ok"})
26
 
27
  # 辅助函数:创建一个标准的OpenAI流式数据块
28
- def create_openai_chunk(content, model="gpt-oss-120b", custom_id_prefix="chatcmpl"):
29
  return {
30
- "id": f"{custom_id_prefix}-{str(uuid.uuid4())}",
31
  "object": "chat.completion.chunk",
32
  "created": int(time.time()),
33
  "model": model,
@@ -76,22 +76,21 @@ def chat_completions_proxy():
76
  event_type = gpt_oss_data.get('type')
77
  update_type = gpt_oss_data.get('update', {}).get('type')
78
 
79
- # --- 关键改动:模拟思考过程的打字效果 ---
80
  if event_type == 'thread.item_updated' and update_type == 'cot.entry_added':
81
  thought = gpt_oss_data['update']['entry']['content']
82
- # 立即发送前缀
83
- yield f"data: {json.dumps(create_openai_chunk('[思考中] '))}\n\n"
84
- # 逐字流式传输思考内容
85
- for char in thought:
86
- yield f"data: {json.dumps(create_openai_chunk(char))}\n\n"
87
- time.sleep(0.02) # 加入微小延迟,以获得更好的视觉效果
88
  # 发送换行符以分隔
89
- yield f"data: {json.dumps(create_openai_chunk('\\n\\n'))}\n\n"
 
90
 
91
- # 普通文本片段的流式传输(这部分本身就是逐字/逐词的)
92
  if event_type == 'thread.item_updated' and update_type == 'assistant_message.content_part.text_delta':
93
  delta_content = gpt_oss_data['update'].get('delta', '')
94
- yield f"data: {json.dumps(create_openai_chunk(delta_content))}\n\n"
 
95
 
96
  yield "data: [DONE]\n\n"
97
  except IOError as e:
@@ -99,7 +98,7 @@ def chat_completions_proxy():
99
 
100
  return Response(stream_formatter(), mimetype='text/event-stream')
101
 
102
- else: # 非流式请求逻辑保持不变
103
  try:
104
  full_response_content = ""
105
  for gpt_oss_data in _internal_proxy_stream():
 
25
  return jsonify({"message": "欢迎使用 GPT-OSS to OpenAI 格式代理API", "status": "ok"})
26
 
27
  # 辅助函数:创建一个标准的OpenAI流式数据块
28
+ def create_openai_chunk(content, model="gpt-oss-120b"):
29
  return {
30
+ "id": f"chatcmpl-{str(uuid.uuid4())}",
31
  "object": "chat.completion.chunk",
32
  "created": int(time.time()),
33
  "model": model,
 
76
  event_type = gpt_oss_data.get('type')
77
  update_type = gpt_oss_data.get('update', {}).get('type')
78
 
 
79
  if event_type == 'thread.item_updated' and update_type == 'cot.entry_added':
80
  thought = gpt_oss_data['update']['entry']['content']
81
+ formatted_thought = f"[思考中] {thought}"
82
+
83
+ # 为了兼容性,不逐字发送,而是一次性发送整条思考
84
+ thought_chunk = create_openai_chunk(formatted_thought)
85
+ yield f"data: {json.dumps(thought_chunk)}\n\n"
 
86
  # 发送换行符以分隔
87
+ newline_chunk = create_openai_chunk("\n\n")
88
+ yield f"data: {json.dumps(newline_chunk)}\n\n"
89
 
 
90
  if event_type == 'thread.item_updated' and update_type == 'assistant_message.content_part.text_delta':
91
  delta_content = gpt_oss_data['update'].get('delta', '')
92
+ text_chunk = create_openai_chunk(delta_content)
93
+ yield f"data: {json.dumps(text_chunk)}\n\n"
94
 
95
  yield "data: [DONE]\n\n"
96
  except IOError as e:
 
98
 
99
  return Response(stream_formatter(), mimetype='text/event-stream')
100
 
101
+ else:
102
  try:
103
  full_response_content = ""
104
  for gpt_oss_data in _internal_proxy_stream():