bluewinliang committed on
Commit
6deff8b
·
verified ·
1 Parent(s): 6d2594a

Upload proxy_handler.py

Browse files
Files changed (1) hide show
  1. proxy_handler.py +7 -9
proxy_handler.py CHANGED
@@ -27,8 +27,6 @@ class ProxyHandler:
27
  if not self.client.is_closed:
28
  await self.client.aclose()
29
 
30
- # --- START OF FINAL FIX ---
31
- # NEW: Unified thinking content cleaner function
32
  def _clean_thinking_content(self, text: str) -> str:
33
  """
34
  A robust cleaner for the raw thinking content string.
@@ -47,9 +45,7 @@ class ProxyHandler:
47
  cleaned_text = cleaned_text.replace("Thinking…", "")
48
  # 6. Final strip to clean up any residual whitespace.
49
  return cleaned_text.strip()
50
- # --- END OF FINAL FIX ---
51
 
52
- # ... Methods _serialize_msgs, _prep_upstream remain the same ...
53
  def _serialize_msgs(self, msgs) -> list:
54
  out = []
55
  for m in msgs:
@@ -58,6 +54,7 @@ class ProxyHandler:
58
  elif isinstance(m, dict): out.append(m)
59
  else: out.append({"role": getattr(m, "role", "user"), "content": getattr(m, "content", str(m))})
60
  return out
 
61
  async def _prep_upstream(self, req: ChatCompletionRequest) -> Tuple[Dict[str, Any], Dict[str, str], str]:
62
  ck = await cookie_manager.get_next_cookie()
63
  if not ck: raise HTTPException(503, "No available cookies")
@@ -80,10 +77,12 @@ class ProxyHandler:
80
  if not think_open:
81
  yield f"data: {json.dumps({'id': comp_id, 'object': 'chat.completion.chunk', 'created': int(time.time()), 'model': req.model, 'choices': [{'index': 0, 'delta': {'content': '<think>'}, 'finish_reason': None}]})}\n\n"
82
  think_open = True
83
- # In stream, we clean as we go, but we don't strip the final result
84
- # as it might be part of a larger thought. We use a simpler clean here.
85
- cleaned_text = re.sub(r'<glm_block.*?</glm_block>', '', text, flags=re.DOTALL)
86
- cleaned_text = cleaned_text.replace("Thinking…", "") # Remove header early
 
 
87
 
88
  if cleaned_text:
89
  yield f"data: {json.dumps({'id': comp_id, 'object': 'chat.completion.chunk', 'created': int(time.time()), 'model': req.model, 'choices': [{'index': 0, 'delta': {'content': cleaned_text}, 'finish_reason': None}]})}\n\n"
@@ -174,7 +173,6 @@ class ProxyHandler:
174
  final_ans_text = ''.join(raw_answer_parts)
175
  final_content = final_ans_text
176
  if settings.SHOW_THINK_TAGS and raw_thinking_parts:
177
- # Use the new unified cleaner function
178
  cleaned_think_text = self._clean_thinking_content(''.join(raw_thinking_parts))
179
  if cleaned_think_text:
180
  final_content = f"<think>{cleaned_think_text}</think>{final_ans_text}"
 
27
  if not self.client.is_closed:
28
  await self.client.aclose()
29
 
 
 
30
  def _clean_thinking_content(self, text: str) -> str:
31
  """
32
  A robust cleaner for the raw thinking content string.
 
45
  cleaned_text = cleaned_text.replace("Thinking…", "")
46
  # 6. Final strip to clean up any residual whitespace.
47
  return cleaned_text.strip()
 
48
 
 
49
  def _serialize_msgs(self, msgs) -> list:
50
  out = []
51
  for m in msgs:
 
54
  elif isinstance(m, dict): out.append(m)
55
  else: out.append({"role": getattr(m, "role", "user"), "content": getattr(m, "content", str(m))})
56
  return out
57
+
58
  async def _prep_upstream(self, req: ChatCompletionRequest) -> Tuple[Dict[str, Any], Dict[str, str], str]:
59
  ck = await cookie_manager.get_next_cookie()
60
  if not ck: raise HTTPException(503, "No available cookies")
 
77
  if not think_open:
78
  yield f"data: {json.dumps({'id': comp_id, 'object': 'chat.completion.chunk', 'created': int(time.time()), 'model': req.model, 'choices': [{'index': 0, 'delta': {'content': '<think>'}, 'finish_reason': None}]})}\n\n"
79
  think_open = True
80
+
81
+ # --- START OF FINAL FIX ---
82
+ # Use the unified cleaning function for streaming content as well.
83
+ # This ensures consistent output with non-streaming mode.
84
+ cleaned_text = self._clean_thinking_content(text)
85
+ # --- END OF FINAL FIX ---
86
 
87
  if cleaned_text:
88
  yield f"data: {json.dumps({'id': comp_id, 'object': 'chat.completion.chunk', 'created': int(time.time()), 'model': req.model, 'choices': [{'index': 0, 'delta': {'content': cleaned_text}, 'finish_reason': None}]})}\n\n"
 
173
  final_ans_text = ''.join(raw_answer_parts)
174
  final_content = final_ans_text
175
  if settings.SHOW_THINK_TAGS and raw_thinking_parts:
 
176
  cleaned_think_text = self._clean_thinking_content(''.join(raw_thinking_parts))
177
  if cleaned_think_text:
178
  final_content = f"<think>{cleaned_think_text}</think>{final_ans_text}"