Hiren122 committed on
Commit
b6511a7
·
verified ·
1 Parent(s): 60e36b9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -5
app.py CHANGED
@@ -178,9 +178,7 @@ def parse_onyx_stream_chunk(chunk_text):
178
  if not isinstance(data, dict):
179
  return None, None, None
180
 
181
- # Handle first packet (message IDs)
182
- if 'user_message_id' in data or 'reserved_assistant_message_id' in data:
183
- return None, data.get('reserved_assistant_message_id'), 'message_ids'
184
 
185
  # Handle new packet-based format
186
  if 'obj' in data:
@@ -255,6 +253,7 @@ def generate_openai_stream_chunk(content, model, chunk_id, finish_reason=None):
255
 
256
 
257
  def stream_onyx_response(payload, model, session_key):
 
258
  """Stream response from Onyx API in OpenAI SSE format"""
259
  chunk_id = f"chatcmpl-{uuid.uuid4().hex[:24]}"
260
  url = f"{ONYX_BASE_URL}/api/chat/send-message"
@@ -332,7 +331,8 @@ def stream_onyx_response(payload, model, session_key):
332
  if msg_id:
333
  last_message_id = msg_id
334
  if packet_type == 'stop':
335
- break
 
336
  if content and packet_type in ['content', 'legacy', 'raw', 'error']:
337
  yield generate_openai_stream_chunk(content, model, chunk_id)
338
 
@@ -348,7 +348,7 @@ def stream_onyx_response(payload, model, session_key):
348
 
349
  # Update session with last message ID
350
  if session_key in chat_sessions_cache and last_message_id:
351
- chat_sessions_cache[session_key]['parent_message_id'] = last_message_id
352
 
353
  break # Success, exit loop
354
 
@@ -359,6 +359,8 @@ def stream_onyx_response(payload, model, session_key):
359
  # Send final chunk
360
  yield generate_openai_stream_chunk("", model, chunk_id, "stop")
361
  yield "data: [DONE]\n\n"
 
 
362
 
363
 
364
  def collect_full_response(payload, model, session_key):
 
178
  if not isinstance(data, dict):
179
  return None, None, None
180
 
181
+
 
 
182
 
183
  # Handle new packet-based format
184
  if 'obj' in data:
 
253
 
254
 
255
  def stream_onyx_response(payload, model, session_key):
256
+ final_message_id = None
257
  """Stream response from Onyx API in OpenAI SSE format"""
258
  chunk_id = f"chatcmpl-{uuid.uuid4().hex[:24]}"
259
  url = f"{ONYX_BASE_URL}/api/chat/send-message"
 
331
  if msg_id:
332
  last_message_id = msg_id
333
  if packet_type == 'stop':
334
+ final_message_id = last_message_id
335
+ break
336
  if content and packet_type in ['content', 'legacy', 'raw', 'error']:
337
  yield generate_openai_stream_chunk(content, model, chunk_id)
338
 
 
348
 
349
  # Update session with last message ID
350
  if session_key in chat_sessions_cache and last_message_id:
351
+
352
 
353
  break # Success, exit loop
354
 
 
359
  # Send final chunk
360
  yield generate_openai_stream_chunk("", model, chunk_id, "stop")
361
  yield "data: [DONE]\n\n"
362
+ if final_message_id:
363
+ chat_sessions_cache[session_key]['parent_message_id'] = final_message_id
364
 
365
 
366
  def collect_full_response(payload, model, session_key):