ZENLLC committed on
Commit
54cf97f
·
verified ·
1 Parent(s): 27d3e61

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +83 -49
app.py CHANGED
@@ -353,45 +353,55 @@ def build_knowledge_base(
353
  return status, kb
354
 
355
 
356
- def normalize_openai_content(raw_content: Any) -> str:
357
  """
358
- Normalize OpenAI message.content into a plain string.
359
- Handles cases where content is:
360
- - string
361
- - list of parts (with .text or ['text'])
362
- - dict
363
- - None
364
  """
365
- if raw_content is None:
366
  return ""
367
 
368
- # Simple string
369
- if isinstance(raw_content, str):
370
- return raw_content.strip()
371
-
372
- # List of parts
373
- if isinstance(raw_content, list):
374
- parts: List[str] = []
375
- for p in raw_content:
376
- text_piece = ""
377
- if isinstance(p, dict):
378
- text_piece = p.get("text") or ""
379
- else:
380
- # object with .text or fallback to str
381
- text_piece = getattr(p, "text", "") or ""
382
- if not text_piece:
383
- text_piece = str(p)
384
- parts.append(text_piece)
385
- return "\n".join(parts).strip()
386
 
387
- # Dict or something else: best-effort stringify
388
- if isinstance(raw_content, dict):
389
- text_piece = raw_content.get("text")
390
- if isinstance(text_piece, str):
391
- return text_piece.strip()
392
- return str(raw_content)
 
 
 
 
 
 
 
 
 
 
393
 
394
- return str(raw_content).strip()
 
 
 
 
 
 
 
 
 
 
395
 
396
 
397
  def chat_with_rag(
@@ -403,7 +413,7 @@ def chat_with_rag(
403
  ):
404
  """
405
  history_pairs: list of [user_str, assistant_str] pairs for the UI Chatbot.
406
- We'll rebuild OpenAI messages from this each time.
407
  """
408
  user_message = (user_message or "").strip()
409
  api_key = (api_key or "").strip()
@@ -423,15 +433,20 @@ def chat_with_rag(
423
 
424
  client = OpenAI(api_key=api_key)
425
 
426
- # Assemble messages for OpenAI
427
- messages: List[Dict[str, str]] = []
428
 
429
  combined_system = (
430
  DEFAULT_SYSTEM_PROMPT.strip()
431
  + "\n\n---\n\nUser System Instructions:\n"
432
  + system_prompt.strip()
433
  )
434
- messages.append({"role": "system", "content": combined_system})
 
 
 
 
 
435
 
436
  if context:
437
  context_block = (
@@ -440,28 +455,47 @@ def chat_with_rag(
440
  "If the answer is not supported by the context, say you don’t know.\n\n"
441
  f"{context}"
442
  )
443
- messages.append({"role": "system", "content": context_block})
 
 
 
 
 
444
 
445
  # Rebuild conversation history from pairs (last few turns)
446
  recent_pairs = history_pairs[-5:] if history_pairs else []
447
  for u, a in recent_pairs:
448
- messages.append({"role": "user", "content": u})
449
- messages.append({"role": "assistant", "content": a})
 
 
 
 
 
 
 
 
 
 
450
 
451
  # Current user message
452
- messages.append({"role": "user", "content": user_message})
 
 
 
 
 
453
 
454
- # Call OpenAI GPT-5 using max_completion_tokens ONLY (no temperature)
455
  try:
456
- resp = client.chat.completions.create(
457
  model=CHAT_MODEL,
458
- messages=messages,
459
- max_completion_tokens=900, # GPT-5-compatible param
460
  )
461
- raw_content = resp.choices[0].message.content
462
- answer = normalize_openai_content(raw_content)
463
- if not answer:
464
- answer = "⚠️ Model returned an empty response. This may be a transient issue with the API."
465
  except Exception as e:
466
  answer = f"⚠️ OpenAI API error: {e}"
467
 
 
353
  return status, kb
354
 
355
 
356
+ def extract_text_from_response(resp: Any) -> str:
357
  """
358
+ Extract plain text from the Responses API result.
359
+ We assume structure like:
360
+ resp.output -> list of output items
361
+ each item.content -> list of content parts with .text or ['text']
 
 
362
  """
363
+ if resp is None:
364
  return ""
365
 
366
+ texts: List[str] = []
367
+
368
+ # New Responses API usually has resp.output
369
+ output = getattr(resp, "output", None) or getattr(resp, "data", None)
370
+ if output is None:
371
+ # Fallback to just stringifying
372
+ return str(resp)
373
+
374
+ if not isinstance(output, (list, tuple)):
375
+ output = [output]
 
 
 
 
 
 
 
 
376
 
377
+ for item in output:
378
+ content = getattr(item, "content", None)
379
+ if content is None and isinstance(item, dict):
380
+ content = item.get("content")
381
+ if content is None:
382
+ continue
383
+
384
+ if not isinstance(content, (list, tuple)):
385
+ content = [content]
386
+
387
+ for part in content:
388
+ # Part might be object with .text
389
+ txt = getattr(part, "text", None)
390
+ if isinstance(txt, str) and txt.strip():
391
+ texts.append(txt)
392
+ continue
393
 
394
+ # Or dict-like
395
+ if isinstance(part, dict):
396
+ t = part.get("text")
397
+ if isinstance(t, str) and t.strip():
398
+ texts.append(t)
399
+ continue
400
+
401
+ # Fallback, stringify
402
+ texts.append(str(part))
403
+
404
+ return "\n".join(texts).strip()
405
 
406
 
407
  def chat_with_rag(
 
413
  ):
414
  """
415
  history_pairs: list of [user_str, assistant_str] pairs for the UI Chatbot.
416
+ We'll rebuild conversation history for the Responses API each time.
417
  """
418
  user_message = (user_message or "").strip()
419
  api_key = (api_key or "").strip()
 
433
 
434
  client = OpenAI(api_key=api_key)
435
 
436
+ # Build input for Responses API
437
+ input_messages: List[Dict[str, Any]] = []
438
 
439
  combined_system = (
440
  DEFAULT_SYSTEM_PROMPT.strip()
441
  + "\n\n---\n\nUser System Instructions:\n"
442
  + system_prompt.strip()
443
  )
444
+ input_messages.append(
445
+ {
446
+ "role": "system",
447
+ "content": [{"type": "input_text", "text": combined_system}],
448
+ }
449
+ )
450
 
451
  if context:
452
  context_block = (
 
455
  "If the answer is not supported by the context, say you don’t know.\n\n"
456
  f"{context}"
457
  )
458
+ input_messages.append(
459
+ {
460
+ "role": "system",
461
+ "content": [{"type": "input_text", "text": context_block}],
462
+ }
463
+ )
464
 
465
  # Rebuild conversation history from pairs (last few turns)
466
  recent_pairs = history_pairs[-5:] if history_pairs else []
467
  for u, a in recent_pairs:
468
+ input_messages.append(
469
+ {
470
+ "role": "user",
471
+ "content": [{"type": "input_text", "text": u}],
472
+ }
473
+ )
474
+ input_messages.append(
475
+ {
476
+ "role": "assistant",
477
+ "content": [{"type": "output_text", "text": a}],
478
+ }
479
+ )
480
 
481
  # Current user message
482
+ input_messages.append(
483
+ {
484
+ "role": "user",
485
+ "content": [{"type": "input_text", "text": user_message}],
486
+ }
487
+ )
488
 
489
+ # Call OpenAI GPT-5 via Responses API
490
  try:
491
+ resp = client.responses.create(
492
  model=CHAT_MODEL,
493
+ input=input_messages,
494
+ # no temperature, no token params -> avoid unsupported parameter errors
495
  )
496
+ answer = extract_text_from_response(resp)
497
+ if not answer.strip():
498
+ answer = "⚠️ Model returned an empty response object. This may be an API issue."
 
499
  except Exception as e:
500
  answer = f"⚠️ OpenAI API error: {e}"
501