Rajan Sharma committed on
Commit
a40f297
·
verified ·
1 Parent(s): 744c807

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -19
app.py CHANGED
@@ -78,6 +78,7 @@ def is_identity_query(message, history):
78
  return False
79
 
80
  def _iter_user_assistant(history):
 
81
  for item in (history or []):
82
  if isinstance(item, (list, tuple)):
83
  u = item[0] if len(item) > 0 else ""
@@ -133,6 +134,7 @@ def load_local_model():
133
  return mdl, tok
134
 
135
  def build_inputs(tokenizer, message, history):
 
136
  msgs = []
137
  for u, a in _iter_user_assistant(history):
138
  if u: msgs.append({"role": "user", "content": u})
@@ -227,7 +229,7 @@ def clarityops_reply(user_msg, history, tz, uploaded_files_paths):
227
  )
228
  computed = compute_operational_numbers(snapshot)
229
 
230
- # Smart scenario detection: if user message itself looks like exec MDSi context, include the pre-compute block
231
  user_lower = (safe_in or "").lower()
232
  mdsi_extra = _mdsi_block() if ("diabetes" in user_lower or "mdsi" in user_lower or "mobile screening" in user_lower) else ""
233
 
@@ -321,8 +323,8 @@ with gr.Blocks(theme=theme, css=custom_css) as demo:
321
 
322
  gr.Markdown("# ClarityOps Augmented Decision AI")
323
 
324
- # Main chat area
325
- chat = gr.Chatbot(label="", show_label=False, type="messages", height=700)
326
 
327
  # ---- Bottom bar: uploads + message box + send/clear ----
328
  with gr.Row():
@@ -330,12 +332,16 @@ with gr.Blocks(theme=theme, css=custom_css) as demo:
330
  label="Upload docs/images (PDF, DOCX, CSV, PNG, JPG)",
331
  file_types=["file"],
332
  file_count="multiple",
333
- # keep compact footprint
334
  height=68
335
  )
336
 
337
  with gr.Row():
338
- msg = gr.Textbox(placeholder="Type a message… (paste scenarios here too; ClarityOps will adapt)", scale=10)
 
 
 
 
 
339
  send = gr.Button("Send", scale=1)
340
  clear = gr.Button("Clear chat", scale=1)
341
 
@@ -347,23 +353,22 @@ with gr.Blocks(theme=theme, css=custom_css) as demo:
347
  def _store_uploads(files, current):
348
  paths = []
349
  for f in (files or []):
350
- # gradio Files returns tempfile objects with .name
351
  paths.append(getattr(f, "name", None) or f)
352
  return (current or []) + paths
353
 
354
  uploads.change(fn=_store_uploads, inputs=[uploads, state_uploaded], outputs=state_uploaded)
355
 
356
- # Send message -> compute reply -> update chat
357
  def _on_send(user_msg, history, tz, up_paths):
358
  if not user_msg or not user_msg.strip():
359
- return history, "" # no-op
360
  new_history = clarityops_reply(user_msg.strip(), history or [], tz, up_paths or [])
361
- return new_history, ""
362
 
363
  send.click(
364
  fn=_on_send,
365
  inputs=[msg, state_history, tz_box, state_uploaded],
366
- outputs=[chat, msg],
367
  queue=True,
368
  )
369
 
@@ -371,21 +376,16 @@ with gr.Blocks(theme=theme, css=custom_css) as demo:
371
  msg.submit(
372
  fn=_on_send,
373
  inputs=[msg, state_history, tz_box, state_uploaded],
374
- outputs=[chat, msg],
375
  queue=True,
376
  )
377
 
378
- # Keep Chatbot history state in sync whenever it updates
379
- chat.change(lambda h: h, inputs=chat, outputs=state_history)
380
-
381
  # Clear chat (keeps uploads so you can keep referencing docs)
382
  def _clear_chat():
383
- return [], []
384
- clear.click(lambda: [], None, chat)
385
- # If you also want to clear uploads, uncomment below:
386
- # clear.click(_clear_chat, None, [chat, state_uploaded])
387
 
388
  if __name__ == "__main__":
389
  port = int(os.environ.get("PORT", "7860"))
390
  demo.launch(server_name="0.0.0.0", server_port=port, show_api=False, max_threads=8)
391
-
 
78
  return False
79
 
80
  def _iter_user_assistant(history):
81
+ # history is a list of (user, assistant) tuples (Chatbot default format)
82
  for item in (history or []):
83
  if isinstance(item, (list, tuple)):
84
  u = item[0] if len(item) > 0 else ""
 
134
  return mdl, tok
135
 
136
  def build_inputs(tokenizer, message, history):
137
+ # Convert tuple history to chat template input for HF models
138
  msgs = []
139
  for u, a in _iter_user_assistant(history):
140
  if u: msgs.append({"role": "user", "content": u})
 
229
  )
230
  computed = compute_operational_numbers(snapshot)
231
 
232
+ # Smart scenario detection: if user message suggests exec MDSi context, include pre-compute block
233
  user_lower = (safe_in or "").lower()
234
  mdsi_extra = _mdsi_block() if ("diabetes" in user_lower or "mdsi" in user_lower or "mobile screening" in user_lower) else ""
235
 
 
323
 
324
  gr.Markdown("# ClarityOps Augmented Decision AI")
325
 
326
+ # Main chat area (IMPORTANT: no type="messages" -> uses tuple history)
327
+ chat = gr.Chatbot(label="", show_label=False, height=700)
328
 
329
  # ---- Bottom bar: uploads + message box + send/clear ----
330
  with gr.Row():
 
332
  label="Upload docs/images (PDF, DOCX, CSV, PNG, JPG)",
333
  file_types=["file"],
334
  file_count="multiple",
 
335
  height=68
336
  )
337
 
338
  with gr.Row():
339
+ msg = gr.Textbox(
340
+ label="",
341
+ show_label=False,
342
+ placeholder="Type a message… (paste scenarios here too; ClarityOps will adapt)",
343
+ scale=10
344
+ )
345
  send = gr.Button("Send", scale=1)
346
  clear = gr.Button("Clear chat", scale=1)
347
 
 
353
  def _store_uploads(files, current):
354
  paths = []
355
  for f in (files or []):
 
356
  paths.append(getattr(f, "name", None) or f)
357
  return (current or []) + paths
358
 
359
  uploads.change(fn=_store_uploads, inputs=[uploads, state_uploaded], outputs=state_uploaded)
360
 
361
+ # Send message -> compute reply -> update chat & history
362
  def _on_send(user_msg, history, tz, up_paths):
363
  if not user_msg or not user_msg.strip():
364
+ return history, "", history # no-op
365
  new_history = clarityops_reply(user_msg.strip(), history or [], tz, up_paths or [])
366
+ return new_history, "", new_history
367
 
368
  send.click(
369
  fn=_on_send,
370
  inputs=[msg, state_history, tz_box, state_uploaded],
371
+ outputs=[chat, msg, state_history],
372
  queue=True,
373
  )
374
 
 
376
  msg.submit(
377
  fn=_on_send,
378
  inputs=[msg, state_history, tz_box, state_uploaded],
379
+ outputs=[chat, msg, state_history],
380
  queue=True,
381
  )
382
 
 
 
 
383
  # Clear chat (keeps uploads so you can keep referencing docs)
384
  def _clear_chat():
385
+ return [], [], []
386
+ # Clear only chat + input; keep uploads
387
+ clear.click(lambda: ([], "", []), None, [chat, msg, state_history])
 
388
 
389
  if __name__ == "__main__":
390
  port = int(os.environ.get("PORT", "7860"))
391
  demo.launch(server_name="0.0.0.0", server_port=port, show_api=False, max_threads=8)