Update ui/ui_core.py
Browse files- ui/ui_core.py +12 -24
ui/ui_core.py
CHANGED
|
@@ -3,7 +3,6 @@ import os
|
|
| 3 |
import pandas as pd
|
| 4 |
import pdfplumber
|
| 5 |
import gradio as gr
|
| 6 |
-
import re
|
| 7 |
from typing import List
|
| 8 |
|
| 9 |
# ✅
Fix: Add src to Python path
|
|
@@ -34,7 +33,7 @@ def extract_all_text_from_csv_or_excel(file_path: str, progress=None, index=0, t
|
|
| 34 |
line = " | ".join(str(cell) for cell in row if pd.notna(cell))
|
| 35 |
if line:
|
| 36 |
lines.append(line)
|
| 37 |
-
return f"
|
| 38 |
|
| 39 |
except Exception as e:
|
| 40 |
return f"[Error reading {os.path.basename(file_path)}]: {str(e)}"
|
|
@@ -55,7 +54,7 @@ def extract_all_text_from_pdf(file_path: str, progress=None, index=0, total=1) -
|
|
| 55 |
progress((index + (i / num_pages)) / total, desc=f"Reading PDF: {os.path.basename(file_path)} ({i+1}/{num_pages})")
|
| 56 |
except Exception as e:
|
| 57 |
extracted.append(f"[Error reading page {i+1}]: {str(e)}")
|
| 58 |
-
return f"
|
| 59 |
|
| 60 |
except Exception as e:
|
| 61 |
return f"[Error reading PDF {os.path.basename(file_path)}]: {str(e)}"
|
|
@@ -79,7 +78,7 @@ def chunk_text(text: str, max_tokens: int = 8192) -> List[str]:
|
|
| 79 |
|
| 80 |
def create_ui(agent: TxAgent):
|
| 81 |
with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
| 82 |
-
gr.Markdown("<h1 style='text-align: center;'
|
| 83 |
|
| 84 |
chatbot = gr.Chatbot(label="CPS Assistant", height=600, type="messages", show_copy_button=True)
|
| 85 |
file_upload = gr.File(
|
|
@@ -91,12 +90,6 @@ def create_ui(agent: TxAgent):
|
|
| 91 |
send_button = gr.Button("Send", variant="primary")
|
| 92 |
conversation_state = gr.State([])
|
| 93 |
|
| 94 |
-
# Centered status box styled
|
| 95 |
-
status_box = gr.Markdown(
|
| 96 |
-
"<div style='text-align:center; font-size: 18px;'>β³ Processing in progress... Please wait...</div>",
|
| 97 |
-
visible=False
|
| 98 |
-
)
|
| 99 |
-
|
| 100 |
def handle_chat(message: str, history: list, conversation: list, uploaded_files: list, progress=gr.Progress()):
|
| 101 |
context = (
|
| 102 |
"You are an expert clinical AI assistant reviewing medical form or interview data. "
|
|
@@ -108,10 +101,9 @@ def create_ui(agent: TxAgent):
|
|
| 108 |
)
|
| 109 |
|
| 110 |
try:
|
| 111 |
-
# Show loading
|
| 112 |
-
yield [{"role": "assistant", "content": "
|
| 113 |
|
| 114 |
-
# Step 1: Extract files
|
| 115 |
extracted_text = ""
|
| 116 |
if uploaded_files and isinstance(uploaded_files, list):
|
| 117 |
total_files = len(uploaded_files)
|
|
@@ -129,9 +121,8 @@ def create_ui(agent: TxAgent):
|
|
| 129 |
except Exception as file_error:
|
| 130 |
extracted_text += f"[Error processing {os.path.basename(path)}]: {str(file_error)}\n"
|
| 131 |
|
| 132 |
-
# Step 2: Chunk and sanitize
|
| 133 |
sanitized = sanitize_utf8(extracted_text.strip())
|
| 134 |
-
chunks = chunk_text(sanitized
|
| 135 |
|
| 136 |
all_responses = ""
|
| 137 |
for i, chunk in enumerate(chunks):
|
|
@@ -154,25 +145,23 @@ def create_ui(agent: TxAgent):
|
|
| 154 |
all_responses += update
|
| 155 |
|
| 156 |
all_responses = sanitize_utf8(all_responses.strip())
|
| 157 |
-
|
| 158 |
-
# Final messages
|
| 159 |
final_history = history + [
|
| 160 |
{"role": "user", "content": message},
|
| 161 |
{"role": "assistant", "content": all_responses}
|
| 162 |
]
|
| 163 |
yield final_history
|
| 164 |
|
| 165 |
-
except Exception as
|
| 166 |
-
print(f"
|
| 167 |
final_history = history + [
|
| 168 |
{"role": "user", "content": message},
|
| 169 |
-
{"role": "assistant", "content": "
|
| 170 |
]
|
| 171 |
yield final_history
|
| 172 |
|
| 173 |
inputs = [message_input, chatbot, conversation_state, file_upload]
|
| 174 |
-
send_button.click(fn=handle_chat, inputs=inputs, outputs=
|
| 175 |
-
message_input.submit(fn=handle_chat, inputs=inputs, outputs=
|
| 176 |
|
| 177 |
gr.Examples([
|
| 178 |
["Upload your medical form and ask what the doctor might've missed."],
|
|
@@ -180,5 +169,4 @@ def create_ui(agent: TxAgent):
|
|
| 180 |
["Is there anything abnormal in the attached blood work report?"]
|
| 181 |
], inputs=message_input)
|
| 182 |
|
| 183 |
-
return demo
|
| 184 |
-
|
|
|
|
| 3 |
import pandas as pd
|
| 4 |
import pdfplumber
|
| 5 |
import gradio as gr
|
|
|
|
| 6 |
from typing import List
|
| 7 |
|
| 8 |
# ✅
Fix: Add src to Python path
|
|
|
|
| 33 |
line = " | ".join(str(cell) for cell in row if pd.notna(cell))
|
| 34 |
if line:
|
| 35 |
lines.append(line)
|
| 36 |
+
return f"\U0001F4C4 {os.path.basename(file_path)}\n\n" + "\n".join(lines)
|
| 37 |
|
| 38 |
except Exception as e:
|
| 39 |
return f"[Error reading {os.path.basename(file_path)}]: {str(e)}"
|
|
|
|
| 54 |
progress((index + (i / num_pages)) / total, desc=f"Reading PDF: {os.path.basename(file_path)} ({i+1}/{num_pages})")
|
| 55 |
except Exception as e:
|
| 56 |
extracted.append(f"[Error reading page {i+1}]: {str(e)}")
|
| 57 |
+
return f"\U0001F4C4 {os.path.basename(file_path)}\n\n" + "\n\n".join(extracted)
|
| 58 |
|
| 59 |
except Exception as e:
|
| 60 |
return f"[Error reading PDF {os.path.basename(file_path)}]: {str(e)}"
|
|
|
|
| 78 |
|
| 79 |
def create_ui(agent: TxAgent):
|
| 80 |
with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
| 81 |
+
gr.Markdown("<h1 style='text-align: center;'>\U0001F4CB CPS: Clinical Patient Support System</h1>")
|
| 82 |
|
| 83 |
chatbot = gr.Chatbot(label="CPS Assistant", height=600, type="messages", show_copy_button=True)
|
| 84 |
file_upload = gr.File(
|
|
|
|
| 90 |
send_button = gr.Button("Send", variant="primary")
|
| 91 |
conversation_state = gr.State([])
|
| 92 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 93 |
def handle_chat(message: str, history: list, conversation: list, uploaded_files: list, progress=gr.Progress()):
|
| 94 |
context = (
|
| 95 |
"You are an expert clinical AI assistant reviewing medical form or interview data. "
|
|
|
|
| 101 |
)
|
| 102 |
|
| 103 |
try:
|
| 104 |
+
# Show centered loading message
|
| 105 |
+
yield history + [{"role": "assistant", "content": "<div style='text-align:center'>β³ Processing... Please wait while I analyze the files.</div>"}]
|
| 106 |
|
|
|
|
| 107 |
extracted_text = ""
|
| 108 |
if uploaded_files and isinstance(uploaded_files, list):
|
| 109 |
total_files = len(uploaded_files)
|
|
|
|
| 121 |
except Exception as file_error:
|
| 122 |
extracted_text += f"[Error processing {os.path.basename(path)}]: {str(file_error)}\n"
|
| 123 |
|
|
|
|
| 124 |
sanitized = sanitize_utf8(extracted_text.strip())
|
| 125 |
+
chunks = chunk_text(sanitized)
|
| 126 |
|
| 127 |
all_responses = ""
|
| 128 |
for i, chunk in enumerate(chunks):
|
|
|
|
| 145 |
all_responses += update
|
| 146 |
|
| 147 |
all_responses = sanitize_utf8(all_responses.strip())
|
|
|
|
|
|
|
| 148 |
final_history = history + [
|
| 149 |
{"role": "user", "content": message},
|
| 150 |
{"role": "assistant", "content": all_responses}
|
| 151 |
]
|
| 152 |
yield final_history
|
| 153 |
|
| 154 |
+
except Exception as chat_error:
|
| 155 |
+
print(f"Chat error: {chat_error}")
|
| 156 |
final_history = history + [
|
| 157 |
{"role": "user", "content": message},
|
| 158 |
+
{"role": "assistant", "content": "β An error occurred while processing your request."}
|
| 159 |
]
|
| 160 |
yield final_history
|
| 161 |
|
| 162 |
inputs = [message_input, chatbot, conversation_state, file_upload]
|
| 163 |
+
send_button.click(fn=handle_chat, inputs=inputs, outputs=chatbot)
|
| 164 |
+
message_input.submit(fn=handle_chat, inputs=inputs, outputs=chatbot)
|
| 165 |
|
| 166 |
gr.Examples([
|
| 167 |
["Upload your medical form and ask what the doctor might've missed."],
|
|
|
|
| 169 |
["Is there anything abnormal in the attached blood work report?"]
|
| 170 |
], inputs=message_input)
|
| 171 |
|
| 172 |
+
return demo
|
|
|