Update app.py
Browse files
app.py
CHANGED
|
@@ -173,41 +173,62 @@ User note: {note or "(none)"}.
|
|
| 173 |
return reply or "No reply from Assistant.", thread_id
|
| 174 |
|
| 175 |
# ======================
|
| 176 |
-
# Pipelines (generator)
|
| 177 |
# ======================
|
| 178 |
|
| 179 |
def run_analysis(image, note, zone, chat_history, thread_state):
|
| 180 |
-
|
| 181 |
-
|
| 182 |
-
|
| 183 |
-
|
| 184 |
-
|
| 185 |
-
|
| 186 |
-
|
| 187 |
-
|
| 188 |
-
|
| 189 |
-
|
| 190 |
-
|
| 191 |
-
|
| 192 |
-
|
| 193 |
-
|
| 194 |
-
|
| 195 |
-
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 199 |
|
| 200 |
def continue_chat(user_msg, chat_history, thread_state, note, zone):
|
| 201 |
if not user_msg.strip():
|
| 202 |
return chat_history, ""
|
| 203 |
-
|
| 204 |
-
|
| 205 |
-
|
| 206 |
-
|
| 207 |
-
|
| 208 |
-
|
| 209 |
-
|
| 210 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 211 |
|
| 212 |
# ======================
|
| 213 |
# UI
|
|
@@ -223,11 +244,46 @@ recognized so use this Model at your own risk.
|
|
| 223 |
**Disclaimer**: research & experimental only. Made with love by JQ.
|
| 224 |
"""
|
| 225 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 226 |
with gr.Blocks(title="Corrosion Assistant", theme=gr.themes.Soft()) as demo:
|
| 227 |
gr.Markdown(WELCOME)
|
| 228 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 229 |
with gr.Row():
|
| 230 |
with gr.Column(scale=2):
|
|
|
|
|
|
|
| 231 |
img = gr.Image(type="pil", sources=["upload","webcam"], label="Upload or webcam")
|
| 232 |
note = gr.Textbox(label="Notes / Context (optional)")
|
| 233 |
zone = gr.Dropdown(choices=ZONES, label="Zone (indicative)", value="Other / Not sure")
|
|
@@ -252,26 +308,41 @@ with gr.Blocks(title="Corrosion Assistant", theme=gr.themes.Soft()) as demo:
|
|
| 252 |
chat_state = gr.State([])
|
| 253 |
thread_state = gr.State({"thread_id": None, "label": None, "confidence": 0.0, "zone": ""})
|
| 254 |
|
|
|
|
| 255 |
analyze_btn.click(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 256 |
fn=run_analysis,
|
| 257 |
inputs=[img, note, zone, chat_state, thread_state],
|
| 258 |
-
outputs=[out_md, chat_state, thread_state]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 259 |
).then(
|
| 260 |
lambda h: h,
|
| 261 |
inputs=[chat_state],
|
| 262 |
-
outputs=[chat]
|
|
|
|
| 263 |
)
|
| 264 |
|
| 265 |
send_btn.click(
|
| 266 |
fn=continue_chat,
|
| 267 |
inputs=[chat_in, chat_state, thread_state, note, zone],
|
| 268 |
-
outputs=[chat, chat_in]
|
|
|
|
| 269 |
)
|
| 270 |
|
| 271 |
clear_btn.click(
|
| 272 |
lambda: ([], ""),
|
| 273 |
inputs=[],
|
| 274 |
-
outputs=[chat, chat_in]
|
|
|
|
| 275 |
)
|
| 276 |
|
| 277 |
demo.api_mode = "enabled"
|
|
|
|
| 173 |
return reply or "No reply from Assistant.", thread_id
|
| 174 |
|
| 175 |
# ======================
|
| 176 |
+
# Pipelines (generator) con barra di avanzamento
|
| 177 |
# ======================
|
| 178 |
|
| 179 |
def run_analysis(image, note, zone, chat_history, thread_state, progress=gr.Progress()):
    """Generator pipeline: validate inputs, classify the image, consult the assistant.

    Yields (markdown_text, chat_history, thread_state) tuples so the UI can show
    intermediate feedback before the final result.

    Parameters:
        image: PIL image from the gr.Image component (may be None).
        note: optional free-text context from the user.
        zone: selected zone label from the dropdown.
        chat_history: current chatbot history (list of (user, assistant) tuples).
        thread_state: dict carrying thread_id / label / confidence / zone.
        progress: injected by Gradio via the gr.Progress default — NOT a
            context manager; the original `with gr.Progress() as prog:` raised
            at runtime because gr.Progress has no __enter__/__exit__.
    """
    progress(0.02, desc="Ricezione immagine")
    if image is None:
        yield "No image received.", chat_history, thread_state
        return

    progress(0.06, desc="Validazione input")
    if not zone or zone == "Other / Not sure":
        yield "**Please select the area/zone first.**", chat_history, thread_state
        return

    # Intermediate yield for immediate visual feedback while the model runs.
    yield "**Analyzing image...** Please wait.", chat_history, thread_state

    progress(0.15, desc="Preprocessing")
    # Tiny delay purely so the progress step is visible in the UI.
    time.sleep(0.05)

    progress(0.45, desc="Classificazione (CNN)")
    label, conf = predict_image(image)

    progress(0.70, desc="Consulto PPG Assistant")
    reply, thread_id = call_assistant(label, conf, zone, note, "Provide initial advisory.")

    progress(0.98, desc="Composizione risposta")

    header = f"**Model result:** `{label}` — confidence **{round(conf*100,2)}%**\n\n"
    out_text = header + (reply or "")
    # Copy instead of mutating the incoming state list in place.
    new_history = chat_history[:] if chat_history else []
    new_history.append(("", reply))
    progress(1.0, desc="Fatto")

    yield out_text, new_history, {"thread_id": thread_id, "label": label, "confidence": conf, "zone": zone or ""}
|
| 214 |
|
| 215 |
def continue_chat(user_msg, chat_history, thread_state, note, zone, progress=gr.Progress()):
    """Send a follow-up chat message to the assistant on the existing thread.

    Parameters:
        user_msg: text typed by the user (ignored if blank).
        chat_history: chatbot history list, appended to in place.
        thread_state: dict with thread_id / label / confidence / zone (may be None).
        note, zone: current values of the side inputs.
        progress: injected by Gradio via the gr.Progress default — gr.Progress
            is not a context manager, so the original `with gr.Progress()` form
            failed at runtime.

    Returns:
        (updated chat_history, "" to clear the input textbox).
    """
    if not user_msg.strip():
        return chat_history, ""

    progress(0.1, desc="Invio messaggio")
    # Every read below defends against thread_state being None; the write at
    # the end must do the same or it crashes on a fresh/cleared session.
    if thread_state is None:
        thread_state = {}
    label = thread_state.get("label") or "unknown"
    conf = thread_state.get("confidence") or 0.0
    current_zone = zone or thread_state.get("zone") or "Not specified"
    thread_id = thread_state.get("thread_id")

    progress(0.6, desc="Consulto PPG Assistant")
    reply, thread_id = call_assistant(label, conf, current_zone, note, user_msg, thread_id)

    chat_history.append((user_msg, reply))
    thread_state["thread_id"] = thread_id
    progress(1.0, desc="Fatto")
    return chat_history, ""
|
| 232 |
|
| 233 |
# ======================
|
| 234 |
# UI
|
|
|
|
| 244 |
**Disclaimer**: research & experimental only. Made with love by JQ.
|
| 245 |
"""
|
| 246 |
|
| 247 |
+
# Full-screen HTML/CSS overlay shown while upload/analysis is in progress.
# position:fixed + z-index keep it above the whole app; toggled via gr.update(visible=...).
LOADER_HTML = """
<div id="overlay-mask" style="
  position: fixed; inset: 0; background: rgba(0,0,0,0.55);
  display: flex; align-items: center; justify-content: center;
  z-index: 9999; backdrop-filter: blur(2px);
">
  <div style="background:#111; color:#fff; padding:24px 28px; border-radius:16px;
              font-family: ui-sans-serif, system-ui, -apple-system; text-align:center;
              box-shadow: 0 10px 30px rgba(0,0,0,0.5);">
    <div class="spinner" style="
      width:48px;height:48px;border:4px solid #444;border-top-color:#fff;border-radius:50%;
      margin:0 auto 14px; animation: spin 1s linear infinite;"></div>
    <div style="font-size:16px; font-weight:600;">Elaborazione in corso…</div>
    <div style="opacity:0.85; font-size:12px; margin-top:6px;">Non toccare nulla, grazie.</div>
  </div>
</div>
<style>
@keyframes spin { to { transform: rotate(360deg); } }
</style>
"""
|
| 268 |
+
|
| 269 |
+
# Small helpers to show/hide the overlay and lock/unlock the analyze button.
def _show_overlay_and_busy():
    """Reveal the loading overlay and put the analyze button in a busy state."""
    overlay_update = gr.update(visible=True)
    button_update = gr.update(interactive=False, value="🔄 Analisi in corso…")
    return overlay_update, button_update
|
| 272 |
+
|
| 273 |
+
def _hide_overlay_and_idle():
    """Hide the loading overlay and restore the analyze button to idle."""
    overlay_update = gr.update(visible=False)
    button_update = gr.update(interactive=True, value="Analyze image")
    return overlay_update, button_update
|
| 275 |
+
|
| 276 |
with gr.Blocks(title="Corrosion Assistant", theme=gr.themes.Soft()) as demo:
|
| 277 |
gr.Markdown(WELCOME)
|
| 278 |
|
| 279 |
+
# overlay nascosto di default
|
| 280 |
+
overlay = gr.HTML(LOADER_HTML, visible=False)
|
| 281 |
+
# NB: l’overlay sta in cima all’app grazie a position:fixed
|
| 282 |
+
|
| 283 |
with gr.Row():
|
| 284 |
with gr.Column(scale=2):
|
| 285 |
+
# Lato “upload”: non posso mostrare percentuale reale dell’upload,
|
| 286 |
+
# ma il bottone mostrerà spinner e l’overlay partirà subito al click.
|
| 287 |
img = gr.Image(type="pil", sources=["upload","webcam"], label="Upload or webcam")
|
| 288 |
note = gr.Textbox(label="Notes / Context (optional)")
|
| 289 |
zone = gr.Dropdown(choices=ZONES, label="Zone (indicative)", value="Other / Not sure")
|
|
|
|
| 308 |
chat_state = gr.State([])
|
| 309 |
thread_state = gr.State({"thread_id": None, "label": None, "confidence": 0.0, "zone": ""})
|
| 310 |
|
| 311 |
+
# catena: mostra overlay + disabilita bottone -> run -> nascondi overlay + riabilita -> aggiorna chat
|
| 312 |
analyze_btn.click(
|
| 313 |
+
fn=_show_overlay_and_busy,
|
| 314 |
+
inputs=[],
|
| 315 |
+
outputs=[overlay, analyze_btn],
|
| 316 |
+
show_progress=False
|
| 317 |
+
).then(
|
| 318 |
fn=run_analysis,
|
| 319 |
inputs=[img, note, zone, chat_state, thread_state],
|
| 320 |
+
outputs=[out_md, chat_state, thread_state],
|
| 321 |
+
show_progress=True # barra deterministica server-side
|
| 322 |
+
).then(
|
| 323 |
+
fn=_hide_overlay_and_idle,
|
| 324 |
+
inputs=[],
|
| 325 |
+
outputs=[overlay, analyze_btn],
|
| 326 |
+
show_progress=False
|
| 327 |
).then(
|
| 328 |
lambda h: h,
|
| 329 |
inputs=[chat_state],
|
| 330 |
+
outputs=[chat],
|
| 331 |
+
show_progress=False
|
| 332 |
)
|
| 333 |
|
| 334 |
send_btn.click(
|
| 335 |
fn=continue_chat,
|
| 336 |
inputs=[chat_in, chat_state, thread_state, note, zone],
|
| 337 |
+
outputs=[chat, chat_in],
|
| 338 |
+
show_progress=True
|
| 339 |
)
|
| 340 |
|
| 341 |
clear_btn.click(
|
| 342 |
lambda: ([], ""),
|
| 343 |
inputs=[],
|
| 344 |
+
outputs=[chat, chat_in],
|
| 345 |
+
show_progress=False
|
| 346 |
)
|
| 347 |
|
| 348 |
demo.api_mode = "enabled"
|