Update app.py
app.py
CHANGED
@@ -14,7 +14,7 @@ import torchvision.models as models
 try:
     from openai import OpenAI
 except Exception:
-    OpenAI = None  #
+    OpenAI = None  # if the package isn't there, we don't blow up
 
 import spaces  # ZeroGPU decorator
 
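With this fallback, every downstream call site has to tolerate `OpenAI` being `None`. A minimal sketch of such a guard, assuming a hypothetical `get_client` helper and an `OPENAI_API_KEY` environment variable (neither appears in this diff):

```python
import os

def get_client():
    """Return an OpenAI client, or None when the SDK or API key is missing."""
    if OpenAI is None:  # the guarded import above failed
        return None
    api_key = os.environ.get("OPENAI_API_KEY")
    return OpenAI(api_key=api_key) if api_key else None
```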
@@ -173,7 +173,7 @@ def call_assistant(
     )
     user_payload = core_context + "\nUser question:\n" + (user_question or "Provide initial advisory.")
 
-    #
+    # multi-part content with image
    content = [{"type": "input_text", "text": user_payload}]
 
     if image is not None:
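The branch that actually appends the image part is cut off by the hunk. One plausible continuation, assuming a PIL image encoded as a base64 data URL in an `input_image` part; the real code is not shown, so treat this purely as a sketch:

```python
import base64
import io

if image is not None:
    # hypothetical continuation: encode the image and append a second part
    buf = io.BytesIO()
    image.save(buf, format="PNG")  # assumes a PIL.Image input
    b64 = base64.b64encode(buf.getvalue()).decode("ascii")
    content.append({
        "type": "input_image",
        "image_url": f"data:image/png;base64,{b64}",
    })
```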
@@ -210,7 +210,7 @@ def call_assistant(
         instructions=extra_instructions,
     )
 
-    # polling with timeout
+    # polling with timeout
     t0 = time.time()
     while True:
         r = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run.id)
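The timeout check itself falls outside the visible lines. A self-contained sketch of the polling-with-timeout pattern the comment names; the 120-second budget, the poll interval, and the exact set of terminal statuses are assumptions, not values from this diff:

```python
import time

def wait_for_run(client, thread_id, run_id, timeout_s=120, poll_every_s=1.0):
    """Poll an Assistants run until it reaches a terminal state or times out."""
    t0 = time.time()
    while True:
        r = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run_id)
        if r.status in ("completed", "failed", "cancelled", "expired"):
            return r  # terminal state: stop polling
        if time.time() - t0 > timeout_s:
            raise TimeoutError(f"run still '{r.status}' after {timeout_s}s")
        time.sleep(poll_every_s)  # back off between polls
```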
@@ -248,98 +248,97 @@ def call_assistant(
 
 def run_analysis(image, note, zone, chat_history, thread_state):
     """
-    Safe generator: catches exceptions
-    so the overlay doesn't stay stuck and the user isn't left staring into the cosmic void.
+    Safe generator: catches exceptions and always returns something.
     """
-            yield err, chat_history, thread_state or {}
+    prog = gr.Progress()  # no context manager, for compatibility with older versions
+    try:
+        prog(0.03, desc="Checking input")
+        if image is None:
+            yield "No image received.", chat_history, thread_state
+            return
+
+        if not zone or zone == "Other / Not sure":
+            yield "**Please select the area/zone first.**", chat_history, thread_state
+            return
+
+        # immediate feedback
+        yield "**Analyzing image...** Please wait.", chat_history, thread_state
+
+        prog(0.18, desc="Preprocessing")
+        time.sleep(0.05)
+
+        prog(0.50, desc="Classifying (ResNet50)")
+        label, conf = predict_image(image)
+
+        prog(0.72, desc="Consulting PPG Assistant")
+        reply, thread_id = call_assistant(
+            label=label,
+            confidence=conf,
+            zone=zone,
+            note=note or "",
+            user_question="Provide initial advisory.",
+            image=image,
+            thread_id=(thread_state or {}).get("thread_id")
+        )
 
+        header = f"**Model result:** `{label}` — confidence **{round(conf*100,2)}%**\n\n"
+        out_text = header + (reply or "")
+        new_history = (chat_history[:] if chat_history else [])
+        if reply:
+            new_history.append(("", reply))
 
+        prog(1.0, desc="Done")
 
+        yield out_text, new_history, {
+            "thread_id": thread_id,
+            "label": label,
+            "confidence": conf,
+            "zone": zone or "",
+        }
 
+    except Exception as e:
+        print("[Pipeline][ERROR]", e)
+        traceback.print_exc()
+        err = f"**Error during analysis**:\n```\n{e}\n```"
+        yield err, chat_history, thread_state or {}
 
 def continue_chat(user_msg, chat_history, thread_state, note, zone):
     if not user_msg or not user_msg.strip():
         return chat_history, ""
-    with gr.Progress() as prog:
-        try:
-            prog(0.2, desc="Sending")
-            label = (thread_state or {}).get("label") or "unknown"
-            conf = (thread_state or {}).get("confidence") or 0.0
-            current_zone = zone or (thread_state or {}).get("zone") or "Not specified"
-            thread_id = (thread_state or {}).get("thread_id")
-
-            prog(0.7, desc="Consulting PPG Assistant")
-            reply, thread_id = call_assistant(
-                label=label,
-                confidence=conf,
-                zone=current_zone,
-                note=note or "",
-                user_question=user_msg,
-                image=None,  # the thread already has the file from the last analysis
-                thread_id=thread_id
-            )
 
+    prog = gr.Progress()
+    try:
+        prog(0.2, desc="Sending")
+        label = (thread_state or {}).get("label") or "unknown"
+        conf = (thread_state or {}).get("confidence") or 0.0
+        current_zone = zone or (thread_state or {}).get("zone") or "Not specified"
+        thread_id = (thread_state or {}).get("thread_id")
+
+        prog(0.7, desc="Consulting PPG Assistant")
+        reply, thread_id = call_assistant(
+            label=label,
+            confidence=conf,
+            zone=current_zone,
+            note=note or "",
+            user_question=user_msg,
+            image=None,  # the thread already has the image from the last analysis
+            thread_id=thread_id
+        )
+
+        chat_history = chat_history or []
+        chat_history.append((user_msg, reply))
+        if isinstance(thread_state, dict):
+            thread_state["thread_id"] = thread_id
 
+        prog(1.0, desc="Done")
+        return chat_history, ""
 
+    except Exception as e:
+        print("[Chat][ERROR]", e)
+        traceback.print_exc()
+        chat_history = chat_history or []
+        chat_history.append((user_msg, f"[Error] {e}"))
+        return chat_history, ""
 
 # ======================
 # UI
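The rewrite leans on two Gradio behaviors: a generator handler streams each `yield` to its output components, and `gr.Progress` is driven by calling the instance with a fraction and a description. Gradio's documented pattern injects the tracker as a default argument; the diff instead instantiates it in the body, for compatibility with older versions per its comment. A minimal sketch of the documented form (component names are illustrative):

```python
import time
import gradio as gr

def slow_task(progress=gr.Progress()):
    progress(0.1, desc="Starting")
    yield "**Working...** please wait."  # shown immediately
    time.sleep(0.5)
    progress(1.0, desc="Done")
    yield "**Done.**"                    # replaces the interim message

with gr.Blocks() as demo:
    out = gr.Markdown()
    gr.Button("Run").click(slow_task, inputs=None, outputs=out)

if __name__ == "__main__":
    demo.launch()
```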
@@ -449,6 +448,4 @@ with gr.Blocks(title="Corrosion Assistant", theme=gr.themes.Soft()) as demo:
     demo.api_mode = "enabled"
 
 if __name__ == "__main__":
-    # in a Space, Gradio manages host/port; fine for local dev
     demo.launch()
-