jacopo22295 commited on
Commit
eab477b
·
verified ·
1 Parent(s): 07b8bfe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +131 -28
app.py CHANGED
@@ -69,8 +69,15 @@ if not OPENAI_API_KEY:
69
 
70
  client = OpenAI(api_key=OPENAI_API_KEY)
71
 
72
- # Assistant PPG ID (quello che mi hai dato)
73
- ASSISTANT_ID = "asst_20DNMEENkfBsYupFjPCwfijZ"
 
 
 
 
 
 
 
74
 
75
  # ======================
76
  # Helpers
@@ -89,53 +96,84 @@ def predict_image(image: Image.Image):
89
 
90
  def call_assistant(label, confidence, zone, note, user_question, thread_id=None):
91
  """
92
- Manda input all'assistente OpenAI. Se thread_id è None, crea un nuovo thread.
 
 
93
  Ritorna (reply, thread_id).
94
  """
95
- # se non esiste un thread, crealo
96
  if not thread_id:
97
- thread = client.beta.threads.create()
 
 
 
 
 
98
  thread_id = thread.id
99
 
100
- # messaggio utente con contesto
101
- context = f"""
102
- Classification result: {label} ({round(confidence*100,2)}%).
103
  Zone: {zone or "Not specified"}.
104
- Note: {note or "(none)"}.
 
105
 
106
- User question:
107
- {user_question}
108
 
109
- Always act as a PPG marine coatings technical specialist.
110
- Keep answers concise, technical, and always add:
111
- "Research use only; verify with official PPG specs."
112
- """
113
  client.beta.threads.messages.create(
114
  thread_id=thread_id,
115
  role="user",
116
- content=context
 
 
117
  )
118
 
119
- # run con l'assistente
120
- run = client.beta.threads.runs.create(
121
- thread_id=thread_id,
122
- assistant_id=ASSISTANT_ID
 
 
 
 
 
 
 
 
 
123
  )
124
 
125
- # polling fino a completamento
 
 
 
 
 
 
 
 
 
 
 
126
  while True:
127
  r = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run.id)
128
  if r.status in ["completed", "failed", "cancelled", "expired"]:
129
  break
130
  time.sleep(0.8)
131
 
132
- # recupera ultimo messaggio
133
  msgs = client.beta.threads.messages.list(thread_id=thread_id)
134
  reply = None
135
  for m in msgs.data:
136
  if m.role == "assistant":
137
- reply = m.content[0].text.value
138
- break
 
 
 
 
 
139
 
140
  return reply or "No reply from Assistant.", thread_id
141
 
@@ -147,7 +185,14 @@ def run_analysis(image, note, zone, chat_history, thread_state):
147
  if image is None:
148
  return "No image received.", chat_history, thread_state
149
 
 
 
 
 
 
150
  label, conf = predict_image(image)
 
 
151
  user_question = "Provide initial advisory based on classification and note."
152
  reply, thread_id = call_assistant(label, conf, zone, note, user_question)
153
 
@@ -160,7 +205,7 @@ def run_analysis(image, note, zone, chat_history, thread_state):
160
  return out_text, new_history, {"thread_id": thread_id, "label": label, "confidence": conf, "zone": zone or ""}
161
 
162
  def continue_chat(user_msg, chat_history, thread_state, note, zone):
163
- if not user_msg.strip():
164
  return chat_history, ""
165
 
166
  label = (thread_state or {}).get("label") or "unknown"
@@ -174,6 +219,27 @@ def continue_chat(user_msg, chat_history, thread_state, note, zone):
174
  thread_state["thread_id"] = thread_id
175
  return chat_history, ""
176
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
177
  # ======================
178
  # UI
179
  # ======================
@@ -182,9 +248,9 @@ WELCOME = """
182
  # Corrosion Assistant — Beta
183
 
184
  **Welcome!** This demo runs a custom **ResNet50 corrosion classifier** and connects to a dedicated **PPG Assistant** on OpenAI.
185
- - **Model**: ResNet50 classifier, **trained locally** on ~9,000 images
186
- - **Data collection**: a public link for contributing images will open soon
187
- - **Disclaimer**: research & experimental only. No professional advice, no warranty.
188
 
189
  After image analysis you can continue chatting with the assistant.
190
  """
@@ -192,6 +258,21 @@ After image analysis you can continue chatting with the assistant.
192
  with gr.Blocks(title="Corrosion Assistant", theme=gr.themes.Soft()) as demo:
193
  gr.Markdown(WELCOME)
194
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
195
  with gr.Row():
196
  with gr.Column(scale=2):
197
  img = gr.Image(type="pil", sources=["upload","webcam"], label="Upload or webcam")
@@ -199,6 +280,10 @@ with gr.Blocks(title="Corrosion Assistant", theme=gr.themes.Soft()) as demo:
199
  zone = gr.Dropdown(choices=ZONES, label="Zone (indicative)", value="Other / Not sure")
200
  analyze_btn = gr.Button("Analyze image", variant="primary")
201
  with gr.Column(scale=3):
 
 
 
 
202
  out_md = gr.Markdown(label="Analysis")
203
 
204
  gr.Markdown("### Continue the conversation with the PPG Assistant")
@@ -219,20 +304,38 @@ with gr.Blocks(title="Corrosion Assistant", theme=gr.themes.Soft()) as demo:
219
  chat_state = gr.State([])
220
  thread_state = gr.State({"thread_id": None, "label": None, "confidence": 0.0, "zone": ""})
221
 
 
222
  analyze_btn.click(
 
 
 
 
223
  fn=run_analysis,
224
  inputs=[img, note, zone, chat_state, thread_state],
225
  outputs=[out_md, chat_state, thread_state]
 
 
 
 
226
  ).then(
227
  lambda h: h,
228
  inputs=[chat_state],
229
  outputs=[chat]
230
  )
231
 
 
232
  send_btn.click(
 
 
 
 
233
  fn=continue_chat,
234
  inputs=[chat_in, chat_state, thread_state, note, zone],
235
  outputs=[chat, chat_in]
 
 
 
 
236
  )
237
 
238
  clear_btn.click(
 
69
 
70
  client = OpenAI(api_key=OPENAI_API_KEY)
71
 
72
+ # Assistant PPG ID
73
+ # Usa la env se presente, altrimenti il tuo ID fornito
74
+ ASSISTANT_ID = os.environ.get("PPG_ASSISTANT_ID", "asst_20DNMEENkfBsYupFjPCwfijZ")
75
+
76
+ # Facoltativo: vector store per forzare File Search
77
+ VECTOR_STORE_ID = os.environ.get("PPG_VECTOR_STORE_ID", "")
78
+
79
+ # Se vuoi forzare la seconda lingua nelle risposte (es. "Italian")
80
+ APP_FORCE_LANG = os.environ.get("APP_FORCE_LANG", "").strip()
81
 
82
  # ======================
83
  # Helpers
 
96
 
97
def call_assistant(label, confidence, zone, note, user_question, thread_id=None):
    """Send the classification context plus the user's question to the OpenAI Assistant.

    - If VECTOR_STORE_ID is set, it is attached to a newly created thread so the
      assistant can use File Search over the PPG document store.
    - Per-run `instructions` enforce the answering rules (file-first answers,
      mandatory zone, structured response, dual language).

    Args:
        label: classifier output label.
        confidence: classifier confidence in [0, 1].
        zone: indicative ship zone selected by the user (may be falsy).
        note: free-text user note (may be falsy).
        user_question: the question for this turn; falls back to a default prompt.
        thread_id: existing OpenAI thread id to continue, or None to start one.

    Returns:
        (reply, thread_id): the assistant's text reply (or a status/fallback
        message) and the thread id, so callers can continue the conversation.
    """
    # Create a thread on first call, wiring in File Search when configured.
    if not thread_id:
        if VECTOR_STORE_ID:
            thread = client.beta.threads.create(
                tool_resources={"file_search": {"vector_store_ids": [VECTOR_STORE_ID]}}
            )
        else:
            thread = client.beta.threads.create()
        thread_id = thread.id

    # Minimal classification context prepended to every user turn.
    core_context = f"""
Classification: {label} ({round(confidence*100,2)}%).
Zone: {zone or "Not specified"}.
User note: {note or "(none)"}.
"""

    user_payload = core_context + "\nUser question:\n" + (
        user_question or "Provide initial advisory based on classification and note."
    )

    client.beta.threads.messages.create(
        thread_id=thread_id,
        role="user",
        content=user_payload,
        # To attach specific files to this message, use attachments with a
        # file_id and the file_search tool, e.g.:
        # attachments=[{"file_id": "<FILE_ID>", "tools": [{"type": "file_search"}]}]
    )

    # Additional per-run instructions; optionally force a second language.
    second_lang_clause = (
        f"Then provide the same content in {APP_FORCE_LANG}."
        if APP_FORCE_LANG
        else "Then repeat in the user's language if detectable from note; else in Italian."
    )

    extra_instructions = (
        "Act as a PPG marine coatings technical specialist for ships (marine environments only). "
        "Answer ONLY using information found in the attached docs via File Search (TDS/SDS, standard cycles). "
        "If the docs lack details, reply 'Not in docs' and ask a targeted follow-up. "
        "ALWAYS ask for the area/zone if it's missing before prescribing a cycle. "
        "Structure: Diagnosis; Surface Preparation (ISO 8501-1, profile); System (primer/build/top or AF; DFT per coat; coats; recoat windows); Notes; Short disclaimer: "
        "'Research use only; verify with official PPG specs.' "
        "Cite file name and section/page when relevant. "
        "Provide first in English. " + second_lang_clause
    )

    run = client.beta.threads.runs.create(
        thread_id=thread_id,
        assistant_id=ASSISTANT_ID,
        instructions=extra_instructions,
        # Some client versions also support tool_choice="file_search" to force
        # File Search as the primary tool.
    )

    # Poll until the run reaches a terminal state. "incomplete" and
    # "requires_action" are included so a truncated run or an unexpected tool
    # call cannot leave us polling forever; a hard deadline guards the rest.
    terminal_statuses = {
        "completed", "failed", "cancelled", "expired", "incomplete", "requires_action",
    }
    deadline = time.time() + 120  # seconds; bound the busy-wait
    while True:
        r = client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run.id)
        if r.status in terminal_statuses or time.time() > deadline:
            break
        time.sleep(0.8)

    # Surface a non-successful run instead of silently returning a stale reply.
    if r.status != "completed":
        return f"Assistant run ended with status '{r.status}'. Please try again.", thread_id

    # messages.list returns newest-first, so the first assistant message found
    # is the latest reply; take its first text part.
    msgs = client.beta.threads.messages.list(thread_id=thread_id)
    reply = None
    for m in msgs.data:
        if m.role == "assistant":
            for part in m.content:
                if getattr(part, "type", "") == "text":
                    reply = part.text.value
                    break
            if reply:
                break

    return reply or "No reply from Assistant.", thread_id
179
 
 
185
  if image is None:
186
  return "No image received.", chat_history, thread_state
187
 
188
+ # blocco: serve una zona chiara
189
+ if not zone or zone == "Other / Not sure":
190
+ msg = "**Please select the area/zone first.** The assistant needs the zone to propose a correct cycle."
191
+ return msg, chat_history, thread_state
192
+
193
  label, conf = predict_image(image)
194
+
195
+ # prompt iniziale
196
  user_question = "Provide initial advisory based on classification and note."
197
  reply, thread_id = call_assistant(label, conf, zone, note, user_question)
198
 
 
205
  return out_text, new_history, {"thread_id": thread_id, "label": label, "confidence": conf, "zone": zone or ""}
206
 
207
  def continue_chat(user_msg, chat_history, thread_state, note, zone):
208
+ if not user_msg or not user_msg.strip():
209
  return chat_history, ""
210
 
211
  label = (thread_state or {}).get("label") or "unknown"
 
219
  thread_state["thread_id"] = thread_id
220
  return chat_history, ""
221
 
222
+ # ======================
223
+ # UI helpers: spinner + button state
224
+ # ======================
225
+
226
def show_spinner():
    """Reveal the spinner, show a quick status message, and disable Analyze."""
    return (
        gr.update(visible=True),
        "**Analyzing image...** This may take a few seconds.",
        gr.update(interactive=False),
    )
231
+
232
def hide_spinner():
    """Hide the spinner and re-enable the Analyze button."""
    return gr.update(visible=False), gr.update(interactive=True)
236
+
237
def show_chat_sending():
    """Disable the Send button while a chat turn is in flight."""
    return gr.update(interactive=False)
239
+
240
def hide_chat_sending():
    """Re-enable the Send button after the chat turn completes."""
    return gr.update(interactive=True)
242
+
243
  # ======================
244
  # UI
245
  # ======================
 
248
  # Corrosion Assistant — Beta
249
 
250
  **Welcome!** This demo runs a custom **ResNet50 corrosion classifier** and connects to a dedicated **PPG Assistant** on OpenAI.
251
+ - **Model**: ResNet50 classifier, **trained locally** on ~**9,000 images**
252
+ - **Data collection**: a public link for contributing images will open **soon**
253
+ - **Disclaimer**: research & experimental only. No professional advice, no warranty.
254
 
255
  After image analysis you can continue chatting with the assistant.
256
  """
 
258
  with gr.Blocks(title="Corrosion Assistant", theme=gr.themes.Soft()) as demo:
259
  gr.Markdown(WELCOME)
260
 
261
+ # CSS per spinner
262
+ gr.HTML("""
263
+ <style>
264
+ .ca-spinner { display:flex; align-items:center; gap:10px; padding:8px 0; }
265
+ .ca-spinner .loader {
266
+ border: 4px solid #e5e7eb;
267
+ border-top: 4px solid #3b82f6;
268
+ border-radius: 50%;
269
+ width: 20px; height: 20px;
270
+ animation: ca-spin 0.9s linear infinite;
271
+ }
272
+ @keyframes ca-spin { 100% { transform: rotate(360deg); } }
273
+ </style>
274
+ """)
275
+
276
  with gr.Row():
277
  with gr.Column(scale=2):
278
  img = gr.Image(type="pil", sources=["upload","webcam"], label="Upload or webcam")
 
280
  zone = gr.Dropdown(choices=ZONES, label="Zone (indicative)", value="Other / Not sure")
281
  analyze_btn = gr.Button("Analyze image", variant="primary")
282
  with gr.Column(scale=3):
283
+ spinner = gr.HTML(
284
+ "<div class='ca-spinner'><div class='loader'></div><div>Processing...</div></div>",
285
+ visible=False
286
+ )
287
  out_md = gr.Markdown(label="Analysis")
288
 
289
  gr.Markdown("### Continue the conversation with the PPG Assistant")
 
304
  chat_state = gr.State([])
305
  thread_state = gr.State({"thread_id": None, "label": None, "confidence": 0.0, "zone": ""})
306
 
307
+ # Analyze chain with spinner
308
  analyze_btn.click(
309
+ fn=show_spinner,
310
+ inputs=[],
311
+ outputs=[spinner, out_md, analyze_btn]
312
+ ).then(
313
  fn=run_analysis,
314
  inputs=[img, note, zone, chat_state, thread_state],
315
  outputs=[out_md, chat_state, thread_state]
316
+ ).then(
317
+ fn=hide_spinner,
318
+ inputs=[],
319
+ outputs=[spinner, analyze_btn]
320
  ).then(
321
  lambda h: h,
322
  inputs=[chat_state],
323
  outputs=[chat]
324
  )
325
 
326
+ # Chat chain with "sending" state
327
  send_btn.click(
328
+ fn=show_chat_sending,
329
+ inputs=[],
330
+ outputs=[send_btn]
331
+ ).then(
332
  fn=continue_chat,
333
  inputs=[chat_in, chat_state, thread_state, note, zone],
334
  outputs=[chat, chat_in]
335
+ ).then(
336
+ fn=hide_chat_sending,
337
+ inputs=[],
338
+ outputs=[send_btn]
339
  )
340
 
341
  clear_btn.click(