Hug0endob committed on
Commit
73fc44c
·
verified ·
1 Parent(s): 922dd1b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +43 -2
app.py CHANGED
@@ -233,11 +233,51 @@ def build_messages_for_text(prompt: str, extra_text: str):
233
  return [{"role": "system", "content": SYSTEM_INSTRUCTION}, {"role": "user", "content": f"{prompt}\n\n{extra_text}"}]
234
 
235
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
236
  def stream_and_collect(client, model, messages, parts: list):
237
  try:
 
238
  stream_gen = None
239
  try:
240
- stream_gen = client.chat.stream(model=model, messages=messages)
241
  except Exception:
242
  stream_gen = None
243
  if stream_gen:
@@ -249,7 +289,7 @@ def stream_and_collect(client, model, messages, parts: list):
249
  continue
250
  parts.append(d)
251
  return
252
- res = client.chat.complete(model=model, messages=messages, stream=False)
253
  try:
254
  choices = getattr(res, "choices", None) or res.get("choices", [])
255
  except Exception:
@@ -404,6 +444,7 @@ with gr.Blocks(title="Flux", css=css) as demo:
404
  with gr.Column(scale=2):
405
  final_text = gr.Markdown(value="")
406
 
 
407
  url_input.change(fn=load_preview, inputs=[url_input], outputs=[preview_image, preview_video, gr.Textbox(visible=False)])
408
  submit.click(fn=generate_final_text, inputs=[url_input, custom_prompt, api_key], outputs=[final_text])
409
  demo.queue()
 
233
  return [{"role": "system", "content": SYSTEM_INSTRUCTION}, {"role": "user", "content": f"{prompt}\n\n{extra_text}"}]
234
 
235
 
236
# Helper: normalize chat messages so every message's "content" is a plain
# string (the downstream client calls expect string content).
def normalize_messages(messages):
    """Return a copy of *messages* where each message dict's "content" is a str.

    List-form content (multimodal style) is flattened with "\n\n" separators:
    "text" parts keep their text, "image_url" parts keep the URL — both the
    plain-string form and the nested {"url": ...} dict form are supported —
    and "image_base64" parts become a data URL.  Non-dict entries and
    messages whose content is already a string pass through unchanged;
    None/other scalars are coerced to str ("" for falsy values).
    """
    normalized = []
    for msg in messages:
        # Leave anything that is not a message dict untouched.
        if not isinstance(msg, dict):
            normalized.append(msg)
            continue
        content = msg.get("content")
        if isinstance(content, list):
            pieces = []
            for item in content:
                if isinstance(item, str):
                    pieces.append(item)
                elif isinstance(item, dict):
                    kind = item.get("type")
                    if kind == "text" and item.get("text"):
                        pieces.append(item["text"])
                    elif kind == "image_url" and item.get("image_url"):
                        url = item["image_url"]
                        # OpenAI-style payloads nest the URL in a dict:
                        # {"type": "image_url", "image_url": {"url": ...}}.
                        # Unwrap it so the later join never sees a dict.
                        if isinstance(url, dict):
                            url = url.get("url") or ""
                        pieces.append(str(url))
                    elif kind == "image_base64" and item.get("image_base64"):
                        # Convert raw base64 to a data URL so the result
                        # stays a plain string.
                        pieces.append("data:image/jpeg;base64," + item["image_base64"])
                    else:
                        fallback = (item.get("text") or item.get("image_url")
                                    or item.get("image_base64") or "")
                        if isinstance(fallback, dict):
                            fallback = fallback.get("url") or ""
                        pieces.append(str(fallback))
                else:
                    pieces.append(str(item))
            copy = msg.copy()
            copy["content"] = "\n\n".join(p for p in pieces if p).strip()
            normalized.append(copy)
        elif not isinstance(content, str):
            # None or other scalar: coerce to str ("" for None/falsy).
            copy = msg.copy()
            copy["content"] = str(content or "")
            normalized.append(copy)
        else:
            normalized.append(msg)
    return normalized
273
+
274
+
275
  def stream_and_collect(client, model, messages, parts: list):
276
  try:
277
+ norm_msgs = normalize_messages(messages)
278
  stream_gen = None
279
  try:
280
+ stream_gen = client.chat.stream(model=model, messages=norm_msgs)
281
  except Exception:
282
  stream_gen = None
283
  if stream_gen:
 
289
  continue
290
  parts.append(d)
291
  return
292
+ res = client.chat.complete(model=model, messages=norm_msgs, stream=False)
293
  try:
294
  choices = getattr(res, "choices", None) or res.get("choices", [])
295
  except Exception:
 
444
  with gr.Column(scale=2):
445
  final_text = gr.Markdown(value="")
446
 
447
+ # Ensure preview outputs get None when not applicable so Gradio hides them
448
  url_input.change(fn=load_preview, inputs=[url_input], outputs=[preview_image, preview_video, gr.Textbox(visible=False)])
449
  submit.click(fn=generate_final_text, inputs=[url_input, custom_prompt, api_key], outputs=[final_text])
450
  demo.queue()