CB committed on
Commit
6e43b5c
·
verified ·
1 Parent(s): b722be7

Update streamlit_app.py

Browse files
Files changed (1) hide show
  1. streamlit_app.py +98 -16
streamlit_app.py CHANGED
@@ -353,23 +353,105 @@ if generate_now and not st.session_state.get("busy"):
353
  est_tokens = max_tokens
354
  est_cost_caption = f"Est. max tokens: {est_tokens}"
355
 
356
- agent = maybe_create_agent(model_used)
357
- debug_info = {"response_shape": None, "outputs_len": None, "outputs_types": None}
358
- if agent:
359
- with st.spinner("Generating description via Agent..."):
360
- if not processed:
361
- raise RuntimeError("Processed file missing for agent generation")
362
- response = agent.run(prompt_text, videos=[processed], safety_settings=safety_settings)
363
- out = getattr(response, "content", None) or getattr(response, "outputText", None) or str(response)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
364
  else:
365
- if not HAS_GENAI or genai is None:
366
- raise RuntimeError("Responses API not available; install google.generativeai SDK.")
367
- genai.configure(api_key=key_to_use)
368
- fname = file_name_or_id(processed)
369
- if not fname:
370
- raise RuntimeError("Uploaded file missing name/id")
371
- system_msg = {"role": "system", "content": prompt_text}
372
- user_msg = {"role": "user", "content": "Please summarize the attached video."}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
373
 
374
  try:
375
  response = genai.responses.generate(
 
353
  est_tokens = max_tokens
354
  est_cost_caption = f"Est. max tokens: {est_tokens}"
355
 
356
+ # Use Responses API directly (avoid phi.Agent which can raise when candidates is empty)
357
+ genai.configure(api_key=key_to_use)
358
+ fname = file_name_or_id(processed)
359
+ if not fname:
360
+ raise RuntimeError("Uploaded file missing name/id")
361
+ system_msg = {"role": "system", "content": prompt_text}
362
+ user_msg = {"role": "user", "content": "Please summarize the attached video."}
363
+
364
+ try:
365
+ response = genai.responses.generate(
366
+ model=model_used,
367
+ messages=[system_msg, user_msg],
368
+ files=[{"name": fname}],
369
+ safety_settings=safety_settings,
370
+ max_output_tokens=max_tokens,
371
+ )
372
+ except TypeError:
373
+ response = genai.responses.generate(
374
+ model=model_used,
375
+ input=[{"text": prompt_text, "files": [{"name": fname}]}],
376
+ safety_settings=safety_settings,
377
+ max_output_tokens=max_tokens,
378
+ )
379
+
380
+ # Normalize response into a safe list of output items
381
+ outputs = []
382
+ if response is None:
383
+ outputs = []
384
+ elif isinstance(response, dict):
385
+ for key in ("output", "candidates", "items", "responses"):
386
+ val = response.get(key)
387
+ if isinstance(val, list) and val:
388
+ outputs = val
389
+ break
390
+ if not outputs:
391
+ for v in response.values():
392
+ if isinstance(v, list) and v:
393
+ outputs = v
394
+ break
395
  else:
396
+ for attr in ("output", "candidates", "items", "responses"):
397
+ val = getattr(response, attr, None)
398
+ if isinstance(val, list) and val:
399
+ outputs = val
400
+ break
401
+
402
+ # Safely extract text pieces without indexing into empty lists
403
+ text_pieces = []
404
+ for item in outputs:
405
+ if item is None:
406
+ continue
407
+ # item may be dict or object
408
+ if isinstance(item, dict):
409
+ # try common fields
410
+ txt = item.get("text") or item.get("output_text") or item.get("message")
411
+ if isinstance(txt, str) and txt.strip():
412
+ text_pieces.append(txt.strip())
413
+ continue
414
+ contents = item.get("content") or item.get("output")
415
+ else:
416
+ txt = getattr(item, "text", None) or getattr(item, "output_text", None) or getattr(item, "message", None)
417
+ if isinstance(txt, str) and txt.strip():
418
+ text_pieces.append(txt.strip())
419
+ continue
420
+ contents = getattr(item, "content", None) or getattr(item, "output", None)
421
+
422
+ # contents may be string or list
423
+ if isinstance(contents, str) and contents.strip():
424
+ text_pieces.append(contents.strip())
425
+ elif isinstance(contents, (list, tuple)):
426
+ for c in contents:
427
+ if c is None:
428
+ continue
429
+ if isinstance(c, str) and c.strip():
430
+ text_pieces.append(c.strip())
431
+ continue
432
+ if isinstance(c, dict):
433
+ t = c.get("text") or c.get("content")
434
+ else:
435
+ t = getattr(c, "text", None) or getattr(c, "content", None)
436
+ if t:
437
+ text_pieces.append(str(t).strip())
438
+
439
+ # final fallback: top-level text
440
+ if not text_pieces:
441
+ top_text = getattr(response, "text", None) if not isinstance(response, dict) else (response.get("text") or response.get("message"))
442
+ if top_text:
443
+ text_pieces.append(str(top_text).strip())
444
+
445
+ # dedupe preserving order
446
+ seen = set()
447
+ filtered = []
448
+ for t in text_pieces:
449
+ if not isinstance(t, str):
450
+ continue
451
+ if t and t not in seen:
452
+ filtered.append(t)
453
+ seen.add(t)
454
+ out = "\n\n".join(filtered)
455
 
456
  try:
457
  response = genai.responses.generate(