ZENLLC committed on
Commit
e96b000
·
verified ·
1 Parent(s): cc681b6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -70
app.py CHANGED
@@ -1,4 +1,4 @@
1
- import os, time, base64, mimetypes, requests, traceback, json
2
  from dataclasses import dataclass
3
  from typing import Optional, Dict, Any, Generator, List
4
 
@@ -20,6 +20,8 @@ ENV_FALLBACK_KEY = (os.getenv("OPENAI_API_KEY") or "").strip()
20
  ALLOWED_MODELS = ["sora-2", "sora-2-pro", "sora"]
21
  ALLOWED_SIZES = ["1280x720","720x1280","1792x1024","1024x1792","1920x1080","1080x1920"]
22
 
 
 
23
  @dataclass
24
  class JobStatus:
25
  status: str
@@ -27,10 +29,23 @@ class JobStatus:
27
  output_url: Optional[str] = None
28
  output_b64: Optional[str] = None
29
 
30
- # -------- Utilities --------
31
- def _safe_video_path() -> str:
32
- return "/tmp/sora_output.mp4"
 
 
 
 
 
 
 
 
 
 
 
 
33
 
 
34
  def _file_to_b64(path: str) -> str:
35
  with open(path, "rb") as f:
36
  return base64.b64encode(f.read()).decode("utf-8")
@@ -67,22 +82,8 @@ def _make_client(user_key: Optional[str]) -> OpenAI:
67
  raise ValueError("Missing API key. Paste a valid OpenAI API key.")
68
  return OpenAI(api_key=key)
69
 
70
- # -------- Networking --------
71
  from requests import RequestException as ReqErr
72
- @retry(
73
- retry=retry_if_exception_type((ReqErr,)),
74
- wait=wait_exponential(multiplier=1, min=1, max=10),
75
- stop=stop_after_attempt(5),
76
- reraise=True,
77
- )
78
- def _download_stream(url: str, out_path: str) -> str:
79
- with requests.get(url, stream=True, timeout=60) as r:
80
- r.raise_for_status()
81
- with open(out_path, "wb") as f:
82
- for chunk in r.iter_content(1024 * 256):
83
- if chunk:
84
- f.write(chunk)
85
- return out_path
86
 
87
  # Normalize OpenAI exceptions across versions
88
  _OAI_EXC = tuple(e for e in [RateLimitError, APIConnectionError, APIStatusError] if isinstance(e, type)) or (Exception,)
@@ -95,26 +96,22 @@ _OAI_EXC = tuple(e for e in [RateLimitError, APIConnectionError, APIStatusError]
95
  reraise=True,
96
  )
97
  def _videos_generate(client: OpenAI, **kwargs) -> Any:
98
- """
99
- Try the new Videos API; fallback to Jobs API. Surface both errors if they fail.
100
- """
101
- # Path A
102
  if hasattr(client, "videos") and hasattr(client.videos, "generate"):
103
  try:
104
  return client.videos.generate(**kwargs)
105
  except Exception as e_a:
106
- # fall through to jobs
107
  last_a = e_a
108
  else:
109
  last_a = "client.videos.generate not found"
110
 
111
- # Path B
112
  if hasattr(client, "videos") and hasattr(client.videos, "jobs") and hasattr(client.videos.jobs, "create"):
113
  try:
114
  return client.videos.jobs.create(**kwargs)
115
  except Exception as e_b:
116
  raise RuntimeError(f"videos.generate failed/absent: {last_a}\njobs.create failed: {e_b}")
117
- raise RuntimeError(f"Your OpenAI SDK doesn't expose videos endpoints on this key/org. Seen attributes: {dir_safe(client)}")
118
 
119
  @retry(
120
  retry=retry_if_exception_type(_OAI_EXC),
@@ -127,10 +124,10 @@ def _videos_retrieve(client: OpenAI, job_id: str) -> Any:
127
  try:
128
  return client.videos.retrieve(job_id)
129
  except Exception:
130
- pass # try jobs next
131
  if hasattr(client, "videos") and hasattr(client.videos, "jobs") and hasattr(client.videos.jobs, "retrieve"):
132
  return client.videos.jobs.retrieve(job_id)
133
- raise RuntimeError("No videos.retrieve or videos.jobs.retrieve on this SDK. Upgrade openai python.")
134
 
135
  def _extract_status(resp: Any) -> JobStatus:
136
  status = getattr(resp, "status", None) or getattr(resp, "state", None) or "unknown"
@@ -152,13 +149,7 @@ def _extract_status(resp: Any) -> JobStatus:
152
 
153
  return JobStatus(status=status, error=err, output_url=out_url, output_b64=out_b64)
154
 
155
- def dir_safe(obj) -> Dict[str, Any]:
156
- try:
157
- return sorted([a for a in dir(obj) if not a.startswith("_")])
158
- except Exception:
159
- return {"inspect_error": "dir() failed"}
160
-
161
- # -------- Core (STREAMING; yields LISTS matching outputs) --------
162
  def generate_video_stream(
163
  api_key: str,
164
  prompt: str,
@@ -170,33 +161,17 @@ def generate_video_stream(
170
  guidance: float,
171
  init_image: Optional[str],
172
  ) -> Generator[List[Any], None, None]:
173
- """
174
- Generator that ALWAYS yields a list [video_value_or_update, status_text].
175
- Many Gradio builds ignore tuple yields; lists are safest.
176
- """
177
-
178
  # First visible tick so UI never blanks:
179
  yield [gr.update(), "Starting…"]
180
 
181
- # 0) Setup
182
  try:
183
  client = _make_client(api_key)
184
  except Exception as e_init:
185
  yield [gr.update(), f"Setup error: {e_init}"]
186
  return
187
 
188
- # 0.1) Preflight SDK visibility (prints once so we know what exists)
189
- try:
190
- vids = hasattr(client, "videos")
191
- methods = []
192
- if vids:
193
- methods = [m for m in dir(client.videos) if not m.startswith("_")]
194
- msg = f"SDK preflight → videos: {vids}, methods: {methods}"
195
- yield [gr.update(), msg]
196
- except Exception as e_pref:
197
- yield [gr.update(), f"SDK preflight error: {e_pref}"]
198
-
199
- # 1) Validate inputs
200
  try:
201
  prompt = _sanitize_prompt(prompt)
202
  model = _validate_model(model)
@@ -227,7 +202,7 @@ def generate_video_stream(
227
  yield [gr.update(), f"Validation error: {e_val}"]
228
  return
229
 
230
- # 2) Submit job
231
  try:
232
  yield [gr.update(), "Submitting job…"]
233
  job = _videos_generate(client, **req)
@@ -243,9 +218,8 @@ def generate_video_stream(
243
  yield [gr.update(), f"Submit error: {e_submit}\n{traceback.format_exc(limit=2)}"]
244
  return
245
 
246
- # 3) Poll job
247
- start = time.time()
248
- last_emit = 0
249
  while True:
250
  try:
251
  status_obj = _videos_retrieve(client, job_id)
@@ -263,22 +237,20 @@ def generate_video_stream(
263
  yield [gr.update(), f"Rendering… status={js.status}"]
264
 
265
  if js.status in ("succeeded", "completed", "complete"):
266
- out_path = _safe_video_path()
 
 
 
 
 
267
  if js.output_b64:
268
  try:
269
- with open(out_path, "wb") as f:
270
  f.write(base64.b64decode(js.output_b64))
271
- yield [out_path, f"Done with {model} ({size}, {duration}s)."]
272
  except Exception as werr:
273
  yield [gr.update(), f"Write error: {werr}"]
274
  return
275
- if js.output_url:
276
- try:
277
- _download_stream(js.output_url, out_path)
278
- yield [out_path, f"Downloaded from URL. Done with {model}."]
279
- except Exception as dl_err:
280
- yield [js.output_url, f"Ready (URL) — local download failed: {dl_err}"]
281
- return
282
  yield [gr.update(), "Job succeeded but no video payload was returned."]
283
  return
284
 
@@ -289,7 +261,7 @@ def generate_video_stream(
289
  yield [gr.update(), detail]
290
  return
291
 
292
- if now - start > 1800: # 30 min timeout
293
  yield [gr.update(), "Timed out waiting for the video. Try shorter duration."]
294
  return
295
 
@@ -301,7 +273,7 @@ def build_ui():
301
  gr.Markdown("## ZEN — Sora / Sora-2 / Sora-2-Pro (OpenAI Videos API)")
302
  gr.Markdown(
303
  "Paste an OpenAI API key (not stored). Provide a detailed prompt. "
304
- "Streaming logs ensure you always see progress or readable errors."
305
  )
306
 
307
  with gr.Row():
@@ -326,7 +298,6 @@ def build_ui():
326
  video = gr.Video(label="Result", autoplay=True)
327
  status = gr.Textbox(label="Status / Logs", interactive=False)
328
 
329
- # IMPORTANT: outputs must be exactly two and we yield LISTS [video, status]
330
  go.click(
331
  fn=generate_video_stream,
332
  inputs=[api_key, prompt, model, duration, size, seed, audio, guidance, init_image],
 
1
+ import os, time, base64, mimetypes, requests, traceback, glob
2
  from dataclasses import dataclass
3
  from typing import Optional, Dict, Any, Generator, List
4
 
 
20
  ALLOWED_MODELS = ["sora-2", "sora-2-pro", "sora"]
21
  ALLOWED_SIZES = ["1280x720","720x1280","1792x1024","1024x1792","1920x1080","1080x1920"]
22
 
23
+ TMP_PATH = "/tmp/sora_output.mp4"
24
+
25
  @dataclass
26
  class JobStatus:
27
  status: str
 
29
  output_url: Optional[str] = None
30
  output_b64: Optional[str] = None
31
 
32
+ # -------- Minimal housekeeping to keep disk usage tiny --------
33
+ def _startup_cleanup_tmp():
34
+ try:
35
+ # remove stray mp4s in /tmp older than ~1 hour
36
+ now = time.time()
37
+ for p in glob.glob("/tmp/*.mp4"):
38
+ try:
39
+ if now - os.path.getmtime(p) > 3600:
40
+ os.remove(p)
41
+ except Exception:
42
+ pass
43
+ except Exception:
44
+ pass
45
+
46
+ _startup_cleanup_tmp()
47
 
48
+ # -------- Utilities --------
49
  def _file_to_b64(path: str) -> str:
50
  with open(path, "rb") as f:
51
  return base64.b64encode(f.read()).decode("utf-8")
 
82
  raise ValueError("Missing API key. Paste a valid OpenAI API key.")
83
  return OpenAI(api_key=key)
84
 
85
+ # -------- Networking (only used for optional URL probe; no downloads) --------
86
  from requests import RequestException as ReqErr
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
88
  # Normalize OpenAI exceptions across versions
89
  _OAI_EXC = tuple(e for e in [RateLimitError, APIConnectionError, APIStatusError] if isinstance(e, type)) or (Exception,)
 
96
  reraise=True,
97
  )
98
  def _videos_generate(client: OpenAI, **kwargs) -> Any:
99
+ # Try new Videos API
 
 
 
100
  if hasattr(client, "videos") and hasattr(client.videos, "generate"):
101
  try:
102
  return client.videos.generate(**kwargs)
103
  except Exception as e_a:
 
104
  last_a = e_a
105
  else:
106
  last_a = "client.videos.generate not found"
107
 
108
+ # Try Jobs API
109
  if hasattr(client, "videos") and hasattr(client.videos, "jobs") and hasattr(client.videos.jobs, "create"):
110
  try:
111
  return client.videos.jobs.create(**kwargs)
112
  except Exception as e_b:
113
  raise RuntimeError(f"videos.generate failed/absent: {last_a}\njobs.create failed: {e_b}")
114
+ raise RuntimeError("No videos endpoints on this SDK/account. Update openai package or check org access.")
115
 
116
  @retry(
117
  retry=retry_if_exception_type(_OAI_EXC),
 
124
  try:
125
  return client.videos.retrieve(job_id)
126
  except Exception:
127
+ pass
128
  if hasattr(client, "videos") and hasattr(client.videos, "jobs") and hasattr(client.videos.jobs, "retrieve"):
129
  return client.videos.jobs.retrieve(job_id)
130
+ raise RuntimeError("No videos.retrieve or videos.jobs.retrieve on this SDK/account.")
131
 
132
  def _extract_status(resp: Any) -> JobStatus:
133
  status = getattr(resp, "status", None) or getattr(resp, "state", None) or "unknown"
 
149
 
150
  return JobStatus(status=status, error=err, output_url=out_url, output_b64=out_b64)
151
 
152
+ # -------- Core (STREAMING; yields LISTS matching [video, status]) --------
 
 
 
 
 
 
153
  def generate_video_stream(
154
  api_key: str,
155
  prompt: str,
 
161
  guidance: float,
162
  init_image: Optional[str],
163
  ) -> Generator[List[Any], None, None]:
 
 
 
 
 
164
  # First visible tick so UI never blanks:
165
  yield [gr.update(), "Starting…"]
166
 
167
+ # Setup
168
  try:
169
  client = _make_client(api_key)
170
  except Exception as e_init:
171
  yield [gr.update(), f"Setup error: {e_init}"]
172
  return
173
 
174
+ # Validate
 
 
 
 
 
 
 
 
 
 
 
175
  try:
176
  prompt = _sanitize_prompt(prompt)
177
  model = _validate_model(model)
 
202
  yield [gr.update(), f"Validation error: {e_val}"]
203
  return
204
 
205
+ # Submit
206
  try:
207
  yield [gr.update(), "Submitting job…"]
208
  job = _videos_generate(client, **req)
 
218
  yield [gr.update(), f"Submit error: {e_submit}\n{traceback.format_exc(limit=2)}"]
219
  return
220
 
221
+ # Poll
222
+ start = time.time(); last_emit = 0
 
223
  while True:
224
  try:
225
  status_obj = _videos_retrieve(client, job_id)
 
237
  yield [gr.update(), f"Rendering… status={js.status}"]
238
 
239
  if js.status in ("succeeded", "completed", "complete"):
240
+ # 🔑 DISK-SAFE RETURNS:
241
+ # 1) Prefer URL → zero disk usage (Gradio Video can play URLs)
242
+ if js.output_url:
243
+ yield [js.output_url, f"Ready (URL). Done with {model} ({size}, {duration}s)."]
244
+ return
245
+ # 2) If the API returns only b64, write to a single temp file (overwritten every run)
246
  if js.output_b64:
247
  try:
248
+ with open(TMP_PATH, "wb") as f:
249
  f.write(base64.b64decode(js.output_b64))
250
+ yield [TMP_PATH, f"Done with {model} ({size}, {duration}s)."]
251
  except Exception as werr:
252
  yield [gr.update(), f"Write error: {werr}"]
253
  return
 
 
 
 
 
 
 
254
  yield [gr.update(), "Job succeeded but no video payload was returned."]
255
  return
256
 
 
261
  yield [gr.update(), detail]
262
  return
263
 
264
+ if now - start > 1800:
265
  yield [gr.update(), "Timed out waiting for the video. Try shorter duration."]
266
  return
267
 
 
273
  gr.Markdown("## ZEN — Sora / Sora-2 / Sora-2-Pro (OpenAI Videos API)")
274
  gr.Markdown(
275
  "Paste an OpenAI API key (not stored). Provide a detailed prompt. "
276
+ "This build avoids local downloads to keep disk usage near zero."
277
  )
278
 
279
  with gr.Row():
 
298
  video = gr.Video(label="Result", autoplay=True)
299
  status = gr.Textbox(label="Status / Logs", interactive=False)
300
 
 
301
  go.click(
302
  fn=generate_video_stream,
303
  inputs=[api_key, prompt, model, duration, size, seed, audio, guidance, init_image],