MogensR committed on
Commit
bf805fb
·
1 Parent(s): e1ccac2
Files changed (1) hide show
  1. ui.py +7 -328
ui.py CHANGED
@@ -1,25 +1,14 @@
1
  #!/usr/bin/env python3
2
  """
3
- BackgroundFX Pro — Standalone UI Application
4
- Complete file with all necessary functions included
5
  """
6
 
7
  # ==== Runtime hygiene & paths (high in file) ====
8
  import os
9
  import sys
10
- import gc
11
- import time
12
- import json
13
- import uuid
14
- import shutil
15
  import logging
16
- import tempfile
17
- import threading
18
- import traceback
19
- import subprocess
20
  from pathlib import Path
21
- from typing import Optional, Callable, Dict, Any
22
- from datetime import datetime
23
 
24
  # Quiet BLAS/OpenMP thread spam (fixes "libgomp: Invalid value..." warning)
25
  os.environ.setdefault("OMP_NUM_THREADS", "4")
@@ -45,321 +34,11 @@
45
 
46
  # Standard imports
47
  import torch
48
- import numpy as np
49
- from PIL import Image
50
- import cv2
51
  import gradio as gr
52
 
53
# Configure logging: attach one stream handler to the app logger, guarded
# so repeated imports of this module don't duplicate log output.
logger = logging.getLogger("backgroundfx_pro")
if not logger.handlers:
    _handler = logging.StreamHandler()
    _handler.setFormatter(
        logging.Formatter("[%(asctime)s] %(levelname)s: %(message)s")
    )
    logger.addHandler(_handler)
logger.setLevel(logging.INFO)
60
-
61
- # ===============================================================================
62
- # UTILITY FUNCTIONS
63
- # ===============================================================================
64
-
65
- def _disk_stats(p: Path) -> str:
66
- """Get disk usage statistics for a path"""
67
- try:
68
- total, used, free = shutil.disk_usage(str(p))
69
- to_mb = lambda x: x // (1024 * 1024)
70
- return f"disk(total={to_mb(total)}MB, used={to_mb(used)}MB, free={to_mb(free)}MB)"
71
- except Exception:
72
- return "disk(n/a)"
73
-
74
def startup_probe() -> None:
    """Comprehensive startup probe - validates system readiness.

    Performs three checks, logging results as it goes:
      1. Write probe: writes and reads back a file under TMP_ROOT
         (fail-fast if the filesystem is read-only or full).
      2. Torch/GPU status: logs torch/CUDA versions and GPU memory;
         informational only, never fatal.
      3. Directory check: warns if expected project directories are
         missing under APP_ROOT (non-fatal).

    Raises:
        RuntimeError: if the critical write probe (or any other
            unexpected error in the outer try) fails.
    """
    # NOTE(review): TMP_ROOT and APP_ROOT are module-level Path globals
    # defined elsewhere in this file (not visible in this excerpt).
    try:
        logger.info("🚀 BACKGROUNDFX PRO STARTUP PROBE")
        logger.info(f"📁 Working directory: {os.getcwd()}")
        logger.info(f"🐍 Python executable: {sys.executable}")

        # Write probe (critical - fail fast if can't write)
        probe_file = TMP_ROOT / "startup_probe.txt"
        probe_file.write_text("startup_test_ok", encoding="utf-8")
        assert probe_file.read_text(encoding="utf-8") == "startup_test_ok"
        logger.info(f"✅ WRITE PROBE OK: {probe_file} | {_disk_stats(APP_ROOT)}")

        probe_file.unlink(missing_ok=True)

        # GPU/Torch status (informational; wrapped so a torch quirk can't
        # abort startup)
        try:
            logger.info(f"🔧 Torch version: {torch.__version__}")
            if hasattr(torch.version, 'cuda') and torch.version.cuda:
                logger.info(f"🔧 CUDA version: {torch.version.cuda}")

            if torch.cuda.is_available():
                gpu_count = torch.cuda.device_count()
                gpu_name = torch.cuda.get_device_name(0) if gpu_count > 0 else "Unknown"
                gpu_memory = torch.cuda.get_device_properties(0).total_memory / 1e9
                logger.info(f"🔥 GPU Available: {gpu_name} ({gpu_count} devices)")
                logger.info(f"💾 GPU Memory: {gpu_memory:.1f}GB")
            else:
                logger.warning("⚠️ No GPU available - using CPU only")
        except Exception as e:
            logger.warning(f"⚠️ Torch check failed: {e}")

        # Directory verification (missing dirs are warned about, not fatal)
        required_dirs = ['checkpoints', 'models', 'utils']
        for dir_name in required_dirs:
            dir_path = APP_ROOT / dir_name
            if dir_path.exists():
                logger.info(f"✅ Directory found: {dir_name}")
            else:
                logger.warning(f"⚠️ Missing directory: {dir_name}")

        logger.info("🎯 Startup probe completed successfully - system ready!")

    except Exception as e:
        logger.error(f"❌ STARTUP PROBE FAILED: {e}")
        logger.error(f"📊 Disk stats: {_disk_stats(APP_ROOT)}")
        raise RuntimeError(f"Startup probe failed - system not ready: {e}") from e
121
-
122
def create_job_directory() -> Path:
    """Create and return a unique per-job working directory under JOB_ROOT.

    The name combines a short random id with a Unix timestamp so
    concurrent or repeated jobs never collide.
    """
    short_id = str(uuid.uuid4())[:8]
    timestamp = int(time.time())
    path = JOB_ROOT / f"job_{short_id}_{timestamp}"
    path.mkdir(parents=True, exist_ok=True)
    logger.info(f"📁 Created job directory: {path}")
    return path
129
-
130
def clear_gpu_memory() -> None:
    """Aggressive GPU memory cleanup.

    Empties the CUDA allocator cache and synchronizes when a GPU is
    present, then runs the Python garbage collector. Best-effort:
    failures are logged as warnings, never raised.
    """
    # NOTE(review): indentation reconstructed from a diff view; gc.collect()
    # is assumed to run even on CPU-only hosts — confirm against the original.
    try:
        if torch.cuda.is_available():
            torch.cuda.empty_cache()
            torch.cuda.synchronize()
        gc.collect()
        logger.info("🧹 GPU memory cleared")
    except Exception as e:
        # Cleanup is best-effort; never crash the pipeline over it.
        logger.warning(f"⚠️ GPU cleanup warning: {e}")
140
-
141
- # ===============================================================================
142
- # PROGRESS TRACKING
143
- # ===============================================================================
144
-
145
class ProgressTracker:
    """Thread-safe progress state shared between the pipeline and the UI."""

    def __init__(self):
        # Current step label, fraction complete in [0, 1], and wall-clock
        # start used to compute elapsed time.
        self.current_step = ""
        self.progress = 0.0
        self.start_time = time.time()
        self.lock = threading.Lock()

    def update(self, step: str, progress: float = None):
        """Record the current step and, when given, a clamped [0, 1] progress."""
        with self.lock:
            self.current_step = step
            if progress is None:
                return
            self.progress = min(1.0, max(0.0, progress))

    def get_status(self) -> Dict[str, Any]:
        """Snapshot step, progress, and seconds elapsed since construction."""
        with self.lock:
            return {
                "step": self.current_step,
                "progress": self.progress,
                "elapsed": time.time() - self.start_time,
            }

# Module-level singleton used by the processing pipeline.
progress_tracker = ProgressTracker()
173
-
174
- # ===============================================================================
175
- # VIDEO PROCESSING PIPELINE
176
- # ===============================================================================
177
-
178
def process_video_safe(
    video_path: str,
    background_image: Optional[Image.Image],
    background_type: str = "custom",
    background_prompt: str = "",
    progress_callback: Optional[Callable] = None
) -> str:
    """Safe video processing with comprehensive error handling.

    Creates a per-job working directory, runs the external two-stage
    pipeline (SAM2 segmentation + composition), and validates that an
    output file was actually produced.

    Args:
        video_path: path to the input video file.
        background_image: PIL image used as the replacement background
            (required despite the Optional annotation — None raises).
        background_type: accepted but not forwarded to the pipeline here.
        background_prompt: accepted but not forwarded to the pipeline here.
        progress_callback: optional callable receiving human-readable
            progress strings for the UI.

    Returns:
        Path (as str) to the processed output video.

    Raises:
        ValueError: if background_image is None.
        RuntimeError: if the pipeline produced no output file.
        Exception: any pipeline error is logged with full traceback,
            GPU memory is cleared, and the error is re-raised.
    """
    job_dir = None
    try:
        logger.info("=" * 60)
        logger.info("=== STARTING SAFE VIDEO PROCESSING ===")
        logger.info("=" * 60)

        # Create job directory
        job_dir = create_job_directory()

        # Pre-flight checks
        logger.info(f"DEBUG: Video path: {video_path}")
        logger.info(f"DEBUG: Video exists: {Path(video_path).exists()}")
        logger.info(f"DEBUG: Job directory: {job_dir}")
        logger.info(f"DEBUG: Background image size: {background_image.size if background_image else 'None'}")

        # Import two-stage pipeline lazily so a broken pipeline module
        # fails here (inside the error handler) rather than at app import.
        logger.info("DEBUG: Attempting to import two-stage pipeline...")
        from two_stage_pipeline import process_two_stage as pipeline_process
        logger.info("✓ Two-stage pipeline imported successfully")

        # Enhanced progress callback with stage monitoring; current_stage is
        # a dict (not a bare str) so the closure can mutate it.
        current_stage = {"stage": "init"}

        def safe_progress_callback(step: str, progress: float = None):
            # Never let a progress/UI error kill the pipeline: everything
            # here is wrapped and merely logged on failure.
            try:
                # Stage detection: log a transition line the first time each
                # stage marker appears in a step message.
                if "Stage 1" in step:
                    if current_stage["stage"] != "stage1":
                        current_stage["stage"] = "stage1"
                        logger.info("🔄 STAGE TRANSITION: Entering Stage 1 (SAM2)")
                elif "Stage 2" in step:
                    if current_stage["stage"] != "stage2":
                        current_stage["stage"] = "stage2"
                        logger.info("🔄 STAGE TRANSITION: Entering Stage 2 (Composition)")
                elif "Done" in step:
                    if current_stage["stage"] != "complete":
                        current_stage["stage"] = "complete"
                        logger.info("🔄 STAGE TRANSITION: Pipeline Complete")

                logger.info(f"PROGRESS [{current_stage['stage'].upper()}]: {step} ({progress})")
                progress_tracker.update(step, progress)

                if progress_callback:
                    if progress is not None:
                        progress_callback(f"Progress: {progress:.1%} - {step}")
                    else:
                        progress_callback(step)
            except Exception as e:
                logger.error(f"Progress callback error: {e}")

        # Validation
        if background_image is None:
            raise ValueError("Background image is required")

        logger.info("DEBUG: Pre-pipeline validation complete")
        logger.info(f"DEBUG: Job dir contents before: {list(job_dir.iterdir()) if job_dir.exists() else 'does not exist'}")

        # Call two-stage pipeline
        result_path = pipeline_process(
            video_path=video_path,
            background_image=background_image,
            workdir=job_dir,
            progress=safe_progress_callback,
            use_matany=True
        )

        logger.info(f"DEBUG: Pipeline returned: {result_path}")

        # Post-processing validation (debug logging only; the hard check
        # follows below)
        if result_path:
            result_file = Path(result_path)
            logger.info(f"DEBUG: Result file exists: {result_file.exists()}")
            if result_file.exists():
                logger.info(f"DEBUG: Result file size: {result_file.stat().st_size} bytes")
        logger.info(f"DEBUG: Job dir contents after: {list(job_dir.iterdir())}")

        if not result_path or not Path(result_path).exists():
            raise RuntimeError("Pipeline processing failed - no output produced")

        logger.info("=" * 60)
        logger.info(f"✅ PROCESSING COMPLETED: {result_path}")
        logger.info("=" * 60)
        return result_path

    except Exception as e:
        # Log everything useful for a post-mortem, free GPU memory, then
        # re-raise so the caller (UI wrapper) can report the failure.
        logger.error("=" * 60)
        logger.error(f"❌ PROCESSING FAILED:")
        logger.error(f" Error type: {type(e).__name__}")
        logger.error(f" Error message: {e}")
        logger.error(f" Job directory exists: {job_dir.exists() if job_dir else 'unknown'}")
        if job_dir and job_dir.exists():
            logger.error(f" Job directory contents: {list(job_dir.iterdir())}")
        logger.error(" Full traceback:")
        logger.error(f"{traceback.format_exc()}")
        logger.error("=" * 60)

        # Cleanup GPU memory on error
        clear_gpu_memory()
        raise
285
-
286
- # ===============================================================================
287
- # GRADIO INTERFACE
288
- # ===============================================================================
289
-
290
def create_interface():
    """Create and configure the Gradio interface.

    Returns:
        gr.Blocks: the assembled demo, ready for .queue()/.launch().
    """

    def process_video_wrapper(video_file, background_image):
        """Gradio click handler: returns (output_video_path_or_None, status_message)."""
        # All errors are caught and surfaced as a status string so the UI
        # never shows a raw traceback.
        try:
            if not video_file:
                return None, "Please upload a video file"

            if not background_image:
                return None, "Please upload a background image"

            logger.info(f"Processing video: {video_file}")
            logger.info(f"Background image size: {background_image.size}")

            # Process video
            result_path = process_video_safe(
                video_path=video_file,
                background_image=background_image,
                background_type="custom",
                background_prompt="",
                progress_callback=lambda msg: logger.info(f"UI Progress: {msg}")
            )

            return result_path, "✅ Processing completed successfully!"

        except Exception as e:
            error_msg = f"❌ Processing failed: {str(e)}"
            logger.error(error_msg)
            return None, error_msg

    # Create Gradio interface: inputs (video + background image) on the
    # left column, outputs (processed video + status) on the right.
    with gr.Blocks(title="BackgroundFX Pro", theme=gr.themes.Soft()) as demo:
        gr.Markdown("# 🎬 BackgroundFX Pro")
        gr.Markdown("Professional video background replacement using SAM2 + MatAnyone")

        with gr.Row():
            with gr.Column():
                video_input = gr.Video(
                    label="📹 Upload Video",
                    format="mp4"
                )
                background_input = gr.Image(
                    label="🖼️ Background Image",
                    type="pil"  # hand the handler a PIL.Image, as process_video_safe expects
                )

                process_btn = gr.Button(
                    "🚀 Process Video",
                    variant="primary",
                    size="lg"
                )

            with gr.Column():
                video_output = gr.Video(
                    label="✨ Processed Video",
                    format="mp4"
                )
                status_output = gr.Textbox(
                    label="📊 Status",
                    value="Ready to process...",
                    interactive=False
                )

        # Process button click
        process_btn.click(
            fn=process_video_wrapper,
            inputs=[video_input, background_input],
            outputs=[video_output, status_output],
            show_progress=True
        )

    return demo
363
 
364
  # ===============================================================================
365
  # MAIN APPLICATION
@@ -375,8 +54,8 @@ def main():
375
  logger.info("🚀 Launching Gradio interface...")
376
  demo = create_interface()
377
 
378
- # Configure for single-user processing (avoid file conflicts)
379
- demo.queue(concurrency_count=1, max_size=2, api_open=False)
380
 
381
  demo.launch(
382
  server_name="0.0.0.0",
 
1
  #!/usr/bin/env python3
2
  """
3
+ BackgroundFX Pro — Main UI Application
4
+ Clean, focused main file that coordinates the application
5
  """
6
 
7
  # ==== Runtime hygiene & paths (high in file) ====
8
  import os
9
  import sys
 
 
 
 
 
10
  import logging
 
 
 
 
11
  from pathlib import Path
 
 
12
 
13
  # Quiet BLAS/OpenMP thread spam (fixes "libgomp: Invalid value..." warning)
14
  os.environ.setdefault("OMP_NUM_THREADS", "4")
 
34
 
35
  # Standard imports
36
  import torch
 
 
 
37
  import gradio as gr
38
 
39
+ # Import our modules
40
+ from ui_core_functionality import startup_probe, logger
41
+ from ui_core_interface import create_interface
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
 
43
  # ===============================================================================
44
  # MAIN APPLICATION
 
54
  logger.info("🚀 Launching Gradio interface...")
55
  demo = create_interface()
56
 
57
+ # Fixed queue configuration for Gradio 5.46.0
58
+ demo.queue()
59
 
60
  demo.launch(
61
  server_name="0.0.0.0",