# app.py — ultra-light video generator Space (HF Spaces, CPU Basic)
"""
Generatore Video ULTRA-LEGGERO per HF Spaces CPU Basic (2026)
- Modello: zeroscope_v2_dark_30x448x256 (~1–1.5 GB fp16)
- Risoluzione: 256x448, 8 frame, 15 steps
- Target RAM peak: ~5–8 GB → dovrebbe sopravvivere su CPU Basic
"""
import os

# HF_HOME must be set BEFORE importing diffusers: huggingface_hub reads it
# at import time to locate the model cache, so setting it after the imports
# (as the original did) leaves the cache pointing at the default location.
os.environ["HF_HOME"] = "/tmp/hf_cache"
os.makedirs("/tmp/hf_cache", exist_ok=True)

import gc
import queue
import sqlite3
import threading
import uuid

import torch
import gradio as gr
from diffusers import DiffusionPipeline
from diffusers.utils import export_to_video

# Output directory for generated clips (ephemeral /tmp filesystem).
TMP_DIR = "/tmp/videos"
os.makedirs(TMP_DIR, exist_ok=True)
# ==============================================================================
# Minimal DATABASE on /tmp
# ==============================================================================
class MiniDB:
    """Minimal SQLite-backed job store living on /tmp."""

    # Columns that update() is allowed to touch. Guards the f-string SQL
    # below against typos/injection via keyword names.
    _COLUMNS = frozenset({"user", "prompt", "status", "progress", "video_path", "error"})

    def __init__(self, path="/tmp/jobs.db"):
        self.path = path
        self.init()

    def init(self):
        """Create the jobs table if it does not already exist."""
        with sqlite3.connect(self.path) as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS jobs (
                    id TEXT PRIMARY KEY,
                    user TEXT NOT NULL,
                    prompt TEXT NOT NULL,
                    status TEXT DEFAULT 'queued',
                    progress INTEGER DEFAULT 0,
                    video_path TEXT,
                    error TEXT
                )
            """)

    def add(self, job_id, user, prompt):
        """Insert a new job; it starts in the default 'queued' state."""
        with sqlite3.connect(self.path) as conn:
            conn.execute(
                "INSERT INTO jobs (id, user, prompt) VALUES (?, ?, ?)",
                (job_id, user, prompt),
            )

    def update(self, job_id, **kwargs):
        """Update the given columns of a job.

        No-op when called with no columns (the original built the malformed
        SQL "UPDATE jobs SET  WHERE id = ?" in that case). Raises ValueError
        for unknown column names, since names are interpolated into SQL.
        """
        if not kwargs:
            return
        unknown = set(kwargs) - self._COLUMNS
        if unknown:
            raise ValueError(f"Unknown job column(s): {sorted(unknown)}")
        sets = ", ".join(f"{k} = ?" for k in kwargs)
        vals = list(kwargs.values()) + [job_id]
        with sqlite3.connect(self.path) as conn:
            conn.execute(f"UPDATE jobs SET {sets} WHERE id = ?", vals)

    def get(self, job_id):
        """Return the job row as a dict, or None when the id is unknown."""
        with sqlite3.connect(self.path) as conn:
            conn.row_factory = sqlite3.Row
            row = conn.execute("SELECT * FROM jobs WHERE id = ?", (job_id,)).fetchone()
        return dict(row) if row else None
# ==============================================================================
# WORKER with aggressive offload
# ==============================================================================
class LightWorker(threading.Thread):
    """Daemon thread that consumes queued jobs and renders short video clips.

    Loads the pipeline once in __init__ (blocking, at construction time) and
    then serves jobs from an internal queue; all job state is persisted via
    the MiniDB passed in.
    """

    def __init__(self, db: MiniDB):
        super().__init__(daemon=True)
        self.db = db
        self.q = queue.Queue()
        self.alive = True
        print("Loading ultra-light model: zeroscope_v2_dark_30x448x256 ...")
        try:
            self.pipe = DiffusionPipeline.from_pretrained(
                "cerspense/zeroscope_v2_dark_30x448x256",
                torch_dtype=torch.float16,
                safety_checker=None,
                requires_safety_checker=False,
            )
            self.pipe.enable_vae_slicing()
            # Crucial for CPU Basic RAM limits. Do NOT call .to("cuda") or
            # enable_model_cpu_offload() before sequential offload.
            self.pipe.enable_sequential_cpu_offload()
            print("Model loaded successfully (low RAM mode)")
        except Exception as e:
            print(f"Model load failed: {e}")
            raise

    def enqueue(self, job_id, prompt):
        """Queue a (job_id, prompt) pair for processing."""
        self.q.put((job_id, prompt))

    def run(self):
        """Worker loop: pull a job, render it, and record the outcome.

        Unlike the original, any failure (in generation OR in the status
        bookkeeping) marks the job 'failed' with the real exception text,
        instead of leaving it stuck in 'processing' or recording only a
        generic "Generation error" message.
        """
        while self.alive:
            try:
                job_id, prompt = self.q.get(timeout=10)
            except queue.Empty:
                continue
            print(f"Processing {job_id}")
            try:
                self.db.update(job_id, status="processing", progress=10)
                video_path = self._gen(job_id, prompt)
                self.db.update(job_id, status="done", progress=100, video_path=video_path)
            except Exception as e:
                print(f"Worker error for {job_id}: {e}")
                try:
                    # Surface the actual failure reason to check_job().
                    self.db.update(job_id, status="failed", error=str(e))
                except Exception:
                    pass  # best effort: a DB hiccup must not kill the worker
            finally:
                gc.collect()  # forced cleanup to keep peak RSS low

    def _gen(self, job_id, prompt):
        """Render one 8-frame clip and return the .mp4 path; raises on failure."""
        self.db.update(job_id, progress=30)
        result = self.pipe(
            prompt,
            num_inference_steps=15,
            height=256,
            width=448,
            num_frames=8,
            guidance_scale=6.0,
        )
        frames = result.frames[0]
        out_path = os.path.join(TMP_DIR, f"{job_id}.mp4")
        export_to_video(frames, out_path, fps=8)
        # Drop the big tensors before returning to keep memory bounded.
        del frames, result
        gc.collect()
        return out_path
# ==============================================================================
# Simple API
# ==============================================================================
# Module-level singletons: one job store and one background worker.
# Constructing LightWorker triggers the (slow) model load at import time.
db = MiniDB()
worker = LightWorker(db)
worker.start()
def start_job(user_id: str, prompt: str):
    """Validate inputs, persist a new job, and hand it to the worker.

    Returns a user-facing (Italian) status string either way; never raises
    for bad input. Fixes the mojibake "3β12" in the original message, which
    was a garbled en dash.
    """
    if not user_id.strip() or not prompt.strip():
        return "Errore: inserisci ID utente e prompt validi"
    job_id = f"j{uuid.uuid4().hex[:10]}"
    db.add(job_id, user_id, prompt)
    worker.enqueue(job_id, prompt)
    return f"Job in coda!\nID: **{job_id}**\nTempo stimato: 3–12 min (CPU lenta)"
def check_job(job_id: str):
    """Return a human-readable status summary for the given job ID."""
    job = db.get(job_id)
    if job is None:
        return "ID non trovato"
    parts = [f"Stato: {job['status']}", f"Progresso: {job['progress']}%"]
    if job.get('video_path'):
        parts.append(f"\nVideo pronto:\n{job['video_path']}")
    if job.get('error'):
        parts.append(f"Errore: {job['error']}")
    return "\n".join(parts)
# ==============================================================================
# Minimal UI
# ==============================================================================
# Gradio UI: one row of inputs to enqueue a job, plus a status-check section.
# Fixes mojibake in the user-facing Markdown: "256Γ448" -> "256×448",
# "3β12" -> "3–12", and the stray escaped tilde "\~" -> "~".
with gr.Blocks(title="Video Gen - CPU Basic Ultra Light") as demo:
    gr.Markdown("""
    # Generatore Video ULTRA LEGGERO (CPU Basic Free)
    - Modello: zeroscope_v2_dark_30x448x256
    - Video: ~1 secondo (8 frame @ 256×448)
    - Tempo: 3–12 min per video
    """)
    with gr.Row():
        user = gr.Textbox(label="ID Utente", placeholder="es. test123")
        prompt_box = gr.Textbox(label="Prompt", lines=3, placeholder="Un gatto salta su un tavolo di legno")
    btn = gr.Button("Avvia (in coda)", variant="primary")
    result = gr.Textbox(label="Output", lines=5)
    check_id = gr.Textbox(label="Controlla ID job")
    btn_check = gr.Button("Verifica stato")
    status_out = gr.Textbox(label="Stato", lines=5)
    btn.click(start_job, [user, prompt_box], result)
    btn_check.click(check_job, check_id, status_out)

demo.launch(server_name="0.0.0.0", server_port=7860, show_error=True)