File size: 5,773 Bytes
2ab2a21 b9479f6 e6fc2ef b9479f6 e6fc2ef 2ab2a21 b5806ca b9479f6 b5806ca 2ab2a21 b9479f6 2ab2a21 b9479f6 e6fc2ef b9479f6 e6fc2ef b9479f6 717e30f b9479f6 09eb679 b9479f6 09eb679 b9479f6 09eb679 717e30f 2125537 09eb679 b9479f6 09eb679 e6fc2ef b9479f6 717e30f 09eb679 b9479f6 09eb679 b9479f6 2ab2a21 b5806ca b9479f6 b5806ca 09eb679 b9479f6 2125537 09eb679 b5806ca 2ab2a21 2125537 b9479f6 2ab2a21 b5806ca b9479f6 b5806ca 717e30f b9479f6 2ab2a21 b9479f6 2ab2a21 2125537 b5806ca 2125537 09eb679 b5806ca e6fc2ef b5806ca 717e30f b9479f6 2ab2a21 e6fc2ef b9479f6 09eb679 b9479f6 e6fc2ef 2ab2a21 2125537 2ab2a21 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 |
import importlib.util
import os
import shutil
import subprocess
import sys

import gradio as gr
import spaces
import torch
from huggingface_hub import snapshot_download
# -----------------------------------------------------------------------------
# CONFIGURATION
# -----------------------------------------------------------------------------
MODEL_ID = "NewBie-AI/NewBie-image-Exp0.1"  # Hugging Face repo holding the weights
GITHUB_REPO_URL = "https://github.com/NewBie-AI/NewBie" # source of the pipeline code missing from the HF repo
LOCAL_MODEL_DIR = "./model_weights"  # local download target for the safetensors weights
LOCAL_CODE_DIR = "./newbie_code"  # local clone target for the GitHub source
# -----------------------------------------------------------------------------
# RESCUE ROUTINE: CLONE CODE + DOWNLOAD WEIGHTS
# -----------------------------------------------------------------------------
def load_hybrid_pipeline():
    """Download the model weights from Hugging Face and the pipeline source
    from GitHub, then dynamically import and instantiate ``NewbiePipeline``.

    Returns:
        A ``NewbiePipeline`` instance loaded from ``LOCAL_MODEL_DIR``.

    Raises:
        RuntimeError: if no ``.py`` file in the cloned repository defines
            ``class NewbiePipeline``.
        subprocess.CalledProcessError: if the ``git clone`` step fails
            (the original ``os.system`` call ignored clone failures).
    """
    print(f"🚨 INICIANDO PROTOCOLO DE RESCATE PARA {MODEL_ID}...")
    _download_weights()
    _clone_code()
    # Make the cloned repo importable so intra-package imports in the
    # dynamically loaded module can resolve.
    sys.path.append(os.path.abspath(LOCAL_CODE_DIR))
    pipeline_class = _find_pipeline_class()
    if not pipeline_class:
        raise RuntimeError("❌ No se encontró 'class NewbiePipeline' ni siquiera en el GitHub. El código ha cambiado.")
    print(" 🚀 Conectando código clonado con pesos descargados...")
    pipe = pipeline_class.from_pretrained(
        LOCAL_MODEL_DIR,
        torch_dtype=torch.bfloat16,
        trust_remote_code=True,
        local_files_only=True,
    )
    return pipe


def _download_weights():
    """Fetch the model weights from the Hub unless already present locally."""
    if not os.path.exists(LOCAL_MODEL_DIR):
        print(" ⬇️ Descargando pesos del modelo (Safetensors)...")
        snapshot_download(
            repo_id=MODEL_ID,
            local_dir=LOCAL_MODEL_DIR,
            ignore_patterns=["*.msgpack", "*.bin"],  # skip redundant weight formats
        )


def _clone_code():
    """Clone the GitHub repository holding the pipeline source, once."""
    if not os.path.exists(LOCAL_CODE_DIR):
        print(f" ⬇️ Clonando código fuente desde {GITHUB_REPO_URL}...")
        # Fix: subprocess.run with an argument list avoids the shell and
        # raises on failure, unlike the silent os.system call it replaces.
        subprocess.run(["git", "clone", GITHUB_REPO_URL, LOCAL_CODE_DIR], check=True)


def _find_pipeline_class():
    """Recursively scan the cloned repo for a module defining ``NewbiePipeline``.

    Returns:
        The class object, or ``None`` when no candidate file defines it.
    """
    print(" 🕵️♂️ Buscando la clase perdida 'NewbiePipeline' en el código clonado...")
    for root, _dirs, files in os.walk(LOCAL_CODE_DIR):
        for file in files:
            if not file.endswith(".py"):
                continue
            path = os.path.join(root, file)
            try:
                with open(path, "r", encoding="utf-8", errors="ignore") as f:
                    source = f.read()
                if "class NewbiePipeline" not in source:
                    continue
                print(f" 🎯 ¡CÓDIGO ENCONTRADO EN!: {file}")
                # NOTE(review): exec_module runs arbitrary code from the
                # clone — tolerable only because GITHUB_REPO_URL is hard-coded.
                spec = importlib.util.spec_from_file_location("dynamic_pipeline", path)
                module = importlib.util.module_from_spec(spec)
                sys.modules["dynamic_pipeline"] = module
                spec.loader.exec_module(module)
                return getattr(module, "NewbiePipeline")
            except Exception:
                # Unreadable or broken candidate file: keep scanning.
                continue
    return None
# One-time model load at import time. On failure `pipe` stays None and the
# UI handler reports the problem to the user instead of crashing the process.
pipe = None
try:
    pipe = load_hybrid_pipeline()
except Exception as e:
    print(f"❌ ERROR CRÍTICO: {e}")
else:
    print(" ✅ ¡MODELO CARGADO EXITOSAMENTE!")
# -----------------------------------------------------------------------------
# LÓGICA ZEROGPU
# -----------------------------------------------------------------------------
@spaces.GPU(duration=120)
def generate_image(prompt, negative_prompt, steps, cfg, width, height):
    """Run one text-to-image generation on the ZeroGPU-allocated device.

    Args:
        prompt: Positive prompt text (XML-tag format expected by the model).
        negative_prompt: Tags to steer the sampler away from.
        steps: Number of inference steps (coerced to int).
        cfg: Classifier-free guidance scale (coerced to float).
        width: Output width in pixels (coerced to int).
        height: Output height in pixels (coerced to int).

    Returns:
        The first generated image from the pipeline output.

    Raises:
        gr.Error: if the model failed to load at startup, or if device
            placement / generation fails.
    """
    if pipe is None:
        raise gr.Error("El modelo no está cargado. Revisa la consola.")
    print("🎨 Generando...")
    try:
        # Fix: the device move now lives inside the try block, so CUDA
        # placement errors surface as gr.Error instead of a raw traceback.
        pipe.to("cuda")
        image = pipe(
            prompt=prompt,
            negative_prompt=negative_prompt,
            num_inference_steps=int(steps),
            guidance_scale=float(cfg),
            width=int(width),
            height=int(height),
        ).images[0]
        return image
    except Exception as e:
        raise gr.Error(f"Error generando imagen: {e}")
# -----------------------------------------------------------------------------
# INTERFACE
# -----------------------------------------------------------------------------
# NOTE(review): this CSS is injected via gr.HTML(css) below rather than
# through gr.Blocks(css=...); some Gradio versions sanitize <style> tags out
# of HTML components — confirm the rule actually takes effect.
css = """
<style>
.container { max-width: 900px; margin: auto; }
</style>
"""
# Default prompt demonstrating the XML-tag format the model expects.
DEFAULT_PROMPT = """<character_1>
<gender>1girl</gender>
<appearance>red_eyes, white_hair, long_hair</appearance>
<clothing>kimono, floral_print</clothing>
<action>standing, holding_fan</action>
</character_1>
<general_tags>
<style>anime, vivid_colors</style>
</general_tags>"""
with gr.Blocks() as demo:
    # Inject the raw <style> block, then build a two-column layout:
    # controls on the left, result image on the right.
    gr.HTML(css)
    gr.Markdown("# ⛩️ NewBie Anime (GitHub Rescue Edition)")
    with gr.Row():
        with gr.Column():
            prompt = gr.Textbox(label="Prompt (XML)", value=DEFAULT_PROMPT, lines=8)
            neg = gr.Textbox(label="Negative", value="low quality, bad anatomy")
            btn = gr.Button("Generar", variant="primary")
            steps = gr.Slider(10, 50, value=28, label="Pasos")
            cfg = gr.Slider(1, 15, value=7.0, label="CFG")
            width = gr.Slider(512, 1280, value=1024, step=64, label="Ancho")
            height = gr.Slider(512, 1280, value=1024, step=64, label="Alto")
        with gr.Column():
            out = gr.Image(label="Resultado")
    # Wire the button to the GPU generation handler.
    btn.click(
        generate_image,
        inputs=[prompt, neg, steps, cfg, width, height],
        outputs=out,
    )
if __name__ == "__main__":
    # Fix: removed the stray "|" residue fused onto this line, which would
    # have been a syntax error. Launch the Gradio app when run as a script.
    demo.launch()