Spaces:
Running
on
Zero
Update model.py
Browse files
model.py
CHANGED
|
@@ -6,7 +6,7 @@ from config import Config
|
|
| 6 |
|
| 7 |
from diffusers import (
|
| 8 |
ControlNetModel,
|
| 9 |
-
TCDScheduler,
|
| 10 |
)
|
| 11 |
from diffusers.models.controlnets.multicontrolnet import MultiControlNetModel
|
| 12 |
|
|
@@ -117,11 +117,9 @@ class ModelHandler:
|
|
| 117 |
print(f" [WARNING] Failed to enable xFormers: {e}")
|
| 118 |
|
| 119 |
# 4. Set TCD Scheduler
|
| 120 |
-
# --- CHANGED: Using TCDScheduler logic from examples ---
|
| 121 |
print("Configuring TCDScheduler...")
|
| 122 |
self.pipeline.scheduler = TCDScheduler.from_config(self.pipeline.scheduler.config)
|
| 123 |
print(" [OK] TCDScheduler loaded.")
|
| 124 |
-
# --- END CHANGED ---
|
| 125 |
|
| 126 |
# 5. Load Adapters (IP-Adapter, TCD-LoRA & Style LoRA)
|
| 127 |
print("Loading Adapters...")
|
|
@@ -142,18 +140,20 @@ class ModelHandler:
|
|
| 142 |
print(f"Loading IP-Adapter from local file: {ip_adapter_local_path}")
|
| 143 |
self.pipeline.load_ip_adapter_instantid(ip_adapter_local_path)
|
| 144 |
|
| 145 |
-
# 5b. Load TCD LoRA (…)  [removed-side comment truncated in page extraction]
|
| 146 |
print("Loading TCD-SDXL-LoRA...")
|
| 147 |
-
|
|
|
|
|
|
|
| 148 |
if not os.path.exists(tcd_lora_path):
|
| 149 |
hf_hub_download(
|
| 150 |
repo_id="h1t/TCD-SDXL-LoRA",
|
| 151 |
-
filename=…,  [removed-side value truncated in page extraction; the new revision passes tcd_lora_filename]
|
| 152 |
local_dir="./models",
|
| 153 |
local_dir_use_symlinks=False
|
| 154 |
)
|
| 155 |
-
self.pipeline.load_lora_weights("./models", weight_name=…)  [removed-side value truncated in page extraction; the new revision passes tcd_lora_filename]
|
| 156 |
-
self.pipeline.fuse_lora(lora_scale=1.0)
|
| 157 |
print(" [OK] TCD LoRA fused.")
|
| 158 |
|
| 159 |
# 5c. Load Style LoRA (Your existing one)
|
|
|
|
| 6 |
|
| 7 |
from diffusers import (
|
| 8 |
ControlNetModel,
|
| 9 |
+
TCDScheduler,
|
| 10 |
)
|
| 11 |
from diffusers.models.controlnets.multicontrolnet import MultiControlNetModel
|
| 12 |
|
|
|
|
| 117 |
print(f" [WARNING] Failed to enable xFormers: {e}")
|
| 118 |
|
| 119 |
# 4. Set TCD Scheduler
|
|
|
|
| 120 |
print("Configuring TCDScheduler...")
|
| 121 |
self.pipeline.scheduler = TCDScheduler.from_config(self.pipeline.scheduler.config)
|
| 122 |
print(" [OK] TCDScheduler loaded.")
|
|
|
|
| 123 |
|
| 124 |
# 5. Load Adapters (IP-Adapter, TCD-LoRA & Style LoRA)
|
| 125 |
print("Loading Adapters...")
|
|
|
|
| 140 |
print(f"Loading IP-Adapter from local file: {ip_adapter_local_path}")
|
| 141 |
self.pipeline.load_ip_adapter_instantid(ip_adapter_local_path)
|
| 142 |
|
| 143 |
+
# 5b. Load TCD LoRA (Fixed Filename)
|
| 144 |
print("Loading TCD-SDXL-LoRA...")
|
| 145 |
+
tcd_lora_filename = "pytorch_lora_weights.safetensors" # <-- CORRECTED FILENAME
|
| 146 |
+
tcd_lora_path = os.path.join("./models", tcd_lora_filename)
|
| 147 |
+
|
| 148 |
if not os.path.exists(tcd_lora_path):
|
| 149 |
hf_hub_download(
|
| 150 |
repo_id="h1t/TCD-SDXL-LoRA",
|
| 151 |
+
filename=tcd_lora_filename,
|
| 152 |
local_dir="./models",
|
| 153 |
local_dir_use_symlinks=False
|
| 154 |
)
|
| 155 |
+
self.pipeline.load_lora_weights("./models", weight_name=tcd_lora_filename)
|
| 156 |
+
self.pipeline.fuse_lora(lora_scale=1.0)
|
| 157 |
print(" [OK] TCD LoRA fused.")
|
| 158 |
|
| 159 |
# 5c. Load Style LoRA (Your existing one)
|