Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -97,7 +97,7 @@ current_pipe = None
|
|
| 97 |
model_lock = threading.Lock()
|
| 98 |
|
| 99 |
def load_model(model_name="dreamshaper-8"):
|
| 100 |
-
"""Thread-safe model loading with HIGH-QUALITY settings"""
|
| 101 |
global model_cache, current_model_name, current_pipe
|
| 102 |
|
| 103 |
with model_lock:
|
|
@@ -110,11 +110,15 @@ def load_model(model_name="dreamshaper-8"):
|
|
| 110 |
try:
|
| 111 |
model_id = MODEL_CHOICES.get(model_name, "lykon/dreamshaper-8")
|
| 112 |
|
|
|
|
|
|
|
| 113 |
pipe = StableDiffusionPipeline.from_pretrained(
|
| 114 |
model_id,
|
| 115 |
torch_dtype=torch.float32,
|
| 116 |
safety_checker=None,
|
| 117 |
-
requires_safety_checker=False
|
|
|
|
|
|
|
| 118 |
)
|
| 119 |
|
| 120 |
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
|
|
@@ -128,13 +132,28 @@ def load_model(model_name="dreamshaper-8"):
|
|
| 128 |
return pipe
|
| 129 |
|
| 130 |
except Exception as e:
|
| 131 |
-
print(f"❌ Model loading failed: {e}")
|
| 132 |
-
|
| 133 |
-
|
| 134 |
-
|
| 135 |
-
|
| 136 |
-
|
| 137 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 138 |
|
| 139 |
# Initialize default model
|
| 140 |
print("🚀 Initializing Storybook Generator API...")
|
|
|
|
| 97 |
model_lock = threading.Lock()
|
| 98 |
|
| 99 |
def load_model(model_name="dreamshaper-8"):
|
| 100 |
+
"""Thread-safe model loading with HIGH-QUALITY settings and better error handling"""
|
| 101 |
global model_cache, current_model_name, current_pipe
|
| 102 |
|
| 103 |
with model_lock:
|
|
|
|
| 110 |
try:
|
| 111 |
model_id = MODEL_CHOICES.get(model_name, "lykon/dreamshaper-8")
|
| 112 |
|
| 113 |
+
print(f"🔧 Attempting to load: {model_id}")
|
| 114 |
+
|
| 115 |
pipe = StableDiffusionPipeline.from_pretrained(
|
| 116 |
model_id,
|
| 117 |
torch_dtype=torch.float32,
|
| 118 |
safety_checker=None,
|
| 119 |
+
requires_safety_checker=False,
|
| 120 |
+
local_files_only=False, # Allow downloading if not cached
|
| 121 |
+
cache_dir="./model_cache" # Specific cache directory
|
| 122 |
)
|
| 123 |
|
| 124 |
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
|
|
|
|
| 132 |
return pipe
|
| 133 |
|
| 134 |
except Exception as e:
|
| 135 |
+
print(f"❌ Model loading failed for {model_name}: {e}")
|
| 136 |
+
print(f"🔄 Falling back to stable-diffusion-v1-5")
|
| 137 |
+
|
| 138 |
+
# Fallback to base model
|
| 139 |
+
try:
|
| 140 |
+
pipe = StableDiffusionPipeline.from_pretrained(
|
| 141 |
+
"runwayml/stable-diffusion-v1-5",
|
| 142 |
+
torch_dtype=torch.float32,
|
| 143 |
+
safety_checker=None,
|
| 144 |
+
requires_safety_checker=False
|
| 145 |
+
).to("cpu")
|
| 146 |
+
|
| 147 |
+
model_cache[model_name] = pipe
|
| 148 |
+
current_pipe = pipe
|
| 149 |
+
current_model_name = "sd-1.5"
|
| 150 |
+
|
| 151 |
+
print(f"✅ Fallback model loaded: stable-diffusion-v1-5")
|
| 152 |
+
return pipe
|
| 153 |
+
|
| 154 |
+
except Exception as fallback_error:
|
| 155 |
+
print(f"❌ Critical: Fallback model also failed: {fallback_error}")
|
| 156 |
+
raise
|
| 157 |
|
| 158 |
# Initialize default model
|
| 159 |
print("🚀 Initializing Storybook Generator API...")
|