yukee1992 committed on
Commit
7b28e67
·
verified ·
1 Parent(s): de448e9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +28 -9
app.py CHANGED
@@ -97,7 +97,7 @@ current_pipe = None
97
  model_lock = threading.Lock()
98
 
99
  def load_model(model_name="dreamshaper-8"):
100
- """Thread-safe model loading with HIGH-QUALITY settings"""
101
  global model_cache, current_model_name, current_pipe
102
 
103
  with model_lock:
@@ -110,11 +110,15 @@ def load_model(model_name="dreamshaper-8"):
110
  try:
111
  model_id = MODEL_CHOICES.get(model_name, "lykon/dreamshaper-8")
112
 
 
 
113
  pipe = StableDiffusionPipeline.from_pretrained(
114
  model_id,
115
  torch_dtype=torch.float32,
116
  safety_checker=None,
117
- requires_safety_checker=False
 
 
118
  )
119
 
120
  pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
@@ -128,13 +132,28 @@ def load_model(model_name="dreamshaper-8"):
128
  return pipe
129
 
130
  except Exception as e:
131
- print(f"โŒ Model loading failed: {e}")
132
- return StableDiffusionPipeline.from_pretrained(
133
- "runwayml/stable-diffusion-v1-5",
134
- torch_dtype=torch.float32,
135
- safety_checker=None,
136
- requires_safety_checker=False
137
- ).to("cpu")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
138
 
139
  # Initialize default model
140
  print("🚀 Initializing Storybook Generator API...")
 
97
  model_lock = threading.Lock()
98
 
99
  def load_model(model_name="dreamshaper-8"):
100
+ """Thread-safe model loading with HIGH-QUALITY settings and better error handling"""
101
  global model_cache, current_model_name, current_pipe
102
 
103
  with model_lock:
 
110
  try:
111
  model_id = MODEL_CHOICES.get(model_name, "lykon/dreamshaper-8")
112
 
113
+ print(f"🔧 Attempting to load: {model_id}")
114
+
115
  pipe = StableDiffusionPipeline.from_pretrained(
116
  model_id,
117
  torch_dtype=torch.float32,
118
  safety_checker=None,
119
+ requires_safety_checker=False,
120
+ local_files_only=False, # Allow downloading if not cached
121
+ cache_dir="./model_cache" # Specific cache directory
122
  )
123
 
124
  pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
 
132
  return pipe
133
 
134
  except Exception as e:
135
+ print(f"โŒ Model loading failed for {model_name}: {e}")
136
+ print(f"🔄 Falling back to stable-diffusion-v1-5")
137
+
138
+ # Fallback to base model
139
+ try:
140
+ pipe = StableDiffusionPipeline.from_pretrained(
141
+ "runwayml/stable-diffusion-v1-5",
142
+ torch_dtype=torch.float32,
143
+ safety_checker=None,
144
+ requires_safety_checker=False
145
+ ).to("cpu")
146
+
147
+ model_cache[model_name] = pipe
148
+ current_pipe = pipe
149
+ current_model_name = "sd-1.5"
150
+
151
+ print(f"✅ Fallback model loaded: stable-diffusion-v1-5")
152
+ return pipe
153
+
154
+ except Exception as fallback_error:
155
+ print(f"โŒ Critical: Fallback model also failed: {fallback_error}")
156
+ raise
157
 
158
  # Initialize default model
159
  print("🚀 Initializing Storybook Generator API...")