Vector857 committed on
Commit
486f9bf
·
verified ·
1 Parent(s): 17e6d44

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -31
app.py CHANGED
@@ -21,7 +21,7 @@ THEME_PRESETS = {
21
  "orange-red": {"primary": "#c2410c", "secondary": "#ea580c"},
22
  "violet": {"primary": "#7c3aed", "secondary": "#8b5cf6"},
23
  "ocean": {"primary": "#0369a1", "secondary": "#0ea5e9"},
24
- "emerald": {"primary": "#047857", "secondary": "#10b981"},
25
  "rose": {"primary": "#be185d", "secondary": "#ec4899"},
26
  "amber": {"primary": "#b45309", "secondary": "#f59e0b"},
27
  "cyan": {"primary": "#0e7490", "secondary": "#06b6d4"},
@@ -289,34 +289,35 @@ def generate(
289
 
290
  # ZeroGPU: LoRA loading must happen inside GPU scope
291
  pipe.to("cuda")
292
- pipe.unload_lora_weights()
293
-
294
- if speed_mode == "Fast · 8 steps":
295
- pipe.load_lora_weights(LIGHTNING_REPO, weight_name=LIGHTNING_WEIGHT, adapter_name="lightning")
296
- pipe.load_lora_weights(lora["repo"], weight_name=lora.get("weights"), low_cpu_mem_usage=True, adapter_name="style")
297
- pipe.set_adapters(["lightning", "style"], adapter_weights=[1.0, lora_scale])
298
- else:
299
- pipe.load_lora_weights(lora["repo"], weight_name=lora.get("weights"), low_cpu_mem_usage=True, adapter_name="style")
300
- pipe.set_adapters(["style"], adapter_weights=[lora_scale])
301
-
302
- if randomize_seed:
303
- seed = random.randint(0, MAX_SEED)
304
-
305
- w, h = aspect_to_wh(aspect)
306
-
307
- generator = torch.Generator(device="cuda").manual_seed(seed)
308
- image = pipe(
309
- prompt=prompt_in,
310
- negative_prompt=neg_prompt,
311
- num_inference_steps=steps,
312
- true_cfg_scale=cfg,
313
- width=w,
314
- height=h,
315
- generator=generator,
316
- ).images[0]
317
-
318
- pipe.to("cpu")
319
- torch.cuda.empty_cache()
 
320
 
321
  # update history
322
  history_state = history_state or []
@@ -397,13 +398,16 @@ def add_custom_lora(custom_text: str):
397
  </div>
398
  </div>
399
  </div>"""
400
- return gr.update(visible=True, value=card_html), gr.update(visible=True), gr.Gallery(selected_index=None), f"Custom: {weight}", existing
 
 
401
  except Exception as e:
402
  gr.Warning(str(e))
403
  return gr.update(visible=True, value=f"<span style='color:#e24b4a'>{e}</span>"), gr.update(visible=True), gr.update(), "", None
404
 
405
  def remove_custom_lora():
406
- return gr.update(visible=False), gr.update(visible=False), gr.update(), "", None
 
407
 
408
  generate.zerogpu = True
409
 
 
21
  "orange-red": {"primary": "#c2410c", "secondary": "#ea580c"},
22
  "violet": {"primary": "#7c3aed", "secondary": "#8b5cf6"},
23
  "ocean": {"primary": "#0369a1", "secondary": "#0ea5e9"},
24
+ "emerald": {"primary": "#226105", "secondary": "#73ff00"},
25
  "rose": {"primary": "#be185d", "secondary": "#ec4899"},
26
  "amber": {"primary": "#b45309", "secondary": "#f59e0b"},
27
  "cyan": {"primary": "#0e7490", "secondary": "#06b6d4"},
 
289
 
290
  # ZeroGPU: LoRA loading must happen inside GPU scope
291
  pipe.to("cuda")
292
+ try:
293
+ pipe.unload_lora_weights()
294
+
295
+ if speed_mode == "Fast · 8 steps":
296
+ pipe.load_lora_weights(LIGHTNING_REPO, weight_name=LIGHTNING_WEIGHT, adapter_name="lightning")
297
+ pipe.load_lora_weights(lora["repo"], weight_name=lora.get("weights"), low_cpu_mem_usage=True, adapter_name="style")
298
+ pipe.set_adapters(["lightning", "style"], adapter_weights=[1.0, lora_scale])
299
+ else:
300
+ pipe.load_lora_weights(lora["repo"], weight_name=lora.get("weights"), low_cpu_mem_usage=True, adapter_name="style")
301
+ pipe.set_adapters(["style"], adapter_weights=[lora_scale])
302
+
303
+ if randomize_seed:
304
+ seed = random.randint(0, MAX_SEED)
305
+
306
+ w, h = aspect_to_wh(aspect)
307
+
308
+ generator = torch.Generator(device="cuda").manual_seed(seed)
309
+ image = pipe(
310
+ prompt=prompt_in,
311
+ negative_prompt=neg_prompt,
312
+ num_inference_steps=steps,
313
+ true_cfg_scale=cfg,
314
+ width=w,
315
+ height=h,
316
+ generator=generator,
317
+ ).images[0]
318
+ finally:
319
+ pipe.to("cpu")
320
+ torch.cuda.empty_cache()
321
 
322
  # update history
323
  history_state = history_state or []
 
398
  </div>
399
  </div>
400
  </div>"""
401
+ new_gallery = [(l["image"], l["title"]) for l in loras]
402
+ return (gr.update(visible=True, value=card_html), gr.update(visible=True),
403
+ gr.update(value=new_gallery, selected_index=None), f"Custom: {weight}", existing)
404
  except Exception as e:
405
  gr.Warning(str(e))
406
  return gr.update(visible=True, value=f"<span style='color:#e24b4a'>{e}</span>"), gr.update(visible=True), gr.update(), "", None
407
 
408
  def remove_custom_lora():
409
+ gallery_reset = [(l["image"], l["title"]) for l in loras]
410
+ return gr.update(visible=False), gr.update(visible=False), gr.update(value=gallery_reset), "", None
411
 
412
  generate.zerogpu = True
413