phxdev Claude committed on
Commit
a28ad02
·
1 Parent(s): b7dac13

Fix LoRA loading error - load individually instead of batch

Browse files

- Load each LoRA individually using pipe.load_lora_weights(single_path)
- Add error handling for individual LoRA loading
- Fuse all LoRAs after individual loading
- Fixes ValueError: PEFT backend is required for this method

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>

Files changed (1) hide show
  1. app.py +12 -7
app.py CHANGED
@@ -45,7 +45,6 @@ def preload_and_load_all_loras():
45
  global loaded_loras
46
 
47
  print("Downloading and loading all LoRAs...")
48
- lora_weights = []
49
 
50
  for lora_name, lora_path in LORAS.items():
51
  if lora_name == "None" or lora_path is None:
@@ -57,15 +56,21 @@ def preload_and_load_all_loras():
57
  lora_path = download_lora_from_url(lora_path, filename)
58
 
59
  loaded_loras[lora_name] = lora_path
60
- lora_weights.append(lora_path)
61
  print(f"Downloaded {lora_name}")
 
 
 
 
 
 
 
62
 
63
- # Load all LoRAs simultaneously
64
- if lora_weights:
65
- print("Loading all LoRAs simultaneously...")
66
- pipe.load_lora_weights(lora_weights)
67
  pipe.fuse_lora(lora_scale=1.0)
68
- print(f"All {len(lora_weights)} LoRAs loaded and active!")
 
 
69
 
70
  # Load all LoRAs at startup
71
  preload_and_load_all_loras()
 
45
  global loaded_loras
46
 
47
  print("Downloading and loading all LoRAs...")
 
48
 
49
  for lora_name, lora_path in LORAS.items():
50
  if lora_name == "None" or lora_path is None:
 
56
  lora_path = download_lora_from_url(lora_path, filename)
57
 
58
  loaded_loras[lora_name] = lora_path
 
59
  print(f"Downloaded {lora_name}")
60
+
61
+ # Load each LoRA individually
62
+ try:
63
+ pipe.load_lora_weights(lora_path)
64
+ print(f"Loaded {lora_name}")
65
+ except Exception as e:
66
+ print(f"Failed to load {lora_name}: {e}")
67
 
68
+ # Fuse all loaded LoRAs
69
+ try:
 
 
70
  pipe.fuse_lora(lora_scale=1.0)
71
+ print(f"All LoRAs fused and active!")
72
+ except Exception as e:
73
+ print(f"Failed to fuse LoRAs: {e}")
74
 
75
  # Load all LoRAs at startup
76
  preload_and_load_all_loras()