Fix LoRA loading with PEFT backend and adapter names
Browse files
- Add peft and requests to requirements.txt
- Use adapter names for loading multiple LoRAs
- Use pipe.set_adapters() to activate all LoRAs simultaneously
- Proper error handling for each LoRA load
🤖 Generated with [Claude Code](https://claude.ai/code)
Co-Authored-By: Claude <noreply@anthropic.com>
- app.py +14 -9
- requirements.txt +3 -1
app.py
CHANGED
|
@@ -45,6 +45,7 @@ def preload_and_load_all_loras():
|
|
| 45 |
global loaded_loras
|
| 46 |
|
| 47 |
print("Downloading and loading all LoRAs...")
|
|
|
|
| 48 |
|
| 49 |
for lora_name, lora_path in LORAS.items():
|
| 50 |
if lora_name == "None" or lora_path is None:
|
|
@@ -56,21 +57,25 @@ def preload_and_load_all_loras():
|
|
| 56 |
lora_path = download_lora_from_url(lora_path, filename)
|
| 57 |
|
| 58 |
loaded_loras[lora_name] = lora_path
|
|
|
|
| 59 |
print(f"Downloaded {lora_name}")
|
| 60 |
-
|
| 61 |
-
|
|
|
|
| 62 |
try:
|
| 63 |
-
|
| 64 |
-
|
|
|
|
| 65 |
except Exception as e:
|
| 66 |
-
print(f"Failed to load {lora_name}: {e}")
|
| 67 |
|
| 68 |
-
#
|
| 69 |
try:
|
| 70 |
-
|
| 71 |
-
|
|
|
|
| 72 |
except Exception as e:
|
| 73 |
-
print(f"Failed to
|
| 74 |
|
| 75 |
# Load all LoRAs at startup
|
| 76 |
preload_and_load_all_loras()
|
|
|
|
| 45 |
global loaded_loras
|
| 46 |
|
| 47 |
print("Downloading and loading all LoRAs...")
|
| 48 |
+
adapters_to_load = []
|
| 49 |
|
| 50 |
for lora_name, lora_path in LORAS.items():
|
| 51 |
if lora_name == "None" or lora_path is None:
|
|
|
|
| 57 |
lora_path = download_lora_from_url(lora_path, filename)
|
| 58 |
|
| 59 |
loaded_loras[lora_name] = lora_path
|
| 60 |
+
adapters_to_load.append(lora_path)
|
| 61 |
print(f"Downloaded {lora_name}")
|
| 62 |
+
|
| 63 |
+
# Load all LoRAs with different adapter names
|
| 64 |
+
for i, lora_path in enumerate(adapters_to_load):
|
| 65 |
try:
|
| 66 |
+
adapter_name = f"adapter_{i}"
|
| 67 |
+
pipe.load_lora_weights(lora_path, adapter_name=adapter_name)
|
| 68 |
+
print(f"Loaded adapter {adapter_name}")
|
| 69 |
except Exception as e:
|
| 70 |
+
print(f"Failed to load {lora_path}: {e}")
|
| 71 |
|
| 72 |
+
# Set all adapters as active
|
| 73 |
try:
|
| 74 |
+
adapter_names = [f"adapter_{i}" for i in range(len(adapters_to_load))]
|
| 75 |
+
pipe.set_adapters(adapter_names)
|
| 76 |
+
print(f"All {len(adapters_to_load)} LoRAs active!")
|
| 77 |
except Exception as e:
|
| 78 |
+
print(f"Failed to activate adapters: {e}")
|
| 79 |
|
| 80 |
# Load all LoRAs at startup
|
| 81 |
preload_and_load_all_loras()
|
requirements.txt
CHANGED
|
@@ -3,4 +3,6 @@ git+https://github.com/huggingface/diffusers.git
|
|
| 3 |
torch
|
| 4 |
transformers==4.42.4
|
| 5 |
xformers
|
| 6 |
-
sentencepiece
|
|
|
|
|
|
|
|
|
| 3 |
torch
|
| 4 |
transformers==4.42.4
|
| 5 |
xformers
|
| 6 |
+
sentencepiece
|
| 7 |
+
peft
|
| 8 |
+
requests
|