Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -120,8 +120,8 @@ def infer(
 120     pipe.unet = PeftModel.from_pretrained(pipe.unet, unet_sub_dir)
 121     pipe.text_encoder = PeftModel.from_pretrained(pipe.text_encoder, text_encoder_sub_dir)
 122
-123     pipe.unet.add_weighted_adapter(['default'], lora_scale, 'lora')
-124     pipe.text_encoder.add_weighted_adapter(['default'], lora_scale, 'lora')
+123     pipe.unet.add_weighted_adapter(['default'], [lora_scale], 'lora')
+124     pipe.text_encoder.add_weighted_adapter(['default'], [lora_scale], 'lora')
 125
 126     # pipe.unet.load_state_dict({k: lora_scale*v for k, v in pipe.unet.state_dict().items()})
 127     # pipe.text_encoder.load_state_dict({k: lora_scale*v for k, v in pipe.text_encoder.state_dict().items()})