def apply_lora(pipe, lora_paths, scales):
    """Load multiple LoRA adapters onto a diffusers pipeline and activate them together.

    Args:
        pipe: A diffusers pipeline exposing ``load_lora_weights`` and
            ``set_adapters`` (e.g. a ``DiffusionPipeline`` with LoRA support).
        lora_paths: Iterable of LoRA repo-ids/paths; the last ``/``-separated
            segment of each path is used as the adapter name.
        scales: Iterable of per-adapter weights, paired with ``lora_paths``
            positionally (extra entries on either side are ignored, as with
            ``zip``).

    Returns:
        The same ``pipe`` object, with all adapters loaded and active.
    """
    adapter_names = []
    adapter_weights = []
    for lora_path, scale in zip(lora_paths, scales):
        adapter_name = lora_path.split("/")[-1]
        pipe.load_lora_weights(
            lora_path,
            weight_name="pytorch_lora_weights.safetensors",
            adapter_name=adapter_name,
        )
        adapter_names.append(adapter_name)
        adapter_weights.append(scale)
    # Activate every adapter in a single call. Calling set_adapters inside
    # the loop (as the original did) replaces the active adapter set each
    # iteration, so only the LAST LoRA would remain active.
    if adapter_names:
        pipe.set_adapters(adapter_names, adapter_weights=adapter_weights)
    return pipe