File size: 389 Bytes
784d77a
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
def apply_lora(pipe, lora_paths, scales):
    """Load multiple LoRA adapters onto *pipe* and activate them all together.

    Args:
        pipe: A diffusers-style pipeline exposing ``load_lora_weights`` and
            ``set_adapters``.
        lora_paths: Paths (local dirs or hub repo ids) of the LoRAs to load.
        scales: Per-adapter weights, parallel to ``lora_paths``. Extra entries
            in either sequence are ignored (``zip`` semantics, as before).

    Returns:
        The same ``pipe``, with every adapter loaded and all of them active.
    """
    adapter_names = []
    adapter_weights = []
    for lora_path, scale in zip(lora_paths, scales):
        # Use the last path component as a unique adapter name, matching the
        # original behavior (hub repo id or directory basename).
        adapter_name = lora_path.split("/")[-1]
        pipe.load_lora_weights(
            lora_path,
            weight_name="pytorch_lora_weights.safetensors",
            adapter_name=adapter_name,
        )
        adapter_names.append(adapter_name)
        adapter_weights.append(scale)
    # BUG FIX: set_adapters() replaces the whole active adapter set, so the
    # original per-iteration call left only the LAST LoRA active. Activate
    # all loaded adapters in one call after the loop instead.
    if adapter_names:
        pipe.set_adapters(adapter_names, adapter_weights=adapter_weights)
    return pipe