cyd0806 committed (verified)
Commit 1d7ce38 · Parent(s): d708d3d

Upload src/lora_switching_module.py with huggingface_hub

Files changed (1):
  1. src/lora_switching_module.py +50 -0
src/lora_switching_module.py ADDED
@@ -0,0 +1,50 @@
+ from typing import Any, Dict, List, Optional, Type
+
+ from peft.tuners.tuners_utils import BaseTunerLayer
+
+
+ def module_active_adapters(module) -> List[str]:
+     """Return the module's active adapters that have a scaling entry."""
+     if hasattr(module, "active_adapters"):
+         return [name for name in module.active_adapters if name in module.scaling]
+     return []
+
+
+ class enable_lora:
+     """Context manager that temporarily enables only the given LoRA adapters.
+
+     On entry, every other active adapter's scale is set to 0; on exit, the
+     original scales are restored.
+     """
+
+     def __init__(self, lora_modules: List[BaseTunerLayer], enable_adapters: List[str]) -> None:
+         self.lora_modules: List[BaseTunerLayer] = [
+             each for each in lora_modules if isinstance(each, BaseTunerLayer)
+         ]
+         # Snapshot the current scale of every active adapter so it can be restored on exit.
+         self.active_adapter_scales: List[Dict[str, Any]] = [
+             {
+                 active_adapter: lora_module.scaling[active_adapter]
+                 for active_adapter in module_active_adapters(lora_module)
+             }
+             for lora_module in self.lora_modules
+         ]
+         self.enable_adapters = enable_adapters
+
+     def __enter__(self) -> None:
+         # Zero out every active adapter that is not explicitly enabled.
+         for lora_module in self.lora_modules:
+             for active_adapter in module_active_adapters(lora_module):
+                 if active_adapter not in self.enable_adapters:
+                     lora_module.set_scale(active_adapter, 0)
+
+     def __exit__(
+         self,
+         exc_type: Optional[Type[BaseException]],
+         exc_val: Optional[BaseException],
+         exc_tb: Optional[Any],
+     ) -> None:
+         # Restore the original adapter scales recorded at construction time.
+         for i, lora_module in enumerate(self.lora_modules):
+             for active_adapter in module_active_adapters(lora_module):
+                 lora_module.set_scale(active_adapter, self.active_adapter_scales[i][active_adapter])
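A minimal usage sketch (not part of the commit), assuming a PEFT model with two loaded LoRA adapters; the adapter names "style" and "subject", the `model`/`inputs` variables, and the import path are illustrative:

from src.lora_switching_module import enable_lora

# enable_lora filters for BaseTunerLayer itself, so the model's full
# module list can be passed directly.
lora_layers = list(model.modules())

# Inside the block only the hypothetical "style" adapter contributes; any
# other active adapter (e.g. "subject") has its scale set to 0 and then
# restored on exit.
with enable_lora(lora_layers, enable_adapters=["style"]):
    output = model(**inputs)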