ShilpaSandhya committed (verified)
Commit 1e2c259 · Parent(s): 66ecb90

Upload model

Files changed (2):
  1. adapter_config.json +30 -30
  2. adapter_model.safetensors +1 -1
adapter_config.json CHANGED
@@ -20,46 +20,46 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "model.layers.4.self_attn.o_proj",
- "model.layers.3.mlp.gate_up_proj",
- "model.layers.0.mlp.gate_up_proj",
- "model.layers.5.mlp.down_proj",
- "model.layers.7.mlp.down_proj",
- "model.layers.7.mlp.gate_up_proj",
- "model.layers.6.mlp.down_proj",
- "model.layers.3.self_attn.qkv_proj",
- "model.layers.8.self_attn.qkv_proj",
- "model.layers.5.self_attn.o_proj",
- "model.layers.1.self_attn.qkv_proj",
- "model.layers.9.self_attn.o_proj",
- "model.layers.5.mlp.gate_up_proj",
- "model.layers.3.self_attn.o_proj",
  "model.layers.2.mlp.gate_up_proj",
- "model.layers.4.mlp.gate_up_proj",
- "model.layers.8.mlp.gate_up_proj",
- "model.layers.9.mlp.gate_up_proj",
- "model.layers.4.mlp.down_proj",
- "model.layers.2.self_attn.qkv_proj",
- "model.layers.9.self_attn.qkv_proj",
+ "model.layers.1.self_attn.qkv_proj",
  "model.layers.2.mlp.down_proj",
- "model.layers.3.mlp.down_proj",
- "model.layers.5.self_attn.qkv_proj",
+ "model.layers.9.mlp.gate_up_proj",
  "model.layers.1.mlp.gate_up_proj",
- "model.layers.0.self_attn.o_proj",
+ "model.layers.7.mlp.down_proj",
+ "model.layers.1.self_attn.o_proj",
+ "model.layers.7.mlp.gate_up_proj",
+ "model.layers.4.self_attn.qkv_proj",
+ "model.layers.3.mlp.gate_up_proj",
  "model.layers.6.self_attn.qkv_proj",
  "model.layers.8.mlp.down_proj",
+ "model.layers.3.self_attn.o_proj",
  "model.layers.8.self_attn.o_proj",
- "model.layers.0.self_attn.qkv_proj",
+ "model.layers.2.self_attn.qkv_proj",
+ "model.layers.4.mlp.gate_up_proj",
+ "model.layers.5.self_attn.qkv_proj",
+ "model.layers.0.mlp.down_proj",
+ "model.layers.9.mlp.down_proj",
  "model.layers.1.mlp.down_proj",
- "model.layers.4.self_attn.qkv_proj",
+ "model.layers.9.self_attn.qkv_proj",
+ "model.layers.0.self_attn.o_proj",
+ "model.layers.3.self_attn.qkv_proj",
+ "model.layers.9.self_attn.o_proj",
+ "model.layers.6.mlp.down_proj",
+ "model.layers.4.mlp.down_proj",
+ "model.layers.0.mlp.gate_up_proj",
+ "model.layers.7.self_attn.o_proj",
  "model.layers.6.self_attn.o_proj",
- "model.layers.6.mlp.gate_up_proj",
- "model.layers.1.self_attn.o_proj",
+ "model.layers.4.self_attn.o_proj",
  "model.layers.7.self_attn.qkv_proj",
- "model.layers.9.mlp.down_proj",
+ "model.layers.5.self_attn.o_proj",
+ "model.layers.5.mlp.gate_up_proj",
+ "model.layers.6.mlp.gate_up_proj",
+ "model.layers.0.self_attn.qkv_proj",
+ "model.layers.3.mlp.down_proj",
+ "model.layers.8.mlp.gate_up_proj",
  "model.layers.2.self_attn.o_proj",
- "model.layers.7.self_attn.o_proj",
- "model.layers.0.mlp.down_proj"
+ "model.layers.8.self_attn.qkv_proj",
+ "model.layers.5.mlp.down_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b0fa8e7364d5e3384522c29c6d87fca1beae09012a2ef6c2ed04936a2c79862b
+ oid sha256:d48e427285c870f44392240d319de0f8a93d821d8c24cbadf13c580e543ba121
  size 31467968
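
The entry above is a standard Git LFS pointer file: the oid is the SHA-256 digest of the actual weights, and only it changed while the size stayed at 31467968 bytes, so the re-uploaded adapter has identical byte length but different contents. A minimal stdlib-only sketch for verifying a downloaded adapter_model.safetensors against the new pointer:

    import hashlib

    def lfs_oid(path: str, chunk_size: int = 1 << 20) -> str:
        # Git LFS oids are plain SHA-256 digests of the file contents,
        # so streaming the file through hashlib reproduces them exactly.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            while chunk := f.read(chunk_size):
                h.update(chunk)
        return h.hexdigest()

    # Expected value copied from the pointer in this commit.
    NEW_OID = "d48e427285c870f44392240d319de0f8a93d821d8c24cbadf13c580e543ba121"
    assert lfs_oid("adapter_model.safetensors") == NEW_OID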