lfhe committed
Commit 2c09742 · 1 Parent(s): 02bedad
adapter_config.json CHANGED
@@ -11,7 +11,7 @@
   "layers_to_transform": null,
   "loftq_config": {},
   "lora_alpha": 32,
-  "lora_dropout": 0.16,
+  "lora_dropout": 0.36,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
@@ -20,10 +20,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
-    "qkv_proj",
     "o_proj",
-    "gate_up_proj"
+    "down_proj",
+    "gate_up_proj",
+    "qkv_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f7d1ff341c75ca55c050f1dc46d0404eebe058a9e17238d973d79b9f8e272c88
+oid sha256:77de07e0627a87793bd818215e89350957085cd1984ab8db7b906a414f276ca9
 size 100697728
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5b69fb41ddbfab834c1329b9b10c87c8f3896a7aecc9a10853d03ca223276476
+oid sha256:f2607612c793bc79529fba85787d8c3b033ae081704aebbd3ee31ef79da024f9
 size 5496
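
Both binary files are stored as Git LFS pointers, so only the sha256 oid (and, if it changed, the size) appears in the diff. One way to confirm a downloaded file matches the new pointer is to hash it locally; the sketch below assumes adapter_model.safetensors has already been fetched into the working directory.

# Sketch: verify a fetched LFS object against the pointer's sha256 oid.
# Assumption: the file path below is illustrative.
import hashlib

def sha256_of(path: str) -> str:
    # Hash the file in 1 MiB chunks to avoid loading it all into memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "77de07e0627a87793bd818215e89350957085cd1984ab8db7b906a414f276ca9"
assert sha256_of("adapter_model.safetensors") == expected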