underactuated committed · Commit 994c11b · verified · 1 parent: e734060

End of training

DPO/adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
     "k_proj",
-    "q_proj",
-    "up_proj",
     "o_proj",
+    "v_proj",
+    "gate_proj",
+    "up_proj",
     "down_proj",
-    "v_proj"
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
DPO/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:df5f1182385078689b6775ae35b2286200a6a757742f2d5630353031176831c2
+oid sha256:69cfef8899ebf4afa129e2b1730fab2c4363efa913e5940e8bdd6abf90a906ec
 size 167832240
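
The binary files here are stored as Git LFS pointers: the repository tracks only the sha256 oid and byte size, while the roughly 168 MB safetensors payload lives in LFS storage. A small sketch for checking a downloaded file against the new pointer; the local path is a hypothetical example:

# Minimal sketch for verifying a downloaded LFS object against its pointer.
# The file path is a hypothetical example; the digest is the new oid above.
import hashlib

EXPECTED = "69cfef8899ebf4afa129e2b1730fab2c4363efa913e5940e8bdd6abf90a906ec"

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("DPO/adapter_model.safetensors") == EXPECTED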
reference/adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
     "k_proj",
-    "q_proj",
-    "up_proj",
     "o_proj",
+    "v_proj",
+    "gate_proj",
+    "up_proj",
     "down_proj",
-    "v_proj"
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9d10ee83a80a43e9fc33d649be41f01a7ccb76e46aada1763277946ef5b24a52
+oid sha256:635efdff8cd0fbeed64ea7fda06668198c425efaf324a78137c9165ed5337518
 size 6200