underactuated committed (verified)
Commit 0bf5bbf · 1 Parent(s): 6810092

End of training

DPO/adapter_config.json CHANGED
@@ -24,12 +24,12 @@
   "revision": null,
   "target_modules": [
     "v_proj",
-    "k_proj",
+    "down_proj",
     "up_proj",
-    "o_proj",
-    "q_proj",
     "gate_proj",
-    "down_proj"
+    "q_proj",
+    "o_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
DPO/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3b89be1fdbc27d0a2409b0a6048f5bd52abecb3e4e83093b01640d2d61f497e3
+oid sha256:d5068ca1b2a76dfb2aa74010334f79b83d8d3ec8e599da72fccb445cb4a4b1a2
 size 167832240
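This is a Git LFS pointer file: only the oid changes while the size stays at 167832240 bytes, i.e. the adapter weights were replaced by a new payload of identical length. A minimal sketch for checking a downloaded file against the pointer's oid and size (the local path is an assumption):

import hashlib
from pathlib import Path

def matches_lfs_pointer(path: str, oid_sha256: str, size: int) -> bool:
    """Compare a local file against the oid/size recorded in a Git LFS pointer."""
    data = Path(path).read_bytes()
    return len(data) == size and hashlib.sha256(data).hexdigest() == oid_sha256

# Values taken from the new pointer above; the path is hypothetical.
print(matches_lfs_pointer(
    "DPO/adapter_model.safetensors",
    "d5068ca1b2a76dfb2aa74010334f79b83d8d3ec8e599da72fccb445cb4a4b1a2",
    167832240,
))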
reference/adapter_config.json CHANGED
@@ -24,12 +24,12 @@
   "revision": null,
   "target_modules": [
     "v_proj",
-    "k_proj",
+    "down_proj",
     "up_proj",
-    "o_proj",
-    "q_proj",
     "gate_proj",
-    "down_proj"
+    "q_proj",
+    "o_proj",
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0824aebc0c620a703ed87ce3c4b3a7cd1e9e56ec7c9b3ab950a1d7de2e0a89e2
+oid sha256:6d06ae18e16095d641771cf9d9d349ac6790a227db7b7d4f8b6e059440940d74
 size 6200
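training_args.bin is a small (6200-byte) LFS-tracked pickle written alongside the checkpoints. Loading it to inspect the run's hyperparameters is a sketch under the assumption that it deserializes to a transformers TrainingArguments (or trl DPOConfig) object:

import torch

# weights_only=False is needed because this is an arbitrary pickled config
# object, not a tensor payload; the object type is an assumption.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)
print(args)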