EdBergJr committed on
Commit
4a62779
·
verified ·
1 Parent(s): 2db5c91

Training in progress, step 10

Browse files
adapter_config.json CHANGED
@@ -3,6 +3,7 @@
3
  "auto_mapping": null,
4
  "base_model_name_or_path": "meta-llama/Llama-3.1-8B-Instruct",
5
  "bias": "none",
 
6
  "eva_config": null,
7
  "exclude_modules": null,
8
  "fan_in_fan_out": false,
@@ -23,13 +24,13 @@
23
  "rank_pattern": {},
24
  "revision": null,
25
  "target_modules": [
26
- "gate_proj",
27
  "down_proj",
28
- "v_proj",
29
  "o_proj",
30
- "k_proj",
31
- "q_proj",
32
- "up_proj"
33
  ],
34
  "task_type": "CAUSAL_LM",
35
  "use_dora": false,
 
3
  "auto_mapping": null,
4
  "base_model_name_or_path": "meta-llama/Llama-3.1-8B-Instruct",
5
  "bias": "none",
6
+ "corda_config": null,
7
  "eva_config": null,
8
  "exclude_modules": null,
9
  "fan_in_fan_out": false,
 
24
  "rank_pattern": {},
25
  "revision": null,
26
  "target_modules": [
27
+ "q_proj",
28
  "down_proj",
29
+ "up_proj",
30
  "o_proj",
31
+ "v_proj",
32
+ "gate_proj",
33
+ "k_proj"
34
  ],
35
  "task_type": "CAUSAL_LM",
36
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:03399b99d394ed180aa28da06442283bcd0adff29776010e4a2e251f906deb3a
3
  size 335604696
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:eb655d9229f803bf9cfe0b28f94040bc060fdf4de9cb0ae71438040d00ac6a3b
3
  size 335604696
runs/Feb06_10-22-36_683b76cf81c1/events.out.tfevents.1738837358.683b76cf81c1.963.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:71575fa748ee8c6e13fb659ac326436a9002028ad379afafa6309486c2cbf8cf
3
+ size 6345
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:0887ffd4f533ee0be7bc78d7faf16ac39b2fbdf163285ad2f0acdde1736662c5
3
  size 5624
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9fd79536c4e2f473bb4e80614cb5dc4fe2564f50dc749f0606eddf07efd2eee5
3
  size 5624