s8n29 committed on
Commit
7984e10
·
verified ·
1 Parent(s): 14b0709

Training in progress, step 10

Browse files
adapter_config.json CHANGED
@@ -11,10 +11,6 @@
11
  "layer_replication": null,
12
  "layers_pattern": null,
13
  "layers_to_transform": [
14
- 16,
15
- 17,
16
- 18,
17
- 19,
18
  20,
19
  21,
20
  22,
@@ -39,7 +35,7 @@
39
  41
40
  ],
41
  "loftq_config": {},
42
- "lora_alpha": 32,
43
  "lora_bias": false,
44
  "lora_dropout": 0.05,
45
  "megatron_config": null,
@@ -49,13 +45,14 @@
49
  "score"
50
  ],
51
  "peft_type": "LORA",
52
- "r": 16,
53
  "rank_pattern": {},
54
  "revision": null,
55
  "target_modules": [
56
- "q_proj",
 
57
  "k_proj",
58
- "v_proj"
59
  ],
60
  "task_type": "SEQ_CLS",
61
  "use_dora": false,
 
11
  "layer_replication": null,
12
  "layers_pattern": null,
13
  "layers_to_transform": [
 
 
 
 
14
  20,
15
  21,
16
  22,
 
35
  41
36
  ],
37
  "loftq_config": {},
38
+ "lora_alpha": 16,
39
  "lora_bias": false,
40
  "lora_dropout": 0.05,
41
  "megatron_config": null,
 
45
  "score"
46
  ],
47
  "peft_type": "LORA",
48
+ "r": 8,
49
  "rank_pattern": {},
50
  "revision": null,
51
  "target_modules": [
52
+ "score.weight",
53
+ "v_proj",
54
  "k_proj",
55
+ "q_proj"
56
  ],
57
  "task_type": "SEQ_CLS",
58
  "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1ab58898ae2c0223f5083f01fddaec5655491ac51bba6fb2b0a9a0d0b77e0fd5
3
- size 31572576
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:581ceeebd1a9a0c0accea25ece94677c3e89f80edb12788f7251e093d33bbf79
3
+ size 13382856
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c42ee30e4de5664cecc78b73115374826f0ba2e0f39a598d5c0f62005fec6199
3
  size 5304
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2076eab00ba7bd03bda9f48f14de550892608a5666db5fe5d2d19d6c900c8caf
3
  size 5304