diagonalge committed on
Commit
c7a6bd6
·
verified ·
1 Parent(s): f493ded

Upload task output test1334test1234test1234test12334

Browse files
adapter_config.json CHANGED
@@ -24,13 +24,13 @@
24
  "rank_pattern": {},
25
  "revision": null,
26
  "target_modules": [
27
- "v_proj",
28
  "down_proj",
 
29
  "q_proj",
 
30
  "k_proj",
31
- "o_proj",
32
- "up_proj",
33
- "gate_proj"
34
  ],
35
  "task_type": "CAUSAL_LM",
36
  "trainable_token_indices": null,
 
24
  "rank_pattern": {},
25
  "revision": null,
26
  "target_modules": [
27
+ "gate_proj",
28
  "down_proj",
29
+ "o_proj",
30
  "q_proj",
31
+ "v_proj",
32
  "k_proj",
33
+ "up_proj"
 
 
34
  ],
35
  "task_type": "CAUSAL_LM",
36
  "trainable_token_indices": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:974f674f2be762e5778a3522d4ae6e9bf11cfeb74fd299dc26ade81ad3ad12e9
3
  size 22573704
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ace4e6b964d4cade918162a3e383897ed3750e133a82daba99e6a46269a117fe
3
  size 22573704
checkpoint-1/adapter_config.json CHANGED
@@ -24,13 +24,13 @@
24
  "rank_pattern": {},
25
  "revision": null,
26
  "target_modules": [
27
- "v_proj",
28
  "down_proj",
 
29
  "q_proj",
 
30
  "k_proj",
31
- "o_proj",
32
- "up_proj",
33
- "gate_proj"
34
  ],
35
  "task_type": "CAUSAL_LM",
36
  "trainable_token_indices": null,
 
24
  "rank_pattern": {},
25
  "revision": null,
26
  "target_modules": [
27
+ "gate_proj",
28
  "down_proj",
29
+ "o_proj",
30
  "q_proj",
31
+ "v_proj",
32
  "k_proj",
33
+ "up_proj"
 
 
34
  ],
35
  "task_type": "CAUSAL_LM",
36
  "trainable_token_indices": null,
checkpoint-1/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:974f674f2be762e5778a3522d4ae6e9bf11cfeb74fd299dc26ade81ad3ad12e9
3
  size 22573704
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ace4e6b964d4cade918162a3e383897ed3750e133a82daba99e6a46269a117fe
3
  size 22573704
checkpoint-1/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3712e6869c5fcc8277ee11c364070447959266f4be600636a274163ed452f41d
3
  size 11710970
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:711bdee872abc49606b6fde70df2dc508857e0526c4e1d28c216366ec2aa870d
3
  size 11710970
checkpoint-1/trainer_state.json CHANGED
@@ -2,7 +2,7 @@
2
  "best_global_step": null,
3
  "best_metric": null,
4
  "best_model_checkpoint": null,
5
- "epoch": 8.17026839331672e-05,
6
  "eval_steps": 500,
7
  "global_step": 1,
8
  "is_hyper_param_search": false,
@@ -10,10 +10,10 @@
10
  "is_world_process_zero": true,
11
  "log_history": [
12
  {
13
- "epoch": 8.17026839331672e-05,
14
- "grad_norm": 0.6317720413208008,
15
  "learning_rate": 0.0,
16
- "loss": 0.6009,
17
  "step": 1
18
  }
19
  ],
 
2
  "best_global_step": null,
3
  "best_metric": null,
4
  "best_model_checkpoint": null,
5
+ "epoch": 0.009216589861751152,
6
  "eval_steps": 500,
7
  "global_step": 1,
8
  "is_hyper_param_search": false,
 
10
  "is_world_process_zero": true,
11
  "log_history": [
12
  {
13
+ "epoch": 0.009216589861751152,
14
+ "grad_norm": 0.8389255404472351,
15
  "learning_rate": 0.0,
16
+ "loss": 1.9632,
17
  "step": 1
18
  }
19
  ],
checkpoint-1/training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:20b6f6d4163f74a086fa1f62460d309e425737a02f93e1f2c676c683f6116533
3
  size 7224
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e6c80cc305d6c92ff876f5b47578d2bc99ffd827716ce421d9239686d0ccfb16
3
  size 7224