DavidNguyen committed
Commit dc64c72 · verified · 1 parent: d1e32cd

Delete sft_full

This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. sft_full/hyperrouter/added_tokens.json +0 -13
  2. sft_full/hyperrouter/config.json +0 -170
  3. sft_full/hyperrouter/generation_config.json +0 -12
  4. sft_full/hyperrouter/model-00001-of-00003.safetensors +0 -3
  5. sft_full/hyperrouter/model-00002-of-00003.safetensors +0 -3
  6. sft_full/hyperrouter/model-00003-of-00003.safetensors +0 -3
  7. sft_full/hyperrouter/model.safetensors.index.json +0 -0
  8. sft_full/hyperrouter/special_tokens_map.json +0 -24
  9. sft_full/hyperrouter/tokenizer.model +0 -3
  10. sft_full/hyperrouter/tokenizer_config.json +0 -132
  11. sft_full/hyperrouter/trainer_state.json +0 -0
  12. sft_full/hyperrouter/training_args.bin +0 -3
  13. sft_full/smoe/added_tokens.json +0 -13
  14. sft_full/smoe/config.json +0 -168
  15. sft_full/smoe/generation_config.json +0 -12
  16. sft_full/smoe/model-00001-of-00003.safetensors +0 -3
  17. sft_full/smoe/model-00002-of-00003.safetensors +0 -3
  18. sft_full/smoe/model-00003-of-00003.safetensors +0 -3
  19. sft_full/smoe/model.safetensors.index.json +0 -0
  20. sft_full/smoe/special_tokens_map.json +0 -24
  21. sft_full/smoe/tokenizer.model +0 -3
  22. sft_full/smoe/tokenizer_config.json +0 -132
  23. sft_full/smoe/trainer_state.json +0 -0
  24. sft_full/smoe/training_args.bin +0 -3
  25. sft_full/smoe_cosinegating/added_tokens.json +0 -13
  26. sft_full/smoe_cosinegating/config.json +0 -168
  27. sft_full/smoe_cosinegating/generation_config.json +0 -12
  28. sft_full/smoe_cosinegating/model-00001-of-00003.safetensors +0 -3
  29. sft_full/smoe_cosinegating/model-00002-of-00003.safetensors +0 -3
  30. sft_full/smoe_cosinegating/model-00003-of-00003.safetensors +0 -3
  31. sft_full/smoe_cosinegating/model.safetensors.index.json +0 -0
  32. sft_full/smoe_cosinegating/special_tokens_map.json +0 -24
  33. sft_full/smoe_cosinegating/tokenizer.model +0 -3
  34. sft_full/smoe_cosinegating/tokenizer_config.json +0 -132
  35. sft_full/smoe_cosinegating/trainer_state.json +0 -0
  36. sft_full/smoe_cosinegating/training_args.bin +0 -3
  37. sft_full/smoe_perturbed/added_tokens.json +0 -13
  38. sft_full/smoe_perturbed/config.json +0 -168
  39. sft_full/smoe_perturbed/generation_config.json +0 -12
  40. sft_full/smoe_perturbed/model-00001-of-00003.safetensors +0 -3
  41. sft_full/smoe_perturbed/model-00002-of-00003.safetensors +0 -3
  42. sft_full/smoe_perturbed/model-00003-of-00003.safetensors +0 -3
  43. sft_full/smoe_perturbed/model.safetensors.index.json +0 -0
  44. sft_full/smoe_perturbed/special_tokens_map.json +0 -24
  45. sft_full/smoe_perturbed/tokenizer.model +0 -3
  46. sft_full/smoe_perturbed/tokenizer_config.json +0 -132
  47. sft_full/smoe_perturbed/trainer_state.json +0 -0
  48. sft_full/smoe_perturbed/training_args.bin +0 -3
  49. sft_full/smoe_sigmoidgating/added_tokens.json +0 -13
  50. sft_full/smoe_sigmoidgating/config.json +0 -168
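For context, a whole-folder deletion like this commit can be made with the huggingface_hub client. A minimal sketch, assuming a hypothetical repo_id; this is not necessarily how the original commit was produced:

from huggingface_hub import HfApi, CommitOperationDelete

api = HfApi()
api.create_commit(
    repo_id="DavidNguyen/example-repo",  # hypothetical; substitute the real repository
    operations=[CommitOperationDelete(path_in_repo="sft_full/", is_folder=True)],
    commit_message="Delete sft_full",
)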
sft_full/hyperrouter/added_tokens.json DELETED
@@ -1,13 +0,0 @@
-{
-  "<|assistant|>": 32001,
-  "<|endoftext|>": 32000,
-  "<|end|>": 32007,
-  "<|placeholder1|>": 32002,
-  "<|placeholder2|>": 32003,
-  "<|placeholder3|>": 32004,
-  "<|placeholder4|>": 32005,
-  "<|placeholder5|>": 32008,
-  "<|placeholder6|>": 32009,
-  "<|system|>": 32006,
-  "<|user|>": 32010
-}
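The deleted mapping adds the Phi-3-style chat special tokens on top of the 32000-token base vocabulary. A minimal sketch of verifying the IDs through the tokenizer, assuming a hypothetical local copy of the checkpoint:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/sft_full/hyperrouter")  # hypothetical local copy
print(tok.convert_tokens_to_ids("<|assistant|>"))  # expected 32001, per added_tokens.json
print(tok.convert_tokens_to_ids("<|user|>"))       # expected 32010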
sft_full/hyperrouter/config.json DELETED
@@ -1,170 +0,0 @@
-{
-  "_name_or_path": "/cm/archive/namnv78/checkpoints/phi35-siglip224/pft",
-  "architectures": [
-    "LlavaPhiForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
-  },
-  "balance_loss_coef": 0.1,
-  "bos_token_id": 1,
-  "clip_smoe": true,
-  "dropout": false,
-  "embd_pdrop": 0.0,
-  "eos_token_id": 32000,
-  "freeze_mm_mlp_adapter": false,
-  "hidden_act": "silu",
-  "hidden_size": 3072,
-  "image_aspect_ratio": "pad",
-  "initializer_range": 0.02,
-  "intermediate_size": 8192,
-  "local_rank": 0,
-  "max_position_embeddings": 131072,
-  "mlp_smoe": true,
-  "mm_hidden_size": 1152,
-  "mm_patch_merge_type": "flat",
-  "mm_projector_lr": null,
-  "mm_projector_type": "moe",
-  "mm_use_im_patch_token": false,
-  "mm_use_im_start_end": false,
-  "mm_vision_select_feature": "patch",
-  "mm_vision_select_layer": -2,
-  "mm_vision_tower": "google/siglip-so400m-patch14-224",
-  "model_type": "llava_phi",
-  "moe_name": "hyperrouter",
-  "num_attention_heads": 32,
-  "num_experts": 4,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "num_layers": 3,
-  "num_selected": 2,
-  "original_max_position_embeddings": 4096,
-  "pad_token_id": 32000,
-  "resid_pdrop": 0.0,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "long_factor": [
-      1.0800000429153442,
-      1.1100000143051147,
-      1.1399999856948853,
-      1.340000033378601,
-      1.5899999141693115,
-      1.600000023841858,
-      1.6200000047683716,
-      2.620000123977661,
-      3.2300000190734863,
-      3.2300000190734863,
-      4.789999961853027,
-      7.400000095367432,
-      7.700000286102295,
-      9.09000015258789,
-      12.199999809265137,
-      17.670000076293945,
-      24.46000099182129,
-      28.57000160217285,
-      30.420001983642578,
-      30.840002059936523,
-      32.590003967285156,
-      32.93000411987305,
-      42.320003509521484,
-      44.96000289916992,
-      50.340003967285156,
-      50.45000457763672,
-      57.55000305175781,
-      57.93000411987305,
-      58.21000289916992,
-      60.1400032043457,
-      62.61000442504883,
-      62.62000274658203,
-      62.71000289916992,
-      63.1400032043457,
-      63.1400032043457,
-      63.77000427246094,
-      63.93000411987305,
-      63.96000289916992,
-      63.970001220703125,
-      64.02999877929688,
-      64.06999969482422,
-      64.08000183105469,
-      64.12000274658203,
-      64.41000366210938,
-      64.4800033569336,
-      64.51000213623047,
-      64.52999877929688,
-      64.83999633789062
-    ],
-    "short_factor": [
-      1.0,
-      1.0199999809265137,
-      1.0299999713897705,
-      1.0299999713897705,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0699999332427979,
-      1.0999999046325684,
-      1.1099998950958252,
-      1.1599998474121094,
-      1.1599998474121094,
-      1.1699998378753662,
-      1.2899998426437378,
-      1.339999794960022,
-      1.679999828338623,
-      1.7899998426437378,
-      1.8199998140335083,
-      1.8499997854232788,
-      1.8799997568130493,
-      1.9099997282028198,
-      1.9399996995925903,
-      1.9899996519088745,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0799996852874756,
-      2.0899996757507324,
-      2.189999580383301,
-      2.2199995517730713,
-      2.5899994373321533,
-      2.729999542236328,
-      2.749999523162842,
-      2.8399994373321533
-    ],
-    "type": "longrope"
-  },
-  "rope_theta": 10000.0,
-  "router_z_loss_coef": 0.01,
-  "scales": [
-    1,
-    3
-  ],
-  "sliding_window": 262144,
-  "tie_word_embeddings": false,
-  "tokenizer_model_max_length": 2048,
-  "tokenizer_padding_side": "right",
-  "topk_max": 2,
-  "topk_min": 1,
-  "torch_dtype": "bfloat16",
-  "training": true,
-  "transformers_version": "4.43.0",
-  "tune_mm_mlp_adapter": false,
-  "use_cache": true,
-  "use_mm_proj": true,
-  "vocab_size": 32064
-}
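This config describes a LLaVA-Phi model whose multimodal MLP projector is a sparse mixture of experts (num_experts 4, num_selected 2) gated by a hyperrouter. A minimal loading sketch, assuming a hypothetical local copy of the checkpoint that still ships the custom configuration_phi3.py module named in auto_map:

from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("path/to/sft_full/hyperrouter", trust_remote_code=True)
print(cfg.moe_name, cfg.num_experts, cfg.num_selected)  # hyperrouter 4 2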
sft_full/hyperrouter/generation_config.json DELETED
@@ -1,12 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 1,
-  "do_sample": true,
-  "eos_token_id": [
-    32007,
-    32001,
-    32000
-  ],
-  "pad_token_id": 32000,
-  "transformers_version": "4.43.0"
-}
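This generation config enables sampling and treats any of <|end|> (32007), <|assistant|> (32001), or <|endoftext|> (32000) as a stop token. A minimal sketch of building the equivalent object directly:

from transformers import GenerationConfig

gen_cfg = GenerationConfig(
    bos_token_id=1,
    do_sample=True,
    eos_token_id=[32007, 32001, 32000],  # generation stops on any of these IDs
    pad_token_id=32000,
)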
sft_full/hyperrouter/model-00001-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:77070ba911bb125ab3cd8424cd59c16fe002d181caf04bf90cfe15a51301c6b4
-size 4972489328
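Each *.safetensors entry in this diff is a three-line Git LFS pointer (spec version, content hash, byte size), not the weights themselves; the roughly 4.97 GB shard lives in LFS storage addressed by its sha256. A minimal sketch of parsing such a pointer (Python 3.9+ for str.removeprefix):

def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "key value"; split on the first space only.
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:77070ba911bb125ab3cd8424cd59c16fe002d181caf04bf90cfe15a51301c6b4\n"
    "size 4972489328\n"
)
print(parse_lfs_pointer(pointer)["size_bytes"])  # 4972489328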
sft_full/hyperrouter/model-00002-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:c92aba44ae28d81e099be78e65ce61b5095566a4646d0882415ff4cc63270f08
-size 4995022432
sft_full/hyperrouter/model-00003-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:fdcfe6f0f1bf56818412c4eec4443cbe87476db39f0f4782d0f29247dc905d28
-size 342468696
sft_full/hyperrouter/model.safetensors.index.json DELETED
The diff for this file is too large to render. See raw diff
 
sft_full/hyperrouter/special_tokens_map.json DELETED
@@ -1,24 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<unk>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
sft_full/hyperrouter/tokenizer.model DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
-size 499723
sft_full/hyperrouter/tokenizer_config.json DELETED
@@ -1,132 +0,0 @@
-{
-  "add_bos_token": false,
-  "add_eos_token": false,
-  "add_prefix_space": true,
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": false
-    },
-    "32000": {
-      "content": "<|endoftext|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|assistant|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32002": {
-      "content": "<|placeholder1|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32003": {
-      "content": "<|placeholder2|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32004": {
-      "content": "<|placeholder3|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32005": {
-      "content": "<|placeholder4|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32006": {
-      "content": "<|system|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32007": {
-      "content": "<|end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32008": {
-      "content": "<|placeholder5|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32009": {
-      "content": "<|placeholder6|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32010": {
-      "content": "<|user|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
-  "legacy": false,
-  "model_max_length": 2048,
-  "pad_token": "<unk>",
-  "padding_side": "right",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
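The chat_template above is the Phi-3 format: each turn is wrapped as <|role|>\n...<|end|>\n, and a generation prompt ends with <|assistant|>\n. A minimal usage sketch, assuming a hypothetical local copy of the checkpoint:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/sft_full/hyperrouter")  # hypothetical local copy
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Describe this image."},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|system|>
# You are a helpful assistant.<|end|>
# <|user|>
# Describe this image.<|end|>
# <|assistant|>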
sft_full/hyperrouter/trainer_state.json DELETED
The diff for this file is too large to render. See raw diff
 
sft_full/hyperrouter/training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ce64b5e180ddbd78a6d2dbb50c07989eae0c5fc9f607e2a2025709f4528ef98d
-size 8184
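training_args.bin is the pickled TrainingArguments object the HF Trainer saves alongside a checkpoint, hence the small ~8 KB size. A minimal inspection sketch; note that torch.load with weights_only=False unpickles arbitrary code, so only do this for files you trust:

import torch

args = torch.load("path/to/sft_full/hyperrouter/training_args.bin", weights_only=False)  # hypothetical path
print(args.learning_rate, args.per_device_train_batch_size)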
sft_full/smoe/added_tokens.json DELETED
@@ -1,13 +0,0 @@
-{
-  "<|assistant|>": 32001,
-  "<|endoftext|>": 32000,
-  "<|end|>": 32007,
-  "<|placeholder1|>": 32002,
-  "<|placeholder2|>": 32003,
-  "<|placeholder3|>": 32004,
-  "<|placeholder4|>": 32005,
-  "<|placeholder5|>": 32008,
-  "<|placeholder6|>": 32009,
-  "<|system|>": 32006,
-  "<|user|>": 32010
-}
sft_full/smoe/config.json DELETED
@@ -1,168 +0,0 @@
-{
-  "_name_or_path": "/cm/archive/namnv78/checkpoints/phi35-siglip224/pft",
-  "architectures": [
-    "LlavaPhiForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
-  },
-  "balance_loss_coef": 0.1,
-  "bos_token_id": 1,
-  "clip_smoe": true,
-  "dropout": false,
-  "embd_pdrop": 0.0,
-  "eos_token_id": 32000,
-  "freeze_mm_mlp_adapter": false,
-  "hidden_act": "silu",
-  "hidden_size": 3072,
-  "image_aspect_ratio": "pad",
-  "initializer_range": 0.02,
-  "intermediate_size": 8192,
-  "local_rank": 0,
-  "max_position_embeddings": 131072,
-  "mlp_smoe": true,
-  "mm_hidden_size": 1152,
-  "mm_patch_merge_type": "flat",
-  "mm_projector_lr": null,
-  "mm_projector_type": "moe",
-  "mm_use_im_patch_token": false,
-  "mm_use_im_start_end": false,
-  "mm_vision_select_feature": "patch",
-  "mm_vision_select_layer": -2,
-  "mm_vision_tower": "google/siglip-so400m-patch14-224",
-  "model_type": "llava_phi",
-  "moe_name": "smoe",
-  "num_attention_heads": 32,
-  "num_experts": 4,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "num_layers": 3,
-  "num_selected": 2,
-  "original_max_position_embeddings": 4096,
-  "pad_token_id": 32000,
-  "resid_pdrop": 0.0,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "long_factor": [
-      1.0800000429153442,
-      1.1100000143051147,
-      1.1399999856948853,
-      1.340000033378601,
-      1.5899999141693115,
-      1.600000023841858,
-      1.6200000047683716,
-      2.620000123977661,
-      3.2300000190734863,
-      3.2300000190734863,
-      4.789999961853027,
-      7.400000095367432,
-      7.700000286102295,
-      9.09000015258789,
-      12.199999809265137,
-      17.670000076293945,
-      24.46000099182129,
-      28.57000160217285,
-      30.420001983642578,
-      30.840002059936523,
-      32.590003967285156,
-      32.93000411987305,
-      42.320003509521484,
-      44.96000289916992,
-      50.340003967285156,
-      50.45000457763672,
-      57.55000305175781,
-      57.93000411987305,
-      58.21000289916992,
-      60.1400032043457,
-      62.61000442504883,
-      62.62000274658203,
-      62.71000289916992,
-      63.1400032043457,
-      63.1400032043457,
-      63.77000427246094,
-      63.93000411987305,
-      63.96000289916992,
-      63.970001220703125,
-      64.02999877929688,
-      64.06999969482422,
-      64.08000183105469,
-      64.12000274658203,
-      64.41000366210938,
-      64.4800033569336,
-      64.51000213623047,
-      64.52999877929688,
-      64.83999633789062
-    ],
-    "short_factor": [
-      1.0,
-      1.0199999809265137,
-      1.0299999713897705,
-      1.0299999713897705,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0699999332427979,
-      1.0999999046325684,
-      1.1099998950958252,
-      1.1599998474121094,
-      1.1599998474121094,
-      1.1699998378753662,
-      1.2899998426437378,
-      1.339999794960022,
-      1.679999828338623,
-      1.7899998426437378,
-      1.8199998140335083,
-      1.8499997854232788,
-      1.8799997568130493,
-      1.9099997282028198,
-      1.9399996995925903,
-      1.9899996519088745,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0799996852874756,
-      2.0899996757507324,
-      2.189999580383301,
-      2.2199995517730713,
-      2.5899994373321533,
-      2.729999542236328,
-      2.749999523162842,
-      2.8399994373321533
-    ],
-    "type": "longrope"
-  },
-  "rope_theta": 10000.0,
-  "router_z_loss_coef": 0.01,
-  "scales": [
-    1,
-    3
-  ],
-  "sliding_window": 262144,
-  "tie_word_embeddings": false,
-  "tokenizer_model_max_length": 2048,
-  "tokenizer_padding_side": "right",
-  "torch_dtype": "bfloat16",
-  "training": true,
-  "transformers_version": "4.43.0",
-  "tune_mm_mlp_adapter": false,
-  "use_cache": true,
-  "use_mm_proj": true,
-  "vocab_size": 32064
-}
sft_full/smoe/generation_config.json DELETED
@@ -1,12 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 1,
-  "do_sample": true,
-  "eos_token_id": [
-    32007,
-    32001,
-    32000
-  ],
-  "pad_token_id": 32000,
-  "transformers_version": "4.43.0"
-}
sft_full/smoe/model-00001-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:3060186020e81e85f75d680f7a6446b1397f8e60fcf713fae963647075b186f6
-size 4972489328
sft_full/smoe/model-00002-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:8cb877ce803d3f1c50c57760d319415451c7d7a4e2221b564707681bffa8c6fb
-size 4985529648
sft_full/smoe/model-00003-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e0e2b038bcb482466796bb79ce90e466efe95f6772a7318cd0cc19be3e9a828
-size 248943552
sft_full/smoe/model.safetensors.index.json DELETED
The diff for this file is too large to render. See raw diff
 
sft_full/smoe/special_tokens_map.json DELETED
@@ -1,24 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<unk>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
sft_full/smoe/tokenizer.model DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
-size 499723
sft_full/smoe/tokenizer_config.json DELETED
@@ -1,132 +0,0 @@
-{
-  "add_bos_token": false,
-  "add_eos_token": false,
-  "add_prefix_space": true,
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": false
-    },
-    "32000": {
-      "content": "<|endoftext|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|assistant|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32002": {
-      "content": "<|placeholder1|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32003": {
-      "content": "<|placeholder2|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32004": {
-      "content": "<|placeholder3|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32005": {
-      "content": "<|placeholder4|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32006": {
-      "content": "<|system|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32007": {
-      "content": "<|end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32008": {
-      "content": "<|placeholder5|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32009": {
-      "content": "<|placeholder6|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32010": {
-      "content": "<|user|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
-  "legacy": false,
-  "model_max_length": 2048,
-  "pad_token": "<unk>",
-  "padding_side": "right",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
sft_full/smoe/trainer_state.json DELETED
The diff for this file is too large to render. See raw diff
 
sft_full/smoe/training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:894656220e9537a9337b5f57a61a0ede5e77ae8c5e2dd56e2d353113ca978d56
-size 8120
sft_full/smoe_cosinegating/added_tokens.json DELETED
@@ -1,13 +0,0 @@
-{
-  "<|assistant|>": 32001,
-  "<|endoftext|>": 32000,
-  "<|end|>": 32007,
-  "<|placeholder1|>": 32002,
-  "<|placeholder2|>": 32003,
-  "<|placeholder3|>": 32004,
-  "<|placeholder4|>": 32005,
-  "<|placeholder5|>": 32008,
-  "<|placeholder6|>": 32009,
-  "<|system|>": 32006,
-  "<|user|>": 32010
-}
sft_full/smoe_cosinegating/config.json DELETED
@@ -1,168 +0,0 @@
-{
-  "_name_or_path": "/cm/archive/namnv78/checkpoints/phi35-siglip224/pft",
-  "architectures": [
-    "LlavaPhiForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
-  },
-  "balance_loss_coef": 0.1,
-  "bos_token_id": 1,
-  "clip_smoe": true,
-  "dropout": false,
-  "embd_pdrop": 0.0,
-  "eos_token_id": 32000,
-  "freeze_mm_mlp_adapter": false,
-  "hidden_act": "silu",
-  "hidden_size": 3072,
-  "image_aspect_ratio": "pad",
-  "initializer_range": 0.02,
-  "intermediate_size": 8192,
-  "local_rank": 0,
-  "max_position_embeddings": 131072,
-  "mlp_smoe": true,
-  "mm_hidden_size": 1152,
-  "mm_patch_merge_type": "flat",
-  "mm_projector_lr": null,
-  "mm_projector_type": "moe",
-  "mm_use_im_patch_token": false,
-  "mm_use_im_start_end": false,
-  "mm_vision_select_feature": "patch",
-  "mm_vision_select_layer": -2,
-  "mm_vision_tower": "google/siglip-so400m-patch14-224",
-  "model_type": "llava_phi",
-  "moe_name": "smoe_cosinegating",
-  "num_attention_heads": 32,
-  "num_experts": 4,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "num_layers": 3,
-  "num_selected": 2,
-  "original_max_position_embeddings": 4096,
-  "pad_token_id": 32000,
-  "resid_pdrop": 0.0,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "long_factor": [
-      1.0800000429153442,
-      1.1100000143051147,
-      1.1399999856948853,
-      1.340000033378601,
-      1.5899999141693115,
-      1.600000023841858,
-      1.6200000047683716,
-      2.620000123977661,
-      3.2300000190734863,
-      3.2300000190734863,
-      4.789999961853027,
-      7.400000095367432,
-      7.700000286102295,
-      9.09000015258789,
-      12.199999809265137,
-      17.670000076293945,
-      24.46000099182129,
-      28.57000160217285,
-      30.420001983642578,
-      30.840002059936523,
-      32.590003967285156,
-      32.93000411987305,
-      42.320003509521484,
-      44.96000289916992,
-      50.340003967285156,
-      50.45000457763672,
-      57.55000305175781,
-      57.93000411987305,
-      58.21000289916992,
-      60.1400032043457,
-      62.61000442504883,
-      62.62000274658203,
-      62.71000289916992,
-      63.1400032043457,
-      63.1400032043457,
-      63.77000427246094,
-      63.93000411987305,
-      63.96000289916992,
-      63.970001220703125,
-      64.02999877929688,
-      64.06999969482422,
-      64.08000183105469,
-      64.12000274658203,
-      64.41000366210938,
-      64.4800033569336,
-      64.51000213623047,
-      64.52999877929688,
-      64.83999633789062
-    ],
-    "short_factor": [
-      1.0,
-      1.0199999809265137,
-      1.0299999713897705,
-      1.0299999713897705,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0699999332427979,
-      1.0999999046325684,
-      1.1099998950958252,
-      1.1599998474121094,
-      1.1599998474121094,
-      1.1699998378753662,
-      1.2899998426437378,
-      1.339999794960022,
-      1.679999828338623,
-      1.7899998426437378,
-      1.8199998140335083,
-      1.8499997854232788,
-      1.8799997568130493,
-      1.9099997282028198,
-      1.9399996995925903,
-      1.9899996519088745,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0799996852874756,
-      2.0899996757507324,
-      2.189999580383301,
-      2.2199995517730713,
-      2.5899994373321533,
-      2.729999542236328,
-      2.749999523162842,
-      2.8399994373321533
-    ],
-    "type": "longrope"
-  },
-  "rope_theta": 10000.0,
-  "router_z_loss_coef": 0.01,
-  "scales": [
-    1,
-    3
-  ],
-  "sliding_window": 262144,
-  "tie_word_embeddings": false,
-  "tokenizer_model_max_length": 2048,
-  "tokenizer_padding_side": "right",
-  "torch_dtype": "bfloat16",
-  "training": true,
-  "transformers_version": "4.43.0",
-  "tune_mm_mlp_adapter": false,
-  "use_cache": true,
-  "use_mm_proj": true,
-  "vocab_size": 32064
-}
sft_full/smoe_cosinegating/generation_config.json DELETED
@@ -1,12 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 1,
-  "do_sample": true,
-  "eos_token_id": [
-    32007,
-    32001,
-    32000
-  ],
-  "pad_token_id": 32000,
-  "transformers_version": "4.43.0"
-}
sft_full/smoe_cosinegating/model-00001-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0e70af001e069480c661b44bda40ec265f8151b204f2ce7e1bd4a18adc30f49d
-size 4972489328
sft_full/smoe_cosinegating/model-00002-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2d9697408bce9e61d9db9e6a0303f7884a18627150cd410e22f1db4e78fb7d1c
-size 4985533608
sft_full/smoe_cosinegating/model-00003-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d8b17f0f381e5754ad51c721f0eddfd4aae81785deb6a72a998a93423e0f769f
-size 248943664
sft_full/smoe_cosinegating/model.safetensors.index.json DELETED
The diff for this file is too large to render. See raw diff
 
sft_full/smoe_cosinegating/special_tokens_map.json DELETED
@@ -1,24 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<unk>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
sft_full/smoe_cosinegating/tokenizer.model DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
-size 499723
sft_full/smoe_cosinegating/tokenizer_config.json DELETED
@@ -1,132 +0,0 @@
-{
-  "add_bos_token": false,
-  "add_eos_token": false,
-  "add_prefix_space": true,
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": false
-    },
-    "32000": {
-      "content": "<|endoftext|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|assistant|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32002": {
-      "content": "<|placeholder1|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32003": {
-      "content": "<|placeholder2|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32004": {
-      "content": "<|placeholder3|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32005": {
-      "content": "<|placeholder4|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32006": {
-      "content": "<|system|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32007": {
-      "content": "<|end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32008": {
-      "content": "<|placeholder5|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32009": {
-      "content": "<|placeholder6|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32010": {
-      "content": "<|user|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
-  "legacy": false,
-  "model_max_length": 2048,
-  "pad_token": "<unk>",
-  "padding_side": "right",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
sft_full/smoe_cosinegating/trainer_state.json DELETED
The diff for this file is too large to render. See raw diff
 
sft_full/smoe_cosinegating/training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:74abf7f3a5b16db530b49de04d931d31874d9902af82c92d683f25980e00ed19
-size 8184
sft_full/smoe_perturbed/added_tokens.json DELETED
@@ -1,13 +0,0 @@
-{
-  "<|assistant|>": 32001,
-  "<|endoftext|>": 32000,
-  "<|end|>": 32007,
-  "<|placeholder1|>": 32002,
-  "<|placeholder2|>": 32003,
-  "<|placeholder3|>": 32004,
-  "<|placeholder4|>": 32005,
-  "<|placeholder5|>": 32008,
-  "<|placeholder6|>": 32009,
-  "<|system|>": 32006,
-  "<|user|>": 32010
-}
sft_full/smoe_perturbed/config.json DELETED
@@ -1,168 +0,0 @@
-{
-  "_name_or_path": "/cm/archive/namnv78/checkpoints/phi35-siglip224/pft",
-  "architectures": [
-    "LlavaPhiForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
-  },
-  "balance_loss_coef": 0.1,
-  "bos_token_id": 1,
-  "clip_smoe": true,
-  "dropout": false,
-  "embd_pdrop": 0.0,
-  "eos_token_id": 32000,
-  "freeze_mm_mlp_adapter": false,
-  "hidden_act": "silu",
-  "hidden_size": 3072,
-  "image_aspect_ratio": "pad",
-  "initializer_range": 0.02,
-  "intermediate_size": 8192,
-  "local_rank": 0,
-  "max_position_embeddings": 131072,
-  "mlp_smoe": true,
-  "mm_hidden_size": 1152,
-  "mm_patch_merge_type": "flat",
-  "mm_projector_lr": null,
-  "mm_projector_type": "moe",
-  "mm_use_im_patch_token": false,
-  "mm_use_im_start_end": false,
-  "mm_vision_select_feature": "patch",
-  "mm_vision_select_layer": -2,
-  "mm_vision_tower": "google/siglip-so400m-patch14-224",
-  "model_type": "llava_phi",
-  "moe_name": "smoe_perturbed",
-  "num_attention_heads": 32,
-  "num_experts": 4,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "num_layers": 3,
-  "num_selected": 2,
-  "original_max_position_embeddings": 4096,
-  "pad_token_id": 32000,
-  "resid_pdrop": 0.0,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "long_factor": [
-      1.0800000429153442,
-      1.1100000143051147,
-      1.1399999856948853,
-      1.340000033378601,
-      1.5899999141693115,
-      1.600000023841858,
-      1.6200000047683716,
-      2.620000123977661,
-      3.2300000190734863,
-      3.2300000190734863,
-      4.789999961853027,
-      7.400000095367432,
-      7.700000286102295,
-      9.09000015258789,
-      12.199999809265137,
-      17.670000076293945,
-      24.46000099182129,
-      28.57000160217285,
-      30.420001983642578,
-      30.840002059936523,
-      32.590003967285156,
-      32.93000411987305,
-      42.320003509521484,
-      44.96000289916992,
-      50.340003967285156,
-      50.45000457763672,
-      57.55000305175781,
-      57.93000411987305,
-      58.21000289916992,
-      60.1400032043457,
-      62.61000442504883,
-      62.62000274658203,
-      62.71000289916992,
-      63.1400032043457,
-      63.1400032043457,
-      63.77000427246094,
-      63.93000411987305,
-      63.96000289916992,
-      63.970001220703125,
-      64.02999877929688,
-      64.06999969482422,
-      64.08000183105469,
-      64.12000274658203,
-      64.41000366210938,
-      64.4800033569336,
-      64.51000213623047,
-      64.52999877929688,
-      64.83999633789062
-    ],
-    "short_factor": [
-      1.0,
-      1.0199999809265137,
-      1.0299999713897705,
-      1.0299999713897705,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0699999332427979,
-      1.0999999046325684,
-      1.1099998950958252,
-      1.1599998474121094,
-      1.1599998474121094,
-      1.1699998378753662,
-      1.2899998426437378,
-      1.339999794960022,
-      1.679999828338623,
-      1.7899998426437378,
-      1.8199998140335083,
-      1.8499997854232788,
-      1.8799997568130493,
-      1.9099997282028198,
-      1.9399996995925903,
-      1.9899996519088745,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0799996852874756,
-      2.0899996757507324,
-      2.189999580383301,
-      2.2199995517730713,
-      2.5899994373321533,
-      2.729999542236328,
-      2.749999523162842,
-      2.8399994373321533
-    ],
-    "type": "longrope"
-  },
-  "rope_theta": 10000.0,
-  "router_z_loss_coef": 0.01,
-  "scales": [
-    1,
-    3
-  ],
-  "sliding_window": 262144,
-  "tie_word_embeddings": false,
-  "tokenizer_model_max_length": 2048,
-  "tokenizer_padding_side": "right",
-  "torch_dtype": "bfloat16",
-  "training": true,
-  "transformers_version": "4.43.0",
-  "tune_mm_mlp_adapter": false,
-  "use_cache": true,
-  "use_mm_proj": true,
-  "vocab_size": 32064
-}
sft_full/smoe_perturbed/generation_config.json DELETED
@@ -1,12 +0,0 @@
-{
-  "_from_model_config": true,
-  "bos_token_id": 1,
-  "do_sample": true,
-  "eos_token_id": [
-    32007,
-    32001,
-    32000
-  ],
-  "pad_token_id": 32000,
-  "transformers_version": "4.43.0"
-}
sft_full/smoe_perturbed/model-00001-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:87fd7ac6ec78d179eaf9a74c6bcf515318e337ab9f99c8c6d5659630e3f884a9
-size 4972489328
sft_full/smoe_perturbed/model-00002-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ea4992fcf804b3dd4e133110725b2bfeb819559139bddaeb852e6931cf4faa74
-size 4985533608
sft_full/smoe_perturbed/model-00003-of-00003.safetensors DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:369ab1a2c9903478d233348bcdee93fab707680031305c19354783c04fbf4004
-size 248943664
sft_full/smoe_perturbed/model.safetensors.index.json DELETED
The diff for this file is too large to render. See raw diff
 
sft_full/smoe_perturbed/special_tokens_map.json DELETED
@@ -1,24 +0,0 @@
-{
-  "bos_token": {
-    "content": "<s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|endoftext|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<unk>",
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
-}
sft_full/smoe_perturbed/tokenizer.model DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
-size 499723
sft_full/smoe_perturbed/tokenizer_config.json DELETED
@@ -1,132 +0,0 @@
-{
-  "add_bos_token": false,
-  "add_eos_token": false,
-  "add_prefix_space": true,
-  "added_tokens_decoder": {
-    "0": {
-      "content": "<unk>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "1": {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "2": {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": false
-    },
-    "32000": {
-      "content": "<|endoftext|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": false,
-      "single_word": false,
-      "special": true
-    },
-    "32001": {
-      "content": "<|assistant|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32002": {
-      "content": "<|placeholder1|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32003": {
-      "content": "<|placeholder2|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32004": {
-      "content": "<|placeholder3|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32005": {
-      "content": "<|placeholder4|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32006": {
-      "content": "<|system|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32007": {
-      "content": "<|end|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32008": {
-      "content": "<|placeholder5|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32009": {
-      "content": "<|placeholder6|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    },
-    "32010": {
-      "content": "<|user|>",
-      "lstrip": false,
-      "normalized": false,
-      "rstrip": true,
-      "single_word": false,
-      "special": true
-    }
-  },
-  "bos_token": "<s>",
-  "chat_template": "{% for message in messages %}{% if message['role'] == 'system' and message['content'] %}{{'<|system|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'user' %}{{'<|user|>\n' + message['content'] + '<|end|>\n'}}{% elif message['role'] == 'assistant' %}{{'<|assistant|>\n' + message['content'] + '<|end|>\n'}}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\n' }}{% else %}{{ eos_token }}{% endif %}",
-  "clean_up_tokenization_spaces": false,
-  "eos_token": "<|endoftext|>",
-  "legacy": false,
-  "model_max_length": 2048,
-  "pad_token": "<unk>",
-  "padding_side": "right",
-  "sp_model_kwargs": {},
-  "spaces_between_special_tokens": false,
-  "tokenizer_class": "LlamaTokenizer",
-  "unk_token": "<unk>",
-  "use_default_system_prompt": false
-}
sft_full/smoe_perturbed/trainer_state.json DELETED
The diff for this file is too large to render. See raw diff
 
sft_full/smoe_perturbed/training_args.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:08aaf4412961001d21ae464e0d5be33c4cc8cb307123de1b06ef7f4872692493
-size 8184
sft_full/smoe_sigmoidgating/added_tokens.json DELETED
@@ -1,13 +0,0 @@
-{
-  "<|assistant|>": 32001,
-  "<|endoftext|>": 32000,
-  "<|end|>": 32007,
-  "<|placeholder1|>": 32002,
-  "<|placeholder2|>": 32003,
-  "<|placeholder3|>": 32004,
-  "<|placeholder4|>": 32005,
-  "<|placeholder5|>": 32008,
-  "<|placeholder6|>": 32009,
-  "<|system|>": 32006,
-  "<|user|>": 32010
-}
sft_full/smoe_sigmoidgating/config.json DELETED
@@ -1,168 +0,0 @@
-{
-  "_name_or_path": "/cm/archive/namnv78/checkpoints/phi35-siglip224/pft",
-  "architectures": [
-    "LlavaPhiForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_phi3.Phi3Config",
-    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
-  },
-  "balance_loss_coef": 0.1,
-  "bos_token_id": 1,
-  "clip_smoe": true,
-  "dropout": false,
-  "embd_pdrop": 0.0,
-  "eos_token_id": 32000,
-  "freeze_mm_mlp_adapter": false,
-  "hidden_act": "silu",
-  "hidden_size": 3072,
-  "image_aspect_ratio": "pad",
-  "initializer_range": 0.02,
-  "intermediate_size": 8192,
-  "local_rank": 0,
-  "max_position_embeddings": 131072,
-  "mlp_smoe": true,
-  "mm_hidden_size": 1152,
-  "mm_patch_merge_type": "flat",
-  "mm_projector_lr": null,
-  "mm_projector_type": "moe",
-  "mm_use_im_patch_token": false,
-  "mm_use_im_start_end": false,
-  "mm_vision_select_feature": "patch",
-  "mm_vision_select_layer": -2,
-  "mm_vision_tower": "google/siglip-so400m-patch14-224",
-  "model_type": "llava_phi",
-  "moe_name": "smoe_sigmoidgating",
-  "num_attention_heads": 32,
-  "num_experts": 4,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 32,
-  "num_layers": 3,
-  "num_selected": 2,
-  "original_max_position_embeddings": 4096,
-  "pad_token_id": 32000,
-  "resid_pdrop": 0.0,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": {
-    "long_factor": [
-      1.0800000429153442,
-      1.1100000143051147,
-      1.1399999856948853,
-      1.340000033378601,
-      1.5899999141693115,
-      1.600000023841858,
-      1.6200000047683716,
-      2.620000123977661,
-      3.2300000190734863,
-      3.2300000190734863,
-      4.789999961853027,
-      7.400000095367432,
-      7.700000286102295,
-      9.09000015258789,
-      12.199999809265137,
-      17.670000076293945,
-      24.46000099182129,
-      28.57000160217285,
-      30.420001983642578,
-      30.840002059936523,
-      32.590003967285156,
-      32.93000411987305,
-      42.320003509521484,
-      44.96000289916992,
-      50.340003967285156,
-      50.45000457763672,
-      57.55000305175781,
-      57.93000411987305,
-      58.21000289916992,
-      60.1400032043457,
-      62.61000442504883,
-      62.62000274658203,
-      62.71000289916992,
-      63.1400032043457,
-      63.1400032043457,
-      63.77000427246094,
-      63.93000411987305,
-      63.96000289916992,
-      63.970001220703125,
-      64.02999877929688,
-      64.06999969482422,
-      64.08000183105469,
-      64.12000274658203,
-      64.41000366210938,
-      64.4800033569336,
-      64.51000213623047,
-      64.52999877929688,
-      64.83999633789062
-    ],
-    "short_factor": [
-      1.0,
-      1.0199999809265137,
-      1.0299999713897705,
-      1.0299999713897705,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0499999523162842,
-      1.0699999332427979,
-      1.0999999046325684,
-      1.1099998950958252,
-      1.1599998474121094,
-      1.1599998474121094,
-      1.1699998378753662,
-      1.2899998426437378,
-      1.339999794960022,
-      1.679999828338623,
-      1.7899998426437378,
-      1.8199998140335083,
-      1.8499997854232788,
-      1.8799997568130493,
-      1.9099997282028198,
-      1.9399996995925903,
-      1.9899996519088745,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0199997425079346,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0299997329711914,
-      2.0799996852874756,
-      2.0899996757507324,
-      2.189999580383301,
-      2.2199995517730713,
-      2.5899994373321533,
-      2.729999542236328,
-      2.749999523162842,
-      2.8399994373321533
-    ],
-    "type": "longrope"
-  },
-  "rope_theta": 10000.0,
-  "router_z_loss_coef": 0.01,
-  "scales": [
-    1,
-    3
-  ],
-  "sliding_window": 262144,
-  "tie_word_embeddings": false,
-  "tokenizer_model_max_length": 2048,
-  "tokenizer_padding_side": "right",
-  "torch_dtype": "bfloat16",
-  "training": true,
-  "transformers_version": "4.43.0",
-  "tune_mm_mlp_adapter": false,
-  "use_cache": true,
-  "use_mm_proj": true,
-  "vocab_size": 32064
-}