Question Answering
Transformers
Safetensors
English
pit
U-rara commited on
Commit
0823a18
verified
1 Parent(s): b6d030a

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. config.json +211 -0
  2. generation_config.json +7 -0
  3. global_step42881/layer_0_expert_0_mp_rank_00_model_states.pt +3 -0
  4. global_step42881/layer_0_expert_1_mp_rank_00_model_states.pt +3 -0
  5. global_step42881/layer_0_expert_2_mp_rank_00_model_states.pt +3 -0
  6. global_step42881/layer_0_expert_3_mp_rank_00_model_states.pt +3 -0
  7. global_step42881/layer_10_expert_0_mp_rank_00_model_states.pt +3 -0
  8. global_step42881/layer_10_expert_1_mp_rank_00_model_states.pt +3 -0
  9. global_step42881/layer_10_expert_2_mp_rank_00_model_states.pt +3 -0
  10. global_step42881/layer_10_expert_3_mp_rank_00_model_states.pt +3 -0
  11. global_step42881/layer_11_expert_0_mp_rank_00_model_states.pt +3 -0
  12. global_step42881/layer_11_expert_1_mp_rank_00_model_states.pt +3 -0
  13. global_step42881/layer_11_expert_2_mp_rank_00_model_states.pt +3 -0
  14. global_step42881/layer_11_expert_3_mp_rank_00_model_states.pt +3 -0
  15. global_step42881/layer_12_expert_0_mp_rank_00_model_states.pt +3 -0
  16. global_step42881/layer_12_expert_1_mp_rank_00_model_states.pt +3 -0
  17. global_step42881/layer_12_expert_2_mp_rank_00_model_states.pt +3 -0
  18. global_step42881/layer_12_expert_3_mp_rank_00_model_states.pt +3 -0
  19. global_step42881/layer_13_expert_0_mp_rank_00_model_states.pt +3 -0
  20. global_step42881/layer_13_expert_1_mp_rank_00_model_states.pt +3 -0
  21. global_step42881/layer_13_expert_2_mp_rank_00_model_states.pt +3 -0
  22. global_step42881/layer_13_expert_3_mp_rank_00_model_states.pt +3 -0
  23. global_step42881/layer_14_expert_0_mp_rank_00_model_states.pt +3 -0
  24. global_step42881/layer_14_expert_1_mp_rank_00_model_states.pt +3 -0
  25. global_step42881/layer_14_expert_2_mp_rank_00_model_states.pt +3 -0
  26. global_step42881/layer_14_expert_3_mp_rank_00_model_states.pt +3 -0
  27. global_step42881/layer_15_expert_0_mp_rank_00_model_states.pt +3 -0
  28. global_step42881/layer_15_expert_1_mp_rank_00_model_states.pt +3 -0
  29. global_step42881/layer_15_expert_2_mp_rank_00_model_states.pt +3 -0
  30. global_step42881/layer_15_expert_3_mp_rank_00_model_states.pt +3 -0
  31. global_step42881/layer_16_expert_0_mp_rank_00_model_states.pt +3 -0
  32. global_step42881/layer_16_expert_1_mp_rank_00_model_states.pt +3 -0
  33. global_step42881/layer_16_expert_2_mp_rank_00_model_states.pt +3 -0
  34. global_step42881/layer_16_expert_3_mp_rank_00_model_states.pt +3 -0
  35. global_step42881/layer_17_expert_0_mp_rank_00_model_states.pt +3 -0
  36. global_step42881/layer_17_expert_1_mp_rank_00_model_states.pt +3 -0
  37. global_step42881/layer_17_expert_2_mp_rank_00_model_states.pt +3 -0
  38. global_step42881/layer_17_expert_3_mp_rank_00_model_states.pt +3 -0
  39. global_step42881/layer_18_expert_0_mp_rank_00_model_states.pt +3 -0
  40. global_step42881/layer_18_expert_1_mp_rank_00_model_states.pt +3 -0
  41. global_step42881/layer_18_expert_2_mp_rank_00_model_states.pt +3 -0
  42. global_step42881/layer_18_expert_3_mp_rank_00_model_states.pt +3 -0
  43. global_step42881/layer_19_expert_0_mp_rank_00_model_states.pt +3 -0
  44. global_step42881/layer_19_expert_1_mp_rank_00_model_states.pt +3 -0
  45. global_step42881/layer_19_expert_2_mp_rank_00_model_states.pt +3 -0
  46. global_step42881/layer_19_expert_3_mp_rank_00_model_states.pt +3 -0
  47. global_step42881/layer_1_expert_0_mp_rank_00_model_states.pt +3 -0
  48. global_step42881/layer_1_expert_1_mp_rank_00_model_states.pt +3 -0
  49. global_step42881/layer_1_expert_2_mp_rank_00_model_states.pt +3 -0
  50. global_step42881/layer_1_expert_3_mp_rank_00_model_states.pt +3 -0
config.json ADDED
@@ -0,0 +1,211 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "PITForConditionGeneration"
4
+ ],
5
+ "hidden_sizes": [
6
+ 1280,
7
+ 2048
8
+ ],
9
+ "ignore_index": -100,
10
+ "model_type": "pit",
11
+ "moe_config": {
12
+ "capacity_factor": 1.25,
13
+ "drop_tokens": true,
14
+ "ep_size": 4,
15
+ "eval_capacity_factor": 2.0,
16
+ "k": 1,
17
+ "num_experts": 4,
18
+ "use_residual": false,
19
+ "use_rts": false,
20
+ "use_tutel": true
21
+ },
22
+ "pad_token_id": 2,
23
+ "projector_hidden_act": "gelu",
24
+ "protein_config": {
25
+ "_name_or_path": "facebook/esm2_t33_650M_UR50D",
26
+ "add_cross_attention": false,
27
+ "architectures": [
28
+ "EsmModel"
29
+ ],
30
+ "attention_probs_dropout_prob": 0.0,
31
+ "bad_words_ids": null,
32
+ "begin_suppress_tokens": null,
33
+ "bos_token_id": null,
34
+ "chunk_size_feed_forward": 0,
35
+ "classifier_dropout": null,
36
+ "cross_attention_hidden_size": null,
37
+ "decoder_start_token_id": null,
38
+ "diversity_penalty": 0.0,
39
+ "do_sample": false,
40
+ "early_stopping": false,
41
+ "emb_layer_norm_before": false,
42
+ "encoder_no_repeat_ngram_size": 0,
43
+ "eos_token_id": null,
44
+ "esmfold_config": null,
45
+ "exponential_decay_length_penalty": null,
46
+ "finetuning_task": null,
47
+ "forced_bos_token_id": null,
48
+ "forced_eos_token_id": null,
49
+ "hidden_act": "gelu",
50
+ "hidden_dropout_prob": 0.0,
51
+ "hidden_size": 1280,
52
+ "id2label": {
53
+ "0": "LABEL_0",
54
+ "1": "LABEL_1"
55
+ },
56
+ "initializer_range": 0.02,
57
+ "intermediate_size": 5120,
58
+ "is_decoder": false,
59
+ "is_encoder_decoder": false,
60
+ "is_folding_model": false,
61
+ "label2id": {
62
+ "LABEL_0": 0,
63
+ "LABEL_1": 1
64
+ },
65
+ "layer_norm_eps": 1e-05,
66
+ "length_penalty": 1.0,
67
+ "mask_token_id": 32,
68
+ "max_length": 20,
69
+ "max_position_embeddings": 1026,
70
+ "min_length": 0,
71
+ "model_type": "esm",
72
+ "no_repeat_ngram_size": 0,
73
+ "num_attention_heads": 20,
74
+ "num_beam_groups": 1,
75
+ "num_beams": 1,
76
+ "num_hidden_layers": 33,
77
+ "num_return_sequences": 1,
78
+ "output_attentions": false,
79
+ "output_hidden_states": false,
80
+ "output_scores": false,
81
+ "pad_token_id": 1,
82
+ "position_embedding_type": "rotary",
83
+ "prefix": null,
84
+ "problem_type": null,
85
+ "pruned_heads": {},
86
+ "remove_invalid_values": false,
87
+ "repetition_penalty": 1.0,
88
+ "return_dict": true,
89
+ "return_dict_in_generate": false,
90
+ "sep_token_id": null,
91
+ "suppress_tokens": null,
92
+ "task_specific_params": null,
93
+ "temperature": 1.0,
94
+ "tf_legacy_loss": false,
95
+ "tie_encoder_decoder": false,
96
+ "tie_word_embeddings": true,
97
+ "token_dropout": true,
98
+ "tokenizer_class": null,
99
+ "top_k": 50,
100
+ "top_p": 1.0,
101
+ "torch_dtype": "float32",
102
+ "torchscript": false,
103
+ "typical_p": 1.0,
104
+ "use_bfloat16": false,
105
+ "use_cache": true,
106
+ "vocab_list": null,
107
+ "vocab_size": 33
108
+ },
109
+ "protein_feature_layer": -2,
110
+ "protein_feature_select_strategy": "full",
111
+ "protein_token_index": 32000,
112
+ "sequence_only": false,
113
+ "text_config": {
114
+ "_name_or_path": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
115
+ "add_cross_attention": false,
116
+ "architectures": [
117
+ "LlamaForCausalLM"
118
+ ],
119
+ "attention_bias": false,
120
+ "attention_dropout": 0.0,
121
+ "bad_words_ids": null,
122
+ "begin_suppress_tokens": null,
123
+ "bos_token_id": 1,
124
+ "chunk_size_feed_forward": 0,
125
+ "cross_attention_hidden_size": null,
126
+ "decoder_start_token_id": null,
127
+ "diversity_penalty": 0.0,
128
+ "do_sample": false,
129
+ "early_stopping": false,
130
+ "encoder_no_repeat_ngram_size": 0,
131
+ "eos_token_id": 2,
132
+ "exponential_decay_length_penalty": null,
133
+ "finetuning_task": null,
134
+ "forced_bos_token_id": null,
135
+ "forced_eos_token_id": null,
136
+ "hidden_act": "silu",
137
+ "hidden_size": 2048,
138
+ "id2label": {
139
+ "0": "LABEL_0",
140
+ "1": "LABEL_1"
141
+ },
142
+ "initializer_range": 0.02,
143
+ "intermediate_size": 5632,
144
+ "is_decoder": false,
145
+ "is_encoder_decoder": false,
146
+ "label2id": {
147
+ "LABEL_0": 0,
148
+ "LABEL_1": 1
149
+ },
150
+ "length_penalty": 1.0,
151
+ "max_length": 20,
152
+ "max_position_embeddings": 2048,
153
+ "min_length": 0,
154
+ "model_type": "llama",
155
+ "moe_config": {
156
+ "capacity_factor": 1.25,
157
+ "drop_tokens": true,
158
+ "ep_size": 4,
159
+ "eval_capacity_factor": 2.0,
160
+ "k": 1,
161
+ "num_experts": 4,
162
+ "use_residual": false,
163
+ "use_rts": false,
164
+ "use_tutel": true
165
+ },
166
+ "no_repeat_ngram_size": 0,
167
+ "num_attention_heads": 32,
168
+ "num_beam_groups": 1,
169
+ "num_beams": 1,
170
+ "num_experts": 4,
171
+ "num_hidden_layers": 22,
172
+ "num_key_value_heads": 4,
173
+ "num_return_sequences": 1,
174
+ "output_attentions": false,
175
+ "output_hidden_states": false,
176
+ "output_scores": false,
177
+ "pad_token_id": null,
178
+ "prefix": null,
179
+ "pretraining_tp": 1,
180
+ "problem_type": null,
181
+ "pruned_heads": {},
182
+ "remove_invalid_values": false,
183
+ "repetition_penalty": 1.0,
184
+ "return_dict": true,
185
+ "return_dict_in_generate": false,
186
+ "rms_norm_eps": 1e-05,
187
+ "rope_scaling": null,
188
+ "rope_theta": 10000.0,
189
+ "router_aux_loss_coef": 0.01,
190
+ "sep_token_id": null,
191
+ "suppress_tokens": null,
192
+ "task_specific_params": null,
193
+ "temperature": 1.0,
194
+ "tf_legacy_loss": false,
195
+ "tie_encoder_decoder": false,
196
+ "tie_word_embeddings": false,
197
+ "tokenizer_class": null,
198
+ "top_k": 50,
199
+ "top_p": 1.0,
200
+ "torch_dtype": "bfloat16",
201
+ "torchscript": false,
202
+ "typical_p": 1.0,
203
+ "use_bfloat16": false,
204
+ "use_cache": true,
205
+ "vocab_size": 32008
206
+ },
207
+ "torch_dtype": "float16",
208
+ "transformers_version": "4.39.0",
209
+ "use_moe": true,
210
+ "vocab_size": 32008
211
+ }
generation_config.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1,
4
+ "eos_token_id": 2,
5
+ "pad_token_id": 2,
6
+ "transformers_version": "4.39.0"
7
+ }
global_step42881/layer_0_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5f9334720cb66bb8edae70792d157e6e6bbeba2a0c4ad189bfe6837d29d8c626
3
+ size 69208315
global_step42881/layer_0_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:14d5aa5e2db8fe12b6a3e3b7f2b9d3b8aadc4f314cfad3988f2989973a030495
3
+ size 69208315
global_step42881/layer_0_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf7fddea22017a1c4efc8af528b7304c5ece0045c17db20fccde990fb63c648a
3
+ size 69208315
global_step42881/layer_0_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c207efa2d023042e3859864cd2eac2edce6555e4e79cb46b6c24adb8d8e3f0b3
3
+ size 69208315
global_step42881/layer_10_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fccb20750d77538e132e0665cce699e2a9ff09ffda40ff1eb5009f54e2178ae2
3
+ size 69208322
global_step42881/layer_10_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:75c72c0e12bd231d014b242482263ddf798a98fddcb31ab59080c86c85bf52e6
3
+ size 69208322
global_step42881/layer_10_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cf77f521c80c62cf9bcae6a670ac81792b8a056485b6d6bcea2f4169047dec78
3
+ size 69208322
global_step42881/layer_10_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:65ef14de307d63a8c2f800736caee7e0d478592e0294ec2378756c9ca3c5aca2
3
+ size 69208322
global_step42881/layer_11_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7bc11da963d58cf4258fb7a12507d21db61534b7a6ca9f99769afcf36efd6982
3
+ size 69208322
global_step42881/layer_11_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d6155c974492d363923d83762428b44e29620b5481d5247b2a9604d31593dccc
3
+ size 69208322
global_step42881/layer_11_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c010f792a5df1e4c183052a2026306e4a8582324f691ee199a8da70e82bd82ff
3
+ size 69208322
global_step42881/layer_11_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4925edf9301027256ca84d9e6abbeaff3123750c7a8a43134b8bdadbe9bc5179
3
+ size 69208322
global_step42881/layer_12_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9f7c7d838aff342c56745bad832b01cd8e80e54bc7511719da50734ec55eac0b
3
+ size 69208322
global_step42881/layer_12_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7c016ef7424a64903b3aba4e932f03f8c8d512e7c787f4f5cdb8db7df9a8db94
3
+ size 69208322
global_step42881/layer_12_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c360984aaaa3096ddf23303fa85fca6a323e78ccf02f6b6a6308a2ddbacfda98
3
+ size 69208322
global_step42881/layer_12_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:72abf67665416daf2a9853a67304e3802e77d823b8f7ea9b9ed515555226e995
3
+ size 69208322
global_step42881/layer_13_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a0afc36387a7336e5825be287a5229cc9c43c0566b65b4bae05e662db698fdae
3
+ size 69208322
global_step42881/layer_13_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:654f813074b2efd6bcb4b1f724d00ed0c26814a49eaa8a8b1f399dfa653ea03d
3
+ size 69208322
global_step42881/layer_13_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4f8f8c1322551827f36fd22ea3dffbfce17ffd6a9152440954e903741c2b8ff6
3
+ size 69208322
global_step42881/layer_13_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:338222fc4f05b6ded3e074216ff65d3bfe7f1abf2153e7559450d5503fc6d65f
3
+ size 69208322
global_step42881/layer_14_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fce3063158fe47437da08ecda9ec18fd755783964fa99225c97d8bf6f6a1d908
3
+ size 69208322
global_step42881/layer_14_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b732ff8112ad4664205ceacde240f4e0234959d2a4969bd90edc79ff42ffeb4
3
+ size 69208322
global_step42881/layer_14_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67ea53fbe3b9b1161ef29f419a9c60712a362911a722c9cb72932fbd13f7f8f0
3
+ size 69208322
global_step42881/layer_14_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c75999f108f897b5642c0dd90b0f869b7f9be306bd583b8395944e008295146b
3
+ size 69208322
global_step42881/layer_15_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8d9c4859174be3c44487b10d44253d04f116336ae3fc5330899569682cb6fb43
3
+ size 69208322
global_step42881/layer_15_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:05c26d0fd0b001872c537a059fed702039c6c9a0378404af769a52745a2bef7b
3
+ size 69208322
global_step42881/layer_15_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:038c735113cff2b55fc102e81a968566df32844a4b845c41fb8b96c531522c76
3
+ size 69208322
global_step42881/layer_15_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ad542b2d39a61cb8ff58b9de5ceb196aa1f71e084e1e849cb04ea0375f292db0
3
+ size 69208322
global_step42881/layer_16_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:901cebab41eeb993a66ebd4266baebb0546860afced1d9ed1f77415547185425
3
+ size 69208322
global_step42881/layer_16_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b4dbbc721c38e100be41b044d8fae0a4d91cea1176dce5f66b12398d235fca2c
3
+ size 69208322
global_step42881/layer_16_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:41ead3d1b9babfe071fb51f3115cadb7e843aaee3d4ab1015bcb33a9ac789c15
3
+ size 69208322
global_step42881/layer_16_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5574932be5c9851644df1627e9853c9694071b5f2c21fabbb306d01207a858e0
3
+ size 69208322
global_step42881/layer_17_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b26a37aefd9d022485c77d6554978e761884bdcb43e2a26419fa8460d66d74bd
3
+ size 69208322
global_step42881/layer_17_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:92fd05577db1e58912ac6997c2cf3f6f6a221f58df5f96e00d702351322ab860
3
+ size 69208322
global_step42881/layer_17_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b400b11af8709dc291516d9c0d262b946b450f0cfbe914d280a263b5e9e7914c
3
+ size 69208322
global_step42881/layer_17_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1ee0b0c5d2c2d651b51c57549b21a6ea49044bf35971db24920d00410ca4fcbd
3
+ size 69208322
global_step42881/layer_18_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cdc5f0fe1647ca825d6988478802b3501d073be328d78a1522ccf277c091356e
3
+ size 69208322
global_step42881/layer_18_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:84fff54421c19f82f1429f4a00bf4ff3d95fc0bfab8b943dc66a0de674cf3dbd
3
+ size 69208322
global_step42881/layer_18_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7ebd64076ba54cf054d3ad5bdc2972e245a0181bbe48326c2b047439c0884e68
3
+ size 69208322
global_step42881/layer_18_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a4d5846a0f1ad01ddc6494edac21df741725011234c506a5dbd7b315dc0b2374
3
+ size 69208322
global_step42881/layer_19_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ef0e54bd119089ffa03847710707047ee47bf2359e328f0a3e19f3e196aba527
3
+ size 69208322
global_step42881/layer_19_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cd4f8c23a6c8ed6316847bbf3b5328406ec518181d8e70ea6f130efef0d22e88
3
+ size 69208322
global_step42881/layer_19_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:32990e1b9eb56847fc6a169dc36aad6b432d1ec6b61e497d7c70bf4a1293e9a3
3
+ size 69208322
global_step42881/layer_19_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f3aa166dd16d7d05cfcf65e354e15c1ad60b10d04fa549cc6ac56b88b96d21b3
3
+ size 69208322
global_step42881/layer_1_expert_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2281a926c4358d4ac1277acdd7a7c2e831127bb7cdb37b7c85855f438b561222
3
+ size 69208315
global_step42881/layer_1_expert_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0b6237f9b8bca602afbcf97a01215f462aeb639a2e2d2c10fcf89bbc48a5f891
3
+ size 69208315
global_step42881/layer_1_expert_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f788e8d4e86beb03b9736250c0f2894d76fcc5d1f242b05641730be5affa7fd8
3
+ size 69208315
global_step42881/layer_1_expert_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf02ac780ab9243c6fb49cf777b3bd0d53d739909b0684b4eb3a5e976ba80da3
3
+ size 69208315