cr0sh committed
Commit 02fc7fa · verified · 1 parent: 07858be

Upload config.json with huggingface_hub
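
Per the commit message, the file was pushed with the huggingface_hub client. A minimal sketch of what such an upload typically looks like, assuming an authenticated environment; the repo id below is hypothetical:

# Minimal upload sketch using huggingface_hub (repo id is hypothetical).
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from HF_TOKEN or a prior login
api.upload_file(
    path_or_fileobj="config.json",   # local file to upload
    path_in_repo="config.json",      # destination path inside the repo
    repo_id="cr0sh/afmoe-2b-8bit",   # hypothetical repo id
    commit_message="Upload config.json with huggingface_hub",
)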

Files changed (1):
1. config.json (+330 -0)
config.json ADDED
@@ -0,0 +1,330 @@
+ {
+   "architectures": [
+     "AfmoeForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_afmoe.AfmoeConfig",
+     "AutoModel": "modeling_afmoe.AfmoeModel",
+     "AutoModelForCausalLM": "modeling_afmoe.AfmoeForCausalLM"
+   },
+   "dtype": "bfloat16",
+   "global_attn_every_n_layers": 4,
+   "head_dim": 128,
+   "hidden_act": "silu",
+   "hidden_size": 2048,
+   "initializer_range": 0.02,
+   "intermediate_size": 6144,
+   "layer_types": [
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention"
+   ],
+   "load_balance_coeff": 0.001,
+   "max_position_embeddings": 131072,
+   "model_type": "afmoe",
+   "moe_intermediate_size": 1024,
+   "mup_enabled": true,
+   "n_group": 1,
+   "num_attention_heads": 32,
+   "num_dense_layers": 2,
+   "num_expert_groups": 1,
+   "num_experts": 128,
+   "num_experts_per_tok": 8,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 4,
+   "num_limited_groups": 1,
+   "num_shared_experts": 1,
+   "quantization": {
+     "group_size": 64,
+     "bits": 8,
+     "mode": "affine",
+     "model.layers.2.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.3.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.4.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.5.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.6.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.7.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.8.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.9.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.10.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.11.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.12.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.13.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.14.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.15.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.16.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.17.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.18.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.19.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.20.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.21.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.22.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.23.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.24.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.25.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.26.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.27.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.28.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.29.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.30.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.31.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     }
+   },
+   "quantization_config": {
+     "group_size": 64,
+     "bits": 8,
+     "mode": "affine",
+     "model.layers.2.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.3.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.4.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.5.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.6.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.7.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.8.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.9.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.10.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.11.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.12.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.13.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.14.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.15.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.16.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.17.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.18.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.19.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.20.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.21.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.22.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.23.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.24.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.25.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.26.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.27.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.28.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.29.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.30.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     },
+     "model.layers.31.mlp.router.gate": {
+       "group_size": 64,
+       "bits": 8
+     }
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000,
+   "route_norm": true,
+   "route_scale": 2.826,
+   "score_func": "sigmoid",
+   "sliding_window": 2048,
+   "tie_word_embeddings": false,
+   "topk_group": 1,
+   "transformers_version": "4.57.3",
+   "use_cache": true,
+   "use_grouped_mm": true,
+   "vocab_size": 200192
+ }
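
Two notes on the uploaded config. First, the "layer_types" list is simply the expansion of "global_attn_every_n_layers": 4 over the 32 hidden layers: every fourth layer uses full attention and the rest use sliding-window attention (window 2048). A quick sketch of that equivalence:

# "layer_types" follows from "global_attn_every_n_layers": 4 —
# every fourth layer is full attention, the others sliding-window.
layer_types = ["full_attention" if (i + 1) % 4 == 0 else "sliding_attention"
               for i in range(32)]

Second, because "auto_map" points at custom configuration/modeling files, loading this config through transformers requires trust_remote_code=True. A minimal sketch, assuming the repo also ships configuration_afmoe.py and modeling_afmoe.py; the repo id is hypothetical:

from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "cr0sh/afmoe-2b-8bit",   # hypothetical repo id
    trust_remote_code=True,  # "afmoe" is defined by the repo's custom code
)
assert config.num_experts == 128 and config.num_experts_per_tok == 8

The duplicated "quantization" / "quantization_config" blocks (8-bit affine quantization, group size 64) look characteristic of MLX-converted checkpoints, which write both keys for compatibility.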