apandacoding committed
Commit a463d0f · verified · 1 parent: 9e115cc

Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes.

Files changed (50)
  1. chat_template.jinja +80 -0
  2. config.json +356 -0
  3. configuration_step3p5.py +59 -0
  4. generation_config.json +10 -0
  5. hf_quant_config.json +15 -0
  6. model-00001-of-00126.safetensors +3 -0
  7. model-00002-of-00126.safetensors +3 -0
  8. model-00003-of-00126.safetensors +3 -0
  9. model-00004-of-00126.safetensors +3 -0
  10. model-00005-of-00126.safetensors +3 -0
  11. model-00006-of-00126.safetensors +3 -0
  12. model-00007-of-00126.safetensors +3 -0
  13. model-00008-of-00126.safetensors +3 -0
  14. model-00009-of-00126.safetensors +3 -0
  15. model-00010-of-00126.safetensors +3 -0
  16. model-00011-of-00126.safetensors +3 -0
  17. model-00012-of-00126.safetensors +3 -0
  18. model-00013-of-00126.safetensors +3 -0
  19. model-00014-of-00126.safetensors +3 -0
  20. model-00015-of-00126.safetensors +3 -0
  21. model-00016-of-00126.safetensors +3 -0
  22. model-00017-of-00126.safetensors +3 -0
  23. model-00018-of-00126.safetensors +3 -0
  24. model-00019-of-00126.safetensors +3 -0
  25. model-00020-of-00126.safetensors +3 -0
  26. model-00021-of-00126.safetensors +3 -0
  27. model-00022-of-00126.safetensors +3 -0
  28. model-00023-of-00126.safetensors +3 -0
  29. model-00024-of-00126.safetensors +3 -0
  30. model-00025-of-00126.safetensors +3 -0
  31. model-00026-of-00126.safetensors +3 -0
  32. model-00027-of-00126.safetensors +3 -0
  33. model-00028-of-00126.safetensors +3 -0
  34. model-00029-of-00126.safetensors +3 -0
  35. model-00030-of-00126.safetensors +3 -0
  36. model-00031-of-00126.safetensors +3 -0
  37. model-00032-of-00126.safetensors +3 -0
  38. model-00033-of-00126.safetensors +3 -0
  39. model-00034-of-00126.safetensors +3 -0
  40. model-00035-of-00126.safetensors +3 -0
  41. model-00036-of-00126.safetensors +3 -0
  42. model-00037-of-00126.safetensors +3 -0
  43. model-00038-of-00126.safetensors +3 -0
  44. model-00039-of-00126.safetensors +3 -0
  45. model-00040-of-00126.safetensors +3 -0
  46. model-00041-of-00126.safetensors +3 -0
  47. model-00042-of-00126.safetensors +3 -0
  48. model-00043-of-00126.safetensors +3 -0
  49. model-00044-of-00126.safetensors +3 -0
  50. model-00045-of-00126.safetensors +3 -0
chat_template.jinja ADDED
@@ -0,0 +1,80 @@
+ {% macro render_content(content) %}{% if content is none %}{{- '' }}{% elif content is string %}{{- content }}{% elif content is mapping %}{{- content['value'] if 'value' in content else content['text'] }}{% elif content is iterable %}{% for item in content %}{% if item.type == 'text' %}{{- item['value'] if 'value' in item else item['text'] }}{% elif item.type == 'image' %}<im_patch>{% endif %}{% endfor %}{% endif %}{% endmacro %}
+ {{bos_token}}{%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0].role == 'system' %}
+ {{- render_content(messages[0].content) + '\n\n' }}
+ {%- endif %}
+ {{- "# Tools\n\nYou have access to the following functions in JSONSchema format:\n\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson(ensure_ascii=False) }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nIf you choose to call a function ONLY reply in the following format with NO suffix:\n\n<tool_call>\n<function=example_function_name>\n<parameter=example_parameter_1>\nvalue_1\n</parameter>\n<parameter=example_parameter_2>\nThis is the value for the second parameter\nthat can span\nmultiple lines\n</parameter>\n</function>\n</tool_call>\n\n<IMPORTANT>\nReminder:\n- Function calls MUST follow the specified format: an inner <function=...>\n...\n</function> block must be nested within <tool_call>\n...\n</tool_call> XML tags\n- Required parameters MUST be specified\n</IMPORTANT><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0].role == 'system' %}
+ {{- '<|im_start|>system\n' + render_content(messages[0].content) + '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
+ {%- for message in messages[::-1] %}
+ {%- set index = (messages|length - 1) - loop.index0 %}
+ {%- if ns.multi_step_tool and message.role == "user" and render_content(message.content) is string and not(render_content(message.content).startswith('<tool_response>') and render_content(message.content).endswith('</tool_response>')) %}
+ {%- set ns.multi_step_tool = false %}
+ {%- set ns.last_query_index = index %}
+ {%- endif %}
+ {%- endfor %}
+ {%- for message in messages %}
+ {%- set content = render_content(message.content) %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+ {%- set role_name = 'observation' if (message.role == "system" and not loop.first and message.name == 'observation') else message.role %}
+ {{- '<|im_start|>' + role_name + '\n' + content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {%- if message.reasoning_content is string %}
+ {%- set reasoning_content = render_content(message.reasoning_content) %}
+ {%- else %}
+ {%- if '</think>' in content %}
+ {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+ {%- set content = content.split('</think>')[-1].lstrip('\n') %}
+ {%- else %}
+ {%- set reasoning_content = '' %}
+ {%- endif %}
+ {%- endif %}
+ {%- if loop.index0 > ns.last_query_index %}
+ {{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content + '\n</think>\n' + content }}
+ {%- else %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- endif %}
+ {%- if message.tool_calls %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if tool_call.function is defined %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '<tool_call>\n<function=' + tool_call.name + '>\n' }}
+ {%- if tool_call.arguments is defined %}
+ {%- set arguments = tool_call.arguments %}
+ {%- for args_name, args_value in arguments|items %}
+ {{- '<parameter=' + args_name + '>\n' }}
+ {%- set args_value = args_value | tojson(ensure_ascii=False) | safe if args_value is mapping or (args_value is sequence and args_value is not string) else args_value | string %}
+ {{- args_value }}
+ {{- '\n</parameter>\n' }}
+ {%- endfor %}
+ {%- endif %}
+ {{- '</function>\n</tool_call>' }}
+ {%- endfor %}
+ {%- endif %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>tool_response\n' }}
+ {%- endif %}
+ {{- '<tool_response>' }}
+ {{- content }}
+ {{- '</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n<think>\n' }}
+ {%- endif %}
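
The template above wraps every turn in <|im_start|>/<|im_end|> markers, injects a <tools> block plus the <tool_call>/<function=...> calling convention when tools are supplied, recovers or re-emits <think> reasoning depending on the turn, and opens "<|im_start|>assistant\n<think>\n" when a generation prompt is requested. A minimal rendering sketch with transformers follows; the local path, the get_weather tool, and the messages are illustrative assumptions, not part of this upload.

# Hedged sketch: rendering this chat template via transformers' apply_chat_template.
# "./" stands in for wherever this repo is downloaded; the tool and messages are made up.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./", trust_remote_code=True)

tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Look up the current weather for a city",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What's the weather in Paris?"},
]

# tokenize=False returns the rendered prompt string; with add_generation_prompt=True
# the template ends with "<|im_start|>assistant\n<think>\n".
prompt = tokenizer.apply_chat_template(
    messages, tools=tools, add_generation_prompt=True, tokenize=False
)
print(prompt)
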
config.json ADDED
@@ -0,0 +1,356 @@
+ {
+ "architectures": [
+ "Step3p5ForCausalLM"
+ ],
+ "att_impl_type": "GQA",
+ "attention_other_setting": {
+ "attention_type": "sliding_attention",
+ "head_dim": 128,
+ "num_attention_groups": 8,
+ "num_attention_heads": 96,
+ "true_head_dim": 128
+ },
+ "auto_map": {
+ "AutoConfig": "configuration_step3p5.Step3p5Config",
+ "AutoModelForCausalLM": "modeling_step3p5.Step3p5ForCausalLM"
+ },
+ "bos_token_id": 0,
+ "dtype": "bfloat16",
+ "eos_token_id": [
+ 1,
+ 2,
+ 128007
+ ],
+ "head_dim": 128,
+ "hidden_size": 4096,
+ "intermediate_size": 11264,
+ "layer_types": [
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention"
+ ],
+ "max_position_embeddings": 262144,
+ "max_seq_len": 262144,
+ "model_type": "step3p5",
+ "moe_every_n_layer": 1,
+ "moe_intermediate_size": 1280,
+ "moe_layer_offset": 0,
+ "moe_layers_enum": "3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44",
+ "moe_num_experts": 288,
+ "moe_router_activation": "sigmoid",
+ "moe_router_scaling_factor": 3.0,
+ "moe_top_k": 8,
+ "need_fp32_gate": true,
+ "norm_expert_weight": true,
+ "num_attention_groups": 8,
+ "num_attention_heads": 64,
+ "num_hidden_layers": 45,
+ "num_nextn_predict_layers": 3,
+ "partial_rotary_factor": 0.5,
+ "partial_rotary_factors": [
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0,
+ 0.5,
+ 1.0,
+ 1.0,
+ 1.0
+ ],
+ "rms_norm_eps": 1e-05,
+ "rope_parameters": {
+ "factor": 2.0,
+ "high_freq_factor": 32.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 131072,
+ "rope_type": "llama3"
+ },
+ "rope_scaling": {
+ "factor": 2.0,
+ "high_freq_factor": 32.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 131072,
+ "rope_type": "llama3"
+ },
+ "rope_theta": [
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0,
+ 5000000.0,
+ 10000.0,
+ 10000.0,
+ 10000.0
+ ],
+ "share_expert_dim": 1280,
+ "sink": false,
+ "sliding_window": 512,
+ "swiglu_limits": [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ "swiglu_limits_shared": [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 16,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ "transformers_version": "4.57.6",
+ "use_cache": false,
+ "use_head_wise_attn_gate": true,
+ "use_moe": true,
+ "use_moe_router_bias": true,
+ "use_qk_norm": true,
+ "use_rope_layers": [],
+ "vocab_size": 128896,
+ "yarn_only_types": [
+ "full_attention"
+ ],
+ "zero_centered": true,
+ "quantization_config": {
+ "config_groups": {
+ "group_0": {
+ "input_activations": {
+ "dynamic": false,
+ "num_bits": 4,
+ "type": "float",
+ "group_size": 16
+ },
+ "weights": {
+ "dynamic": false,
+ "num_bits": 4,
+ "type": "float",
+ "group_size": 16
+ },
+ "targets": [
+ "Linear"
+ ]
+ }
+ },
+ "ignore": [
+ "lm_head",
+ "*.moe.gate*"
+ ],
+ "quant_algo": "NVFP4",
+ "producer": {
+ "name": "modelopt",
+ "version": "0.41.0"
+ },
+ "quant_method": "modelopt"
+ }
+ }
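
config.json above describes a 45-layer mixture-of-experts decoder (288 routed experts, top-8 routing, plus a shared expert) that interleaves one full-attention layer with three sliding-window layers, and it carries an NVFP4 quantization_config produced by ModelOpt. The custom classes are resolved through auto_map, so loading needs trust_remote_code. A small inspection sketch, assuming the repo has been downloaded to the working directory:

# Hedged sketch: loading the config above and printing a few fields. "./" is a
# placeholder for a local download of this repo; trust_remote_code is required
# because auto_map points at configuration_step3p5.Step3p5Config.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("./", trust_remote_code=True)

print(cfg.model_type)                      # "step3p5"
print(cfg.num_hidden_layers)               # 45
print(cfg.moe_num_experts, cfg.moe_top_k)  # 288 8
# layer_types interleaves one full-attention layer with three sliding-window layers.
print(cfg.layer_types.count("full_attention"),
      cfg.layer_types.count("sliding_attention"))  # 12 36
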
configuration_step3p5.py ADDED
@@ -0,0 +1,59 @@
+ from typing import Any, Optional, Union
+
+ from transformers.configuration_utils import PretrainedConfig
+
+
+
+ class Step3p5Config(PretrainedConfig):
+     model_type = "step3p5"
+     architectures = ["Step3p5ForCausalLM"]
+
+     def __init__(
+         self,
+         hidden_size: int = 4096,
+         intermediate_size: int = 11264,
+         num_attention_heads: int = 64,
+         num_attention_groups: int = 8,
+         num_hidden_layers: int = 45,
+         max_seq_len: int = 128000,
+         vocab_size: int = 128815,
+         rms_norm_eps: float = 1e-5,
+         moe_intermediate_size: int = 1280,
+         moe_num_experts: int = 288,
+         moe_top_k: int = 8,
+         rope_theta: float = 10000,
+         rope_scaling: Optional[dict[str, Any]] = None,
+         max_position_embeddings: int = 128000,
+         share_expert_dims: int = 1280,
+         head_dim: int = 128,
+         norm_expert_weight: bool = True,
+         layer_types: list[str] = None,
+         sliding_window: Optional[int] = None,
+         moe_layers_enum: tuple[int] = (3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+                                        15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
+                                        25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+                                        35, 36, 37, 38, 39, 40, 41, 42, 43, 44),
+         **kwargs,
+     ) -> None:
+         self.hidden_size = hidden_size
+         self.intermediate_size = intermediate_size
+         self.num_attention_heads = num_attention_heads
+         self.num_attention_groups = num_attention_groups
+         self.num_hidden_layers = num_hidden_layers
+         self.max_seq_len = max_seq_len
+         self.vocab_size = vocab_size
+         self.rms_norm_eps = rms_norm_eps
+         self.moe_intermediate_size = moe_intermediate_size
+         self.moe_num_experts = moe_num_experts
+         self.moe_top_k = moe_top_k
+         self.rope_theta = rope_theta
+         self.rope_scaling = rope_scaling
+         self.max_position_embeddings = max_position_embeddings
+         self.share_expert_dim = share_expert_dims
+         self.head_dim = head_dim
+         self.norm_expert_weight = norm_expert_weight
+         self.moe_layers_enum = moe_layers_enum
+         self.layer_types = layer_types
+         self.sliding_window = sliding_window
+         super().__init__(**kwargs)
+
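
Step3p5Config subclasses PretrainedConfig, so on top of the Step3.5-specific fields it inherits the usual save_pretrained/from_pretrained round trip. A usage sketch, assumed to run from a checkout of this repo; the output directory name is an arbitrary example:

# Hedged sketch: instantiating Step3p5Config directly and round-tripping it
# through PretrainedConfig's serialization helpers.
from configuration_step3p5 import Step3p5Config

config = Step3p5Config(num_hidden_layers=45, sliding_window=512)
print(config.model_type)        # "step3p5"
print(config.moe_layers_enum)   # (3, 4, ..., 44) by default

config.save_pretrained("./step3p5-config-demo")  # writes config.json
reloaded = Step3p5Config.from_pretrained("./step3p5-config-demo")
assert reloaded.sliding_window == 512
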
generation_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 0,
+ "eos_token_id": [
+ 1,
+ 2,
+ 128007
+ ],
+ "transformers_version": "4.57.6"
+ }
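
The generation defaults only pin the special token ids: generation starts from bos_token_id 0 and stops at any of the three eos ids. A quick check, again assuming a local download of this repo:

# Hedged sketch: reading the generation defaults above; "./" is a placeholder path.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("./")
print(gen_cfg.bos_token_id)   # 0
print(gen_cfg.eos_token_id)   # [1, 2, 128007], any of which ends generation
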
hf_quant_config.json ADDED
@@ -0,0 +1,15 @@
+ {
+ "producer": {
+ "name": "modelopt",
+ "version": "0.41.0"
+ },
+ "quantization": {
+ "quant_algo": "NVFP4",
+ "kv_cache_quant_algo": null,
+ "group_size": 16,
+ "exclude_modules": [
+ "lm_head",
+ "*.moe.gate*"
+ ]
+ }
+ }
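
hf_quant_config.json repeats the quantization metadata from config.json in ModelOpt's standalone format: NVFP4 with group size 16, with lm_head and the MoE gates left unquantized. A sketch that reads the file and applies the exclude patterns with simple glob matching (whether the serving stack matches patterns exactly this way is an assumption, and the module names are made up):

# Hedged sketch: reading hf_quant_config.json and checking which module names the
# exclude patterns would skip, using fnmatch-style globbing.
import fnmatch
import json

with open("hf_quant_config.json") as f:
    quant = json.load(f)["quantization"]

print(quant["quant_algo"], quant["group_size"])  # NVFP4 16

def is_excluded(module_name: str) -> bool:
    return any(fnmatch.fnmatch(module_name, pat) for pat in quant["exclude_modules"])

print(is_excluded("lm_head"))                           # True
print(is_excluded("model.layers.10.moe.gate"))          # True, matches "*.moe.gate*"
print(is_excluded("model.layers.10.moe.experts.0.w1"))  # False
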
model-00001-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:874ee5d6d1479d49ee36eb68cf6a87f9a24726b61b19590264e6ea9a944e87c8
+ size 2368563096
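
Each of the 126 safetensors shards is stored through Git LFS, so the diff records only a pointer: the spec version, the shard's sha256, and its size in bytes (most shards here are around 0.85-0.92 GB, with the first at roughly 2.4 GB). A verification sketch for a downloaded shard; the .pointer filename is an assumption for wherever the pointer text is saved locally:

# Hedged sketch: verifying a downloaded shard against the LFS pointer shown above.
# Assumes both the pointer text and the real .safetensors file exist on disk.
import hashlib
import os

def parse_lfs_pointer(path: str) -> dict:
    # Pointer files are "key value" lines: version, oid sha256:<hex>, size <bytes>.
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

pointer = parse_lfs_pointer("model-00001-of-00126.safetensors.pointer")
shard = "model-00001-of-00126.safetensors"

assert os.path.getsize(shard) == int(pointer["size"])

h = hashlib.sha256()
with open(shard, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert "sha256:" + h.hexdigest() == pointer["oid"]
print("shard matches its LFS pointer")
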
model-00002-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9b2d2f7ef4761b0d13d9203abe281a8cd3b7054145945a7e91a95cd9c2771c1
+ size 849346992
model-00003-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ced4bfcf27c9de91d23a8654ca90fd05a0ed5dbe2d4061a443935f2882936ba
+ size 903190688
model-00004-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8605ce49c3dd288500ab06b3c907348202a205e971307f018ac2c277969af43
+ size 849346984
model-00005-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:27b7b7455b837df48770b0d1704fbe027d68fd0a452d21d0f51c07087774d19d
+ size 849346992
model-00006-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f217c8ae4fee011adb041b18ff9536282a28746b7de351c3e8208d09e157fda1
+ size 922138792
model-00007-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79c21b09618a71565b62e15d004c66f564cf08bd3a0d3431c10b86fca8baabfa
+ size 849346984
model-00008-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:183e17477cb4505c50762ba4223b51dd24e247f27922174ebd878d78de3b2527
+ size 849346992
model-00009-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:439ca625a70ace998d756bce23e67c752876bfd10d30676bd2cad5aa96da2b5a
+ size 922138792
model-00010-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4aecb1c3b86f2030fe6adb955b487903da35f8faf0bad7a02a2ecb2e380eb41b
+ size 849346984
model-00011-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e18339a8f15278552442f36e7571cd60684cdd473b74a003678b2675717d51ae
+ size 849346992
model-00012-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5110883af8d44861e293667248a0653c03a085fabcd08ee8a5a7291b0b08e2a8
+ size 922138792
model-00013-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79e46169087928283f8a61f24a22e553e54accac20550a9ae25dcae9f3f20c2b
+ size 849346984
model-00014-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70007715eadc7463d40c4a5c21b193e93a67775b495455da80a67c135d876fa6
+ size 849346992
model-00015-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:988adb9103a79076ec63ed326dcec4fc7d763dd2c5a5a69631dba9dd88285b43
+ size 903190688
model-00016-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00d5dde0fedac233cd268e444e6fea969e4de156151de7a323189fa758484e71
+ size 849346984
model-00017-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e9771e3d34a349ed49bc3fba5fa1603f4e33707748292cacf82fba40e5178d38
+ size 849346992
model-00018-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a88a1664311c2934dffdae101bedb93ac1a68a64fc9e0c7149ac42a124167872
+ size 922138792
model-00019-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:747b5a957d5a28e7f7087e5be9a35ab48e16312ebfcddd9e986ddd7b6206c0da
+ size 849346984
model-00020-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad3ae2d329c18c8bd1914a33cb5caca5e8f9eaa682dfe7ac7f33ead79d6157c4
+ size 849346992
model-00021-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89f3a0e1c611293e056942fe65c30ab67c848bc726683e1b5ad820a247f49139
+ size 922138848
model-00022-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f574ec69554ec01294b5c01ce20d6aac651d54c81bc58208f2e5a9a3b185167
+ size 849346992
model-00023-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:330bb2b7335113bf64a0d39f26abed072d810012a9889403e5938de3c8cc825b
+ size 849347000
model-00024-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a48605a000705d5c301832a02d0ede25b351395f943085d25d7d4db5ad041e2d
+ size 922138832
model-00025-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d5b29b4b383fef8b8b6ff0f089185d6a2bf2f379287ab898d7a3f52f8790d8a
+ size 849346992
model-00026-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:295f38ea333bf88a7141e2061504b59e93efd91bfc491697438c6465ca1aedb2
+ size 849347000
model-00027-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d575a99df40f8e7af8d167ef487300eecc322e1d59134898d01a6913f1683019
+ size 903190736
model-00028-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c540ad8e894d21342fe0778c4a131b4a387974948c83bcc7d388584be3e6289d
+ size 849346992
model-00029-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a9ed5460a37232bdd81d5554a8577e6aab414da381588b08ea9fd12cc0411c08
+ size 849347000
model-00030-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e3d5ad61cb13c34c09627a28a6efd8695ce639142408c3d711e5966ca9a190e
+ size 922138832
model-00031-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9cdc650d9584854cf41051d9e491846a417d0f7ad7bef1a44e94ab9c9c850e6
+ size 849346992
model-00032-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ede1e054d4e826195626552d60667144eb02389d99b00a87432d6bf87a206c6a
+ size 849347000
model-00033-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ef024149fdb69b4506bfb98ef68d8fecdb187ee23c1ac39813048cf76673d92
+ size 922138832
model-00034-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:88fa61a2f77e04a60c3aa0ffd990c91b4f81b554f321346d186c5905db8c1575
+ size 849346992
model-00035-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdd9bd5b9af4c77b94406442f86431394105cfed1848901a8fd167093b8e7b64
+ size 849347000
model-00036-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed77fbb74950814a52bd8aae0927a93962a58b2d5150e32996d677c2eb032eb9
+ size 922138832
model-00037-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:699fe46d553974822767af467392a63871716e1ad489fe68125864f8bb90d4c3
+ size 849346992
model-00038-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f9ed117cb8e578171427427856781a1d643f1b1162d33589133daab997f3580d
+ size 849347000
model-00039-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db3049b6228b1876dd00b253a02df39338b2f57ff2c1af155a3fe00af2c4db5f
+ size 903190736
model-00040-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7f99ff6b19aaff17188a733ab85a303a3d7a6b3486e0307329c7b173982a4c72
+ size 849346992
model-00041-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:48414886533ca6fbc6554ae5c237e20d4d90a4dbaad2336bae3d8cfb566fa694
+ size 849347000
model-00042-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85ab86ec06bd1e6af008d59210253800712795ebb47e5e0b31485098a74e511a
+ size 922138832
model-00043-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76c4ae20876d1e53d9c88ef84fcd2ce617a64338be3785c6e014c27e93f58b41
+ size 849346992
model-00044-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac41b98b7d1e49bbd0696c8a2c9770a1fc0fc4524878ca17afd82d294e9d16dd
+ size 849347000
model-00045-of-00126.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a30aa3015f3c47728c0c4bbf4e70294eb0e89eb4fa671914c1364316c8a389ae
+ size 922138832