Upload config.json with huggingface_hub
config.json CHANGED (+3 -11)
@@ -59,15 +59,6 @@
   "num_local_experts": 128,
   "output_router_logits": false,
   "pad_token_id": 199999,
-  "quantization_config": {
-    "modules_to_not_convert": [
-      "model.layers.*.self_attn",
-      "model.layers.*.mlp.router",
-      "model.embed_tokens",
-      "lm_head"
-    ],
-    "quant_method": "mxfp4"
-  },
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
     "beta_fast": 32.0,
@@ -84,5 +75,6 @@
   "tie_word_embeddings": false,
   "transformers_version": "4.55.0.dev0",
   "use_cache": true,
-  "vocab_size": 201088
-}
+  "vocab_size": 201088,
+  "torch_dtype": "bfloat16"
+}
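The commit drops the mxfp4 "quantization_config" block and records "torch_dtype": "bfloat16" instead. A change like this can be produced and pushed with a short huggingface_hub script. The sketch below is a minimal illustration, not the uploader's actual code; the repo id "your-org/your-model" is a hypothetical placeholder, since the diff does not name the repository.

import json

from huggingface_hub import HfApi, hf_hub_download

# Hypothetical repository id; the commit above does not name the repo.
REPO_ID = "your-org/your-model"

# Fetch the current config.json from the Hub and parse it.
path = hf_hub_download(repo_id=REPO_ID, filename="config.json")
with open(path) as f:
    config = json.load(f)

# Mirror the edit in the diff: remove the mxfp4 quantization block
# and record the unquantized dtype.
config.pop("quantization_config", None)
config["torch_dtype"] = "bfloat16"

# Re-upload the edited file; this creates a new commit on the repo.
api = HfApi()
api.upload_file(
    path_or_fileobj=json.dumps(config, indent=2).encode(),
    path_in_repo="config.json",
    repo_id=REPO_ID,
    commit_message="Upload config.json with huggingface_hub",
)

The commit title "Upload config.json with huggingface_hub" matches the default message huggingface_hub generates for upload_file when none is given, so passing commit_message here is optional.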