Bturtel committed
Commit 880a7e0 · verified · 1 Parent(s): d78aaa2

Upload config.json with huggingface_hub

Files changed (1):
  1. config.json +12 -5
config.json CHANGED

@@ -4,8 +4,6 @@
   ],
   "attention_bias": true,
   "attention_dropout": 0.0,
-  "bos_token_id": null,
-  "dtype": "bfloat16",
   "eos_token_id": 200002,
   "experts_per_token": 4,
   "head_dim": 64,
@@ -61,21 +59,30 @@
   "num_local_experts": 128,
   "output_router_logits": false,
   "pad_token_id": 199999,
+  "quantization_config": {
+    "modules_to_not_convert": [
+      "model.layers.*.self_attn",
+      "model.layers.*.mlp.router",
+      "model.embed_tokens",
+      "lm_head"
+    ],
+    "quant_method": "mxfp4"
+  },
   "rms_norm_eps": 1e-05,
-  "rope_parameters": {
+  "rope_scaling": {
     "beta_fast": 32.0,
     "beta_slow": 1.0,
     "factor": 32.0,
     "original_max_position_embeddings": 4096,
-    "rope_theta": 150000,
     "rope_type": "yarn",
     "truncate": false
   },
+  "rope_theta": 150000,
   "router_aux_loss_coef": 0.9,
   "sliding_window": 128,
   "swiglu_limit": 7.0,
   "tie_word_embeddings": false,
-  "transformers_version": "5.1.0",
+  "transformers_version": "4.55.0.dev0",
   "use_cache": true,
   "vocab_size": 201088
 }
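For reference, a minimal sketch of sanity-checking the updated file (it assumes only a local copy of config.json; nothing here comes from the model code itself). It reads the two reworked sections: the MXFP4 quantization_config, which leaves the listed attention, router, embedding, and LM-head modules unconverted (so in this MoE model it is effectively the expert weights that get quantized), and the YaRN rope_scaling, whose factor of 32 over a 4096-token original window works out to a 131072-token effective context.

    import json

    # A minimal sketch, assuming a local copy of the updated config.json.
    with open("config.json") as f:
        cfg = json.load(f)

    # MXFP4 quantization skips the listed modules
    # (attention, router, embeddings, LM head).
    quant = cfg["quantization_config"]
    assert quant["quant_method"] == "mxfp4"
    print(quant["modules_to_not_convert"])

    # YaRN rope scaling: effective context = factor * original window.
    rope = cfg["rope_scaling"]
    assert rope["rope_type"] == "yarn"
    print(int(rope["factor"] * rope["original_max_position_embeddings"]))  # 131072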