Upload checkpoints/gptoss_phase1_config.json with huggingface_hub
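(For reference: a commit with this message is typically produced by huggingface_hub's upload_file, which generates exactly this default commit message. A minimal sketch of the call, assuming a hypothetical repo_id not shown on this page:)

from huggingface_hub import HfApi

api = HfApi()  # uses the token saved by `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="checkpoints/gptoss_phase1_config.json",  # local file to upload
    path_in_repo="checkpoints/gptoss_phase1_config.json",     # destination path in the repo
    repo_id="your-username/adamba-moe",                       # hypothetical repo_id
    repo_type="model",
)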
checkpoints/gptoss_phase1_config.json
ADDED
@@ -0,0 +1,16 @@
+{
+  "variant": "gptoss_phase1",
+  "model_type": "adamba-moe",
+  "architecture": "HybridMoEGPT (Attention + MoE + Mamba)",
+  "base_model": "gpt-oss-20b",
+  "parameters": "21.9B",
+  "n_embd": 2880,
+  "features": [
+    "mamba_integration",
+    "moe_32experts"
+  ],
+  "n_layers": 36,
+  "vocab_size": 201088,
+  "num_experts": 32,
+  "experts_per_token": 4
+}
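(Downstream, the config can be fetched and parsed with hf_hub_download; a minimal sketch, again with a hypothetical repo_id:)

import json

from huggingface_hub import hf_hub_download

# Download just this one file (cached locally by huggingface_hub).
config_path = hf_hub_download(
    repo_id="your-username/adamba-moe",  # hypothetical repo_id
    filename="checkpoints/gptoss_phase1_config.json",
)

with open(config_path) as f:
    config = json.load(f)

# e.g. 32 experts, 4 routed per token, 36 hybrid layers
print(config["num_experts"], config["experts_per_token"], config["n_layers"])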
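(On the fields themselves: "num_experts": 32 with "experts_per_token": 4 describes top-4 routing over 32 experts per MoE layer. A minimal sketch of what such routing typically looks like, using n_embd = 2880 from the config; this is an illustration of the standard top-k pattern, not the model's actual code:)

import torch

def route_tokens(hidden, router_weight, experts_per_token=4):
    # hidden: (tokens, n_embd); router_weight: (num_experts, n_embd)
    logits = hidden @ router_weight.T                      # (tokens, num_experts)
    topk_vals, topk_idx = logits.topk(experts_per_token, dim=-1)
    gates = torch.softmax(topk_vals, dim=-1)               # normalize over the 4 chosen experts
    return topk_idx, gates

tokens = torch.randn(10, 2880)         # n_embd = 2880 from the config
w = torch.randn(32, 2880)              # one router row per expert (num_experts = 32)
idx, gates = route_tokens(tokens, w)   # idx: (10, 4) expert ids; gates: (10, 4) mixing weights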