{
  "architectures": [
    "SABERForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_saber.SABERConfig",
    "AutoModelForCausalLM": "modeling_saber.SABERForCausalLM"
  },
  "curiosity_coeff": 0.01,
  "d_anchor": 96,
  "d_exp": 192,
  "d_ff": 2164,
  "d_model": 1536,
  "dtype": "float32",
  "enable_anchors": true,
  "enable_experience": true,
  "gradient_checkpointing": false,
  "head_dim": 128,
  "initializer_range": 0.02,
  "max_position_embeddings": 2048,
  "model_type": "saber",
  "n_anchors": 64,
  "n_heads": 12,
  "n_layers": 20,
  "predictability_mode": false,
  "resonant_alpha_init": 3.0,
  "resonant_layers": [0, 2, 4, 6, 8, 10, 12, 14, 16, 18],
  "rms_norm_eps": 1e-06,
  "rope_theta": 10000.0,
  "tie_word_embeddings": true,
  "transformers_version": "5.3.0",
  "use_cache": true,
  "vocab_size": 50257
}
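
Because `auto_map` routes `AutoConfig` and `AutoModelForCausalLM` to the bundled `configuration_saber.py` and `modeling_saber.py`, loading this config requires `trust_remote_code=True`. Below is a minimal loading sketch, not a documented workflow: `"path/to/saber"` is a placeholder for the repo ID or local directory containing this config alongside the two custom modules.

```python
from transformers import AutoConfig, AutoModelForCausalLM

# trust_remote_code=True is needed because auto_map points to the custom
# SABERConfig / SABERForCausalLM classes shipped with the checkpoint.
config = AutoConfig.from_pretrained("path/to/saber", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("path/to/saber", trust_remote_code=True)

# Sanity checks derived from the values in the config above.
assert config.model_type == "saber"
assert config.n_heads * config.head_dim == config.d_model  # 12 * 128 == 1536
```

Note that the per-head dimensions are consistent as configured: 12 heads of width 128 exactly cover the 1536-dimensional model width.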