{ "architectures": [ "SmalLmForCausalLM" ], "attention_bias": false, "attention_dropout": 0.1, "auto_map": { "AutoConfig": "config.SmalLmConfig", "AutoModelForCausalLM": "model.SmalLmForCausalLM" }, "balancing_coef": 0.0001, "bos_token_id": 1, "embedding_dropout": 0.0, "eos_token_id": 0, "expert_size": 576, "gate_noise": false, "head_size": 64, "hidden_size": 512, "high_rotations": 32, "initializer_range": 0.02, "intermediate_size": 1024, "layer_dropout": 0.1, "low_rotations": 1, "max_seq_len": 2048, "mlp_bias": false, "model_type": "smallm", "moe_bias": false, "moe_period": 2, "no_moe_layers": 10, "noisy_experts": false, "num_attention_heads": 8, "num_hidden_layers": 20, "num_kv_heads": 2, "original_seq_len": 1024, "pad_token_id": 0, "positional_bias_type": "rope", "rms_affine": false, "rms_norm_eps": 1e-06, "rope_base": 100000, "routed_experts": 8, "shared_experts": 1, "sliding_window_attention": true, "sliding_window_context": 1024, "sliding_window_period": 4, "static_residual": true, "token_experts": 3, "torch_dtype": "float32", "transformers_version": "4.50.3", "use_cache": true, "use_moe": false, "vocab_size": 60000 }