{
  "model_name": "gemma4-e2b-stateless",
  "architecture": "gemma4",
  "hidden_size": 1536,
  "num_hidden_layers": 35,
  "context_length": 512,
  "vocab_size": 262144,
  "bos_token_id": 2,
  "eos_token_id": 1,
  "per_layer_dim": 256,
  "max_head_dim": 512,
  "embed_scale": 39.191835884530846,
  "per_layer_model_projection_scale": 0.02551551815399144,
  "per_layer_input_scale": 0.7071067811865476,
  "per_layer_embed_scale": 16.0,
  "external_embeddings": true,
  "has_multimodal": true,
  "stateless": true,
  "num_chunks": 4,
  "precomputed_rope": true,
  "chunk_layer_ranges": [
    [
      0,
      8
    ],
    [
      8,
      15
    ],
    [
      15,
      25
    ],
    [
      25,
      35
    ]
  ]
}