{
  "metadata": {
    "model_type": "gemma2",
    "dtype": "bfloat16",
    "num_layers": 26,
    "num_heads": 8,
    "vocab_size": 128256,
    "max_position_embeddings": 8192
  },
  "weight_map": {
    "model.safetensors": [
      "transformer.*"
    ]
  }
}