{
"hf_hook_point_in": "model.layers.22.output",
"hf_hook_point_out": "model.layers.22.output",
"width": 65536,
"model_name": "google/gemma-3-4b-pt",
"architecture": "jump_relu",
"l0": 60,
"affine_connection": false,
"type": "sae"
}