{
  "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
  "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
  "width": 262144,
  "model_name": "google/gemma-3-4b-it",
  "architecture": "jump_relu",
  "l0": 60,
  "affine_connection": false,
  "type": "transcoder"
}