{
  "hf_hook_point_in": "model.layers.{all}.pre_feedforward_layernorm.output",
  "hf_hook_point_out": "model.layers.{all}.post_feedforward_layernorm.output",
  "width": 524160,
  "model_name": "google/gemma-3-1b-it",
  "architecture": "jump_relu",
  "l0": 50,
  "affine_connection": false,
  "type": "clt"
}