{
"hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
"hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
"width": 16384,
"model_name": "google/gemma-3-4b-it",
"architecture": "jump_relu",
"l0": 60,
"affine_connection": false,
"type": "transcoder"
}