Add files using upload-large-folder tool
- transcoder_all/layer_10_width_262k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_12_width_16k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_14_width_262k_l0_small/config.json +9 -0
- transcoder_all/layer_15_width_262k_l0_small/config.json +9 -0
- transcoder_all/layer_1_width_262k_l0_small_affine/config.json +9 -0
- transcoder_all/layer_21_width_262k_l0_small/config.json +9 -0
- transcoder_all/layer_22_width_16k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_23_width_262k_l0_big/config.json +9 -0
- transcoder_all/layer_27_width_262k_l0_small/config.json +9 -0
- transcoder_all/layer_28_width_16k_l0_small_affine/config.json +9 -0
- transcoder_all/layer_28_width_262k_l0_big/config.json +9 -0
- transcoder_all/layer_2_width_16k_l0_big/config.json +9 -0
- transcoder_all/layer_2_width_262k_l0_small/config.json +9 -0
- transcoder_all/layer_2_width_262k_l0_small_affine/config.json +9 -0
- transcoder_all/layer_32_width_262k_l0_small_affine/config.json +9 -0
- transcoder_all/layer_33_width_262k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_36_width_16k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_37_width_16k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_38_width_16k_l0_small_affine/config.json +9 -0
- transcoder_all/layer_39_width_262k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_3_width_16k_l0_small/config.json +9 -0
- transcoder_all/layer_42_width_262k_l0_big/config.json +9 -0
- transcoder_all/layer_43_width_262k_l0_big/config.json +9 -0
- transcoder_all/layer_43_width_262k_l0_small/config.json +9 -0
- transcoder_all/layer_44_width_16k_l0_small/config.json +9 -0
- transcoder_all/layer_46_width_16k_l0_small/config.json +9 -0
- transcoder_all/layer_46_width_262k_l0_big/config.json +9 -0
- transcoder_all/layer_47_width_16k_l0_big/config.json +9 -0
- transcoder_all/layer_47_width_16k_l0_small/config.json +9 -0
- transcoder_all/layer_48_width_262k_l0_small/config.json +9 -0
- transcoder_all/layer_49_width_262k_l0_big/config.json +9 -0
- transcoder_all/layer_49_width_262k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_49_width_262k_l0_small/config.json +9 -0
- transcoder_all/layer_50_width_16k_l0_small/config.json +9 -0
- transcoder_all/layer_51_width_16k_l0_small_affine/config.json +9 -0
- transcoder_all/layer_55_width_16k_l0_small/config.json +9 -0
- transcoder_all/layer_56_width_262k_l0_small_affine/config.json +9 -0
- transcoder_all/layer_59_width_262k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_5_width_16k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_5_width_262k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_5_width_262k_l0_small_affine/config.json +9 -0
- transcoder_all/layer_61_width_262k_l0_big/config.json +9 -0
- transcoder_all/layer_61_width_262k_l0_big/params.safetensors +0 -0
- transcoder_all/layer_7_width_16k_l0_big/config.json +9 -0
- transcoder_all/layer_7_width_16k_l0_big/params.safetensors +0 -0
- transcoder_all/layer_7_width_16k_l0_big_affine/config.json +9 -0
- transcoder_all/layer_7_width_16k_l0_big_affine/params.safetensors +0 -0
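The listing above gives one directory per transcoder variant, named layer_<N>_width_<16k|262k>_l0_<small|big>[_affine], each holding a config.json and, for some entries in this commit, a params.safetensors weight file. As a hedged illustration only, the sketch below enumerates the configs and peeks at the weight files; the config keys match the files added here, but the tensor names and shapes inside params.safetensors are not documented in this commit and would need to be checked against the actual files.

import json
from pathlib import Path

from safetensors import safe_open  # pip install safetensors

root = Path("transcoder_all")
for config_path in sorted(root.glob("layer_*/config.json")):
    cfg = json.loads(config_path.read_text())
    print(
        f"{config_path.parent.name}: {cfg['hf_hook_point_in']} -> {cfg['hf_hook_point_out']}, "
        f"width={cfg['width']}, l0={cfg['l0']}, affine={cfg['affine_connection']}"
    )
    params_path = config_path.parent / "params.safetensors"
    if params_path.exists():
        # Inspect tensor names and shapes without loading the full weights into memory.
        with safe_open(params_path, framework="np") as tensors:
            for name in tensors.keys():
                print("   ", name, tensors.get_slice(name).get_shape())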
transcoder_all/layer_10_width_262k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.10.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.10.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 89,
+  "affine_connection": true
+}
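Every config in this commit follows the same nine-key schema shown above: the input and output hook points (the pre- and post-feedforward layernorm outputs of one Gemma layer), the dictionary width, the base model, the activation architecture ("jump_relu"), the trained L0, and whether an affine skip connection is used. As a rough, hedged sketch only, the module below shows what such a transcoder might compute, assuming the common JumpReLU formulation with a learned per-feature threshold and an optional affine input-to-output skip; the actual parameter names, shapes, and training details behind params.safetensors are not specified in this commit.

import torch
import torch.nn as nn

class JumpReLUTranscoder(nn.Module):
    """Hypothetical JumpReLU transcoder matching the config fields; not the reference implementation."""

    def __init__(self, d_model: int, width: int, affine_connection: bool):
        super().__init__()
        self.w_enc = nn.Parameter(torch.zeros(d_model, width))
        self.b_enc = nn.Parameter(torch.zeros(width))
        self.w_dec = nn.Parameter(torch.zeros(width, d_model))
        self.b_dec = nn.Parameter(torch.zeros(d_model))
        # JumpReLU: learned per-feature activation threshold.
        self.threshold = nn.Parameter(torch.zeros(width))
        # Optional affine skip connection from the input hook point to the output.
        self.affine = nn.Linear(d_model, d_model) if affine_connection else None

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        pre_acts = x @ self.w_enc + self.b_enc
        # Zero out features at or below their threshold; the number of survivors is the L0.
        acts = pre_acts * (pre_acts > self.threshold)
        out = acts @ self.w_dec + self.b_dec
        if self.affine is not None:
            out = out + self.affine(x)
        return out

Under this reading, "width" is the number of dictionary features (16384 or 262144), "l0" is the average number of active features per token the threshold was trained toward, and "affine_connection" toggles the skip path.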
transcoder_all/layer_12_width_16k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.12.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.12.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 94,
+  "affine_connection": true
+}
transcoder_all/layer_14_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.14.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.14.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 16,
+  "affine_connection": false
+}
transcoder_all/layer_15_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.15.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.15.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 17,
+  "affine_connection": false
+}
transcoder_all/layer_1_width_262k_l0_small_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.1.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.1.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 10,
+  "affine_connection": true
+}
transcoder_all/layer_21_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.21.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.21.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_22_width_16k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": true
+}
transcoder_all/layer_23_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.23.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.23.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": false
+}
transcoder_all/layer_27_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.27.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.27.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_28_width_16k_l0_small_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.28.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.28.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": true
+}
transcoder_all/layer_28_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.28.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.28.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": false
+}
transcoder_all/layer_2_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.2.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.2.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 65,
+  "affine_connection": false
+}
transcoder_all/layer_2_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.2.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.2.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 10,
+  "affine_connection": false
+}
transcoder_all/layer_2_width_262k_l0_small_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.2.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.2.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 10,
+  "affine_connection": true
+}
transcoder_all/layer_32_width_262k_l0_small_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.32.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.32.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": true
+}
transcoder_all/layer_33_width_262k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.33.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.33.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": true
+}
transcoder_all/layer_36_width_16k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.36.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.36.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": true
+}
transcoder_all/layer_37_width_16k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.37.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.37.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": true
+}
transcoder_all/layer_38_width_16k_l0_small_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.38.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.38.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": true
+}
transcoder_all/layer_39_width_262k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.39.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.39.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": true
+}
transcoder_all/layer_3_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.3.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.3.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 11,
+  "affine_connection": false
+}
transcoder_all/layer_42_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.42.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.42.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": false
+}
transcoder_all/layer_43_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.43.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.43.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": false
+}
transcoder_all/layer_43_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.43.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.43.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_44_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.44.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.44.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_46_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.46.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.46.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_46_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.46.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.46.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": false
+}
transcoder_all/layer_47_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.47.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.47.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": false
+}
transcoder_all/layer_47_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.47.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.47.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_48_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.48.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.48.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_49_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.49.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.49.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": false
+}
transcoder_all/layer_49_width_262k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.49.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.49.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": true
+}
transcoder_all/layer_49_width_262k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.49.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.49.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_50_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.50.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.50.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_51_width_16k_l0_small_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.51.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.51.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": true
+}
transcoder_all/layer_55_width_16k_l0_small/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.55.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.55.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": false
+}
transcoder_all/layer_56_width_262k_l0_small_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.56.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.56.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 20,
+  "affine_connection": true
+}
transcoder_all/layer_59_width_262k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.59.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.59.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": true
+}
transcoder_all/layer_5_width_16k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.5.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.5.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 74,
+  "affine_connection": true
+}
transcoder_all/layer_5_width_262k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.5.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.5.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 74,
+  "affine_connection": true
+}
transcoder_all/layer_5_width_262k_l0_small_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.5.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.5.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 12,
+  "affine_connection": true
+}
transcoder_all/layer_61_width_262k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.61.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.61.post_feedforward_layernorm.output",
+  "width": 262144,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 120,
+  "affine_connection": false
+}
transcoder_all/layer_61_width_262k_l0_big/params.safetensors
ADDED
File without changes
transcoder_all/layer_7_width_16k_l0_big/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.7.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 80,
+  "affine_connection": false
+}
transcoder_all/layer_7_width_16k_l0_big/params.safetensors
ADDED
File without changes
transcoder_all/layer_7_width_16k_l0_big_affine/config.json
ADDED
@@ -0,0 +1,9 @@
+{
+  "hf_hook_point_in": "model.layers.7.pre_feedforward_layernorm.output",
+  "hf_hook_point_out": "model.layers.7.post_feedforward_layernorm.output",
+  "width": 16384,
+  "model_name": "gemma-v3-27b-it",
+  "architecture": "jump_relu",
+  "l0": 80,
+  "affine_connection": true
+}
transcoder_all/layer_7_width_16k_l0_big_affine/params.safetensors
ADDED
File without changes
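For completeness, a hedged sketch of how the hook-point strings in these configs might be resolved against the Hugging Face model: each string names a submodule plus ".output", i.e. the tensor that module returns during the forward pass. The checkpoint id and the exact submodule layout of the loaded class are assumptions here, not something this commit specifies.

import torch
from transformers import AutoModelForCausalLM

# Assumed checkpoint id for "gemma-v3-27b-it"; adjust to the model actually used.
model = AutoModelForCausalLM.from_pretrained("google/gemma-3-27b-it", torch_dtype=torch.bfloat16)

captured = {}

def make_hook(name):
    def hook(module, args, output):
        captured[name] = output  # activation at this hook point
    return hook

hook_point = "model.layers.10.pre_feedforward_layernorm.output"
# Strip the trailing ".output" to get the submodule path, then attach a forward hook.
# Depending on the transformers version and model class, the text layers may sit
# under a different prefix (e.g. "model.language_model.layers"); adjust as needed.
submodule = model.get_submodule(hook_point.removesuffix(".output"))
handle = submodule.register_forward_hook(make_hook(hook_point))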