SAELens
CallumMcDougallGDM committed
Commit 36765ee · verified · 1 parent: ebb94ec

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. transcoder/layer_17_width_16k_l0_big/config.json +10 -0
  2. transcoder/layer_17_width_16k_l0_big_affine/config.json +10 -0
  3. transcoder/layer_17_width_16k_l0_medium_affine/config.json +10 -0
  4. transcoder/layer_17_width_16k_l0_small/config.json +10 -0
  5. transcoder/layer_17_width_262k_l0_big/config.json +10 -0
  6. transcoder/layer_17_width_262k_l0_big_affine/config.json +10 -0
  7. transcoder/layer_17_width_262k_l0_medium/config.json +10 -0
  8. transcoder/layer_17_width_262k_l0_medium_affine/config.json +10 -0
  9. transcoder/layer_17_width_262k_l0_small_affine/config.json +10 -0
  10. transcoder/layer_17_width_65k_l0_big/config.json +10 -0
  11. transcoder/layer_17_width_65k_l0_big_affine/config.json +10 -0
  12. transcoder/layer_17_width_65k_l0_medium/config.json +10 -0
  13. transcoder/layer_17_width_65k_l0_medium_affine/config.json +10 -0
  14. transcoder/layer_17_width_65k_l0_small/config.json +10 -0
  15. transcoder/layer_17_width_65k_l0_small_affine/config.json +10 -0
  16. transcoder/layer_22_width_16k_l0_big/config.json +10 -0
  17. transcoder/layer_22_width_16k_l0_medium/config.json +10 -0
  18. transcoder/layer_22_width_16k_l0_small/config.json +10 -0
  19. transcoder/layer_22_width_16k_l0_small_affine/config.json +10 -0
  20. transcoder/layer_22_width_262k_l0_big/config.json +10 -0
  21. transcoder/layer_22_width_262k_l0_big_affine/config.json +10 -0
  22. transcoder/layer_22_width_262k_l0_medium/config.json +10 -0
  23. transcoder/layer_22_width_262k_l0_medium_affine/config.json +10 -0
  24. transcoder/layer_22_width_262k_l0_small_affine/config.json +10 -0
  25. transcoder/layer_22_width_65k_l0_big/config.json +10 -0
  26. transcoder/layer_22_width_65k_l0_big_affine/config.json +10 -0
  27. transcoder/layer_22_width_65k_l0_medium/config.json +10 -0
  28. transcoder/layer_22_width_65k_l0_medium_affine/config.json +10 -0
  29. transcoder/layer_22_width_65k_l0_small/config.json +10 -0
  30. transcoder/layer_22_width_65k_l0_small_affine/config.json +10 -0
  31. transcoder/layer_29_width_16k_l0_big/config.json +10 -0
  32. transcoder/layer_29_width_16k_l0_big_affine/config.json +10 -0
  33. transcoder/layer_29_width_16k_l0_medium/config.json +10 -0
  34. transcoder/layer_29_width_16k_l0_medium_affine/config.json +10 -0
  35. transcoder/layer_29_width_16k_l0_small/config.json +10 -0
  36. transcoder/layer_29_width_16k_l0_small_affine/config.json +10 -0
  37. transcoder/layer_29_width_262k_l0_big/config.json +10 -0
  38. transcoder/layer_29_width_262k_l0_big_affine/config.json +10 -0
  39. transcoder/layer_29_width_262k_l0_medium/config.json +10 -0
  40. transcoder/layer_29_width_262k_l0_medium_affine/config.json +10 -0
  41. transcoder/layer_29_width_262k_l0_small/config.json +10 -0
  42. transcoder/layer_29_width_65k_l0_big/config.json +10 -0
  43. transcoder/layer_29_width_65k_l0_big_affine/config.json +10 -0
  44. transcoder/layer_29_width_65k_l0_medium/config.json +10 -0
  45. transcoder/layer_29_width_65k_l0_medium_affine/config.json +10 -0
  46. transcoder/layer_29_width_65k_l0_small/config.json +10 -0
  47. transcoder/layer_29_width_65k_l0_small_affine/config.json +10 -0
  48. transcoder/layer_9_width_16k_l0_big/config.json +10 -0
  49. transcoder/layer_9_width_16k_l0_big_affine/config.json +10 -0
  50. transcoder/layer_9_width_16k_l0_medium/config.json +10 -0
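
The directory names follow one pattern: the Gemma-3 layer the transcoder attaches to, the latent width (16k = 16,384, 65k = 65,536, 262k = 262,144 features), an L0 bucket (small, medium, big; roughly 20, 60 and 150 active latents per token, with slightly different targets at layer 9), and an optional _affine suffix for variants whose config sets "affine_connection": true. A minimal parsing sketch (hypothetical helper, not part of SAELens or this repo):

import re
from typing import NamedTuple

class TranscoderSpec(NamedTuple):
    layer: int       # decoder layer the transcoder is attached to
    width: int       # number of latent features
    l0_bucket: str   # "small" (~20), "medium" (~60) or "big" (~150) active latents per token
    affine: bool     # True for the *_affine variants ("affine_connection": true)

# Hypothetical helper: parse a directory name such as
# "layer_17_width_65k_l0_medium_affine" into its components.
_NAME = re.compile(r"layer_(\d+)_width_(\d+)k_l0_(small|medium|big)(_affine)?")
_WIDTHS = {"16": 16_384, "65": 65_536, "262": 262_144}  # values match the config files below

def parse_name(name: str) -> TranscoderSpec:
    m = _NAME.fullmatch(name)
    if m is None:
        raise ValueError(f"unrecognised transcoder directory name: {name!r}")
    layer, width_k, bucket, affine = m.groups()
    return TranscoderSpec(int(layer), _WIDTHS[width_k], bucket, affine is not None)

print(parse_name("layer_22_width_262k_l0_small_affine"))
# -> TranscoderSpec(layer=22, width=262144, l0_bucket='small', affine=True)
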
transcoder/layer_17_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
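
Each config.json records where the transcoder sits in the Hugging Face model graph: hf_hook_point_in names the activation it reads (the output of the layer's pre_feedforward_layernorm) and hf_hook_point_out the activation it is trained to reproduce (the output of post_feedforward_layernorm). The sketch below captures both activations with plain PyTorch forward hooks; the loading call and the assumption that these module paths resolve directly on the loaded model are mine, not something the configs specify.

import json
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Illustrative sketch only: read one config and hook the named modules.
with open("transcoder/layer_17_width_16k_l0_big/config.json") as f:
    cfg = json.load(f)

tokenizer = AutoTokenizer.from_pretrained(cfg["model_name"])
# Assumption: the text model loads this way and exposes the "model.layers.N..." paths.
model = AutoModelForCausalLM.from_pretrained(cfg["model_name"], torch_dtype=torch.bfloat16)
model.eval()

captured = {}

def save_to(key):
    def hook(module, inputs, output):
        captured[key] = output.detach()  # activation tensor at this hook point
    return hook

# The ".output" suffix in the config is taken to mean "the output of that module".
for key in ("hf_hook_point_in", "hf_hook_point_out"):
    module_path = cfg[key].removesuffix(".output")
    model.get_submodule(module_path).register_forward_hook(save_to(key))

with torch.no_grad():
    model(**tokenizer("Transcoders replace the MLP computation.", return_tensors="pt"))

# captured["hf_hook_point_in"]  : what the transcoder reads (pre-MLP norm output)
# captured["hf_hook_point_out"] : what it is trained to reconstruct (post-MLP norm output)
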
transcoder/layer_17_width_16k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_16k_l0_medium_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_262k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_262k_l0_medium_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_262k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_65k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_65k_l0_medium_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_17_width_65k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.17.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.17.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_16k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_262k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_262k_l0_medium_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_262k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_65k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_65k_l0_medium_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_22_width_65k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_16k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_16k_l0_medium_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_16k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_16k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_262k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_262k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_262k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_262k_l0_medium_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_262k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 262144,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_65k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_65k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 150,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_65k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_65k_l0_medium_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 60,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_65k_l0_small/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_29_width_65k_l0_small_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
+ "width": 65536,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 20,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_9_width_16k_l0_big/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.9.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 131,
+ "affine_connection": false,
+ "type": "transcoder"
+ }
transcoder/layer_9_width_16k_l0_big_affine/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.9.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 131,
+ "affine_connection": true,
+ "type": "transcoder"
+ }
transcoder/layer_9_width_16k_l0_medium/config.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "hf_hook_point_in": "model.layers.9.pre_feedforward_layernorm.output",
+ "hf_hook_point_out": "model.layers.9.post_feedforward_layernorm.output",
+ "width": 16384,
+ "model_name": "google/gemma-3-4b-pt",
+ "architecture": "jump_relu",
+ "l0": 53,
+ "affine_connection": false,
+ "type": "transcoder"
+ }