SAELens
CallumMcDougallGDM committed (verified)
Commit 77c2870 · Parent: 855a36a

Add files using upload-large-folder tool

transcoder_all/layer_21_width_262k_l0_big_affine/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.21.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.21.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": true
+  "affine_connection": true,
+  "type": "transcoder"
 }
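
Every config in this commit has the same shape, so one worked example covers them all. Below is a minimal sketch of reading one of these files; the usage and the field glosses are assumptions inferred from the field names, not SAELens's own loader API.

import json

# Minimal sketch: parse one of the transcoder configs touched by this commit.
# Field meanings in the comments are inferred from the names, not from docs.
with open("transcoder_all/layer_21_width_262k_l0_big_affine/config.json") as f:
    cfg = json.load(f)

# hf_hook_point_in/out: HF module outputs the transcoder reads from / writes to
# width: number of latent features (16384 or 262144 in this commit)
# model_name: full Hugging Face repo id, e.g. "google/gemma-3-12b-pt"
# architecture: activation type ("jump_relu")
# l0: target number of active features per token
# affine_connection: whether an affine skip connection is used
# type: field added by this commit, marking the artifact as a transcoder
print(cfg["model_name"], cfg["width"], cfg["l0"], cfg["type"])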
transcoder_all/layer_22_width_16k_l0_big/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.22.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.22.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "transcoder"
 }
transcoder_all/layer_23_width_16k_l0_small/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.23.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.23.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 20,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "transcoder"
 }
transcoder_all/layer_29_width_262k_l0_small/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.29.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.29.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 20,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "transcoder"
 }
transcoder_all/layer_38_width_16k_l0_big_affine/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.38.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.38.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": true
+  "affine_connection": true,
+  "type": "transcoder"
 }
transcoder_all/layer_41_width_262k_l0_big/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.41.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.41.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": false
+  "affine_connection": false,
+  "type": "transcoder"
 }
transcoder_all/layer_43_width_16k_l0_big_affine/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.43.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.43.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": true
+  "affine_connection": true,
+  "type": "transcoder"
 }
transcoder_all/layer_45_width_262k_l0_big_affine/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.45.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.45.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": true
+  "affine_connection": true,
+  "type": "transcoder"
 }
transcoder_all/layer_46_width_262k_l0_small_affine/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.46.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.46.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 20,
-  "affine_connection": true
+  "affine_connection": true,
+  "type": "transcoder"
 }
transcoder_all/layer_47_width_16k_l0_big_affine/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.47.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.47.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 120,
-  "affine_connection": true
+  "affine_connection": true,
+  "type": "transcoder"
 }
transcoder_all/layer_5_width_16k_l0_big_affine/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.5.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.5.post_feedforward_layernorm.output",
   "width": 16384,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 78,
-  "affine_connection": true
+  "affine_connection": true,
+  "type": "transcoder"
 }
transcoder_all/layer_8_width_262k_l0_big_affine/config.json CHANGED
@@ -2,8 +2,9 @@
   "hf_hook_point_in": "model.layers.8.pre_feedforward_layernorm.output",
   "hf_hook_point_out": "model.layers.8.post_feedforward_layernorm.output",
   "width": 262144,
-  "model_name": "gemma-v3-12b-pt",
+  "model_name": "google/gemma-3-12b-pt",
   "architecture": "jump_relu",
   "l0": 90,
-  "affine_connection": true
+  "affine_connection": true,
+  "type": "transcoder"
 }
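
All twelve files receive the same two-line change: "model_name" is expanded to the full Hugging Face repo id, and a "type" field is added. A hypothetical check script (not part of SAELens) that confirms the whole folder matches the new schema:

import json
from pathlib import Path

# Hypothetical consistency check over the configs in this commit: every
# transcoder config should carry the full HF repo id and the new "type" field.
for path in sorted(Path("transcoder_all").glob("*/config.json")):
    cfg = json.loads(path.read_text())
    assert cfg["model_name"] == "google/gemma-3-12b-pt", path
    assert cfg["type"] == "transcoder", path
    print(f"{path.parent.name}: width={cfg['width']}, l0={cfg['l0']}, "
          f"affine={cfg['affine_connection']}")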