Upload folder using huggingface_hub
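
The commit title above is the default message emitted by huggingface_hub's folder-upload API. A minimal sketch of how a commit like this is produced; the repo id is a placeholder, and uploading the checkpoint directory under its own name is an assumption not recorded in the diff:

from huggingface_hub import HfApi

api = HfApi()
# "<user>/<repo>" is a placeholder; path_in_repo mirrors the directory
# name that appears in this diff.
api.upload_folder(
    repo_id="<user>/<repo>",
    folder_path="checkpoints_google_gemma-2b_blocks.12._mul_fractal_topk_l32",
    path_in_repo="checkpoints_google_gemma-2b_blocks.12._mul_fractal_topk_l32",
)
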
checkpoints_google_gemma-2b_blocks.12._mul_fractal_topk_l32/config.json
ADDED
@@ -0,0 +1,53 @@
+{
+    "model_name": "google/gemma-2b",
+    "layer": 12,
+    "hook_point": "resid_post",
+    "act_size": 2048,
+    "sae_type": "mul_fractal_topk",
+    "dict_size": 65536,
+    "aux_penalty": 0.03125,
+    "input_unit_norm": true,
+    "batch_norm_on_queries": false,
+    "affine_batch_norm": false,
+    "linear_heads": 0,
+    "topk2": 32,
+    "topk1": 50,
+    "topk2_warmup_steps_fraction": 0.0,
+    "start_topk2": 32,
+    "topk1_warmup_steps_fraction": 0.0,
+    "start_topk1": 50,
+    "topk2_aux": 512,
+    "cartesian_op": "mul",
+    "router_depth": 2,
+    "router_tree_width": null,
+    "num_mkeys": 2,
+    "num_nkeys": 8,
+    "num_heads": 4096,
+    "n_batches_to_dead": 10,
+    "lr": 0.0008,
+    "bandwidth": 0.001,
+    "l1_coeff": 0.0018,
+    "num_tokens": 1000000000,
+    "seq_len": 1024,
+    "model_batch_size": 64,
+    "num_batches_in_buffer": 5,
+    "max_grad_norm": 1.0,
+    "batch_size": 8192,
+    "weight_decay": 0.0,
+    "warmup_fraction": 0.1,
+    "scheduler_type": "linear",
+    "device": "cuda",
+    "dtype": "torch.float32",
+    "sae_dtype": "torch.float32",
+    "dataset_path": "HuggingFaceFW/fineweb-edu",
+    "wandb_project": "kron-sae",
+    "hf_space_id": "unsloth/Meta-Llama-3.1-8B",
+    "enable_wandb": false,
+    "enable_comet_ml": true,
+    "sae_name": "sae",
+    "seed": 42,
+    "performance_log_steps": 100,
+    "save_checkpoint_steps": 90000000,
+    "wandb_run_suffix": "bench_exp10",
+    "sweep_pair": null
+}
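
The config records how the sparse autoencoder was trained: a mul_fractal_topk SAE on layer-12 resid_post activations of google/gemma-2b (act_size 2048), with a 65,536-entry dictionary and two-level top-k sparsity (topk1 = 50, topk2 = 32), trained on 1B tokens of HuggingFaceFW/fineweb-edu. A minimal sketch that loads the file and sanity-checks the geometry; reading num_heads x num_mkeys x num_nkeys as the dictionary factorisation is an assumption about the kron-sae code, though the numbers do multiply out (4096 * 2 * 8 = 65536):

import json

with open(
    "checkpoints_google_gemma-2b_blocks.12._mul_fractal_topk_l32/config.json"
) as f:
    cfg = json.load(f)

# Assumed factorisation: dict_size == num_heads * num_mkeys * num_nkeys.
assert cfg["dict_size"] == cfg["num_heads"] * cfg["num_mkeys"] * cfg["num_nkeys"]
print(cfg["sae_type"], cfg["layer"], cfg["topk1"], cfg["topk2"])
# -> mul_fractal_topk 12 50 32
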
checkpoints_google_gemma-2b_blocks.12._mul_fractal_topk_l32/sae.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8f5c69320012b7d66dade9e90bfc5e5692db14a63b3c45159775ad37d01300fa
+size 872852253
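
sae.pt is tracked with Git LFS, so the diff records only a pointer: the sha256 of the real payload and its size (872,852,253 bytes, about 0.87 GB). A minimal retrieval-and-verification sketch; the repo id is a placeholder, and treating the file as a torch-loadable checkpoint is an assumption about how the kron-sae trainer saved it:

import hashlib

import torch
from huggingface_hub import hf_hub_download

# "<user>/<repo>" is a placeholder for the Hub repository this commit lives in.
path = hf_hub_download(
    repo_id="<user>/<repo>",
    filename="checkpoints_google_gemma-2b_blocks.12._mul_fractal_topk_l32/sae.pt",
)

# Verify the download against the sha256 recorded in the LFS pointer above.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == "8f5c69320012b7d66dade9e90bfc5e5692db14a63b3c45159775ad37d01300fa"

# Assumption: the checkpoint is torch-loadable; whether it holds a bare
# state_dict or a pickled module depends on the training code that saved it.
state = torch.load(path, map_location="cpu")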