causal_sae / cfg.json
Upload folder using huggingface_hub
3911d7f verified
{"d_in": 2304, "d_sae": 16384, "dtype": "float32", "device": "cuda", "apply_b_dec_to_input": false, "normalize_activations": "none", "reshape_activations": "none", "metadata": {"sae_lens_version": "6.6.3", "sae_lens_training_version": "6.6.3", "dataset_path": "monology/pile-uncopyrighted", "hook_name": "blocks.12.hook_resid_post", "model_name": "google/gemma-2-2b", "model_class_name": "HookedTransformer", "hook_head_index": null, "context_size": 512, "seqpos_slice": [null], "model_from_pretrained_kwargs": {"center_writing_weights": false}, "prepend_bos": true, "exclude_special_tokens": false, "sequence_separator_token": "bos", "disable_concat_sequences": false}, "decoder_init_norm": 0.1, "discrete_threshold": 1e-05, "top_k": 8, "l1_coefficient": 1.0, "architecture": "causal"}