{
"crosscoder_kind": "multilayer_sparc",
"extract_side": "aligned",
"base_model": "meta-llama/Llama-3.2-3B-Instruct",
"aligned_model": "MInAlA/Llama-3.2-3B-Instruct-PPO-merged",
"aligned_run_id": "llama32-3b-ppo",
"layers": [
10,
11,
12
],
"center_layer": null,
"layer_window": 1,
"layer_policy": "matched_aligned_window",
"position": "last_prompt",
"dataset_name": "argilla/ultrafeedback-binarized-preferences-cleaned",
"max_prompt_tokens": 512,
"peft": false,
"topk_mode": "model_balanced_layer_agg",
"activation_artifact": "output/aligned_activations/llama32-3b-ppo/activations/aligned_activations.pt"
}
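
For reference, a minimal sketch of how this config and the activation artifact it points to might be loaded for downstream analysis. The config filename and the internal layout of the `.pt` artifact (a dict keyed by layer vs. a single tensor) are assumptions, so the sketch inspects the artifact before indexing into it.

```python
import json

import torch

# Load the extraction config; the filename is an assumption — adjust to
# wherever this JSON file lives in the repo.
with open("extraction_config.json") as f:
    cfg = json.load(f)

print(f"crosscoder kind : {cfg['crosscoder_kind']}")
print(f"extract side    : {cfg['extract_side']} at layers {cfg['layers']}")
print(f"position        : {cfg['position']} of prompts from {cfg['dataset_name']}")

# Load the saved activations on CPU. The artifact's internal structure is
# not specified by the config, so inspect whatever torch.load returns.
acts = torch.load(cfg["activation_artifact"], map_location="cpu")
if isinstance(acts, dict):
    for key, value in acts.items():
        desc = tuple(value.shape) if torch.is_tensor(value) else type(value).__name__
        print(f"{key}: {desc}")
elif torch.is_tensor(acts):
    print(f"single tensor of shape {tuple(acts.shape)}")
else:
    print(f"unexpected artifact type: {type(acts).__name__}")
```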