{
  "crosscoder_kind": "multilayer_sparc",
  "extract_side": "aligned",
  "base_model": "HuggingFaceTB/SmolLM3-3B",
  "aligned_model": "MInAlA/SmolLM3-3B-PPO-merged",
  "aligned_run_id": "smollm3-ppo",
  "layers": [
    17,
    18,
    19
  ],
  "center_layer": null,
  "layer_window": 1,
  "layer_policy": "matched_aligned_window",
  "position": "last_prompt",
  "dataset_name": "argilla/ultrafeedback-binarized-preferences-cleaned",
  "max_prompt_tokens": 512,
  "peft": false,
  "topk_mode": "model_balanced_layer_agg",
  "activation_artifact": "output/aligned_activations/smollm3-ppo/activations/aligned_activations.pt"
}