---
# Example: Full ablation — all strategies on GPT-2
# Run with: obliteratus run examples/full_study.yaml

# Model under study. Loaded on CPU in full precision so results are
# reproducible on machines without a GPU.
model:
  name: gpt2
  task: causal_lm
  dtype: float32
  device: cpu

# Evaluation corpus. max_samples caps the number of text rows drawn from
# the split to keep the run short.
dataset:
  name: wikitext
  subset: wikitext-2-raw-v1
  split: test
  text_column: text
  max_samples: 50

# Ablation strategies to run, in order. An empty params map means the
# strategy uses its defaults.
strategies:
  - name: layer_removal
    params: {}
  - name: head_pruning
    params: {}
  - name: ffn_ablation
    params: {}
  - name: embedding_ablation
    params:
      chunk_size: 48  # ablate 48 dims at a time (GPT-2 has 768)

# Metrics computed for every ablation variant.
metrics:
  - perplexity

# Evaluation settings shared by all strategies.
batch_size: 4
max_length: 256  # token truncation length per sample
output_dir: results/gpt2_full