---
# Example: Full ablation — all strategies on GPT-2
# Run with: obliteratus run examples/full_study.yaml

# Model under study. CPU/float32 keeps the run reproducible on any machine.
model:
  name: gpt2
  task: causal_lm
  dtype: float32
  device: cpu

# Evaluation corpus; max_samples caps the number of text rows used.
dataset:
  name: wikitext
  subset: wikitext-2-raw-v1
  split: test
  text_column: text
  max_samples: 50

# Ablation strategies to run, in order. Empty params use each
# strategy's defaults.
strategies:
  - name: layer_removal
    params: {}
  - name: head_pruning
    params: {}
  - name: ffn_ablation
    params: {}
  - name: embedding_ablation
    params:
      chunk_size: 48  # ablate 48 dims at a time (GPT-2 has 768)

# Metrics computed after each ablation step.
metrics:
  - perplexity

# Evaluation batching and where results are written.
batch_size: 4
max_length: 256
output_dir: results/gpt2_full