---
# Configuration for the fine-tuning comparison experiments
# (training hyperparameters, dataset, PEFT settings, evaluation models, W&B).
training:
  # Core training-loop hyperparameters.
  num_epochs: 3
  batch_size: 16
  # Written as 2.0e-5, not 2e-5: YAML 1.1 resolvers (e.g. PyYAML's default
  # loader) require a digit before the exponent, so a bare "2e-5" loads as
  # the STRING "2e-5" instead of a float and breaks numeric consumers.
  learning_rate: 2.0e-5

dataset:
  # Hugging Face `datasets` identifier — presumably; confirm against loader code.
  name: imdb
  # Quoted: the value contains "[", ":" and "%" — all YAML indicator
  # characters. Quoting guarantees it always loads as the literal string
  # "train[:10%]" (a datasets slicing expression: first 10% of the train split).
  split: "train[:10%]"

model:
  # Per-method parameter-efficient fine-tuning settings.
  adapter:
    # Bottleneck down-projection factor (adapter width = hidden_size /
    # reduction_factor — presumably; confirm against the trainer code).
    reduction_factor: 16
  lora:
    r: 4        # LoRA rank of the low-rank update matrices
    alpha: 32   # LoRA scaling factor (effective scale is alpha / r)
  student:
    # NOTE(review): looks like a distillation student model config — confirm.
    hidden_size: 384

evaluation:
  # Checkpoints to compare. Entries are full Hugging Face Hub IDs: models
  # living under an organization namespace do not resolve without the "org/"
  # prefix, so the bare forms previously listed here would fail to download.
  models:
    - bert-base-uncased
    - distilbert-base-uncased
    - roberta-base
    - gpt2
    - facebook/bart-base                  # was "bart-base" — not a valid Hub ID
    - google/electra-small-discriminator  # was missing the "google/" namespace
    - t5-small
    - xlm-roberta-base
    - albert-base-v2
    - xlnet-base-cased
    - microsoft/deberta-base              # was missing the "microsoft/" namespace
    - camembert-base
    - Helsinki-NLP/opus-mt-en-de          # was "marianmt-en-de" — MarianMT Hub IDs use opus-mt-*
    - facebook/m2m100_418M                # was missing the "facebook/" namespace

wandb:
  # Weights & Biases experiment-tracking settings.
  project: fine_tuning_comparison
  # Placeholder — replace with your actual W&B entity (username or team name)
  # before running, or runs will fail to log / land in the wrong workspace.
  entity: your_wandb_username