{
    "task_name": "mrpc",
    "train_file": null,
    "validation_file": null,
    "max_length": 300,
    "pad_to_max_length": false,
    "model_name_or_path": "roberta-base",
    "use_slow_tokenizer": false,
    "per_device_train_batch_size": 8,
    "per_device_eval_batch_size": 8,
    "learning_rate": 5e-05,
    "weight_decay": 0.0,
    "num_train_epochs": 3,
    "max_train_steps": 10000,
    "peft_method": null,
    "gradient_accumulation_steps": 1,
    "lr_scheduler_type": "linear",
    "num_warmup_steps": 0,
    "output_dir": "./outputs",
    "seed": 65,
    "push_to_hub": false,
    "hub_model_id": null,
    "hub_token": null,
    "checkpointing_steps": "1000",
    "resume_from_checkpoint": null,
    "with_tracking": false,
    "report_to": "all",
    "ignore_mismatched_sizes": true,
    "save": false,
    "load_step": 999,
    "lora_r": 8,
    "lora_alpha": 16,
    "lora_dropout": 0.1,
    "laplace_hessian": "kron",
    "laplace_sub": "all",
    "laplace_prior": "homo",
    "laplace_optim_step": 1000,
    "testing_set": "train_val",
    "cache_dir": "/content/cache/huggingface/metrics/",
    "laplace_predict": "mc_corr"
}