{
"batch_size": 16,
"deprel": "full",
"dev": [
"la_giuseppe/dev.conllu"
],
"dropout": 0.5,
"embed_tags": [],
"epochs": 100,
"epochs_frozen": 20,
"exp": "giuseppe_killer",
"label_smoothing": 0.03,
"learning_rate": 2e-05,
"learning_rate_decay": "cos",
"learning_rate_warmup": 2000,
"load": [],
"logdir": "logs/giuseppe_killer-latinpipe_evalatin24-7099847-250219_111733-s42",
"max_train_sentence_len": 150,
"optimizer": "adam",
"parse": 1,
"parse_attention_dim": 512,
"rnn_dim": 512,
"rnn_layers": 2,
"rnn_type": "LSTMTorch",
"save_checkpoint": true,
"script": "latinpipe_evalatin24.py",
"seed": 42,
"single_root": 1,
"steps_per_epoch": 1000,
"subword_combination": "first",
"tags": [
2,
1,
4
],
"task_hidden_layer": 2048,
"test": [
"la_giuseppe/test.conllu"
],
"threads": 4,
"train": [
"la_giuseppe/train.conllu"
],
"train_sampling_exponent": 0.5,
"transformers": [
"bowphs/LaBerta",
"bowphs/PhilBerta",
"FacebookAI/xlm-roberta-large"
],
"treebank_ids": false,
"verbose": 2,
"wandb": false,
"word_masking": null
}