{
  "batch_size": 16,
  "deprel": "full",
  "dev": [
    "la_giuseppe/dev.conllu"
  ],
  "dropout": 0.5,
  "embed_tags": [],
  "epochs": 100,
  "epochs_frozen": 20,
  "exp": "giuseppe_killer",
  "label_smoothing": 0.03,
  "learning_rate": 2e-05,
  "learning_rate_decay": "cos",
  "learning_rate_warmup": 2000,
  "load": [],
  "logdir": "logs/giuseppe_killer-latinpipe_evalatin24-7099847-250219_111733-s42",
  "max_train_sentence_len": 150,
  "optimizer": "adam",
  "parse": 1,
  "parse_attention_dim": 512,
  "rnn_dim": 512,
  "rnn_layers": 2,
  "rnn_type": "LSTMTorch",
  "save_checkpoint": true,
  "script": "latinpipe_evalatin24.py",
  "seed": 42,
  "single_root": 1,
  "steps_per_epoch": 1000,
  "subword_combination": "first",
  "tags": [
    2,
    1,
    4
  ],
  "task_hidden_layer": 2048,
  "test": [
    "la_giuseppe/test.conllu"
  ],
  "threads": 4,
  "train": [
    "la_giuseppe/train.conllu"
  ],
  "train_sampling_exponent": 0.5,
  "transformers": [
    "bowphs/LaBerta",
    "bowphs/PhilBerta",
    "FacebookAI/xlm-roberta-large"
  ],
  "treebank_ids": false,
  "verbose": 2,
  "wandb": false,
  "word_masking": null
}